I am trying to copy all my posts from LiveJournal to my new blog at blogger.com. I do this using a slightly modified example that comes with the Python gdata client. I have a JSON file with all my posts exported from LiveJournal. The problem is that blogger.com has a daily limit of 50 new blog entries per day, so you can imagine that copying my 1300+ posts will take about a month, since I can't programmatically solve the captcha that appears after 50 imports.
I recently found out that there is also a batch mode somewhere in gdata, but I could not figure out how to use it. Googling really didn't help.
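For reference, here is what I pieced together from the generic batch classes in the gdata source (gdata.BatchFeed, gdata.BatchEntry). This is only a sketch of how GData batch requests are built in general: the /batch suffix on the posts URL is my guess, blogger_service and blog_id stand in for the authenticated service and blog ID from the code below, and I have no idea whether Blogger accepts batch feeds at all:

import gdata
import atom

# Collect several insert operations into one batch feed.
batch_feed = gdata.BatchFeed()
for i, post in enumerate(posts):  # posts: hypothetical list of dicts
    entry = gdata.BatchEntry()
    entry.title = atom.Title(title_type="xhtml", text=post["title"])
    entry.content = atom.Content(content_type="html", text=post["content"])
    batch_feed.AddInsert(entry=entry, batch_id_string=str(i))

# Send the whole feed in a single request to the (assumed) batch endpoint.
response_feed = blogger_service.Post(
    batch_feed,
    "/feeds/" + blog_id + "/posts/default/batch",  # URL is an assumption
    converter=gdata.BatchFeedFromString)

# Each result entry carries its batch id and a per-operation status.
for entry in response_feed.entry:
    print entry.batch_id.text, entry.batch_status.code

Is this the right direction, and if so, what is the correct batch URL for Blogger?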
Any advice or help would be greatly appreciated.
Thanks.
Update
Just in case, here is the code I use:
#!/usr/local/bin/python
import json
import time
from datetime import datetime as dt
from datetime import timedelta as td
from datetime import tzinfo as tz

import atom
import gdata
from gdata import service

allEntries = json.load(open("todays_copy.json", "r"))

class TZ(tz):
    """Fixed UTC-6 timezone for the imported timestamps."""
    def utcoffset(self, dt):
        return td(hours=-6)

class BloggerExample:
    def __init__(self, email, password):
        # Authenticate using ClientLogin.
        self.service = service.GDataService(email, password)
        self.service.source = "Blogger_Python_Sample-1.0"
        self.service.service = "blogger"
        self.service.server = "www.blogger.com"
        self.service.ProgrammaticLogin()

        # Get the blog ID for the first blog.
        feed = self.service.Get("/feeds/default/blogs")
        self_link = feed.entry[0].GetSelfLink()
        if self_link:
            self.blog_id = self_link.href.split("/")[-1]

    def CreatePost(self, title, content, author_name, label, pub_time):
        LABEL_SCHEME = "http://www.blogger.com/atom/ns#"
        # Create the entry to insert.
        entry = gdata.GDataEntry()
        entry.author.append(atom.Author(atom.Name(text=author_name)))
        entry.title = atom.Title(title_type="xhtml", text=title)
        entry.content = atom.Content(content_type="html", text=content)
        entry.published = atom.Published(pub_time)
        entry.category.append(atom.Category(scheme=LABEL_SCHEME, term=label))

        # Ask the service to insert the new entry.
        return self.service.Post(entry,
                                 "/feeds/" + self.blog_id + "/posts/default")

    def run(self, data):
        for year in data:
            for month in year["yearlydata"]:
                for day in month["monthlydata"]:
                    for entry in day["daylydata"]:
                        # Rebuild the original timestamp for the post.
                        atime = dt.strptime(entry["time"], "%I:%M %p")
                        ptime = dt(year["year"], int(month["month"]),
                                   int(day["day"]), atime.hour, atime.minute,
                                   0, tzinfo=TZ()).isoformat("T")
                        self.CreatePost(entry["title"], entry["content"],
                                        "My name", ",".join(entry["tags"]),
                                        ptime)
                        print "%s, %s - published, waiting 30 minutes" % (
                            ptime, entry["title"].encode("utf-8"))
                        time.sleep(30 * 60)

def main(data):
    email = "my@email.com"
    password = "MyPassW0rd"
    sample = BloggerExample(email, password)
    sample.run(data)

if __name__ == "__main__":
    main(allEntries)
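As a fallback, I am also considering making the script resumable, so that when the daily limit kicks in I can simply rerun it the next day instead of babysitting it. A minimal sketch, assuming the limit shows up as a gdata.service.RequestError (the flattened entries list, the "ptime" field, and the import_state.json checkpoint file are my own inventions):

import json
from gdata import service as gdata_service

def run_with_checkpoint(blogger, entries, state_file="import_state.json"):
    # entries: the nested JSON flattened into one chronological list,
    # each item carrying title/content/tags and a precomputed "ptime".
    try:
        done = json.load(open(state_file))["done"]
    except IOError:
        done = 0  # no checkpoint yet, start from the beginning
    for i, e in enumerate(entries):
        if i < done:
            continue  # already published on a previous run
        try:
            blogger.CreatePost(e["title"], e["content"], "My name",
                               ",".join(e["tags"]), e["ptime"])
        except gdata_service.RequestError, err:
            # Assumption: hitting the daily captcha limit raises RequestError.
            print "Stopped at entry %d: %s" % (i, err)
            break
        done = i + 1
        json.dump({"done": done}, open(state_file, "w"))

That would let me drop the 30-minute sleep: the script posts until Blogger refuses, then picks up where it left off on the next run.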
python batch-processing blogger
Kaster