+ lastsync = item['time']
+
+# The following code doesn't work because the server rejects our repeated calls.
+# http://www.livejournal.com/doc/server/ljp.csp.xml-rpc.getevents.html
+# contains the statement "You should use the syncitems selecttype in
+# conjuntions [sic] with the syncitems protocol mode", but provides
+# no other explanation about how these two function calls should
+# interact. Therefore we just do the above slow one-at-a-time method.
+
+#while True:
+# r = server.LJ.XMLRPC.getevents(dochallenge({
+# 'username': Username,
+# 'ver': 1,
+# 'selecttype': "syncitems",
+# 'lastsync': lastsync,
+# }, Password))
+# pprint.pprint(r)
+# if len(r['events']) == 0:
+# break
+# for item in r['events']:
+# writedump("%s/L-%d" % (Username, item['itemid']), item)
+# newentries += 1
+# lastsync = item['eventtime']
+
+print "Fetching journal comments for: %s" % Username
+
+try:
+ f = open("%s/comment.meta" % Username)
+ metacache = pickle.load(f)
+ f.close()
+except:
+ metacache = {}
+
+try:
+ f = open("%s/user.map" % Username)
+ usermap = pickle.load(f)
+ f.close()
+except:
+ usermap = {}
+
# First pass: page through the comment_meta export, starting just past the
# highest comment id recorded by the previous run, until we hold metadata
# for every comment the server reports.
maxid = lastmaxid
while True:
    request = urllib2.Request(
        "http://livejournal.com/export_comments.bml?get=comment_meta&startid=%d" % (maxid+1),
        headers = {'Cookie': "ljsession="+ljsession})
    response = urllib2.urlopen(request)
    meta = xml.dom.minidom.parse(response)
    response.close()
    # Record poster id and state for each comment on this page, advancing
    # the local high-water mark as we go.
    for node in meta.getElementsByTagName("comment"):
        cid = int(node.getAttribute("id"))
        metacache[cid] = {
            'posterid': node.getAttribute("posterid"),
            'state': node.getAttribute("state"),
        }
        maxid = max(maxid, cid)
    # Merge any posterid -> username mappings included with this page.
    for node in meta.getElementsByTagName("usermap"):
        usermap[node.getAttribute("id")] = node.getAttribute("user")
    # The server states the highest comment id it has; stop once we reach it.
    server_max = int(meta.getElementsByTagName("maxid")[0].firstChild.nodeValue)
    if maxid >= server_max:
        break
+
# Persist both caches so the next run can resume incrementally.
for path, cache in (("%s/comment.meta" % Username, metacache),
                    ("%s/user.map" % Username, usermap)):
    f = open(path, "w")
    pickle.dump(cache, f)
    f.close()
+
+newmaxid = maxid
+maxid = lastmaxid
+while True:
+ r = urllib2.urlopen(urllib2.Request("http://livejournal.com/export_comments.bml?get=comment_body&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
+ meta = xml.dom.minidom.parse(r)
+ r.close()
+ for c in meta.getElementsByTagName("comment"):
+ id = int(c.getAttribute("id"))
+ jitemid = c.getAttribute("jitemid")
+ comment = {
+ 'id': str(id),
+ 'parentid': c.getAttribute("parentid"),
+ 'subject': gettext(c.getElementsByTagName("subject")),
+ 'date': gettext(c.getElementsByTagName("date")),
+ 'body': gettext(c.getElementsByTagName("body")),
+ 'state': metacache[id]['state'],
+ }
+ if usermap.has_key(c.getAttribute("posterid")):
+ comment["user"] = usermap[c.getAttribute("posterid")]
+ try:
+ entry = xml.dom.minidom.parse("%s/C-%s" % (Username, jitemid))
+ except:
+ entry = xml.dom.minidom.getDOMImplementation().createDocument(None, "comments", None)
+ found = False
+ for d in entry.getElementsByTagName("comment"):
+ if int(d.getElementsByTagName("id")[0].firstChild.nodeValue) == id:
+ found = True
+ break
+ if found:
+ print "Warning: downloaded duplicate comment id %d in jitemid %s" % (id, jitemid)
+ else:
+ entry.documentElement.appendChild(createxml(entry, "comment", comment))
+ f = codecs.open("%s/C-%s" % (Username, jitemid), "w", "UTF-8")
+ entry.writexml(f)
+ f.close()
+ newcomments += 1
+ if id > maxid:
+ maxid = id
+ if maxid >= newmaxid:
+ break
+
+lastmaxid = maxid
+