# ljdump.py - livejournal archiver
# Greg Hewgill <greg@hewgill.com> http://hewgill.com
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the author be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
#    claim that you wrote the original software. If you use this software
#    in a product, an acknowledgment in the product documentation would be
#    appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
#    misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
#
# Copyright (c) 2005-2009 Greg Hewgill
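#
# For each journal this script creates a subdirectory containing one L-<itemid>
# XML file per entry, one C-<jitemid> XML file per entry's comment thread,
# pickled comment.meta and user.map caches, a .last file recording the sync
# position, and (when backing up your own journal) userpics.xml plus the
# userpic images themselves.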

import codecs, md5, os, pickle, pprint, re, shutil, sys, urllib2, xml.dom.minidom, xmlrpclib
from xml.sax import saxutils
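
# Map of userpic Content-Type values to file extensions, used when saving the
# userpic images near the end of ljdump(); the exact entries here are an
# assumption covering the image types LiveJournal typically serves.
MimeExtensions = {
    "image/gif": ".gif",
    "image/jpeg": ".jpg",
    "image/png": ".png",
}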

# Challenge-response auth: MD5(challenge + MD5(password)), so the password is never sent in the clear.
def calcchallenge(challenge, password):
    return md5.new(challenge+md5.new(password).hexdigest()).hexdigest()

# Parse a flat-interface response: alternating name and value lines until EOF.
def flatresponse(response):
    r = {}
    while True:
        name = response.readline()
        if len(name) == 0:
            break
        name = name[:len(name)-1]
        value = response.readline()
        value = value[:len(value)-1]
        r[name] = value
    return r

# Obtain an ljsession cookie value (needed later for the export_comments.bml
# requests) using challenge-response auth over the flat interface.
def getljsession(server, username, password):
    r = urllib2.urlopen(server+"/interface/flat", "mode=getchallenge")
    response = flatresponse(r)
    r = urllib2.urlopen(server+"/interface/flat", "mode=sessiongenerate&user=%s&auth_method=challenge&auth_challenge=%s&auth_response=%s" % (username, response['challenge'], calcchallenge(response['challenge'], password)))
    response = flatresponse(r)
    return response['ljsession']

# Fetch a fresh challenge and add the auth fields to an XML-RPC parameter dict.
def dochallenge(server, params, password):
    challenge = server.LJ.XMLRPC.getchallenge()
    params.update({
        'auth_method': "challenge",
        'auth_challenge': challenge['challenge'],
        'auth_response': calcchallenge(challenge['challenge'], password),
    })
    return params

# Recursively write a dict as simple XML elements; nested dicts become nested elements.
def dumpelement(f, name, e):
    f.write("<%s>\n" % name)
    for k in e.keys():
        if isinstance(e[k], {}.__class__):
            dumpelement(f, k, e[k])
        else:
            try:
                s = unicode(str(e[k]), "UTF-8")
            except UnicodeDecodeError:
                # fall back to cp1252 for old entries that aren't UTF-8
                s = unicode(str(e[k]), "cp1252")
            f.write("<%s>%s</%s>\n" % (k, saxutils.escape(s), k))
    f.write("</%s>\n" % name)

# Write one journal entry to its own XML file.
def writedump(fn, event):
    f = codecs.open(fn, "w", "UTF-8")
    f.write("""<?xml version="1.0"?>\n""")
    dumpelement(f, "event", event)
    f.close()
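
# For reference, a dumped entry file produced by writedump/dumpelement looks
# roughly like the sketch below; the element names simply mirror whatever
# fields getevents returned for that entry (itemid, eventtime, subject, event,
# props, ...), and values are XML-escaped, so treat this as illustrative only:
#
#   <?xml version="1.0"?>
#   <event>
#   <itemid>123</itemid>
#   <eventtime>2009-01-01 12:00:00</eventtime>
#   <subject>An example subject</subject>
#   <event>The entry body</event>
#   <props>
#   <opt_preformatted>1</opt_preformatted>
#   </props>
#   </event>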

# Record sync progress so the next run can resume where this one stopped.
def writelast(journal, lastsync, lastmaxid):
    f = open("%s/.last" % journal, "w")
    f.write("%s\n" % lastsync)
    f.write("%s\n" % lastmaxid)
    f.close()

# Build a DOM element with one text child element per dict key.
def createxml(doc, name, map):
    e = doc.createElement(name)
    for k in map.keys():
        me = doc.createElement(k)
        me.appendChild(doc.createTextNode(map[k]))
        e.appendChild(me)
    return e

# Return the text of the first element in a NodeList, or "" if the list is empty.
def gettext(e):
    if len(e) == 0:
        return ""
    return e[0].firstChild.nodeValue

def ljdump(Server, Username, Password, Journal):
    m = re.search("(.*)/interface/xmlrpc", Server)
    if m:
        Server = m.group(1)
    # export_comments.bml requests need authas=<journal> when dumping a community
    if Username != Journal:
        authas = "&authas=%s" % Journal
    else:
        authas = ""
119 print "Fetching journal entries for: %s" % Journal
122 print "Created subdirectory: %s" % Journal
126 ljsession = getljsession(Server, Username, Password)
128 server = xmlrpclib.ServerProxy(Server+"/interface/xmlrpc")

    newentries = 0
    newcomments = 0
    errors = 0

    # Resume from the .last checkpoint written by a previous run, if present.
    lastsync = ""
    lastmaxid = 0
    try:
        f = open("%s/.last" % Journal, "r")
        lastsync = f.readline()
        if lastsync[-1] == '\n':
            lastsync = lastsync[:len(lastsync)-1]
        lastmaxid = f.readline()
        if len(lastmaxid) > 0 and lastmaxid[-1] == '\n':
            lastmaxid = lastmaxid[:len(lastmaxid)-1]
        if lastmaxid == "":
            lastmaxid = 0
        else:
            lastmaxid = int(lastmaxid)
        f.close()
    except:
        pass
    origlastsync = lastsync

    # Log in over XML-RPC and collect the userpic keywords/URLs for later download.
    r = server.LJ.XMLRPC.login(dochallenge(server, {
        'username': Username,
        'ver': 1,
        'getpickws': 1,
        'getpickwurls': 1,
    }, Password))
    userpics = dict(zip(map(str, r['pickws']), r['pickwurls']))
    userpics['*'] = r['defaultpicurl']

    # Walk syncitems pages until the server has nothing newer than lastsync,
    # fetching each changed entry one at a time with getevents.
    while True:
        r = server.LJ.XMLRPC.syncitems(dochallenge(server, {
            'username': Username,
            'ver': 1,
            'lastsync': lastsync,
            'usejournal': Journal,
        }, Password))
        if len(r['syncitems']) == 0:
            break
        for item in r['syncitems']:
            # Only L-xxx items are journal entries; other item types are skipped.
            if item['item'][0] == 'L':
                print "Fetching journal entry %s (%s)" % (item['item'], item['action'])
                try:
                    e = server.LJ.XMLRPC.getevents(dochallenge(server, {
                        'username': Username,
                        'ver': 1,
                        'selecttype': "one",
                        'itemid': item['item'][2:],
                        'usejournal': Journal,
                    }, Password))
                    if e['events']:
                        writedump("%s/%s" % (Journal, item['item']), e['events'][0])
                        newentries += 1
                    else:
                        print "Unexpected empty item: %s" % item['item']
                        errors += 1
                except xmlrpclib.Fault, x:
                    print "Error getting item: %s" % item['item']
                    pprint.pprint(x)
                    errors += 1
            lastsync = item['time']
            writelast(Journal, lastsync, lastmaxid)

    # The following code doesn't work because the server rejects our repeated calls.
    # http://www.livejournal.com/doc/server/ljp.csp.xml-rpc.getevents.html
    # contains the statement "You should use the syncitems selecttype in
    # conjuntions [sic] with the syncitems protocol mode", but provides
    # no other explanation about how these two function calls should
    # interact. Therefore we just do the above slow one-at-a-time method.
    #
    # while True:
    #     r = server.LJ.XMLRPC.getevents(dochallenge(server, {
    #         'username': Username,
    #         'ver': 1,
    #         'selecttype': "syncitems",
    #         'lastsync': lastsync,
    #     }, Password))
    #     if len(r['events']) == 0:
    #         break
    #     for item in r['events']:
    #         writedump("%s/L-%d" % (Journal, item['itemid']), item)
    #         newentries += 1
    #         lastsync = item['eventtime']
218 print "Fetching journal comments for: %s" % Journal
221 f = open("%s/comment.meta" % Journal)
222 metacache = pickle.load(f)
228 f = open("%s/user.map" % Journal)
229 usermap = pickle.load(f)

    # First pass: page through comment_meta to refresh the metadata/user caches
    # and learn the server's current maximum comment id.
    maxid = lastmaxid
    while True:
        try:
            try:
                r = urllib2.urlopen(urllib2.Request(Server+"/export_comments.bml?get=comment_meta&startid=%d%s" % (maxid+1, authas), headers = {'Cookie': "ljsession="+ljsession}))
                meta = xml.dom.minidom.parse(r)
            except:
                print "*** Error fetching comment meta, possibly not community maintainer?"
                break
        finally:
            try:
                r.close()
            except AttributeError: # r is sometimes a dict for unknown reasons
                pass
        for c in meta.getElementsByTagName("comment"):
            id = int(c.getAttribute("id"))
            metacache[id] = {
                'posterid': c.getAttribute("posterid"),
                'state': c.getAttribute("state"),
            }
            if id > maxid:
                maxid = id
        for u in meta.getElementsByTagName("usermap"):
            usermap[u.getAttribute("id")] = u.getAttribute("user")
        if maxid >= int(meta.getElementsByTagName("maxid")[0].firstChild.nodeValue):
            break

    f = open("%s/comment.meta" % Journal, "w")
    pickle.dump(metacache, f)
    f.close()
    f = open("%s/user.map" % Journal, "w")
    pickle.dump(usermap, f)
    f.close()

    # Second pass: page through comment_body and merge each new comment into the
    # per-entry C-<jitemid> file, skipping ids that are already present.
    newmaxid = maxid
    maxid = lastmaxid
    while True:
        try:
            try:
                r = urllib2.urlopen(urllib2.Request(Server+"/export_comments.bml?get=comment_body&startid=%d%s" % (maxid+1, authas), headers = {'Cookie': "ljsession="+ljsession}))
                meta = xml.dom.minidom.parse(r)
            except:
                print "*** Error fetching comment body, possibly not community maintainer?"
                break
        finally:
            r.close()
        for c in meta.getElementsByTagName("comment"):
            id = int(c.getAttribute("id"))
            jitemid = c.getAttribute("jitemid")
            comment = {
                'id': str(id),
                'parentid': c.getAttribute("parentid"),
                'subject': gettext(c.getElementsByTagName("subject")),
                'date': gettext(c.getElementsByTagName("date")),
                'body': gettext(c.getElementsByTagName("body")),
                'state': metacache[id]['state'],
            }
            if usermap.has_key(c.getAttribute("posterid")):
                comment["user"] = usermap[c.getAttribute("posterid")]
            try:
                entry = xml.dom.minidom.parse("%s/C-%s" % (Journal, jitemid))
            except:
                # no existing C- file for this entry yet; start a new document
                entry = xml.dom.minidom.getDOMImplementation().createDocument(None, "comments", None)
            found = False
            for d in entry.getElementsByTagName("comment"):
                if int(d.getElementsByTagName("id")[0].firstChild.nodeValue) == id:
                    found = True
                    break
            if found:
                print "Warning: downloaded duplicate comment id %d in jitemid %s" % (id, jitemid)
            else:
                entry.documentElement.appendChild(createxml(entry, "comment", comment))
                f = codecs.open("%s/C-%s" % (Journal, jitemid), "w", "UTF-8")
                entry.writexml(f)
                f.close()
                newcomments += 1
            if id > maxid:
                maxid = id
        if maxid >= newmaxid:
            break
    lastmaxid = maxid
    writelast(Journal, lastsync, lastmaxid)

    # Userpics are fetched only when dumping your own journal.
    if Username == Journal:
        print "Fetching userpics for: %s" % Username
        f = open("%s/userpics.xml" % Username, "w")
        print >>f, """<?xml version="1.0"?>"""
        print >>f, "<userpics>"
        for p in userpics:
            print >>f, """<userpic keyword="%s" url="%s" />""" % (p, userpics[p])
            pic = urllib2.urlopen(userpics[p])
            ext = MimeExtensions.get(pic.info()["Content-Type"], "")
            picfn = re.sub(r'[*?\\/:<>"|]', "_", p)
            try:
                picfn = codecs.utf_8_decode(picfn)[0]
                picf = open("%s/%s%s" % (Username, picfn, ext), "wb")
            except:
                # for installations where the above utf_8_decode doesn't work
                picfn = "".join([ord(x) < 128 and x or "_" for x in picfn])
                picf = open("%s/%s%s" % (Username, picfn, ext), "wb")
            shutil.copyfileobj(pic, picf)
            pic.close()
            picf.close()
        print >>f, "</userpics>"
        f.close()
344 print "%d new entries, %d new comments (since %s)" % (newentries, newcomments, origlastsync)
346 print "%d new entries, %d new comments" % (newentries, newcomments)
348 print "%d errors" % errors

if __name__ == "__main__":
    if os.access("ljdump.config", os.F_OK):
        config = xml.dom.minidom.parse("ljdump.config")
        server = config.documentElement.getElementsByTagName("server")[0].childNodes[0].data
        username = config.documentElement.getElementsByTagName("username")[0].childNodes[0].data
        password = config.documentElement.getElementsByTagName("password")[0].childNodes[0].data
        journals = config.documentElement.getElementsByTagName("journal")
        if journals:
            for e in journals:
                ljdump(server, username, password, e.childNodes[0].data)
        else:
            ljdump(server, username, password, username)
    else:
        from getpass import getpass
        print "ljdump - livejournal archiver"
        print
        print "Enter your Livejournal username and password."
        print
        server = "http://livejournal.com"
        username = raw_input("Username: ")
        password = getpass("Password: ")
        print
        print "You may back up either your own journal, or a community."
        print "If you are a community maintainer, you can back up both entries and comments."
        print "If you are not a maintainer, you can back up only entries."
        print
        journal = raw_input("Journal to back up (or hit return to back up '%s'): " % username)
        if journal:
            ljdump(server, username, password, journal)
        else:
            ljdump(server, username, password, username)