Revision: 7924
Author:   xqt
Date:     2010-02-12 06:40:57 +0000 (Fri, 12 Feb 2010)

Log Message:
-----------
global debug option for development purposes

Modified Paths:
--------------
    trunk/pywikipedia/blockpageschecker.py
    trunk/pywikipedia/djvutext.py
    trunk/pywikipedia/featured.py
    trunk/pywikipedia/pagefromfile.py
    trunk/pywikipedia/piper.py
    trunk/pywikipedia/table2wiki.py
    trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/blockpageschecker.py =================================================================== --- trunk/pywikipedia/blockpageschecker.py 2010-02-11 15:20:48 UTC (rev 7923) +++ trunk/pywikipedia/blockpageschecker.py 2010-02-12 06:40:57 UTC (rev 7924) @@ -32,8 +32,8 @@ -always Doesn't ask every time if the bot should make the change or not, do it always.
--debug When the bot can't delete the template from the page (wrong - regex or something like that) it will ask you if it should open +-show When the bot can't delete the template from the page (wrong + regex or something like that) it will ask you if it should show the page on your browser. (attention: pages included may give false positives!)
@@ -53,13 +53,14 @@
python blockpageschecker.py -cat:Geography -always
-python blockpageschecker.py -debug -protectedpages:4 +python blockpageschecker.py -show -protectedpages:4
""" # # (C) Monobi a.k.a. Wikihermit, 2007 -# (C) Filnik, 2007-2008-2009 -# (C) NicDumZ, 2008 +# (C) Filnik, 2007-2009 +# (C) NicDumZ, 2008-2009 +# (C) Pywikipedia bot team, 2007-2010 # # Distributed under the terms of the MIT license. # @@ -188,7 +189,7 @@ return ('autoconfirmed-move', catchRegex) return ('editable', r'\A\n') # If editable means that we have no regex, won't change anything with this regex
-def debugQuest(site, page): +def showQuest(site, page): quest = pywikibot.inputChoice(u'Do you want to open the page?',['with browser', 'with gui', 'no'], ['b','g','n'], 'n') pathWiki = site.family.nicepath(site.lang) url = 'http://%s%s%s?&redirect=no' % (pywikibot.getSite().hostname(), pathWiki, page.urlname()) @@ -204,7 +205,7 @@ # Loading the comments global categoryToCheck, comment, project_inserted # always, define a generator to understand if the user sets one, defining what's genFactory - always = False; generator = False; debug = False + always = False; generator = False; show = False moveBlockCheck = False; genFactory = pagegenerators.GeneratorFactory() # To prevent Infinite loops errorCount = 0 @@ -214,8 +215,8 @@ always = True elif arg == '-move': moveBlockCheck = True - elif arg == '-debug': - debug = True + elif arg == '-show': + show = True elif arg.startswith('-protectedpages'): if len(arg) == 15: generator = site.protectedpages(namespace = 0) @@ -278,8 +279,8 @@ continue except pywikibot.IsRedirectPage: pywikibot.output("%s is a redirect! Skipping..." % pagename) - if debug: - debugQuest(site, page) + if show: + showQuest(site, page) continue """ # This check does not work :
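For illustration only (not part of the patch): the renamed -show option keeps the old per-script -debug behaviour, offering to display a page the bot could not fix. A minimal sketch of that behaviour follows; show_quest, hostname, nice_path and url_title are hypothetical stand-ins for showQuest(), site.hostname(), site.family.nicepath(site.lang) and page.urlname().

    # Sketch (assumption): roughly what showQuest() does when -show is given
    # and a protection template could not be removed from the page.
    import webbrowser

    def show_quest(hostname, nice_path, url_title):
        # Ask before opening; the real bot uses pywikibot.inputChoice() instead.
        choice = raw_input('Open the page in your browser? [y/N] ')
        if choice.strip().lower().startswith('y'):
            webbrowser.open('http://%s%s%s?&redirect=no'
                            % (hostname, nice_path, url_title))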
Modified: trunk/pywikipedia/djvutext.py =================================================================== --- trunk/pywikipedia/djvutext.py 2010-02-11 15:20:48 UTC (rev 7923) +++ trunk/pywikipedia/djvutext.py 2010-02-12 06:40:57 UTC (rev 7924) @@ -6,7 +6,7 @@
The following parameters are supported:
- -debug If given, doesn't do any real changes, but only shows + -dry If given, doesn't do any real changes, but only shows what would have been changed. -ask Ask for confirmation before uploading each page. (Default: ask when overwriting pages) @@ -18,6 +18,11 @@ All other parameters will be regarded as part of the title of a single page, and the bot will only work on that single page. """ +# +# (C) Pywikipedia bot team, 2008-2010 +# +# Distributed under the terms of the MIT license. +# __version__ = '$Id$' import wikipedia import os, sys @@ -49,14 +54,14 @@ def __init__(self, djvu, index, pages, ask=False, debug=False): """ Constructor. Parameters: - djvu : filename - index : page name - pages : page range + djvu : filename + index : page name + pages : page range """ self.djvu = djvu self.index = index self.pages = pages - self.debug = debug + self.dry = debug self.ask = ask
def NoOfImages(self): @@ -103,9 +108,7 @@ def has_text(self): cmd = u"djvudump \"%s\" > \"%s\".out" % (self.djvu, self.djvu) os.system ( cmd.encode(sys.stdout.encoding) ) - f = codecs.open(u"%s.out" % self.djvu, 'r', config.textfile_encoding, 'replace') - s = f.read() f.close() return s.find('TXTz') >= 0 @@ -114,9 +117,7 @@ wikipedia.output(unicode("fetching page %d" % (pageno))) cmd = u"djvutxt --page=%d \"%s\" \"%s.out\"" % (pageno, self.djvu, self.djvu) os.system ( cmd.encode(sys.stdout.encoding) ) - f = codecs.open(u"%s.out" % self.djvu, 'r', config.textfile_encoding, 'replace') - djvu_text = f.read() f.close() return djvu_text @@ -137,7 +138,7 @@ text = u'<noinclude>{{PageQuality|1|%s}}<div class="pagetext">\n\n\n</noinclude>%s<noinclude><references/></div></noinclude>' % (self.username,djvutxt)
# convert to wikisyntax - # this adds a second line feed, which makes a new paragraph + # this adds a second line feed, which makes a new paragraph text = text.replace('', "\n")
# only save if something was changed @@ -156,8 +157,8 @@ wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title()) wikipedia.showDiff(old_text, text)
- if self.debug: - wikipedia.inputChoice(u'Debug mode... Press enter to continue', [], [], 'dummy') + if self.dry: + wikipedia.inputChoice(u'Dry mode... Press enter to continue', [], [], 'dummy') return
if ask: @@ -182,13 +183,13 @@ djvu = None pages = None # what would have been changed. - debug = False + dry = False ask = False
# Parse command line arguments for arg in wikipedia.handleArgs(): - if arg.startswith("-debug"): - debug = True + if arg.startswith("-dry"): + dry = True elif arg.startswith("-ask"): ask = True elif arg.startswith("-djvu:"): @@ -226,7 +227,7 @@
wikipedia.output(u"uploading text from %s to %s" % (djvu, index_page.aslink()) )
- bot = DjVuTextBot(djvu, index, pages, ask, debug) + bot = DjVuTextBot(djvu, index, pages, ask, dry) if not bot.has_text(): raise ValueError("No text layer in djvu file")
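As context for the has_text() code touched above: djvutext.py only proceeds when the DjVu file carries a hidden text layer, which it detects by dumping the chunk structure and looking for a TXTz chunk. A rough equivalent using subprocess instead of the os.system/temp-file round trip, given purely as a sketch:

    # Sketch (assumption): the text-layer check, written with subprocess.
    import subprocess

    def has_text_layer(djvu_path):
        # djvudump prints the DjVu chunk structure; a TXTz chunk means the file
        # has a text layer that djvutxt can extract.
        dump = subprocess.Popen(['djvudump', djvu_path],
                                stdout=subprocess.PIPE).communicate()[0]
        return b'TXTz' in dump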
Modified: trunk/pywikipedia/featured.py =================================================================== --- trunk/pywikipedia/featured.py 2010-02-11 15:20:48 UTC (rev 7923) +++ trunk/pywikipedia/featured.py 2010-02-12 06:40:57 UTC (rev 7924) @@ -32,7 +32,7 @@
-quiet no corresponding pages are displayed.
--debug for debug purposes. No changes will be made. +-dry for debug purposes. No changes will be made.
usage: featured.py [-interactive] [-nocache] [-top] [-after:zzzz] [-fromlang:xx,yy--zz|-fromall]
@@ -41,8 +41,9 @@
# # (C) Maxim Razin, 2005 -# (C) Leonardo Gregianin, 2006-2007 -# (C) xqt, 2009 +# (C) Leonardo Gregianin, 2005-2008 +# (C) xqt, 2009-2010 +# (C) Pywikipedia bot team, 2005-2010 # # Distributed under the terms of the MIT license. # @@ -351,11 +352,11 @@ arts=[] try: if pType == 'good': - method=good_name[site.lang][0] - elif pType == 'list': - method=lists_name[site.lang][0] + method=good_name[site.lang][0] + elif pType == 'list': + method=lists_name[site.lang][0] else: - method=featured_name[site.lang][0] + method=featured_name[site.lang][0] except KeyError: wikipedia.output(u'Error: language %s doesn\'t has %s category source.' % (site.lang, feature)) return arts @@ -452,8 +453,7 @@ templates = template['_default'] return templates
- -def featuredWithInterwiki(fromsite, tosite, template_on_top, pType, quiet, debug = False): +def featuredWithInterwiki(fromsite, tosite, template_on_top, pType, quiet, dry = False): if not fromsite.lang in cache: cache[fromsite.lang]={} if not tosite.lang in cache[fromsite.lang]: @@ -523,7 +523,7 @@ text=(text[:m.end()] + (u" {{%s|%s}}" % (templatelist[0], fromsite.lang)) + text[m.end():]) - if not debug: + if not dry: try: atrans.put(text, comment) except wikipedia.LockedPage: @@ -541,7 +541,7 @@ doAll = False part = False quiet = False - debug = False + dry = False for arg in wikipedia.handleArgs(): if arg == '-interactive': interactive=1 @@ -564,8 +564,8 @@ processType = 'list' elif arg == '-quiet': quiet = True - elif arg == '-debug': - debug = True + elif arg == '-dry': + dry = True
if part: try: @@ -574,7 +574,7 @@ ll1,ll2=fromlang[0].split("--",1) if not ll1: ll1="" if not ll2: ll2="zzzzzzz" - if processType == 'good': + if processType == 'good': fromlang=[ll for ll in good_name.keys() if ll>=ll1 and ll<=ll2] elif processType == 'list': fromlang=[ll for ll in good_lists.keys() if ll>=ll1 and ll<=ll2] @@ -582,15 +582,15 @@ fromlang=[ll for ll in featured_name.keys() if ll>=ll1 and ll<=ll2] except: pass - + if doAll: - if processType == 'good': + if processType == 'good': fromlang=good_name.keys() - elif processType == 'list': + elif processType == 'list': fromlang=lists_name.keys() - else: - fromlang=featured_name.keys() - + else: + fromlang=featured_name.keys() + filename="cache/" + processType try: cache=pickle.load(file(filename,"rb")) @@ -603,7 +603,7 @@ sys.exit(1)
fromlang.sort() - + #test whether this site has template enabled hasTemplate = False if not featuredcount: @@ -623,7 +623,7 @@ break elif fromsite != wikipedia.getSite(): featuredWithInterwiki(fromsite, wikipedia.getSite(), - template_on_top, processType, quiet, debug) + template_on_top, processType, quiet, dry) except KeyboardInterrupt: wikipedia.output('\nQuitting program...') finally:
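The -fromlang:yy--zz form handled in the hunk above selects every configured language whose code sorts lexicographically between the two bounds. A small worked example of that comparison (the language list here is made up for illustration):

    # Worked example (hypothetical language list) of the yy--zz range filter:
    ll1, ll2 = 'de', 'fr'
    known = ['cs', 'de', 'en', 'es', 'fi', 'fr', 'it']
    selected = [ll for ll in known if ll >= ll1 and ll <= ll2]
    # selected == ['de', 'en', 'es', 'fi', 'fr']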
Modified: trunk/pywikipedia/pagefromfile.py =================================================================== --- trunk/pywikipedia/pagefromfile.py 2010-02-11 15:20:48 UTC (rev 7923) +++ trunk/pywikipedia/pagefromfile.py 2010-02-12 06:40:57 UTC (rev 7924) @@ -31,7 +31,7 @@ -autosummary Use MediaWikis autosummary when creating a new page, overrides -summary in this case -minor set minor edit flag on page edits --debug Do not really upload pages, just check and report +-dry Do not really upload pages, just check and report messages
If the page to be uploaded already exists: @@ -42,6 +42,7 @@ """ # # (C) Andre Engels, 2004 +# (C) Pywikipedia bot team, 2005-2010 # # Distributed under the terms of the MIT license. # @@ -139,7 +140,7 @@ self.summary = summary self.minor = minor self.autosummary = autosummary - self.debug = debug + self.dry = debug
def run(self): for title, contents in self.reader.run(): @@ -185,8 +186,8 @@ comment = '' wikipedia.setAction('')
- if self.debug: - wikipedia.output("*** Debug mode ***\n" + \ + if self.dry: + wikipedia.output("*** Dry mode ***\n" + \ "\03{lightpurple}title\03{default}: " + title + "\n" + \ "\03{lightpurple}contents\03{default}:\n" + contents + "\n" \ "\03{lightpurple}comment\03{default}: " + comment + "\n") @@ -282,7 +283,7 @@ summary = None minor = False autosummary = False - debug = False + dry = False
for arg in wikipedia.handleArgs(): if arg.startswith("-start:"): @@ -299,8 +300,8 @@ append = "Bottom" elif arg == "-force": force=True - elif arg == "-debug": - debug = True + elif arg == "-dry": + dry = True elif arg == "-safe": force = False append = None @@ -321,7 +322,7 @@
reader = PageFromFileReader(filename, pageStartMarker, pageEndMarker, titleStartMarker, titleEndMarker, include, notitle)
- bot = PageFromFileRobot(reader, force, append, summary, minor, autosummary, debug) + bot = PageFromFileRobot(reader, force, append, summary, minor, autosummary, dry) bot.run()
if __name__ == "__main__":
Modified: trunk/pywikipedia/piper.py =================================================================== --- trunk/pywikipedia/piper.py 2010-02-11 15:20:48 UTC (rev 7923) +++ trunk/pywikipedia/piper.py 2010-02-12 06:40:57 UTC (rev 7924) @@ -20,7 +20,7 @@
&params;
- -debug If given, doesn't do any real changes, but only shows + -dry If given, doesn't do any real changes, but only shows what would have been changed.
-always Always commit changes without asking you to accept them @@ -34,7 +34,13 @@ supported.
""" +# +# (C) Pywikipedia bot team, 2008-2010 +# +# Distributed under the terms of the MIT license. +# __version__ = '$Id$' + import wikipedia import pagegenerators
@@ -68,7 +74,7 @@ * always - If True, don't prompt for changes """ self.generator = generator - self.debug = debug + self.dry = debug self.always = always self.filters = filters
@@ -104,16 +110,6 @@
return unicode_text
- # debug - #def savePage(self, name, text): - # mungedName = name.replace(":", "_").replace("/", "_").replace(" ", "_") - # - # saveName = "/tmp/piper/%s" % mungedName - # file = open(saveName, 'w') - # file.write(text.encode("utf-8")) - # file.close() - # print "Wrote to %s" % saveName - def treat(self, page): """ Loads the given page, does some changes, and saves it. @@ -128,9 +124,6 @@ wikipedia.output(u"Page %s is a redirect; skipping." % page.aslink()) return
- # debug - # self.savePage(page.title(), text) - # Munge! for program in self.filters: text = self.pipe(program, text); @@ -142,7 +135,7 @@ wikipedia.output(u"\n\n>>> %s <<<" % page.title()) # show what was changed wikipedia.showDiff(page.get(), text) - if not self.debug: + if not self.dry: if not self.always: choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No'], ['y', 'N'], 'N') else: @@ -169,9 +162,9 @@ # This temporary array is used to read the page title if one single # page to work on is specified by the arguments. pageTitleParts = [] - # If debug is True, doesn't do any real changes, but only show + # If dry is True, doesn't do any real changes, but only show # what would have been changed. - debug = False + dry = False # will become True when the user uses the -always flag. always = False # The program to pipe stuff through @@ -179,8 +172,8 @@
# Parse command line arguments for arg in wikipedia.handleArgs(): - if arg.startswith("-debug"): - debug = True + if arg.startswith("-dry"): + dry = True elif arg.startswith("-filter:"): prog = arg[8:] filters.append(prog) @@ -204,7 +197,7 @@ # The preloading generator is responsible for downloading multiple # pages from the wiki simultaneously. gen = pagegenerators.PreloadingGenerator(gen) - bot = PiperBot(gen, debug, filters, always) + bot = PiperBot(gen, dry, filters, always) bot.run() else: wikipedia.showHelp()
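piper.py feeds each page's text through the external programs named with -filter: and, unless -dry is given, offers to save the result. The actual pipe() implementation is not shown in this hunk; the following is only a plausible minimal version of that step, not the bot's code:

    # Sketch (assumption): one -filter: step -- send the page text to an
    # external command's stdin and take its stdout as the new text.
    import subprocess

    def pipe_through(program, text):
        proc = subprocess.Popen(program, shell=True,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE)
        out, _ = proc.communicate(text.encode('utf-8'))
        return out.decode('utf-8')

    new_text = pipe_through('tr a-z A-Z', u'some wikitext')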
Modified: trunk/pywikipedia/table2wiki.py =================================================================== --- trunk/pywikipedia/table2wiki.py 2010-02-11 15:20:48 UTC (rev 7923) +++ trunk/pywikipedia/table2wiki.py 2010-02-12 06:40:57 UTC (rev 7924) @@ -46,10 +46,12 @@ Broken HTML tables will most likely result in broken wiki tables! Please check every article you change. """ - +# # (C) 2003 Thomas R. Koll, tomk32@tomk32.de +# (C) Pywikipedia bot team, 2003-2010 # # Distributed under the terms of the MIT license. +# __version__='$Id$'
import re, sys, time @@ -131,9 +133,8 @@ yield wikipedia.Page(wikipedia.getSite(), entry.title)
class Table2WikiRobot: - def __init__(self, generator, debug = False, quietMode = False): + def __init__(self, generator, quietMode = False): self.generator = generator - self.debug = debug self.quietMode = quietMode
def convertTable(self, table): @@ -465,16 +466,12 @@ if not table: # no more HTML tables left break - print ">> Table %i <<" % (convertedTables + 1) + wikipedia.output(">> Table %i <<" % (convertedTables + 1)) # convert the current table newTable, warningsThisTable, warnMsgsThisTable = self.convertTable(table) # show the changes for this table - if self.debug: - print table - print newTable - elif not self.quietMode: + if not self.quietMode: wikipedia.showDiff(table.replace('##table##', 'table'), newTable) - print "" warningSum += warningsThisTable for msg in warnMsgsThisTable: warningMessages += 'In table %i: %s' % (convertedTables + 1, msg) @@ -542,14 +539,12 @@ articles = [] # if -file is not used, this temporary array is used to read the page title. page_title = [] - debug = False
# Which namespaces should be processed? # default to [] which means all namespaces will be processed namespaces = []
xmlfilename = None - gen = None
# This factory is responsible for processing command line arguments @@ -584,8 +579,6 @@ print "Automatic mode!\n" elif arg.startswith('-quiet'): quietMode = True - elif arg.startswith('-debug'): - debug = True else: if not genFactory.handleArg(arg): page_title.append(arg) @@ -599,15 +592,15 @@
if not gen: gen = genFactory.getCombinedGenerator() - if not gen: - # show help + + if gen: + if namespaces != []: + gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces) + preloadingGen = pagegenerators.PreloadingGenerator(gen) + bot = Table2WikiRobot(preloadingGen, quietMode) + bot.run() + else: wikipedia.showHelp('table2wiki') - sys.exit(0) - if namespaces != []: - gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces) - preloadingGen = pagegenerators.PreloadingGenerator(gen) - bot = Table2WikiRobot(preloadingGen, debug, quietMode) - bot.run()
if __name__ == "__main__": try:
Modified: trunk/pywikipedia/wikipedia.py =================================================================== --- trunk/pywikipedia/wikipedia.py 2010-02-11 15:20:48 UTC (rev 7923) +++ trunk/pywikipedia/wikipedia.py 2010-02-12 06:40:57 UTC (rev 7924) @@ -116,7 +116,7 @@ """ from __future__ import generators # -# (C) Pywikipedia bot team, 2003-2009 +# (C) Pywikipedia bot team, 2003-2010 # # Distributed under the terms of the MIT license. # @@ -1286,7 +1286,7 @@ for s in self.getReferencesOld(follow_redirects, withTemplateInclusion, onlyTemplateInclusion, redirectsOnly): yield s return - + params = { 'action': 'query', 'list': [], @@ -1314,13 +1314,13 @@ params['eifilterredir'] = 'redirects' if not self.site().isAllowed('apihighlimits') and config.special_page_limit > 500: params['eilimit'] = 500 - + allDone = False - + while not allDone: if not internal: output(u'Getting references to %s via API...' % self.aslink()) - + datas = query.GetData(params, self.site()) data = datas['query'].values() if len(data) == 2: @@ -1333,7 +1333,7 @@ pg = Page(self.site(), blp['title'], defaultNamespace = blp['ns']) if pg in refPages: continue - + yield pg refPages.add(pg) if follow_redirects and 'redirect' in blp and 'redirlinks' in blp: @@ -1341,7 +1341,7 @@ plk = Page(self.site(), p['title'], defaultNamespace = p['ns']) if plk in refPages: continue - + yield plk refPages.add(plk) if follow_redirects and 'redirect' in p: @@ -1352,7 +1352,7 @@ continue else: continue - + if 'query-continue' in datas: if 'backlinks' in datas['query-continue']: params['blcontinue'] = datas['query-continue']['backlinks']['blcontinue'] @@ -1361,8 +1361,8 @@ params['eicontinue'] = datas['query-continue']['embeddedin']['eicontinue'] else: allDone = True - - + + def getReferencesOld(self, follow_redirects=True, withTemplateInclusion=True, onlyTemplateInclusion=False, redirectsOnly=False): @@ -2542,10 +2542,10 @@ dataQuery = self._versionhistory else: thisHistoryDone = True - + if not thisHistoryDone: dataQuery.extend(self._getVersionHistory(getAll, skip, reverseOrder, revCount)) - + if reverseOrder: # Return only revCount edits, even if the version history is extensive if dataQuery != []: @@ -2562,7 +2562,7 @@ if len(self._versionhistory) > revCount and not getAll: return self._versionhistory[:revCount] return self._versionhistory - + def _getVersionHistory(self, getAll = False, skipFirst = False, reverseOrder = False, revCount=500): """Load history informations by API query. @@ -2588,11 +2588,11 @@ while not thisHistoryDone: if reverseOrder: params['rvdir'] = 'newer' - + result = query.GetData(params, self.site()) if 'error' in result: raise RuntimeError("%s" % result['error']) - + if 'query-continue' in result and getAll: params['rvstartid'] = result['query-continue']['revisions']['rvstartid'] else: @@ -2680,8 +2680,7 @@ thisHistoryDone = True
return dataQ - - + def getVersionHistoryTable(self, forceReload=False, reverseOrder=False, getAll=False, revCount=500): """Return the version history as a wiki table.""" @@ -2771,13 +2770,13 @@ } if movesubpages: params['movesubpages'] = 1 - + if movetalkpage: params['movetalk'] = 1 - + if not leaveRedirect: params['noredirect'] = 1 - + result = query.GetData(params, self.site(), sysop=sysop) if 'error' in result: err = result['error']['code'] @@ -2811,8 +2810,7 @@ output(u'Page %s is moved and no longer exist.' % self.title() ) #delattr(self, '_contents') return True - - + def _moveOld(self, newtitle, reason=None, movetalkpage=True, movesubpages=False, sysop=False, throttle=True, deleteAndMove=False, safe=True, fixredirects=True, leaveRedirect=True):
@@ -2834,7 +2832,7 @@ reason = input(u'Please enter a reason for the move:') if self.isTalkPage(): movetalkpage = False - + host = self.site().hostname() address = self.site().move_address() token = self.site().getToken(sysop = sysop) @@ -2846,40 +2844,40 @@ if deleteAndMove: predata['wpDeleteAndMove'] = self.site().mediawiki_message('delete_and_move_confirm') predata['wpConfirm'] = '1' - + if movetalkpage: predata['wpMovetalk'] = '1' else: predata['wpMovetalk'] = '0' - + if self.site().versionnumber() >= 13: if fixredirects: predata['wpFixRedirects'] = '1' else: predata['wpFixRedirects'] = '0' - + if leaveRedirect: predata['wpLeaveRedirect'] = '1' else: predata['wpLeaveRedirect'] = '0' - + if movesubpages: predata['wpMovesubpages'] = '1' else: predata['wpMovesubpages'] = '0' - + if token: predata['wpEditToken'] = token - + response, data = self.site().postForm(address, predata, sysop = sysop) - + if data == u'' or self.site().mediawiki_message('pagemovedsub') in data: #Move Success if deleteAndMove: output(u'Page %s moved to %s, deleting the existing page' % (self.title(), newtitle)) else: output(u'Page %s moved to %s' % (self.title(), newtitle)) - + if hasattr(self, '_contents'): #self.__init__(self.site(), newtitle, defaultNamespace = self._namespace) try: @@ -2887,7 +2885,7 @@ except NoPage: output(u'Page %s is moved and no longer exist.' % self.title() ) #delattr(self, '_contents') - + return True else: #Move Failure @@ -3041,7 +3039,7 @@ output(u'Loading list of deleted revisions for [[%s]]...' % self.title())
self._deletedRevs = {} - + if config.use_api and self.site().versionnumber() >= 12: params = { 'action': 'query', @@ -3059,17 +3057,17 @@ for x in data['query']['deletedrevs']: if x['title'] != self.title(): continue - + for y in x['revisions']: count += 1 self._deletedRevs[parsetime2stamp(y['timestamp'])] = [y['timestamp'], y['user'], y['comment'] , y['*'], False] - + if 'query-continue' in data and data['query-continue']['deletedrevs']['drcontinue'].split('|')[1] == self.titleWithoutNamespace(): params['drcontinue'] = data['query-continue']['deletedrevs']['drcontinue'] else: break self._deletedRevsModified = False - + else: address = self.site().undelete_view_address(self.urlname()) text = self.site().getUrl(address, sysop = True) @@ -3086,7 +3084,7 @@ ]
self._deletedRevsModified = False - + return self._deletedRevs.keys()
def getDeletedRevision(self, timestamp, retrieveText=False): @@ -3146,7 +3144,7 @@ if ... #decide whether to undelete a revision pg.markDeletedRevision(rev) #mark for undeletion pg.undelete('This will restore only selected revisions.') - + """ # Login self._getActionUser(action = 'undelete', sysop = True) @@ -3173,25 +3171,25 @@ if self._deletedRevs[ts][4]: selected.append(ts) params['timestamps'] = ts, - + result = query.GetData(params, self.site(), sysop=True) if 'error' in result: raise RuntimeError("%s" % result['error']) elif 'undelete' in result: output(u'Page %s undeleted' % self.aslink()) - + return result - + else: address = self.site().undelete_address() - + formdata = { 'target': self.title(), 'wpComment': comment, 'wpEditToken': token, 'restore': self.site().mediawiki_message('undeletebtn') } - + if self._deletedRevs and self._deletedRevsModified: for ts in self._deletedRevs: if self._deletedRevs[ts][4]: @@ -3201,9 +3199,8 @@ #TODO: Check for errors below (have we succeeded? etc): result = self.site().postForm(address,formdata,sysop=True) output(u'Page %s undeleted' % self.aslink()) - + return result -
def protect(self, editcreate = 'sysop', move = 'sysop', unprotect = False, reason = None, editcreate_duration = 'infinite', move_duration = 'infinite', cascading = False, prompt = True, throttle = True): @@ -3257,7 +3254,7 @@ except NotImplementedError: return self._oldProtect( editcreate, move, unprotect, reason, editcreate_duration, move_duration, cascading, prompt, throttle) - + token = self.site().getToken(self, sysop = True)
# Translate 'none' to '' @@ -3275,14 +3272,14 @@
if self.exists(): protections.append("edit=%s" % editcreate) - + protections.append("move=%s" % move) expiry.append(move_duration) else: protections.append("create=%s" % editcreate) - + expiry.append(editcreate_duration) - + params = { 'action': 'protect', 'title': self.title(), @@ -3301,9 +3298,9 @@ output(u"NOTE: The page can't be protected with cascading and not also with only-sysop. Set cascading \"off\"") else: params['cascade'] = 1 - + result = query.GetData(params, self.site(), sysop=True) - + if 'error' in result: #error occured err = result['error']['code'] output('%s' % result) @@ -3315,7 +3312,7 @@ if result['protect']: output(u'Changed protection level of page %s.' % self.aslink()) return True - + return False
def _oldProtect(self, editcreate = 'sysop', move = 'sysop', unprotect = False, reason = None, editcreate_duration = 'infinite', @@ -3487,11 +3484,11 @@ data = self.getVersionHistory(getAll=True, revCount = limit) else: data = self.getVersionHistory(revCount = limit) - + result = [] for i in data: result.append({'user':i[2],'timestamp':i[1]}) - + return result
class ImagePage(Page): @@ -3568,8 +3565,7 @@ infos.append(info) if limit == 1: break - - + if 'query-continue' in data and limit != 1: params['iistart'] = data['query-continue']['imageinfo']['iistart'] else: @@ -3725,7 +3721,7 @@ 'iulimit': config.special_page_limit, #'': '', } - + while True: data = query.GetData(params, self.site()) if 'error' in data: @@ -3806,13 +3802,13 @@ raise RuntimeError(data['error']) else: break - + self.headerDoneApi(data['query']) if 'normalized' in data['query']: self._norm = dict([(x['from'],x['to']) for x in data['query']['normalized']]) for vals in data['query']['pages'].values(): self.oneDoneApi(vals) - + else: while True: try: @@ -3861,7 +3857,7 @@ for pl in self.pages: if not hasattr(pl,'_contents') and not hasattr(pl,'_getexception'): pl._getexception = NoPage - + def oneDone(self, entry): title = entry.title username = entry.username @@ -3964,7 +3960,7 @@ for id in self.site.family.namespaces: if self.site.family.isDefinedNSLanguage(id, lang) and id not in header.namespaces: output(u"WARNING: Family file %s includes namespace['%s'][%i], but it should be removed (namespace doesn't exist in the site)" % (self.site.family.name, lang, id)) - + def getData(self): address = self.site.export_address() pagenames = [page.sectionFreeTitle() for page in self.pages] @@ -4016,13 +4012,13 @@ for page2 in self.pages: if hasattr(self, '_norm') and page2.sectionFreeTitle() in self._norm: page2._title = self._norm[page2.sectionFreeTitle()] - + if page2.sectionFreeTitle() == page.sectionFreeTitle(): if 'missing' in data: page2._getexception = NoPage successful = True break - + if 'invalid' in data: page2._getexception = BadTitle successful = True @@ -4115,7 +4111,7 @@ for id in self.site.family.namespaces: if self.site.family.isDefinedNSLanguage(id, lang) and u'%i' % id not in header['namespaces']: output(u"WARNING: Family file %s includes namespace['%s'][%i], but it should be removed (namespace doesn't exist in the site)" % (self.site.family.name, lang, id ) ) - + def getDataApi(self): pagenames = [page.sectionFreeTitle() for page in self.pages] params = { @@ -5608,8 +5604,7 @@ #keep anonymous mode if not login and centralauth not enable self._cookies[index] = None self._isLoggedIn[index] = False - - + def _readCookies(self, filename): """read login cookie file and return a dictionary.""" try: @@ -5638,7 +5633,7 @@ # So we need add centralauth username data into siteattribute self._userName[index] = self._cookies[index][self.family.cross_projects_cookie_username]
- + for k, v in datas.iteritems(): #put key and values into save cache if self.family.cross_projects and k in self.family.cross_projects_cookies: @@ -5652,13 +5647,12 @@ f = open(config.datafilepath('login-data', filename), 'w') f.write(cache[0]) f.close() - + filename = '%s-%s-%s-login.data' % (self.family.name, self.lang, self.username(sysop)) f = open(config.datafilepath('login-data', filename), 'w') f.write(cache[1]) f.close() - - + def _removeCookies(self, name): # remove cookies. # ToDo: remove all local datas if cross_projects enable. @@ -5670,7 +5664,7 @@ file = config.datafilepath('login-data', '%s-%s-%s-login.data' % (self.family.name, self.lang, name)) if os.path.exists(file): os.remove(file) - + def updateCookies(self, datas, sysop = False): """Check and update the current cookies datas and save back to files.""" index = self._userIndex(sysop) @@ -5685,8 +5679,7 @@ self._cookies[index][k] = v
self._setupCookies(self._cookies[index], sysop) - - + def urlEncode(self, query): """Encode a query so that it can be sent using an http POST request.""" if not query: @@ -5926,7 +5919,6 @@ Returns the HTML text of the page converted to unicode. """
- if retry is None: retry = config.retry_on_fail
@@ -5939,7 +5931,7 @@ #'Cache-Control': 'max-age=0', #'': '', } - + if not no_hostname and self.cookies(sysop = sysop): headers['Cookie'] = self.cookies(sysop = sysop) if compress: @@ -6302,13 +6294,13 @@ # protection for key in other datatype if type(key) not in [str, unicode]: key = 'general' - + if self._info and key in self._info and not force: if dump: return self._info else: return self._info[key] - + params = { 'action':'query', 'meta':'siteinfo', @@ -6332,7 +6324,7 @@ data = query.GetData(params, self)['query'] except NotImplementedError: return None - + if not hasattr(self, '_info'): self._info = data else: @@ -6351,8 +6343,7 @@ return self._info[key] except KeyError: return None - - + def mediawiki_message(self, key, forceReload = False): """Return the MediaWiki message text for key "key" """ # Allmessages is retrieved once for all per created Site object @@ -6371,7 +6362,7 @@ api = True except NotImplementedError: api = False - + usePHP = False elementtree = True try: @@ -6562,7 +6553,7 @@ } if namespaces: params['srnamespace'] = namespaces - + offset = 0 while True: params['sroffset'] = offset @@ -7071,7 +7062,7 @@ } if redirect: params['rnredirect'] = 1 - + data = query.GetData(params, self) return Page(self, data['query']['random'][0]['title']) else: @@ -8024,7 +8015,7 @@ args may be passed as an argument, thereby overriding sys.argv
""" - global default_code, default_family, verbose + global default_code, default_family, verbose, debug # get commandline arguments if not args: args = sys.argv[1:] @@ -8061,9 +8052,12 @@ elif arg.startswith('-daemonize:'): import daemonize daemonize.daemonize(redirect_std = arg[11:]) - elif arg == "-cosmeticchanges" or arg == "-cc": + elif arg == '-cosmeticchanges' or arg == '-cc': config.cosmetic_changes = not config.cosmetic_changes output(u'NOTE: option cosmetic_changes is %s\n' % config.cosmetic_changes) + # global debug option for development purposes. Normally does nothing. + elif arg == '-debug': + debug = True else: # the argument is not global. Let the specific bot script care # about it. @@ -8082,6 +8076,7 @@ exec "import %s_interface as uiModule" % config.userinterface ui = uiModule.UI() verbose = 0 +debug = False
default_family = config.family default_code = config.mylang
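With this revision, -debug becomes a global option consumed by handleArgs(): it only sets the new module-level flag wikipedia.debug and, as the comment says, normally does nothing, which is why the per-script -debug spellings above were renamed to -show and -dry. A sketch of how a bot script might consult the new flag, assuming it imports the wikipedia module from this revision:

    # Sketch (assumption): reading the module-level flag after argument handling;
    # wikipedia.debug stays False unless the global -debug option was given.
    import wikipedia

    args = wikipedia.handleArgs()
    if wikipedia.debug:
        wikipedia.output(u'Global -debug set: emitting extra diagnostics only.')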