jenkins-bot has submitted this change and it was merged.
Change subject: Get rid of globalvar in interwiki.py
......................................................................
Get rid of globalvar in interwiki.py
Bug: T138077
Change-Id: Iedf444ba8665da1353e35359fd0d77482101fcc8
---
M scripts/interwiki.py
1 file changed, 114 insertions(+), 111 deletions(-)
Approvals:
  Xqt: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/scripts/interwiki.py b/scripts/interwiki.py index ca5b44e..56faaf3 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -337,7 +337,7 @@ # (C) Daniel Herding, 2004 # (C) Yuri Astrakhan, 2005-2006 # (C) xqt, 2009-2014 -# (C) Pywikibot team, 2007-2015 +# (C) Pywikibot team, 2007-2016 # # Distributed under the terms of the MIT license. # @@ -462,13 +462,9 @@ }
-class Global(object): +class InterwikiBotConfig(object):
- """ - Container class for global settings. - - Use of globals outside of this is to be avoided. - """ + """Container class for interwikibot's settings."""
autonomous = False confirm = False @@ -847,14 +843,15 @@ this Object. """
- def __init__(self, originPage=None, hints=None): + def __init__(self, originPage=None, hints=None, conf=None): """ Constructor.
Takes as arguments the Page on the home wiki plus optionally a list of hints for translation """ - if globalvar.contentsondisk: + self.conf = conf + if self.conf.contentsondisk: if originPage: originPage = StoredPage(originPage)
@@ -873,11 +870,11 @@ # This is a list of all pages that are currently scheduled for # download. self.pending = PageTree() - if globalvar.hintsareright: + if self.conf.hintsareright: # This is a set of sites that we got hints to self.hintedsites = set() - self.translate(hints, globalvar.hintsareright) - self.confirm = globalvar.confirm + self.translate(hints, self.conf.hintsareright) + self.confirm = self.conf.confirm self.problemfound = False self.untranslated = None self.hintsAsked = False @@ -934,7 +931,7 @@
def translate(self, hints=None, keephintedsites=False): """Add the given translation hints to the todo list.""" - if globalvar.same and self.originPage: + if self.conf.same and self.originPage: if hints: hints += ['all:'] else: @@ -947,13 +944,13 @@ links = titletranslate.translate( self.originPage, hints=hints, - auto=globalvar.auto, - removebrackets=globalvar.hintnobracket, + auto=self.conf.auto, + removebrackets=self.conf.hintnobracket, site=site)
for link in links: page = pywikibot.Page(link) - if globalvar.contentsondisk: + if self.conf.contentsondisk: page = StoredPage(page) self.todo.add(page) self.foundIn[page] = [None] @@ -1016,7 +1013,7 @@ if self.forcedStop: return False # cannot check backlink before we have an origin page - if globalvar.nobackonly and self.originPage: + if self.conf.nobackonly and self.originPage: if page == self.originPage: try: pywikibot.output(u"%s has a backlink from %s." @@ -1031,7 +1028,7 @@ self.foundIn[page].append(linkingPage) return False else: - if globalvar.contentsondisk: + if self.conf.contentsondisk: page = StoredPage(page) self.foundIn[page] = [linkingPage] self.todo.add(page) @@ -1065,7 +1062,7 @@ crossTo.get('_default', [])) if linkedPage.namespace() in nsmatch: return False - if globalvar.autonomous: + if self.conf.autonomous: pywikibot.output( 'NOTE: Ignoring link from page %s in namespace %i to page ' '%s in namespace %i.' @@ -1122,7 +1119,7 @@
def wiktionaryMismatch(self, page): """Check for ignoring pages.""" - if self.originPage and globalvar.same == 'wiktionary': + if self.originPage and self.conf.same == 'wiktionary': if page.title().lower() != self.originPage.title().lower(): pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode" % (page, self.originPage)) @@ -1152,7 +1149,7 @@ """ if not self.originPage: return (False, None) # any page matches til we have an origin page - if globalvar.autonomous: + if self.conf.autonomous: if self.originPage.isDisambig() and not page.isDisambig(): pywikibot.output( u"NOTE: Ignoring link from disambiguation page %s to " @@ -1214,17 +1211,17 @@
def isIgnored(self, page): """Return True if pages is to be ignored.""" - if page.site.lang in globalvar.neverlink: + if page.site.lang in self.conf.neverlink: pywikibot.output(u"Skipping link %s to an ignored language" % page) return True - if page in globalvar.ignore: + if page in self.conf.ignore: pywikibot.output(u"Skipping link %s to an ignored page" % page) return True return False
def reportInterwikilessPage(self, page): """Report interwikiless page.""" - if not globalvar.quiet: + if not self.conf.quiet: pywikibot.output(u"NOTE: %s does not have any interwiki links" % self.originPage) if config.without_interwiki: @@ -1239,15 +1236,15 @@ if not self.workonme: # Do not ask hints for pages that we don't work on anyway return - if (self.untranslated or globalvar.askhints) and not self.hintsAsked \ + if (self.untranslated or self.conf.askhints) and not self.hintsAsked \ and self.originPage and self.originPage.exists() \ and not self.originPage.isRedirectPage() and \ not self.originPage.isCategoryRedirect(): # Only once! self.hintsAsked = True - if globalvar.untranslated: + if self.conf.untranslated: newhint = None - t = globalvar.showtextlink + t = self.conf.showtextlink if t: pywikibot.output(self.originPage.get()[:t]) # loop @@ -1255,7 +1252,7 @@ newhint = pywikibot.input( u'Give a hint (? to see pagetext):') if newhint == '?': - t += globalvar.showtextlinkadd + t += self.conf.showtextlinkadd pywikibot.output(self.originPage.get()[:t]) elif newhint and ':' not in newhint: pywikibot.output( @@ -1268,12 +1265,12 @@ links = titletranslate.translate( self.originPage, hints=[newhint], - auto=globalvar.auto, - removebrackets=globalvar.hintnobracket) + auto=self.conf.auto, + removebrackets=self.conf.hintnobracket) for link in links: page = pywikibot.Page(link) self.addIfNew(page, counter, None) - if globalvar.hintsareright: + if self.conf.hintsareright: self.hintedsites.add(page.site)
def batchLoaded(self, counter): @@ -1295,7 +1292,7 @@ self.done.add(page)
# make sure that none of the linked items is an auto item - if globalvar.skipauto: + if self.conf.skipauto: dictName, year = page.autoFormat() if dictName is not None: if self.originPage: @@ -1306,7 +1303,7 @@ page.site.lang, page, dictName, year))
# Abort processing if the bot is running in autonomous mode. - if globalvar.autonomous: + if self.conf.autonomous: self.makeForcedStop(counter)
# Register this fact at the todo-counter. @@ -1316,8 +1313,8 @@ # todo list.
if not page.exists(): - globalvar.remove.append(unicode(page)) - if not globalvar.quiet: + self.conf.remove.append(unicode(page)) + if not self.conf.quiet: pywikibot.output(u"NOTE: %s does not exist. Skipping." % page) if page == self.originPage: @@ -1338,14 +1335,14 @@ else: redirectTargetPage = page.getCategoryRedirectTarget() redir = 'category ' - if not globalvar.quiet: + if not self.conf.quiet: pywikibot.output(u"NOTE: %s is %sredirect to %s" % (page, redir, redirectTargetPage)) if self.originPage is None or page == self.originPage: # the 1st existig page becomes the origin page, if none was # supplied - if globalvar.initialredirect: - if globalvar.contentsondisk: + if self.conf.initialredirect: + if self.conf.contentsondisk: redirectTargetPage = StoredPage(redirectTargetPage) # don't follow another redirect; it might be a self loop if not redirectTargetPage.isRedirectPage() \ @@ -1360,12 +1357,12 @@ for site, count in self.todo.siteCounts(): counter.minus(site, count) self.todo = PageTree() - elif not globalvar.followredirect: - if not globalvar.quiet: + elif not self.conf.followredirect: + if not self.conf.quiet: pywikibot.output(u"NOTE: not following %sredirects." % redir) elif page.isStaticRedirect(): - if not globalvar.quiet: + if not self.conf.quiet: pywikibot.output( u"NOTE: not following static %sredirects." % redir) elif (page.site.family == redirectTargetPage.site.family and @@ -1380,8 +1377,8 @@ # must be behind the page.isRedirectPage() part # otherwise a redirect error would be raised elif page_empty_check(page): - globalvar.remove.append(unicode(page)) - if not globalvar.quiet: + self.conf.remove.append(unicode(page)) + if not self.conf.quiet: pywikibot.output(u"NOTE: %s is empty. Skipping." % page) if page == self.originPage: for site, count in self.todo.siteCounts(): @@ -1392,7 +1389,7 @@ continue
elif page.section(): - if not globalvar.quiet: + if not self.conf.quiet: pywikibot.output(u"NOTE: %s is a page section. Skipping." % page) continue @@ -1405,7 +1402,7 @@ try: iw = page.langlinks() except pywikibot.UnknownSite: - if not globalvar.quiet: + if not self.conf.quiet: pywikibot.output(u"NOTE: site %s does not exist." % page.site) continue @@ -1429,19 +1426,19 @@
if self.originPage == page: self.untranslated = (len(iw) == 0) - if globalvar.untranslatedonly: + if self.conf.untranslatedonly: # Ignore the interwiki links. iw = () - if globalvar.lacklanguage: - if globalvar.lacklanguage in [link.site.lang + if self.conf.lacklanguage: + if self.conf.lacklanguage in [link.site.lang for link in iw]: iw = () self.workonme = False - if len(iw) < globalvar.minlinks: + if len(iw) < self.conf.minlinks: iw = () self.workonme = False
- elif globalvar.autonomous and duplicate and not skip: + elif self.conf.autonomous and duplicate and not skip: pywikibot.output(u"Stopping work on %s because duplicate pages" " %s and %s are found" % (self.originPage, duplicate, page)) @@ -1473,14 +1470,14 @@
for link in iw: linkedPage = pywikibot.Page(link) - if globalvar.hintsareright: + if self.conf.hintsareright: if linkedPage.site in self.hintedsites: pywikibot.output( 'NOTE: %s: %s extra interwiki on hinted site ignored %s' % (self.originPage, page, linkedPage)) break if not self.skipPage(page, linkedPage, counter): - if globalvar.followinterwiki or page == self.originPage: + if self.conf.followinterwiki or page == self.originPage: if self.addIfNew(linkedPage, counter, page): # It is new. Also verify whether it is the second on the # same site @@ -1556,7 +1553,7 @@ errorCount += 1 self.problem(u"Found more than one link for %s" % site)
- if not errorCount and not globalvar.select: + if not errorCount and not self.conf.select: # no errors, so all lists have only one item for site, pages in new.items(): result[site] = pages[0] @@ -1569,7 +1566,7 @@
# We don't need to continue with the rest if we're in autonomous # mode. - if globalvar.autonomous: + if self.conf.autonomous: return None
# First loop over the ones that have more solutions @@ -1646,7 +1643,7 @@ return else: return - if not self.untranslated and globalvar.untranslatedonly: + if not self.untranslated and self.conf.untranslatedonly: return if self.forcedStop: # autonomous with problem pywikibot.output(u"======Aborted processing %s======" @@ -1679,8 +1676,8 @@ updatedSites = [] notUpdatedSites = [] # Process all languages here - globalvar.always = False - if globalvar.limittwo: + self.conf.always = False + if self.conf.limittwo: lclSite = self.originPage.site lclSiteDone = False frgnSiteDone = False @@ -1702,7 +1699,7 @@ notUpdatedSites.append(site) except GiveUpOnPage: break - elif (not globalvar.strictlimittwo and + elif (not self.conf.strictlimittwo and site in new and site != lclSite): old = {} @@ -1715,12 +1712,12 @@ % new[site]) continue mods, mcomment, adding, removing, modifying \ - = compareLanguages(old, new, insite=lclSite) - if ((len(removing) > 0 and not globalvar.autonomous) or + = compareLanguages(old, new, lclSite, self.conf.summary) + if ((len(removing) > 0 and not self.conf.autonomous) or (len(modifying) > 0 and self.problemfound) or (len(old) == 0) or - (globalvar.needlimit and - len(adding) + len(modifying) >= globalvar.needlimit + 1)): + (self.conf.needlimit and + len(adding) + len(modifying) >= self.conf.needlimit + 1)): try: if self.replaceLinks(new[site], new): updatedSites.append(site) @@ -1768,7 +1765,7 @@ after a KeyboardInterrupt for example is redundant, because the whole storage file will be eventually removed. """ - if globalvar.contentsondisk: + if self.conf.contentsondisk: for page in self.foundIn: # foundIn can contain either Page or StoredPage objects # calling the destructor on _contents will delete the @@ -1778,7 +1775,7 @@
def replaceLinks(self, page, newPages): """Return True if saving was successful.""" - if globalvar.localonly: + if self.conf.localonly: # In this case only continue on the Page we started with if page != self.originPage: raise SaveError(u'-localonly and page != originPage') @@ -1852,13 +1849,16 @@ old[page2.site] = page2
# Check what needs to get done - mods, mcomment, adding, removing, modifying = compareLanguages(old, - new, - insite=page.site) + mods, mcomment, adding, removing, modifying = compareLanguages( + old, + new, + page.site, + self.conf.summary + )
# When running in autonomous mode without -force switch, make sure we # don't remove any items, but allow addition of the new ones - if globalvar.autonomous and not globalvar.force and len(removing) > 0: + if self.conf.autonomous and not self.conf.force and len(removing) > 0: for rmsite in removing: # Sometimes sites have an erroneous link to itself as an # interwiki @@ -1867,8 +1867,8 @@ rmPage = old[rmsite] # put it to new means don't delete it if ( - not globalvar.cleanup or - unicode(rmPage) not in globalvar.remove + not self.conf.cleanup or + unicode(rmPage) not in self.conf.remove ): new[rmsite] = rmPage pywikibot.output( @@ -1876,11 +1876,14 @@ 'disambiguation state.' % rmPage) # Re-Check what needs to get done - mods, mcomment, adding, removing, modifying = compareLanguages(old, - new, - insite=page.site) + mods, mcomment, adding, removing, modifying = compareLanguages( + old, + new, + page.site, + self.conf.summary + ) if not mods: - if not globalvar.quiet: + if not self.conf.quiet: pywikibot.output(u'No changes needed on page %s' % page) return False
@@ -1919,13 +1922,13 @@ % (", ".join([x.code for x in removing]), page), createneed=False) ask = True - if globalvar.force or globalvar.cleanup: + if self.conf.force or self.conf.cleanup: ask = False - if globalvar.confirm and not globalvar.always: + if self.conf.confirm and not self.conf.always: ask = True # If we need to ask, do so if ask: - if globalvar.autonomous: + if self.conf.autonomous: # If we cannot ask, deny permission answer = 'n' else: @@ -1940,20 +1943,20 @@ return True elif answer == 'a': # don't ask for the rest of this subject - globalvar.always = True + self.conf.always = True answer = 'y' else: # If we do not need to ask, allow answer = 'y' # If we got permission to submit, do so if answer == 'y': - if not globalvar.quiet: + if not self.conf.quiet: pywikibot.output(u"NOTE: Updating live wiki...") timeout = 60 page.text = newtext while True: try: - page.save(summary=mcomment, async=globalvar.async, + page.save(summary=mcomment, async=self.conf.async, nocreate=True) except pywikibot.NoCreateError: pywikibot.exception() @@ -2062,7 +2065,7 @@ It controls which pages are queried from which languages when. """
- def __init__(self): + def __init__(self, conf=None): """Constructor.""" self.subjects = [] # We count how many pages still need to be loaded per site. @@ -2073,10 +2076,11 @@ self.counts = {} self.pageGenerator = None self.generated = 0 + self.conf = conf
def add(self, page, hints=None): """Add a single subject to the list.""" - subj = Subject(page, hints=hints) + subj = Subject(page, hints=hints, conf=self.conf) self.subjects.append(subj) for site, count in subj.openSites(): # Keep correct counters @@ -2119,7 +2123,7 @@ PageGenerator """ fs = self.firstSubject() - if fs and (not globalvar.quiet): + if fs and (not self.conf.quiet): pywikibot.output(u"NOTE: The first unfinished subject is %s" % fs.originPage) pywikibot.output(u"NOTE: Number of pages queued is %d, trying to add %d more." @@ -2132,16 +2136,16 @@ except IOError: pywikibot.output(u'IOError occurred; skipping') continue - if page in globalvar.skip: + if page in self.conf.skip: pywikibot.output(u'Skipping: %s is in the skip list' % page) continue - if globalvar.skipauto: + if self.conf.skipauto: dictName, year = page.autoFormat() if dictName is not None: pywikibot.output('Skipping: %s is an auto entry %s(%s)' % (page, dictName, year)) continue - if globalvar.parenthesesonly: + if self.conf.parenthesesonly: # Only yield pages that have ( ) in titles if "(" not in page.title(): continue @@ -2170,7 +2174,7 @@ until = first_upper(until) if page.title(withNamespace=False) > until: raise StopIteration - self.add(page, hints=globalvar.hints) + self.add(page, hints=self.conf.hints) self.generated += 1 if self.generateNumber: if self.generated >= self.generateNumber: @@ -2217,13 +2221,13 @@ # Do we still have enough subjects to work on for which the # home language has been retrieved? This is rough, because # some subjects may need to retrieve a second home-language page! - if len(self.subjects) - mycount < globalvar.minsubjects: + if len(self.subjects) - mycount < self.conf.minsubjects: # Can we make more home-language queries by adding subjects? 
- if self.pageGenerator and mycount < globalvar.maxquerysize: + if self.pageGenerator and mycount < self.conf.maxquerysize: timeout = 60 while timeout < 3600: try: - self.generateMore(globalvar.maxquerysize - mycount) + self.generateMore(self.conf.maxquerysize - mycount) except pywikibot.ServerError: # Could not extract allpages special page? pywikibot.output( @@ -2235,7 +2239,7 @@ else: break # If we have a few, getting the home language is a good thing. - if not globalvar.restoreAll: + if not self.conf.restoreAll: try: if self.counts[pywikibot.Site()] > 4: return pywikibot.Site() @@ -2267,7 +2271,7 @@ if pages: pageGroup.extend(pages) subjectGroup.append(subject) - if len(pageGroup) >= globalvar.maxquerysize: + if len(pageGroup) >= self.conf.maxquerysize: # We have found enough pages to fill the bandwidth. break if len(pageGroup) == 0: @@ -2319,7 +2323,7 @@ return len(self.subjects)
-def compareLanguages(old, new, insite): +def compareLanguages(old, new, insite, summary): """Compare changes and setup i18n message.""" oldiw = set(old) newiw = set(new) @@ -2329,7 +2333,7 @@ removing = sorted(oldiw - newiw) modifying = sorted(site for site in oldiw & newiw if old[site] != new[site])
- if not globalvar.summary and \ + if not summary and \ len(adding) + len(removing) + len(modifying) <= 3: # Use an extended format for the string linking to all added pages. fmt = lambda d, site: unicode(d[site]) # flake8: disable=E731 @@ -2353,7 +2357,7 @@ useFrom = False
if adding or removing or modifying: - mcomment += globalvar.summary + mcomment += summary comma = insite.mediawiki_message('comma-separator')
changes = {'adding': comma.join(fmt(new, x) for x in adding), @@ -2464,8 +2468,9 @@ local_args = pywikibot.handle_args(args) genFactory = pagegenerators.GeneratorFactory()
+ iwconf = InterwikiBotConfig() for arg in local_args: - if globalvar.readOptions(arg): + if iwconf.readOptions(arg): continue elif arg.startswith('-warnfile:'): warnfile = arg[10:] @@ -2477,7 +2482,7 @@ else: startyear = 1 # avoid problems where year pages link to centuries etc. - globalvar.followredirect = False + iwconf.followredirect = False hintlessPageGen = pagegenerators.YearPageGenerator(startyear) elif arg.startswith('-days'): if len(arg) > 6 and arg[5] == ':' and arg[6:].isdigit(): @@ -2494,8 +2499,8 @@ else: newPages = 100 elif arg.startswith('-restore'): - globalvar.restoreAll = arg[9:].lower() == 'all' - optRestore = not globalvar.restoreAll + iwconf.restoreAll = arg[9:].lower() == 'all' + optRestore = not iwconf.restoreAll elif arg == '-continue': optContinue = True elif arg == '-hintsonly': @@ -2516,15 +2521,15 @@ singlePageTitle = arg
# Do not use additional summary with autonomous mode - if globalvar.autonomous: - globalvar.summary = u'' - elif globalvar.summary: - globalvar.summary += u'; ' + if iwconf.autonomous: + iwconf.summary = '' + elif iwconf.summary: + iwconf.summary += '; '
site = pywikibot.Site() # ensure that we don't try to change main page mainpagename = site.siteinfo['mainpage'] - globalvar.skip.add(pywikibot.Page(site, mainpagename)) + iwconf.skip.add(pywikibot.Page(site, mainpagename))
if newPages is not None: if len(namespaces) == 0: @@ -2543,7 +2548,7 @@ hintlessPageGen = pagegenerators.NewpagesPageGenerator(total=newPages, namespaces=ns)
- elif optRestore or optContinue or globalvar.restoreAll: + elif optRestore or optContinue or iwconf.restoreAll: dumpFileName = pywikibot.config.datafilepath( 'data', 'interwiki-dumps', @@ -2572,7 +2577,7 @@ [hintlessPageGen, gen2]) restoredFiles.append(dumpFileName)
- bot = InterwikiBot() + bot = InterwikiBot(iwconf)
if not hintlessPageGen: hintlessPageGen = genFactory.getCombinedGenerator() @@ -2592,10 +2597,10 @@ singlePage = pywikibot.Page(pywikibot.Site(), singlePageTitle) else: singlePage = None - bot.add(singlePage, hints=globalvar.hints) + bot.add(singlePage, hints=iwconf.hints)
try: - append = not (optRestore or optContinue or globalvar.restoreAll) + append = not (optRestore or optContinue or iwconf.restoreAll) bot.run() except KeyboardInterrupt: dumpFileName = bot.dump(append) @@ -2603,7 +2608,7 @@ dumpFileName = bot.dump(append) raise finally: - if globalvar.contentsondisk: + if iwconf.contentsondisk: StoredPage.SPdeleteStore() if dumpFileName: try: @@ -2617,8 +2622,6 @@ except OSError: pass
-# =========== -globalvar = Global()
if __name__ == "__main__": main()
pywikibot-commits@lists.wikimedia.org