jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462527 )
Change subject: [cleanup] cleanup scripts/interwiki.py ......................................................................
[cleanup] cleanup scripts/interwiki.py
- use single quotes for string literals and double quotes **only** if they contain single quotes - use str.format(...) instead of the modulo operator for type specifier arguments - remove leading "u" prefix from strings - use "+" to concatenate strings in some cases
Change-Id: Ie3caa13d11b92a89704d9310249cb0bb2850024d --- M scripts/interwiki.py 1 file changed, 177 insertions(+), 169 deletions(-)
Approvals: Xqt: Looks good to me, approved jenkins-bot: Verified
diff --git a/scripts/interwiki.py b/scripts/interwiki.py index 8d7a68c..79aaa9e 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -398,18 +398,18 @@ # A list of template names in different languages. # Pages which contain these shouldn't be changed. ignoreTemplates = { - '_default': [u'delete'], - 'ar': [u'قيد الاستخدام'], - 'cs': [u'Pracuje_se'], - 'de': [u'inuse', 'in use', u'in bearbeitung', u'inbearbeitung', - u'löschen', u'sla', - u'löschantrag', u'löschantragstext', - u'falschschreibung', - u'obsolete schreibung', 'veraltete schreibweise'], - 'en': [u'inuse', u'softredirect'], - 'fa': [u'در دست ویرایش ۲', u'حذف سریع'], - 'pdc': [u'lösche'], - 'zh': [u'inuse'], + '_default': ['delete'], + 'ar': ['قيد الاستخدام'], + 'cs': ['Pracuje_se'], + 'de': ['inuse', 'in use', 'in bearbeitung', 'inbearbeitung', + 'löschen', 'sla', + 'löschantrag', 'löschantragstext', + 'falschschreibung', + 'obsolete schreibung', 'veraltete schreibweise'], + 'en': ['inuse', 'softredirect'], + 'fa': ['در دست ویرایش ۲', 'حذف سریع'], + 'pdc': ['lösche'], + 'zh': ['inuse'], }
@@ -456,7 +456,7 @@ quiet = False restoreAll = False asynchronous = False - summary = u'' + summary = '' repository = False
def note(self, text): @@ -914,8 +914,8 @@ # Bug-check: Isn't there any work still in progress? We can't work on # different sites at a time! if len(self.pending) > 0: - raise "BUG: Can't start to work on %s; still working on %s" \ - % (site, self.pending) + raise "BUG: Can't start to work on {}; still working on {}".format( + site, self.pending) # Prepare a list of suitable pages result = [] for page in self.todo.filter(site): @@ -951,10 +951,10 @@ if self.conf.nobackonly and self.originPage: if page == self.originPage: try: - pywikibot.output(u"%s has a backlink from %s." - % (page, linkingPage)) + pywikibot.output('{} has a backlink from {}.' + .format(page, linkingPage)) except UnicodeDecodeError: - pywikibot.output(u"Found a backlink for a page.") + pywikibot.output('Found a backlink for a page.') self.makeForcedStop(counter) return False
@@ -999,10 +999,10 @@ return False if self.conf.autonomous: pywikibot.output( - 'NOTE: Ignoring link from page %s in namespace %i to page ' - '%s in namespace %i.' - % (linkingPage, linkingPage.namespace(), linkedPage, - linkedPage.namespace())) + 'NOTE: Ignoring link from page {} in namespace' + ' {} to page {} in namespace {}.' + .format(linkingPage, linkingPage.namespace(), linkedPage, + linkedPage.namespace())) # Fill up foundIn, so that we will not write this notice self.foundIn[linkedPage] = [linkingPage] return True @@ -1011,18 +1011,19 @@ linkedPage.site) if preferredPage: pywikibot.output( - 'NOTE: Ignoring link from page %s in namespace %i to ' - 'page %s in namespace %i because page %s in the ' + 'NOTE: Ignoring link from page {} in namespace {} to ' + 'page {} in namespace {} because page {} in the ' 'correct namespace has already been found.' - % (linkingPage, linkingPage.namespace(), linkedPage, - linkedPage.namespace(), preferredPage)) + .format(linkingPage, linkingPage.namespace(), + linkedPage, linkedPage.namespace(), + preferredPage)) return True else: choice = pywikibot.input_choice( - 'WARNING: %s is in namespace %i, but %s is in ' - 'namespace %i. Follow it anyway?' - % (self.originPage, self.originPage.namespace(), - linkedPage, linkedPage.namespace()), + 'WARNING: {} is in namespace {}, but {} is in ' + 'namespace {}. Follow it anyway?' 
+ .format(self.originPage, self.originPage.namespace(), + linkedPage, linkedPage.namespace()), [('Yes', 'y'), ('No', 'n'), ('Add an alternative', 'a'), ('give up', 'g')], automatic_quit=False) @@ -1033,9 +1034,9 @@ self.makeForcedStop(counter) elif choice == 'a': newHint = pywikibot.input( - u'Give the alternative for language %s, not ' - u'using a language code:' - % linkedPage.site.lang) + 'Give the alternative for language {}, not ' + 'using a language code:' + .format(linkedPage.site.lang)) if newHint: alternativePage = pywikibot.Page( linkedPage.site, newHint) @@ -1045,8 +1046,8 @@ None) else: pywikibot.output( - u"NOTE: ignoring %s and its interwiki links" - % linkedPage) + 'NOTE: ignoring {} and its interwiki links' + .format(linkedPage)) return True else: # same namespaces, no problem @@ -1057,16 +1058,16 @@ """Check for ignoring pages.""" if self.originPage and self.conf.same == 'wiktionary': if page.title().lower() != self.originPage.title().lower(): - pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode" - % (page, self.originPage)) + pywikibot.output('NOTE: Ignoring {} for {} in wiktionary mode' + .format(page, self.originPage)) return True elif (page.title() != self.originPage.title() and self.originPage.namespace().case == 'case-sensitive' and page.namespace().case == 'case-sensitive'): pywikibot.output( - 'NOTE: Ignoring %s for %s in wiktionary mode because both ' - u"languages are uncapitalized." - % (page, self.originPage)) + 'NOTE: Ignoring {} for {} in wiktionary mode because both ' + 'languages are uncapitalized.' + .format(page, self.originPage)) return True return False
@@ -1088,13 +1089,13 @@ if self.conf.autonomous: if self.originPage.isDisambig() and not page.isDisambig(): pywikibot.output( - u"NOTE: Ignoring link from disambiguation page %s to " - u"non-disambiguation %s" % (self.originPage, page)) + 'NOTE: Ignoring link from disambiguation page {} to ' + 'non-disambiguation {}'.format(self.originPage, page)) return (True, None) elif not self.originPage.isDisambig() and page.isDisambig(): pywikibot.output( - u"NOTE: Ignoring link from non-disambiguation page %s to " - u"disambiguation %s" % (self.originPage, page)) + 'NOTE: Ignoring link from non-disambiguation page {} to ' + 'disambiguation {}'.format(self.originPage, page)) return (True, None) else: choice = 'y' @@ -1102,16 +1103,16 @@ disambig = self.getFoundDisambig(page.site) if disambig: pywikibot.output( - u"NOTE: Ignoring non-disambiguation page %s for %s " - u"because disambiguation page %s has already been " - u"found." - % (page, self.originPage, disambig)) + 'NOTE: Ignoring non-disambiguation page {} for {} ' + 'because disambiguation page {} has already been ' + 'found.' + .format(page, self.originPage, disambig)) return (True, None) else: choice = pywikibot.input_choice( - "WARNING: %s is a disambiguation page, but %s doesn't " - u"seem to be one. Follow it anyway?" - % (self.originPage, page), + "WARNING: {} is a disambiguation page, but {} doesn't " + 'seem to be one. Follow it anyway?' + .format(self.originPage, page), [('Yes', 'y'), ('No', 'n'), ('Add an alternative', 'a'), ('give up', 'g')], automatic_quit=False) @@ -1119,15 +1120,15 @@ nondisambig = self.getFoundNonDisambig(page.site) if nondisambig: pywikibot.output( - 'NOTE: Ignoring disambiguation page %s for %s because ' - u"non-disambiguation page %s has already been found." - % (page, self.originPage, nondisambig)) + 'NOTE: Ignoring disambiguation page {} for {} because ' + 'non-disambiguation page {} has already been found.' 
+ .format(page, self.originPage, nondisambig)) return (True, None) else: choice = pywikibot.input_choice( - u'WARNING: %s doesn't seem to be a disambiguation ' - u'page, but %s is one. Follow it anyway?' - % (self.originPage, page), + "WARNING: {} doesn't seem to be a disambiguation " + 'page, but {} is one. Follow it anyway?' + .format(self.originPage, page), [('Yes', 'y'), ('No', 'n'), ('Add an alternative', 'a'), ('give up', 'g')], automatic_quit=False) @@ -1135,8 +1136,8 @@ return (True, None) elif choice == 'a': newHint = pywikibot.input( - u'Give the alternative for language %s, not using a ' - u'language code:' % page.site.lang) + 'Give the alternative for language {}, not using a ' + 'language code:'.format(page.site.lang)) alternativePage = pywikibot.Page(page.site, newHint) return (True, alternativePage) elif choice == 'g': @@ -1148,10 +1149,12 @@ def isIgnored(self, page): """Return True if pages is to be ignored.""" if page.site.lang in self.conf.neverlink: - pywikibot.output(u"Skipping link %s to an ignored language" % page) + pywikibot.output('Skipping link {} to an ignored language' + .format(page)) return True if page in self.conf.ignore: - pywikibot.output(u"Skipping link %s to an ignored page" % page) + pywikibot.output('Skipping link {} to an ignored page' + .format(page)) return True return False
@@ -1184,15 +1187,15 @@ # loop while True: newhint = pywikibot.input( - u'Give a hint (? to see pagetext):') + 'Give a hint (? to see pagetext):') if newhint == '?': t += self.conf.showtextlinkadd pywikibot.output(self.originPage.get()[:t]) elif newhint and ':' not in newhint: pywikibot.output( - u'Please enter a hint in the format ' - u'language:pagename or type nothing if you do not ' - u'have a hint.') + 'Please enter a hint in the format ' + 'language:pagename or type nothing if you do not ' + 'have a hint.') elif not newhint: break else: @@ -1231,10 +1234,10 @@ if dictName is not None: if self.originPage: pywikibot.warning( - '%s:%s relates to %s:%s, which is an ' - u'auto entry %s(%s)' - % (self.originPage.site.lang, self.originPage, - page.site.lang, page, dictName, year)) + '{}:{} relates to {}:{}, which is an ' + 'auto entry {}({})' + .format(self.originPage.site.lang, self.originPage, + page.site.lang, page, dictName, year))
# Abort processing if the bot is running in autonomous mode if self.conf.autonomous: @@ -1298,9 +1301,10 @@ not self.skipPage(page, redirectTargetPage, counter)): if self.addIfNew(redirectTargetPage, counter, page): if config.interwiki_shownew: - pywikibot.output(u"%s: %s gives new %sredirect %s" - % (self.originPage, page, redir, - redirectTargetPage)) + pywikibot.output('{}: {} gives new {}redirect {}' + .format(self.originPage, + page, redir, + redirectTargetPage)) continue
# must be behind the page.isRedirectPage() part @@ -1333,8 +1337,8 @@
(skip, alternativePage) = self.disambigMismatch(page, counter) if skip: - pywikibot.output(u"NOTE: ignoring %s and its interwiki links" - % page) + pywikibot.output('NOTE: ignoring {} and its interwiki links' + .format(page)) self.done.remove(page) iw = () if alternativePage: @@ -1363,9 +1367,9 @@ self.workonme = False
elif self.conf.autonomous and duplicate and not skip: - pywikibot.output('Stopping work on %s because duplicate pages' - " %s and %s are found" - % (self.originPage, duplicate, page)) + pywikibot.output('Stopping work on {} because duplicate pages' + ' {} and {} are found' + .format(self.originPage, duplicate, page)) self.makeForcedStop(counter) try: with codecs.open( @@ -1397,9 +1401,9 @@ if self.conf.hintsareright: if linkedPage.site in self.hintedsites: pywikibot.output( - 'NOTE: %s: %s extra interwiki on hinted site ' - 'ignored %s' - % (self.originPage, page, linkedPage)) + 'NOTE: {}: {} extra interwiki on hinted site ' + 'ignored {}' + .format(self.originPage, page, linkedPage)) break if not self.skipPage(page, linkedPage, counter): if self.conf.followinterwiki or page == self.originPage: @@ -1414,9 +1418,10 @@ # either may be a redirect to the other. # No way to find out quickly! pywikibot.output( - 'NOTE: %s: %s gives duplicate ' - 'interwiki on same site %s' - % (self.originPage, page, linkedPage)) + 'NOTE: {}: {} gives duplicate ' + 'interwiki on same site {}' + .format(self.originPage, page, + linkedPage)) break else: if config.interwiki_shownew: @@ -1439,7 +1444,7 @@
def problem(self, txt, createneed=True): """Report a problem with the resolution of this subject.""" - pywikibot.output(u"ERROR: %s" % txt) + pywikibot.output('ERROR: ' + txt) self.confirm = True if createneed: self.problemfound = True @@ -1448,9 +1453,9 @@ """Report found interlanguage links with conflicts.""" for page2 in sorted(self.foundIn[page]): if page2 is None: - pywikibot.output(u" " * indent + "Given as a hint.") + pywikibot.output(' ' * indent + 'Given as a hint.') else: - pywikibot.output(u" " * indent + unicode(page2)) + pywikibot.output(' ' * indent + unicode(page2))
def assemble(self): """Assemble language links.""" @@ -1470,7 +1475,7 @@ continue if site == self.originPage.site: if page != self.originPage: - self.problem(u"Found link to %s" % page) + self.problem('Found link to ' + page) self.whereReport(page) errorCount += 1 else: @@ -1483,7 +1488,7 @@ for site, pages in new.items(): if len(pages) > 1: errorCount += 1 - self.problem(u"Found more than one link for %s" % site) + self.problem('Found more than one link for ' + site)
if not errorCount and not self.conf.select: # no errors, so all lists have only one item @@ -1504,11 +1509,11 @@ # First loop over the ones that have more solutions for site, pages in new.items(): if len(pages) > 1: - pywikibot.output(u"=" * 30) - pywikibot.output(u"Links to %s" % site) + pywikibot.output('=' * 30) + pywikibot.output('Links to ' + site) for i, page2 in enumerate(pages, 1): - pywikibot.output(u" (%d) Found link to %s in:" - % (i, page2)) + pywikibot.output(' ({}) Found link to {} in:' + .format(i, page2)) self.whereReport(page2, indent=8)
# TODO: allow answer to repeat previous or go back after a @@ -1529,9 +1534,9 @@ for site, pages in new.items(): if len(pages) == 1: if not acceptall: - pywikibot.output(u"=" * 30) + pywikibot.output('=' * 30) page2 = pages[0] - pywikibot.output(u"Found link to %s in:" % page2) + pywikibot.output('Found link to {} in:'.format(page2)) self.whereReport(page2, indent=4) while True: if acceptall: @@ -1540,7 +1545,7 @@ # TODO: allow answer to repeat previous or go back # after a mistake answer = pywikibot.input_choice( - u'What should be done?', + 'What should be done?', [('accept', 'a'), ('reject', 'r'), ('give up', 'g'), ('accept all', 'l')], 'a', automatic_quit=False) @@ -1565,7 +1570,7 @@
""" if not self.isDone(): - raise Exception("Bugcheck: finish called before done") + raise Exception('Bugcheck: finish called before done') if not self.workonme: return if self.originPage: @@ -1578,8 +1583,8 @@ if not self.untranslated and self.conf.untranslatedonly: return if self.forcedStop: # autonomous with problem - pywikibot.output(u"======Aborted processing %s======" - % self.originPage) + pywikibot.output('======Aborted processing {}======' + .format(self.originPage)) return # The following check is not always correct and thus disabled. # self.done might contain no interwiki links because of the -neverlink @@ -1587,12 +1592,13 @@ # if len(self.done) == 1: # # No interwiki at all # return - pywikibot.output(u"======Post-processing %s======" % self.originPage) + pywikibot.output('======Post-processing {}======' + .format(self.originPage)) # Assemble list of accepted interwiki links new = self.assemble() if new is None: # User said give up - pywikibot.output(u"======Aborted processing %s======" - % self.originPage) + pywikibot.output('======Aborted processing {}======' + .format(self.originPage)) return
# Make sure new contains every page link, including the page we are @@ -1641,8 +1647,8 @@ page = pywikibot.Page(link) old[page.site] = page except pywikibot.NoPage: - pywikibot.output(u"BUG>>> %s no longer exists?" - % new[site]) + pywikibot.output('BUG>>> {} no longer exists?' + .format(new[site])) continue mods, mcomment, adding, removing, modifying \ = compareLanguages(old, new, lclSite, @@ -1713,20 +1719,21 @@ if self.conf.localonly: # In this case only continue on the Page we started with if page != self.originPage: - raise SaveError(u'-localonly and page != originPage') + raise SaveError('-localonly and page != originPage') if page.section(): # This is not a page, but a subpage. Do not edit it. - pywikibot.output('Not editing %s: not doing interwiki on subpages' - % page) - raise SaveError(u'Link has a #section') + pywikibot.output('Not editing {}: not doing interwiki on subpages' + .format(page)) + raise SaveError('Link has a #section') try: pagetext = page.get() except pywikibot.NoPage: - pywikibot.output(u"Not editing %s: page does not exist" % page) - raise SaveError(u'Page doesn't exist') + pywikibot.output('Not editing {}: page does not exist' + .format(page)) + raise SaveError("Page doesn't exist") if page_empty_check(page): - pywikibot.output(u"Not editing %s: page is empty" % page) - raise SaveError(u'Page is empty.') + pywikibot.output('Not editing {}: page is empty'.format(page)) + raise SaveError('Page is empty.')
# clone original newPages dictionary, so that we can modify it to the # local page's needs @@ -1741,7 +1748,7 @@ (ignorepage.site != page.site): if (ignorepage not in interwikis): pywikibot.output( - u"Ignoring link to %(to)s for %(from)s" + 'Ignoring link to %(to)s for %(from)s' % {'to': ignorepage, 'from': page}) new.pop(ignorepage.site) @@ -1762,13 +1769,13 @@ # site. pltmp = new[page.site] if pltmp != page: - s = u"None" + s = 'None' if pltmp is not None: s = pltmp pywikibot.output( - u"BUG>>> %s is not in the list of new links! Found %s." - % (page, s)) - raise SaveError(u'BUG: sanity check failed') + 'BUG>>> {} is not in the list of new links! Found {}.' + .format(page, s)) + raise SaveError('BUG: sanity check failed')
# Avoid adding an iw link back to itself del new[page.site] @@ -1808,9 +1815,8 @@ ): new[rmsite] = rmPage pywikibot.warning( - '%s is either deleted or has a mismatching ' - 'disambiguation state.' - % rmPage) + '{} is either deleted or has a mismatching ' + 'disambiguation state.'.format(rmPage)) # Re-Check what needs to get done mods, mcomment, adding, removing, modifying = compareLanguages( old, @@ -1825,7 +1831,7 @@ # Show a message in purple. pywikibot.output(color_format( '{lightpurple}Updating links on page {0}.{default}', page)) - pywikibot.output(u"Changes to be made: %s" % mods) + pywikibot.output('Changes to be made: {}'.format(mods)) oldtext = page.get() template = (page.namespace() == 10) newtext = textlib.replaceLanguageLinks(oldtext, new, @@ -1836,12 +1842,12 @@ if not botMayEdit(page): if template: pywikibot.output( - u'SKIPPING: %s should have interwiki links on subpage.' - % page) + 'SKIPPING: {} should have interwiki links on subpage.' + .format(page)) else: pywikibot.output( - u'SKIPPING: %s is under construction or to be deleted.' - % page) + 'SKIPPING: {} is under construction or to be deleted.' + .format(page)) return False if newtext == oldtext: return False @@ -1853,8 +1859,8 @@
# Allow for special case of a self-pointing interwiki link if removing and removing != [page.site]: - self.problem(u'Found incorrect link to %s in %s' - % (", ".join([x.code for x in removing]), page), + self.problem('Found incorrect link to {} in {}' + .format(', '.join([x.code for x in removing]), page), createneed=False) ask = True if self.conf.force or self.conf.cleanup: @@ -1867,7 +1873,7 @@ # If we cannot ask, deny permission answer = 'n' else: - answer = pywikibot.input_choice(u'Submit?', + answer = pywikibot.input_choice('Submit?', [('Yes', 'y'), ('No', 'n'), ('open in Browser', 'b'), ('Give up', 'g'), @@ -1897,46 +1903,48 @@ pywikibot.exception() return False except pywikibot.LockedPage: - pywikibot.output(u'Page %s is locked. Skipping.' % page) - raise SaveError(u'Locked') + pywikibot.output('Page {} is locked. Skipping.' + .format(page)) + raise SaveError('Locked') except pywikibot.EditConflict: pywikibot.output( 'ERROR putting page: An edit conflict occurred. ' 'Giving up.') - raise SaveError(u'Edit conflict') + raise SaveError('Edit conflict') except (pywikibot.SpamfilterError) as error: pywikibot.output( 'ERROR putting page: {0} blacklisted by spamfilter. ' 'Giving up.'.format(error.url)) - raise SaveError(u'Spam filter') + raise SaveError('Spam filter') except (pywikibot.PageNotSaved) as error: - pywikibot.output(u'ERROR putting page: %s' % (error.args,)) - raise SaveError(u'PageNotSaved') + pywikibot.output('ERROR putting page: {}' + .format(error.args,)) + raise SaveError('PageNotSaved') except (socket.error, IOError) as error: if timeout > 3600: raise - pywikibot.output(u'ERROR putting page: %s' % (error.args,)) - pywikibot.output('Sleeping %i seconds before trying again.' - % (timeout,)) + pywikibot.output('ERROR putting page: {}' + .format(error.args,)) + pywikibot.output('Sleeping {} seconds before trying again.' 
+ .format(timeout,)) timeout *= 2 pywikibot.sleep(timeout) except pywikibot.ServerError: if timeout > 3600: raise - pywikibot.output(u'ERROR putting page: ServerError.') - pywikibot.output('Sleeping %i seconds before trying again.' - % (timeout,)) + pywikibot.output('ERROR putting page: ServerError.') + pywikibot.output('Sleeping {} seconds before trying again.' + .format(timeout,)) timeout *= 2 pywikibot.sleep(timeout) else: break return True elif answer == 'g': - raise GiveUpOnPage(u'User asked us to give up') + raise GiveUpOnPage('User asked us to give up') else: - raise LinkMustBeRemoved(u'Found incorrect link to %s in %s' - % (", ".join([x.code for x in removing]), - page)) + raise LinkMustBeRemoved('Found incorrect link to {} in {}'.format( + ', '.join([x.code for x in removing]), page))
def reportBacklinks(self, new, updatedSites): """ @@ -1958,7 +1966,7 @@ for l in page.iterlanglinks()} except pywikibot.NoPage: pywikibot.warning( - 'Page %s does no longer exist?!' % page) + 'Page {} does no longer exist?!'.format(page)) break # To speed things up, create a dictionary which maps sites # to pages. This assumes that there is only one interwiki @@ -1971,15 +1979,15 @@ try: linkedPage = linkedPagesDict[expectedPage.site] pywikibot.warning( - '%s: %s does not link to %s but to %s' - % (page.site.family.name, - page, expectedPage, linkedPage)) + '{}: {} does not link to {} but to {}' + .format(page.site.family.name, + page, expectedPage, linkedPage)) except KeyError: if not expectedPage.site.is_data_repository(): pywikibot.warning( - '%s: %s does not link to %s' - % (page.site.family.name, - page, expectedPage)) + '{}: {} does not link to {}' + .format(page.site.family.name, + page, expectedPage)) # Check for superfluous links for linkedPage in linkedPages: if linkedPage not in expectedPages: @@ -1988,11 +1996,11 @@ # In this case, it was already reported above. if linkedPage.site not in expectedSites: pywikibot.warning( - '%s: %s links to incorrect %s' - % (page.site.family.name, - page, linkedPage)) + '{}: {} links to incorrect {}' + .format(page.site.family.name, + page, linkedPage)) except (socket.error, IOError): - pywikibot.output(u'ERROR: could not report backlinks') + pywikibot.output('ERROR: could not report backlinks')
class InterwikiBot(object): @@ -2041,7 +2049,7 @@ dumpfn = pywikibot.config.datafilepath( 'data', 'interwiki-dumps', - '%s-%s.pickle' % (site.family.name, site.code) + '{0}-{1}.pickle'.format(site.family.name, site.code) ) if append: mode = 'appended' @@ -2172,8 +2180,8 @@ # Could not extract allpages special page? pywikibot.output( 'ERROR: could not retrieve more pages. ' - 'Will try again in %d seconds' - % timeout) + 'Will try again in {} seconds' + .format(timeout)) pywikibot.sleep(timeout) timeout *= 2 else: @@ -2199,7 +2207,7 @@ # First find the best language to work on site = self.selectQuerySite() if site is None: - pywikibot.output(u"NOTE: Nothing left to do") + pywikibot.output('NOTE: Nothing left to do') return False # Now assemble a reasonable list of pages to get subjectGroup = [] @@ -2215,7 +2223,7 @@ # We have found enough pages to fill the bandwidth. break if len(pageGroup) == 0: - pywikibot.output(u"NOTE: Nothing left to do 2") + pywikibot.output('NOTE: Nothing left to do 2') return False # Get the content of the assembled list in one blow gen = site.preloadpages(pageGroup, templates=True, langlinks=True, @@ -2283,7 +2291,7 @@ # Use short format, just the language code fmt = lambda d, site: site.code # noqa: E731
- mods = mcomment = u'' + mods = mcomment = ''
commentname = 'interwiki' if adding: @@ -2305,11 +2313,11 @@ changes = {'adding': comma.join(fmt(new, x) for x in adding), 'removing': comma.join(fmt(old, x) for x in removing), 'modifying': comma.join(fmt(new, x) for x in modifying), - 'from': u'' if not useFrom else old[modifying[0]]} + 'from': '' if not useFrom else old[modifying[0]]} en_changes = {'adding': ', '.join(fmt(new, x) for x in adding), 'removing': ', '.join(fmt(old, x) for x in removing), 'modifying': ', '.join(fmt(new, x) for x in modifying), - 'from': u'' if not useFrom else old[modifying[0]]} + 'from': '' if not useFrom else old[modifying[0]]}
mcomment += i18n.twtranslate(insite, commentname, changes) mods = i18n.twtranslate('en', commentname, en_changes) @@ -2349,8 +2357,8 @@ # titletranslate.py expects a list of strings, so we convert it back. # TODO: This is a quite ugly hack, in the future we should maybe make # titletranslate expect a list of pagelinks. - hintStrings = ['%s:%s' % (hintedPage.site.lang, - hintedPage.title()) + hintStrings = ['{}:{}'.format(hintedPage.site.lang, + hintedPage.title()) for hintedPage in pagelist] bot.add(page, hints=hintStrings)
@@ -2483,7 +2491,7 @@ if isinstance(ns, unicode) or isinstance(ns, str): index = site.namespaces.lookup_name(ns) if index is None: - raise ValueError(u'Unknown namespace: %s' % ns) + raise ValueError('Unknown namespace: ' + ns) ns = index.id namespaces = [] else: @@ -2495,7 +2503,7 @@ dumpFileName = pywikibot.config.datafilepath( 'data', 'interwiki-dumps', - u'%s-%s.pickle' % (site.family.name, site.code) + '{0}-{1}.pickle'.format(site.family.name, site.code) ) try: with open(dumpFileName, 'rb') as f: @@ -2513,7 +2521,7 @@ else: pywikibot.output( 'Dump file is empty?! Starting at the beginning.') - nextPage = "!" + nextPage = '!' namespace = 0 gen2 = site.allpages(start=nextPage, namespace=namespace, @@ -2536,7 +2544,7 @@ readWarnfile(warnfile, bot) else: if not singlePageTitle and not opthintsonly: - singlePageTitle = pywikibot.input(u'Which page to check:') + singlePageTitle = pywikibot.input('Which page to check:') if singlePageTitle: singlePage = pywikibot.Page(pywikibot.Site(), singlePageTitle) else: @@ -2568,5 +2576,5 @@ pass
-if __name__ == "__main__": +if __name__ == '__main__': main()
pywikibot-commits@lists.wikimedia.org