jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462714 )
Change subject: [cleanup] cleanup scripts/delete.py
......................................................................
[cleanup] cleanup scripts/delete.py
- use str.format(...) instead of modulo for type specifier
arguments.
- use single quotes for string literals
- remove leading "u" from strings
Change-Id: If1bf2491274705f33a0a156fd39367205cff0e98
---
M scripts/delete.py
1 file changed, 7 insertions(+), 7 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/delete.py b/scripts/delete.py
index 78adda2..ec62481 100755
--- a/scripts/delete.py
+++ b/scripts/delete.py
@@ -187,7 +187,7 @@
"""Process one page from the generator."""
if self.getOption('undelete'):
if self.current_page.exists():
- pywikibot.output(u'Skipping: {0} already exists.'.format(
+ pywikibot.output('Skipping: {0} already exists.'.format(
self.current_page))
else:
self.current_page.undelete(self.summary)
@@ -215,7 +215,7 @@
self.getOption('always'),
quit=True)
else:
- pywikibot.output(u'Skipping: {0} does not exist.'.format(
+ pywikibot.output('Skipping: {0} does not exist.'.format(
self.current_page))
@@ -243,7 +243,7 @@
options['always'] = True
elif arg.startswith('-summary'):
if len(arg) == len('-summary'):
- summary = pywikibot.input(u'Enter a reason for the deletion:')
+ summary = pywikibot.input('Enter a reason for the deletion:')
else:
summary = arg[len('-summary:'):]
elif arg.startswith('-images'):
@@ -292,9 +292,9 @@
# page generator to actually get the text of those pages.
if generator:
if summary is None:
- summary = pywikibot.input(u'Enter a reason for the %sdeletion:'
- % ['', 'un'][options.get('undelete',
- False)])
+ summary = pywikibot.input('Enter a reason for the {}deletion:'
+ .format(['', 'un'][options
+ .get('undelete', False)]))
bot = DeletionRobot(generator, summary, **options)
bot.run()
return True
@@ -303,5 +303,5 @@
return False
-if __name__ == "__main__":
+if __name__ == '__main__':
main()
--
To view, visit https://gerrit.wikimedia.org/r/462714
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: If1bf2491274705f33a0a156fd39367205cff0e98
Gerrit-Change-Number: 462714
Gerrit-PatchSet: 1
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462681 )
Change subject: [cleanup] cleanup scripts/followlive.py
......................................................................
[cleanup] cleanup scripts/followlive.py
- use str.format(...) instead of modulo for type specifier
arguments.
- use "+" for concatenating strings in some cases
- removed some trailing spaces before "!"
Change-Id: I966cafcabe0c71c80c2f8117a45a003c91ea6f1c
---
M scripts/followlive.py
1 file changed, 12 insertions(+), 10 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/followlive.py b/scripts/followlive.py
index 9bb8499..0473dca 100644
--- a/scripts/followlive.py
+++ b/scripts/followlive.py
@@ -7,8 +7,8 @@
Script to follow new articles on a wikipedia and flag them
with a template or eventually blank them.
-There must be A LOT of bugs ! Use with caution and verify what
-it is doing !
+There must be A LOT of bugs! Use with caution and verify what
+it is doing!
The following parameters are supported:
@@ -439,8 +439,8 @@
if answer == 'q':
raise QuitKeyboardInterrupt
if answer == 'd':
- pywikibot.output('Trying to delete page [[%s]].'
- % self.page.title())
+ pywikibot.output('Trying to delete page [[{}]].'
+ .format(self.page.title()))
self.page.delete()
return
if answer == 'e':
@@ -460,11 +460,11 @@
{'content': self.content}))
except pywikibot.EditConflict:
pywikibot.output(
- 'An edit conflict occured ! Automatically retrying')
+ 'An edit conflict occured! Automatically retrying')
self.handle_bad_page(self)
return
if answer == '':
- pywikibot.output('Page correct ! Proceeding with next pages.')
+ pywikibot.output('Page correct! Proceeding with next pages.')
return
# Check user input:
if answer[0] == '':
@@ -495,17 +495,18 @@
templates)[self.questionlist[answer]]
if tpl['pos'] == 'top':
pywikibot.output(
- 'prepending %s...' % self.questionlist[answer])
+ 'prepending {}...'.format(self.questionlist[answer]))
self.content = self.questionlist[answer] + '\n' + self.content
elif tpl['pos'] == 'bottom':
- pywikibot.output('appending %s...' % self.questionlist[answer])
+ pywikibot.output('appending {}...'
+ .format(self.questionlist[answer]))
self.content += '\n' + self.questionlist[answer]
else:
raise RuntimeError(
'"pos" should be "top" or "bottom" for template {}. '
'Contact a developer.'.format(self.questionlist[answer]))
summary += tpl['msg'] + ' '
- pywikibot.output('Probably added %s' % self.questionlist[answer])
+ pywikibot.output('Probably added ' + self.questionlist[answer])
self.page.put(self.content, summary=summary)
pywikibot.output('with comment {}\n'.format(summary))
@@ -516,7 +517,8 @@
if self.could_be_bad():
pywikibot.output('Integrity of page doubtful...')
self.handle_bad_page()
- pywikibot.output('----- Current time: %s' % datetime.datetime.now())
+ pywikibot.output('----- Current time: {}'
+ .format(datetime.datetime.now()))
def init_page(self, item):
"""Init the page tuple before processing and return a page object.
--
To view, visit https://gerrit.wikimedia.org/r/462681
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I966cafcabe0c71c80c2f8117a45a003c91ea6f1c
Gerrit-Change-Number: 462681
Gerrit-PatchSet: 5
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462683 )
Change subject: [cleanup] cleanup scripts/flickrripper.py
......................................................................
[cleanup] cleanup scripts/flickrripper.py
- use str.format(...) instead of modulo for type specifier
arguments.
Change-Id: Iafa8a11fd24cea16b2e3ba387eb0ff11410ce392
---
M scripts/flickrripper.py
1 file changed, 4 insertions(+), 4 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/flickrripper.py b/scripts/flickrripper.py
index 6437bce..795a45c 100755
--- a/scripts/flickrripper.py
+++ b/scripts/flickrripper.py
@@ -199,11 +199,11 @@
title = ''
# Should probably have the id of the photo as last resort.
- if pywikibot.Page(site, 'File:%s - %s - %s.jpg'
- % (title, project, username)).exists():
+ if pywikibot.Page(site, 'File:{} - {} - {}.jpg'
+ .format(title, project, username)).exists():
i = 1
while True:
- name = '%s - %s - %s (%d).jpg' % (title, project, username, i)
+ name = '{} - {} - {} ({}).jpg'.format(title, project, username, i)
if pywikibot.Page(site, 'File:' + name).exists():
i += 1
else:
@@ -246,7 +246,7 @@
The description is based on the info from flickrinfo and improved.
"""
- description = '== {{int:filedesc}} ==\n%s' % flinfoDescription
+ description = '== {{int:filedesc}} ==\n{}'.format(flinfoDescription)
if removeCategories:
description = textlib.removeCategoryLinks(description,
pywikibot.Site(
--
To view, visit https://gerrit.wikimedia.org/r/462683
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Iafa8a11fd24cea16b2e3ba387eb0ff11410ce392
Gerrit-Change-Number: 462683
Gerrit-PatchSet: 2
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462649 )
Change subject: [cleanup] cleanup scripts/freebasemappingupload.py
......................................................................
[cleanup] cleanup scripts/freebasemappingupload.py
- use str.format(...) instead of modulo for type specifier
arguments.
Change-Id: I3e2c7e70fcff11ddac2d278abc3a306081db7349
---
M scripts/freebasemappingupload.py
1 file changed, 5 insertions(+), 4 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/freebasemappingupload.py b/scripts/freebasemappingupload.py
index 21a4af5..6a13e42 100755
--- a/scripts/freebasemappingupload.py
+++ b/scripts/freebasemappingupload.py
@@ -83,15 +83,16 @@
else:
# Just pick up the first label
label = list(data.labels.values())[0]
- pywikibot.output('Parsed: %s <--> %s' % (qid, mid))
- pywikibot.output('%s is %s' % (data.getID(), label))
+ pywikibot.output('Parsed: {} <--> {}'.format(qid, mid))
+ pywikibot.output('{} is {}'.format(data.getID(), label))
if data.claims and 'P646' in data.claims:
# We assume that there is only one claim.
# If there are multiple ones, our logs might be wrong
# but the constraint value reports will catch them
if mid != data.claims['P646'][0].getTarget():
- pywikibot.output('Mismatch: expected %s, has %s instead'
- % (mid, data.claims['P646'][0].getTarget()))
+ pywikibot.output('Mismatch: expected {}, has {} instead'
+ .format(mid,
+ data.claims['P646'][0].getTarget()))
else:
pywikibot.output('Already has mid set, is consistent.')
else:
--
To view, visit https://gerrit.wikimedia.org/r/462649
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I3e2c7e70fcff11ddac2d278abc3a306081db7349
Gerrit-Change-Number: 462649
Gerrit-PatchSet: 3
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462643 )
Change subject: [cleanup] cleanup scripts/harvest_template.py
......................................................................
[cleanup] cleanup scripts/harvest_template.py
- use single quotes for string literals
- use str.format(...) instead of modulo for type specifier
arguments
- remove leading "u" from strings
Change-Id: I43e0a4b42f0d402b98ba09eab6c37f8b89d39746
---
M scripts/harvest_template.py
1 file changed, 21 insertions(+), 18 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py
index d8ffb78..73aff99 100755
--- a/scripts/harvest_template.py
+++ b/scripts/harvest_template.py
@@ -166,7 +166,8 @@
"""Fetch redirects of the title, so we can check against them."""
temp = pywikibot.Page(pywikibot.Site(), title, ns=10)
if not temp.exists():
- pywikibot.error(u'Template %s does not exist.' % temp.title())
+ pywikibot.error('Template {} does not exist.'
+ .format(temp.title()))
exit()
# Put some output here since it can take a while
@@ -186,13 +187,13 @@
try:
exists = linked_page.exists()
except pywikibot.exceptions.InvalidTitle:
- pywikibot.error('"%s" is not a valid title so it cannot be linked.'
- ' Skipping.' % link_text)
+ pywikibot.error('"{}" is not a valid title so it cannot be linked.'
+ ' Skipping.'.format(link_text))
return None
if not exists:
- pywikibot.output('%s does not exist so it cannot be linked. '
- 'Skipping.' % (linked_page))
+ pywikibot.output('{} does not exist so it cannot be linked. '
+ 'Skipping.'.format(linked_page))
return None
if linked_page.isRedirectPage():
@@ -204,12 +205,13 @@
linked_item = None
if not linked_item or not linked_item.exists():
- pywikibot.output('%s does not have a wikidata item to link with. '
- 'Skipping.' % (linked_page))
+ pywikibot.output('{} does not have a wikidata item to link with. '
+ 'Skipping.'.format(linked_page))
return None
if linked_item.title() == item.title():
- pywikibot.output('%s links to itself. Skipping.' % (linked_page))
+ pywikibot.output('{} links to itself. Skipping.'
+ .format(linked_page))
return None
return linked_item
@@ -240,8 +242,8 @@
ns=10).title(with_ns=False)
except pywikibot.exceptions.InvalidTitle:
pywikibot.error(
- "Failed parsing template; '%s' should be the template name."
- % template)
+ "Failed parsing template; '{}' should be "
+ 'the template name.'.format(template))
continue
if template not in self.templateTitles:
@@ -269,8 +271,9 @@
link_text = value
else:
pywikibot.output(
- '%s field %s value %s is not a wikilink. '
- 'Skipping.' % (claim.getID(), field, value))
+ '{} field {} value {} is not a wikilink. '
+ 'Skipping.'
+ .format(claim.getID(), field, value))
continue
linked_item = self._template_link_target(item, link_text)
@@ -299,8 +302,8 @@
continue
claim.setTarget(image)
else:
- pywikibot.output('%s is not a supported datatype.'
- % claim.type)
+ pywikibot.output('{} is not a supported datatype.'
+ .format(claim.type))
continue
# A generator might yield pages from multiple sites
@@ -332,13 +335,13 @@
if arg.startswith('-template'):
if len(arg) == 9:
template_title = pywikibot.input(
- u'Please enter the template to work on:')
+ 'Please enter the template to work on:')
else:
template_title = arg[10:]
elif arg.startswith('-create'):
options['create'] = True
elif gen.handleArg(arg):
- if arg.startswith(u'-transcludes:'):
+ if arg.startswith('-transcludes:'):
template_title = arg[13:]
else:
optional = arg.startswith('-')
@@ -379,12 +382,12 @@
generator = gen.getCombinedGenerator(preload=True)
if not generator:
- gen.handleArg(u'-transcludes:' + template_title)
+ gen.handleArg('-transcludes:' + template_title)
generator = gen.getCombinedGenerator(preload=True)
bot = HarvestRobot(generator, template_title, fields, **options)
bot.run()
-if __name__ == "__main__":
+if __name__ == '__main__':
main()
--
To view, visit https://gerrit.wikimedia.org/r/462643
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I43e0a4b42f0d402b98ba09eab6c37f8b89d39746
Gerrit-Change-Number: 462643
Gerrit-PatchSet: 2
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462527 )
Change subject: [cleanup] cleanup scripts/interwiki.py
......................................................................
[cleanup] cleanup scripts/interwiki.py
- use single quotes for string literals and double quotes
**only** if they consist of single quotes within them
- use str.format(...) instead of modulo for type specifier
arguments
- remove leading "u" from strings
- use "+" to concatenate strings in some cases
Change-Id: Ie3caa13d11b92a89704d9310249cb0bb2850024d
---
M scripts/interwiki.py
1 file changed, 177 insertions(+), 169 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 8d7a68c..79aaa9e 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -398,18 +398,18 @@
# A list of template names in different languages.
# Pages which contain these shouldn't be changed.
ignoreTemplates = {
- '_default': [u'delete'],
- 'ar': [u'قيد الاستخدام'],
- 'cs': [u'Pracuje_se'],
- 'de': [u'inuse', 'in use', u'in bearbeitung', u'inbearbeitung',
- u'löschen', u'sla',
- u'löschantrag', u'löschantragstext',
- u'falschschreibung',
- u'obsolete schreibung', 'veraltete schreibweise'],
- 'en': [u'inuse', u'softredirect'],
- 'fa': [u'در دست ویرایش ۲', u'حذف سریع'],
- 'pdc': [u'lösche'],
- 'zh': [u'inuse'],
+ '_default': ['delete'],
+ 'ar': ['قيد الاستخدام'],
+ 'cs': ['Pracuje_se'],
+ 'de': ['inuse', 'in use', 'in bearbeitung', 'inbearbeitung',
+ 'löschen', 'sla',
+ 'löschantrag', 'löschantragstext',
+ 'falschschreibung',
+ 'obsolete schreibung', 'veraltete schreibweise'],
+ 'en': ['inuse', 'softredirect'],
+ 'fa': ['در دست ویرایش ۲', 'حذف سریع'],
+ 'pdc': ['lösche'],
+ 'zh': ['inuse'],
}
@@ -456,7 +456,7 @@
quiet = False
restoreAll = False
asynchronous = False
- summary = u''
+ summary = ''
repository = False
def note(self, text):
@@ -914,8 +914,8 @@
# Bug-check: Isn't there any work still in progress? We can't work on
# different sites at a time!
if len(self.pending) > 0:
- raise "BUG: Can't start to work on %s; still working on %s" \
- % (site, self.pending)
+ raise "BUG: Can't start to work on {}; still working on {}".format(
+ site, self.pending)
# Prepare a list of suitable pages
result = []
for page in self.todo.filter(site):
@@ -951,10 +951,10 @@
if self.conf.nobackonly and self.originPage:
if page == self.originPage:
try:
- pywikibot.output(u"%s has a backlink from %s."
- % (page, linkingPage))
+ pywikibot.output('{} has a backlink from {}.'
+ .format(page, linkingPage))
except UnicodeDecodeError:
- pywikibot.output(u"Found a backlink for a page.")
+ pywikibot.output('Found a backlink for a page.')
self.makeForcedStop(counter)
return False
@@ -999,10 +999,10 @@
return False
if self.conf.autonomous:
pywikibot.output(
- 'NOTE: Ignoring link from page %s in namespace %i to page '
- '%s in namespace %i.'
- % (linkingPage, linkingPage.namespace(), linkedPage,
- linkedPage.namespace()))
+ 'NOTE: Ignoring link from page {} in namespace'
+ ' {} to page {} in namespace {}.'
+ .format(linkingPage, linkingPage.namespace(), linkedPage,
+ linkedPage.namespace()))
# Fill up foundIn, so that we will not write this notice
self.foundIn[linkedPage] = [linkingPage]
return True
@@ -1011,18 +1011,19 @@
linkedPage.site)
if preferredPage:
pywikibot.output(
- 'NOTE: Ignoring link from page %s in namespace %i to '
- 'page %s in namespace %i because page %s in the '
+ 'NOTE: Ignoring link from page {} in namespace {} to '
+ 'page {} in namespace {} because page {} in the '
'correct namespace has already been found.'
- % (linkingPage, linkingPage.namespace(), linkedPage,
- linkedPage.namespace(), preferredPage))
+ .format(linkingPage, linkingPage.namespace(),
+ linkedPage, linkedPage.namespace(),
+ preferredPage))
return True
else:
choice = pywikibot.input_choice(
- 'WARNING: %s is in namespace %i, but %s is in '
- 'namespace %i. Follow it anyway?'
- % (self.originPage, self.originPage.namespace(),
- linkedPage, linkedPage.namespace()),
+ 'WARNING: {} is in namespace {}, but {} is in '
+ 'namespace {}. Follow it anyway?'
+ .format(self.originPage, self.originPage.namespace(),
+ linkedPage, linkedPage.namespace()),
[('Yes', 'y'), ('No', 'n'),
('Add an alternative', 'a'), ('give up', 'g')],
automatic_quit=False)
@@ -1033,9 +1034,9 @@
self.makeForcedStop(counter)
elif choice == 'a':
newHint = pywikibot.input(
- u'Give the alternative for language %s, not '
- u'using a language code:'
- % linkedPage.site.lang)
+ 'Give the alternative for language {}, not '
+ 'using a language code:'
+ .format(linkedPage.site.lang))
if newHint:
alternativePage = pywikibot.Page(
linkedPage.site, newHint)
@@ -1045,8 +1046,8 @@
None)
else:
pywikibot.output(
- u"NOTE: ignoring %s and its interwiki links"
- % linkedPage)
+ 'NOTE: ignoring {} and its interwiki links'
+ .format(linkedPage))
return True
else:
# same namespaces, no problem
@@ -1057,16 +1058,16 @@
"""Check for ignoring pages."""
if self.originPage and self.conf.same == 'wiktionary':
if page.title().lower() != self.originPage.title().lower():
- pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode"
- % (page, self.originPage))
+ pywikibot.output('NOTE: Ignoring {} for {} in wiktionary mode'
+ .format(page, self.originPage))
return True
elif (page.title() != self.originPage.title() and
self.originPage.namespace().case == 'case-sensitive' and
page.namespace().case == 'case-sensitive'):
pywikibot.output(
- 'NOTE: Ignoring %s for %s in wiktionary mode because both '
- u"languages are uncapitalized."
- % (page, self.originPage))
+ 'NOTE: Ignoring {} for {} in wiktionary mode because both '
+ 'languages are uncapitalized.'
+ .format(page, self.originPage))
return True
return False
@@ -1088,13 +1089,13 @@
if self.conf.autonomous:
if self.originPage.isDisambig() and not page.isDisambig():
pywikibot.output(
- u"NOTE: Ignoring link from disambiguation page %s to "
- u"non-disambiguation %s" % (self.originPage, page))
+ 'NOTE: Ignoring link from disambiguation page {} to '
+ 'non-disambiguation {}'.format(self.originPage, page))
return (True, None)
elif not self.originPage.isDisambig() and page.isDisambig():
pywikibot.output(
- u"NOTE: Ignoring link from non-disambiguation page %s to "
- u"disambiguation %s" % (self.originPage, page))
+ 'NOTE: Ignoring link from non-disambiguation page {} to '
+ 'disambiguation {}'.format(self.originPage, page))
return (True, None)
else:
choice = 'y'
@@ -1102,16 +1103,16 @@
disambig = self.getFoundDisambig(page.site)
if disambig:
pywikibot.output(
- u"NOTE: Ignoring non-disambiguation page %s for %s "
- u"because disambiguation page %s has already been "
- u"found."
- % (page, self.originPage, disambig))
+ 'NOTE: Ignoring non-disambiguation page {} for {} '
+ 'because disambiguation page {} has already been '
+ 'found.'
+ .format(page, self.originPage, disambig))
return (True, None)
else:
choice = pywikibot.input_choice(
- "WARNING: %s is a disambiguation page, but %s doesn't "
- u"seem to be one. Follow it anyway?"
- % (self.originPage, page),
+ "WARNING: {} is a disambiguation page, but {} doesn't "
+ 'seem to be one. Follow it anyway?'
+ .format(self.originPage, page),
[('Yes', 'y'), ('No', 'n'),
('Add an alternative', 'a'), ('give up', 'g')],
automatic_quit=False)
@@ -1119,15 +1120,15 @@
nondisambig = self.getFoundNonDisambig(page.site)
if nondisambig:
pywikibot.output(
- 'NOTE: Ignoring disambiguation page %s for %s because '
- u"non-disambiguation page %s has already been found."
- % (page, self.originPage, nondisambig))
+ 'NOTE: Ignoring disambiguation page {} for {} because '
+ 'non-disambiguation page {} has already been found.'
+ .format(page, self.originPage, nondisambig))
return (True, None)
else:
choice = pywikibot.input_choice(
- u'WARNING: %s doesn\'t seem to be a disambiguation '
- u'page, but %s is one. Follow it anyway?'
- % (self.originPage, page),
+ "WARNING: {} doesn't seem to be a disambiguation "
+ 'page, but {} is one. Follow it anyway?'
+ .format(self.originPage, page),
[('Yes', 'y'), ('No', 'n'),
('Add an alternative', 'a'), ('give up', 'g')],
automatic_quit=False)
@@ -1135,8 +1136,8 @@
return (True, None)
elif choice == 'a':
newHint = pywikibot.input(
- u'Give the alternative for language %s, not using a '
- u'language code:' % page.site.lang)
+ 'Give the alternative for language {}, not using a '
+ 'language code:'.format(page.site.lang))
alternativePage = pywikibot.Page(page.site, newHint)
return (True, alternativePage)
elif choice == 'g':
@@ -1148,10 +1149,12 @@
def isIgnored(self, page):
"""Return True if pages is to be ignored."""
if page.site.lang in self.conf.neverlink:
- pywikibot.output(u"Skipping link %s to an ignored language" % page)
+ pywikibot.output('Skipping link {} to an ignored language'
+ .format(page))
return True
if page in self.conf.ignore:
- pywikibot.output(u"Skipping link %s to an ignored page" % page)
+ pywikibot.output('Skipping link {} to an ignored page'
+ .format(page))
return True
return False
@@ -1184,15 +1187,15 @@
# loop
while True:
newhint = pywikibot.input(
- u'Give a hint (? to see pagetext):')
+ 'Give a hint (? to see pagetext):')
if newhint == '?':
t += self.conf.showtextlinkadd
pywikibot.output(self.originPage.get()[:t])
elif newhint and ':' not in newhint:
pywikibot.output(
- u'Please enter a hint in the format '
- u'language:pagename or type nothing if you do not '
- u'have a hint.')
+ 'Please enter a hint in the format '
+ 'language:pagename or type nothing if you do not '
+ 'have a hint.')
elif not newhint:
break
else:
@@ -1231,10 +1234,10 @@
if dictName is not None:
if self.originPage:
pywikibot.warning(
- '%s:%s relates to %s:%s, which is an '
- u'auto entry %s(%s)'
- % (self.originPage.site.lang, self.originPage,
- page.site.lang, page, dictName, year))
+ '{}:{} relates to {}:{}, which is an '
+ 'auto entry {}({})'
+ .format(self.originPage.site.lang, self.originPage,
+ page.site.lang, page, dictName, year))
# Abort processing if the bot is running in autonomous mode
if self.conf.autonomous:
@@ -1298,9 +1301,10 @@
not self.skipPage(page, redirectTargetPage, counter)):
if self.addIfNew(redirectTargetPage, counter, page):
if config.interwiki_shownew:
- pywikibot.output(u"%s: %s gives new %sredirect %s"
- % (self.originPage, page, redir,
- redirectTargetPage))
+ pywikibot.output('{}: {} gives new {}redirect {}'
+ .format(self.originPage,
+ page, redir,
+ redirectTargetPage))
continue
# must be behind the page.isRedirectPage() part
@@ -1333,8 +1337,8 @@
(skip, alternativePage) = self.disambigMismatch(page, counter)
if skip:
- pywikibot.output(u"NOTE: ignoring %s and its interwiki links"
- % page)
+ pywikibot.output('NOTE: ignoring {} and its interwiki links'
+ .format(page))
self.done.remove(page)
iw = ()
if alternativePage:
@@ -1363,9 +1367,9 @@
self.workonme = False
elif self.conf.autonomous and duplicate and not skip:
- pywikibot.output('Stopping work on %s because duplicate pages'
- " %s and %s are found"
- % (self.originPage, duplicate, page))
+ pywikibot.output('Stopping work on {} because duplicate pages'
+ ' {} and {} are found'
+ .format(self.originPage, duplicate, page))
self.makeForcedStop(counter)
try:
with codecs.open(
@@ -1397,9 +1401,9 @@
if self.conf.hintsareright:
if linkedPage.site in self.hintedsites:
pywikibot.output(
- 'NOTE: %s: %s extra interwiki on hinted site '
- 'ignored %s'
- % (self.originPage, page, linkedPage))
+ 'NOTE: {}: {} extra interwiki on hinted site '
+ 'ignored {}'
+ .format(self.originPage, page, linkedPage))
break
if not self.skipPage(page, linkedPage, counter):
if self.conf.followinterwiki or page == self.originPage:
@@ -1414,9 +1418,10 @@
# either may be a redirect to the other.
# No way to find out quickly!
pywikibot.output(
- 'NOTE: %s: %s gives duplicate '
- 'interwiki on same site %s'
- % (self.originPage, page, linkedPage))
+ 'NOTE: {}: {} gives duplicate '
+ 'interwiki on same site {}'
+ .format(self.originPage, page,
+ linkedPage))
break
else:
if config.interwiki_shownew:
@@ -1439,7 +1444,7 @@
def problem(self, txt, createneed=True):
"""Report a problem with the resolution of this subject."""
- pywikibot.output(u"ERROR: %s" % txt)
+ pywikibot.output('ERROR: ' + txt)
self.confirm = True
if createneed:
self.problemfound = True
@@ -1448,9 +1453,9 @@
"""Report found interlanguage links with conflicts."""
for page2 in sorted(self.foundIn[page]):
if page2 is None:
- pywikibot.output(u" " * indent + "Given as a hint.")
+ pywikibot.output(' ' * indent + 'Given as a hint.')
else:
- pywikibot.output(u" " * indent + unicode(page2))
+ pywikibot.output(' ' * indent + unicode(page2))
def assemble(self):
"""Assemble language links."""
@@ -1470,7 +1475,7 @@
continue
if site == self.originPage.site:
if page != self.originPage:
- self.problem(u"Found link to %s" % page)
+ self.problem('Found link to ' + page)
self.whereReport(page)
errorCount += 1
else:
@@ -1483,7 +1488,7 @@
for site, pages in new.items():
if len(pages) > 1:
errorCount += 1
- self.problem(u"Found more than one link for %s" % site)
+ self.problem('Found more than one link for ' + site)
if not errorCount and not self.conf.select:
# no errors, so all lists have only one item
@@ -1504,11 +1509,11 @@
# First loop over the ones that have more solutions
for site, pages in new.items():
if len(pages) > 1:
- pywikibot.output(u"=" * 30)
- pywikibot.output(u"Links to %s" % site)
+ pywikibot.output('=' * 30)
+ pywikibot.output('Links to ' + site)
for i, page2 in enumerate(pages, 1):
- pywikibot.output(u" (%d) Found link to %s in:"
- % (i, page2))
+ pywikibot.output(' ({}) Found link to {} in:'
+ .format(i, page2))
self.whereReport(page2, indent=8)
# TODO: allow answer to repeat previous or go back after a
@@ -1529,9 +1534,9 @@
for site, pages in new.items():
if len(pages) == 1:
if not acceptall:
- pywikibot.output(u"=" * 30)
+ pywikibot.output('=' * 30)
page2 = pages[0]
- pywikibot.output(u"Found link to %s in:" % page2)
+ pywikibot.output('Found link to {} in:'.format(page2))
self.whereReport(page2, indent=4)
while True:
if acceptall:
@@ -1540,7 +1545,7 @@
# TODO: allow answer to repeat previous or go back
# after a mistake
answer = pywikibot.input_choice(
- u'What should be done?',
+ 'What should be done?',
[('accept', 'a'), ('reject', 'r'),
('give up', 'g'), ('accept all', 'l')], 'a',
automatic_quit=False)
@@ -1565,7 +1570,7 @@
"""
if not self.isDone():
- raise Exception("Bugcheck: finish called before done")
+ raise Exception('Bugcheck: finish called before done')
if not self.workonme:
return
if self.originPage:
@@ -1578,8 +1583,8 @@
if not self.untranslated and self.conf.untranslatedonly:
return
if self.forcedStop: # autonomous with problem
- pywikibot.output(u"======Aborted processing %s======"
- % self.originPage)
+ pywikibot.output('======Aborted processing {}======'
+ .format(self.originPage))
return
# The following check is not always correct and thus disabled.
# self.done might contain no interwiki links because of the -neverlink
@@ -1587,12 +1592,13 @@
# if len(self.done) == 1:
# # No interwiki at all
# return
- pywikibot.output(u"======Post-processing %s======" % self.originPage)
+ pywikibot.output('======Post-processing {}======'
+ .format(self.originPage))
# Assemble list of accepted interwiki links
new = self.assemble()
if new is None: # User said give up
- pywikibot.output(u"======Aborted processing %s======"
- % self.originPage)
+ pywikibot.output('======Aborted processing {}======'
+ .format(self.originPage))
return
# Make sure new contains every page link, including the page we are
@@ -1641,8 +1647,8 @@
page = pywikibot.Page(link)
old[page.site] = page
except pywikibot.NoPage:
- pywikibot.output(u"BUG>>> %s no longer exists?"
- % new[site])
+ pywikibot.output('BUG>>> {} no longer exists?'
+ .format(new[site]))
continue
mods, mcomment, adding, removing, modifying \
= compareLanguages(old, new, lclSite,
@@ -1713,20 +1719,21 @@
if self.conf.localonly:
# In this case only continue on the Page we started with
if page != self.originPage:
- raise SaveError(u'-localonly and page != originPage')
+ raise SaveError('-localonly and page != originPage')
if page.section():
# This is not a page, but a subpage. Do not edit it.
- pywikibot.output('Not editing %s: not doing interwiki on subpages'
- % page)
- raise SaveError(u'Link has a #section')
+ pywikibot.output('Not editing {}: not doing interwiki on subpages'
+ .format(page))
+ raise SaveError('Link has a #section')
try:
pagetext = page.get()
except pywikibot.NoPage:
- pywikibot.output(u"Not editing %s: page does not exist" % page)
- raise SaveError(u'Page doesn\'t exist')
+ pywikibot.output('Not editing {}: page does not exist'
+ .format(page))
+ raise SaveError("Page doesn't exist")
if page_empty_check(page):
- pywikibot.output(u"Not editing %s: page is empty" % page)
- raise SaveError(u'Page is empty.')
+ pywikibot.output('Not editing {}: page is empty'.format(page))
+ raise SaveError('Page is empty.')
# clone original newPages dictionary, so that we can modify it to the
# local page's needs
@@ -1741,7 +1748,7 @@
(ignorepage.site != page.site):
if (ignorepage not in interwikis):
pywikibot.output(
- u"Ignoring link to %(to)s for %(from)s"
+ 'Ignoring link to %(to)s for %(from)s'
% {'to': ignorepage,
'from': page})
new.pop(ignorepage.site)
@@ -1762,13 +1769,13 @@
# site.
pltmp = new[page.site]
if pltmp != page:
- s = u"None"
+ s = 'None'
if pltmp is not None:
s = pltmp
pywikibot.output(
- u"BUG>>> %s is not in the list of new links! Found %s."
- % (page, s))
- raise SaveError(u'BUG: sanity check failed')
+ 'BUG>>> {} is not in the list of new links! Found {}.'
+ .format(page, s))
+ raise SaveError('BUG: sanity check failed')
# Avoid adding an iw link back to itself
del new[page.site]
@@ -1808,9 +1815,8 @@
):
new[rmsite] = rmPage
pywikibot.warning(
- '%s is either deleted or has a mismatching '
- 'disambiguation state.'
- % rmPage)
+ '{} is either deleted or has a mismatching '
+ 'disambiguation state.'.format(rmPage))
# Re-Check what needs to get done
mods, mcomment, adding, removing, modifying = compareLanguages(
old,
@@ -1825,7 +1831,7 @@
# Show a message in purple.
pywikibot.output(color_format(
'{lightpurple}Updating links on page {0}.{default}', page))
- pywikibot.output(u"Changes to be made: %s" % mods)
+ pywikibot.output('Changes to be made: {}'.format(mods))
oldtext = page.get()
template = (page.namespace() == 10)
newtext = textlib.replaceLanguageLinks(oldtext, new,
@@ -1836,12 +1842,12 @@
if not botMayEdit(page):
if template:
pywikibot.output(
- u'SKIPPING: %s should have interwiki links on subpage.'
- % page)
+ 'SKIPPING: {} should have interwiki links on subpage.'
+ .format(page))
else:
pywikibot.output(
- u'SKIPPING: %s is under construction or to be deleted.'
- % page)
+ 'SKIPPING: {} is under construction or to be deleted.'
+ .format(page))
return False
if newtext == oldtext:
return False
@@ -1853,8 +1859,8 @@
# Allow for special case of a self-pointing interwiki link
if removing and removing != [page.site]:
- self.problem(u'Found incorrect link to %s in %s'
- % (", ".join([x.code for x in removing]), page),
+ self.problem('Found incorrect link to {} in {}'
+ .format(', '.join([x.code for x in removing]), page),
createneed=False)
ask = True
if self.conf.force or self.conf.cleanup:
@@ -1867,7 +1873,7 @@
# If we cannot ask, deny permission
answer = 'n'
else:
- answer = pywikibot.input_choice(u'Submit?',
+ answer = pywikibot.input_choice('Submit?',
[('Yes', 'y'), ('No', 'n'),
('open in Browser', 'b'),
('Give up', 'g'),
@@ -1897,46 +1903,48 @@
pywikibot.exception()
return False
except pywikibot.LockedPage:
- pywikibot.output(u'Page %s is locked. Skipping.' % page)
- raise SaveError(u'Locked')
+ pywikibot.output('Page {} is locked. Skipping.'
+ .format(page))
+ raise SaveError('Locked')
except pywikibot.EditConflict:
pywikibot.output(
'ERROR putting page: An edit conflict occurred. '
'Giving up.')
- raise SaveError(u'Edit conflict')
+ raise SaveError('Edit conflict')
except (pywikibot.SpamfilterError) as error:
pywikibot.output(
'ERROR putting page: {0} blacklisted by spamfilter. '
'Giving up.'.format(error.url))
- raise SaveError(u'Spam filter')
+ raise SaveError('Spam filter')
except (pywikibot.PageNotSaved) as error:
- pywikibot.output(u'ERROR putting page: %s' % (error.args,))
- raise SaveError(u'PageNotSaved')
+ pywikibot.output('ERROR putting page: {}'
+ .format(error.args,))
+ raise SaveError('PageNotSaved')
except (socket.error, IOError) as error:
if timeout > 3600:
raise
- pywikibot.output(u'ERROR putting page: %s' % (error.args,))
- pywikibot.output('Sleeping %i seconds before trying again.'
- % (timeout,))
+ pywikibot.output('ERROR putting page: {}'
+ .format(error.args,))
+ pywikibot.output('Sleeping {} seconds before trying again.'
+ .format(timeout,))
timeout *= 2
pywikibot.sleep(timeout)
except pywikibot.ServerError:
if timeout > 3600:
raise
- pywikibot.output(u'ERROR putting page: ServerError.')
- pywikibot.output('Sleeping %i seconds before trying again.'
- % (timeout,))
+ pywikibot.output('ERROR putting page: ServerError.')
+ pywikibot.output('Sleeping {} seconds before trying again.'
+ .format(timeout,))
timeout *= 2
pywikibot.sleep(timeout)
else:
break
return True
elif answer == 'g':
- raise GiveUpOnPage(u'User asked us to give up')
+ raise GiveUpOnPage('User asked us to give up')
else:
- raise LinkMustBeRemoved(u'Found incorrect link to %s in %s'
- % (", ".join([x.code for x in removing]),
- page))
+ raise LinkMustBeRemoved('Found incorrect link to {} in {}'.format(
+ ', '.join([x.code for x in removing]), page))
def reportBacklinks(self, new, updatedSites):
"""
@@ -1958,7 +1966,7 @@
for l in page.iterlanglinks()}
except pywikibot.NoPage:
pywikibot.warning(
- 'Page %s does no longer exist?!' % page)
+ 'Page {} does no longer exist?!'.format(page))
break
# To speed things up, create a dictionary which maps sites
# to pages. This assumes that there is only one interwiki
@@ -1971,15 +1979,15 @@
try:
linkedPage = linkedPagesDict[expectedPage.site]
pywikibot.warning(
- '%s: %s does not link to %s but to %s'
- % (page.site.family.name,
- page, expectedPage, linkedPage))
+ '{}: {} does not link to {} but to {}'
+ .format(page.site.family.name,
+ page, expectedPage, linkedPage))
except KeyError:
if not expectedPage.site.is_data_repository():
pywikibot.warning(
- '%s: %s does not link to %s'
- % (page.site.family.name,
- page, expectedPage))
+ '{}: {} does not link to {}'
+ .format(page.site.family.name,
+ page, expectedPage))
# Check for superfluous links
for linkedPage in linkedPages:
if linkedPage not in expectedPages:
@@ -1988,11 +1996,11 @@
# In this case, it was already reported above.
if linkedPage.site not in expectedSites:
pywikibot.warning(
- '%s: %s links to incorrect %s'
- % (page.site.family.name,
- page, linkedPage))
+ '{}: {} links to incorrect {}'
+ .format(page.site.family.name,
+ page, linkedPage))
except (socket.error, IOError):
- pywikibot.output(u'ERROR: could not report backlinks')
+ pywikibot.output('ERROR: could not report backlinks')
class InterwikiBot(object):
@@ -2041,7 +2049,7 @@
dumpfn = pywikibot.config.datafilepath(
'data',
'interwiki-dumps',
- '%s-%s.pickle' % (site.family.name, site.code)
+ '{0}-{1}.pickle'.format(site.family.name, site.code)
)
if append:
mode = 'appended'
@@ -2172,8 +2180,8 @@
# Could not extract allpages special page?
pywikibot.output(
'ERROR: could not retrieve more pages. '
- 'Will try again in %d seconds'
- % timeout)
+ 'Will try again in {} seconds'
+ .format(timeout))
pywikibot.sleep(timeout)
timeout *= 2
else:
@@ -2199,7 +2207,7 @@
# First find the best language to work on
site = self.selectQuerySite()
if site is None:
- pywikibot.output(u"NOTE: Nothing left to do")
+ pywikibot.output('NOTE: Nothing left to do')
return False
# Now assemble a reasonable list of pages to get
subjectGroup = []
@@ -2215,7 +2223,7 @@
# We have found enough pages to fill the bandwidth.
break
if len(pageGroup) == 0:
- pywikibot.output(u"NOTE: Nothing left to do 2")
+ pywikibot.output('NOTE: Nothing left to do 2')
return False
# Get the content of the assembled list in one blow
gen = site.preloadpages(pageGroup, templates=True, langlinks=True,
@@ -2283,7 +2291,7 @@
# Use short format, just the language code
fmt = lambda d, site: site.code # noqa: E731
- mods = mcomment = u''
+ mods = mcomment = ''
commentname = 'interwiki'
if adding:
@@ -2305,11 +2313,11 @@
changes = {'adding': comma.join(fmt(new, x) for x in adding),
'removing': comma.join(fmt(old, x) for x in removing),
'modifying': comma.join(fmt(new, x) for x in modifying),
- 'from': u'' if not useFrom else old[modifying[0]]}
+ 'from': '' if not useFrom else old[modifying[0]]}
en_changes = {'adding': ', '.join(fmt(new, x) for x in adding),
'removing': ', '.join(fmt(old, x) for x in removing),
'modifying': ', '.join(fmt(new, x) for x in modifying),
- 'from': u'' if not useFrom else old[modifying[0]]}
+ 'from': '' if not useFrom else old[modifying[0]]}
mcomment += i18n.twtranslate(insite, commentname, changes)
mods = i18n.twtranslate('en', commentname, en_changes)
@@ -2349,8 +2357,8 @@
# titletranslate.py expects a list of strings, so we convert it back.
# TODO: This is a quite ugly hack, in the future we should maybe make
# titletranslate expect a list of pagelinks.
- hintStrings = ['%s:%s' % (hintedPage.site.lang,
- hintedPage.title())
+ hintStrings = ['{}:{}'.format(hintedPage.site.lang,
+ hintedPage.title())
for hintedPage in pagelist]
bot.add(page, hints=hintStrings)
@@ -2483,7 +2491,7 @@
if isinstance(ns, unicode) or isinstance(ns, str):
index = site.namespaces.lookup_name(ns)
if index is None:
- raise ValueError(u'Unknown namespace: %s' % ns)
+ raise ValueError('Unknown namespace: ' + ns)
ns = index.id
namespaces = []
else:
@@ -2495,7 +2503,7 @@
dumpFileName = pywikibot.config.datafilepath(
'data',
'interwiki-dumps',
- u'%s-%s.pickle' % (site.family.name, site.code)
+ '{0}-{1}.pickle'.format(site.family.name, site.code)
)
try:
with open(dumpFileName, 'rb') as f:
@@ -2513,7 +2521,7 @@
else:
pywikibot.output(
'Dump file is empty?! Starting at the beginning.')
- nextPage = "!"
+ nextPage = '!'
namespace = 0
gen2 = site.allpages(start=nextPage,
namespace=namespace,
@@ -2536,7 +2544,7 @@
readWarnfile(warnfile, bot)
else:
if not singlePageTitle and not opthintsonly:
- singlePageTitle = pywikibot.input(u'Which page to check:')
+ singlePageTitle = pywikibot.input('Which page to check:')
if singlePageTitle:
singlePage = pywikibot.Page(pywikibot.Site(), singlePageTitle)
else:
@@ -2568,5 +2576,5 @@
pass
-if __name__ == "__main__":
+if __name__ == '__main__':
main()
--
To view, visit https://gerrit.wikimedia.org/r/462527
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Ie3caa13d11b92a89704d9310249cb0bb2850024d
Gerrit-Change-Number: 462527
Gerrit-PatchSet: 7
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462549 )
Change subject: [cleanup] cleanup scripts/imagerecat.py
......................................................................
[cleanup] cleanup scripts/imagerecat.py
- use str.format(...) instead of modulo for type specifier
arguments
- remove leading "u" from strings
Change-Id: I104c3254e601994df7730bd3298a908bd28adbd8
---
M scripts/imagerecat.py
1 file changed, 6 insertions(+), 6 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py
index d6996f9..8af4a9a 100755
--- a/scripts/imagerecat.py
+++ b/scripts/imagerecat.py
@@ -253,8 +253,8 @@
pywikibot.output('Dropping {}, {}'
.format(addresspart.tag, addresspart.text))
else:
- pywikibot.warning('%s, %s is not in addressparts lists'
- % (addresspart.tag, addresspart.text))
+ pywikibot.warning('{}, {} is not in addressparts lists'
+ .format(addresspart.tag, addresspart.text))
return result
@@ -324,7 +324,7 @@
"""Filter out disambiguation categories."""
result = []
for cat in categories:
- if (not pywikibot.Page(pywikibot.Site(u'commons', u'commons'),
+ if (not pywikibot.Page(pywikibot.Site('commons', 'commons'),
cat, ns=14).isDisambig()):
result.append(cat)
return result
@@ -334,7 +334,7 @@
"""If a category is a redirect, replace the category with the target."""
result = []
for cat in categories:
- categoryPage = pywikibot.Page(pywikibot.Site(u'commons', u'commons'),
+ categoryPage = pywikibot.Page(pywikibot.Site('commons', 'commons'),
cat, ns=14)
if categoryPage.isCategoryRedirect():
result.append(
@@ -358,7 +358,7 @@
listByCountry = []
listCountries = []
for cat in categories:
- if cat.endswith(u'by country'):
+ if cat.endswith('by country'):
listByCountry.append(cat)
# If cat contains 'by country' add it to the list
@@ -370,7 +370,7 @@
if len(listByCountry) > 0:
for bc in listByCountry:
category = pywikibot.Category(
- pywikibot.Site(u'commons', u'commons'), u'Category:' + bc)
+ pywikibot.Site('commons', 'commons'), 'Category:' + bc)
for subcategory in category.subcategories():
for country in listCountries:
if subcategory.title(with_ns=False).endswith(country):
--
To view, visit https://gerrit.wikimedia.org/r/462549
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I104c3254e601994df7730bd3298a908bd28adbd8
Gerrit-Change-Number: 462549
Gerrit-PatchSet: 1
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)