jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462790 )
Change subject: [cleanup] cleanup scripts/checkimages.py
......................................................................
[cleanup] cleanup scripts/checkimages.py
- use str.format(...) instead of modulo for type specifier
arguments.
Change-Id: I27e1e12aceae7269d604e5e91030708932421d6d
---
M scripts/checkimages.py
1 file changed, 44 insertions(+), 44 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index e71be3a..43333ee 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -635,7 +635,7 @@
except Exception:
pywikibot.exception()
pywikibot.output(
- 'Another error... skipping the user..')
+ 'Another error... skipping the user...')
def uploadBotChangeFunction(self, reportPageText, upBotArray):
"""Detect the user that has uploaded the file through upload bot."""
@@ -679,8 +679,8 @@
nick = reportPageObject.latest_file_info.user
except pywikibot.PageRelatedError:
pywikibot.output(
- 'Seems that %s has only the description and not the file...'
- % self.image_to_report)
+ 'Seems that {} has only the description and not the file...'
+ .format(self.image_to_report))
repme = self.list_entry + "problems '''with the APIs'''"
self.report_image(self.image_to_report, self.rep_page, self.com,
repme)
@@ -716,8 +716,8 @@
history = self.talk_page.getLatestEditors(limit=10)
latest_user = history[0]['user']
pywikibot.output(
- 'The latest user that has written something is: %s'
- % latest_user)
+ 'The latest user that has written something is: '
+ + latest_user)
if latest_user in self.bots:
second_text = True
# A block to prevent the second message if the bot also
@@ -769,8 +769,8 @@
except (pywikibot.NoPage, pywikibot.IsRedirectPage):
return
if self.sendemailActive:
- text_to_send = re.sub(r'__user-nickname__', r'%s'
- % self.luser, emailText)
+ text_to_send = re.sub(r'__user-nickname__', r'{}'
+ .format(self.luser), emailText)
emailClass = pywikibot.User(self.site, self.luser)
try:
emailClass.send_email(emailSubj, text_to_send)
@@ -780,7 +780,7 @@
def regexGenerator(self, regexp, textrun):
"""Find page to yield using regex to parse text."""
- regex = re.compile(r'%s' % regexp, re.UNICODE | re.DOTALL)
+ regex = re.compile(r'{}'.format(regexp), re.UNICODE | re.DOTALL)
results = regex.findall(textrun)
for image in results:
yield pywikibot.FilePage(self.site, image)
@@ -831,8 +831,8 @@
def checkImageOnCommons(self):
"""Checking if the file is on commons."""
- pywikibot.output('Checking if [[%s]] is on commons...'
- % self.imageName)
+ pywikibot.output('Checking if [[{}]] is on commons...'
+ .format(self.imageName))
try:
hash_found = self.image.latest_file_info.sha1
except pywikibot.NoPage:
@@ -849,8 +849,8 @@
if pywikibot.Page(self.site,
template) in templatesInTheImage:
pywikibot.output(
- "%s is on commons but it's a service image."
- % self.imageName)
+ "{} is on commons but it's a service image."
+ .format(self.imageName))
return True # continue with the check-part
pywikibot.output(self.imageName + ' is on commons!')
@@ -865,8 +865,8 @@
if re.findall(r'\bstemma\b', self.imageName.lower()) and \
self.site.code == 'it':
pywikibot.output(
- '%s has "stemma" inside, means that it\'s ok.'
- % self.imageName)
+ "{} has 'stemma' inside, means that it's ok."
+ .format(self.imageName))
return True
# It's not only on commons but the image needs a check
@@ -944,8 +944,8 @@
if not (re.findall(dupRegex, DupPageText) or
re.findall(dupRegex, older_page_text)):
pywikibot.output(
- '%s is a duplicate and has to be tagged...'
- % dup_page)
+ '{} is a duplicate and has to be tagged...'
+ .format(dup_page))
images_to_tag_list.append(dup_page.title())
string += '* {0}\n'.format(
dup_page.title(as_link=True, textlink=True))
@@ -1029,8 +1029,8 @@
):
# the image itself, not report also this as duplicate
continue
- repme += '\n** [[:%s%s]]' % (self.image_namespace,
- dup_page.title(as_url=True))
+ repme += '\n** [[:{}{}]]'.format(
+ self.image_namespace, dup_page.title(as_url=True))
result = self.report_image(self.imageName, self.rep_page,
self.com, repme, addings=False)
@@ -1071,8 +1071,8 @@
reported = True
# Skip if the message is already there.
if short_text in text_get:
- pywikibot.output('%s is already in the report page.'
- % image_to_report)
+ pywikibot.output('{} is already in the report page.'
+ .format(image_to_report))
reported = False
elif len(text_get) >= self.logFulNumber:
if self.logFullError:
@@ -1246,7 +1246,7 @@
if not self.licenses_found and licenses_TEST:
raise pywikibot.Error(
"Invalid or broken templates found in the image's "
- 'page %s!' % self.image)
+ 'page {}!'.format(self.image))
self.allLicenses = []
if not self.list_licenses:
@@ -1289,8 +1289,8 @@
pywikibot.output('File already fixed. Skipping.')
else:
pywikibot.output(
- "The file's description for %s contains %s..."
- % (self.imageName, self.name_used))
+ "The file's description for {} contains {}..."
+ .format(self.imageName, self.name_used))
if self.mex_used.lower() == 'default':
self.mex_used = self.unvertext
if self.imagestatus_used:
@@ -1312,8 +1312,8 @@
'<nowiki>%s</nowiki>') %
(self.imageName, self.license_found))
printWithTimeZone(
- '%s seems to have a fake license: %s, reporting...'
- % (self.imageName, self.license_found))
+ '{} seems to have a fake license: {}, reporting...'
+ .format(self.imageName, self.license_found))
self.report_image(self.imageName,
rep_text=rep_text_license_fake,
addings=False)
@@ -1369,8 +1369,8 @@
Let the users to fix the image's problem alone in the first x seconds.
"""
printWithTimeZone(
- 'Skipping the files uploaded less than %s seconds ago..'
- % wait_time)
+ 'Skipping the files uploaded less than {} seconds ago..'
+ .format(wait_time))
for page in generator:
image = pywikibot.FilePage(page)
try:
@@ -1383,10 +1383,10 @@
yield image
else:
pywikibot.warning(
- 'Skipping %s, uploaded %d %s ago..'
- % ((image.title(), delta.days, 'days')
- if delta.days > 0
- else (image.title(), delta.seconds, 'seconds')))
+ 'Skipping {}, uploaded {} {} ago..'
+ .format(image.title(), delta.days, 'days')
+ if delta.days > 0
+ else (image.title(), delta.seconds, 'seconds'))
def isTagged(self):
"""Understand if a file is already tagged or not."""
@@ -1430,7 +1430,7 @@
mexCatched = tupla[8]
for k in find_list:
if find_tipe.lower() == 'findonly':
- searchResults = re.findall(r'%s' % k.lower(),
+ searchResults = re.findall(r'{}'.format(k.lower()),
self.imageCheckText.lower())
if searchResults:
if searchResults[0] == self.imageCheckText.lower():
@@ -1443,7 +1443,7 @@
self.mex_used = mexCatched
break
elif find_tipe.lower() == 'find':
- if re.findall(r'%s' % k.lower(),
+ if re.findall(r'{}'.format(k.lower()),
self.imageCheckText.lower()):
self.some_problem = True
self.text_used = text
@@ -1491,20 +1491,20 @@
try:
self.imageCheckText = self.image.get()
except pywikibot.NoPage:
- pywikibot.output('Skipping %s because it has been deleted.'
- % self.imageName)
+ pywikibot.output('Skipping {} because it has been deleted.'
+ .format(self.imageName))
return
except pywikibot.IsRedirectPage:
- pywikibot.output("Skipping %s because it's a redirect."
- % self.imageName)
+ pywikibot.output("Skipping {} because it's a redirect."
+ .format(self.imageName))
return
# Delete the fields where the templates cannot be loaded
regex_nowiki = re.compile(r'<nowiki>(.*?)</nowiki>', re.DOTALL)
regex_pre = re.compile(r'<pre>(.*?)</pre>', re.DOTALL)
self.imageCheckText = regex_nowiki.sub('', self.imageCheckText)
self.imageCheckText = regex_pre.sub('', self.imageCheckText)
- # Deleting the useless template from the description (before adding sth
- # in the image the original text will be reloaded, don't worry).
+ # Deleting the useless template from the description (before adding
+ # sth in the image the original text will be reloaded, don't worry).
if self.isTagged():
printWithTimeZone('{} is already tagged...'.format(self.imageName))
return
@@ -1551,8 +1551,8 @@
smwl)
return
else:
- pywikibot.output('%s has only text and not the specific license...'
- % self.imageName)
+ pywikibot.output('{} has only text and not the specific '
+ 'license...'.format(self.imageName))
if hiddenTemplateFound and HiddenTN:
notification = HiddenTN % self.imageName
elif nn:
@@ -1692,7 +1692,7 @@
elif len(arg) > 4:
catName = str(arg[5:])
catSelected = pywikibot.Category(pywikibot.Site(),
- 'Category:%s' % catName)
+ 'Category:{}'.format(catName))
generator = catSelected.articles(namespaces=[6])
repeat = False
elif arg.startswith('-ref'):
@@ -1723,8 +1723,8 @@
if limit == 1:
pywikibot.output('Retrieving the latest file for checking...')
else:
- pywikibot.output('Retrieving the latest %d files for checking...'
- % limit)
+ pywikibot.output('Retrieving the latest {} files for checking...'
+ .format(limit))
while True:
# Defing the Main Class.
Bot = checkImagesBot(site, sendemailActive=sendemailActive,
--
To view, visit https://gerrit.wikimedia.org/r/462790
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I27e1e12aceae7269d604e5e91030708932421d6d
Gerrit-Change-Number: 462790
Gerrit-PatchSet: 3
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462891 )
Change subject: [cleanup] cleanup scripts/category.py
......................................................................
[cleanup] cleanup scripts/category.py
- use str.format(...) instead of modulo for type specifier
arguments.
- use "+" to concatenate strings in some cases
- remove leading "u" from strings
Change-Id: I34a808cd60a7e966991bbfbde3f2a67f55cbff67
---
M scripts/category.py
1 file changed, 19 insertions(+), 19 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/category.py b/scripts/category.py
index 6443dca..6e379bb 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -204,30 +204,30 @@
return redir_target
if self.create:
redir_target.text = ''
- pywikibot.output('Redirect target %s does not exist '
- 'yet; creating.'
- % redir_target.title(as_link=True))
+ pywikibot.output('Redirect target {} does not exist '
+ 'yet; creating.'.format(
+ redir_target.title(as_link=True)))
return redir_target
if self.edit_redirects:
return page
- pywikibot.warning('Redirect target %s can not '
- 'be modified; skipping.'
- % redir_target.title(as_link=True))
+ pywikibot.warning('Redirect target {} can not '
+ 'be modified; skipping.'.format(
+ redir_target.title(as_link=True)))
return None
if self.edit_redirects:
return page
- pywikibot.warning('Page %s is a redirect to %s; skipping.'
- % (page.title(as_link=True),
- redir_target.title(as_link=True)))
+ pywikibot.warning('Page {} is a redirect to {}; skipping.'
+ .format(page.title(as_link=True),
+ redir_target.title(as_link=True)))
return None
return page
if self.create:
page.text = ''
- pywikibot.output('Page %s does not exist yet; creating.'
- % page.title(as_link=True))
+ pywikibot.output('Page {} does not exist yet; creating.'
+ .format(page.title(as_link=True)))
return page
- pywikibot.warning('Page %s does not exist; skipping.'
- % page.title(as_link=True))
+ pywikibot.warning('Page {} does not exist; skipping.'
+ .format(page.title(as_link=True)))
return None
def determine_template_target(self, page):
@@ -300,8 +300,8 @@
if not self.is_loaded:
try:
if config.verbose_output:
- pywikibot.output('Reading dump from %s'
- % config.shortpath(self.filename))
+ pywikibot.output('Reading dump from '
+ + config.shortpath(self.filename))
with open_archive(self.filename, 'rb') as f:
databases = pickle.load(f)
# keys are categories, values are 2-tuples with lists as
@@ -778,8 +778,8 @@
Do not use this function from outside the class.
"""
# Some preparing
- pywikibot.output('Moving text from %s to %s.' % (
- self.oldcat.title(), self.newcat.title()))
+ pywikibot.output('Moving text from {} to {}.'.format(
+ self.oldcat.title(), self.newcat.title()))
comma = self.site.mediawiki_message('comma-separator')
authors = comma.join(self.oldcat.contributingUsers())
template_vars = {'oldcat': self.oldcat.title(), 'authors': authors}
@@ -1032,7 +1032,7 @@
def output_range(self, start, end):
"""Output a section and categories from the text."""
- pywikibot.output(self.text[start:end] + '…')
+ pywikibot.output(self.text[start:end] + '...')
# if categories weren't visible, show them additionally
if len(self.text) > end:
@@ -1315,7 +1315,7 @@
cat = pywikibot.Category(self.site, self.catTitle)
pywikibot.output('Generating tree...', newline=False)
tree = self.treeview(cat)
- pywikibot.output(u'')
+ pywikibot.output('')
if self.filename:
pywikibot.output('Saving results in ' + self.filename)
with codecs.open(self.filename, 'a', 'utf-8') as f:
--
To view, visit https://gerrit.wikimedia.org/r/462891
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I34a808cd60a7e966991bbfbde3f2a67f55cbff67
Gerrit-Change-Number: 462891
Gerrit-PatchSet: 1
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462525 )
Change subject: [simplify] Introduce new verbose_output method
......................................................................
[simplify] Introduce new verbose_output method
Use a new verbose_output method instead of several
"if pywikibot.config.verbose_output" statements
detached from I4ef9009c91e
Change-Id: I939ba830222a1ea09c4d3503cc0dc38b51d44a07
---
M scripts/patrol.py
1 file changed, 34 insertions(+), 43 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/patrol.py b/scripts/patrol.py
index c48d50f..139d293 100755
--- a/scripts/patrol.py
+++ b/scripts/patrol.py
@@ -66,6 +66,12 @@
_logger = 'patrol'
+def verbose_output(string):
+ """Verbose output."""
+ if pywikibot.config.verbose_output:
+ pywikibot.output(string)
+
+
class PatrolBot(SingleSiteBot):
"""Bot marks the edits as patrolled based on info obtained by whitelist."""
@@ -126,8 +132,7 @@
# Check for a more recent version after versionchecktime in sec.
if (self.whitelist_load_ts and (time.time() - self.whitelist_load_ts <
self.getOption('versionchecktime'))):
- if pywikibot.config.verbose_output:
- pywikibot.output('Whitelist not stale yet')
+ verbose_output('Whitelist not stale yet')
return
whitelist_page = pywikibot.Page(self.site,
@@ -145,8 +150,7 @@
# As there hasn't been any change to the whitelist
# it has been effectively reloaded 'now'
self.whitelist_load_ts = time.time()
- if pywikibot.config.verbose_output:
- pywikibot.output('Whitelist not modified')
+ verbose_output('Whitelist not modified')
return
if self.whitelist:
@@ -167,8 +171,7 @@
def in_list(self, pagelist, title):
"""Check if title present in pagelist."""
- if pywikibot.config.verbose_output:
- pywikibot.output('Checking whitelist for: ' + title)
+ verbose_output('Checking whitelist for: ' + title)
# quick check for exact match
if title in pagelist:
@@ -176,25 +179,21 @@
# quick check for wildcard
if '' in pagelist:
- if pywikibot.config.verbose_output:
- pywikibot.output('wildcarded')
+ verbose_output('wildcarded')
return '.*'
for item in pagelist:
- if pywikibot.config.verbose_output:
- pywikibot.output('checking against whitelist item = ' + item)
+ verbose_output('checking against whitelist item = ' + item)
if isinstance(item, LinkedPagesRule):
- if pywikibot.config.verbose_output:
- pywikibot.output('invoking programmed rule')
+ verbose_output('invoking programmed rule')
if item.match(title):
return item
elif title_match(item, title):
return item
- if pywikibot.config.verbose_output:
- pywikibot.output('not found')
+ verbose_output('not found')
def parse_page_tuples(self, wikitext, user=None):
"""Parse page details apart from 'user:' for use."""
@@ -224,14 +223,12 @@
name, sep, prefix = obj.title.partition('/')
if name.lower() in self._prefixindex_aliases:
if not prefix:
- if pywikibot.config.verbose_output:
- pywikibot.output('Whitelist everything')
+ verbose_output('Whitelist everything')
page = ''
else:
page = prefix
- if pywikibot.config.verbose_output:
- pywikibot.output('Whitelist prefixindex hack '
- 'for: ' + page)
+ verbose_output('Whitelist prefixindex hack for: '
+ + page)
# p = pywikibot.Page(self.site, obj.target[20:])
# obj.namespace = p.namespace
# obj.target = p.title()
@@ -241,8 +238,7 @@
# 'user:'
# the user will be the target of subsequent rules
current_user = obj.title
- if pywikibot.config.verbose_output:
- pywikibot.output('Whitelist user: ' + current_user)
+ verbose_output('Whitelist user: ' + current_user)
continue
else:
page = obj.canonical_title()
@@ -250,19 +246,17 @@
if current_user:
if not user or current_user == user:
if self.is_wikisource_author_page(page):
- if pywikibot.config.verbose_output:
- pywikibot.output('Whitelist author: ' + page)
+ verbose_output('Whitelist author: ' + page)
page = LinkedPagesRule(page)
else:
- if pywikibot.config.verbose_output:
- pywikibot.output('Whitelist page: ' + page)
- if pywikibot.config.verbose_output:
- pywikibot.output('Adding {0}:{1}'
- .format(current_user, page))
+ verbose_output('Whitelist page: ' + page)
+ verbose_output('Adding {0}:{1}'
+ .format(current_user, page))
whitelist[current_user].append(page)
- elif pywikibot.config.verbose_output:
- pywikibot.output('Discarding whitelist page for '
- 'another user: ' + page)
+ else:
+ verbose_output(
+ 'Discarding whitelist page for another user: '
+ + page)
else:
raise Exception('No user set for page ' + page)
@@ -283,9 +277,8 @@
pywikibot.debug('Author ns: {0}; name: {1}'
.format(author_ns, author_ns_prefix), _logger)
if title.find(author_ns_prefix + ':') == 0:
- if pywikibot.config.verbose_output:
- author_page_name = title[len(author_ns_prefix) + 1:]
- pywikibot.output('Found author ' + author_page_name)
+ author_page_name = title[len(author_ns_prefix) + 1:]
+ verbose_output('Found author ' + author_page_name)
return True
def run(self, feed=None):
@@ -322,24 +315,22 @@
self.load_whitelist()
self.repeat_start_ts = time.time()
- if pywikibot.config.verbose_output or self.getOption('ask'):
- pywikibot.output('User {0} has created or modified page {1}'
- .format(username, title))
+ if self.getOption('ask'):
+ verbose_output('User {0} has created or modified page {1}'
+ .format(username, title))
if self.getOption('autopatroluserns') and (page['ns'] == 2 or
page['ns'] == 3):
# simple rule to whitelist any user editing their own userspace
if title.partition(':')[2].split('/')[0].startswith(username):
- if pywikibot.config.verbose_output:
- pywikibot.output('{0} is whitelisted to modify {1}'
- .format(username, title))
+ verbose_output('{0} is whitelisted to modify {1}'
+ .format(username, title))
choice = True
if not choice and username in self.whitelist:
if self.in_list(self.whitelist[username], title):
- if pywikibot.config.verbose_output:
- pywikibot.output('{0} is whitelisted to modify {1}'
- .format(username, title))
+ verbose_output('{0} is whitelisted to modify {1}'
+ .format(username, title))
choice = True
if self.getOption('ask'):
--
To view, visit https://gerrit.wikimedia.org/r/462525
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I939ba830222a1ea09c4d3503cc0dc38b51d44a07
Gerrit-Change-Number: 462525
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/463089 )
Change subject: [cleanup] cleanup tests/[user_tests.py to wikibase_edit_tests.py]
......................................................................
[cleanup] cleanup tests/[user_tests.py to wikibase_edit_tests.py]
- use str.format(...) instead of modulo for type specifier
arguments.
- use single quotes for string literals
- remove leading "u" from strings
- indentation to make sure code lines are less than 79
characters.
Change-Id: Ibfe2f35a877e4074ce67619a0c4ce7a868e53985
---
M tests/utils.py
M tests/weblib_tests.py
M tests/weblinkchecker_tests.py
M tests/wikibase_edit_tests.py
4 files changed, 36 insertions(+), 28 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/utils.py b/tests/utils.py
index 84ce7bf..e9b7cf9 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -255,10 +255,10 @@
"""
Context manager to assert certain APIError exceptions.
- This is build similar to the L{unittest.TestCase.assertError} implementation
- which creates an context manager. It then calls L{handle} which either
- returns this manager if no executing object given or calls the callable
- object.
+ This is build similar to the L{unittest.TestCase.assertError}
+ implementation which creates an context manager. It then calls L{handle}
+ which either returns this manager if no executing object given or calls
+ the callable object.
"""
def __init__(self, code, info, msg, test_case):
@@ -392,8 +392,8 @@
def submit(self):
"""Prevented method."""
- raise Exception(u'DryRequest rejecting request: %r'
- % self._params)
+ raise Exception('DryRequest rejecting request: {!r}'
+ .format(self._params))
class DrySite(pywikibot.site.APISite):
@@ -558,9 +558,9 @@
This patches the C{http} import in the given module to a class simulating
C{request} and C{fetch}. It has a C{data} attribute which is either a
- static value which the requests will return or it's a callable returning the
- data. If it's a callable it'll be called with the same parameters as the
- original function in the L{http} module. For fine grained control it's
+ static value which the requests will return or it's a callable returning
+ the data. If it's a callable it'll be called with the same parameters as
+ the original function in the L{http} module. For fine grained control it's
possible to override/monkey patch the C{before_request} and C{before_fetch}
methods. By default they just return C{data} directory or call it if it's
callable.
@@ -680,14 +680,15 @@
not isinstance(v, str)]
if unicode_env:
raise TypeError(
- '%s: unicode in os.environ: %r' % (e, unicode_env))
+ '{}: unicode in os.environ: {!r}'.format(e, unicode_env))
child_unicode_env = [(k, v) for k, v in env.items()
if not isinstance(k, str) or
not isinstance(v, str)]
if child_unicode_env:
raise TypeError(
- '%s: unicode in child env: %r' % (e, child_unicode_env))
+ '{}: unicode in child env: {!r}'
+ .format(e, child_unicode_env))
raise
if data_in is not None:
@@ -718,7 +719,8 @@
data_out = p.communicate()
return {'exit_code': p.returncode,
'stdout': data_out[0].decode(config.console_encoding),
- 'stderr': (stderr_lines + data_out[1]).decode(config.console_encoding)}
+ 'stderr': (stderr_lines + data_out[1])
+ .decode(config.console_encoding)}
def execute_pwb(args, data_in=None, timeout=0, error=None, overrides=None):
@@ -735,10 +737,10 @@
if overrides:
command.append('-c')
overrides = '; '.join(
- '%s = %s' % (key, value) for key, value in overrides.items())
+ '{} = {}'.format(key, value) for key, value in overrides.items())
command.append(
- 'import pwb; import pywikibot; %s; pwb.main()'
- % overrides)
+ 'import pwb; import pywikibot; {}; pwb.main()'
+ .format(overrides))
else:
command.append(_pwb_py)
diff --git a/tests/weblib_tests.py b/tests/weblib_tests.py
index a16e441..98d657b 100644
--- a/tests/weblib_tests.py
+++ b/tests/weblib_tests.py
@@ -50,17 +50,20 @@
"""Test Internet Archive for newest https://google.com."""
archivedversion = self._get_archive_url('https://google.com')
parsed = urlparse(archivedversion)
- self.assertIn(parsed.scheme, [u'http', u'https'])
- self.assertEqual(parsed.netloc, u'web.archive.org')
- self.assertTrue(parsed.path.strip('/').endswith('google.com'), parsed.path)
+ self.assertIn(parsed.scheme, ['http', 'https'])
+ self.assertEqual(parsed.netloc, 'web.archive.org')
+ self.assertTrue(parsed.path.strip('/').endswith('google.com'),
+ parsed.path)
def testInternetArchiveOlder(self):
"""Test Internet Archive for https://google.com as of June 2006."""
- archivedversion = self._get_archive_url('https://google.com', '20060601')
+ archivedversion = self._get_archive_url('https://google.com',
+ '20060601')
parsed = urlparse(archivedversion)
- self.assertIn(parsed.scheme, [u'http', u'https'])
- self.assertEqual(parsed.netloc, u'web.archive.org')
- self.assertTrue(parsed.path.strip('/').endswith('google.com'), parsed.path)
+ self.assertIn(parsed.scheme, ['http', 'https'])
+ self.assertEqual(parsed.netloc, 'web.archive.org')
+ self.assertTrue(parsed.path.strip('/').endswith('google.com'),
+ parsed.path)
self.assertIn('200606', parsed.path)
@@ -82,8 +85,10 @@
@unittest.expectedFailure # See T110640
def testWebCiteOlder(self):
"""Test WebCite for https://google.com as of January 2013."""
- archivedversion = self._get_archive_url('https://google.com', '20130101')
- self.assertEqual(archivedversion, 'http://www.webcitation.org/6DHSeh2L0')
+ archivedversion = self._get_archive_url('https://google.com',
+ '20130101')
+ self.assertEqual(archivedversion,
+ 'http://www.webcitation.org/6DHSeh2L0')
if __name__ == '__main__': # pragma: no cover
diff --git a/tests/weblinkchecker_tests.py b/tests/weblinkchecker_tests.py
index 8052d63..4608723 100644
--- a/tests/weblinkchecker_tests.py
+++ b/tests/weblinkchecker_tests.py
@@ -42,7 +42,8 @@
self.skipTest(e)
-class WeblibTestMementoInternetArchive(MementoTestCase, weblib_tests.TestInternetArchive):
+class WeblibTestMementoInternetArchive(MementoTestCase,
+ weblib_tests.TestInternetArchive):
"""Test InternetArchive Memento using old weblib tests."""
diff --git a/tests/wikibase_edit_tests.py b/tests/wikibase_edit_tests.py
index 3f1cf59..497cfa1 100644
--- a/tests/wikibase_edit_tests.py
+++ b/tests/wikibase_edit_tests.py
@@ -402,7 +402,7 @@
# Remove qualifier
claim = item.claims['P115'][0]
- qual_3 = claim.qualifiers[u'P580'][0]
+ qual_3 = claim.qualifiers['P580'][0]
claim.removeQualifier(qual_3)
# Check P580 qualifier removed but P88 qualifier remains
@@ -422,8 +422,8 @@
# Remove qualifiers
item.get(force=True)
claim = item.claims['P115'][0]
- qual_3 = claim.qualifiers[u'P580'][0]
- qual_4 = claim.qualifiers[u'P88'][0]
+ qual_3 = claim.qualifiers['P580'][0]
+ qual_4 = claim.qualifiers['P88'][0]
claim.removeQualifiers([qual_3, qual_4])
# Check P580 and P88 qualifiers are removed
--
To view, visit https://gerrit.wikimedia.org/r/463089
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Ibfe2f35a877e4074ce67619a0c4ce7a868e53985
Gerrit-Change-Number: 463089
Gerrit-PatchSet: 3
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462913 )
Change subject: [cleanup] cleanup scripts/maintenance/compat2core.py
......................................................................
[cleanup] cleanup scripts/maintenance/compat2core.py
- remove leading "u" from strings
- use single quotes for string literals
- use str.format(...) instead of modulo for substituting
type specifier arguments with placeholders
Change-Id: Ie30db8720ed37861e42b76a4e6b2ddfda8ee5ca4
---
M scripts/maintenance/compat2core.py
1 file changed, 7 insertions(+), 6 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/maintenance/compat2core.py b/scripts/maintenance/compat2core.py
index cfc807d..00afe9d 100755
--- a/scripts/maintenance/compat2core.py
+++ b/scripts/maintenance/compat2core.py
@@ -56,7 +56,7 @@
('import catlib\r?\n', ''),
('import userlib\r?\n', ''),
# change wikipedia to pywikibot, exclude URLs
- (r'(?<!\.)wikipedia\.', u'pywikibot.'),
+ (r'(?<!\.)wikipedia\.', 'pywikibot.'),
# site instance call
(r'pywikibot\.getSite\s*\(\s*', 'pywikibot.Site('),
# lang is different from code. We should use code in core
@@ -152,21 +152,22 @@
'(no input to leave):')
if not self.source:
exit()
- if not self.source.endswith(u'.py'):
+ if not self.source.endswith('.py'):
self.source += '.py'
if os.path.exists(self.source):
break
self.source = os.path.join('scripts', self.source)
if os.path.exists(self.source):
break
- pywikibot.output(u'%s does not exist. Please retry.' % self.source)
+ pywikibot.output('{} does not exist. Please retry.'
+ .format(self.source))
self.source = None
def get_dest(self):
"""Ask for destination script name."""
- self.dest = u'%s-core.%s' % tuple(self.source.rsplit(u'.', 1))
+ self.dest = '%s-core.%s' % tuple(self.source.rsplit('.', 1))
if not self.warnonly and not pywikibot.input_yn(
- u'Destination file is %s.' % self.dest,
+ 'Destination file is {}.'.format(self.dest),
default=True, automatic_quit=False):
pywikibot.output('Quitting...')
exit()
@@ -209,6 +210,6 @@
bot.run()
-if __name__ == "__main__":
+if __name__ == '__main__':
pywikibot.stopme() # we do not work on any site
main()
--
To view, visit https://gerrit.wikimedia.org/r/462913
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Ie30db8720ed37861e42b76a4e6b2ddfda8ee5ca4
Gerrit-Change-Number: 462913
Gerrit-PatchSet: 1
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/462764 )
Change subject: [cleanup] cleanup scripts/commonscat.py
......................................................................
[cleanup] cleanup scripts/commonscat.py
- use str.format(...) instead of modulo for type specifier
arguments.
- use single quotes for string literals
- use "+" for concatenating strings in some cases
Change-Id: I303434ee78d2b0c4b04d95eabf43338d78973364
---
M scripts/commonscat.py
1 file changed, 23 insertions(+), 22 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/commonscat.py b/scripts/commonscat.py
index 1ad4a1f..71a95cf 100755
--- a/scripts/commonscat.py
+++ b/scripts/commonscat.py
@@ -241,17 +241,17 @@
def treat(self, page):
"""Load the given page, do some changes, and save it."""
if not page.exists():
- pywikibot.output('Page %s does not exist. Skipping.'
- % page.title(as_link=True))
+ pywikibot.output('Page {} does not exist. Skipping.'
+ .format(page.title(as_link=True)))
elif page.isRedirectPage():
- pywikibot.output('Page %s is a redirect. Skipping.'
- % page.title(as_link=True))
+ pywikibot.output('Page {} is a redirect. Skipping.'
+ .format(page.title(as_link=True)))
elif page.isCategoryRedirect():
- pywikibot.output('Page %s is a category redirect. Skipping.'
- % page.title(as_link=True))
+ pywikibot.output('Page {} is a category redirect. Skipping.'
+ .format(page.title(as_link=True)))
elif page.isDisambig():
- pywikibot.output('Page %s is a disambiguation. Skipping.'
- % page.title(as_link=True))
+ pywikibot.output('Page {} is a disambiguation. Skipping.'
+ .format(page.title(as_link=True)))
else:
self.addCommonscat(page)
@@ -265,7 +265,7 @@
if code in commonscatTemplates:
return commonscatTemplates[code]
else:
- return commonscatTemplates[u'_default']
+ return commonscatTemplates['_default']
def skipPage(self, page):
"""Determine if the page should be skipped."""
@@ -299,16 +299,17 @@
page.site.code)
commonscatLink = self.getCommonscatLink(page)
if commonscatLink:
- pywikibot.output('Commonscat template is already on %s'
- % page.title())
+ pywikibot.output('Commonscat template is already on '
+ + page.title())
(currentCommonscatTemplate,
currentCommonscatTarget, LinkText, Note) = commonscatLink
checkedCommonscatTarget = self.checkCommonscatLink(
currentCommonscatTarget)
if (currentCommonscatTarget == checkedCommonscatTarget):
# The current commonscat link is good
- pywikibot.output('Commonscat link at %s to Category:%s is ok'
- % (page.title(), currentCommonscatTarget))
+ pywikibot.output('Commonscat link at {} to Category:{} is ok'
+ .format(page.title(),
+ currentCommonscatTarget))
return True
elif checkedCommonscatTarget != '':
# We have a new Commonscat link, replace the old one
@@ -327,8 +328,8 @@
# TODO: if the commonsLink == '', should it be removed?
elif self.skipPage(page):
- pywikibot.output('Found a template in the skip list. Skipping %s'
- % page.title())
+ pywikibot.output('Found a template in the skip list. Skipping '
+ + page.title())
else:
commonscatLink = self.findCommonscatLink(page)
if (commonscatLink != ''):
@@ -404,9 +405,9 @@
possibleCommonscat)
if (checkedCommonscat != ''):
pywikibot.output(
- 'Found link for %s at [[%s:%s]] to %s.'
- % (page.title(), ipage.site.code,
- ipage.title(), checkedCommonscat))
+ 'Found link for {} at [[{}:{}]] to {}.'
+ .format(page.title(), ipage.site.code, ipage.title(),
+ checkedCommonscat))
return checkedCommonscat
except pywikibot.BadTitle:
# The interwiki was incorrect
@@ -442,7 +443,7 @@
commonscatLinktext, commonscatNote)
return None
- def checkCommonscatLink(self, name=""):
+ def checkCommonscatLink(self, name=''):
"""Return the name of a valid commons category.
If the page is a redirect this function tries to follow it.
@@ -477,9 +478,9 @@
return self.checkCommonscatLink(m.group('newcat2'))
else:
pywikibot.output(
- "getCommonscat: %s deleted by %s. Couldn't find "
- 'move target in "%s"'
- % (commonsPage, loguser, logcomment))
+ "getCommonscat: {} deleted by {}. Couldn't find "
+ 'move target in "{}"'
+ .format(commonsPage, loguser, logcomment))
return ''
return ''
elif commonsPage.isRedirectPage():
--
To view, visit https://gerrit.wikimedia.org/r/462764
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I303434ee78d2b0c4b04d95eabf43338d78973364
Gerrit-Change-Number: 462764
Gerrit-PatchSet: 2
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)