jenkins-bot has submitted this change and it was merged.
Change subject: Fix indents and enable E122 and E127
......................................................................
Fix indents and enable E122 and E127
Change-Id: I064d67bd7f85fc0c47e07ae5d30f61dbbd3be92b
---
M pywikibot/cosmetic_changes.py
M scripts/checkimages.py
M scripts/imagecopy_self.py
M scripts/match_images.py
M scripts/nowcommons.py
M scripts/reflinks.py
M scripts/script_wui.py
M scripts/solve_disambiguation.py
M tests/site_tests.py
M tox.ini
10 files changed, 35 insertions(+), 40 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
XZise: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index fffbeff..169359f 100755
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -220,9 +220,11 @@
self.cleanUpSectionHeaders,
self.putSpacesInLists,
self.translateAndCapitalizeNamespaces,
-# FIXME: self.translateMagicWords,
+ # FIXME: fix bugs and re-enable
+ # self.translateMagicWords,
self.replaceDeprecatedTemplates,
-# FIXME: self.resolveHtmlEntities,
+ # FIXME: fix bugs and re-enable
+ # self.resolveHtmlEntities,
self.removeUselessSpaces,
self.removeNonBreakingSpaceBeforePercent,
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index c618a45..4ab4405 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -616,10 +616,8 @@
hiddentemplatesRaw = i18n.translate(self.site, HiddenTemplate)
self.hiddentemplates = set([pywikibot.Page(self.site, tmp)
for tmp in hiddentemplatesRaw])
- self.pageHidden = i18n.translate(self.site,
- PageWithHiddenTemplates)
- self.pageAllowed = i18n.translate(self.site,
- PageWithAllowedTemplates)
+ self.pageHidden = i18n.translate(self.site, PageWithHiddenTemplates)
+ self.pageAllowed = i18n.translate(self.site, PageWithAllowedTemplates)
self.comment = i18n.translate(self.site, msg_comm, fallback=True)
# Adding the bot's nickname at the notification text if needed.
self.bots = i18n.translate(self.site, bot_list)
@@ -967,10 +965,8 @@
dupRegex = i18n.translate(self.site, duplicatesRegex)
dupTalkHead = i18n.translate(self.site, duplicate_user_talk_head)
dupTalkText = i18n.translate(self.site, duplicates_user_talk_text)
- dupComment_talk = i18n.translate(self.site,
- duplicates_comment_talk)
- dupComment_image = i18n.translate(self.site,
- duplicates_comment_image)
+ dupComment_talk = i18n.translate(self.site, duplicates_comment_talk)
+ dupComment_image = i18n.translate(self.site, duplicates_comment_image)
imagePage = pywikibot.FilePage(self.site, self.imageName)
hash_found = imagePage.latest_file_info.sha1
duplicates = list(self.site.allimages(sha1=hash_found))
diff --git a/scripts/imagecopy_self.py b/scripts/imagecopy_self.py
index 73f458f..7cc3741 100644
--- a/scripts/imagecopy_self.py
+++ b/scripts/imagecopy_self.py
@@ -509,7 +509,7 @@
source = u'{{Own}}'
return source.strip() + u'<BR />Transferred from
[http://%(lang)s.%(family)s.org %(lang)s.%(family)s]' \
- % {u'lang': lang, u'family': family}
+ % {u'lang': lang, u'family': family}
def getAuthorText(self, imagepage):
"""Get the original uploader to put in the author field of the new
information template."""
@@ -887,13 +887,13 @@
timestamp, u'%Y-%m-%dT%H:%M:%SZ').strftime('%Y-%m-%d
%H:%M')
result += (u'* %(date)s [[:%(lang)s:user:%(username)s|%(username)s]]
%(resolution)s' +
u' (%(size)s bytes)
\'\'<nowiki>%(comment)s</nowiki>\'\'\n' % {
- u'lang': lang,
- u'family': family,
- u'date': date,
- u'username': username,
- u'resolution': resolution,
- u'size': size,
- u'comment': comment})
+ u'lang': lang,
+ u'family': family,
+ u'date': date,
+ u'username': username,
+ u'resolution': resolution,
+ u'size': size,
+ u'comment': comment})
return result
diff --git a/scripts/match_images.py b/scripts/match_images.py
index 80f8251..b855f94 100755
--- a/scripts/match_images.py
+++ b/scripts/match_images.py
@@ -96,8 +96,9 @@
'Center of image {5:>7.2%}\n'
' -------\n'
'Average {6:>7.2%}'.format(
- wholeScore, topleftScore, toprightScore, bottomleftScore,
- bottomrightScore, centerScore, averageScore))
+ wholeScore, topleftScore, toprightScore,
+ bottomleftScore, bottomrightScore, centerScore,
+ averageScore))
# Hard coded at 80%, change this later on.
if averageScore > 0.8:
diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py
index 433dd9c..2b79247 100755
--- a/scripts/nowcommons.py
+++ b/scripts/nowcommons.py
@@ -346,13 +346,14 @@
pywikibot.output(u'NowCommons template not found.')
continue
commonsImagePage = pywikibot.FilePage(commons, 'Image:%s'
- % filenameOnCommons)
- if localImagePage.title(withNamespace=False) == \
- commonsImagePage.title(withNamespace=False) and
self.getOption('use_hash'):
+ % filenameOnCommons)
+ if (localImagePage.title(withNamespace=False) ==
+ commonsImagePage.title(withNamespace=False) and
+ self.getOption('use_hash')):
pywikibot.output(
u'The local and the commons images have the same name')
- if localImagePage.title(withNamespace=False) != \
- commonsImagePage.title(withNamespace=False):
+ if (localImagePage.title(withNamespace=False) !=
+ commonsImagePage.title(withNamespace=False)):
usingPages = list(localImagePage.usingPages())
if usingPages and usingPages != [localImagePage]:
pywikibot.output(color_format(
@@ -458,10 +459,7 @@
options = {}
for arg in pywikibot.handle_args(args):
- if arg.startswith('-') and \
- arg[1:] in ('always', 'replace', 'replaceloose',
'replaceonly'):
- options[arg[1:]] = True
- elif arg == '-replacealways':
+ if arg == '-replacealways':
options['replace'] = True
options['replacealways'] = True
elif arg == '-hash':
@@ -470,6 +468,10 @@
pywikibot.warning(u"The '-autonomous' argument is
DEPRECATED,"
u" use '-always' instead.")
options['always'] = True
+ elif arg.startswith('-'):
+ if arg[1:] in ('always', 'replace', 'replaceloose',
'replaceonly'):
+ options[arg[1:]] = True
+
bot = NowCommonsDeleteBot(**options)
bot.run()
diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 328b904..d7ad373 100755
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -496,7 +496,7 @@
new_text = page.get()
if not page.canBeEdited():
pywikibot.output(u"You can't edit page %s"
- % page.title(asLink=True))
+ % page.title(asLink=True))
continue
except pywikibot.NoPage:
pywikibot.output(u'Page %s not found' % page.title(asLink=True))
diff --git a/scripts/script_wui.py b/scripts/script_wui.py
index a4a20b8..1e13f6c 100755
--- a/scripts/script_wui.py
+++ b/scripts/script_wui.py
@@ -45,7 +45,6 @@
# [ shell (rev-id) -> output mit shell rev-id ]
# [ shell rev-id (als eindeutige job/task-config bzw. script) -> crontab ]
# @todo Bei jeder Botbearbeitung wird der Name des Auftraggebers vermerkt
-# @todo (may be queue_security needed later in order to allow other
'super-users' too...)
# --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---
# Writing code in Wikipedia:
#
@@ -108,11 +107,6 @@
'ConfCSSoutput': u'User:DrTrigonBot/Simulation',
'CRONMaxDelay': 5 * 60.0, # check all ~5 minutes
-# 'queue_security': ([u'DrTrigon', u'DrTrigonBot'],
u'Bot: exec'),
-# 'queue_security': ([u'DrTrigon'], u'Bot: exec'),
-
- # supported and allowed bot scripts
- # (at the moment all)
# forbidden parameters
# (at the moment none, but consider e.g. '-always' or allow it with
'-simulate' only!)
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index c7460b0..0753773 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -938,7 +938,7 @@
for l in links]
except pywikibot.NoPage:
pywikibot.output(
-u"Page does not exist, using the first link in page %s."
+ 'Page does not exist; using first link in page %s.'
% disambPage.title())
links = disambPage.linkedPages()[:1]
links = [correctcap(l, disambPage.get())
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 73ff338..b0b51f8 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -484,8 +484,8 @@
print('FAILURE wrt T92856:')
print(u'Sym. difference: "{0}"'.format(
u'", "'.join(
- u'{0}(a){1}'.format(link.namespace(),
link.title(withNamespace=False))
- for link in namespace_links ^ links)))
+ u'{0}(a){1}'.format(link.namespace(),
link.title(withNamespace=False))
+ for link in namespace_links ^ links)))
self.assertCountEqual(
set(mysite.pagelinks(mainpage, namespaces=[0, 1])) - links, [])
for target in mysite.preloadpages(mysite.pagelinks(mainpage,
diff --git a/tox.ini b/tox.ini
index 834d66d..a46ddf2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -5,7 +5,7 @@
[params]
nose_skip = --ignore-files=(gui\.py|botirc\.py|rcstream\.py)
-flake8_ignore = D102,D103,E122,E127,{[flake8]ignore}
+flake8_ignore = D102,D103,{[flake8]ignore}
[testenv]
setenv =
--
To view, visit
https://gerrit.wikimedia.org/r/186341
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I064d67bd7f85fc0c47e07ae5d30f61dbbd3be92b
Gerrit-PatchSet: 25
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Ricordisamoa <ricordisamoa(a)openmailbox.org>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>