jenkins-bot has submitted this change and it was merged.
Change subject: [IMPROV] Reduce line length to 100 characters
......................................................................
[IMPROV] Reduce line length to 100 characters
This reduces the line length of most files to at most 100 characters. Some
changes are omitted because a better solution may exist, and some would be
more appropriately made in other patches.
Change-Id: Iabdf520d6fb058fb3ca3d233f9221d62947c52a6
---
M docs/conf.py
M pywikibot/bot.py
M pywikibot/config2.py
M pywikibot/cosmetic_changes.py
M pywikibot/data/api.py
M pywikibot/date.py
M pywikibot/diff.py
M pywikibot/fixes.py
M pywikibot/logentries.py
M pywikibot/site.py
M pywikibot/textlib.py
M pywikibot/userinterfaces/terminal_interface_win32.py
M pywikibot/userinterfaces/win32_unicode.py
M scripts/blockreview.py
M scripts/cfd.py
M scripts/checkimages.py
M scripts/fixing_redirects.py
M scripts/flickrripper.py
M scripts/harvest_template.py
M scripts/illustrate_wikidata.py
M scripts/imagerecat.py
M scripts/imageuncat.py
M scripts/interwiki.py
M scripts/isbn.py
M scripts/login.py
M scripts/makecat.py
M scripts/noreferences.py
M scripts/nowcommons.py
M scripts/reflinks.py
M scripts/replace.py
M scripts/revertbot.py
M scripts/script_wui.py
M scripts/transferbot.py
M scripts/weblinkchecker.py
M scripts/welcome.py
M tests/data_ingestion_tests.py
M tests/date_tests.py
M tests/deprecation_tests.py
M tests/dry_api_tests.py
M tests/http_tests.py
M tests/i18n_tests.py
M tests/pagegenerators_tests.py
M tests/script_tests.py
M tests/site_tests.py
M tests/textlib_tests.py
M tests/timestripper_tests.py
M tests/tools_ip_tests.py
M tests/ui_tests.py
M tests/uploadbot_tests.py
M tests/wikidataquery_tests.py
50 files changed, 366 insertions(+), 215 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/docs/conf.py b/docs/conf.py
index 2e59f19..6df7ecf 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -27,7 +27,11 @@
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx_epytext',
'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
+extensions = ['sphinx.ext.autodoc',
+ 'sphinx_epytext',
+ 'sphinx.ext.todo',
+ 'sphinx.ext.coverage',
+ 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 2f85289..82df306 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -1971,8 +1971,8 @@
self.source_values = json.loads(page.get())
for family_code, family in self.source_values.items():
for source_lang in family:
- self.source_values[family_code][source_lang] =
pywikibot.ItemPage(self.repo,
-
family[source_lang])
+ self.source_values[family_code][source_lang] = pywikibot.ItemPage(
+ self.repo, family[source_lang])
def get_property_by_name(self, property_name):
"""
@@ -2044,7 +2044,8 @@
@return: Claim
"""
- if site.family.name in self.source_values and site.code in
self.source_values[site.family.name]:
+ if (site.family.name in self.source_values and
+ site.code in self.source_values[site.family.name]):
source = pywikibot.Claim(self.repo, 'P143')
source.setTarget(self.source_values.get(site.family.name).get(site.code))
return source
diff --git a/pywikibot/config2.py b/pywikibot/config2.py
index bd541a6..c3fc3b9 100644
--- a/pywikibot/config2.py
+++ b/pywikibot/config2.py
@@ -125,7 +125,8 @@
# User agent format.
# For the meaning and more help in customization see:
#
https://www.mediawiki.org/wiki/Manual:Pywikibot/User-agent
-user_agent_format = '{script_product} ({script_comments}) {pwb} ({revision})
{http_backend} {python}'
+user_agent_format = ('{script_product} ({script_comments}) {pwb} ({revision}) '
+ '{http_backend} {python}')
# The default interface for communicating with the site
# currently the only defined interface is 'APISite', so don't change this!
diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index c85548c..864397e 100755
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -909,7 +909,8 @@
# frequent field values to {{int:}} versions
text = textlib.replaceExcept(
text,
- r'([\r\n]\|[Ss]ource *\= *)(?:[Oo]wn work by uploader|[Oo]wn
work|[Ee]igene [Aa]rbeit) *([\r\n])',
+ r'([\r\n]\|[Ss]ource *\= *)'
+ r'(?:[Oo]wn work by uploader|[Oo]wn work|[Ee]igene [Aa]rbeit)
*([\r\n])',
r'\1{{own}}\2', exceptions, True)
text = textlib.replaceExcept(
text,
@@ -934,7 +935,8 @@
r'\1== {{int:filedesc}} ==', exceptions, True)
text = textlib.replaceExcept(
text,
- r'([\r\n]|^)\=\= *{{int:license-header}} *\=\=(?:[\r\n ]*)\=\=
*{{int:license-header}} *\=\=',
+ r'([\r\n]|^)\=\= *{{int:license-header}} *\=\=(?:[\r\n ]*)'
+ r'\=\= *{{int:license-header}} *\=\=',
r'\1== {{int:license-header}} ==', exceptions, True)
return text
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 88701de..0d94f79 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -1746,7 +1746,8 @@
def __repr__(self):
"""Return internal representation."""
- return "%s.%s<%s->%r>" % (self.__class__.__module__,
self.__class__.__name__, self.site, str(self))
+ return '%s.%s<%s->%r>' % (self.__class__.__module__,
self.__class__.__name__,
+ self.site, str(self))
def _simulate(self, action):
"""Simulate action."""
diff --git a/pywikibot/date.py b/pywikibot/date.py
index 163b720..bfdd693 100644
--- a/pywikibot/date.py
+++ b/pywikibot/date.py
@@ -2270,9 +2270,11 @@
'MonthName': (lambda v: 1 <= v and v < 13, 1, 13),
'Number': (lambda v: 0 <= v and v < 1000000, 0, 1001),
'YearAD': (lambda v: 0 <= v and v < 2501, 0, 2501),
- 'YearBC': (lambda v: 0 <= v and v < 4001, 0, 501), # zh:
has years as old as 前1700年
+ # zh: has years as old as 前1700年
+ 'YearBC': (lambda v: 0 <= v and v < 4001, 0, 501),
'DecadeAD': (lambda v: 0 <= v and v < 2501, 0, 2501),
- 'DecadeBC': (lambda v: 0 <= v and v < 4001, 0, 501), # zh:
has decades as old as 前1700年代
+ # zh: has decades as old as 前1700年代
+ 'DecadeBC': (lambda v: 0 <= v and v < 4001, 0, 501),
# Some centuries use Roman numerals or a given list
# do not exceed them in testing
diff --git a/pywikibot/diff.py b/pywikibot/diff.py
index 7bde222..9a5ec48 100644
--- a/pywikibot/diff.py
+++ b/pywikibot/diff.py
@@ -578,7 +578,8 @@
comparands = {'deleted-context': [], 'added-context': []}
soup = BeautifulSoup(compare_string)
- for change_type, css_class in (('deleted-context',
'diff-deletedline'), ('added-context', 'diff-addedline')):
+ for change_type, css_class in (('deleted-context',
'diff-deletedline'),
+ ('added-context', 'diff-addedline')):
crutons = soup.find_all('td', class_=css_class)
for cruton in crutons:
cruton_string = ''.join(cruton.strings)
diff --git a/pywikibot/fixes.py b/pywikibot/fixes.py
index 1eb1d91..11e9577 100644
--- a/pywikibot/fixes.py
+++ b/pywikibot/fixes.py
@@ -152,8 +152,10 @@
r'DOS/4GW', # Software
r'ntfs-3g', # Dateisystem-Treiber
r'/\w(,\w)*/', # Laut-Aufzählung in der Linguistik
- r'[xyz](,[xyz])+', # Variablen in der Mathematik (unklar, ob
Leerzeichen hier Pflicht sind)
- r'(?m)^;(.*?)$', # Definitionslisten, dort gibt es oft
absichtlich Leerzeichen vor Doppelpunkten
+ # Variablen in der Mathematik (unklar, ob Leerzeichen hier Pflicht sind)
+ r'[xyz](,[xyz])+',
+ # Definitionslisten, dort gibt es oft absichtlich Leerzeichen vor
Doppelpunkten
+ r'(?m)^;(.*?)$',
r'\d+h( | )\d+m',
# Schreibweise für Zeiten, vor allem in Film-Infoboxen.
# Nicht korrekt, aber dafür schön kurz.
@@ -191,7 +193,8 @@
# external link and description separated by a dash.
# ATTENTION: while this is a mistake in most cases, there are some
# valid URLs that contain dashes!
- (r'\[(?P<url>https?://[^\|\]\s]+?) *\|
*(?P<label>[^\|\]]+?)\]', r'[\g<url> \g<label>]'),
+ (r'\[(?P<url>https?://[^\|\]\s]+?) *\|
*(?P<label>[^\|\]]+?)\]',
+ r'[\g<url> \g<label>]'),
# wiki link closed by single bracket.
# ATTENTION: There are some false positives, for example
# Brainfuck code examples or MS-DOS parameter instructions.
@@ -243,7 +246,8 @@
# external link and description separated by a dash, with
# whitespace in front of the dash, so that it is clear that
# the dash is not a legitimate part of the URL.
- (r'\[(?P<url>https?://[^\|\] \r\n]+?) +\|
*(?P<label>[^\|\]]+?)\]', r'[\g<url> \g<label>]'),
+ (r'\[(?P<url>https?://[^\|\] \r\n]+?) +\|
*(?P<label>[^\|\]]+?)\]',
+ r'[\g<url> \g<label>]'),
# dash in external link, where the correct end of the URL can
# be detected from the file extension. It is very unlikely that
# this will cause mistakes.
@@ -270,9 +274,11 @@
'replacements': [
(r'\batlantische(r|n|) Ozean', r'Atlantische\1 Ozean'),
(r'\bdeutsche(r|n|) Bundestag\b', r'Deutsche\1 Bundestag'),
- (r'\bdeutschen Bundestags\b', r'Deutschen Bundestags'), #
Aufpassen, z. B. 'deutsche Bundestagswahl'
+ # Aufpassen, z. B. 'deutsche Bundestagswahl'
+ (r'\bdeutschen Bundestags\b', r'Deutschen Bundestags'),
(r'\bdeutsche(r|n|) Reich\b', r'Deutsche\1 Reich'),
- (r'\bdeutschen Reichs\b', r'Deutschen Reichs'), # Aufpassen,
z. B. 'deutsche Reichsgrenzen'
+ # Aufpassen, z. B. 'deutsche Reichsgrenzen'
+ (r'\bdeutschen Reichs\b', r'Deutschen Reichs'),
(r'\bdritte(n|) Welt(?!krieg)', r'Dritte\1 Welt'),
(r'\bdreißigjährige(r|n|) Krieg', r'Dreißigjährige\1
Krieg'),
(r'\beuropäische(n|) Gemeinschaft', r'Europäische\1
Gemeinschaft'),
@@ -350,7 +356,7 @@
# (u'†\[\[(\d)', u'† [[\\1'),
(r'\[\[(\d+\. (?:Januar|Februar|März|April|Mai|Juni|Juli|August|'
r'September|Oktober|November|Dezember)) (\d{1,4})\]\]', r'[[\1]]
[[\2]]'),
- # Keine führende Null beim Datum (ersteinmal nur bei denen, bei denen auch
ein Leerzeichen fehlt)
+ # Keine führende Null beim Datum (erst einmal nur bei fehlenden Leerzeichen)
(r'0(\d+)\.(Januar|Februar|März|April|Mai|Juni|Juli|August|'
r'September|Oktober|November|Dezember)', r'\1. \2'),
# Kein Leerzeichen zwischen Tag und Monat
diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py
index 017f558..825f166 100644
--- a/pywikibot/logentries.py
+++ b/pywikibot/logentries.py
@@ -138,7 +138,6 @@
def __init__(self, apidata, site):
"""Constructor."""
super(BlockEntry, self).__init__(apidata, site)
- # see
en.wikipedia.org/w/api.php?action=query&list=logevents&letype=block…
# When an autoblock is removed, the "title" field is not a page title
# (
https://bugzilla.wikimedia.org/show_bug.cgi?id=17781 )
pos = self.data['title'].find('#')
diff --git a/pywikibot/site.py b/pywikibot/site.py
index fc00d47..772eed4 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -4335,10 +4335,12 @@
"noimageredirect-anon":
"Bot is not logged in, and anon users are not authorized to create
"
"image redirects on %(site)s wiki",
- "noimageredirect": "User %(user)s not authorized to create image
redirects on %(site)s wiki",
+ 'noimageredirect': 'User %(user)s not authorized to create image
'
+ 'redirects on %(site)s wiki',
"filtered": "%(info)s",
"contenttoobig": "%(info)s",
- "noedit-anon": "Bot is not logged in, and anon users are not
authorized to edit on %(site)s wiki",
+ 'noedit-anon': 'Bot is not logged in, and anon users are not '
+ 'authorized to edit on %(site)s wiki',
"noedit": "User %(user)s not authorized to edit pages on %(site)s
wiki",
"missingtitle": NoCreateError,
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 9984f0f..1612436 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -1094,12 +1094,11 @@
if site is None:
site = pywikibot.Site()
if site.sitename() == 'wikipedia:de' and "{{Personendaten" in
oldtext:
- raise pywikibot.Error("""\
-The Pywikibot is no longer allowed to touch categories on the German
-Wikipedia on pages that contain the Personendaten template because of the
-non-standard placement of that template.
-See
https://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Posit…
-""")
+ raise pywikibot.Error(
+ 'The Pywikibot is no longer allowed to touch categories on the '
+ 'German\nWikipedia on pages that contain the Personendaten '
+ 'template because of the\nnon-standard placement of that
template.\n'
+ 'See
https://de.wikipedia.org/wiki/Hilfe:Personendaten#Kopiervorlage')
separator = site.family.category_text_separator
iseparator = site.family.interwiki_text_separator
separatorstripped = separator.strip()
diff --git a/pywikibot/userinterfaces/terminal_interface_win32.py
b/pywikibot/userinterfaces/terminal_interface_win32.py
index a8ff977..afe4841 100755
--- a/pywikibot/userinterfaces/terminal_interface_win32.py
+++ b/pywikibot/userinterfaces/terminal_interface_win32.py
@@ -88,11 +88,13 @@
lastColor = colorStack[-1]
else:
lastColor = 'default'
- ctypes.windll.kernel32.SetConsoleTextAttribute(std_out_handle,
windowsColors[lastColor])
+ ctypes.windll.kernel32.SetConsoleTextAttribute(
+ std_out_handle, windowsColors[lastColor])
else:
colorStack.append(newColor)
# set the new color
- ctypes.windll.kernel32.SetConsoleTextAttribute(std_out_handle,
windowsColors[newColor])
+ ctypes.windll.kernel32.SetConsoleTextAttribute(
+ std_out_handle, windowsColors[newColor])
text = text[tagM.end():]
# print the rest of the text
if PY2:
diff --git a/pywikibot/userinterfaces/win32_unicode.py
b/pywikibot/userinterfaces/win32_unicode.py
index ebb83a0..4af10bc 100755
--- a/pywikibot/userinterfaces/win32_unicode.py
+++ b/pywikibot/userinterfaces/win32_unicode.py
@@ -78,7 +78,8 @@
GetFileType = WINFUNCTYPE(DWORD, DWORD)(("GetFileType",
windll.kernel32))
FILE_TYPE_CHAR = 0x0002
FILE_TYPE_REMOTE = 0x8000
- GetConsoleMode = WINFUNCTYPE(BOOL, HANDLE,
POINTER(DWORD))(("GetConsoleMode", windll.kernel32))
+ GetConsoleMode = (WINFUNCTYPE(BOOL, HANDLE, POINTER(DWORD))
+ (("GetConsoleMode", windll.kernel32)))
INVALID_HANDLE_VALUE = DWORD(-1).value
def not_a_console(handle):
@@ -251,7 +252,8 @@
# This works around <http://bugs.python.org/issue2128>.
GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW",
windll.kernel32))
- CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR,
POINTER(c_int))(("CommandLineToArgvW", windll.shell32))
+ CommandLineToArgvW = (WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))
+ (("CommandLineToArgvW", windll.shell32)))
argc = c_int(0)
argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
diff --git a/scripts/blockreview.py b/scripts/blockreview.py
index 47b40bf..ad3391b 100755
--- a/scripts/blockreview.py
+++ b/scripts/blockreview.py
@@ -56,7 +56,8 @@
}
msg_user = {
- 'de': u'Bot: Administrator [[Benutzer:%(admin)s|%(admin)s]] für
Sperrprüfung benachrichtigt',
+ 'de': 'Bot: Administrator [[Benutzer:%(admin)s|%(admin)s]] für '
+ 'Sperrprüfung benachrichtigt',
}
msg_done = {
diff --git a/scripts/cfd.py b/scripts/cfd.py
index 5a6eaca..af6edcf 100755
--- a/scripts/cfd.py
+++ b/scripts/cfd.py
@@ -43,11 +43,14 @@
emptymode = re.compile(r"^===*\s*Empty then delete\s*===*\s*$", re.IGNORECASE)
deletemode = re.compile(r"^===*\s*Ready for deletion\s*===*\s*$",
re.IGNORECASE)
maintenance = re.compile(r"^===*\s*Old by month categories with
entries\s*===*\s*$", re.IGNORECASE)
-dateheader = re.compile(r"(\[\[Wikipedia:Categories[_ ]for[_
](?:discussion|deletion)/Log/([^\]]*?)\]\])",
- re.IGNORECASE)
-movecat =
re.compile(r"\[\[:Category:([^\]]*?)\]\][^\]]*?\[\[:Category:([^\]]*?)\]\]",
re.IGNORECASE)
+dateheader = re.compile(
+ r'(\[\[Wikipedia:Categories[_ ]for[_
](?:discussion|deletion)/Log/([^\]]*?)\]\])',
+ re.IGNORECASE)
+movecat =
re.compile(r'\[\[:Category:([^\]]*?)\]\][^\]]*?\[\[:Category:([^\]]*?)\]\]',
+ re.IGNORECASE)
deletecat = re.compile(r"\[\[:Category:([^\]]*?)\]\]", re.IGNORECASE)
-findday = re.compile(r"\[\[(Wikipedia:Categories for
(?:discussion|deletion)/Log/\d{4} \w+ \d+)#", re.IGNORECASE)
+findday = re.compile(r'\[\[(Wikipedia:Categories for
(?:discussion|deletion)/Log/\d{4} \w+ \d+)#',
+ re.IGNORECASE)
class ReCheck:
@@ -144,7 +147,8 @@
# easier to call delete.py on it.
thisDay = findDay(src, day)
if (mode == "Empty" or mode == "Delete") and thisDay !=
"None":
- summary = "Robot - Removing category " + src + " per
[[WP:CFD|CFD]] at " + thisDay + "."
+ summary = 'Robot - Removing category {0} per [[WP:CFD|CFD]] at
{1}.'.format(
+ src, thisDay)
else:
continue
robot = category.CategoryMoveRobot(oldcat=src, batch=True, comment=summary,
@@ -191,7 +195,8 @@
elif paramName == 'year':
year = paramVal
if day and month and year:
- return "[[Wikipedia:Categories for discussion/Log/%s %s
%s]]" % (year, month, day)
+ return ('[[Wikipedia:Categories for discussion/Log/%s %s
%s]]'
+ % (year, month, day))
return oldDay
if __name__ == "__main__":
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 627a1a7..4afe309 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -185,7 +185,8 @@
'fr': u'{{Bienvenue nouveau\n~~~~\n',
'ga': u'{{subst:Fáilte}} - ~~~~\n',
'hu': u'{{subst:Üdvözlet|~~~~}}\n',
- 'it': u'<!-- inizio template di benvenuto
-->\n{{subst:Benvebot}}\n~~~~\n<!-- fine template di benvenuto -->',
+ 'it': '<!-- inizio template di benvenuto
-->\n{{subst:Benvebot}}\n~~~~\n'
+ '<!-- fine template di benvenuto -->',
'ja': u'{{subst:Welcome/intro}}\n{{subst:welcome|--~~~~}}\n',
'ko': u'{{환영}}--~~~~\n',
'ta': u'{{welcome}}\n~~~~\n',
@@ -254,7 +255,8 @@
'hu': u'A [[:Kép:%s]] fájlnak rossz a kiterjesztése, kérlek ellenőrízd.
~~~~',
'it':
u'{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Ext|%s|__botnick__}}
--~~~~',
'ko': u'[[:그림:%s]]의 파일 형식이 잘못되었습니다. 확인 바랍니다.--~~~~',
- 'ta': u'[[:படிமம்:%s]] இனங்காணப்படாத கோப்பு நீட்சியை கொண்டுள்ளது தயவு
செய்து ஒரு முறை சரி பார்க்கவும் ~~~~',
+ 'ta': '[[:படிமம்:%s]] இனங்காணப்படாத கோப்பு நீட்சியை கொண்டுள்ளது தயவு
செய்து ஒ'
+ 'ரு முறை சரி பார்க்கவும் ~~~~',
'ur': u'ملف [[:File:%s]] کی توسیع شاید درست نہیں ہے، براہ کرم جانچ لیں۔
~~~~',
'zh': u'您好,你上傳的[[:File:%s]]無法被識別,請檢查您的檔案,謝謝。--~~~~',
}
@@ -308,7 +310,8 @@
'hu': u"{{subst:adjforrást|Kép:%s}} \n Ezt az üzenetet ~~~ automatikusan
"
u"helyezte el a vitalapodon, kérdéseddel fordulj a gazdájához, vagy
"
u"a [[WP:KF|Kocsmafalhoz]]. --~~~~",
- 'it': u"{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Senza
licenza|%s|__botnick__}} --~~~~",
+ 'it': '{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Senza
licenza|'
+ '%s|__botnick__}} --~~~~',
'ja': u"\n{{subst:Image copyright|File:%s}}--~~~~",
'ko': u'\n{{subst:User:Kwjbot IV/untagged|%s}} --~~~~',
'ta': u'\n{{subst:Di-no license-notice|படிமம்:%s}} ~~~~',
@@ -466,7 +469,8 @@
u"[[File:Human-help-browser.svg|18px|link=Commons:Help
desk|?]]"
u" '''[[Commons:Help desk|→]] [[Commons:Help
desk]]''' in any "
u"language you like to use.'' --__botnick__ ~~~~~"),
- 'it':
u"{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Template_insufficiente|%s|__botnick__}}
--~~~~",
+ 'it': '{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/'
+ 'Template_insufficiente|%s|__botnick__}} --~~~~',
'ko': u"\n{{subst:User:Kwj2772/whitetemplates|%s}} --~~~~",
}
@@ -489,7 +493,8 @@
# Message to put in the talk
duplicates_user_talk_text = {
'commons': u'{{subst:User:Filnik/duplicates|File:%s|File:%s}}', #
FIXME: it doesn't exist
- 'it':
u"{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Duplicati|%s|%s|__botnick__}}
--~~~~",
+ 'it':
'{{subst:Progetto:Coordinamento/Immagini/Bot/Messaggi/Duplicati|'
+ '%s|%s|__botnick__}} --~~~~',
}
# Comment used by the bot while it reports the problem in the uploader's talk
@@ -549,7 +554,8 @@
# where to send the warning-msg
uploadBots = {
'commons': [['File Upload Bot (Magnus Manske)',
- r'\|[Ss]ource=Transferred from .*?; transferred to Commons by
\[\[User:(.*?)\]\]']],
+ r'\|[Ss]ource=Transferred from .*?; '
+ r'transferred to Commons by \[\[User:(.*?)\]\]']],
}
# Service images that don't have to be deleted and/or reported has a template
diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py
index 78821f2..b06d264 100755
--- a/scripts/fixing_redirects.py
+++ b/scripts/fixing_redirects.py
@@ -129,7 +129,8 @@
mysite = pywikibot.Site()
if mysite.sitename() == 'wikipedia:nl':
pywikibot.output(
- u'\03{lightred}There is consensus on the Dutch Wikipedia that bots should
not be used to fix redirects.\03{default}')
+ '\03{lightred}There is consensus on the Dutch Wikipedia that '
+ 'bots should not be used to fix redirects.\03{default}')
sys.exit()
if featured:
diff --git a/scripts/flickrripper.py b/scripts/flickrripper.py
index 4db896d..cd79724 100755
--- a/scripts/flickrripper.py
+++ b/scripts/flickrripper.py
@@ -207,10 +207,11 @@
% (title, project, username)).exists():
i = 1
while True:
- if pywikibot.Page(site, u'File:%s - %s - %s (%d).jpg' % (title,
project, username, i)).exists():
+ name = '%s - %s - %s (%d).jpg' % (title, project, username, i)
+ if pywikibot.Page(site, 'File:' + name).exists():
i += 1
else:
- return u'%s - %s - %s (%d).jpg' % (title, project, username, i)
+ return name
else:
return u'%s - %s - %s.jpg' % (title, project, username)
@@ -259,15 +260,17 @@
description = description.replace(u'{{cc-by-2.0}}\n', u'')
description = description.replace(u'{{flickrreview}}\n', u'')
description = description.replace(
- u'{{copyvio|Flickr, licensed as "All Rights Reserved" which is
not a free license --~~~~}}\n',
- u'')
+ '{{copyvio|Flickr, licensed as "All Rights Reserved" which is
not '
+ 'a free license --~~~~}}\n',
+ '')
description = description.replace(u'=={{int:license}}==',
u'=={{int:license}}==\n' + override)
elif flickrreview:
if reviewer:
- description = description.replace(u'{{flickrreview}}',
- u'{{flickrreview|' + reviewer +
-
'|{{subst:CURRENTYEAR}}-{{subst:CURRENTMONTH}}-{{subst:CURRENTDAY2}}}}')
+ description = description.replace(
+ '{{flickrreview}}',
+ '{{flickrreview|' + reviewer +
+
'|{{subst:CURRENTYEAR}}-{{subst:CURRENTMONTH}}-{{subst:CURRENTDAY2}}}}')
if addCategory:
description = description.replace(u'{{subst:unc}}\n', u'')
description = description + u'\n[[Category:' + addCategory +
']]\n'
@@ -309,7 +312,8 @@
pywikibot.warning('Switching to autonomous mode.')
autonomous = True
elif not autonomous:
- pywikibot.warning('Switching to autonomous mode because GUI interface
cannot be used')
+ pywikibot.warning('Switching to autonomous mode because GUI '
+ 'interface cannot be used')
pywikibot.warning(_tk_error)
autonomous = True
if autonomous:
@@ -318,7 +322,7 @@
skip = False
# pywikibot.output(newPhotoDescription)
# if (pywikibot.Page(title=u'File:'+ filename,
site=pywikibot.Site()).exists()):
- # I should probably check if the hash is the same and if not upload it under a
different name
+ # TODO: Check if the hash is the same and if not upload it under a different
name
# pywikibot.output(u'File:' + filename + u' already exists!')
# else:
# Do the actual upload
@@ -431,7 +435,8 @@
flickr = flickrapi.FlickrAPI(config.flickr['api_key'],
config.flickr['api_secret'])
(token, frob) = flickr.get_token_part_one(perms='read')
if not token:
- # The user still hasn't authorised this app yet, get_token_part_one()
will have spawn a browser window
+ # The user still hasn't authorised this app yet, get_token_part_one()
+ # will have spawn a browser window
pywikibot.input("Press ENTER after you authorized this program")
flickr.get_token_part_two((token, frob))
else:
diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py
index c131a23..79a52c7 100755
--- a/scripts/harvest_template.py
+++ b/scripts/harvest_template.py
@@ -71,8 +71,8 @@
if temp.isRedirectPage():
temp = temp.getRedirectTarget()
titles = [page.title(withNamespace=False)
- for page
- in temp.getReferences(redirectsOnly=True, namespaces=[10],
follow_redirects=False)]
+ for page in temp.getReferences(redirectsOnly=True, namespaces=[10],
+ follow_redirects=False)]
titles.append(temp.title(withNamespace=False))
return titles
@@ -161,7 +161,8 @@
claim.setTarget(value.strip())
elif claim.type == 'commonsMedia':
commonssite = pywikibot.Site("commons",
"commons")
- imagelink = pywikibot.Link(value, source=commonssite,
defaultNamespace=6)
+ imagelink = pywikibot.Link(value, source=commonssite,
+ defaultNamespace=6)
image = pywikibot.FilePage(imagelink)
if image.isRedirectPage():
image =
pywikibot.FilePage(image.getRedirectTarget())
@@ -173,7 +174,8 @@
pywikibot.output("%s is not a supported
datatype." % claim.type)
continue
- pywikibot.output('Adding %s --> %s' %
(claim.getID(), claim.getTarget()))
+ pywikibot.output('Adding %s --> %s'
+ % (claim.getID(), claim.getTarget()))
item.addClaim(claim)
# A generator might yield pages from multiple sites
source = self.getSource(page.site)
diff --git a/scripts/illustrate_wikidata.py b/scripts/illustrate_wikidata.py
index cc1e4e2..4c7666e 100755
--- a/scripts/illustrate_wikidata.py
+++ b/scripts/illustrate_wikidata.py
@@ -65,7 +65,8 @@
claims = item.get().get('claims')
if self.wdproperty in claims:
- pywikibot.output(u'Item %s already contains image (%s)' %
(item.title(), self.wdproperty))
+ pywikibot.output('Item %s already contains image (%s)'
+ % (item.title(), self.wdproperty))
return
newclaim = pywikibot.Claim(self.repo, self.wdproperty)
diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py
index 657bcbd..fd57caf 100755
--- a/scripts/imagerecat.py
+++ b/scripts/imagerecat.py
@@ -248,7 +248,8 @@
elif addresspart.tag in invalidParts:
pywikibot.output(u'Dropping %s, %s' % (addresspart.tag,
addresspart.text))
else:
- pywikibot.warning(u'%s, %s is not in addressparts lists' %
(addresspart.tag, addresspart.text))
+ pywikibot.warning('%s, %s is not in addressparts lists'
+ % (addresspart.tag, addresspart.text))
return result
@@ -409,7 +410,8 @@
if onlyFilter:
comment = u'Filtering categories'
else:
- comment = u'Image is categorized by a bot using data from
[[Commons:Tools#CommonSense|CommonSense]]'
+ comment = ('Image is categorized by a bot using data from '
+ '[[Commons:Tools#CommonSense|CommonSense]]')
pywikibot.showDiff(imagepage.text, newtext)
imagepage.text = newtext
imagepage.save(comment)
@@ -429,7 +431,8 @@
def getCheckCategoriesTemplate(usage, galleries, ncats):
"""Build the check categories template with all
parameters."""
- result = u'{{Check
categories|year={{subst:CURRENTYEAR}}|month={{subst:CURRENTMONTHNAME}}|day={{subst:CURRENTDAY}}\n'
+ result = ('{{Check categories|year={{subst:CURRENTYEAR}}|month={{subst:'
+ 'CURRENTMONTHNAME}}|day={{subst:CURRENTDAY}}\n')
usageCounter = 1
for (lang, project, article) in usage:
result += u'|lang%d=%s' % (usageCounter, lang)
diff --git a/scripts/imageuncat.py b/scripts/imageuncat.py
index 85a6d7e..73388e3 100755
--- a/scripts/imageuncat.py
+++ b/scripts/imageuncat.py
@@ -1247,7 +1247,8 @@
u'Zxx',
]
-puttext =
u'\n{{Uncategorized|year={{subst:CURRENTYEAR}}|month={{subst:CURRENTMONTHNAME}}|day={{subst:CURRENTDAY}}}}'
+puttext = ('\n{{Uncategorized|year={{subst:CURRENTYEAR}}|'
+ 'month={{subst:CURRENTMONTHNAME}}|day={{subst:CURRENTDAY}}}}')
putcomment = u'Please add categories to this image'
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index b8084d7..fb05532 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1436,15 +1436,21 @@
f.write(u"* %s {Found more than one link for %s}"
% (self.originPage, page.site))
if config.interwiki_graph and config.interwiki_graph_url:
- filename = interwiki_graph.getFilename(self.originPage,
extension=config.interwiki_graph_formats[0])
- f.write(u" [%s%s graph]" % (config.interwiki_graph_url,
filename))
+ filename = interwiki_graph.getFilename(
+ self.originPage,
+ extension=config.interwiki_graph_formats[0])
+ f.write(
+ ' [%s%s graph]'
+ % (config.interwiki_graph_url, filename))
f.write("\n")
f.close()
# FIXME: What errors are we catching here?
# except: should be avoided!!
except:
# raise
- pywikibot.output(u'File autonomous_problems.dat open or
corrupted! Try again with -restore.')
+ pywikibot.output(
+ 'File autonomous_problems.dat open or corrupted! '
+ 'Try again with -restore.')
sys.exit()
iw = ()
elif page.isEmpty() and not page.isCategory():
@@ -1473,9 +1479,9 @@
if prevPage != linkedPage and prevPage.site == lpsite:
# Still, this could be "no problem" as
either may be a
# redirect to the other. No way to find out quickly!
- pywikibot.output(u"NOTE: %s: %s gives duplicate
interwiki on same site %s"
- % (self.originPage, page,
- linkedPage))
+ pywikibot.output(
+ 'NOTE: %s: %s gives duplicate interwiki on
same site %s'
+ % (self.originPage, page, linkedPage))
break
else:
if config.interwiki_shownew:
@@ -1673,7 +1679,8 @@
(not frgnSiteDone and site != lclSite and site in new):
if site == lclSite:
lclSiteDone = True # even if we fail the update
- if site.family.name in config.usernames and site.code in
config.usernames[site.family.name]:
+ if (site.family.name in config.usernames and
+ site.code in config.usernames[site.family.name]):
try:
if self.replaceLinks(new[site], new):
updatedSites.append(site)
@@ -1852,7 +1859,8 @@
new.pop(ignorepage.site)
else:
pywikibot.output(
- u"NOTE: Not removing interwiki from %(from)s to %(to)s
(exists both commented and non-commented)"
+ 'NOTE: Not removing interwiki from %(from)s to '
+ '%(to)s (exists both commented and non-commented)'
% {'to': ignorepage,
'from': page})
except KeyError:
@@ -2174,7 +2182,8 @@
if globalvar.skipauto:
dictName, year = page.autoFormat()
if dictName is not None:
- pywikibot.output(u'Skipping: %s is an auto entry
%s(%s)' % (page, dictName, year))
+ pywikibot.output('Skipping: %s is an auto entry
%s(%s)'
+ % (page, dictName, year))
continue
if globalvar.parenthesesonly:
# Only yield pages that have ( ) in titles
diff --git a/scripts/isbn.py b/scripts/isbn.py
index b5357d1..aafd4dc 100755
--- a/scripts/isbn.py
+++ b/scripts/isbn.py
@@ -67,8 +67,8 @@
'¶ms;': pagegenerators.parameterHelp,
}
-# Maps each group number to the list of its publisher number ranges.
-# Taken from
https://web.archive.org/web/20090823122028/http://www.isbn-international.or…
+# Maps each group number to the list of its publisher number ranges. Taken from:
+#
https://web.archive.org/web/20090823122028/http://www.isbn-international.or…
ranges = {
'0': [ # English speaking area
('00', '19'),
diff --git a/scripts/login.py b/scripts/login.py
index 1e87a4e..f51554d 100755
--- a/scripts/login.py
+++ b/scripts/login.py
@@ -79,8 +79,9 @@
for arg in pywikibot.handle_args(args):
if arg.startswith("-pass"):
if len(arg) == 5:
- password = pywikibot.input(u'Password for all accounts (no characters
will be shown):',
- password=True)
+ password = pywikibot.input(
+ 'Password for all accounts (no characters will be shown):',
+ password=True)
else:
password = arg[6:]
elif arg == "-sysop":
diff --git a/scripts/makecat.py b/scripts/makecat.py
index ded4a20..183fa04 100755
--- a/scripts/makecat.py
+++ b/scripts/makecat.py
@@ -213,8 +213,8 @@
u'%s:%s'
% (mysite.category_namespace(),
workingcatname))
- filename = pywikibot.config.datafilepath('category',
- workingcatname.encode('ascii',
'xmlcharrefreplace') + '_exclude.txt')
+ filename = pywikibot.config.datafilepath(
+ 'category', workingcatname.encode('ascii',
'xmlcharrefreplace') + '_exclude.txt')
try:
f = codecs.open(filename, 'r', encoding=mysite.encoding())
for line in f.readlines():
diff --git a/scripts/noreferences.py b/scripts/noreferences.py
index 11f03b0..bbefca6 100755
--- a/scripts/noreferences.py
+++ b/scripts/noreferences.py
@@ -366,7 +366,8 @@
# on your wiki, you don't have to enter anything here.
referencesTemplates = {
'wikipedia': {
- 'ar': [u'Reflist', u'مراجع', u'ثبت المراجع',
u'ثبت_المراجع', u'بداية المراجع', u'نهاية المراجع'],
+ 'ar': ['Reflist', 'مراجع', 'ثبت المراجع',
'ثبت_المراجع',
+ 'بداية المراجع', 'نهاية المراجع'],
'be': [u'Зноскі', u'Примечания', u'Reflist',
u'Спіс заўваг',
u'Заўвагі'],
'be-x-old': [u'Зноскі'],
diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py
index b462ec7..cfc6bfb 100755
--- a/scripts/nowcommons.py
+++ b/scripts/nowcommons.py
@@ -404,7 +404,8 @@
if sha1 == commonsImagePage.latest_file_info.sha1:
pywikibot.output(
u'The image is identical to the one on Commons.')
- if len(localImagePage.getFileVersionHistory()) > 1 and not
self.getOption('use_hash'):
+ if (len(localImagePage.getFileVersionHistory()) > 1 and
+ not self.getOption('use_hash')):
pywikibot.output(
u"This image has a version history. Please \
delete it manually after making sure that the \
diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 197c445..9dc052a 100755
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -461,7 +461,8 @@
self.TITLE =
re.compile(r'(?is)(?<=<title>).*?(?=</title>)')
# Matches content inside <script>/<style>/HTML comments
self.NON_HTML = re.compile(
-
br'(?is)<script[^>]*>.*?</script>|<style[^>]*>.*?</style>|<!--.*?-->|<!\[CDATA\[.*?\]\]>')
+
br'(?is)<script[^>]*>.*?</script>|<style[^>]*>.*?</style>|'
+ br'<!--.*?-->|<!\[CDATA\[.*?\]\]>')
# Authorized mime types for HTML pages
self.MIME = re.compile(
diff --git a/scripts/replace.py b/scripts/replace.py
index 31188d8..a2ceb17 100755
--- a/scripts/replace.py
+++ b/scripts/replace.py
@@ -644,7 +644,8 @@
if choice == 'a':
self.options['always'] = True
if choice == 'y':
- page.put_async(new_text, self.generate_summary(applied),
callback=self.count_changes)
+ page.put_async(new_text, self.generate_summary(applied),
+ callback=self.count_changes)
# choice must be 'N'
break
if self.getOption('always') and new_text != original_text:
diff --git a/scripts/revertbot.py b/scripts/revertbot.py
index c905f1f..e283477 100755
--- a/scripts/revertbot.py
+++ b/scripts/revertbot.py
@@ -97,7 +97,9 @@
rev = history[1]
else:
return False
- comment = i18n.twtranslate(pywikibot.Site(), 'revertbot-revert',
{'revid': rev[0], 'author': rev[2], 'timestamp': rev[1]})
+ comment = i18n.twtranslate(
+ pywikibot.Site(), 'revertbot-revert',
+ {'revid': rev[0], 'author': rev[2], 'timestamp':
rev[1]})
if self.comment:
comment += ': ' + self.comment
page = pywikibot.Page(self.site, item['title'])
diff --git a/scripts/script_wui.py b/scripts/script_wui.py
index aa87b53..d1eaf4e 100755
--- a/scripts/script_wui.py
+++ b/scripts/script_wui.py
@@ -50,7 +50,8 @@
#
# # patches to keep code running
# builtin_raw_input = __builtin__.raw_input
-# __builtin__.raw_input = lambda: 'n' # overwrite 'raw_input' to run
bot non-blocking and simulation mode
+# # overwrite 'raw_input' to run bot non-blocking and simulation mode
+# __builtin__.raw_input = lambda: 'n'
#
# # backup sys.argv; depreciated: if possible manipulate pywikibot.config instead
# sys_argv = copy.deepcopy( sys.argv )
@@ -98,7 +99,7 @@
'BotName':
pywikibot.config.usernames[pywikibot.config.family][pywikibot.config.mylang],
# protected !!! ('CSS' or other semi-protected page is essential here)
- 'ConfCSSshell': u'User:DrTrigon/DrTrigonBot/script_wui-shell.css',
# u'User:DrTrigonBot/Simon sagt' ?
+ 'ConfCSSshell': 'User:DrTrigon/DrTrigonBot/script_wui-shell.css',
'ConfCSScrontab':
u'User:DrTrigon/DrTrigonBot/script_wui-crontab.css',
# (may be protected but not that important... 'CSS' is not needed here !!!)
@@ -194,7 +195,9 @@
# (date supported only, thus [min] and [hour] dropped)
entry = crontab.CronTab(timestmp)
# find the delay from current minute (does not return 0.0 - but next)
- delay = entry.next(datetime.datetime.now().replace(second=0, microsecond=0) -
datetime.timedelta(microseconds=1))
+ now = datetime.datetime.now().replace(second=0, microsecond=0)
+ delay = entry.next(
+ now - datetime.timedelta(microseconds=1))
if (delay <= bot_config['CRONMaxDelay']):
pywikibot.output(u"CRONTAB: %s / %s / %s" % (page, rev,
timestmp))
diff --git a/scripts/transferbot.py b/scripts/transferbot.py
index 239a991..c849376 100755
--- a/scripts/transferbot.py
+++ b/scripts/transferbot.py
@@ -32,7 +32,7 @@
"""
#
# (C) Merlijn van Deen, 2014
-# (C) Pywikibot team, 2014
+# (C) Pywikibot team, 2015
#
# Distributed under the terms of the MIT license.
#
@@ -151,9 +151,10 @@
pywikibot.log("Getting page text.")
text = page.get(get_redirect=True)
- text += "<noinclude>\n\n<small>This page was moved from %s.
It's edit history can be viewed at %s</small></noinclude>" % (
- page.title(asLink=True, insite=targetpage.site),
- edithistpage.title(asLink=True, insite=targetpage.site))
+ text += ("<noinclude>\n\n<small>This page was moved from %s.
It's "
+ "edit history can be viewed at
%s</small></noinclude>"
+ % (page.title(asLink=True, insite=targetpage.site),
+ edithistpage.title(asLink=True, insite=targetpage.site)))
pywikibot.log("Getting edit history.")
historytable = page.getVersionHistoryTable()
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py
index e3c8caf..28ac685 100755
--- a/scripts/weblinkchecker.py
+++ b/scripts/weblinkchecker.py
@@ -276,8 +276,10 @@
# we fake being Firefox because some webservers block unknown
# clients, e.g.
https://images.google.de/images?q=Albit gives a 403
# when using the Pywikibot user agent.
- 'User-agent': 'Mozilla/5.0 (X11; U; Linux i686; de; rv:1.8)
Gecko/20051128 SUSE/1.5-0.1 Firefox/1.5',
- 'Accept':
'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
+ 'User-agent': 'Mozilla/5.0 (X11; U; Linux i686; de; rv:1.8)
'
+ 'Gecko/20051128 SUSE/1.5-0.1 Firefox/1.5',
+ 'Accept': 'text/xml,application/xml,application/xhtml+xml,'
+ 'text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
'Accept-Language': 'de-de,de;q=0.8,en-us;q=0.5,en;q=0.3',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
'Keep-Alive': '30',
diff --git a/scripts/welcome.py b/scripts/welcome.py
index b77d9e2..baabfa3 100755
--- a/scripts/welcome.py
+++ b/scripts/welcome.py
@@ -215,7 +215,7 @@
# The page where the bot will save the log (e.g. Wikipedia:Welcome log).
#
-# ATTENTION: Not listed projects are disabled to log welcomed users, and no necessary to
set details.
+# ATTENTION: Projects not listed won't write a log to the wiki.
logbook = {
'ar': u'Project:سجل الترحيب',
'fr': u'Wikipedia:Prise de décision/Accueil automatique des nouveaux par
un robot/log',
@@ -287,7 +287,9 @@
}
# The page where the bot will report users with a possibly bad username.
report_page = {
- 'commons': {'_default': u'Project:Administrators\'
noticeboard/User problems/Usernames to be checked', },
+ 'commons': {
+ '_default': 'Project:Administrators\' noticeboard/User
problems/Usernames to be checked',
+ },
'wikipedia': {
'am': u'User:Beria/Report',
'ar': 'Project:إخطار الإداريين/أسماء مستخدمين للفحص',
diff --git a/tests/data_ingestion_tests.py b/tests/data_ingestion_tests.py
index 8c131ab..17e96ac 100644
--- a/tests/data_ingestion_tests.py
+++ b/tests/data_ingestion_tests.py
@@ -33,14 +33,15 @@
def setUp(self):
super(TestPhoto, self).setUp()
- self.obj =
data_ingestion.Photo(URL='http://upload.wikimedia.org/wikipedia/commons…png',
- metadata={'description.en':
'"Sounds" icon',
- 'source':
'http://commons.wikimedia.org/wiki/File:Sound-icon.svg',
- 'author': 'KDE artists |
Silstor',
- 'license': 'LGPL',
- 'set': 'Crystal SVG icon
set',
- 'name': 'Sound icon'},
- site=self.get_site('commons'))
+ self.obj = data_ingestion.Photo(
+
URL='http://upload.wikimedia.org/wikipedia/commons/f/fc/MP_sounds.png…png',
+ metadata={'description.en': '"Sounds" icon',
+ 'source':
'http://commons.wikimedia.org/wiki/File:Sound-icon.svg',
+ 'author': 'KDE artists | Silstor',
+ 'license': 'LGPL',
+ 'set': 'Crystal SVG icon set',
+ 'name': 'Sound icon'},
+ site=self.get_site('commons'))
def test_downloadPhoto(self):
"""Test download from
http://upload.wikimedia.org/."""
@@ -53,7 +54,8 @@
self.assertIn('MP sounds.png', [dup.replace("_", " ")
for dup in duplicates])
def test_getTitle(self):
- self.assertEqual(self.obj.getTitle("%(name)s - %(set)s.%(_ext)s"),
"Sound icon - Crystal SVG icon set.png")
+ self.assertEqual(self.obj.getTitle('%(name)s - %(set)s.%(_ext)s'),
+ 'Sound icon - Crystal SVG icon set.png')
def test_getDescription(self):
self.assertEqual(self.obj.getDescription('CrystalTemplate'),
@@ -82,10 +84,12 @@
self.obj = next(self.iterator)
def test_PhotoURL(self):
- self.assertEqual(self.obj.URL,
'http://upload.wikimedia.org/wikipedia/commons/f/fc/MP_sounds.png')
+ self.assertEqual(self.obj.URL,
+
'http://upload.wikimedia.org/wikipedia/commons/f/fc/MP_sounds.png')
def test_getTitle(self):
- self.assertEqual(self.obj.getTitle("%(name)s - %(set)s.%(_ext)s"),
"Sound icon - Crystal SVG icon set.png")
+ self.assertEqual(self.obj.getTitle('%(name)s - %(set)s.%(_ext)s'),
+ 'Sound icon - Crystal SVG icon set.png')
def test_getDescription(self):
self.assertEqual(self.obj.getDescription('CrystalTemplate'),
diff --git a/tests/date_tests.py b/tests/date_tests.py
index c9560ee..340fb8d 100644
--- a/tests/date_tests.py
+++ b/tests/date_tests.py
@@ -77,14 +77,16 @@
self.assertEqual(datetime(2012, 3, 10), date.apply_month_delta(datetime(2012, 1,
10), 2))
self.assertEqual(datetime(2012, 3, 31), date.apply_month_delta(datetime(2012, 1,
31), 2))
self.assertEqual(datetime(2012, 2, 29), date.apply_month_delta(datetime(2012, 1,
31)))
- self.assertEqual(datetime(2012, 3, 2), date.apply_month_delta(datetime(2012, 1,
31), add_overlap=True))
+ self.assertEqual(datetime(2012, 3, 2), date.apply_month_delta(datetime(2012, 1,
31),
+ add_overlap=True))
def test_apply_negative_delta(self):
"""Test adding months to a date."""
self.assertEqual(datetime(2012, 1, 10), date.apply_month_delta(datetime(2012, 3,
10), -2))
self.assertEqual(datetime(2012, 1, 31), date.apply_month_delta(datetime(2012, 3,
31), -2))
self.assertEqual(datetime(2012, 2, 29), date.apply_month_delta(datetime(2012, 3,
31), -1))
- self.assertEqual(datetime(2012, 3, 2), date.apply_month_delta(datetime(2012, 3,
31), -1, add_overlap=True))
+ self.assertEqual(datetime(2012, 3, 2), date.apply_month_delta(datetime(2012, 3,
31), -1,
+ add_overlap=True))
def test_get_delta(self):
"""Test that the delta is calculated correctly."""
diff --git a/tests/deprecation_tests.py b/tests/deprecation_tests.py
index eccac70..c13a229 100644
--- a/tests/deprecation_tests.py
+++ b/tests/deprecation_tests.py
@@ -393,7 +393,9 @@
rv = func(bah='b')
self.assertEqual(rv, 'b')
- self.assertDeprecation('bah argument of ' + __name__ + '.' +
func.__name__ + ' is deprecated; use foo instead.')
+ self.assertDeprecation(
+ 'bah argument of ' + __name__ + '.' + func.__name__ +
' is '
+ 'deprecated; use foo instead.')
self._reset_messages()
@@ -427,13 +429,15 @@
rv = deprecated_func_arg3(3, loud='3')
self.assertEqual(rv, 3)
- self.assertDeprecation('loud argument of ' + __name__ +
'.deprecated_func_arg3 is deprecated.')
+ self.assertDeprecation('loud argument of ' + __name__ +
+ '.deprecated_func_arg3 is deprecated.')
self._reset_messages()
rv = deprecated_func_arg3(4, old='4')
self.assertEqual(rv, 4)
- self.assertDeprecation('old argument of ' + __name__ +
'.deprecated_func_arg3 is deprecated.')
+ self.assertDeprecation('old argument of ' + __name__ +
+ '.deprecated_func_arg3 is deprecated.')
self._reset_messages()
diff --git a/tests/dry_api_tests.py b/tests/dry_api_tests.py
index 8225a9b..b674f72 100644
--- a/tests/dry_api_tests.py
+++ b/tests/dry_api_tests.py
@@ -57,7 +57,8 @@
self.diffsite = CachedRequest(expiry=1, site=self.altsite,
action='query', meta='userinfo')
def test_expiry_formats(self):
- self.assertEqual(self.req.expiry, CachedRequest(datetime.timedelta(days=1),
**self.parms).expiry)
+ self.assertEqual(self.req.expiry,
+ CachedRequest(datetime.timedelta(days=1), **self.parms).expiry)
def test_expired(self):
self.assertFalse(self.req._expired(datetime.datetime.now()))
diff --git a/tests/http_tests.py b/tests/http_tests.py
index 913fb8c..9ecebae 100644
--- a/tests/http_tests.py
+++ b/tests/http_tests.py
@@ -241,7 +241,8 @@
def setUp(self):
self.orig_format = config.user_agent_format
- config.user_agent_format = '{script_product} ({script_comments}) {pwb}
({revision}) {http_backend} {python}'
+ config.user_agent_format = ('{script_product} ({script_comments}) {pwb}
'
+ '({revision}) {http_backend} {python}')
def tearDown(self):
config.user_agent_format = self.orig_format
diff --git a/tests/i18n_tests.py b/tests/i18n_tests.py
index 791a455..aeb214e 100644
--- a/tests/i18n_tests.py
+++ b/tests/i18n_tests.py
@@ -277,13 +277,14 @@
def testMultipleWrongParameterLength(self):
"""Test wrong parameter length."""
- with self.assertRaisesRegex(ValueError, "Length of parameter does not match
PLURAL occurrences"):
+ err_msg = 'Length of parameter does not match PLURAL occurrences'
+ with self.assertRaisesRegex(ValueError, err_msg):
self.assertEqual(
i18n.twntranslate('de', 'test-multiple-plurals', (1, 2))
% {'action': u'Ändere', 'line':
u'drei'},
u'Bot: Ändere drei Zeilen von mehreren Seiten.')
- with self.assertRaisesRegex(ValueError, "Length of parameter does not match
PLURAL occurrences"):
+ with self.assertRaisesRegex(ValueError, err_msg):
self.assertEqual(
i18n.twntranslate('de', 'test-multiple-plurals',
["321"])
% {'action': u'Ändere', 'line':
u'dreihunderteinundzwanzig'},
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index 4211188..dc55725 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -278,8 +278,9 @@
site = self.get_site()
titles = list(pagegenerators.TextfilePageGenerator(filename, site))
self.assertEqual(len(titles), len(self.expected_titles))
- expected_titles =
[expected_title[self.title_columns[site.namespaces[page.namespace()].case]]
- for expected_title, page in zip(self.expected_titles,
titles)]
+ expected_titles = [
+ expected_title[self.title_columns[site.namespaces[page.namespace()].case]]
+ for expected_title, page in zip(self.expected_titles, titles)]
self.assertPageTitlesEqual(titles, expected_titles)
def test_lines(self):
@@ -287,8 +288,9 @@
site = self.get_site()
titles = list(pagegenerators.TextfilePageGenerator(filename, site))
self.assertEqual(len(titles), len(self.expected_titles))
- expected_titles =
[expected_title[self.title_columns[site.namespaces[page.namespace()].case]]
- for expected_title, page in zip(self.expected_titles,
titles)]
+ expected_titles = [
+ expected_title[self.title_columns[site.namespaces[page.namespace()].case]]
+ for expected_title, page in zip(self.expected_titles, titles)]
self.assertPageTitlesEqual(titles, expected_titles)
@@ -531,7 +533,11 @@
False)
def test_nonexisting_qualifiers(self):
- """Test ItemClaimFilterPageGenerator on sample page using
qualifiers the page doesn't have."""
+ """
+ Test ItemClaimFilterPageGenerator on sample page.
+
+ The item does not have the searched qualifiers.
+ """
qualifiers = {
'P370': pywikibot.WbTime(1950, 1, 1, precision=9,
site=self.get_site()),
diff --git a/tests/script_tests.py b/tests/script_tests.py
index f7938cd..2b761ef 100644
--- a/tests/script_tests.py
+++ b/tests/script_tests.py
@@ -154,7 +154,8 @@
}
if sys.version_info[0] > 2:
- no_args_expected_results['replicate_wiki'] = 'error: the following
arguments are required: destination'
+ no_args_expected_results['replicate_wiki'] = (
+ 'error: the following arguments are required: destination')
else:
no_args_expected_results['replicate_wiki'] = 'error: too few
arguments'
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 51dd11c..329be12 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -237,13 +237,20 @@
def testConstructors(self):
"""Test cases for site constructors."""
- self.assertEqual(pywikibot.site.APISite.fromDBName('enwiki'),
pywikibot.Site('en', 'wikipedia'))
- self.assertEqual(pywikibot.site.APISite.fromDBName('eswikisource'),
pywikibot.Site('es', 'wikisource'))
- self.assertEqual(pywikibot.site.APISite.fromDBName('dewikinews'),
pywikibot.Site('de', 'wikinews'))
- self.assertEqual(pywikibot.site.APISite.fromDBName('ukwikivoyage'),
pywikibot.Site('uk', 'wikivoyage'))
- self.assertEqual(pywikibot.site.APISite.fromDBName('metawiki'),
pywikibot.Site('meta', 'meta'))
- self.assertEqual(pywikibot.site.APISite.fromDBName('commonswiki'),
pywikibot.Site('commons', 'commons'))
- self.assertEqual(pywikibot.site.APISite.fromDBName('wikidatawiki'),
pywikibot.Site('wikidata', 'wikidata'))
+ self.assertEqual(pywikibot.site.APISite.fromDBName('enwiki'),
+ pywikibot.Site('en', 'wikipedia'))
+ self.assertEqual(pywikibot.site.APISite.fromDBName('eswikisource'),
+ pywikibot.Site('es', 'wikisource'))
+ self.assertEqual(pywikibot.site.APISite.fromDBName('dewikinews'),
+ pywikibot.Site('de', 'wikinews'))
+ self.assertEqual(pywikibot.site.APISite.fromDBName('ukwikivoyage'),
+ pywikibot.Site('uk', 'wikivoyage'))
+ self.assertEqual(pywikibot.site.APISite.fromDBName('metawiki'),
+ pywikibot.Site('meta', 'meta'))
+ self.assertEqual(pywikibot.site.APISite.fromDBName('commonswiki'),
+ pywikibot.Site('commons', 'commons'))
+ self.assertEqual(pywikibot.site.APISite.fromDBName('wikidatawiki'),
+ pywikibot.Site('wikidata', 'wikidata'))
def testLanguageMethods(self):
"""Test cases for languages() and related
methods."""
@@ -332,7 +339,8 @@
self.assertEqual(mysite.months_names[4], (u'May', u'May'))
self.assertEqual(mysite.list_to_text(('Pride', 'Prejudice')),
'Pride and Prejudice')
- self.assertEqual(mysite.list_to_text(('This', 'that', 'the
other')), 'This, that and the other')
+ self.assertEqual(mysite.list_to_text(('This', 'that', 'the
other')),
+ 'This, that and the other')
def testPageMethods(self):
"""Test ApiSite methods for getting page-specific
info."""
@@ -701,34 +709,41 @@
for t in range(1, len(timestamps)):
self.assertGreaterEqual(timestamps[t], timestamps[t - 1])
- for block in
mysite.blocks(starttime=pywikibot.Timestamp.fromISOformat("2008-07-01T00:00:01Z"),
total=5):
+ for block in mysite.blocks(
+
starttime=pywikibot.Timestamp.fromISOformat('2008-07-01T00:00:01Z'),
+ total=5):
self.assertIsInstance(block, dict)
for prop in props:
self.assertIn(prop, block)
- for block in
mysite.blocks(endtime=pywikibot.Timestamp.fromISOformat("2008-07-31T23:59:59Z"),
total=5):
+ for block in mysite.blocks(
+
endtime=pywikibot.Timestamp.fromISOformat('2008-07-31T23:59:59Z'),
+ total=5):
self.assertIsInstance(block, dict)
for prop in props:
self.assertIn(prop, block)
- for block in
mysite.blocks(starttime=pywikibot.Timestamp.fromISOformat("2008-08-02T00:00:01Z"),
-
endtime=pywikibot.Timestamp.fromISOformat("2008-08-02T23:59:59Z"),
- reverse=True, total=5):
+ for block in mysite.blocks(
+
starttime=pywikibot.Timestamp.fromISOformat('2008-08-02T00:00:01Z'),
+
endtime=pywikibot.Timestamp.fromISOformat("2008-08-02T23:59:59Z"),
+ reverse=True, total=5):
self.assertIsInstance(block, dict)
for prop in props:
self.assertIn(prop, block)
- for block in
mysite.blocks(starttime=pywikibot.Timestamp.fromISOformat("2008-08-03T23:59:59Z"),
-
endtime=pywikibot.Timestamp.fromISOformat("2008-08-03T00:00:01Z"),
- total=5):
+ for block in mysite.blocks(
+
starttime=pywikibot.Timestamp.fromISOformat('2008-08-03T23:59:59Z'),
+
endtime=pywikibot.Timestamp.fromISOformat("2008-08-03T00:00:01Z"),
+ total=5):
self.assertIsInstance(block, dict)
for prop in props:
self.assertIn(prop, block)
# starttime earlier than endtime
- self.assertRaises(pywikibot.Error, mysite.blocks,
+ self.assertRaises(pywikibot.Error, mysite.blocks, total=5,
starttime=pywikibot.Timestamp.fromISOformat("2008-08-03T00:00:01Z"),
-
endtime=pywikibot.Timestamp.fromISOformat("2008-08-03T23:59:59Z"), total=5)
+
endtime=pywikibot.Timestamp.fromISOformat('2008-08-03T23:59:59Z'))
# reverse: endtime earlier than starttime
self.assertRaises(pywikibot.Error, mysite.blocks,
starttime=pywikibot.Timestamp.fromISOformat("2008-08-03T23:59:59Z"),
-
endtime=pywikibot.Timestamp.fromISOformat("2008-08-03T00:00:01Z"), reverse=True,
total=5)
+
endtime=pywikibot.Timestamp.fromISOformat('2008-08-03T00:00:01Z'),
+ reverse=True, total=5)
for block in mysite.blocks(users='80.100.22.71', total=5):
self.assertIsInstance(block, dict)
self.assertEqual(block['user'], '80.100.22.71')
@@ -909,21 +924,25 @@
self.assertEqual(entry.page(), mainpage)
for entry in mysite.logevents(user=mysite.user(), total=3):
self.assertEqual(entry.user(), mysite.user())
- for entry in
mysite.logevents(start=pywikibot.Timestamp.fromISOformat("2008-09-01T00:00:01Z"),
total=5):
+ for entry in mysite.logevents(
+ start=pywikibot.Timestamp.fromISOformat('2008-09-01T00:00:01Z'),
total=5):
self.assertIsInstance(entry, pywikibot.logentries.LogEntry)
self.assertLessEqual(str(entry.timestamp()),
"2008-09-01T00:00:01Z")
- for entry in
mysite.logevents(end=pywikibot.Timestamp.fromISOformat("2008-09-02T23:59:59Z"),
total=5):
+ for entry in mysite.logevents(
+ end=pywikibot.Timestamp.fromISOformat('2008-09-02T23:59:59Z'),
total=5):
self.assertIsInstance(entry, pywikibot.logentries.LogEntry)
self.assertGreaterEqual(str(entry.timestamp()),
"2008-09-02T23:59:59Z")
- for entry in
mysite.logevents(start=pywikibot.Timestamp.fromISOformat("2008-02-02T00:00:01Z"),
-
end=pywikibot.Timestamp.fromISOformat("2008-02-02T23:59:59Z"),
- reverse=True, total=5):
+ for entry in mysite.logevents(
+ start=pywikibot.Timestamp.fromISOformat('2008-02-02T00:00:01Z'),
+ end=pywikibot.Timestamp.fromISOformat("2008-02-02T23:59:59Z"),
+ reverse=True, total=5):
self.assertIsInstance(entry, pywikibot.logentries.LogEntry)
self.assertTrue(
"2008-02-02T00:00:01Z" <= str(entry.timestamp()) <=
"2008-02-02T23:59:59Z")
- for entry in
mysite.logevents(start=pywikibot.Timestamp.fromISOformat("2008-02-03T23:59:59Z"),
-
end=pywikibot.Timestamp.fromISOformat("2008-02-03T00:00:01Z"),
- total=5):
+ for entry in mysite.logevents(
+ start=pywikibot.Timestamp.fromISOformat('2008-02-03T23:59:59Z'),
+ end=pywikibot.Timestamp.fromISOformat("2008-02-03T00:00:01Z"),
+ total=5):
self.assertIsInstance(entry, pywikibot.logentries.LogEntry)
self.assertTrue(
"2008-02-03T00:00:01Z" <= str(entry.timestamp()) <=
"2008-02-03T23:59:59Z")
@@ -934,7 +953,8 @@
# reverse: endtime earlier than starttime
self.assertRaises(pywikibot.Error, mysite.logevents,
start=pywikibot.Timestamp.fromISOformat("2008-02-03T23:59:59Z"),
-
end=pywikibot.Timestamp.fromISOformat("2008-02-03T00:00:01Z"), reverse=True,
total=5)
+
end=pywikibot.Timestamp.fromISOformat('2008-02-03T00:00:01Z'),
+ reverse=True, total=5)
def testRecentchanges(self):
"""Test the site.recentchanges() method."""
@@ -950,31 +970,37 @@
self.assertLessEqual(len(rc), 10)
self.assertTrue(all(isinstance(change, dict)
for change in rc))
- for change in
mysite.recentchanges(start=pywikibot.Timestamp.fromISOformat("2008-10-01T01:02:03Z"),
- total=5):
+ for change in mysite.recentchanges(
+ start=pywikibot.Timestamp.fromISOformat('2008-10-01T01:02:03Z'),
+ total=5):
self.assertIsInstance(change, dict)
self.assertLessEqual(change['timestamp'],
"2008-10-01T01:02:03Z")
- for change in
mysite.recentchanges(end=pywikibot.Timestamp.fromISOformat("2008-04-01T02:03:04Z"),
- total=5):
+ for change in mysite.recentchanges(
+ end=pywikibot.Timestamp.fromISOformat('2008-04-01T02:03:04Z'),
+ total=5):
self.assertIsInstance(change, dict)
self.assertGreaterEqual(change['timestamp'],
"2008-10-01T02:03:04Z")
- for change in
mysite.recentchanges(start=pywikibot.Timestamp.fromISOformat("2008-10-01T03:05:07Z"),
- total=5, reverse=True):
+ for change in mysite.recentchanges(
+ start=pywikibot.Timestamp.fromISOformat('2008-10-01T03:05:07Z'),
+ total=5, reverse=True):
self.assertIsInstance(change, dict)
self.assertGreaterEqual(change['timestamp'],
"2008-10-01T03:05:07Z")
- for change in
mysite.recentchanges(end=pywikibot.Timestamp.fromISOformat("2008-10-01T04:06:08Z"),
- total=5, reverse=True):
+ for change in mysite.recentchanges(
+ end=pywikibot.Timestamp.fromISOformat('2008-10-01T04:06:08Z'),
+ total=5, reverse=True):
self.assertIsInstance(change, dict)
self.assertLessEqual(change['timestamp'],
"2008-10-01T04:06:08Z")
- for change in
mysite.recentchanges(start=pywikibot.Timestamp.fromISOformat("2008-10-03T11:59:59Z"),
-
end=pywikibot.Timestamp.fromISOformat("2008-10-03T00:00:01Z"),
- total=5):
+ for change in mysite.recentchanges(
+ start=pywikibot.Timestamp.fromISOformat('2008-10-03T11:59:59Z'),
+ end=pywikibot.Timestamp.fromISOformat("2008-10-03T00:00:01Z"),
+ total=5):
self.assertIsInstance(change, dict)
self.assertTrue(
"2008-10-03T00:00:01Z" <= change['timestamp'] <=
"2008-10-03T11:59:59Z")
- for change in
mysite.recentchanges(start=pywikibot.Timestamp.fromISOformat("2008-10-05T06:00:01Z"),
-
end=pywikibot.Timestamp.fromISOformat("2008-10-05T23:59:59Z"),
- reverse=True, total=5):
+ for change in mysite.recentchanges(
+ start=pywikibot.Timestamp.fromISOformat('2008-10-05T06:00:01Z'),
+ end=pywikibot.Timestamp.fromISOformat("2008-10-05T23:59:59Z"),
+ reverse=True, total=5):
self.assertIsInstance(change, dict)
self.assertTrue(
"2008-10-05T06:00:01Z" <= change['timestamp'] <=
"2008-10-05T23:59:59Z")
@@ -985,7 +1011,8 @@
# reverse: end earlier than start
self.assertRaises(pywikibot.Error, mysite.recentchanges,
start=pywikibot.Timestamp.fromISOformat("2008-02-03T23:59:59Z"),
-
end=pywikibot.Timestamp.fromISOformat("2008-02-03T00:00:01Z"), reverse=True,
total=5)
+
end=pywikibot.Timestamp.fromISOformat('2008-02-03T00:00:01Z'),
+ reverse=True, total=5)
for change in mysite.recentchanges(namespaces=[6, 7], total=5):
self.assertIsInstance(change, dict)
self.assertIn('title', change)
@@ -1100,43 +1127,49 @@
def test_user_prefix_range(self):
"""Test the site.usercontribs() method."""
mysite = self.get_site()
- for contrib in mysite.usercontribs(userprefix="Jane",
-
start=pywikibot.Timestamp.fromISOformat("2008-10-06T01:02:03Z"),
- total=5):
+ for contrib in mysite.usercontribs(
+ userprefix='Jane',
+
start=pywikibot.Timestamp.fromISOformat("2008-10-06T01:02:03Z"),
+ total=5):
self.assertLessEqual(contrib['timestamp'],
"2008-10-06T01:02:03Z")
- for contrib in mysite.usercontribs(userprefix="Jane",
-
end=pywikibot.Timestamp.fromISOformat("2008-10-07T02:03:04Z"),
- total=5):
+ for contrib in mysite.usercontribs(
+ userprefix='Jane',
+ end=pywikibot.Timestamp.fromISOformat("2008-10-07T02:03:04Z"),
+ total=5):
self.assertGreaterEqual(contrib['timestamp'],
"2008-10-07T02:03:04Z")
def test_user_prefix_reverse(self):
"""Test the site.usercontribs() method with range
reversed."""
mysite = self.get_site()
- for contrib in mysite.usercontribs(userprefix="Brion",
-
start=pywikibot.Timestamp.fromISOformat("2008-10-08T03:05:07Z"),
- total=5, reverse=True):
+ for contrib in mysite.usercontribs(
+ userprefix='Brion',
+
start=pywikibot.Timestamp.fromISOformat("2008-10-08T03:05:07Z"),
+ total=5, reverse=True):
self.assertGreaterEqual(contrib['timestamp'],
"2008-10-08T03:05:07Z")
- for contrib in mysite.usercontribs(userprefix="Brion",
-
end=pywikibot.Timestamp.fromISOformat("2008-10-09T04:06:08Z"),
- total=5, reverse=True):
+ for contrib in mysite.usercontribs(
+ userprefix='Brion',
+ end=pywikibot.Timestamp.fromISOformat("2008-10-09T04:06:08Z"),
+ total=5, reverse=True):
self.assertLessEqual(contrib['timestamp'],
"2008-10-09T04:06:08Z")
- for contrib in mysite.usercontribs(userprefix="Tim",
-
start=pywikibot.Timestamp.fromISOformat("2008-10-10T11:59:59Z"),
-
end=pywikibot.Timestamp.fromISOformat("2008-10-10T00:00:01Z"),
- total=5):
+ for contrib in mysite.usercontribs(
+ userprefix='Tim',
+
start=pywikibot.Timestamp.fromISOformat("2008-10-10T11:59:59Z"),
+ end=pywikibot.Timestamp.fromISOformat("2008-10-10T00:00:01Z"),
+ total=5):
self.assertTrue(
"2008-10-10T00:00:01Z" <= contrib['timestamp'] <=
"2008-10-10T11:59:59Z")
def test_invalid_range(self):
"""Test the site.usercontribs() method with invalid
parameters."""
mysite = self.get_site()
- for contrib in mysite.usercontribs(userprefix="Tim",
-
start=pywikibot.Timestamp.fromISOformat("2008-10-11T06:00:01Z"),
-
end=pywikibot.Timestamp.fromISOformat("2008-10-11T23:59:59Z"),
- reverse=True, total=5):
+ for contrib in mysite.usercontribs(
+ userprefix='Tim',
+
start=pywikibot.Timestamp.fromISOformat("2008-10-11T06:00:01Z"),
+ end=pywikibot.Timestamp.fromISOformat("2008-10-11T23:59:59Z"),
+ reverse=True, total=5):
self.assertTrue(
"2008-10-11T06:00:01Z" <= contrib['timestamp'] <=
"2008-10-11T23:59:59Z")
# start earlier than end
@@ -1696,7 +1729,9 @@
self.assertTrue(-12 * 60 <= mysite.siteinfo['timeoffset'] <= +14 *
60)
self.assertEqual(mysite.siteinfo['timeoffset'] % 15, 0)
self.assertRegex(mysite.siteinfo['timezone'],
"([A-Z]{3,4}|[A-Z][a-z]+/[A-Z][a-z]+)")
- self.assertIsInstance(datetime.strptime(mysite.siteinfo['time'],
"%Y-%m-%dT%H:%M:%SZ"), datetime)
+ self.assertIsInstance(
+ datetime.strptime(mysite.siteinfo['time'],
'%Y-%m-%dT%H:%M:%SZ'),
+ datetime)
self.assertGreater(mysite.siteinfo['maxuploadsize'], 0)
self.assertIn(mysite.siteinfo['case'], ["first-letter",
"case-sensitive"])
self.assertEqual(mysite.case(), mysite.siteinfo['case'])
diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py
index dda81dc..d6b8b69 100644
--- a/tests/textlib_tests.py
+++ b/tests/textlib_tests.py
@@ -62,14 +62,19 @@
def testSpacesInSection(self):
self.assertContains("enwiki_help_editing", u"Minor_edits")
- self.assertNotContains("enwiki_help_editing", u"#Minor
edits", "Incorrect, '#Minor edits' does not work")
- self.assertNotContains("enwiki_help_editing", u"Minor Edits",
"section hashes are case-sensitive")
- self.assertNotContains("enwiki_help_editing", u"Minor_Edits",
"section hashes are case-sensitive")
+ self.assertNotContains('enwiki_help_editing', '#Minor edits',
+ "Incorrect, '#Minor edits' does not
work")
+ self.assertNotContains('enwiki_help_editing', 'Minor Edits',
+ 'section hashes are case-sensitive')
+ self.assertNotContains('enwiki_help_editing', 'Minor_Edits',
+ 'section hashes are case-sensitive')
@unittest.expectedFailure
def testNonAlphabeticalCharactersInSection(self):
- self.assertContains("enwiki_help_editing",
u"Talk_.28discussion.29_pages", "As used in the TOC")
- self.assertContains("enwiki_help_editing",
u"Talk_(discussion)_pages", "Understood by mediawiki")
+ self.assertContains('enwiki_help_editing',
'Talk_.28discussion.29_pages',
+ 'As used in the TOC')
+ self.assertContains('enwiki_help_editing',
'Talk_(discussion)_pages',
+ 'Understood by mediawiki')
def test_spaces_outside_section(self):
self.assertContains("enwiki_help_editing", u"Naming
and_moving")
@@ -79,12 +84,17 @@
def test_link_in_section(self):
# section is ==[[Wiki markup]]==
self.assertContains("enwiki_help_editing", u"[[Wiki
markup]]", "Link as section header")
- self.assertContains("enwiki_help_editing", u"[[:Wiki
markup]]", "section header link with preleading colon")
- self.assertNotContains("enwiki_help_editing", u"Wiki markup",
"section header must be a link")
+ self.assertContains('enwiki_help_editing', '[[:Wiki markup]]',
+ 'section header link with preleading colon')
+ self.assertNotContains('enwiki_help_editing', 'Wiki markup',
+ 'section header must be a link')
# section is ===[[:Help]]ful tips===
- self.assertContains("enwiki_help_editing", u"[[Help]]ful
tips", "Containing link")
- self.assertContains("enwiki_help_editing", u"[[:Help]]ful
tips", "Containing link with preleading colon")
- self.assertNotContains("enwiki_help_editing", u"Helpful
tips", "section header must contain a link")
+ self.assertContains('enwiki_help_editing', '[[Help]]ful tips',
+ 'Containing link')
+ self.assertContains('enwiki_help_editing', '[[:Help]]ful tips',
+ 'Containing link with preleading colon')
+ self.assertNotContains('enwiki_help_editing', 'Helpful tips',
+ 'section header must contain a link')
class TestFormatInterwiki(TestCase):
@@ -283,7 +293,8 @@
self.assertEqual(func('{{a|b=<noinclude>{{{1}}}</noinclude>}}'),
[('a', OrderedDict((('b',
'<noinclude>{{{1}}}</noinclude>'), )))])
self.assertEqual(func('{{subst:a|b=c}}'), [('subst:a',
OrderedDict((('b', 'c'), )))])
- self.assertEqual(func('{{safesubst:a|b=c}}'), [('safesubst:a',
OrderedDict((('b', 'c'), )))])
+ self.assertEqual(func('{{safesubst:a|b=c}}'),
+ [('safesubst:a', OrderedDict((('b',
'c'), )))])
self.assertEqual(func('{{msgnw:a|b=c}}'), [('msgnw:a',
OrderedDict((('b', 'c'), )))])
self.assertEqual(func('{{Template:a|b=c}}'), [('Template:a',
OrderedDict((('b', 'c'), )))])
self.assertEqual(func('{{template:a|b=c}}'), [('template:a',
OrderedDict((('b', 'c'), )))])
diff --git a/tests/timestripper_tests.py b/tests/timestripper_tests.py
index a3f7e77..5d6a209 100644
--- a/tests/timestripper_tests.py
+++ b/tests/timestripper_tests.py
@@ -168,15 +168,19 @@
'ptwiki': {
'family': 'wikipedia',
'code': 'pt',
- 'match': u'19h48min de 3 de fevereiro de 2010 (UTC) 19h48min de
7 de fevereiro de 2010 (UTC)',
+ 'match': '19h48min de 3 de fevereiro de 2010 (UTC) 19h48min
'
+ 'de 7 de fevereiro de 2010 (UTC)',
},
'viwiki': {
'family': 'wikipedia',
'code': 'vi',
- 'match': u'19:48, ngày 15 tháng 9 năm 2008 (UTC) 19:48, ngày 7
tháng 2 năm 2010 (UTC)',
- 'match2': u'16:41, ngày 15 tháng 9 năm 2008 (UTC) 16:41, ngày 12
tháng 9 năm 2008 (UTC)',
- 'match3': u'21:18, ngày 13 tháng 8 năm 2014 (UTC) 21:18, ngày 14
tháng 8 năm 2014 (UTC)',
- 'nomatch1': u'21:18, ngày 13 March 8 năm 2014 (UTC) 21:18, ngày
14 March 8 năm 2014 (UTC)',
+ 'match': '19:48, ngày 15 tháng 9 năm 2008 (UTC) 19:48, ngày 7
tháng 2 năm 2010 (UTC)',
+ 'match2': '16:41, ngày 15 tháng 9 năm 2008 (UTC) 16:41, '
+ 'ngày 12 tháng 9 năm 2008 (UTC)',
+ 'match3': '21:18, ngày 13 tháng 8 năm 2014 (UTC) 21:18, '
+ 'ngày 14 tháng 8 năm 2014 (UTC)',
+ 'nomatch1': '21:18, ngày 13 March 8 năm 2014 (UTC) 21:18, '
+ 'ngày 14 March 8 năm 2014 (UTC)',
},
}
diff --git a/tests/tools_ip_tests.py b/tests/tools_ip_tests.py
index 4bfd507..659b756 100644
--- a/tests/tools_ip_tests.py
+++ b/tests/tools_ip_tests.py
@@ -73,8 +73,10 @@
self.ipv6test(False, "2001:0000:1234:0000:00001:C1C0:ABCD:0876") #
extra 0 not allowed!
self.ipv6test(False, " 2001:0000:1234:0000:0000:C1C0:ABCD:0876") #
leading space
self.ipv6test(False, "2001:0000:1234:0000:0000:C1C0:ABCD:0876 ") #
trailing space
- self.ipv6test(False, " 2001:0000:1234:0000:0000:C1C0:ABCD:0876 ") #
leading and trailing space
- self.ipv6test(False, "2001:0000:1234:0000:0000:C1C0:ABCD:0876 0") #
junk after valid address
+ # leading and trailing space
+ self.ipv6test(False, ' 2001:0000:1234:0000:0000:C1C0:ABCD:0876 ')
+ # junk after valid address
+ self.ipv6test(False, '2001:0000:1234:0000:0000:C1C0:ABCD:0876 0')
self.ipv6test(False, "2001:0000:1234: 0000:0000:C1C0:ABCD:0876") #
internal space
self.ipv6test(False, "3ffe:0b00:0000:0001:0000:0000:000a") # seven
segments
@@ -188,7 +190,8 @@
self.ipv6test(False, "2001:1:1:1:1:1:255Z255X255Y255") # garbage
instead of "." in IPv4
self.ipv6test(False, "::ffff:192x168.1.26") # ditto
self.ipv6test(True, "::ffff:192.168.1.1")
- self.ipv6test(True, "0:0:0:0:0:0:13.1.68.3") # IPv4-compatible IPv6
address, full, deprecated
+ # IPv4-compatible IPv6 address, full, deprecated
+ self.ipv6test(True, '0:0:0:0:0:0:13.1.68.3')
self.ipv6test(True, "0:0:0:0:0:FFFF:129.144.52.38") # IPv4-mapped IPv6
address, full
self.ipv6test(True, "::13.1.68.3") # IPv4-compatible IPv6 address,
compressed, deprecated
self.ipv6test(True, "::FFFF:129.144.52.38") # IPv4-mapped IPv6
address, compressed
@@ -617,7 +620,8 @@
# Additional cases:
#
http://crisp.tweakblogs.net/blog/2031/ipv6-validation-%28and-caveats%29.html
self.ipv6test(True, "0:a:b:c:d:e:f::")
- self.ipv6test(True, "::0:a:b:c:d:e:f") # syntactically correct, but
bad form (::0:... could be combined)
+ # syntactically correct, but bad form (::0:... could be combined)
+ self.ipv6test(True, '::0:a:b:c:d:e:f')
self.ipv6test(True, "a:b:c:d:e:f:0::")
self.ipv6test(False, "':10.0.0.1")
diff --git a/tests/ui_tests.py b/tests/ui_tests.py
index e802632..46ac664 100644
--- a/tests/ui_tests.py
+++ b/tests/ui_tests.py
@@ -572,7 +572,8 @@
# select all and copy to clipboard
self._app.window_().SetFocus()
self.waitForWindow()
- self._app.window_().TypeKeys('%
{UP}{UP}{UP}{RIGHT}{DOWN}{DOWN}{DOWN}{ENTER}{ENTER}', with_spaces=True)
+ self._app.window_().TypeKeys('%
{UP}{UP}{UP}{RIGHT}{DOWN}{DOWN}{DOWN}{ENTER}{ENTER}',
+ with_spaces=True)
while True:
data = self.getclip()
diff --git a/tests/uploadbot_tests.py b/tests/uploadbot_tests.py
index b7e2de2..f017ef9 100644
--- a/tests/uploadbot_tests.py
+++ b/tests/uploadbot_tests.py
@@ -54,11 +54,12 @@
def test_png_url(self):
"""Test uploading a png from url using
upload.py."""
- bot =
upload.UploadRobot(url=['https://upload.wikimedia.org/wikipedia/commons…ng'],
- description="pywikibot upload.py script
test",
- useFilename=None, keepFilename=True,
- verifyDescription=True, aborts=set(),
- ignoreWarning=True, targetSite=self.get_site())
+ bot = upload.UploadRobot(
+
url=['https://upload.wikimedia.org/wikipedia/commons/f/fc/MP_sounds.png…ng'],
+ description="pywikibot upload.py script test",
+ useFilename=None, keepFilename=True,
+ verifyDescription=True, aborts=set(),
+ ignoreWarning=True, targetSite=self.get_site())
bot.run()
diff --git a/tests/wikidataquery_tests.py b/tests/wikidataquery_tests.py
index 6254126..81ed4f9 100644
--- a/tests/wikidataquery_tests.py
+++ b/tests/wikidataquery_tests.py
@@ -141,12 +141,14 @@
self.assertEqual(str(q), 'between[569,,+00000002010-01-01T01:00:00Z]')
q = query.Between(569, begin, end)
- self.assertEqual(str(q),
'between[569,+00000001999-01-01T00:00:00Z,+00000002010-01-01T01:00:00Z]')
+ self.assertEqual(str(q),
+
'between[569,+00000001999-01-01T00:00:00Z,+00000002010-01-01T01:00:00Z]')
# try negative year
begin = pywikibot.WbTime(site=self.repo, year=-44)
q = query.Between(569, begin, end)
- self.assertEqual(str(q),
'between[569,-00000000044-01-01T00:00:00Z,+00000002010-01-01T01:00:00Z]')
+ self.assertEqual(str(q),
+
'between[569,-00000000044-01-01T00:00:00Z,+00000002010-01-01T01:00:00Z]')
def testQueriesDirectFromClaim(self):
"""Test construction of the right Query from a
page.Claim."""
@@ -192,13 +194,15 @@
qs1 = q1.AND(q2)
qs2 = q1.OR(qs1).AND(query.HasClaim(98))
- self.assertEqual(str(qs2), '(claim[99:100] OR (claim[99:100] AND
claim[99:101])) AND claim[98]')
+ self.assertEqual(str(qs2),
+ '(claim[99:100] OR (claim[99:100] AND claim[99:101])) AND
claim[98]')
# if the joiners are the same, no need to group
qs1 = q1.AND(q2)
qs2 = q1.AND(qs1).AND(query.HasClaim(98))
- self.assertEqual(str(qs2), 'claim[99:100] AND claim[99:100] AND claim[99:101]
AND claim[98]')
+ self.assertEqual(str(qs2),
+ 'claim[99:100] AND claim[99:100] AND claim[99:101] AND
claim[98]')
qs1 = query.HasClaim(100).AND(query.HasClaim(101))
qs2 = qs1.OR(query.HasClaim(102))
--
To view, visit
https://gerrit.wikimedia.org/r/225853
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Iabdf520d6fb058fb3ca3d233f9221d62947c52a6
Gerrit-PatchSet: 7
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ricordisamoa <ricordisamoa(a)openmailbox.org>
Gerrit-Reviewer: Siebrand <siebrand(a)kitano.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: Xavier Combelle <xavier.combelle(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>