jenkins-bot has submitted this change and it was merged.
Change subject: [IMPROV] Remove spaces between """ and its text
......................................................................
[IMPROV] Remove spaces between """ and its text
Change-Id: Ifaf10d3433dddbf68be39deff6478f5f1f4b691d
---
M generate_user_files.py
M pywikibot/bot.py
M pywikibot/comms/rcstream.py
M pywikibot/comms/threadedhttp.py
M pywikibot/data/wikistats.py
M pywikibot/exceptions.py
M pywikibot/family.py
M pywikibot/interwiki_graph.py
M pywikibot/page.py
M pywikibot/pagegenerators.py
M pywikibot/site.py
M pywikibot/userinterfaces/gui.py
M pywikibot/userinterfaces/terminal_interface_base.py
M pywikibot/userinterfaces/win32_unicode.py
M pywikibot/version.py
M scripts/__init__.py
M scripts/basic.py
M scripts/category.py
M scripts/checkimages.py
M scripts/commonscat.py
M scripts/delete.py
M scripts/disambredir.py
M scripts/featured.py
M scripts/flickrripper.py
M scripts/imagerecat.py
M scripts/maintenance/cache.py
M scripts/match_images.py
M scripts/newitem.py
M scripts/protect.py
M scripts/reflinks.py
M scripts/states_redirect.py
M scripts/version.py
M scripts/weblinkchecker.py
M tests/http_tests.py
M tests/i18n/__init__.py
M tests/i18n/test.py
M tests/pagegenerators_tests.py
M tests/ui_tests.py
M tests/wikibase_tests.py
39 files changed, 118 insertions(+), 118 deletions(-)
Approvals:
Mpaa: Looks good to me, approved
jenkins-bot: Verified
diff --git a/generate_user_files.py b/generate_user_files.py
index a1c83dc..c98c535 100644
--- a/generate_user_files.py
+++ b/generate_user_files.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-""" Script to create user files (user-config.py, user-fixes.py).
"""
+"""Script to create user files (user-config.py,
user-fixes.py)."""
#
# (C) Pywikibot team, 2010-2014
#
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 2e670e8..e255580 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -1001,7 +1001,7 @@
stdout(msg)
def user_confirm(self, question):
- """ Obtain user response if bot option 'always' not
enabled. """
+ """Obtain user response if bot option 'always' not
enabled."""
if self.getOption('always'):
return True
diff --git a/pywikibot/comms/rcstream.py b/pywikibot/comms/rcstream.py
index b5d2230..688e0f6 100644
--- a/pywikibot/comms/rcstream.py
+++ b/pywikibot/comms/rcstream.py
@@ -58,7 +58,7 @@
"""
def __init__(self, wikihost, rchost, rcport=80, rcpath='/rc', total=None):
- """ Constructor for RcListenerThread. """
+ """Constructor for RcListenerThread."""
super(RcListenerThread, self).__init__()
self.rchost = rchost
self.rcport = rcport
@@ -116,13 +116,13 @@
self.client.define(GlobalListener)
def __repr__(self):
- """ Return representation. """
+ """Return representation."""
return "<rcstream for socketio://%s@%s:%s%s>" % (
self.wikihost, self.rchost, self.rcport, self.rcpath
)
def run(self):
- """ Threaded function. Runs insided the thread when started with
.start(). """
+ """Threaded function. Runs insided the thread when started with
.start()."""
self.running = True
while self.running:
self.client.wait(seconds=0.1)
@@ -132,12 +132,12 @@
self.queue.put(None)
def stop(self):
- """ Stop the thread. """
+ """Stop the thread."""
self.running = False
def rc_listener(wikihost, rchost, rcport=80, rcpath='/rc', total=None):
- """ RC Changes Generator. Yields changes received from RCstream.
+ """RC Changes Generator. Yields changes received from RCstream.
@param wikihost: the hostname of the wiki we want to get changes for. This
is passed to rcstream using a 'subscribe' command. Pass
@@ -191,7 +191,7 @@
def site_rc_listener(site, total=None):
- """ RC Changes Generator. Yields changes received from RCstream.
+ """RC Changes Generator. Yields changes received from RCstream.
@param site: the Pywikibot.Site object to yield live recent changes for
@type site: Pywikibot.BaseSite
diff --git a/pywikibot/comms/threadedhttp.py b/pywikibot/comms/threadedhttp.py
index b7a7422..eb892ae 100644
--- a/pywikibot/comms/threadedhttp.py
+++ b/pywikibot/comms/threadedhttp.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-""" Httplib2 threaded cookie layer.
+"""Httplib2 threaded cookie layer.
This class extends httplib2, adding support for:
- Cookies, guarded for cross-site redirects
diff --git a/pywikibot/data/wikistats.py b/pywikibot/data/wikistats.py
index 125eabc..b8f64fe 100644
--- a/pywikibot/data/wikistats.py
+++ b/pywikibot/data/wikistats.py
@@ -235,6 +235,6 @@
reverse=True)
def languages_by_size(self, table):
- """ Return ordered list of languages by size from WikiStats.
"""
+ """Return ordered list of languages by size from
WikiStats."""
# This assumes they appear in order of size in the WikiStats dump.
return [d['prefix'] for d in self.get(table)]
diff --git a/pywikibot/exceptions.py b/pywikibot/exceptions.py
index 4b4161d..3bb6a7a 100644
--- a/pywikibot/exceptions.py
+++ b/pywikibot/exceptions.py
@@ -174,7 +174,7 @@
message = "Edit to page %(title)s failed:\n%(reason)s"
def __init__(self, page, reason):
- """ Constructor.
+ """Constructor.
@param reason: Details of the problem
@type reason: Exception or basestring
@@ -276,7 +276,7 @@
u"Target page: %(target_page)s on %(target_site)s.")
def __init__(self, page, target_page):
- """ Constructor.
+ """Constructor.
@param target_page: Target page of the redirect.
@type reason: Page
diff --git a/pywikibot/family.py b/pywikibot/family.py
index cd90d87..0969041 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -1114,7 +1114,7 @@
# Which version of MediaWiki is used?
@deprecated('APISite.version()')
def version(self, code):
- """ Return MediaWiki version number as a string.
+ """Return MediaWiki version number as a string.
Use L{pywikibot.tools.MediaWikiVersion} to compare version strings.
"""
@@ -1143,7 +1143,7 @@
@deprecated("APISite.version()")
def versionnumber(self, code):
- """ DEPRECATED, use version() instead.
+ """DEPRECATED, use version() instead.
Use L{pywikibot.tools.MediaWikiVersion} to compare version strings.
Return an int identifying MediaWiki version.
diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py
index 00fe657..b89f3dc 100644
--- a/pywikibot/interwiki_graph.py
+++ b/pywikibot/interwiki_graph.py
@@ -1,4 +1,4 @@
-""" Module with the Graphviz drawing calls. """
+"""Module with the Graphviz drawing calls."""
#
# (C) Pywikibot team, 2006-2010
#
diff --git a/pywikibot/page.py b/pywikibot/page.py
index c63af01..16ae491 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -2508,7 +2508,7 @@
@property
def username(self):
- """ The username.
+ """The username.
Convenience method that returns the title of the page with
namespace prefix omitted, which is the username.
@@ -2521,7 +2521,7 @@
return self.title(withNamespace=False)
def isRegistered(self, force=False):
- """ Determine if the user is registered on the site.
+ """Determine if the user is registered on the site.
It is possible to have a page named User:xyz and not have
a corresponding user with username xyz.
@@ -2540,14 +2540,14 @@
return self.getprops(force).get('missing') is None
def isAnonymous(self):
- """ Determine if the user is editing as an IP address.
+ """Determine if the user is editing as an IP address.
@return: bool
"""
return ip_regexp.match(self.username) is not None
def getprops(self, force=False):
- """ Return a properties about the user.
+ """Return a properties about the user.
@param force: if True, forces reloading the data from API
@type force: bool
@@ -2567,7 +2567,7 @@
@deprecated('User.registration()')
def registrationTime(self, force=False):
- """ DEPRECATED. Fetch registration date for this user.
+ """DEPRECATED. Fetch registration date for this user.
@param force: if True, forces reloading the data from API
@type force: bool
@@ -2580,7 +2580,7 @@
return 0
def registration(self, force=False):
- """ Fetch registration date for this user.
+ """Fetch registration date for this user.
@param force: if True, forces reloading the data from API
@type force: bool
@@ -2592,7 +2592,7 @@
return pywikibot.Timestamp.fromISOformat(reg)
def editCount(self, force=False):
- """ Return edit count for a registered user.
+ """Return edit count for a registered user.
Always returns 0 for 'anonymous' users.
@@ -2607,7 +2607,7 @@
return 0
def isBlocked(self, force=False):
- """ Determine whether the user is currently blocked.
+ """Determine whether the user is currently blocked.
@param force: if True, forces reloading the data from API
@type force: bool
@@ -2617,7 +2617,7 @@
return 'blockedby' in self.getprops(force)
def isEmailable(self, force=False):
- """ Determine whether emails may be send to this user through
MediaWiki.
+ """Determine whether emails may be send to this user through
MediaWiki.
@param force: if True, forces reloading the data from API
@type force: bool
@@ -2627,7 +2627,7 @@
return 'emailable' in self.getprops(force)
def groups(self, force=False):
- """ Return a list of groups to which this user belongs.
+ """Return a list of groups to which this user belongs.
The list of groups may be empty.
@@ -2642,7 +2642,7 @@
return []
def getUserPage(self, subpage=u''):
- """ Return a Page object relative to this user's main page.
+ """Return a Page object relative to this user's main page.
@param subpage: subpage part to be appended to the main
page title (optional)
@@ -2658,7 +2658,7 @@
return Page(Link(self.title() + subpage, self.site))
def getUserTalkPage(self, subpage=u''):
- """ Return a Page object relative to this user's main talk
page.
+ """Return a Page object relative to this user's main talk
page.
@param subpage: subpage part to be appended to the main
talk page title (optional)
@@ -2675,7 +2675,7 @@
self.site, defaultNamespace=3))
def sendMail(self, subject, text, ccme=False):
- """ Send an email to this user via MediaWiki's email
interface.
+ """Send an email to this user via MediaWiki's email
interface.
Return True on success, False otherwise.
This method can raise an UserActionRefuse exception in case this user
@@ -2750,7 +2750,7 @@
@deprecated("contributions")
@deprecate_arg("limit", "total") # To be consistent with rest of
framework
def editedPages(self, total=500):
- """ DEPRECATED. Use contributions().
+ """DEPRECATED. Use contributions().
Yields pywikibot.Page objects that this user has
edited, with an upper bound of 'total'. Pages returned are not
@@ -2765,7 +2765,7 @@
@deprecate_arg("limit", "total") # To be consistent with rest of
framework
@deprecate_arg("namespace", "namespaces")
def contributions(self, total=500, namespaces=[]):
- """ Yield tuples describing this user edits.
+ """Yield tuples describing this user edits.
Each tuple is composed of a pywikibot.Page object,
the revision id (int), the edit timestamp (as a pywikibot.Timestamp
@@ -2788,7 +2788,7 @@
@deprecate_arg("number", "total")
def uploadedImages(self, total=10):
- """ Yield tuples describing files uploaded by this user.
+ """Yield tuples describing files uploaded by this user.
Each tuple is composed of a pywikibot.Page, the timestamp (str in
ISO8601 format), comment (unicode) and a bool for pageid > 0.
@@ -2817,7 +2817,7 @@
"""
def __init__(self, site, title=u"", **kwargs):
- """ Constructor.
+ """Constructor.
If title is provided, either ns or entity_type must also be provided,
and will be checked against the title parsed using the Page
@@ -3250,7 +3250,7 @@
class ItemPage(WikibasePage):
- """ Wikibase entity of type 'item'.
+ """Wikibase entity of type 'item'.
A Wikibase item may be defined by either a 'Q' id (qid),
or by a site & title.
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 1a1b15a..1a73e8f 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -1339,7 +1339,7 @@
@deprecated_args(inverse="quantifier")
def titlefilter(cls, generator, regex, quantifier='any',
ignore_namespace=True):
- """ Yield pages from another generator whose title matches regex.
+ """Yield pages from another generator whose title matches regex.
Uses regex option re.IGNORECASE depending on the quantifier parameter.
@@ -2074,7 +2074,7 @@
self.site = site
def queryYahoo(self, query):
- """ Perform a query using python package 'pYsearch'.
"""
+ """Perform a query using python package
'pYsearch'."""
try:
from yahoo.search.web import WebSearch
except ImportError:
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 2a347bb..c0b3410 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -83,7 +83,7 @@
class LoginStatus(object):
- """ Enum for Login statuses.
+ """Enum for Login statuses.
>> LoginStatus.NOT_ATTEMPTED
-3
@@ -126,7 +126,7 @@
class Namespace(Iterable, ComparableMixin, UnicodeMixin):
- """ Namespace site data object.
+ """Namespace site data object.
This is backwards compatible with the structure of entries
in site._namespaces which were a list of::
@@ -607,7 +607,7 @@
return (self.family.name, self.code)
def __getstate__(self):
- """ Remove Lock based classes before pickling. """
+ """Remove Lock based classes before pickling."""
new = self.__dict__.copy()
del new['_pagemutex']
if '_throttle' in new:
@@ -615,7 +615,7 @@
return new
def __setstate__(self, attrs):
- """ Restore things removed in __getstate__. """
+ """Restore things removed in __getstate__."""
self.__dict__.update(attrs)
self._pagemutex = threading.Lock()
@@ -1019,7 +1019,7 @@
def must_be(group=None, right=None):
- """ Decorator to require a certain user status when method is called.
+ """Decorator to require a certain user status when method is called.
@param group: The group the logged in user should belong to
this parameter can be overridden by
@@ -1055,7 +1055,7 @@
def need_version(version):
- """ Decorator to require a certain MediaWiki version number.
+ """Decorator to require a certain MediaWiki version number.
@param version: the mw version number required
@type version: str
@@ -1678,7 +1678,7 @@
class_name='APISite')
def logout(self):
- """ Logout of the site and load details for the logged out user.
+ """Logout of the site and load details for the logged out user.
Also logs out of the global account if linked to the user.
"""
@@ -2020,7 +2020,7 @@
@need_version("1.12")
def expand_text(self, text, title=None, includecomments=None):
- """ Parse the given text for preprocessing and rendering.
+ """Parse the given text for preprocessing and rendering.
e.g expand templates and strip comments if includecomments
parameter is not True. Keeps text inside
@@ -2165,7 +2165,7 @@
@need_version("1.14")
@deprecated("has_extension")
def hasExtension(self, name, unknown=None):
- """ Determine whether extension `name` is loaded.
+ """Determine whether extension `name` is loaded.
Use L{has_extension} instead!
@@ -2182,7 +2182,7 @@
@need_version("1.14")
def has_extension(self, name):
- """ Determine whether extension `name` is loaded.
+ """Determine whether extension `name` is loaded.
@param name: The extension to check for, case insensitive
@type name: str
@@ -3388,7 +3388,7 @@
return self.allcategories(total=limit)
def isBot(self, username):
- """Return True is username is a bot user. """
+ """Return True is username is a bot user."""
return username in [userdata['name'] for userdata in self.botusers()]
def botusers(self, step=None, total=None):
diff --git a/pywikibot/userinterfaces/gui.py b/pywikibot/userinterfaces/gui.py
index 788c541..07a4ba5 100644
--- a/pywikibot/userinterfaces/gui.py
+++ b/pywikibot/userinterfaces/gui.py
@@ -445,7 +445,7 @@
class Tkdialog:
- """ The dialog window for image info."""
+ """The dialog window for image info."""
def __init__(self, photo_description, photo, filename):
"""Constructor."""
@@ -526,18 +526,18 @@
return imageTk
def ok_file(self):
- """ The user pressed the OK button. """
+ """The user pressed the OK button."""
self.filename = self.filename_field.get()
self.photo_description = self.description_field.get(0.0, Tkinter.END)
self.root.destroy()
def skip_file(self):
- """ The user pressed the Skip button. """
+ """The user pressed the Skip button."""
self.skip = True
self.root.destroy()
def show_dialog(self):
- """ Activate the dialog.
+ """Activate the dialog.
@return: new description, name, and if the image is skipped
@rtype: tuple of (unicode, unicode, bool)
diff --git a/pywikibot/userinterfaces/terminal_interface_base.py
b/pywikibot/userinterfaces/terminal_interface_base.py
index a900870..6e105de 100755
--- a/pywikibot/userinterfaces/terminal_interface_base.py
+++ b/pywikibot/userinterfaces/terminal_interface_base.py
@@ -403,7 +403,7 @@
self.UI = UI
def flush(self):
- """Flush the stream. """
+ """Flush the stream."""
self.stream.flush()
def emit(self, record):
diff --git a/pywikibot/userinterfaces/win32_unicode.py
b/pywikibot/userinterfaces/win32_unicode.py
index dc85cc2..9e4dded 100755
--- a/pywikibot/userinterfaces/win32_unicode.py
+++ b/pywikibot/userinterfaces/win32_unicode.py
@@ -1,4 +1,4 @@
-""" Stdout, stderr and argv support for unicode. """
+"""Stdout, stderr and argv support for unicode."""
##############################################
# Support for unicode in windows cmd.exe
# Posted on Stack Overflow [1], available under CC-BY-SA [2]
diff --git a/pywikibot/version.py b/pywikibot/version.py
index ce95775..377339c 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-""" Module to determine the pywikibot version (tag, revision and date).
"""
+"""Module to determine the pywikibot version (tag, revision and
date)."""
#
# (C) Merlijn 'valhallasw' van Deen, 2007-2014
# (C) xqt, 2010-2014
@@ -285,7 +285,7 @@
def package_versions(modules=None, builtins=False, standard_lib=None):
- """ Retrieve package version information.
+ """Retrieve package version information.
When builtins or standard_lib are None, they will be included only
if a version was found in the package.
diff --git a/scripts/__init__.py b/scripts/__init__.py
index 6eb2664..ab8c50a 100644
--- a/scripts/__init__.py
+++ b/scripts/__init__.py
@@ -1 +1 @@
-""" THIS DIRECTORY IS TO HOLD BOT SCRIPTS FOR THE NEW FRAMEWORK.
"""
+"""THIS DIRECTORY IS TO HOLD BOT SCRIPTS FOR THE NEW
FRAMEWORK."""
diff --git a/scripts/basic.py b/scripts/basic.py
index 9c315ea..33d6319 100755
--- a/scripts/basic.py
+++ b/scripts/basic.py
@@ -62,12 +62,12 @@
self.summary = i18n.twtranslate(site, 'basic-changing')
def run(self):
- """ Process each page from the generator. """
+ """Process each page from the generator."""
for page in self.generator:
self.treat(page)
def treat(self, page):
- """ Load the given page, does some changes, and saves it.
"""
+ """Load the given page, does some changes, and saves
it."""
text = self.load(page)
if not text:
return
@@ -84,7 +84,7 @@
pywikibot.output(u'Page %s not saved.' % page.title(asLink=True))
def load(self, page):
- """ Load the text of the given page. """
+ """Load the text of the given page."""
try:
# Load the page
text = page.get()
@@ -100,7 +100,7 @@
def save(self, text, page, comment=None, minorEdit=True,
botflag=True):
- """ Update the given page with new text. """
+ """Update the given page with new text."""
# only save if something was changed
if text != page.get():
# Show the title of the page we're working on.
diff --git a/scripts/category.py b/scripts/category.py
index bec702b..2236f11 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -953,7 +953,7 @@
class CategoryTreeRobot:
- """ Robot to create tree overviews of the category structure.
+ """Robot to create tree overviews of the category structure.
Parameters:
* catTitle - The category which will be the tree's root.
@@ -976,7 +976,7 @@
self.site = pywikibot.Site()
def treeview(self, cat, currentDepth=0, parent=None):
- """ Return a tree view of all subcategories of cat.
+ """Return a tree view of all subcategories of cat.
The multi-line string contains a tree view of all subcategories of cat,
up to level maxDepth. Recursively calls itself.
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index e792828..7b94bdb 100644
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -638,7 +638,7 @@
def report(self, newtext, image_to_report, notification=None, head=None,
notification2=None, unver=True, commTalk=None, commImage=None):
- """ Function to make the reports easier. """
+ """Function to make the reports easier."""
self.image_to_report = image_to_report
self.newtext = newtext
self.head = head or u''
@@ -744,7 +744,7 @@
return True
def put_mex_in_talk(self):
- """ Function to put the warning in talk page of the
uploader."""
+ """Function to put the warning in talk page of the
uploader."""
commento2 = i18n.translate(self.site, msg_comm2, fallback=True)
emailPageName = i18n.translate(self.site, emailPageWithText)
emailSubj = i18n.translate(self.site, emailSubject)
@@ -937,7 +937,7 @@
return True
def checkImageDuplicated(self, duplicates_rollback):
- """ Function to check the duplicated files. """
+ """Function to check the duplicated files."""
dupText = i18n.translate(self.site, duplicatesText)
dupRegex = i18n.translate(self.site, duplicatesRegex)
dupTalkHead = i18n.translate(self.site, duplicate_user_talk_head)
@@ -1093,7 +1093,7 @@
def report_image(self, image_to_report, rep_page=None, com=None,
rep_text=None, addings=True, regex=None):
- """ Report the files to the report page when needed.
"""
+ """Report the files to the report page when
needed."""
if not rep_page:
rep_page = self.rep_page
@@ -1149,7 +1149,7 @@
return reported
def takesettings(self):
- """ Function to take the settings from the wiki.
"""
+ """Function to take the settings from the wiki."""
settingsPage = i18n.translate(self.site, page_with_settings)
try:
if not settingsPage:
@@ -1404,7 +1404,7 @@
return (self.license_found, self.whiteTemplatesFound)
def load(self, raw):
- """ Load a list of objects from a string using regex.
"""
+ """Load a list of objects from a string using
regex."""
list_loaded = []
# I search with a regex how many user have not the talk page
# and i put them in a list (i find it more easy and secure)
@@ -1417,7 +1417,7 @@
return list_loaded
def skipImages(self, skip_number, limit):
- """ Given a number of files, skip the first -number- files.
"""
+ """Given a number of files, skip the first -number-
files."""
# If the images to skip are more the images to check, make them the
# same number
if skip_number == 0:
@@ -1516,7 +1516,7 @@
return generator
def isTagged(self):
- """ Understand if a file is already tagged or not.
"""
+ """Understand if a file is already tagged or
not."""
# Is the image already tagged? If yes, no need to double-check, skip
for i in i18n.translate(self.site, txt_find):
# If there are {{ use regex, otherwise no (if there's not the
diff --git a/scripts/commonscat.py b/scripts/commonscat.py
index 74c9d39..b078987 100755
--- a/scripts/commonscat.py
+++ b/scripts/commonscat.py
@@ -234,7 +234,7 @@
self.site = pywikibot.Site()
def treat(self, page):
- """ Load the given page, do some changes, and save it.
"""
+ """Load the given page, do some changes, and save
it."""
if not page.exists():
pywikibot.output(u'Page %s does not exist. Skipping.'
% page.title(asLink=True))
@@ -342,7 +342,7 @@
def changeCommonscat(self, page=None, oldtemplate=u'', oldcat=u'',
newtemplate=u'', newcat=u'',
linktitle=u'',
description=NotImplemented): # pylint: disable=unused-argument
- """ Change the current commonscat template and target.
"""
+ """Change the current commonscat template and
target."""
if oldcat == '3=S' or linktitle == '3=S':
return # TODO: handle additional param on de-wiki
if not linktitle and (page.title().lower() in oldcat.lower() or
@@ -437,7 +437,7 @@
return None
def checkCommonscatLink(self, name=""):
- """ Return the name of a valid commons category.
+ """Return the name of a valid commons category.
If the page is a redirect this function tries to follow it.
If the page doesn't exists the function will return an empty string
diff --git a/scripts/delete.py b/scripts/delete.py
index 5bc1904..e6fb2f1 100644
--- a/scripts/delete.py
+++ b/scripts/delete.py
@@ -49,7 +49,7 @@
class DeletionRobot(CurrentPageBot):
- """ This robot allows deletion of pages en masse. """
+ """This robot allows deletion of pages en masse."""
def __init__(self, generator, summary, **kwargs):
"""
diff --git a/scripts/disambredir.py b/scripts/disambredir.py
index 8739f3a..af697bb 100644
--- a/scripts/disambredir.py
+++ b/scripts/disambredir.py
@@ -42,7 +42,7 @@
def treat(text, linkedPage, targetPage):
- """ Based on the method of the same name in solve_disambiguation.py.
"""
+ """Based on the method of the same name in
solve_disambiguation.py."""
# make a backup of the original text so we can show the changes later
mysite = pywikibot.Site()
linktrail = mysite.linktrail()
diff --git a/scripts/featured.py b/scripts/featured.py
index 184612f..b5957a6 100644
--- a/scripts/featured.py
+++ b/scripts/featured.py
@@ -498,7 +498,7 @@
return add_templates, remove_templates
def featuredWithInterwiki(self, fromsite, task):
- """ Read featured articles and find the corresponding pages.
+ """Read featured articles and find the corresponding pages.
Find corresponding pages on other sites, place the template and
remember the page in the cache dict.
diff --git a/scripts/flickrripper.py b/scripts/flickrripper.py
index 7a4fc9e..e7cd008 100644
--- a/scripts/flickrripper.py
+++ b/scripts/flickrripper.py
@@ -131,7 +131,7 @@
def findDuplicateImages(photo=None,
site=pywikibot.Site(u'commons', u'commons')):
- """ Find duplicate images.
+ """Find duplicate images.
Take the photo, calculate the SHA1 hash and ask the MediaWiki api
for a list of duplicates.
@@ -168,7 +168,7 @@
def getFilename(photoInfo=None, site=None, project=u'Flickr'):
- """ Build a good filename for the upload based on the username and
title.
+ """Build a good filename for the upload based on the username and
title.
Prevents naming collisions.
@@ -210,7 +210,7 @@
def cleanUpTitle(title):
- """ Clean up the title of a potential MediaWiki page.
+ """Clean up the title of a potential MediaWiki page.
Otherwise the title of the page might not be allowed by the software.
@@ -238,7 +238,7 @@
def buildDescription(flinfoDescription=u'', flickrreview=False,
reviewer=u'',
override=u'', addCategory=u'',
removeCategories=False):
- """ Build the final description for the image.
+ """Build the final description for the image.
The description is based on the info from flickrinfo and improved.
@@ -333,7 +333,7 @@
def getPhotos(flickr=None, user_id=u'', group_id=u'',
photoset_id=u'',
start_id='', end_id='', tags=u''):
- """ Loop over a set of Flickr photos. """
+ """Loop over a set of Flickr photos."""
found_start_id = not start_id
#
https://www.flickr.com/services/api/flickr.groups.pools.getPhotos.html
diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py
index 7bf8f1d..ab8d03f 100644
--- a/scripts/imagerecat.py
+++ b/scripts/imagerecat.py
@@ -74,7 +74,7 @@
def categorizeImages(generator, onlyFilter, onlyUncat):
- """ Loop over all images in generator and try to categorize them.
+ """Loop over all images in generator and try to categorize them.
Get category suggestions from CommonSense.
@@ -113,7 +113,7 @@
def getCommonshelperCats(imagepage):
- """ Get category suggestions from CommonSense.
+ """Get category suggestions from CommonSense.
@rtype: list of unicode
@@ -290,7 +290,7 @@
def applyAllFilters(categories):
- """ Apply all filters on categories. """
+ """Apply all filters on categories."""
result = []
result = filterDisambiguation(categories)
result = followRedirects(result)
@@ -301,7 +301,7 @@
def filterBlacklist(categories):
- """ Filter out categories which are on the blacklist.
"""
+ """Filter out categories which are on the
blacklist."""
result = []
for cat in categories:
cat = cat.replace('_', ' ')
@@ -311,7 +311,7 @@
def filterDisambiguation(categories):
- """ Filter out disambiguation categories. """
+ """Filter out disambiguation categories."""
result = []
for cat in categories:
if (not pywikibot.Page(pywikibot.Site(u'commons', u'commons'),
@@ -321,7 +321,7 @@
def followRedirects(categories):
- """ If a category is a redirect, replace the category with the target.
"""
+ """If a category is a redirect, replace the category with the
target."""
result = []
for cat in categories:
categoryPage = pywikibot.Page(pywikibot.Site(u'commons',
u'commons'),
@@ -336,7 +336,7 @@
def filterCountries(categories):
- """ Try to filter out ...by country categories.
+ """Try to filter out ...by country categories.
First make a list of any ...by country categories and try to find some
countries. If a by country category has a subcategoy containing one of the
@@ -369,7 +369,7 @@
def filterParents(categories):
- """ Remove all parent categories from the set to prevent
overcategorization. """
+ """Remove all parent categories from the set to prevent
overcategorization."""
result = []
toFilter = u''
for cat in categories:
@@ -394,7 +394,7 @@
def saveImagePage(imagepage, newcats, usage, galleries, onlyFilter):
- """ Remove the old categories and add the new categories to the image.
"""
+ """Remove the old categories and add the new categories to the
image."""
newtext = textlib.removeCategoryLinks(imagepage.text, imagepage.site)
if not onlyFilter:
newtext = removeTemplates(newtext)
diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py
index e9c96ff..2b84d3c 100644
--- a/scripts/maintenance/cache.py
+++ b/scripts/maintenance/cache.py
@@ -59,7 +59,7 @@
class ParseError(Exception):
- """ Error parsing. """
+ """Error parsing."""
class CacheEntry(api.CachedRequest):
@@ -67,7 +67,7 @@
"""A Request cache entry."""
def __init__(self, directory, filename):
- """ Constructor. """
+ """Constructor."""
self.directory = directory
self.filename = filename
@@ -78,11 +78,11 @@
return self._cachefile_path()
def _create_file_name(self):
- """ Filename of the cached entry. """
+ """Filename of the cached entry."""
return self.filename
def _get_cache_dir(self):
- """ Directory of the cached entry. """
+ """Directory of the cached entry."""
return self.directory
def _cachefile_path(self):
@@ -90,13 +90,13 @@
self._create_file_name())
def _load_cache(self):
- """ Load the cache entry. """
+ """Load the cache entry."""
with open(self._cachefile_path(), 'rb') as f:
self.key, self._data, self._cachetime = pickle.load(f)
return True
def parse_key(self):
- """ Parse the key loaded from the cache entry. """
+ """Parse the key loaded from the cache entry."""
# find the start of the first parameter
start = self.key.index('(')
# find the end of the first object
@@ -153,7 +153,7 @@
return self._parsed_key
def _rebuild(self):
- """ Reconstruct the original Request from the key.
"""
+ """Reconstruct the original Request from the
key."""
if hasattr(self, '_parsed_key'):
(site, username, login_status, params) = self._parsed_key
else:
@@ -171,7 +171,7 @@
self._params = dict(eval(params))
def _delete(self):
- """ Delete the cache entry. """
+ """Delete the cache entry."""
os.remove(self._cachefile_path())
@@ -248,25 +248,25 @@
def has_password(entry):
- """ Entry has a password in the entry. """
+ """Entry has a password in the entry."""
if 'lgpassword' in entry._uniquedescriptionstr():
return entry
def is_logout(entry):
- """ Entry is a logout entry. """
+ """Entry is a logout entry."""
if not entry._data and 'logout' in entry.key:
return entry
def empty_response(entry):
- """ Entry has no data. """
+ """Entry has no data."""
if not entry._data and 'logout' not in entry.key:
return entry
def not_accessed(entry):
- """ Entry has never been accessed. """
+ """Entry has never been accessed."""
if not hasattr(entry, 'stinfo'):
return
diff --git a/scripts/match_images.py b/scripts/match_images.py
index 55076e1..915d1be 100644
--- a/scripts/match_images.py
+++ b/scripts/match_images.py
@@ -108,7 +108,7 @@
def get_image_from_image_page(imagePage):
- """ Get the image object to work based on an imagePage object.
"""
+ """Get the image object to work based on an imagePage
object."""
imageBuffer = None
imageURL = imagePage.fileUrl()
imageURLopener = http.fetch(imageURL)
@@ -118,7 +118,7 @@
def match_images(imageA, imageB):
- """ Match two image objects. Return the ratio of pixels that match.
"""
+ """Match two image objects. Return the ratio of pixels that
match."""
histogramA = imageA.histogram()
histogramB = imageB.histogram()
@@ -139,7 +139,7 @@
def main(*args):
- """ Extracting file page information of images to work on and initiate
matching. """
+ """Extracting file page information of images to work on and initiate
matching."""
images = []
other_family = u''
other_lang = u''
diff --git a/scripts/newitem.py b/scripts/newitem.py
index acba3da..89577bb 100644
--- a/scripts/newitem.py
+++ b/scripts/newitem.py
@@ -34,7 +34,7 @@
class NewItemRobot(WikidataBot):
- """ A bot to create new items. """
+ """A bot to create new items."""
def __init__(self, generator, **kwargs):
"""Only accepts options defined in
availableOptions."""
@@ -59,7 +59,7 @@
% (self.lastEdit, self.lastEditBefore.isoformat()))
def treat(self, page, item):
- """ Treat page/item. """
+ """Treat page/item."""
if item and item.exists():
pywikibot.output(u'%s already has an item: %s.' % (page, item))
if self.getOption('touch'):
diff --git a/scripts/protect.py b/scripts/protect.py
index 2bfaade..c5d259a 100644
--- a/scripts/protect.py
+++ b/scripts/protect.py
@@ -63,7 +63,7 @@
class ProtectionRobot(Bot):
- """ This bot allows protection of pages en masse. """
+ """This bot allows protection of pages en masse."""
def __init__(self, generator, protections, **kwargs):
"""
diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 415d94d..f50083b 100644
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -472,7 +472,7 @@
% (err_num, link, pagetitleaslink), toStdout=True)
def getPDFTitle(self, ref, f):
- """ Use pdfinfo to retrieve title from a PDF.
+ """Use pdfinfo to retrieve title from a PDF.
FIXME: Unix-only, I'm afraid.
diff --git a/scripts/states_redirect.py b/scripts/states_redirect.py
index 19c5e1c..f184df5 100644
--- a/scripts/states_redirect.py
+++ b/scripts/states_redirect.py
@@ -71,7 +71,7 @@
self.abbrev[subd.name] = subd.code[3:]
def treat(self, page):
- """ Re-directing process.
+ """Re-directing process.
Check if pages are in the given form Something, State, and
if so, create a redirect from Something, ST..
diff --git a/scripts/version.py b/scripts/version.py
index e50a0b6..fb43be1 100755
--- a/scripts/version.py
+++ b/scripts/version.py
@@ -1,6 +1,6 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-""" Script to determine the Pywikibot version (tag, revision and date).
"""
+"""Script to determine the Pywikibot version (tag, revision and
date)."""
#
# (C) Merlijn 'valhallasw' van Deen, 2007-2008
# (C) xqt, 2010-2014
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py
index ba33030..4c36069 100644
--- a/scripts/weblinkchecker.py
+++ b/scripts/weblinkchecker.py
@@ -501,7 +501,7 @@
class LinkCheckThread(threading.Thread):
- """ A thread responsible for checking one URL.
+ """A thread responsible for checking one URL.
After checking the page, it will die.
"""
@@ -654,7 +654,7 @@
return False
def save(self):
- """ Save the .dat file to disk. """
+ """Save the .dat file to disk."""
with open(self.datfilename, 'wb') as f:
pickle.dump(self.historyDict, f, protocol=config.pickle_protocol)
diff --git a/tests/http_tests.py b/tests/http_tests.py
index 63b5113..3c49acd 100644
--- a/tests/http_tests.py
+++ b/tests/http_tests.py
@@ -334,7 +334,7 @@
config.user_agent_format = self.orig_format
def test_default_user_agent(self):
- """ Config defined format string test. """
+ """Config defined format string test."""
self.assertTrue(http.user_agent().startswith(
pywikibot.calledModuleName()))
self.assertIn('Pywikibot/' + pywikibot.__release__, http.user_agent())
diff --git a/tests/i18n/__init__.py b/tests/i18n/__init__.py
index 66f0234..f1a0213 100644
--- a/tests/i18n/__init__.py
+++ b/tests/i18n/__init__.py
@@ -1 +1 @@
-""" Test i18n data package. """
+"""Test i18n data package."""
diff --git a/tests/i18n/test.py b/tests/i18n/test.py
index de5d92c..44afee0 100644
--- a/tests/i18n/test.py
+++ b/tests/i18n/test.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-""" Test i18n data. """
+"""Test i18n data."""
msg = {
'de': {
'test-plural': u'Bot: Ändere %(num)d {{PLURAL:num|Seite|Seiten}}.',
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index 8a6166f..35e6320 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -792,7 +792,7 @@
class LiveRCPageGeneratorTestCase(WikimediaDefaultSiteTestCase):
- """ Test case for Live Recent Changes pagegenerator.
+ """Test case for Live Recent Changes pagegenerator.
Works best on a busy site, as three changes are requested
"""
diff --git a/tests/ui_tests.py b/tests/ui_tests.py
index 08ddcb8..087a230 100644
--- a/tests/ui_tests.py
+++ b/tests/ui_tests.py
@@ -408,7 +408,7 @@
'\x1b[0m normal text\n\x1b[0m')
def testOutputColorCascade_incorrect(self):
- """ Test incorrect behavior of testOutputColorCascade.
"""
+ """Test incorrect behavior of
testOutputColorCascade."""
pywikibot.output(self.str2)
self.assertEqual(newstdout.getvalue(), '')
self.assertEqual(
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index b0b7cfa..41e90c2 100644
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -69,7 +69,7 @@
self.assertEqual(claim._formatValue(), {'entity-type': 'item',
'numeric-id': 1})
def test_cmp(self):
- """ Test WikibasePage.__cmp__. """
+ """Test WikibasePage.__cmp__."""
self.assertEqual(pywikibot.ItemPage.fromPage(self.mainpage),
pywikibot.ItemPage(self.get_repo(), 'q5296'))
--
To view, visit
https://gerrit.wikimedia.org/r/189740
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ifaf10d3433dddbf68be39deff6478f5f1f4b691d
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Siebrand <siebrand(a)kitano.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>