jenkins-bot has submitted this change and it was merged.
Change subject: PEP257 scripts/
......................................................................
PEP257 scripts/
Used # noqa on Exception docstrings to avoid functional changes to
exception handling.
Change-Id: I07bb75e52ac9cbbf845e08a364526ed049e445a0
---
M scripts/add_text.py
M scripts/archivebot.py
M scripts/blockpageschecker.py
M scripts/blockreview.py
M scripts/capitalize_redirects.py
M scripts/casechecker.py
M scripts/cfd.py
M scripts/interwiki.py
M scripts/maintenance/__init__.py
M scripts/maintenance/compat2core.py
M scripts/maintenance/make_i18n_dict.py
M scripts/maintenance/wikimedia_sites.py
M scripts/makecat.py
M tox.ini
14 files changed, 155 insertions(+), 138 deletions(-)
Approvals:
XZise: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/add_text.py b/scripts/add_text.py
index d90e0cd..a9489dc 100644
--- a/scripts/add_text.py
+++ b/scripts/add_text.py
@@ -1,8 +1,10 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-"""
-This is a Bot written by Filnik to add a text at the end of the page but above
-categories, interwiki and template for the stars of the interwiki (default).
+r"""
+This is a Bot to add a text at the end of the content of the page.
+
+By default it adds the text above categories, interwiki and template
+for the stars of the interwiki.
Alternatively it may also add a text at the top of the page.
These command line parameters can be used to specify which pages to work on:
diff --git a/scripts/archivebot.py b/scripts/archivebot.py
index d57e2fc..2342d55 100644
--- a/scripts/archivebot.py
+++ b/scripts/archivebot.py
@@ -111,29 +111,39 @@
class MalformedConfigError(pywikibot.Error):
+
"""There is an error in the configuration template."""
class MissingConfigError(pywikibot.Error):
- """The config is missing in the header (either it's in one of the
threads
- or transcluded from another page).
+ """
+ The config is missing in the header.
+
+ It's in one of the threads or transcluded from another page.
"""
class AlgorithmError(MalformedConfigError):
+
"""Invalid specification of archiving algorithm."""
class ArchiveSecurityError(pywikibot.Error):
- """Archive is not a subpage of page being archived and key not
specified
- (or incorrect).
+ """
+ Page title is not a valid archive of page being archived.
+
+ The page title is neither a subpage of the page being archived,
+ nor does it match the key specified in the archive configuration template.
"""
def str2localized_duration(site, string):
- """Translate a duration written in the shorthand notation (ex.
"24h", "7d")
+ """
+ Localise a shorthand duration.
+
+ Translates a duration written in the shorthand notation (ex. "24h",
"7d")
into an expression in the local language of the wiki ("24 hours", "7
days").
"""
if string[-1] == 'd':
@@ -148,7 +158,10 @@
def str2time(string):
- """Accepts a string defining a time period:
+ """
+ Return a timedelta for a shorthand duration.
+
+ Accepts a string defining a time period:
7d - 7 days
36h - 36 hours
Returns the corresponding timedelta object.
@@ -162,7 +175,10 @@
def str2size(string):
- """Accepts a string defining a size:
+ """
+ Return a size for a shorthand size.
+
+ Accepts a string defining a size:
1337 - 1337 bytes
150K - 150 kilobytes
2M - 2 megabytes
@@ -209,8 +225,10 @@
class DiscussionThread(object):
- """An object representing a discussion thread on a page, that is
something
- of the form:
+ """
+ An object representing a discussion thread on a page.
+
+ It represents something that is of the form:
== Title of thread ==
@@ -270,8 +288,10 @@
class DiscussionPage(pywikibot.Page):
- """A class that represents a single discussion page as well as an
archive
- page. Feed threads to it and run an update() afterwards.
+ """
+ A class that represents a single page of discussion threads.
+
+ Feed threads to it and run an update() afterwards.
"""
def __init__(self, source, archiver, params=None):
@@ -362,6 +382,7 @@
class PageArchiver(object):
"""A class that encapsulates all archiving methods.
+
__init__ expects a pywikibot.Page object.
Execute by running the .run() method.
"""
@@ -431,6 +452,7 @@
def feed_archive(self, archive, thread, max_archive_size, params=None):
"""Feed the thread to one of the archives.
+
If it doesn't exist yet, create it.
If archive name is an empty string (or None),
discard the thread (/dev/null).
diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py
index 52fd2fb..23785c9 100755
--- a/scripts/blockpageschecker.py
+++ b/scripts/blockpageschecker.py
@@ -1,11 +1,11 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
-This is a script originally written by Wikihermit and then rewritten by Filnik,
-to delete the templates used to warn in the pages that a page is blocked, when
-the page isn't blocked at all. Indeed, very often sysops block the pages for a
-setted time but then the forget to delete the warning! This script is useful if
-you want to delete those useless warning left in these pages.
+A bot to remove stale protection templates from pages that are not protected.
+
+Very often sysops block the pages for a set time but then they forget to
+remove the warning! This script is useful if you want to remove those useless
+warnings left in these pages.
Parameters:
diff --git a/scripts/blockreview.py b/scripts/blockreview.py
index 9abbe12..b4790bd 100644
--- a/scripts/blockreview.py
+++ b/scripts/blockreview.py
@@ -2,6 +2,7 @@
# -*- coding: utf-8 -*-
"""
This bot implements a blocking review process for de-wiki first.
+
For other sites this bot script must be changed.
This script is run by [[de:User:xqt]]. It should
@@ -27,6 +28,9 @@
class BlockreviewBot:
+
+ """Block review bot."""
+
# notes
note_admin = {
'de': u"""
@@ -78,10 +82,11 @@
def __init__(self, dry=False):
"""
- Constructor. Parameters:
- * generator - The page generator that determines on which pages
+ Constructor.
+
+ @param generator: The page generator that determines on which pages
to work on.
- * dry - If True, doesn't do any real changes, but only shows
+ @param dry: If True, doesn't do any real changes, but only shows
what would have been changed.
"""
self.site = pywikibot.Site()
@@ -109,7 +114,7 @@
% page.title(asLink=True))
def treat(self, userPage):
- """Loads the given page, does some changes, and saves
it."""
+ """Load the given page, does some changes, and saves
it."""
talkText = self.load(userPage)
if not talkText:
# sanity check. No talk page found.
@@ -256,7 +261,7 @@
ns=3)
def load(self, page):
- """Loads the given page, does some changes, and saves
it."""
+ """Load the given page and return the page
text."""
try:
# Load the page
text = page.get()
diff --git a/scripts/capitalize_redirects.py b/scripts/capitalize_redirects.py
index d74921c..a86b6e9 100644
--- a/scripts/capitalize_redirects.py
+++ b/scripts/capitalize_redirects.py
@@ -1,8 +1,9 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-
"""
-Bot to create capitalized redirects where the first character of the first
+Bot to create capitalized redirects.
+
+It creates redirects where the first character of the first
word is uppercase and the remaining characters and words are lowercase.
Command-line arguments:
@@ -40,6 +41,9 @@
class CapitalizeBot(Bot):
+
+ """Capitalization Bot."""
+
def __init__(self, generator, **kwargs):
self.availableOptions.update({
'titlecase': False,
diff --git a/scripts/casechecker.py b/scripts/casechecker.py
index 8bc2b23..97d84fe 100644
--- a/scripts/casechecker.py
+++ b/scripts/casechecker.py
@@ -1,8 +1,6 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-""" Script to enumerate all pages on the wiki and find all titles
-with mixed latin and cyrilic alphabets.
-"""
+"""Bot to find all pages on the wiki with mixed latin and cyrilic
alphabets."""
#
# (C) Pywikibot team, 2006-2014
#
@@ -83,6 +81,9 @@
class CaseChecker(object):
+
+ """Case checker."""
+
# These words are always in one language, even though they could be typed
# in both
alwaysInLocal = [u'СССР', u'Как', u'как']
diff --git a/scripts/cfd.py b/scripts/cfd.py
index c29e19d..8687735 100644
--- a/scripts/cfd.py
+++ b/scripts/cfd.py
@@ -1,8 +1,9 @@
# -*- coding: utf-8 -*-
"""
-This script processes the Categories for discussion working page. It parses
-out the actions that need to be taken as a result of CFD discussions (as posted
-to the working page by an administrator) and performs them.
+This script processes the Categories for discussion working page.
+
+It parses out the actions that need to be taken as a result of CFD discussions
+(as posted to the working page by an administrator) and performs them.
Syntax: python cfd.py
@@ -46,6 +47,9 @@
class ReCheck:
+
+ """Helper class."""
+
def __init__(self):
self.result = None
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 89a04a6..396a085 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1,8 +1,9 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
-Script to check language links for general pages. This works by downloading the
-page, and using existing translations plus hints from the command line to
+Script to check language links for general pages.
+
+Uses existing translations of a page, plus hints from the command line, to
download the equivalent pages from other languages. All of such pages are
downloaded as well and checked for interwiki links recursively until there are
no more links that are encountered. A rationalization process then selects the
@@ -369,7 +370,7 @@
"""An attempt to save a page with changed interwiki has
failed."""
-class LinkMustBeRemoved(SaveError):
+class LinkMustBeRemoved(SaveError): # noqa
"""
An interwiki link has to be removed, but this can't be done because of user
@@ -380,6 +381,8 @@
class GiveUpOnPage(pywikibot.Error):
"""The user chose not to work on this page and its linked pages any
more."""
+
+ pass
# Subpage templates. Must be in lower case,
@@ -456,6 +459,7 @@
"""
Container class for global settings.
+
Use of globals outside of this is to be avoided.
"""
@@ -624,9 +628,10 @@
class StoredPage(pywikibot.Page):
"""
- Store the Page contents on disk to avoid sucking too much
- memory when a big number of Page objects will be loaded
- at the same time.
+ Store the Page contents on disk.
+
+ This is to avoid sucking too much memory when a big number of Page objects
+ will be loaded at the same time.
"""
# Please prefix the class members names by SP
@@ -695,6 +700,7 @@
"""
Structure to manipulate a set of pages.
+
Allows filtering efficiently by Site.
"""
@@ -723,7 +729,7 @@
self.size = 0
def filter(self, site):
- """Iterates over pages that are in Site site."""
+ """Iterate over pages that are in Site site."""
try:
for page in self.tree[site]:
yield page
@@ -748,7 +754,7 @@
pass
def removeSite(self, site):
- """Removes all pages from Site site."""
+ """Remove all pages from Site site."""
try:
self.size -= len(self.tree[site])
del self.tree[site]
@@ -756,7 +762,7 @@
pass
def siteCounts(self):
- """Yields (Site, number of pages in site) pairs"""
+ """Yield (Site, number of pages in site) pairs."""
for site, d in self.tree.items():
yield site, len(d)
@@ -768,10 +774,10 @@
class Subject(object):
- """
- Class to follow the progress of a single 'subject' (i.e. a page with
- all its translations)
+ u"""
+ Class to follow the progress of a single 'subject'.
+ (i.e. a page with all its translations)
Subject is a transitive closure of the binary relation on Page:
"has_a_langlink_pointing_to".
@@ -826,9 +832,12 @@
"""
def __init__(self, originPage=None, hints=None):
- """Constructor. Takes as arguments the Page on the home wiki
- plus optionally a list of hints for translation
- """
+ """
+ Constructor.
+
+ Takes as arguments the Page on the home wiki
+ plus optionally a list of hints for translation
+ """
if globalvar.contentsondisk:
if originPage:
originPage = StoredPage(originPage)
@@ -869,6 +878,8 @@
def getFoundDisambig(self, site):
"""
+ Return the first disambiguation found.
+
If we found a disambiguation on the given site while working on the
subject, this method returns it. If several ones have been found, the
first one will be returned.
@@ -881,6 +892,8 @@
def getFoundNonDisambig(self, site):
"""
+ Return the first non-disambiguation found.
+
If we found a non-disambiguation on the given site while working on the
subject, this method returns it. If several ones have been found, the
first one will be returned.
@@ -894,6 +907,8 @@
def getFoundInCorrectNamespace(self, site):
"""
+ Return the first page in the extended namespace.
+
If we found a page that has the expected namespace on the given site
while working on the subject, this method returns it. If several ones
have been found, the first one will be returned.
@@ -942,7 +957,9 @@
def openSites(self):
"""
- Iterator. Yields (site, count) pairs:
+ Iterator.
+
+ Yields (site, count) pairs:
* site is a site where we still have work to do on
* count is the number of items in that Site that need work on
"""
@@ -950,6 +967,8 @@
def whatsNextPageBatch(self, site):
"""
+ Return the next page batch.
+
By calling this method, you 'promise' this instance that you will
preload all the 'site' Pages that are in the todo list.
@@ -972,7 +991,7 @@
return result
def makeForcedStop(self, counter):
- """Ends work on the page before the normal end."""
+ """End work on the page before the normal end."""
for site, count in self.todo.siteCounts():
counter.minus(site, count)
self.todo = PageTree()
@@ -980,8 +999,9 @@
def addIfNew(self, page, counter, linkingPage):
"""
- Adds the pagelink given to the todo list, but only if we didn't know
- it before. If it is added, update the counter accordingly.
+    Add the pagelink given to the todo list, if it hasn't been seen yet.
+
+ If it is added, update the counter accordingly.
Also remembers where we found the page, regardless of whether it had
already been found before or not.
@@ -1020,8 +1040,7 @@
def namespaceMismatch(self, linkingPage, linkedPage, counter):
"""
- Checks whether or not the given page has another namespace
- than the origin page.
+ Check whether or not the given page has a different namespace.
Returns True if the namespaces are different and the user
has selected not to follow the linked page.
@@ -1110,8 +1129,7 @@
def disambigMismatch(self, page, counter):
"""
- Checks whether or not the given page has the another disambiguation
- status than the origin page.
+ Check whether the given page has a different disambiguation status.
Returns a tuple (skip, alternativePage).
@@ -1247,6 +1265,8 @@
def batchLoaded(self, counter):
"""
+ Notify that the promised batch of pages was loaded.
+
This is called by a worker to tell us that the promised batch of
pages was loaded.
In other words, all the pages in self.pending have already
@@ -1603,11 +1623,12 @@
return result
def finish(self):
- """Round up the subject, making any necessary changes. This
method
- should be called exactly once after the todo list has gone empty.
+ """
+ Round up the subject, making any necessary changes.
+
+ This should be called exactly once after the todo list has gone empty.
"""
-
if not self.isDone():
raise Exception("Bugcheck: finish called before done")
if not self.workonme:
@@ -1805,7 +1826,7 @@
del page._contents
def replaceLinks(self, page, newPages):
- """Returns True if saving was successful."""
+ """Return True if saving was successful."""
if globalvar.localonly:
# In this case only continue on the Page we started with
if page != self.originPage:
@@ -2087,11 +2108,15 @@
class InterwikiBot(object):
- """A class keeping track of a list of subjects, controlling which
pages
- are queried from which languages when."""
+
+ """
+ A class keeping track of a list of subjects.
+
+ It controls which pages are queried from which languages when.
+ """
def __init__(self):
- """Constructor. We always start with empty
lists."""
+ """Constructor."""
self.subjects = []
# We count how many pages still need to be loaded per site.
# This allows us to find out from which site to retrieve pages next
@@ -2111,9 +2136,12 @@
self.plus(site, count)
def setPageGenerator(self, pageGenerator, number=None, until=None):
- """Add a generator of subjects. Once the list of subjects gets
- too small, this generator is called to produce more Pages
- """
+ """
+ Add a generator of subjects.
+
+ Once the list of subjects gets too small,
+ this generator is called to produce more Pages.
+ """
self.pageGenerator = pageGenerator
self.generateNumber = number
self.generateUntil = until
@@ -2136,10 +2164,12 @@
return dumpfn
def generateMore(self, number):
- """Generate more subjects. This is called internally when the
- list of subjects becomes too small, but only if there is a
- PageGenerator
- """
+ """Generate more subjects.
+
+ This is called internally when the
+ list of subjects becomes too small, but only if there is a
+ PageGenerator
+ """
fs = self.firstSubject()
if fs and (not globalvar.quiet):
pywikibot.output(u"NOTE: The first unfinished subject is %s"
@@ -2206,11 +2236,12 @@
return self.subjects[0]
def maxOpenSite(self):
- """Return the site that has the most
- open queries plus the number. If there is nothing left, return
- None. Only languages that are TODO for the first Subject
- are returned.
- """
+ """
+ Return the site that has the most open queries plus the number.
+
+ If there is nothing left, return None.
+ Only languages that are TODO for the first Subject are returned.
+ """
max = 0
maxlang = None
if not self.firstSubject():
@@ -2318,14 +2349,14 @@
return len(self) == 0 and self.pageGenerator is None
def plus(self, site, count=1):
- """This is a routine that the Subject class expects in a
counter."""
+ """Helper routine that the Subject class expects in a
counter."""
try:
self.counts[site] += count
except KeyError:
self.counts[site] = count
def minus(self, site, count=1):
- """This is a routine that the Subject class expects in a
counter."""
+ """Helper routine that the Subject class expects in a
counter."""
self.counts[site] -= count
def run(self):
diff --git a/scripts/maintenance/__init__.py b/scripts/maintenance/__init__.py
index c47da75..ee430b4 100644
--- a/scripts/maintenance/__init__.py
+++ b/scripts/maintenance/__init__.py
@@ -1 +1,2 @@
# THIS DIRECTORY IS TO HOLD BOT SCRIPTS FOR THE NEW FRAMEWORK
+"""Maintenance scripts."""
diff --git a/scripts/maintenance/compat2core.py b/scripts/maintenance/compat2core.py
index 90b318f..5311aab 100644
--- a/scripts/maintenance/compat2core.py
+++ b/scripts/maintenance/compat2core.py
@@ -1,8 +1,7 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
-This is a helper script to convert compat 1.0 scripts to the new core 2.0
-framework.
+A helper script to convert compat 1.0 scripts to the new core 2.0 framework.
NOTE: Please be aware that this script is not able to convert your codes
completely. It may support you with some automatic replacements and it gives
@@ -113,6 +112,8 @@
class ConvertBot(object):
+ """Script conversion bot."""
+
def __init__(self, filename=None, warnonly=False):
self.source = filename
self.warnonly = warnonly
diff --git a/scripts/maintenance/make_i18n_dict.py
b/scripts/maintenance/make_i18n_dict.py
index cd55a70..590ead5 100644
--- a/scripts/maintenance/make_i18n_dict.py
+++ b/scripts/maintenance/make_i18n_dict.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
"""
-Generate a i18n file from a given script
+Generate an i18n file from a given script.
usage:
@@ -28,6 +28,8 @@
class i18nBot(object):
+ """I18n bot."""
+
def __init__(self, script, *args):
modules = script.split('.')
self.scriptname = modules[0]
diff --git a/scripts/maintenance/wikimedia_sites.py
b/scripts/maintenance/wikimedia_sites.py
index 785b51b..83f2080 100644
--- a/scripts/maintenance/wikimedia_sites.py
+++ b/scripts/maintenance/wikimedia_sites.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-"""
-This script checks the language list of each Wikimedia multiple-language site
-against the language lists
-"""
+"""Script that updates the language lists in Wikimedia family
files."""
#
# (C) xqt, 2009-2014
# (C) Pywikibot team, 2008-2014
diff --git a/scripts/makecat.py b/scripts/makecat.py
index 0b28870..b92cbd3 100644
--- a/scripts/makecat.py
+++ b/scripts/makecat.py
@@ -48,7 +48,7 @@
def isdate(s):
- """returns true if s is a date or year."""
+ """Return true if s is a date or year."""
dict, val = date.getAutoFormat(pywikibot.Site().language(), s)
return dict is not None
diff --git a/tox.ini b/tox.ini
index e13d4d9..2ece018 100644
--- a/tox.ini
+++ b/tox.ini
@@ -48,60 +48,7 @@
./pywikibot/data/api.py \
./pywikibot/userinterfaces/transliteration.py \
./pywikibot/userinterfaces/terminal_interface.py \
- ./scripts/__init__.py \
- ./scripts/basic.py \
- ./scripts/category.py \
- ./scripts/category_redirect.py \
- ./scripts/claimit.py \
- ./scripts/clean_sandbox.py \
- ./scripts/commons_link.py \
- ./scripts/commonscat.py \
- ./scripts/coordinate_import.py \
- ./scripts/cosmetic_changes.py \
- ./scripts/create_categories.py \
- ./scripts/data_ingestion.py \
- ./scripts/delete.py \
- ./scripts/editarticle.py \
- ./scripts/flickrripper.py \
- ./scripts/freebasemappingupload.py \
- ./scripts/harvest_template.py \
- ./scripts/illustrate_wikidata.py \
- ./scripts/image.py \
- ./scripts/imagerecat.py \
- ./scripts/imagetransfer.py \
- ./scripts/imageuncat.py \
- ./scripts/isbn.py \
- ./scripts/listpages.py \
- ./scripts/login.py \
- ./scripts/lonelypages.py \
- ./scripts/newitem.py \
- ./scripts/misspelling.py \
- ./scripts/movepages.py \
- ./scripts/noreferences.py \
- ./scripts/nowcommons.py \
- ./scripts/pagefromfile.py \
- ./scripts/protect.py \
- ./scripts/redirect.py \
- ./scripts/reflinks.py \
- ./scripts/replace.py \
- ./scripts/replicate_wiki.py \
- ./scripts/revertbot.py \
- ./scripts/script_wui.py \
- ./scripts/selflink.py \
- ./scripts/shell.py \
- ./scripts/spamremove.py \
- ./scripts/template.py \
- ./scripts/templatecount.py \
- ./scripts/touch.py \
- ./scripts/transferbot.py \
- ./scripts/unlink.py \
- ./scripts/unusedfiles.py \
- ./scripts/version.py \
- ./scripts/watchlist.py \
- ./scripts/weblinkchecker.py \
- ./scripts/welcome.py \
- ./scripts/maintenance/cache.py \
- ./tests/
+ ./scripts/ ./tests/
deps = flake8>=2.2.5
flake8-docstrings
--
To view, visit
https://gerrit.wikimedia.org/r/169689
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I07bb75e52ac9cbbf845e08a364526ed049e445a0
Gerrit-PatchSet: 3
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Siebrand <siebrand(a)kitano.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>