XZise has submitted this change and it was merged.
Change subject: PropertyPage.get() fails
......................................................................
PropertyPage.get() fails
The PropertyPage.get() method attempts to set 'type', which is a
property of the superclass Property that has no setter, resulting
in an AttributeError:
self.type = self._content['datatype']
AttributeError: can't set attribute
Fix by putting the datatype into the underlying _type attribute.
Also use the 'type' property in PropertyPage.newClaim so that each newly
created Claim does not need to fetch the property type.
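
For context, a minimal sketch (not part of the change itself) of the failure
mode; the class below only mirrors the relevant shape of Property and uses an
illustrative datatype value:

    class Property(object):
        def __init__(self, datatype=None):
            self._type = datatype

        @property
        def type(self):
            # Read-only: no setter is defined for this property.
            return self._type

    p = Property()
    try:
        p.type = 'globe-coordinate'    # what get() used to attempt
    except AttributeError as error:
        print(error)                   # "can't set attribute"

    p._type = 'globe-coordinate'       # the fix: write the backing attribute
    print(p.type)                      # globe-coordinate

With the backing attribute populated, newClaim() can hand datatype=self.type
straight to Claim, so each new claim reuses the cached value.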
Change-Id: I845ec3ad91017e386e568ee5bcb85fd48644adfb
---
M pywikibot/page.py
M tests/wikibase_tests.py
2 files changed, 22 insertions(+), 2 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
XZise: Looks good to me, approved
diff --git a/pywikibot/page.py b/pywikibot/page.py
index fb5bffe..8052f0b 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -3172,7 +3172,7 @@
"""
if force or not hasattr(self, '_content'):
WikibasePage.get(self, force=force, *args)
- self.type = self._content['datatype']
+ self._type = self._content['datatype']
def newClaim(self, *args, **kwargs):
"""
@@ -3180,7 +3180,8 @@
@return: Claim
"""
- return Claim(self.site, self.getID(), *args, **kwargs)
+ return Claim(self.site, self.getID(), datatype=self.type,
+ *args, **kwargs)
class Claim(Property):
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index a9142dd..67df427 100644
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -384,6 +384,25 @@
self.assertEquals(claim.type, 'globe-coordinate')
self.assertEquals(claim.getType(), 'globecoordinate')
+ def test_get(self):
+ property_page = pywikibot.PropertyPage(wikidata, 'P625')
+ property_page.get()
+ self.assertEquals(property_page.type, 'globe-coordinate')
+
+ def test_new_claim(self):
+ """Test that PropertyPage.newClaim uses cached datatype."""
+ property_page = pywikibot.PropertyPage(wikidata, 'P625')
+ property_page.get()
+ claim = property_page.newClaim()
+ self.assertEquals(claim.type, 'globe-coordinate')
+
+ # Now verify that it isn't fetching the type from the property
+ # data in the repo by setting the cache to the incorrect type
+ # and checking that it is the cached value that is used.
+ property_page._type = 'wikibase-item'
+ claim = property_page.newClaim()
+ self.assertEquals(claim.type, 'wikibase-item')
+
class TestClaimSetValue(PywikibotTestCase):
--
To view, visit https://gerrit.wikimedia.org/r/154012
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I845ec3ad91017e386e568ee5bcb85fd48644adfb
Gerrit-PatchSet: 3
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: [sync] Synchronize version with compat using "'" instead of '"'
......................................................................
[sync] Synchronize version with compat using "'" instead of '"'
Change-Id: Ie0be0baa5eea030449b5c16d30a68ff3865aa3cf
---
M pywikibot/families/lyricwiki_family.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
XZise: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/families/lyricwiki_family.py b/pywikibot/families/lyricwiki_family.py
index dc84ff4..799bb85 100644
--- a/pywikibot/families/lyricwiki_family.py
+++ b/pywikibot/families/lyricwiki_family.py
@@ -24,7 +24,7 @@
def version(self, code):
"""Return the version for this family."""
- return "1.19.18"
+ return '1.19.18'
def scriptpath(self, code):
"""Return the script path for this family."""
--
To view, visit https://gerrit.wikimedia.org/r/158937
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ie0be0baa5eea030449b5c16d30a68ff3865aa3cf
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>
XZise has submitted this change and it was merged.
Change subject: pep257-D205 fixes in pywikibot/
......................................................................
pep257-D205 fixes in pywikibot/
Fixes all D205 issues within the library,
and miscellaneous pep257 issues in the same code.
Also changes wikidataquery.Query.__str__ to raise
NotImplementedError instead of NotImplemented.
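
Two background notes, with a small illustrative snippet (the docstring text
below is only for demonstration): pep257's D205 requires one blank line
between a docstring's summary line and its description, and raising the
NotImplemented constant is itself a bug, since NotImplemented is a value meant
to be returned from binary special methods; raising it triggers a TypeError,
whereas NotImplementedError is the exception intended for unimplemented
methods.

    class Query(object):
        """Represent a single WikidataQuery query fragment.

        The blank line above this description is what pep257 check D205
        requires after the one-line summary.
        """

        def __str__(self):
            # `raise NotImplemented` would fail with a TypeError such as
            # "exceptions must derive from BaseException"; this is the
            # exception actually meant for unimplemented methods.
            raise NotImplementedError

    try:
        str(Query())
    except NotImplementedError:
        print('subclasses must implement __str__')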
Change-Id: I1bd1375ffe01025b43c307cd1828471fbb5e05d9
---
M pywikibot/__init__.py
M pywikibot/comms/threadedhttp.py
M pywikibot/data/api.py
M pywikibot/data/wikidataquery.py
M pywikibot/date.py
M pywikibot/editor.py
M pywikibot/exceptions.py
M pywikibot/i18n.py
M pywikibot/interwiki_graph.py
M pywikibot/logentries.py
M pywikibot/login.py
M pywikibot/page.py
M pywikibot/pagegenerators.py
M pywikibot/site.py
M pywikibot/textlib.py
M pywikibot/tools.py
M pywikibot/userinterfaces/gui.py
M pywikibot/userinterfaces/terminal_interface_base.py
M pywikibot/version.py
M pywikibot/weblib.py
M pywikibot/xmlreader.py
21 files changed, 380 insertions(+), 192 deletions(-)
Approvals:
XZise: Looks good to me, approved
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index 184d875..e25bdc0 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -123,15 +123,15 @@
@classmethod
def fromtimestampformat(cls, ts):
- """Convert the internal MediaWiki timestamp format to a Timestamp object."""
+ """Convert a MediaWiki internal timestamp to a Timestamp object."""
return cls.strptime(ts, cls.mediawikiTSFormat)
def toISOformat(self):
- """Convert the Timestamp object to an ISO 8601 timestamp."""
+ """Convert object to an ISO 8601 timestamp."""
return self.strftime(self.ISO8601Format)
def totimestampformat(self):
- """Convert the Timestamp object to the internal MediaWiki timestamp format."""
+ """Convert object to a MediaWiki internal timestamp."""
return self.strftime(self.mediawikiTSFormat)
def __str__(self):
@@ -254,6 +254,8 @@
@property
def precision(self):
"""
+ Return the precision of the geo coordinate.
+
The biggest error (in degrees) will be given by the longitudinal error - the same error in meters becomes larger
(in degrees) further up north. We can thus ignore the latitudinal error.
@@ -431,6 +433,11 @@
@staticmethod
def fromWikibase(wb):
+ """
+ Create a WbQuantity from the JSON data given by the Wikibase API.
+
+ @param wb: Wikibase JSON
+ """
amount = eval(wb['amount'])
upperBound = eval(wb['upperBound'])
lowerBound = eval(wb['lowerBound'])
diff --git a/pywikibot/comms/threadedhttp.py b/pywikibot/comms/threadedhttp.py
index 9c85179..790190b 100644
--- a/pywikibot/comms/threadedhttp.py
+++ b/pywikibot/comms/threadedhttp.py
@@ -46,6 +46,8 @@
def __init__(self, maxnum=5):
"""
+ Constructor.
+
@param maxnum: Maximum number of connections per identifier.
The pool drops excessive connections added.
@@ -132,6 +134,8 @@
"""
def __init__(self, *args, **kwargs):
"""
+ Constructor.
+
@param cookiejar: (optional) CookieJar to use. A new one will be
used when not supplied.
@param connection_pool: (optional) Connection pool to use. A new one
@@ -244,7 +248,7 @@
def _follow_redirect(self, uri, method, body, headers, response,
content, max_redirects):
- """Internal function to follow a redirect recieved by L{request}"""
+ """Internal function to follow a redirect recieved by L{request}."""
(scheme, authority, absolute_uri,
defrag_uri) = httplib2.urlnorm(httplib2.iri2uri(uri))
if self.cache:
@@ -308,7 +312,11 @@
"""
def __init__(self, *args, **kwargs):
- """See C{Http.request} for parameters."""
+ """
+ Constructor.
+
+ See C{Http.request} for parameters.
+ """
self.args = args
self.kwargs = kwargs
self.data = None
@@ -319,6 +327,8 @@
"""Thread object to spawn multiple HTTP connection threads."""
def __init__(self, queue, cookiejar, connection_pool):
"""
+ Constructor.
+
@param queue: The C{Queue.Queue} object that contains L{HttpRequest}
objects.
@param cookiejar: The C{LockableCookieJar} cookie object to share among
@@ -380,8 +390,9 @@
# ========================================================================
class DummyRequest(object):
- """Simulated urllib2.Request object for httplib2
- implements only what's necessary for cookielib.CookieJar to work
+ """Simulated urllib2.Request object for httplib2.
+
+ Implements only what's necessary for cookielib.CookieJar to work.
"""
def __init__(self, url, headers=None):
self.url = url
@@ -424,8 +435,9 @@
class DummyResponse(object):
- """Simulated urllib2.Request object for httplib2
- implements only what's necessary for cookielib.CookieJar to work
+ """Simulated urllib2.Request object for httplib2.
+
+ Implements only what's necessary for cookielib.CookieJar to work.
"""
def __init__(self, response):
self.response = response
@@ -435,8 +447,9 @@
class DummyMessage(object):
- """Simulated mimetools.Message object for httplib2
- implements only what's necessary for cookielib.CookieJar to work
+ """Simulated mimetools.Message object for httplib2.
+
+ Implements only what's necessary for cookielib.CookieJar to work.
"""
def __init__(self, response):
self.response = response
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index c36f88e..a7b90c4 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
-"""
-Interface functions to Mediawiki's api.php
-"""
+"""Interface to Mediawiki's api.php."""
#
# (C) Pywikibot team, 2007-2014
#
@@ -141,6 +139,7 @@
"""
def __init__(self, **kwargs):
+ """Constructor."""
try:
self.site = kwargs.pop("site")
except KeyError:
@@ -713,7 +712,9 @@
@property
def __modules(self):
"""
- Instance cache: hold the query data for paraminfo on
+ Cache paraminfo in this request's Site object.
+
+ Hold the query data for paraminfo on
querymodule=self.module at self.site.
"""
@@ -938,6 +939,8 @@
def __init__(self, generator, g_content=False, **kwargs):
"""
+ Constructor.
+
Required and optional parameters are as for C{Request}, except that
action=query is assumed and generator is required.
@@ -999,7 +1002,7 @@
class PropertyGenerator(QueryGenerator):
- """Iterator for queries of type action=query&prop=...
+ """Iterator for queries of type action=query&prop=foo.
See the API documentation for types of page properties that can be
queried.
@@ -1014,6 +1017,8 @@
def __init__(self, prop, **kwargs):
"""
+ Constructor.
+
Required and optional parameters are as for C{Request}, except that
action=query is assumed and prop is required.
@@ -1027,7 +1032,7 @@
class ListGenerator(QueryGenerator):
- """Iterator for queries of type action=query&list=...
+ """Iterator for queries of type action=query&list=foo.
See the API documentation for types of lists that can be queried. Lists
include both side-wide information (such as 'allpages') and page-specific
@@ -1043,6 +1048,8 @@
def __init__(self, listaction, **kwargs):
"""
+ Constructor.
+
Required and optional parameters are as for C{Request}, except that
action=query is assumed and listaction is required.
@@ -1056,11 +1063,13 @@
class LogEntryListGenerator(ListGenerator):
"""
- Like ListGenerator, but specialized for listaction="logevents" :
- yields LogEntry objects instead of dicts.
+ Iterator for queries of list 'logevents'.
+
+ Yields LogEntry objects instead of dicts.
"""
def __init__(self, logtype=None, **kwargs):
+ """Constructor."""
ListGenerator.__init__(self, "logevents", **kwargs)
from pywikibot import logentries
@@ -1124,7 +1133,7 @@
def update_page(page, pagedict):
- """Update attributes of Page object page, based on query data in pagedict
+ """Update attributes of Page object page, based on query data in pagedict.
@param page: object to be updated
@type page: Page
diff --git a/pywikibot/data/wikidataquery.py b/pywikibot/data/wikidataquery.py
index 82da2b2..62f750f 100644
--- a/pywikibot/data/wikidataquery.py
+++ b/pywikibot/data/wikidataquery.py
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
-"""
-Objects representing WikidataQuery query syntax and API
-"""
+"""Objects representing WikidataQuery query syntax and API."""
#
# (C) Pywikibot team, 2013
#
@@ -26,15 +24,16 @@
def listify(x):
"""
- If given a non-list , encapsulate in a single-element list
+ If given a non-list, encapsulate in a single-element list.
"""
return x if isinstance(x, list) else [x]
class QuerySet():
"""
- A QuerySet represents a set of queries or other query sets, joined
- by operators (AND and OR).
+ A QuerySet represents a set of queries or other query sets.
+
+ Queries may be joined by operators (AND and OR).
A QuerySet stores this information as a list of Query(Sets) and
a joiner operator to join them all together
@@ -42,7 +41,7 @@
def __init__(self, q):
"""
- Initialise a query set from a Query or another QuerySet
+ Initialise a query set from a Query or another QuerySet.
"""
self.qs = [q]
@@ -72,21 +71,21 @@
def AND(self, args):
"""
- Add the given args (Queries or QuerySets) to the Query set as a
- logical conjuction (AND)
+ Add the given args (Queries or QuerySets) to the Query set as a logical conjunction (AND).
"""
return self.addJoiner(args, "AND")
def OR(self, args):
"""
- Add the given args (Queries or QuerySets) to the Query set as a
- logical disjunction (AND)
+ Add the given args (Queries or QuerySets) to the Query set as a logical disjunction (OR).
"""
return self.addJoiner(args, "OR")
def __str__(self):
"""
- Output as an API-ready string
+ Output as an API-ready string.
+
+ @rtype: str
"""
def bracketIfQuerySet(q):
@@ -107,8 +106,11 @@
class Query():
+
"""
- A query is a single query for the WikidataQuery API, for example
+ A query is a single query for the WikidataQuery API.
+
+ For example:
claim[100:60] or link[enwiki]
Construction of a Query can throw a TypeError if you feed it bad
@@ -117,34 +119,34 @@
def AND(self, ands):
"""
- Produce a query set ANDing this query and all the given query/sets
+ Produce a query set ANDing this query and all the given query/sets.
"""
return QuerySet(self).addJoiner(ands, "AND")
def OR(self, ors):
"""
- Produce a query set ORing this query and all the given query/sets
+ Produce a query set ORing this query and all the given query/sets.
"""
return QuerySet(self).addJoiner(ors, "OR")
def formatItem(self, item):
"""
- Default item formatting is string, which will work for queries,
- querysets, ints and strings
+ Default item formatting is string.
+
+ This will work for queries, querysets, ints and strings
"""
return str(item)
def formatList(self, l):
"""
- Format and comma-join a list
+ Format and comma-join a list.
"""
return ",".join([self.formatItem(x) for x in l])
@staticmethod
def isOrContainsOnlyTypes(items, types):
"""
- Either this item is one of the given types, or it is a list of
- only those types
+ Either this item is one of the given types, or it is a list of only those types.
"""
if isinstance(items, list):
for x in items:
@@ -170,8 +172,13 @@
def validate(self):
"""
+ Validate the query parameters.
+
Default validate result is a pass - subclasses need to implement
- this if they want to check their parameters
+ this if they want to check their parameters.
+
+ @return: True
+ @rtype: bool
"""
return True
@@ -181,8 +188,9 @@
def convertWDType(self, item):
"""
- Convert WD items like ItemPage or PropertyPage into integer IDs
- for use in query strings.
+ Convert Wikibase items like ItemPage or PropertyPage into integer IDs.
+
+ The resulting IDs may be used in query strings.
@param item A single item. One of ItemPages, PropertyPages, int
or anything that can be fed to int()
@@ -199,10 +207,13 @@
def __str__(self):
"""
- The __str__ method is critical, as this is what generates
- the string to be passed to the API
+ Generate a query string to be passed to the WDQ API.
+
+ Sub-classes must override this method.
+
+ @raise NotImplementedError: Always raised by this abstract method
"""
- raise NotImplemented
+ raise NotImplementedError
def __repr__(self):
return u"Query(%s)" % self
@@ -210,13 +221,16 @@
class HasClaim(Query):
"""
- This is a Query of the form "claim[prop:val]". It is subclassed by
+ This is a Query of the form "claim[prop:val]".
+
+ It is subclassed by
the other similar forms like noclaim and string
"""
queryType = "claim"
def __init__(self, prop, items=[]):
+ """Constructor."""
self.prop = self.convertWDType(prop)
if isinstance(items, Tree):
@@ -251,13 +265,13 @@
class StringClaim(HasClaim):
"""
- Query of the form string[PROPERTY:"STRING",...]
+ Query of the form string[PROPERTY:"STRING",...].
"""
queryType = "string"
def formatItem(self, x):
"""
- Strings need quote-wrapping
+ Strings need quote-wrapping.
"""
return '"%s"' % x
@@ -267,12 +281,14 @@
class Tree(Query):
"""
- Query of the form tree[ITEM,...][PROPERTY,...]<PROPERTY,...>
+ Query of the form tree[ITEM,...][PROPERTY,...]<PROPERTY,...>.
"""
queryType = "tree"
def __init__(self, item, forward=[], reverse=[]):
"""
+ Constructor.
+
@param item The root item
@param forward List of forward properties, can be empty
@param reverse List of reverse properties, can be empty
@@ -307,11 +323,12 @@
class Around(Query):
"""
- A query in the form around[PROPERTY,LATITUDE,LONGITUDE,RADIUS]
+ A query in the form around[PROPERTY,LATITUDE,LONGITUDE,RADIUS].
"""
queryType = "around"
def __init__(self, prop, coord, rad):
+ """Constructor."""
self.prop = self.convertWDType(prop)
self.lt = coord.lat
self.lg = coord.lon
@@ -327,7 +344,7 @@
class Between(Query):
"""
- A query in the form between[PROP, BEGIN, END]
+ A query in the form between[PROP, BEGIN, END].
You have to give prop and one of begin or end. Note that times have
to be in UTC, timezones are not supported by the API
@@ -339,6 +356,7 @@
queryType = "between"
def __init__(self, prop, begin=None, end=None):
+ """Constructor."""
self.prop = self.convertWDType(prop)
self.begin = begin
self.end = end
@@ -357,7 +375,7 @@
class Link(Query):
"""
- A query in the form link[LINK,...], which also includes nolink
+ A query in the form link[LINK,...], which also includes nolink.
All link elements have to be strings, or validation will throw
"""
@@ -365,6 +383,7 @@
queryType = "link"
def __init__(self, link):
+ """Constructor."""
self.link = listify(link)
self.validateOrRaise()
@@ -381,7 +400,7 @@
def fromClaim(claim):
"""
- Construct from a pywikibot.page Claim object
+ Construct from a pywikibot.page Claim object.
"""
if not isinstance(claim, Claim):
@@ -398,7 +417,9 @@
class WikidataQuery():
"""
- An interface to the WikidataQuery API. Default host is
+ An interface to the WikidataQuery API.
+
+ Default host is
https://wdq.wmflabs.org/, but you can substitute
a different one.
@@ -410,6 +431,7 @@
def __init__(self, host="https://wdq.wmflabs.org", cacheDir=None,
cacheMaxAge=60):
+ """Constructor."""
self.host = host
self.cacheMaxAge = cacheMaxAge
@@ -424,7 +446,8 @@
def getQueryString(self, q, labels=[], props=[]):
"""
- Get the query string for a given query or queryset
+ Get the query string for a given query or queryset.
+
@return query string including labels and props
"""
qStr = "q=%s" % quote(str(q))
@@ -439,16 +462,16 @@
def getCacheFilename(self, queryStr):
"""
- Encode a query into a unique and universally safe format
+ Encode a query into a unique and universally safe format.
"""
encQuery = hashlib.sha1(queryStr).hexdigest() + ".wdq_cache"
return os.path.join(self.cacheDir, encQuery)
def readFromCache(self, queryStr):
"""
- Check if we have cached this data recently enough, read it
- if we have. Returns None if the data is not there or if it is
- too old
+ Load the query result from the cache, if possible.
+
+ Returns None if the data is not there or if it is too old.
"""
if self.cacheMaxAge <= 0:
@@ -475,8 +498,9 @@
def saveToCache(self, q, data):
"""
- Save data from a query to a cache file, if enabled
- @ returns nothing
+ Save data from a query to a cache file, if enabled.
+
+ No return value.
"""
if self.cacheMaxAge <= 0:
@@ -500,7 +524,7 @@
def getDataFromHost(self, queryStr):
"""
- Go and fetch a query from the host's API
+ Go and fetch a query from the host's API.
"""
url = self.getUrl(queryStr)
@@ -520,7 +544,8 @@
def query(self, q, labels=[], props=[]):
"""
- Actually run a query over the API
+ Actually run a query over the API.
+
@return Python dict of the interpreted JSON or None on failure
"""
diff --git a/pywikibot/date.py b/pywikibot/date.py
index 1d39f77..60edd0d 100644
--- a/pywikibot/date.py
+++ b/pywikibot/date.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
-"""
-This file is not runnable, but it only consists of various
-lists which are required by some other programs.
-"""
+"""Date data and manipulation module."""
#
# (C) Rob W.W. Hooft, 2003
# (C) Daniel Herding, 2004
@@ -47,8 +44,11 @@
def multi(value, tuplst):
- """This method is used when more than one pattern is used for the same
- entry. Example: 1st century, 2nd century, etc.
+ """
+ Run multiple pattern checks for the same entry.
+
+ For example: 1st century, 2nd century, etc.
+
The tuplst is a list of tupples. Each tupple must contain two functions:
first to encode/decode a single value (e.g. simpleInt), second is a
predicate function with an integer parameter that returns true or false.
@@ -227,9 +227,14 @@
def alwaysTrue(x):
- """This function always returns True - its used for multiple value
- selection function to accept all other values
+ """
+ Return True, always.
+ It is used for multiple value selection function to accept all other values.
+
+ @param x: not used
+ @return: True
+ @rtype: bool
"""
return True
@@ -1931,8 +1936,9 @@
def addFmt1(lang, isMnthOfYear, patterns):
- """Add 12 month formats for a specific type ('January','Feb..), for a given
- language. The function must accept one parameter for the ->int or ->string
+ """Add 12 month formats for a specific type ('January','Feb..), for a given language.
+
+ The function must accept one parameter for the ->int or ->string
conversions, just like everywhere else in the formats map.
The patterns parameter is a list of 12 elements to be used for each month.
@@ -1961,6 +1967,7 @@
def makeMonthNamedList(lang, pattern, makeUpperCase=None):
"""Create a list of 12 elements based on the name of the month.
+
The language-dependent month name is used as a formating argument to the
pattern. The pattern must be have one %s that will be replaced by the
localized month name.
@@ -2291,9 +2298,13 @@
def getAutoFormat(lang, title, ignoreFirstLetterCase=True):
- """Return (dictName,value), where value can be a year, date, etc, and
- dictName is 'YearBC', 'December', etc.
+ """
+ Return first matching formatted date value.
+ @param lang: language code
+ @param title: value to format
+ @return: dictName ('YearBC', 'December', ...) and value (a year, date, ...)
+ @rtype: tuple
"""
for dictName, dict in formats.items():
try:
diff --git a/pywikibot/editor.py b/pywikibot/editor.py
index 1b6b1b5..6cdcf52 100644
--- a/pywikibot/editor.py
+++ b/pywikibot/editor.py
@@ -20,6 +20,8 @@
class TextEditor(object):
+ """Text editor."""
+
def command(self, tempFilename, text, jumpIndex=None):
command = config.editor
if jumpIndex:
@@ -69,7 +71,8 @@
def edit(self, text, jumpIndex=None, highlight=None):
"""
- Calls the editor and thus allows the user to change the text.
+ Call the editor and thus allow the user to change the text.
+
Returns the modified text. Halts the thread's operation until the editor
is closed.
diff --git a/pywikibot/exceptions.py b/pywikibot/exceptions.py
index 7feba4a..266b400 100644
--- a/pywikibot/exceptions.py
+++ b/pywikibot/exceptions.py
@@ -57,6 +57,7 @@
class Error(UnicodeMixin, Exception):
"""Pywikibot error"""
+
# NOTE: UnicodeMixin must be the first object Error class is derived from.
def __init__(self, arg):
self.unicode = arg
@@ -69,6 +70,9 @@
"""
Abstract Exception, used when the exception concerns a particular Page.
+
+ This class should be used when the Exception concerns a particular
+ Page, and when a generic message can be written once for all.
"""
# Preformated UNICODE message where the page title will be inserted
@@ -105,6 +109,7 @@
class PageSaveRelatedError(PageRelatedError):
"""Saving the page has failed"""
+
message = u"Page %s was not saved."
# This property maintains backwards compatibility with
@@ -119,6 +124,7 @@
class OtherPageSaveError(PageSaveRelatedError):
"""Saving the page has failed due to uncatchable error."""
+
message = "Edit to page %(title)s failed:\n%(reason)s"
def __init__(self, page, reason):
@@ -139,28 +145,41 @@
"""Username is not in user-config.py"""
+ pass
+
class NoPage(PageRelatedError):
"""Page does not exist"""
+
message = u"Page %s doesn't exist."
+
+ pass
class NoSuchSite(Error):
"""Site does not exist"""
+ pass
+
class IsRedirectPage(PageRelatedError):
"""Page is a redirect page"""
+
message = u"Page %s is a redirect page."
+
+ pass
class IsNotRedirectPage(PageRelatedError):
"""Page is not a redirect page"""
+
message = u"Page %s is not a redirect page."
+
+ pass
class CircularRedirect(PageRelatedError):
@@ -172,6 +191,7 @@
or indirectly redirects back to this one)
"""
+
message = u"Page %s is a circular redirect."
@@ -179,28 +199,41 @@
"""Invalid page title"""
+ pass
+
class LockedPage(PageSaveRelatedError):
"""Page is locked"""
+
message = u"Page %s is locked."
+
+ pass
class LockedNoPage(LockedPage):
"""Title is locked against creation"""
+
message = u"Page %s does not exist and is locked preventing creation."
+
+ pass
class CascadeLockedPage(LockedPage):
"""Page is locked due to cascading protection"""
+
message = u"Page %s is locked due to cascading protection."
+
+ pass
class SectionError(Error):
"""The section specified by # does not exist"""
+
+ pass
PageNotSaved = PageSaveRelatedError
@@ -209,26 +242,33 @@
class EditConflict(PageSaveRelatedError):
"""There has been an edit conflict while uploading the page"""
+
message = u"Page %s could not be saved due to an edit conflict"
+
+ pass
class PageDeletedConflict(EditConflict):
"""Page was deleted since being retrieved"""
+
message = u"Page %s has been deleted since last retrieved."
+
+ pass
class PageCreatedConflict(EditConflict):
"""Page was created by another user"""
+
message = u"Page %s has been created since last retrieved."
+
+ pass
class SpamfilterError(PageSaveRelatedError):
- """Saving the page has failed because the MediaWiki spam filter detected a
- blacklisted URL.
- """
+ """Page save failed because MediaWiki detected a blacklisted spam URL."""
message = "Edit to page %(title)s rejected by spam filter due to content:\n%(url)s"
@@ -241,16 +281,21 @@
"""Got unexpected server response"""
+ pass
+
class FatalServerError(ServerError):
- """A fatal server error that's not going to be corrected by just sending
- the request again."""
+ """A fatal server error will not be corrected by resending the request."""
+
+ pass
class Server504Error(Error):
"""Server timed out with HTTP 504 code"""
+
+ pass
class BadTitle(Error):
@@ -261,29 +306,40 @@
# been blocked, the bot operator should address the reason for the block
# before continuing.
+ pass
+
class UserBlocked(Error):
"""Your username or IP has been blocked"""
+
+ pass
class PageNotFound(Error):
"""Page not found in list"""
+ pass
+
class CaptchaError(Error):
"""Captcha is asked and config.solve_captcha == False."""
+ pass
+
class AutoblockUser(Error):
- """
+ """Requested action on a virtual autoblock user not valid.
+
The class AutoblockUserError is an exception that is raised whenever
an action is requested on a virtual autoblock user that's not available
for him (i.e. roughly everything except unblock).
"""
+
+ pass
class UserActionRefuse(Error):
@@ -307,6 +363,7 @@
pass
+
# TODO: Warn about the deprecated usage
import pywikibot.data.api
UploadWarning = pywikibot.data.api.UploadWarning
diff --git a/pywikibot/i18n.py b/pywikibot/i18n.py
index ffd143a..52ce1c9 100644
--- a/pywikibot/i18n.py
+++ b/pywikibot/i18n.py
@@ -1,7 +1,9 @@
# -*- coding: utf-8 -*-
"""
-Various i18n functions, both for the internal translation system
-and for TranslateWiki-based translations
+Various i18n functions.
+
+Helper functions for both the internal translation system
+and for TranslateWiki-based translations.
"""
#
# (C) Pywikibot team, 2004-2014
@@ -235,8 +237,8 @@
def _extract_plural(code, message, parameters):
- """Check for the plural variants in message and replace them depending on
- parameter settings.
+ """Check for the plural variants in message and replace them.
+
@param message: the message to be replaced
@type message: unicode string
@param parameters: plural parameters passed from other methods
@@ -346,8 +348,7 @@
def twtranslate(code, twtitle, parameters=None):
- """ Use TranslateWiki files to provide translations based on the TW title
- twtitle, which corresponds to a page on TW.
+ """Translate a message.
@param code The language code
@param twtitle The TranslateWiki string title, in <package>-<key> format
@@ -401,8 +402,7 @@
# Maybe this function should be merged with twtranslate
def twntranslate(code, twtitle, parameters=None):
- """ First implementation of plural support for translations based on the
- TranslateWiki title twtitle, which corresponds to a page on TranslateWiki.
+ """Translate a message with plural support.
@param code The language code
@param twtitle The TranslateWiki string title, in <package>-<key> format
@@ -475,14 +475,17 @@
def twhas_key(code, twtitle):
- """ Use TranslateWiki files to to check whether specified translation
- based on the TW title is provided. No code fallback is made.
+ """
+ Check if a message has a translation in the specified language code.
- @param code The language code
- @param twtitle The TranslateWiki string title, in <package>-<key> format
+ The translations are retrieved from i18n.<package>, based on the callers
+ import table.
- The translations are retrieved from i18n.<package>, based on the callers
- import table.
+ No code fallback is made.
+
+ @param code The language code
+ @param twtitle The TranslateWiki string title, in <package>-<key> format
+
"""
package = twtitle.split("-")[0]
transdict = getattr(__import__("i18n", fromlist=[package]), package).msg
@@ -494,6 +497,7 @@
def input(twtitle, parameters=None, password=False):
""" Ask the user a question, return the user's answer.
+
@param twtitle The TranslateWiki string title, in <package>-<key> format
@param parameters For passing parameters. In the future, this will
be used for plural support.
diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py
index ac6ac1e..68ec8bd 100644
--- a/pywikibot/interwiki_graph.py
+++ b/pywikibot/interwiki_graph.py
@@ -26,6 +26,8 @@
class GraphSavingThread(threading.Thread):
"""
+ Threaded graph renderer.
+
Rendering a graph can take extremely long. We use
multithreading because of that.
@@ -50,7 +52,11 @@
class GraphDrawer:
+
+ """Graphviz (dot) code creator."""
+
def __init__(self, subject):
+ """Constructor."""
if not pydotfound:
raise GraphImpossible('pydot is not installed.')
self.graph = None
@@ -153,6 +159,16 @@
def getFilename(page, extension=None):
+ """
+ Create a filename that is unique for the page.
+
+ @param page: page used to create the new filename
+ @type page: Page
+ @param extension: file extension
+ @type extension: str
+ @return: filename of <family>-<lang>-<page>.<ext>
+ @rtype: str
+ """
filename = '%s-%s-%s' % (page.site.family.name,
page.site.language(),
page.titleForFilename())
diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py
index d0e753f..6cd2cc6 100644
--- a/pywikibot/logentries.py
+++ b/pywikibot/logentries.py
@@ -19,8 +19,9 @@
class LogDict(dict):
"""
- Simple custom dictionary that raises a custom KeyError and logs
- debugging information when a key is missing
+ Simple custom dict that raises a custom KeyError when a key is missing.
+
+ It also logs debugging information when a key is missing.
"""
def __missing__(self, key):
pywikibot.debug(u"API log entry received:\n" + repr(self),
@@ -30,7 +31,7 @@
class LogEntry(object):
- """Generic log entry"""
+ """Generic log entry."""
# Log type expected. None for every type, or one of the (letype) str :
# block/patrol/etc...
@@ -38,7 +39,7 @@
_expectedType = None
def __init__(self, apidata):
- """Initialize object from a logevent dict returned by MW API"""
+ """Initialize object from a logevent dict returned by MW API."""
self.data = LogDict(apidata)
if self._expectedType is not None and self._expectedType != self.type():
raise Error("Wrong log type! Expecting %s, received %s instead."
@@ -57,7 +58,7 @@
return self.data['ns']
def title(self):
- """Page on which action was performed"""
+ """Page on which action was performed."""
if not hasattr(self, '_title'):
self._title = pywikibot.Page(pywikibot.Link(self.data['title']))
return self._title
@@ -73,7 +74,7 @@
return self.data['user']
def timestamp(self):
- """Timestamp object corresponding to event timestamp"""
+ """Timestamp object corresponding to event timestamp."""
if not hasattr(self, '_timestamp'):
self._timestamp = pywikibot.Timestamp.fromISOformat(self.data['timestamp'])
return self._timestamp
@@ -86,6 +87,7 @@
_expectedType = 'block'
def __init__(self, apidata):
+ """Constructor."""
super(BlockEntry, self).__init__(apidata)
# see en.wikipedia.org/w/api.php?action=query&list=logevents&letype=block&lelimit…
# When an autoblock is removed, the "title" field is not a page title
@@ -97,6 +99,8 @@
def title(self):
"""
+ Return the blocked account or IP.
+
* Returns the Page object of username or IP
if this block action targets a username or IP.
* Returns the blockid if this log reflects the removal of an autoblock
@@ -121,7 +125,8 @@
def flags(self):
"""
- Returns a list of (str) flags associated with the block entry.
+ Return a list of (str) flags associated with the block entry.
+
Raises an Error if the entry is an unblocking log entry
"""
if hasattr(self, '_flags'):
@@ -131,8 +136,9 @@
def duration(self):
"""
- Returns a datetime.timedelta representing the block duration,
- or None if block is indefinite
+ Return a datetime.timedelta representing the block duration.
+
+ It returns None if block is indefinite.
Raises an Error if the entry is an unblocking log entry
"""
if hasattr(self, '_duration'):
@@ -146,7 +152,8 @@
def expiry(self):
"""
- Returns a Timestamp representing the block expiry date
+ Return a Timestamp representing the block expiry date.
+
Raises an Error if the entry is an unblocking log entry
"""
if hasattr(self, '_expiry'):
@@ -185,8 +192,9 @@
def suppressedredirect(self):
"""
- Returns True if no redirect was created from the old title
- to the new title during the move
+ Return True if no redirect was created during the move.
+
+ @rtype: bool
"""
# Introduced in MW r47901
return 'suppressedredirect' in self.data['move']
@@ -213,6 +221,7 @@
Only available method is create()
"""
+
_logtypes = {
'block': BlockEntry,
'protect': ProtectEntry,
@@ -227,6 +236,8 @@
def __init__(self, logtype=None):
"""
+ Constructor.
+
@param logtype: The log type of the log entries, if known in advance.
If None, the Factory will fetch the log entry from
the data to create each object.
@@ -241,7 +252,8 @@
def create(self, logdata):
"""
- Instantiates the LogEntry object representing logdata
+ Instantiate the LogEntry object representing logdata.
+
@param logdata: <item> returned by the api
@type logdata: dict
@@ -252,7 +264,8 @@
@staticmethod
def _getEntryClass(logtype):
"""
- Returns the class corresponding to the @logtype string parameter.
+ Return the class corresponding to the @logtype string parameter.
+
Returns LogEntry if logtype is unknown or not supported
"""
try:
@@ -262,7 +275,7 @@
def _createFromData(self, logdata):
"""
- Checks for logtype from data, and creates the correct LogEntry
+ Check for logtype from data, and create the correct LogEntry.
"""
try:
logtype = logdata['type']
diff --git a/pywikibot/login.py b/pywikibot/login.py
index 0026aba..1445d4b 100644
--- a/pywikibot/login.py
+++ b/pywikibot/login.py
@@ -74,8 +74,9 @@
def botAllowed(self):
"""
- Checks whether the bot is listed on a specific page to comply with
- the policy on the respective wiki.
+ Check whether the bot is listed on a specific page.
+
+ This allows bots to comply with the policy on the respective wiki.
"""
if self.site.family.name in botList \
and self.site.code in botList[self.site.family.name]:
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 1fc54eb..17d99f7 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -121,8 +121,7 @@
return self._link.site
def version(self):
- """Return MediaWiki version number of the Site object for the wiki
- on which this Page resides.
+ """Return MediaWiki version number of the page site.
This is needed to use @need_version() decorator for methods of
Page objects.
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 717d764..91c973b 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -1,6 +1,8 @@
# -*- coding: utf-8 -*-
"""
-This module offers a wide variety of page generators. A page generator is an
+This module offers a wide variety of page generators.
+
+A page generator is an
object that is iterable (see http://legacy.python.org/dev/peps/pep-0255/ ) and
that yields page objects on which other scripts can then work.
@@ -209,6 +211,7 @@
class GeneratorFactory(object):
"""Process command line arguments and return appropriate page generator.
+
This factory is responsible for processing command line arguments
that are used by many scripts and that determine which pages to work on.
"""
@@ -679,13 +682,13 @@
def InterwikiPageGenerator(page):
- """Iterator over all interwiki (non-language) links on a page."""
+ """Iterate over all interwiki (non-language) links on a page."""
for link in page.interwiki():
yield pywikibot.Page(link)
def LanguageLinksPageGenerator(page, step=None, total=None):
- """Iterator over all interwiki language links on a page."""
+ """Iterate over all interwiki language links on a page."""
for link in page.iterlanglinks(step=step, total=total):
yield pywikibot.Page(link)
@@ -694,7 +697,7 @@
withTemplateInclusion=True,
onlyTemplateInclusion=False,
step=None, total=None, content=False):
- '''Yields all pages referring to a specific page.'''
+ """Yield all pages referring to a specific page."""
return referredPage.getReferences(
follow_redirects=followRedirects,
withTemplateInclusion=withTemplateInclusion,
@@ -804,7 +807,7 @@
@deprecate_arg("number", "total")
def UserContributionsGenerator(username, namespaces=None, site=None,
step=None, total=None):
- """Yield unique pages edited by user:username
+ """Yield unique pages edited by user:username.
@param namespaces: list of namespace numbers to fetch contribs from
@@ -820,8 +823,7 @@
def NamespaceFilterPageGenerator(generator, namespaces, site=None):
"""
- Wraps around another generator. Yields only those pages that are in one
- of the given namespaces.
+ A generator yielding pages from another generator in given namespaces.
The namespace list can contain both integers (namespace numbers) and
strings/unicode strings (namespace names).
@@ -873,9 +875,11 @@
@classmethod
def __filter_match(cls, regex, string, quantifier):
- """ return True if string matches precompiled regex list with depending
- on the quantifier parameter (see below).
+ """Return True if string matches precompiled regex list.
+ @param quantifier: a qualifier
+ @type quantifier: str of 'all', 'any' or 'none'
+ @rtype: bool
"""
if quantifier == 'all':
match = all(r.search(string) for r in regex)
@@ -885,7 +889,7 @@
@classmethod
def __precompile(cls, regex, flag):
- """ precompile the regex list if needed """
+ """ precompile the regex list if needed. """
# Enable multiple regexes
if not isinstance(regex, list):
regex = [regex]
@@ -899,8 +903,9 @@
@deprecate_arg("inverse", "quantifier")
def titlefilter(cls, generator, regex, quantifier='any',
ignore_namespace=True):
- """ Yield pages from another generator whose title matches regex with
- options re.IGNORECASE dependig on the quantifier parameter.
+ """ Yield pages from another generator whose title matches regex.
+
+ Uses regex option re.IGNORECASE depending on the quantifier parameter.
If ignore_namespace is False, the whole page title is compared.
NOTE: if you want to check for a match at the beginning of the title,
you have to start the regex with "^"
@@ -933,10 +938,11 @@
@classmethod
def contentfilter(cls, generator, regex, quantifier='any'):
- """Yield pages from another generator whose body matches regex with
- options re.IGNORECASE|re.DOTALL dependig on the quantifier parameter.
+ """Yield pages from another generator whose body matches regex.
- For parameters see titlefilter above
+ Uses regex option re.IGNORECASE depending on the quantifier parameter.
+
+ For parameters see titlefilter above.
"""
reg = cls.__precompile(regex, re.IGNORECASE | re.DOTALL)
@@ -965,8 +971,9 @@
def FileGenerator(generator):
"""
- Wraps around another generator. Yields the same pages, but as FilePage
- objects instead of Page objects. Makes sense only if it is ascertained
+ Yield pages from another generator as FilePage objects.
+
+ Makes sense only if it is ascertained
that only images are being retrieved.
"""
for page in generator:
@@ -1102,8 +1109,7 @@
def WikidataItemGenerator(gen):
"""
- A wrapper generator used to take another generator
- and yield their relevant Wikidata items
+ A wrapper generator used to yield Wikidata items of another generator.
"""
for page in gen:
if isinstance(page, pywikibot.ItemPage):
@@ -1249,8 +1255,9 @@
def LinksearchPageGenerator(link, namespaces=None, step=None, total=None,
site=None):
- """Yield all pages that include a specified link, according to
- [[Special:Linksearch]].
+ """Yield all pages that include a specified link.
+
+ Obtains data from [[Special:Linksearch]].
"""
if site is None:
@@ -1262,7 +1269,7 @@
def SearchPageGenerator(query, step=None, total=None, namespaces=None,
site=None):
"""
- Provides a list of results using the internal MediaWiki search engine
+ Yield pages from the MediaWiki internal search engine.
"""
if site is None:
site = pywikibot.Site()
@@ -1272,7 +1279,10 @@
def UntaggedPageGenerator(untaggedProject, limit=500):
- """ Function to get the pages returned by this tool:
+ """
+ Yield pages from defunct toolserver UntaggedImages.php.
+
+ It was using this tool:
https://toolserver.org/~daniel/WikiSense/UntaggedImages.php
"""
URL = "https://toolserver.org/~daniel/WikiSense/UntaggedImages.php?"
@@ -1409,16 +1419,19 @@
def MySQLPageGenerator(query, site=None):
"""
- Requires oursql <https://pythonhosted.org/oursql/> or
- MySQLdb <https://sourceforge.net/projects/mysql-python/>
- Yields a list of pages based on a MySQL query. Each query
- should provide the page namespace and page title. An example
+ Yield a list of pages based on a MySQL query.
+
+ Each query should provide the page namespace and page title. An example
query that yields all ns0 pages might look like:
SELECT
page_namespace,
page_title,
FROM page
WHERE page_namespace = 0;
+
+ Requires oursql <https://pythonhosted.org/oursql/> or
+ MySQLdb <https://sourceforge.net/projects/mysql-python/>
+
@param query: MySQL query to execute
@param site: Site object or raw database name
@type site: pywikibot.Site|str
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 39d8e5f..1b4ece7 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -949,7 +949,9 @@
def _get_siteinfo(self, prop, expiry):
"""
- Retrieve a siteinfo property. All properties which the site doesn't
+ Retrieve a siteinfo property.
+
+ All properties which the site doesn't
support contain the default value. Because pre-1.12 no data was
returned when a property doesn't exists, it queries each property
independetly if a property is invalid.
@@ -1519,8 +1521,7 @@
yield Notification.fromJSON(self, notif)
def notifications_mark_read(self, **kwargs):
- """Mark one, some or all notifications,
- selected via keyword arguments, as read.
+ """Mark selected notifications as read.
@return: whether the action was successful
@rtype: bool
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 3af27e1..0221449 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -36,7 +36,7 @@
def unescape(s):
- """Replace escaped HTML-special characters by their originals"""
+ """Replace escaped HTML-special characters by their originals."""
if '&' not in s:
return s
s = s.replace("&lt;", "<")
@@ -263,7 +263,7 @@
def removeDisabledParts(text, tags=['*']):
"""
- Return text without portions where wiki markup is disabled
+ Return text without portions where wiki markup is disabled.
Parts that can/will be removed are --
* HTML comments
@@ -296,7 +296,7 @@
def removeHTMLParts(text, keeptags=['tt', 'nowiki', 'small', 'sup']):
"""
- Return text without portions where HTML markup is disabled
+ Return text without portions where HTML markup is disabled.
Parts that can/will be removed are --
* HTML and all wiki tags
@@ -334,6 +334,7 @@
def isDisabled(text, index, tags=['*']):
"""
Return True if text[index] is disabled, e.g. by a comment or by nowiki tags.
+
For the tags parameter, see removeDisabledParts() above.
"""
@@ -483,8 +484,7 @@
def removeLanguageLinksAndSeparator(text, site=None, marker='', separator=''):
"""
- Return text with all inter-language links, plus any preceding whitespace
- and separator occurrences removed.
+ Return text with inter-language links and preceding separators removed.
If a link to an unknown language is encountered, a warning is printed.
If a marker is defined, that string is placed at the location of the
@@ -710,8 +710,7 @@
def removeCategoryLinksAndSeparator(text, site=None, marker='', separator=''):
"""
- Return text with all category links, plus any preceding whitespace
- and separator occurrences removed.
+ Return text with all category links and preceding separators removed.
Put the string marker after the last replacement (at the end of the text
if there is no replacement).
@@ -729,9 +728,9 @@
def replaceCategoryInPlace(oldtext, oldcat, newcat, site=None):
- """Replace the category oldcat with the category newcat and return
- the modified text.
+ """Replace the category oldcat with the category newcat.
+ @return: the modified text
"""
if site is None:
site = pywikibot.Site()
@@ -772,8 +771,7 @@
def replaceCategoryLinks(oldtext, new, site=None, addOnly=False):
"""
- Replace the category links given in the wikitext given
- in oldtext by the new links given in new.
+ Replace all existing category links with new category links.
'new' should be a list of Category objects or strings
which can be either the raw name or [[Category:..]].
@@ -923,9 +921,9 @@
@param text: The wikitext from which templates are extracted
@type text: unicode or string
-
+ @return: list of template name and params
+ @rtype: list of tuple
"""
-
if not (config.use_mwparserfromhell and mwparserfromhell):
return extract_templates_and_params_regex(text)
code = mwparserfromhell.parse(text)
@@ -940,10 +938,18 @@
def extract_templates_and_params_regex(text):
"""
- See the documentation for extract_templates_and_params
- This does basically the same thing, but uses regex.
- @param text:
- @return:
+ Extract templates with params using a regex.
+
+ This function should not be called directly.
+
+ Use extract_templates_and_params, which will fallback to using this
+ regex based implementation when the mwparserfromhell implementation
+ is not used.
+
+ @param text: The wikitext from which templates are extracted
+ @type text: unicode or string
+ @return: list of template name and params
+ @rtype: list of tuple
"""
# remove commented-out stuff etc.
@@ -1147,7 +1153,7 @@
class tzoneFixedOffset(datetime.tzinfo):
"""
- Class building tzinfo objects for fixed-offset time zones
+ Class building tzinfo objects for fixed-offset time zones.
@offset: a number indicating fixed offset in minutes east from UTC
@name: a string with name of the timezone"""
@@ -1176,7 +1182,7 @@
class TimeStripper(object):
"""
- Find timestamp in page text and returns it as timezone aware datetime object
+ Find timestamp in page and return it as timezone aware datetime object.
"""
def __init__(self, site=None):
@@ -1242,7 +1248,7 @@
def last_match_and_replace(self, txt, pat):
"""
- Take the rightmost match, to prevent spurious earlier matches, and replace with marker
+ Take the rightmost match, to prevent spurious earlier matches, and replace with marker.
"""
m = None
cnt = 0
@@ -1270,6 +1276,7 @@
def timestripper(self, line):
"""
Find timestamp in line and convert it to time zone aware datetime.
+
All the following items must be matched, otherwise None is returned:
-. year, month, hour, time, day, minute, tzinfo
diff --git a/pywikibot/tools.py b/pywikibot/tools.py
index 2cc284a..c90790e 100644
--- a/pywikibot/tools.py
+++ b/pywikibot/tools.py
@@ -35,9 +35,7 @@
class UnicodeMixin(object):
- """Mixin class to handle defining the proper __str__/__unicode__
- methods in Python 2 or 3.
- """
+ """Mixin class to add __str__ method in Python 2 or 3."""
if sys.version_info[0] >= 3:
def __str__(self):
diff --git a/pywikibot/userinterfaces/gui.py b/pywikibot/userinterfaces/gui.py
index db7f0c3..f141d9b 100644
--- a/pywikibot/userinterfaces/gui.py
+++ b/pywikibot/userinterfaces/gui.py
@@ -160,14 +160,15 @@
return "break"
def find_all(self, s):
- '''
- Highlight all occurrences of string s, and select the first one. If
- the string has already been highlighted, jump to the next occurrence
+ """
+ Highlight all occurrences of string s, and select the first one.
+
+ If the string has already been highlighted, jump to the next occurrence
after the current selection. (You cannot go backwards using the
button, but you can manually place the cursor anywhere in the
document to start searching from that point.)
- '''
+ """
if hasattr(self, "_highlight") and self._highlight == s:
try:
if self.get(Tkinter.SEL_FIRST, Tkinter.SEL_LAST) == s:
@@ -325,6 +326,8 @@
def edit(self, text, jumpIndex=None, highlight=None):
"""
+ Provide user with editor to modify text.
+
Parameters:
* text - a Unicode string
* jumpIndex - an integer: position at which to put the caret
diff --git a/pywikibot/userinterfaces/terminal_interface_base.py b/pywikibot/userinterfaces/terminal_interface_base.py
index e7a38af..03b51f8 100755
--- a/pywikibot/userinterfaces/terminal_interface_base.py
+++ b/pywikibot/userinterfaces/terminal_interface_base.py
@@ -114,6 +114,8 @@
def output(self, text, toStdout=False, targetStream=None):
"""
+ Output text to a stream.
+
If a character can't be displayed in the encoding used by the user's
terminal, it will be replaced with a question mark or by a
transliteration.
@@ -275,8 +277,9 @@
class TerminalHandler(logging.Handler):
- """A handler class that writes logging records, appropriately formatted, to
- a stream connected to a terminal. This class does not close the stream,
+ """A handler class that writes logging records to a terminal.
+
+ This class does not close the stream,
as sys.stdout or sys.stderr may be (and usually will be) used.
Slightly modified version of the StreamHandler class that ships with
diff --git a/pywikibot/version.py b/pywikibot/version.py
index 8fc5685..cbb70a2 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -32,7 +32,8 @@
def getversion(online=True):
- """Return a pywikibot version string
+ """Return a pywikibot version string.
+
@param online: (optional) Include information obtained online
"""
data = dict(getversiondict()) # copy dict to prevent changes in 'chache'
@@ -258,8 +259,10 @@
def getfileversion(filename):
- """ Retrieve revision number of file (__version__ variable containing Id tag)
- without importing it (thus can be done for any file)
+ """Retrieve revision number of file.
+
+ Extracts __version__ variable containing Id tag, without importing it.
+ (thus can be done for any file)
"""
_program_dir = _get_program_dir()
__version__ = None
diff --git a/pywikibot/weblib.py b/pywikibot/weblib.py
index 83f6a68..15b53ef 100644
--- a/pywikibot/weblib.py
+++ b/pywikibot/weblib.py
@@ -1,9 +1,5 @@
# -*- coding: utf-8 -*-
-"""
-Functions for manipulating external links
-or querying third-party sites.
-
-"""
+"""Functions for manipulating external links or querying third-party sites."""
#
# (C) Pywikibot team, 2013
#
diff --git a/pywikibot/xmlreader.py b/pywikibot/xmlreader.py
index 4f7988e..e327b6e 100644
--- a/pywikibot/xmlreader.py
+++ b/pywikibot/xmlreader.py
@@ -23,9 +23,10 @@
def parseRestrictions(restrictions):
"""
- Parses the characters within a restrictions tag and returns
- strings representing user groups allowed to edit and to move
- a page, where None means there are no restrictions.
+ Parse the characters within a restrictions tag.
+
+ Returns strings representing user groups allowed to edit and
+ to move a page, where None means there are no restrictions.
"""
if not restrictions:
return None, None
@@ -69,7 +70,9 @@
class XmlParserThread(threading.Thread):
"""
- This XML parser will run as a single thread. This allows the XmlDump
+ XML parser that will run as a single thread.
+
+ This allows the XmlDump
generator to yield pages before the parser has finished reading the
entire dump.
@@ -87,7 +90,9 @@
class XmlDump(object):
"""
- Represents an XML dump file. Reads the local file at initialization,
+ Represents an XML dump file.
+
+ Reads the local file at initialization,
parses it, and offers access to the resulting XmlEntries via a generator.
@param allrevisions: boolean
@@ -102,7 +107,7 @@
self._parse = self._parse_only_latest
def parse(self):
- """Generator using cElementTree iterparse function"""
+ """Generator using cElementTree iterparse function."""
if self.filename.endswith('.bz2'):
import bz2
source = bz2.BZ2File(self.filename)
@@ -133,7 +138,7 @@
yield rev
def _parse_only_latest(self, event, elem):
- """Parser that yields only the latest revision"""
+ """Parser that yields only the latest revision."""
if event == "end" and elem.tag == "{%s}page" % self.uri:
self._headers(elem)
revision = elem.find("{%s}revision" % self.uri)
@@ -142,7 +147,7 @@
self.root.clear()
def _parse_all(self, event, elem):
- """Parser that yields all revisions"""
+ """Parser that yields all revisions."""
if event == "start" and elem.tag == "{%s}page" % self.uri:
self._headers(elem)
if event == "end" and elem.tag == "{%s}revision" % self.uri:
@@ -151,6 +156,7 @@
self.root.clear()
def _headers(self, elem):
+ """Extract headers from XML chunk."""
self.title = elem.findtext("{%s}title" % self.uri)
self.ns = elem.findtext("{%s}ns" % self.uri)
self.pageid = elem.findtext("{%s}id" % self.uri)
@@ -160,7 +166,7 @@
self.restrictions)
def _create_revision(self, revision):
- """Create a Single revision"""
+ """Create a Single revision."""
revisionid = revision.findtext("{%s}id" % self.uri)
timestamp = revision.findtext("{%s}timestamp" % self.uri)
comment = revision.findtext("{%s}comment" % self.uri)
--
To view, visit https://gerrit.wikimedia.org/r/158377
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I1bd1375ffe01025b43c307cd1828471fbb5e05d9
Gerrit-PatchSet: 8
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Siebrand <siebrand(a)kitano.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>