jenkins-bot has submitted this change and it was merged.
Change subject: Use correct site for pagegenerators
......................................................................
Use correct site for pagegenerators
GeneratorFactory accepted a site parameter, but did not pass it to
many of the generators it instantiated.
Also delay loading of the site, if not specified, until handleArg is
called, by which time pywikibot.handle_args has processed the global
command line arguments.
Plus PEP257 fixes.
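A minimal sketch of the intended usage after this change (the command line
values below are illustrative, not part of the patch):

    import pywikibot
    from pywikibot import pagegenerators

    # Global options (-lang, -family, -log, ...) are processed first; the
    # remaining arguments are returned for the script to handle.
    local_args = pywikibot.handle_args(['-lang:en', '-family:wikipedia',
                                        '-ns:0', '-random:5'])

    # The factory no longer resolves a Site in its constructor; its site
    # property is evaluated lazily, so it reflects -lang/-family above.
    gen_factory = pagegenerators.GeneratorFactory()
    for arg in local_args:
        gen_factory.handleArg(arg)

    gen = gen_factory.getCombinedGenerator()
    if gen:
        for page in gen:
            pywikibot.output(page.title())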
Bug: 72120
Change-Id: Ibef4116e6894e722f1e95d092b4f178d91d7760c
---
M pywikibot/pagegenerators.py
M tox.ini
2 files changed, 297 insertions(+), 43 deletions(-)
Approvals:
XZise: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index ea34ddd..5dc88a3 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -221,14 +221,30 @@
"""
def __init__(self, site=None):
+ """
+ Constructor.
+
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
self.gens = []
self.namespaces = []
self.step = None
self.limit = None
self.articlefilter_list = []
- self.site = site
- if self.site is None:
- self.site = pywikibot.Site()
+ self._site = site
+
+ @property
+ def site(self):
+ """
+ Generator site.
+
+ @return: Site given to constructor, otherwise the default Site.
+ @rtype: L{pywikibot.site.BaseSite}
+ """
+ if not self._site:
+ self._site = pywikibot.Site()
+ return self._site
def getCombinedGenerator(self, gen=None):
"""Return the combination of all accumulated generators.
@@ -280,7 +296,8 @@
categoryname = categoryname[:ind]
cat = pywikibot.Category(pywikibot.Link(categoryname,
- defaultNamespace=14))
+ defaultNamespace=14,
+ source=self.site))
# Link constructor automatically prepends localized namespace
# if not included in user's input
return CategorizedPageGenerator(cat, start=startfrom,
@@ -300,7 +317,8 @@
startfrom = None
cat = pywikibot.Category(pywikibot.Link(categoryname,
- defaultNamespace=14))
+ defaultNamespace=14,
+ source=self.site))
return SubCategoriesPageGenerator(cat, start=startfrom,
recurse=recurse, content=content)
@@ -331,26 +349,29 @@
gen = FileLinksGenerator(fileLinksPage)
elif arg.startswith('-unusedfiles'):
if len(arg) == 12:
- gen = UnusedFilesGenerator()
+ gen = UnusedFilesGenerator(site=self.site)
else:
- gen = UnusedFilesGenerator(total=int(arg[13:]))
+ gen = UnusedFilesGenerator(total=int(arg[13:]), site=self.site)
elif arg.startswith('-lonelypages'):
if len(arg) == 12:
- gen = LonelyPagesPageGenerator()
+ gen = LonelyPagesPageGenerator(site=self.site)
else:
- gen = LonelyPagesPageGenerator(total=int(arg[13:]))
+ gen = LonelyPagesPageGenerator(total=int(arg[13:]),
+ site=self.site)
elif arg.startswith('-unwatched'):
if len(arg) == 10:
- gen = UnwatchedPagesPageGenerator()
+ gen = UnwatchedPagesPageGenerator(site=self.site)
else:
- gen = UnwatchedPagesPageGenerator(total=int(arg[11:]))
+ gen = UnwatchedPagesPageGenerator(total=int(arg[11:]),
+ site=self.site)
elif arg.startswith('-usercontribs'):
gen = UserContributionsGenerator(arg[14:])
elif arg.startswith('-withoutinterwiki'):
if len(arg) == 17:
- gen = WithoutInterwikiPageGenerator()
+ gen = WithoutInterwikiPageGenerator(site=self.site)
else:
- gen = WithoutInterwikiPageGenerator(total=int(arg[18:]))
+ gen = WithoutInterwikiPageGenerator(total=int(arg[18:]),
+ site=self.site)
elif arg.startswith('-interwiki'):
title = arg[11:]
if not title:
@@ -360,26 +381,31 @@
gen = InterwikiPageGenerator(page)
elif arg.startswith('-randomredirect'):
if len(arg) == 15:
- gen = RandomRedirectPageGenerator()
+ gen = RandomRedirectPageGenerator(site=self.site)
else:
- gen = RandomRedirectPageGenerator(total=int(arg[16:]))
+ gen = RandomRedirectPageGenerator(total=int(arg[16:]),
+ site=self.site)
elif arg.startswith('-random'):
if len(arg) == 7:
- gen = RandomPageGenerator()
+ gen = RandomPageGenerator(site=self.site)
else:
- gen = RandomPageGenerator(total=int(arg[8:]))
+ gen = RandomPageGenerator(total=int(arg[8:]), site=self.site)
elif arg.startswith('-recentchanges'):
if len(arg) >= 15:
- gen = RecentChangesPageGenerator(namespaces=self.namespaces, total=int(arg[15:]))
+ gen = RecentChangesPageGenerator(namespaces=self.namespaces,
+ total=int(arg[15:]),
+ site=self.site)
else:
- gen = RecentChangesPageGenerator(namespaces=self.namespaces, total=60)
+ gen = RecentChangesPageGenerator(namespaces=self.namespaces,
+ total=60,
+ site=self.site)
gen = DuplicateFilterPageGenerator(gen)
elif arg.startswith('-file'):
textfilename = arg[6:]
if not textfilename:
textfilename = pywikibot.input(
u'Please enter the local file name:')
- gen = TextfilePageGenerator(textfilename)
+ gen = TextfilePageGenerator(textfilename, site=self.site)
elif arg.startswith('-namespace') or arg.startswith('-ns'):
value = None
if arg.startswith('-ns:'):
@@ -434,11 +460,11 @@
self.site)
)]
elif arg.startswith('-uncatfiles'):
- gen = UnCategorizedImageGenerator()
+ gen = UnCategorizedImageGenerator(site=self.site)
elif arg.startswith('-uncatcat'):
- gen = UnCategorizedCategoryGenerator()
+ gen = UnCategorizedCategoryGenerator(site=self.site)
elif arg.startswith('-uncat'):
- gen = UnCategorizedPageGenerator()
+ gen = UnCategorizedPageGenerator(site=self.site)
elif arg.startswith('-ref'):
referredPageTitle = arg[5:]
if not referredPageTitle:
@@ -460,7 +486,7 @@
if not url:
url = pywikibot.input(
u'Pages with which weblink should be processed?')
- gen = LinksearchPageGenerator(url)
+ gen = LinksearchPageGenerator(url, site=self.site)
elif arg.startswith('-transcludes'):
transclusionPageTitle = arg[len('-transcludes:'):]
if not transclusionPageTitle:
@@ -482,26 +508,29 @@
namespace = firstpagelink.namespace
firstPageTitle = firstpagelink.title
gen = AllpagesPageGenerator(firstPageTitle, namespace,
- includeredirects=False)
+ includeredirects=False,
+ site=self.site)
elif arg.startswith('-prefixindex'):
prefix = arg[13:]
namespace = None
if not prefix:
prefix = pywikibot.input(
u'What page names are you looking for?')
- gen = PrefixingPageGenerator(prefix=prefix)
+ gen = PrefixingPageGenerator(prefix=prefix, site=self.site)
elif arg.startswith('-newimages'):
limit = arg[11:] or pywikibot.input(
u'How many images do you want to load?')
- gen = NewimagesPageGenerator(total=int(limit))
+ gen = NewimagesPageGenerator(total=int(limit), site=self.site)
elif arg.startswith('-newpages'):
# partial workaround for bug 67249
# to use -namespace/ns with -newpages, -ns must be given before -newpages
# otherwise default namespace is 0
+ total = 60
if len(arg) >= 10:
- gen = NewpagesPageGenerator(namespaces=self.namespaces, total=int(arg[10:]))
- else:
- gen = NewpagesPageGenerator(namespaces=self.namespaces, total=60)
+ total = int(arg[10:])
+ gen = NewpagesPageGenerator(namespaces=self.namespaces,
+ total=total,
+ site=self.site)
elif arg.startswith('-imagesused'):
imagelinkstitle = arg[len('-imagesused:'):]
if not imagelinkstitle:
@@ -516,7 +545,7 @@
mediawikiQuery = pywikibot.input(
u'What do you want to search for?')
# In order to be useful, all namespaces are required
- gen = SearchPageGenerator(mediawikiQuery, namespaces=[])
+ gen = SearchPageGenerator(mediawikiQuery, namespaces=[], site=self.site)
elif arg.startswith('-google'):
gen = GoogleSearchPageGenerator(arg[8:])
elif arg.startswith('-titleregex'):
@@ -533,21 +562,21 @@
self.articlefilter_list.append(arg[6:])
return True
elif arg.startswith('-yahoo'):
- gen = YahooSearchPageGenerator(arg[7:])
+ gen = YahooSearchPageGenerator(arg[7:], site=self.site)
elif arg.startswith('-untagged'):
- gen = UntaggedPageGenerator(arg[10:])
+ gen = UntaggedPageGenerator(arg[10:], site=self.site)
elif arg.startswith('-wikidataquery'):
query = arg[len('-wikidataquery:'):]
if not query:
query = pywikibot.input(
u'WikidataQuery string:')
- gen = WikidataQueryPageGenerator(query)
+ gen = WikidataQueryPageGenerator(query, site=self.site)
elif arg.startswith('-mysqlquery'):
query = arg[len('-mysqlquery:'):]
if not query:
query = pywikibot.input(
u'Mysql query string:')
- gen = MySQLPageGenerator(query)
+ gen = MySQLPageGenerator(query, site=self.site)
if gen:
self.gens.append(gen)
@@ -565,8 +594,12 @@
includeredirects equals the string 'only', only redirects are added.
@param step: Maximum number of pages to retrieve per API query
+ @type step: int
@param total: Maximum number of pages to retrieve in total
+ @type total: int
@param content: If True, load current version of each page (default False)
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
if site is None:
@@ -585,6 +618,17 @@
def PrefixingPageGenerator(prefix, namespace=None, includeredirects=True,
site=None, step=None, total=None, content=False):
+ """
+ Prefixed Page generator.
+
+ @param step: Maximum number of pages to retrieve per API query
+ @type step: int
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param content: If True, load current version of each page (default False)
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
prefixlink = pywikibot.Link(prefix, site)
@@ -608,6 +652,13 @@
namespaces=[0, ], step=None, total=None):
"""
Iterate Page objects for all new titles in a single namespace.
+
+ @param step: Maximum number of pages to retrieve per API query
+ @type step: int
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
# API does not (yet) have a newpages function, so this tries to duplicate
# it by filtering the recentchanges output
@@ -627,7 +678,6 @@
showRedirects=None, showPatrolled=None,
topOnly=False, step=None, total=None,
user=None, excludeuser=None, site=None):
-
"""
Generate pages that are in the recent changes list.
@@ -665,9 +715,10 @@
@type user: basestring|list
@param excludeuser: if not None, exclude edits by this user or users
@type excludeuser: basestring|list
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
-
if site is None:
site = pywikibot.Site()
for item in site.recentchanges(start=start, end=end, reverse=reverse,
@@ -775,7 +826,9 @@
@param filename: the name of the file that should be read. If no name is
given, the generator prompts the user.
- @param site: the default Site for which Page objects should be created
+ @type filename: unicode
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
if filename is None:
@@ -803,7 +856,12 @@
def PagesFromTitlesGenerator(iterable, site=None):
- """Generate pages from the titles (unicode strings) yielded by iterable."""
+ """
+ Generate pages from the titles (unicode strings) yielded by iterable.
+
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for title in iterable:
@@ -817,7 +875,13 @@
step=None, total=None):
"""Yield unique pages edited by user:username.
+ @param step: Maximum number of pages to retrieve per API query
+ @type step: int
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
@param namespaces: list of namespace numbers to fetch contribs from
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
if site is None:
@@ -839,6 +903,8 @@
NOTE: API-based generators that have a "namespaces" parameter perform
namespace filtering more efficiently than this generator.
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
if site is None:
site = pywikibot.Site()
@@ -880,6 +946,8 @@
class RegexFilter(object):
+
+ """Regex filter."""
@classmethod
def __filter_match(cls, regex, string, quantifier):
@@ -1057,7 +1125,13 @@
@deprecated_args(pageNumber="step", lookahead=None)
def PreloadingGenerator(generator, step=50):
- """Yield preloaded pages taken from another generator."""
+ """
+ Yield preloaded pages taken from another generator.
+
+ @param generator: pages to iterate over
+ @param step: how many pages to preload at once
+ @type step: int
+ """
# pages may be on more than one site, for example if an interwiki
# generator is used, so use a separate preloader for each site
sites = {}
@@ -1086,6 +1160,7 @@
@param generator: pages to iterate over
@param step: how many pages to preload at once
+ @type step: int
"""
sites = {}
for page in generator:
@@ -1116,6 +1191,16 @@
@deprecated_args(number="total")
def NewimagesPageGenerator(step=None, total=None, site=None):
+ """
+ New file generator.
+
+ @param step: Maximum number of pages to retrieve per API query
+ @type step: int
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for entry in site.logevents(logtype="upload", step=step, total=total):
@@ -1127,6 +1212,11 @@
def WikidataItemGenerator(gen):
"""
A wrapper generator used to yield Wikidata items of another generator.
+
+ @param gen: Generator to wrap.
+ @type gen: generator
+ @return: Wrapped generator
+ @rtype: generator
"""
for page in gen:
if isinstance(page, pywikibot.ItemPage):
@@ -1142,6 +1232,14 @@
# TODO below
@deprecated_args(extension=None, number="total", repeat=None)
def UnusedFilesGenerator(total=100, site=None, extension=None):
+ """
+ Unused files generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.unusedfiles(total=total):
@@ -1150,6 +1248,13 @@
@deprecated_args(number="total", repeat=None)
def WithoutInterwikiPageGenerator(total=100, site=None):
+ """
+ Page lacking interwikis generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.withoutinterwiki(total=total):
@@ -1158,6 +1263,14 @@
@deprecated_args(number="total", repeat=None)
def UnCategorizedCategoryGenerator(total=100, site=None):
+ """
+ Uncategorized category generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.uncategorizedcategories(total=total):
@@ -1166,6 +1279,14 @@
@deprecated_args(number="total", repeat=None)
def UnCategorizedImageGenerator(total=100, site=None):
+ """
+ Uncategorized file generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.uncategorizedimages(total=total):
@@ -1174,6 +1295,14 @@
@deprecated_args(number="total", repeat=None)
def UnCategorizedPageGenerator(total=100, site=None):
+ """
+ Uncategorized page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.uncategorizedpages(total=total):
@@ -1181,6 +1310,14 @@
def UnCategorizedTemplateGenerator(total=100, site=None):
+ """
+ Uncategorized template generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.uncategorizedtemplates(total=total):
@@ -1189,6 +1326,14 @@
@deprecated_args(number="total", repeat=None)
def LonelyPagesPageGenerator(total=100, site=None):
+ """
+ Lonely page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.lonelypages(total=total):
@@ -1197,6 +1342,14 @@
@deprecated_args(number="total", repeat=None)
def UnwatchedPagesPageGenerator(total=100, site=None):
+ """
+ Unwatched page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.unwatchedpages(total=total):
@@ -1204,6 +1357,14 @@
def WantedPagesPageGenerator(total=100, site=None):
+ """
+ Wanted page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.wantedpages(total=total):
@@ -1212,6 +1373,14 @@
@deprecated_args(number="total", repeat=None)
def AncientPagesPageGenerator(total=100, site=None):
+ """
+ Ancient page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page, timestamp in site.ancientpages(total=total):
@@ -1220,6 +1389,14 @@
@deprecated_args(number="total", repeat=None)
def DeadendPagesPageGenerator(total=100, site=None):
+ """
+ Dead-end page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.deadendpages(total=total):
@@ -1228,6 +1405,14 @@
@deprecated_args(number="total", repeat=None)
def LongPagesPageGenerator(total=100, site=None):
+ """
+ Long page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page, length in site.longpages(total=total):
@@ -1236,6 +1421,14 @@
@deprecated_args(number="total", repeat=None)
def ShortPagesPageGenerator(total=100, site=None):
+ """
+ Short page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page, length in site.shortpages(total=total):
@@ -1244,6 +1437,14 @@
@deprecated_args(number="total")
def RandomPageGenerator(total=10, site=None):
+ """
+ Random page generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.randompages(total=total):
@@ -1252,6 +1453,14 @@
@deprecated_args(number="total")
def RandomRedirectPageGenerator(total=10, site=None):
+ """
+ Random redirect generator.
+
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
for page in site.randompages(total=total, redirects=True):
@@ -1264,6 +1473,12 @@
Obtains data from [[Special:Linksearch]].
+ @param step: Maximum number of pages to retrieve per API query
+ @type step: int
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
if site is None:
site = pywikibot.Site()
@@ -1275,6 +1490,13 @@
site=None):
"""
Yield pages from the MediaWiki internal search engine.
+
+ @param step: Maximum number of pages to retrieve per API query
+ @type step: int
+ @param total: Maximum number of pages to retrieve in total
+ @type total: int
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
if site is None:
site = pywikibot.Site()
@@ -1283,12 +1505,15 @@
yield page
-def UntaggedPageGenerator(untaggedProject, limit=500):
+def UntaggedPageGenerator(untaggedProject, limit=500, site=None):
"""
Yield pages from defunct toolserver UntaggedImages.php.
It was using this tool:
https://toolserver.org/~daniel/WikiSense/UntaggedImages.php
+
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
URL = "https://toolserver.org/~daniel/WikiSense/UntaggedImages.php?"
REGEXP = r"<td valign='top' title='Name'><a href='http[s]?://.*?" \
@@ -1304,9 +1529,11 @@
raise pywikibot.Error(
u'Nothing found at %s! Try to use the tool by yourself to be sure '
u'that it works!' % link)
+ if not site:
+ site = pywikibot.Site()
else:
for result in results:
- yield pywikibot.Page(pywikibot.Site(), result)
+ yield pywikibot.Page(site, result)
# following classes just ported from version 1 without revision; not tested
@@ -1325,6 +1552,12 @@
# values larger than 100 fail
def __init__(self, query=None, count=100, site=None):
+ """
+ Constructor.
+
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
self.query = query or pywikibot.input(u'Please enter the search query:')
self.count = count
if site is None:
@@ -1376,6 +1609,12 @@
"""
def __init__(self, query=None, site=None):
+ """
+ Constructor.
+
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
self.query = query or pywikibot.input(u'Please enter the search query:')
if site is None:
site = pywikibot.Site()
@@ -1439,7 +1678,7 @@
@param query: MySQL query to execute
@param site: Site object or raw database name
- @type site: pywikibot.Site|str
+ @type site: L{pywikibot.site.BaseSite} or str
@return: iterator of pywikibot.Page
"""
try:
@@ -1478,6 +1717,12 @@
def YearPageGenerator(start=1, end=2050, site=None):
+ """
+ Year page generator.
+
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
pywikibot.output(u"Starting with year %i" % start)
@@ -1491,6 +1736,12 @@
def DayPageGenerator(startMonth=1, endMonth=12, site=None):
+ """
+ Day page generator.
+
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
+ """
if site is None:
site = pywikibot.Site()
fd = date.FormatDate(site)
@@ -1505,6 +1756,8 @@
"""Generate pages that result from the given WikidataQuery.
@param query: the WikidataQuery query string.
+ @param site: Site for generator results.
+ @type site: L{pywikibot.site.BaseSite}
"""
if site is None:
diff --git a/tox.ini b/tox.ini
index f37d458..e002106 100644
--- a/tox.ini
+++ b/tox.ini
@@ -41,6 +41,7 @@
./pywikibot/page.py \
./pywikibot/plural.py \
./pywikibot/site.py \
+ ./pywikibot/pagegenerators.py \
./pywikibot/tools.py \
./pywikibot/version.py \
./pywikibot/weblib.py \
--
To view, visit https://gerrit.wikimedia.org/r/166942
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ibef4116e6894e722f1e95d092b4f178d91d7760c
Gerrit-PatchSet: 3
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>
XZise has submitted this change and it was merged.
Change subject: MySQLPageGenerator fails due to wrong classname
......................................................................
MySQLPageGenerator fails due to wrong classname
The generator tries to compare against pywikibot.site.Site,
which doesn't exist. Use BaseSite instead.
Bug introduced in Aug 2013 - 2e35403.
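A hedged sketch of the two call styles the corrected check supports (the
query text is only illustrative, and MySQLdb must be installed):

    import pywikibot
    from pywikibot.pagegenerators import MySQLPageGenerator

    query = u'SELECT page_namespace, page_title FROM page LIMIT 10'

    # A BaseSite subclass instance (e.g. an APISite) now passes the check:
    gen = MySQLPageGenerator(query, site=pywikibot.Site('en', 'wikipedia'))

    # A raw database name is still accepted, e.g. for a replicated copy:
    gen = MySQLPageGenerator(query, site='enwiki_p')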
Bug: 72127
Change-Id: I4876b53baebf656e196682c203c45e65551f200e
---
M pywikibot/pagegenerators.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
XZise: Looks good to me, approved
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index ea34ddd..f6e3687 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -1448,7 +1448,7 @@
import MySQLdb as mysqldb
if site is None:
site = pywikibot.Site()
- if isinstance(site, pywikibot.site.Site):
+ if isinstance(site, pywikibot.site.BaseSite):
# We want to let people set a custom dbname
# since the master dbname might not be exactly
# equal to the name on the replicated site
--
To view, visit https://gerrit.wikimedia.org/r/166983
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I4876b53baebf656e196682c203c45e65551f200e
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: [FIX] Site: Use unicode for errors with iw prefix
......................................................................
[FIX] Site: Use unicode for errors with iw prefix
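A minimal illustration of the effect on Python 2 (the sample prefix is
invented):

    # With a unicode literal the formatted error message stays unicode, so
    # a prefix containing non-ASCII characters formats cleanly.
    prefix = u'pr\xe9fixe'
    msg = u"'{0}' is not an interwiki prefix.".format(prefix)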
Bug: 72093
Change-Id: Ie958751d4b4d841fdd1fd83a6301df506adcb9eb
---
M pywikibot/site.py
1 file changed, 2 insertions(+), 2 deletions(-)
Approvals:
Mpaa: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 7c1156d..6645764 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -597,7 +597,7 @@
break
else:
raise KeyError(
- "'{0}' is not an interwiki prefix.".format(prefix))
+ u"'{0}' is not an interwiki prefix.".format(prefix))
try:
site = (pywikibot.Site(url=interwiki['url']),
'local' in interwiki)
@@ -608,7 +608,7 @@
return site[0]
else:
raise SiteDefinitionError(
- "No family/site found for prefix '{0}'".format(prefix))
+ u"No family/site found for prefix '{0}'".format(prefix))
def local_interwiki(self, prefix):
"""
--
To view, visit https://gerrit.wikimedia.org/r/166897
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ie958751d4b4d841fdd1fd83a6301df506adcb9eb
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: pep8-ify handleArgs to be handle_args
......................................................................
pep8-ify handleArgs to be handle_args
Rename the method, with args passed as a list instead of as *args,
and add a do_help parameter as an easy way for the caller to handle
-help and to prevent sys.exit() from being invoked.
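A hedged usage sketch; '-myscript-flag' stands in for a script-specific
argument and is not a real pywikibot option:

    import pywikibot

    # Global options are consumed; everything else is returned to the caller.
    rest = pywikibot.handle_args(['-lang:en', '-simulate', '-myscript-flag'])
    # rest should now contain only ['-myscript-flag']

    # With do_help=False pywikibot neither shows help nor calls sys.exit(),
    # leaving the script free to provide its own -help handling.
    rest = pywikibot.handle_args(['-help'], do_help=False)

    # The old spelling still works, but is deprecated:
    rest = pywikibot.handleArgs('-lang:en', '-simulate')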
Change-Id: I6eb20ca4b6ba8886f2d1fb31afcb2b2f86da40df
---
M pywikibot/__init__.py
M pywikibot/bot.py
2 files changed, 27 insertions(+), 10 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
XZise: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index 0444a33..ad0c475 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -29,8 +29,10 @@
from pywikibot import config2 as config
from pywikibot.bot import (
output, warning, error, critical, debug, stdout, exception,
- input, input_choice, inputChoice, handleArgs, showHelp, ui, log,
+ input, input_choice, inputChoice, handle_args, showHelp, ui, log,
calledModuleName, Bot, WikidataBot, QuitKeyboardInterrupt,
+ # the following are flagged as deprecated on usage
+ handleArgs,
)
from pywikibot.exceptions import (
Error, InvalidTitle, BadTitle, NoPage, SectionError,
@@ -66,8 +68,8 @@
'ItemPage', 'PropertyPage', 'Claim', 'TimeStripper',
'html2unicode', 'url2unicode', 'unicode2html',
'stdout', 'output', 'warning', 'error', 'critical', 'debug',
- 'exception', 'input_choice',
- 'input', 'inputChoice', 'handleArgs', 'showHelp', 'ui', 'log',
+ 'exception', 'input_choice', 'input', 'inputChoice',
+ 'handle_args', 'handleArgs', 'showHelp', 'ui', 'log',
'calledModuleName', 'Bot', 'WikidataBot',
'Error', 'InvalidTitle', 'BadTitle', 'NoPage', 'SectionError',
'SiteDefinitionError', 'NoSuchSite', 'UnknownSite', 'UnknownFamily',
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 3321c76..ed90608 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -585,16 +585,25 @@
return os.path.basename(called)
-def handleArgs(*args):
- """Handle standard command line arguments, return the rest as a list.
+def handle_args(args=None, do_help=True):
+ """
+ Handle standard command line arguments, and return the rest as a list.
Takes the command line arguments as Unicode strings, processes all
- global parameters such as -lang or -log. Returns a list of all arguments
- that are not global. This makes sure that global arguments are applied
- first, regardless of the order in which the arguments were given.
+ global parameters such as -lang or -log, initialises the logging layer,
+ which emits startup information into the log at level 'verbose'.
+
+ This makes sure that global arguments are applied first,
+ regardless of the order in which the arguments were given.
args may be passed as an argument, thereby overriding sys.argv
+ @param args: Command line arguments
+ @type args: list of unicode
+ @param do_help: Handle parameter '-help' to show help and invoke sys.exit
+ @type do_help: bool
+ @return: list of arguments not recognised globally
+ @rtype: list of unicode
"""
# get commandline arguments if necessary
if not args:
@@ -609,9 +618,9 @@
moduleName = "terminal-interface"
nonGlobalArgs = []
username = None
- do_help = False
+ do_help = None if do_help else False
for arg in args:
- if arg == '-help':
+ if do_help is not False and arg == '-help':
do_help = True
elif arg.startswith('-family:'):
config.family = arg[len("-family:"):]
@@ -721,10 +730,16 @@
if do_help:
showHelp()
sys.exit(0)
+
pywikibot.debug(u"handleArgs() completed.", _logger)
return nonGlobalArgs
+def handleArgs(*args):
+ """DEPRECATED. Use handle_args()."""
+ return handle_args(args)
+
+
def showHelp(module_name=None):
"""Show help for the Bot."""
if not module_name:
--
To view, visit https://gerrit.wikimedia.org/r/165972
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I6eb20ca4b6ba8886f2d1fb31afcb2b2f86da40df
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Allow @unittest.expectedFailure on all TestCase methods
......................................................................
Allow @unittest.expectedFailure on all TestCase methods
TestCase allows test methods to be 'multi-site' tests, which
will be re-run for each site in the TestCase sites matrix.
The metaclass previously checked that the test method only
has 1 or 2 arguments. @unittest.expectedFailure replaces the
test method with a decorated version that appears as if it has
zero arguments, due to the way decorators work. This change
allows test methods with 0 arguments.
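A short illustration of the zero-argument appearance, assuming an
interpreter where unittest.expectedFailure wraps the test in a *args
closure (Python 2.7 behaves this way; newer interpreters may simply flag
the original method):

    import unittest

    class Demo(unittest.TestCase):

        @unittest.expectedFailure
        def test_known_bug(self):
            self.assertTrue(False)

    # The wrapper created by the decorator takes *args, so co_argcount is 0
    # instead of the 1 ('self') of the undecorated test method.
    print(Demo.test_known_bug.__code__.co_argcount)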
Change-Id: Ib6d74af168e0bad93df65da2965daffa6b3951e0
---
M tests/aspects.py
1 file changed, 9 insertions(+), 0 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
XZise: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/aspects.py b/tests/aspects.py
index 08db133..7ff221f 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -340,6 +340,8 @@
return super(MetaTestCaseClass, cls).__new__(cls, name, bases, dct)
+ # The following section is only processed if the test uses sites.
+
if 'cacheinfo' in dct and dct['cacheinfo']:
bases = tuple([CacheInfoMixin] + list(bases))
@@ -355,9 +357,16 @@
for test in tests:
test_func = dct[test]
+ # a method decorated with unittest.expectedFailure has no arguments,
+ # so it is assumed not to be a multi-site test method.
+ if test_func.__code__.co_argcount == 0:
+ continue
+
+ # a normal test method only accepts 'self'
if test_func.__code__.co_argcount == 1:
continue
+ # a multi-site test method only accepts 'self' and the site-key
if test_func.__code__.co_argcount != 2:
raise Exception(
'%s: Test method %s must accept either 1 or 2 arguments; '
--
To view, visit https://gerrit.wikimedia.org/r/166374
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ib6d74af168e0bad93df65da2965daffa6b3951e0
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>
XZise has submitted this change and it was merged.
Change subject: @expectedFailureIf(TRAVIS=true) on imageusage test
......................................................................
@expectedFailureIf(TRAVIS=true) on imageusage test
Add a new @expectedFailureIf decorator to the imageusage test, which has
been failing frequently of late, so that failure is allowed on TRAVIS but
not on other test hosts.
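The pattern applied below, as a standalone sketch (the class and test names
are made up and the test body is a placeholder):

    import os
    import unittest

    from tests.utils import expectedFailureIf

    class FlakyTests(unittest.TestCase):

        @expectedFailureIf(os.environ.get('TRAVIS', 'false') == 'true')
        def test_sometimes_fails_on_travis(self):
            # stand-in for a test that is currently unreliable on Travis
            self.assertTrue(True)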
Bug: 71971
Change-Id: I0fa7c8cced3b24c8e8914b7f05290b8be2523575
---
M tests/site_tests.py
M tests/utils.py
2 files changed, 17 insertions(+), 0 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
XZise: Looks good to me, approved
Mpaa: Looks good to me, but someone else must approve
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 248d03d..4e2fb16 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -9,6 +9,7 @@
import sys
+import os
from collections import Iterable
from datetime import datetime
import re
@@ -17,6 +18,8 @@
from pywikibot import config
from pywikibot.tools import MediaWikiVersion as LV
from pywikibot.data import api
+
+from tests.utils import expectedFailureIf
from tests.aspects import (
unittest, TestCase,
DefaultSiteTestCase,
@@ -646,6 +649,7 @@
self.assertIsInstance(using, pywikibot.Page)
self.assertIn(imagepage, list(using.imagelinks()))
+ @expectedFailureIf(os.environ.get('TRAVIS', 'false') == 'true')
def test_image_usage_in_redirects(self):
"""Test the site.imageusage() method on redirects only."""
mysite = self.get_site()
diff --git a/tests/utils.py b/tests/utils.py
index e59f959..ac24c5d 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -18,6 +18,19 @@
PywikibotTestCase = aspects.TestCase
+def expectedFailureIf(expect):
+ """
+ Unit test decorator to expect/allow failure under conditions.
+
+ @param expect: Flag to check if failure is allowed
+ @type expect: bool
+ """
+ if expect:
+ return unittest.expectedFailure
+ else:
+ return lambda orig: orig
+
+
class DummySiteinfo():
def __init__(self, cache):
--
To view, visit https://gerrit.wikimedia.org/r/166532
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I0fa7c8cced3b24c8e8914b7f05290b8be2523575
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>