Revision: 6926
Author: russblau
Date: 2009-05-29 18:56:04 +0000 (Fri, 29 May 2009)
Log Message:
-----------
Use "step" and "total" parameters in all Site methods that call API
generators (first step in regularizing this throughout the framework), and introduce a
convenience generator factory to reduce code duplication.
Modified Paths:
--------------
branches/rewrite/pywikibot/page.py
branches/rewrite/pywikibot/site.py
branches/rewrite/tests/site_tests.py
Modified: branches/rewrite/pywikibot/page.py
===================================================================
--- branches/rewrite/pywikibot/page.py 2009-05-27 15:03:23 UTC (rev 6925)
+++ branches/rewrite/pywikibot/page.py 2009-05-29 18:56:04 UTC (rev 6926)
@@ -941,7 +941,7 @@
else:
limit = revCount
self.site().loadrevisions(self, getText=False, rvdir=reverseOrder,
- limit=limit)
+ total=limit)
if getAll:
revCount = len(self._revisions)
return [ ( self._revisions[rev].revid,
@@ -1512,7 +1512,7 @@
def isEmptyCategory(self):
"""Return True if category has no members (including
subcategories)."""
- for member in self.site().categorymembers(self, limit=1):
+ for member in self.site().categorymembers(self, total=1):
return False
return True
Modified: branches/rewrite/pywikibot/site.py
===================================================================
--- branches/rewrite/pywikibot/site.py 2009-05-27 15:03:23 UTC (rev 6925)
+++ branches/rewrite/pywikibot/site.py 2009-05-29 18:56:04 UTC (rev 6926)
@@ -587,9 +587,9 @@
def __init__(self, code, fam=None, user=None, sysop=None):
BaseSite.__init__(self, code, fam, user, sysop)
self._namespaces = {
- # these are the MediaWiki built-in names, which always work
- # localized names are loaded later upon accessing the wiki
- # namespace prefixes are always case-insensitive, but the
+ # These are the MediaWiki built-in names, which always work.
+ # Localized names are loaded later upon accessing the wiki.
+ # Namespace prefixes are always case-insensitive, but the
# canonical forms are capitalized
-2: [u"Media"],
-1: [u"Special"],
@@ -629,6 +629,40 @@
if not language[:1].upper() + language[1:] in self.namespaces():
self._validlanguages.append(language)
+ def _generator(self, gen_class, type_arg=None, namespaces=None,
+ step=None, total=None, **args):
+ """Convenience method that returns an API generator.
+
+ All keyword args not listed below are passed to the generator's
+ constructor unchanged.
+
+ @param gen_class: the type of generator to construct (must be
+ a subclass of pywikibot.data.api.QueryGenerator)
+ @param type_arg: query type argument to be passed to generator's
+ constructor unchanged (not all types require this)
+ @type type_arg: str
+ @param namespaces: if not None, limit the query to namespaces in this
+ list
+ @type namespaces: int, or list of ints
+ @param step: if not None, limit each API call to this many items
+ @type step: int
+ @param total: if not None, limit the generator to yielding this many
+ items in total
+ @type total: int
+
+ """
+ if type_arg is not None:
+ gen = gen_class(type_arg, site=self, **args)
+ else:
+ gen = gen_class(site=self, **args)
+ if namespaces is not None:
+ gen.set_namespace(namespaces)
+ if step is not None and int(step) > 0:
+ gen.set_query_increment(int(step))
+ if total is not None and int(total) > 0:
+ gen.set_maximum_items(int(total))
+ return gen
+
def logged_in(self, sysop=False):
"""Return True if logged in with specified privileges, otherwise
False.
@@ -766,8 +800,7 @@
def mediawiki_message(self, key):
"""Return the MediaWiki message text for key "key"
"""
if not key in self._msgcache:
- msg_query = api.QueryGenerator(site=self, meta="allmessages",
- amfilter=key)
+ msg_query = api.QueryGenerator(meta="allmessages", amfilter=key)
for msg in msg_query:
if msg['name'] == key and not 'missing' in msg:
self._msgcache[key] = msg['*']
@@ -904,9 +937,10 @@
def loadpageinfo(self, page):
"""Load page info from api and save in page
attributes"""
title = page.title(withSection=False)
- query = api.PropertyGenerator("info", site=self,
- titles=title.encode(self.encoding()),
- inprop="protection")
+ query = self._generator(api.PropertyGenerator,
+ type_arg="info",
+ titles=title.encode(self.encoding()),
+ inprop="protection")
for pageitem in query:
if pageitem['title'] != title:
if pageitem['title'] in query.normalized_to \
@@ -927,11 +961,13 @@
"""
title = page.title(withSection=False)
- query = api.PropertyGenerator("imageinfo", site=self,
- titles=title.encode(self.encoding()),
- iiprop=["timestamp", "user", "comment",
- "url", "size", "sha1", "mime",
- "metadata", "archivename"])
+ query = self._generator(api.PropertyGenerator,
+ type_arg="imageinfo",
+ titles=title.encode(self.encoding()),
+ iiprop=["timestamp", "user", "comment",
+ "url", "size", "sha1", "mime",
+ "metadata", "archivename"]
+ )
if history:
query.request["iilimit"] = "max"
for pageitem in query:
@@ -1040,7 +1076,7 @@
props += '|templates'
if langlinks:
props += '|langlinks'
- rvgen = api.PropertyGenerator(props, site=self)
+ rvgen = api.PropertyGenerator(props)
rvgen.set_maximum_items(-1) # suppress use of "rvlimit" parameter
if len(pageids) == len(sublist):
# only use pageids if all pages have them
@@ -1077,7 +1113,7 @@
see API documentation for full list of types
"""
- query = api.PropertyGenerator("info|revisions", site=self,
+ query = api.PropertyGenerator("info|revisions",
titles=page.title(withSection=False),
intoken=tokentype)
for item in query:
@@ -1093,7 +1129,7 @@
# following group of methods map more-or-less directly to API queries
def pagebacklinks(self, page, followRedirects=False, filterRedirects=None,
- namespaces=None):
+ namespaces=None, step=None, total=None):
"""Iterate all pages that link to the given page.
@param page: The Page to get links to.
@@ -1104,12 +1140,14 @@
both (no filtering).
@param namespaces: If present, only return links from the namespaces
in this list.
+ @param step: Limit on number of pages to retrieve per API query.
+ @param total: Maximum number of pages to retrieve in total.
"""
bltitle = page.title(withSection=False).encode(self.encoding())
- blgen = api.PageGenerator("backlinks", gbltitle=bltitle, site=self)
- if namespaces is not None:
- blgen.set_namespace(namespaces)
+ blgen = self._generator(api.PageGenerator, type_arg="backlinks",
+ gbltitle=bltitle, namespaces=namespaces,
+ step=step, total=total)
if filterRedirects is not None:
blgen.request["gblfilterredir"] = filterRedirects and
"redirects"\
or
"nonredirects"
@@ -1118,8 +1156,10 @@
# links identified by MediaWiki as redirects may not really be,
# so we have to check each "redirect" page and see if it
# really redirects to this page
- redirgen = api.PageGenerator("backlinks", gbltitle=bltitle,
- site=self,
gblfilterredir="redirects")
+ redirgen = self._generator(api.PageGenerator,
+ type_arg="backlinks",
+ gbltitle=bltitle,
+ gblfilterredir="redirects")
genlist = {None: blgen}
for redir in redirgen:
if redir == page:
@@ -1139,7 +1179,8 @@
return itertools.chain(*genlist.values())
return blgen
- def page_embeddedin(self, page, filterRedirects=None, namespaces=None):
+ def page_embeddedin(self, page, filterRedirects=None, namespaces=None,
+ step=None, total=None):
"""Iterate all pages that embedded the given page as a template.
@param page: The Page to get inclusions for.
@@ -1151,9 +1192,9 @@
"""
eititle = page.title(withSection=False).encode(self.encoding())
- eigen = api.PageGenerator("embeddedin", geititle=eititle, site=self)
- if namespaces is not None:
- eigen.set_namespace(namespaces)
+ eigen = self._generator(api.PageGenerator, type_arg="embeddedin",
+ geititle=eititle, namespaces=namespaces,
+ step=step, total=total)
if filterRedirects is not None:
eigen.request["geifilterredir"] = filterRedirects and
"redirects"\
or
"nonredirects"
@@ -1161,24 +1202,30 @@
def pagereferences(self, page, followRedirects=False, filterRedirects=None,
withTemplateInclusion=True, onlyTemplateInclusion=False,
- namespaces=None):
+ namespaces=None, step=None, total=None):
"""Convenience method combining pagebacklinks and
page_embeddedin."""
if onlyTemplateInclusion:
- return self.page_embeddedin(page, namespaces=namespaces)
+ return self.page_embeddedin(page, namespaces=namespaces,
+ step=step, total=total)
if not withTemplateInclusion:
return self.pagebacklinks(page, followRedirects,
- namespaces=namespaces)
+ namespaces=namespaces,
+ step=step, total=total)
import itertools
- return itertools.chain(
- self.pagebacklinks(page, followRedirects,
- filterRedirects, namespaces=namespaces),
- self.page_embeddedin(page, filterRedirects,
- namespaces=namespaces)
- )
+ return itertools.islice(
+ itertools.chain(
+ self.pagebacklinks(
+ page, followRedirects, filterRedirects,
+ namespaces=namespaces, step=step),
+ self.page_embeddedin(
+ page, filterRedirects, namespaces=namespaces,
+ step=step)
+ ),
+ total)
def pagelinks(self, page, namespaces=None, follow_redirects=False,
- limit=None):
+ step=None, total=None):
"""Iterate internal wikilinks contained (or transcluded) on page.
@param namespaces: Only iterate pages in these namespaces (default: all)
@@ -1187,9 +1234,8 @@
rather than the redirect page
"""
- plgen = api.PageGenerator("links", site=self)
- if isinstance(limit, int):
- plgen.set_maximum_items(limit)
+ plgen = self._generator(api.PageGenerator, type_arg="links",
+ namespaces=namespaces, step=step, total=total)
if hasattr(page, "_pageid"):
plgen.request['pageids'] = str(page._pageid)
else:
@@ -1197,15 +1243,14 @@
plgen.request['titles'] = pltitle
if follow_redirects:
plgen.request['redirects'] = ''
- if namespaces is not None:
- plgen.set_namespace(namespaces)
return plgen
@deprecate_arg("withSortKey", None) # Sortkey doesn't work with
generator
- def pagecategories(self, page, withSortKey=None):
+ def pagecategories(self, page, step=None, total=None):
"""Iterate categories to which page belongs."""
- clgen = api.CategoryPageGenerator("categories", site=self)
+ clgen = self._generator(api.CategoryPageGenerator,
+ type_arg="categories", step=step, total=total)
if hasattr(page, "_pageid"):
clgen.request['pageids'] = str(page._pageid)
else:
@@ -1213,23 +1258,24 @@
clgen.request['titles'] = cltitle
return clgen
- def pageimages(self, page):
+ def pageimages(self, page, step=None, total=None):
"""Iterate images used (not just linked) on the
page."""
imtitle = page.title(withSection=False).encode(self.encoding())
- imgen = api.ImagePageGenerator("images", titles=imtitle, site=self)
+ imgen = self._generator(api.ImagePageGenerator, type_arg="images",
+ titles=imtitle, step=step, total=total)
return imgen
- def pagetemplates(self, page, namespaces=None):
+ def pagetemplates(self, page, namespaces=None, step=None, total=None):
"""Iterate templates transcluded (not just linked) on the
page."""
tltitle = page.title(withSection=False).encode(self.encoding())
- tlgen = api.PageGenerator("templates", titles=tltitle, site=self)
- if namespaces is not None:
- tlgen.set_namespace(namespaces)
+ tlgen = self._generator(api.PageGenerator, type_arg="templates",
+ titles=tltitle, namespaces=namespaces,
+ step=step, total=total)
return tlgen
- def categorymembers(self, category, namespaces=None, limit=None):
+ def categorymembers(self, category, namespaces=None, step=None, total=None):
"""Iterate members of specified category.
@param category: The Category to iterate.
@@ -1239,8 +1285,6 @@
however, that the iterated values are always Page objects, even
if in the Category or Image namespace.
@type namespaces: list of ints
- @param limit: maximum number of pages to iterate (default: all)
- @type limit: int
"""
if category.namespace() != 14:
@@ -1248,18 +1292,18 @@
u"categorymembers: non-Category page '%s' specified"
% category.title())
cmtitle = category.title(withSection=False).encode(self.encoding())
- cmgen = api.PageGenerator("categorymembers", gcmtitle=cmtitle,
- gcmprop="ids|title|sortkey", site=self)
- if namespaces is not None:
- cmgen.set_namespace(namespaces)
- if isinstance(limit, int):
- cmgen.set_maximum_items(limit)
+ cmgen = self._generator(api.PageGenerator,
+ type_arg="categorymembers",
+ gcmtitle=cmtitle,
+ gcmprop="ids|title|sortkey",
+ namespaces=namespaces, step=step,
+ total=total)
return cmgen
def loadrevisions(self, page=None, getText=False, revids=None,
- limit=None, startid=None, endid=None, starttime=None,
- endtime=None, rvdir=None, user=None, excludeuser=None,
- section=None, sysop=False):
+ startid=None, endid=None, starttime=None,
+ endtime=None, rvdir=None, user=None, excludeuser=None,
+ section=None, sysop=False, step=None, total=None):
"""Retrieve and store revision information.
By default, retrieves the last (current) revision of the page,
@@ -1283,8 +1327,6 @@
@param revids: retrieve only the specified revision ids (required
unless page is specified)
@type revids: list of ints
- @param limit: Retrieve no more than this number of revisions
- @type limit: int
@param startid: retrieve revisions starting with this revid
@param endid: stop upon retrieving this revid
@param starttime: retrieve revisions starting at this Timestamp
@@ -1305,7 +1347,8 @@
rvdir is None and
user is None and
excludeuser is None and
- limit is None) # if True, we are retrieving current revision
+ step is None and
+ total is None) # if True, retrieving current revision
# check for invalid argument combinations
if page is None and revids is None:
@@ -1333,15 +1376,17 @@
# assemble API request
if revids is None:
rvtitle = page.title(withSection=False).encode(self.encoding())
- rvgen = api.PropertyGenerator(u"info|revisions", titles=rvtitle,
- site=self)
+ rvgen = self._generator(api.PropertyGenerator,
+ type_arg=u"info|revisions",
+ titles=rvtitle, step=step, total=total)
else:
if isinstance(revids, (int, basestring)):
ids = unicode(revids)
else:
ids = u"|".join(unicode(r) for r in revids)
- rvgen = api.PropertyGenerator(u"info|revisions", revids=ids,
- site=self)
+ rvgen = self._generator(api.PropertyGenerator,
+ type_arg=u"info|revisions", revids=ids,
+ step=step, total=total)
if getText:
rvgen.request[u"rvprop"] = \
u"ids|flags|timestamp|user|comment|content"
@@ -1349,8 +1394,6 @@
rvgen.request[u"rvsection"] = unicode(section)
if latest or "revids" in rvgen.request:
rvgen.set_maximum_items(-1) # suppress use of rvlimit parameter
- elif isinstance(limit, int):
- rvgen.set_maximum_items(limit)
if rvdir:
rvgen.request[u"rvdir"] = u"newer"
elif rvdir is not None:
@@ -1385,12 +1428,13 @@
# No such function in the API (this method isn't called anywhere)
raise NotImplementedError
- def pagelanglinks(self, page):
+ def pagelanglinks(self, page, step=None, total=None):
"""Iterate all interlanguage links on page, yielding Link
objects."""
lltitle = page.title(withSection=False)
- llquery = api.PropertyGenerator("langlinks",
- titles=lltitle.encode(self.encoding()),
- site=self)
+ llquery = self._generator(api.PropertyGenerator,
+ type_arg="langlinks",
+ titles=lltitle.encode(self.encoding()),
+ step=step, total=total)
for pageitem in llquery:
if pageitem['title'] != lltitle:
raise Error(
@@ -1403,12 +1447,12 @@
linkdata['*'],
source=self)
- def page_extlinks(self, page):
+ def page_extlinks(self, page, step=None, total=None):
"""Iterate all external links on page, yielding URL
strings."""
eltitle = page.title(withSection=False)
- elquery = api.PropertyGenerator("extlinks",
- titles=eltitle.encode(self.encoding()),
- site=self)
+ elquery = self._generator(api.PropertyGenerator, type_arg="extlinks",
+ titles=eltitle.encode(self.encoding()),
+ step=step, total=total)
for pageitem in elquery:
if pageitem['title'] != eltitle:
raise RuntimeError(
@@ -1419,13 +1463,12 @@
for linkdata in pageitem['extlinks']:
yield linkdata['*']
- # TODO: implement a method to retrieve categoryinfo
def getcategoryinfo(self, category):
"""Retrieve data on contents of category."""
cititle = category.title(withSection=False)
- ciquery = api.PropertyGenerator("categoryinfo",
- titles=cititle.encode(self.encoding()),
- site=self)
+ ciquery = self._generator(api.PropertyGenerator,
+ type_arg="categoryinfo",
+ titles=cititle.encode(self.encoding()))
for pageitem in ciquery:
if pageitem['title'] != cititle:
raise Error(
@@ -1442,10 +1485,11 @@
return category._catinfo
@deprecate_arg("throttle", None)
+ @deprecate_arg("limit", "total")
def allpages(self, start="!", prefix="", namespace=0,
filterredir=None,
filterlanglinks=None, minsize=None, maxsize=None,
- protect_type=None, protect_level=None, limit=None,
- reverse=False, includeredirects=None):
+ protect_type=None, protect_level=None, reverse=False,
+ includeredirects=None, step=None, total=None):
"""Iterate pages in a single namespace.
Note: parameters includeRedirects and throttle are deprecated and
@@ -1469,8 +1513,6 @@
@type protect_type: str
@param protect_level: only yield pages that have protection at this
level; can only be used if protect_type is specified
- @param limit: maximum number of pages to iterate (default: iterate
- all pages in namespace)
@param reverse: if True, iterate in reverse Unicode lexigraphic
order (default: iterate in forward order)
@param includeredirects: DEPRECATED, use filterredirs instead
@@ -1489,8 +1531,9 @@
else:
filterredirs = False
- apgen = api.PageGenerator("allpages", gapnamespace=str(namespace),
- gapfrom=start, site=self)
+ apgen = self._generator(api.PageGenerator, type_arg="allpages",
+ gapnamespace=str(namespace),
+ gapfrom=start, step=step, total=total)
if prefix:
apgen.request["gapprefix"] = prefix
if filterredir is not None:
@@ -1509,8 +1552,6 @@
apgen.request["gapprtype"] = protect_type
if isinstance(protect_level, basestring):
apgen.request["gapprlevel"] = protect_level
- if isinstance(limit, int):
- apgen.set_maximum_items(limit)
if reverse:
apgen.request["gapdir"] = "descending"
return apgen
@@ -1527,7 +1568,7 @@
def alllinks(self, start="!", prefix="", namespace=0,
unique=False,
- limit=None, fromids=False):
+ fromids=False, step=None, total=None):
"""Iterate all links to pages (which need not exist) in one
namespace.
Note that, in practice, links that were found on pages that have
@@ -1540,8 +1581,6 @@
(default: 0)
@param unique: If True, only iterate each link title once (default:
iterate once for each linking page)
- @param limit: maximum number of pages to iterate (default: iterate
- all pages in namespace)
@param fromids: if True, include the pageid of the page containing
each link (default: False) as the '_fromid' attribute of the Page;
cannot be combined with unique
@@ -1551,12 +1590,11 @@
raise Error("alllinks: unique and fromids cannot both be True.")
if not isinstance(namespace, int):
raise Error("alllinks: only one namespace permitted.")
- algen = api.ListGenerator("alllinks", alnamespace=str(namespace),
- alfrom=start, site=self)
+ algen = self._generator(api.ListGenerator, type_arg="alllinks",
+ alnamespace=str(namespace), alfrom=start,
+ step=step, total=total)
if prefix:
algen.request["alprefix"] = prefix
- if isinstance(limit, int):
- algen.set_maximum_items(limit)
if unique:
algen.request["alunique"] = ""
if fromids:
@@ -1567,7 +1605,7 @@
p._fromid = link['fromid']
yield p
- def allcategories(self, start="!", prefix="", limit=None,
+ def allcategories(self, start="!", prefix="", step=None,
total=None,
reverse=False):
"""Iterate categories used (which need not have a Category page).
@@ -1577,18 +1615,15 @@
@param start: Start at this category title (category need not exist).
@param prefix: Only yield categories starting with this string.
- @param limit: maximum number of categories to iterate (default:
- iterate all)
@param reverse: if True, iterate in reverse Unicode lexigraphic
order (default: iterate in forward order)
"""
- acgen = api.CategoryPageGenerator("allcategories",
- gacfrom=start, site=self)
+ acgen = self._generator(api.CategoryPageGenerator,
+ type_arg="allcategories", gacfrom=start,
+ step=step, total=total)
if prefix:
acgen.request["gacprefix"] = prefix
- if isinstance(limit, int):
- acgen.set_maximum_items(limit)
if reverse:
acgen.request["gacdir"] = "descending"
return acgen
@@ -1600,9 +1635,10 @@
limit = None
else:
limit = number
- return self.allcategories(limit=limit)
+ return self.allcategories(total=limit)
- def allusers(self, start="!", prefix="", limit=None,
group=None):
+ def allusers(self, start="!", prefix="", group=None, step=None,
+ total=None):
"""Iterate registered users, ordered by username.
Iterated values are dicts containing 'name', 'editcount',
@@ -1613,31 +1649,28 @@
@param start: start at this username (name need not exist)
@param prefix: only iterate usernames starting with this substring
- @param limit: maximum number of users to iterate (default: all)
@param group: only iterate users that are members of this group
@type group: str
"""
- augen = api.ListGenerator("allusers", aufrom=start,
- auprop="editcount|groups|registration",
- site=self)
+ augen = self._generator(api.ListGenerator, type_arg="allusers",
+ auprop="editcount|groups|registration",
+ aufrom=start, step=step, total=total)
if prefix:
augen.request["auprefix"] = prefix
if group:
augen.request["augroup"] = group
- if isinstance(limit, int):
- augen.set_maximum_items(limit)
return augen
def allimages(self, start="!", prefix="", minsize=None,
maxsize=None,
- limit=None, reverse=False, sha1=None, sha1base36=None):
+ reverse=False, sha1=None, sha1base36=None, step=None,
+ total=None):
"""Iterate all images, ordered by image title.
Yields ImagePages, but these pages need not exist on the wiki.
@param start: start at this title (name need not exist)
@param prefix: only iterate titles starting with this substring
- @param limit: maximum number of titles to iterate (default: all)
@param minsize: only iterate images of at least this many bytes
@param maxsize: only iterate images of no more than this many bytes
@param reverse: if True, iterate in reverse lexigraphic order
@@ -1646,12 +1679,11 @@
@param sha1base36: same as sha1 but in base 36
"""
- aigen = api.ImagePageGenerator("allimages", gaifrom=start,
- site=self)
+ aigen = self._generator(api.ImagePageGenerator,
+ type_arg="allimages", gaifrom=start,
+ step=step, total=total)
if prefix:
aigen.request["gaiprefix"] = prefix
- if isinstance(limit, int):
- aigen.set_maximum_items(limit)
if isinstance(minsize, int):
aigen.request["gaiminsize"] = str(minsize)
if isinstance(maxsize, int):
@@ -1665,7 +1697,7 @@
return aigen
def blocks(self, starttime=None, endtime=None, reverse=False,
- blockids=None, users=None, limit=None):
+ blockids=None, users=None, step=None, total=None):
"""Iterate all current blocks, in order of creation.
Note that logevents only logs user blocks, while this method
@@ -1678,7 +1710,6 @@
@param reverse: if True, iterate oldest blocks first (default: newest)
@param blockids: only iterate blocks with these id numbers
@param users: only iterate blocks affecting these usernames or IPs
- @param limit: maximum number of blocks to iterate (default: all)
"""
if starttime and endtime:
@@ -1690,7 +1721,8 @@
if endtime > starttime:
raise pywikibot.Error(
"blocks: endtime must be before starttime with reverse=False")
- bkgen = api.ListGenerator("blocks", site=self)
+ bkgen = self._generator(api.ListGenerator, type_arg="blocks",
+ step=step, total=total)
bkgen.request["bkprop"] = \
"id|user|by|timestamp|expiry|reason|range|flags"
if starttime:
@@ -1703,57 +1735,46 @@
bkgen.request["bkids"] = blockids
if users:
bkgen.request["bkusers"] = users
- if isinstance(limit, int):
- bkgen.set_maximum_items(limit)
return bkgen
def exturlusage(self, url, protocol="http", namespaces=None,
- limit=None):
+ step=None, total=None):
"""Iterate Pages that contain links to the given URL.
@param url: The URL to search for (without the protocol prefix);
this many include a '*' as a wildcard, only at the start of the
hostname
@param protocol: The protocol prefix (default: "http")
- @param namespaces: Only iterate pages in these namespaces (default: all)
- @type namespaces: list of ints
- @param limit: Only iterate this many linking pages (default: all)
"""
- eugen = api.PageGenerator("exturlusage", geuquery=url,
- geuprotocol=protocol, site=self)
- if namespaces is not None:
- eugen.set_namespace(namespaces)
- if isinstance(limit, int):
- eugen.set_maximum_items(limit)
+ eugen = self._generator(api.PageGenerator, type_arg="exturlusage",
+ geuquery=url, geuprotocol=protocol,
+ namespaces=namespaces, step=step,
+ total=total)
return eugen
def imageusage(self, image, namespaces=None, filterredir=None,
- limit=None):
+ step=None, total=None):
"""Iterate Pages that contain links to the given ImagePage.
- @param image: the image to search for (ImagePage need not exist on the wiki)
+ @param image: the image to search for (ImagePage need not exist on
+ the wiki)
@type image: ImagePage
- @param namespaces: Only iterate pages in these namespaces (default: all)
- @type namespaces: list of ints
@param filterredir: if True, only yield redirects; if False (and not
None), only yield non-redirects (default: yield both)
- @param limit: Only iterate this many linking pages (default: all)
"""
- iugen = api.PageGenerator("imageusage", site=self,
- giutitle=image.title(withSection=False))
- if namespaces is not None:
- iugen.set_namespace(namespaces)
- if isinstance(limit, int):
- iugen.set_maximum_items(limit)
+ iuargs = dict(giutitle=image.title(withSection=False))
if filterredir is not None:
- iugen.request["giufilterredir"] = (filterredir and
"redirects"
- or "nonredirects")
+ iuargs["giufilterredir"] = (filterredir and "redirects"
+ or "nonredirects")
+ iugen = self._generator(api.PageGenerator, type_arg="imageusage",
+ namespaces=namespaces, step=step,
+ total=total, **iuargs)
return iugen
def logevents(self, logtype=None, user=None, page=None,
- start=None, end=None, reverse=False, limit=None):
+ start=None, end=None, reverse=False, step=None, total=None):
"""Iterate all log entries.
@param logtype: only iterate entries of this type (see wiki
@@ -1765,7 +1786,6 @@
@param start: only iterate entries from and after this Timestamp
@param end: only iterate entries up to and through this Timestamp
@param reverse: if True, iterate oldest entries first (default: newest)
- @param limit: only iterate up to this many entries
"""
if start and end:
@@ -1777,7 +1797,8 @@
if start < end:
raise Error(
"logevents: start must be later than end with
reverse=False")
- legen = api.LogEntryListGenerator(logtype, site=self)
+ legen = self._generator(api.LogEntryListGenerator, type_arg=logtype,
+ step=step, total=total)
if logtype is not None:
legen.request["letype"] = logtype
if user is not None:
@@ -1790,22 +1811,19 @@
legen.request["leend"] = str(end)
if reverse:
legen.request["ledir"] = "newer"
- if isinstance(limit, int):
- legen.set_maximum_items(limit)
return legen
- def recentchanges(self, start=None, end=None, reverse=False, limit=None,
+ def recentchanges(self, start=None, end=None, reverse=False,
namespaces=None, pagelist=None, changetype=None,
showMinor=None, showBot=None, showAnon=None,
- showRedirects=None, showPatrolled=None):
+ showRedirects=None, showPatrolled=None, step=None,
+ total=None):
"""Iterate recent changes.
@param start: Timestamp to start listing from
@param end: Timestamp to end listing at
@param reverse: if True, start with oldest changes (default: newest)
@param limit: iterate no more than this number of entries
- @param namespaces: iterate changes to pages in these namespaces only
- @type namespaces: list of ints
@param pagelist: iterate changes to pages in this list only
@param pagelist: list of Pages
@param changetype: only iterate changes of this type ("edit" for
@@ -1832,19 +1850,17 @@
if start < end:
raise Error(
"recentchanges: start must be later than end with reverse=False")
- rcgen = api.ListGenerator("recentchanges", site=self,
- rcprop="user|comment|timestamp|title|ids"
- "|redirect|patrolled|loginfo|flags")
+ rcgen = self._generator(api.ListGenerator, type_arg="recentchanges",
+ rcprop="user|comment|timestamp|title|ids"
+ "|redirect|patrolled|loginfo|flags",
+ namespaces=namespaces, step=step,
+ total=total)
if start is not None:
rcgen.request["rcstart"] = str(start)
if end is not None:
rcgen.request["rcend"] = str(end)
if reverse:
rcgen.request["rcdir"] = "newer"
- if isinstance(limit, int):
- rcgen.set_maximum_items(limit)
- if namespaces is not None:
- rcgen.set_namespace(namespaces)
if pagelist:
if self.versionnumber() > 14:
pywikibot.output(
@@ -1870,7 +1886,7 @@
@deprecate_arg("number", "limit")
def search(self, searchstring, namespaces=None, where="text",
- getredirects=False, limit=None):
+ getredirects=False, step=None, total=None):
"""Iterate Pages that contain the searchstring.
Note that this may include non-existing Pages if the wiki's database
@@ -1883,30 +1899,27 @@
@param namespaces: search only in these namespaces (defaults to 0)
@type namespaces: list of ints
@param getredirects: if True, include redirects in results
- @param limit: maximum number of results to iterate
"""
if not searchstring:
raise Error("search: searchstring cannot be empty")
if where not in ("text", "titles"):
raise Error("search: unrecognized 'where' value: %s" %
where)
- srgen = api.PageGenerator("search", gsrsearch=searchstring,
- gsrwhat=where, site=self)
if not namespaces:
pywikibot.output(u"search: namespaces cannot be empty; using
[0].",
level=pywikibot.WARNING)
namespaces = [0]
- if namespaces is not None:
- srgen.set_namespace(namespaces)
+ srgen = self._generator(api.PageGenerator, type_arg="search",
+ gsrsearch=searchstring, gsrwhat=where,
+ namespaces=namespaces, step=step,
+ total=total)
if getredirects:
srgen.request["gsrredirects"] = ""
- if isinstance(limit, int):
- srgen.set_maximum_items(limit)
return srgen
def usercontribs(self, user=None, userprefix=None, start=None, end=None,
- reverse=False, limit=None, namespaces=None,
- showMinor=None):
+ reverse=False, namespaces=None, showMinor=None,
+ step=None, total=None):
"""Iterate contributions by a particular user.
Iterated values are in the same format as recentchanges.
@@ -1917,9 +1930,6 @@
@param start: Iterate contributions starting at this Timestamp
@param end: Iterate contributions ending at this Timestamp
@param reverse: Iterate oldest contributions first (default: newest)
- @param limit: Maximum number of contributions to iterate
- @param namespaces: Only iterate contributions in these namespaces
- @type namespaces: list of ints
@param showMinor: if True, iterate only minor edits; if False and
not None, iterate only non-minor edits (default: iterate both)
@@ -1936,8 +1946,10 @@
if start < end:
raise Error(
"usercontribs: start must be later than end with
reverse=False")
- ucgen = api.ListGenerator("usercontribs", site=self,
- ucprop="ids|title|timestamp|comment|flags")
+ ucgen = self._generator(api.ListGenerator, type_arg="usercontribs",
+ ucprop="ids|title|timestamp|comment|flags",
+ namespaces=namespaces, step=step,
+ total=total)
if user:
ucgen.request["ucuser"] = user
if userprefix:
@@ -1948,17 +1960,13 @@
ucgen.request["ucend"] = str(end)
if reverse:
ucgen.request["ucdir"] = "newer"
- if isinstance(limit, int):
- ucgen.set_maximum_items(limit)
- if namespaces is not None:
- ucgen.set_namespace(namespaces)
if showMinor is not None:
ucgen.request["ucshow"] = showMinor and "minor" or
"!minor"
return ucgen
def watchlist_revs(self, start=None, end=None, reverse=False,
namespaces=None, showMinor=None, showBot=None,
- showAnon=None, limit=None):
+ showAnon=None, step=None, total=None):
"""Iterate revisions to pages on the bot user's watchlist.
Iterated values will be in same format as recentchanges.
@@ -1966,16 +1974,12 @@
@param start: Iterate revisions starting at this Timestamp
@param end: Iterate revisions ending at this Timestamp
@param reverse: Iterate oldest revisions first (default: newest)
- @param namespaces: only iterate revisions to pages in these
- namespaces (default: all)
- @type namespaces: list of ints
@param showMinor: if True, only list minor edits; if False (and not
None), only list non-minor edits
@param showBot: if True, only list bot edits; if False (and not
None), only list non-bot edits
@param showAnon: if True, only list anon edits; if False (and not
None), only list non-anon edits
- @param limit: Maximum number of revisions to iterate
"""
if start and end:
@@ -1987,8 +1991,10 @@
if start < end:
raise Error(
"watchlist_revs: start must be later than end with reverse=False")
- wlgen = api.ListGenerator("watchlist", wlallrev="", site=self,
- wlprop="user|comment|timestamp|title|ids|flags")
+ wlgen = self._generator(api.ListGenerator, type_arg="watchlist",
+ wlprop="user|comment|timestamp|title|ids|flags",
+ wlallrev="", namespaces=namespaces,
+ step=step, total=total)
#TODO: allow users to ask for "patrol" as well?
if start is not None:
wlgen.request["wlstart"] = str(start)
@@ -1996,10 +2002,6 @@
wlgen.request["wlend"] = str(end)
if reverse:
wlgen.request["wldir"] = "newer"
- if isinstance(limit, int):
- wlgen.set_maximum_items(limit)
- if namespaces is not None:
- wlgen.set_namespace(namespaces)
filters = {'minor': showMinor,
'bot': showBot,
'anon': showAnon}
@@ -2011,8 +2013,8 @@
wlgen.request["wlshow"] = "|".join(wlshow)
return wlgen
- def deletedrevs(self, page, start=None, end=None, reverse=None, limit=None,
- get_text=False):
+ def deletedrevs(self, page, start=None, end=None, reverse=None,
+ get_text=False, step=None, total=None):
"""Iterate deleted revisions.
Each value returned by the iterator will be a dict containing the
@@ -2025,7 +2027,6 @@
@param start: Iterate revisions starting at this Timestamp
@param end: Iterate revisions ending at this Timestamp
@param reverse: Iterate oldest revisions first (default: newest)
- @param limit: Iterate no more than this number of revisions.
@param get_text: If True, retrieve the content of each revision and
an undelete token
@@ -2061,9 +2062,10 @@
"deletedrevs: User:%s not authorized to view deleted content."
% self.user())
- drgen = api.ListGenerator("deletedrevs", site=self,
- titles=page.title(withSection=False),
- drprop="revid|user|comment|minor")
+ drgen = self._generator(api.ListGenerator, type_arg="deletedrevs",
+ titles=page.title(withSection=False),
+ drprop="revid|user|comment|minor",
+ step=step, total=total)
if get_text:
drgen.request['drprop'] = drgen.request['drprop'] + "|content|token"
if start is not None:
@@ -2072,8 +2074,6 @@
drgen.request["drend"] = str(end)
if reverse:
drgen.request["drdir"] = "newer"
- if isinstance(limit, int):
- drgen.set_maximum_items(limit)
return drgen
def users(self, usernames):
@@ -2085,26 +2085,26 @@
"""
if not isinstance(usernames, basestring):
usernames = u"|".join(usernames)
- usgen = api.ListGenerator("users", ususers=usernames, site=self,
- usprop="blockinfo|groups|editcount|registration")
+ usgen = api.ListGenerator(
+ "users", ususers=usernames, site=self,
+ usprop="blockinfo|groups|editcount|registration")
return usgen
- def randompages(self, limit=1, namespaces=None, redirects=False):
+ def randompages(self, step=None, total=1, namespaces=None,
+ redirects=False):
"""Iterate a number of random pages.
Pages are listed in a fixed sequence, only the starting point is
random.
- @param limit: the maximum number of pages to iterate (default: 1)
+ @param total: the maximum number of pages to iterate (default: 1)
@param namespaces: only iterate pages in these namespaces.
@param redirects: if True, include only redirect pages in results
(default: include only non-redirects)
"""
- rngen = api.PageGenerator("random", site=self)
- rngen.set_maximum_items(limit)
- if namespaces is not None:
- rngen.set_namespace(namespaces)
+ rngen = self._generator(api.PageGenerator, type_arg="random",
+ namespaces=namespaces, step=step, total=total)
if redirects:
rngen.request["grnredirect"] = ""
return rngen
@@ -2476,15 +2476,17 @@
#TODO: implement patrol
+ @deprecated("Site().exturlusage")
def linksearch(self, siteurl, limit=None):
"""Backwards-compatible interface to
exturlusage()"""
- return self.exturlusage(siteurl, limit=limit)
+ return self.exturlusage(siteurl, total=limit)
+ @deprecated('Site().logevents(logtype="upload",...)')
@deprecate_arg("repeat", None)
def newimages(self, number=100, lestart=None, leend=None, leuser=None,
letitle=None):
"""Yield ImagePages from most recent uploads"""
- return self.logevents(logtype="upload", limit=number, start=lestart,
+ return self.logevents(logtype="upload", total=number, start=lestart,
end=leend, user=leuser, title=letitle)
def getImagesFromAnHash(self, hash_found=None):
Modified: branches/rewrite/tests/site_tests.py
===================================================================
--- branches/rewrite/tests/site_tests.py 2009-05-27 15:03:23 UTC (rev 6925)
+++ branches/rewrite/tests/site_tests.py 2009-05-29 18:56:04 UTC (rev 6926)
@@ -45,7 +45,7 @@
self.assertType(mysite.linktrail(), basestring)
self.assertType(mysite.redirect(default=True), basestring)
self.assertType(mysite.disambcategory(), pywikibot.Category)
- self.assertEqual(mysite.linkto("foo"), u"[[Foo]]")
+ self.assertEqual(mysite.linkto("foo"), u"[[Foo]]") # deprecated
self.assertFalse(mysite.isInterwikiLink("foo"))
self.assertType(mysite.redirectRegex().pattern, basestring)
self.assertType(mysite.category_on_one_line(), bool)
@@ -173,7 +173,7 @@
"""Test that preloading works"""
count = 0
- for page in mysite.preloadpages(mysite.pagelinks(mainpage, limit=10)):
+ for page in mysite.preloadpages(mysite.pagelinks(mainpage, total=10)):
self.assertType(page, pywikibot.Page)
self.assertType(page.exists(), bool)
if page.exists():
@@ -233,7 +233,7 @@
set(mysite.pagelinks(mainpage, namespaces=[0, 1]))))
for target in mysite.preloadpages(
mysite.pagelinks(mainpage, follow_redirects=True,
- limit=5)):
+ total=5)):
self.assertType(target, pywikibot.Page)
self.assertFalse(target.isRedirectPage())
# test pagecategories
@@ -268,65 +268,65 @@
def testAllPages(self):
"""Test the site.allpages() method"""
- fwd = list(mysite.allpages(limit=10))
+ fwd = list(mysite.allpages(total=10))
self.assertTrue(len(fwd) <= 10)
for page in fwd:
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertEqual(page.namespace(), 0)
- rev = list(mysite.allpages(reverse=True, start="Aa", limit=12))
+ rev = list(mysite.allpages(reverse=True, start="Aa", total=12))
self.assertTrue(len(rev) <= 12)
for page in rev:
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertEqual(page.namespace(), 0)
self.assertTrue(page.title() <= "Aa")
- for page in mysite.allpages(start="Py", limit=5):
+ for page in mysite.allpages(start="Py", total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertEqual(page.namespace(), 0)
self.assertTrue(page.title() >= "Py")
- for page in mysite.allpages(prefix="Pre", limit=5):
+ for page in mysite.allpages(prefix="Pre", total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertEqual(page.namespace(), 0)
self.assertTrue(page.title().startswith("Pre"))
- for page in mysite.allpages(namespace=1, limit=5):
+ for page in mysite.allpages(namespace=1, total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertEqual(page.namespace(), 1)
- for page in mysite.allpages(filterredir=True, limit=5):
+ for page in mysite.allpages(filterredir=True, total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertEqual(page.namespace(), 0)
self.assertTrue(page.isRedirectPage())
- for page in mysite.allpages(filterredir=False, limit=5):
+ for page in mysite.allpages(filterredir=False, total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertEqual(page.namespace(), 0)
self.assertFalse(page.isRedirectPage())
-## for page in mysite.allpages(filterlanglinks=True, limit=5):
+## for page in mysite.allpages(filterlanglinks=True, total=5):
## self.assertType(page, pywikibot.Page)
## self.assertTrue(mysite.page_exists(page))
## self.assertEqual(page.namespace(), 0)
-## for page in mysite.allpages(filterlanglinks=False, limit=5):
+## for page in mysite.allpages(filterlanglinks=False, total=5):
## self.assertType(page, pywikibot.Page)
## self.assertTrue(mysite.page_exists(page))
## self.assertEqual(page.namespace(), 0)
- for page in mysite.allpages(minsize=100, limit=5):
+ for page in mysite.allpages(minsize=100, total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertTrue(len(page.text) >= 100)
- for page in mysite.allpages(maxsize=200, limit=5):
+ for page in mysite.allpages(maxsize=200, total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertTrue(len(page.text) <= 200)
- for page in mysite.allpages(protect_type="edit", limit=5):
+ for page in mysite.allpages(protect_type="edit", total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertTrue("edit" in page._protection)
for page in mysite.allpages(protect_type="edit",
- protect_level="sysop", limit=5):
+ protect_level="sysop", total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
self.assertTrue("edit" in page._protection)
@@ -335,24 +335,24 @@
def testAllLinks(self):
"""Test the site.alllinks() method"""
- fwd = list(mysite.alllinks(limit=10))
+ fwd = list(mysite.alllinks(total=10))
self.assertTrue(len(fwd) <= 10)
self.assertTrue(all(isinstance(link, pywikibot.Page) for link in fwd))
- uniq = list(mysite.alllinks(limit=10, unique=True))
+ uniq = list(mysite.alllinks(total=10, unique=True))
self.assertTrue(all(link in uniq for link in fwd))
- for page in mysite.alllinks(start="Link", limit=5):
+ for page in mysite.alllinks(start="Link", total=5):
self.assertType(page, pywikibot.Page)
self.assertEqual(page.namespace(), 0)
self.assertTrue(page.title() >= "Link")
- for page in mysite.alllinks(prefix="Fix", limit=5):
+ for page in mysite.alllinks(prefix="Fix", total=5):
self.assertType(page, pywikibot.Page)
self.assertEqual(page.namespace(), 0)
self.assertTrue(page.title().startswith("Fix"))
- for page in mysite.alllinks(namespace=1, limit=5):
+ for page in mysite.alllinks(namespace=1, total=5):
self.assertType(page, pywikibot.Page)
self.assertEqual(page.namespace(), 1)
for page in mysite.alllinks(start="From", namespace=4, fromids=True,
- limit=5):
+ total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(page.title(withNamespace=False) >= "From")
self.assertTrue(hasattr(page, "_fromid"))
@@ -362,44 +362,44 @@
def testAllCategories(self):
"""Test the site.allcategories() method"""
- ac = list(mysite.allcategories(limit=10))
+ ac = list(mysite.allcategories(total=10))
self.assertTrue(len(ac) <= 10)
self.assertTrue(all(isinstance(cat, pywikibot.Category)
for cat in ac))
- for cat in mysite.allcategories(limit=5, start="Abc"):
+ for cat in mysite.allcategories(total=5, start="Abc"):
self.assertType(cat, pywikibot.Category)
self.assertTrue(cat.title(withNamespace=False) >= "Abc")
- for cat in mysite.allcategories(limit=5, prefix="Def"):
+ for cat in mysite.allcategories(total=5, prefix="Def"):
self.assertType(cat, pywikibot.Category)
self.assertTrue(cat.title(withNamespace=False).startswith("Def"))
## # Bug # 15985
-## for cat in mysite.allcategories(limit=5, start="Hij", reverse=True):
+## for cat in mysite.allcategories(total=5, start="Hij", reverse=True):
## self.assertType(cat, pywikibot.Category)
## self.assertTrue(cat.title(withNamespace=False) <= "Hij")
def testAllUsers(self):
"""Test the site.allusers() method"""
- au = list(mysite.allusers(limit=10))
+ au = list(mysite.allusers(total=10))
self.assertTrue(len(au) <= 10)
for user in au:
self.assertType(user, dict)
self.assertTrue("name" in user)
self.assertTrue("editcount" in user)
self.assertTrue("registration" in user)
- for user in mysite.allusers(start="B", limit=5):
+ for user in mysite.allusers(start="B", total=5):
self.assertType(user, dict)
self.assertTrue("name" in user)
self.assertTrue(user["name"] >= "B")
self.assertTrue("editcount" in user)
self.assertTrue("registration" in user)
- for user in mysite.allusers(prefix="C", limit=5):
+ for user in mysite.allusers(prefix="C", total=5):
self.assertType(user, dict)
self.assertTrue("name" in user)
self.assertTrue(user["name"].startswith("C"))
self.assertTrue("editcount" in user)
self.assertTrue("registration" in user)
- for user in mysite.allusers(prefix="D", group="sysop", limit=5):
+ for user in mysite.allusers(prefix="D", group="sysop", total=5):
self.assertType(user, dict)
self.assertTrue("name" in user)
self.assertTrue(user["name"].startswith("D"))
@@ -410,28 +410,28 @@
def testAllImages(self):
"""Test the site.allimages() method"""
- ai = list(mysite.allimages(limit=10))
+ ai = list(mysite.allimages(total=10))
self.assertTrue(len(ai) <= 10)
self.assertTrue(all(isinstance(image, pywikibot.ImagePage)
for image in ai))
- for impage in mysite.allimages(start="Ba", limit=5):
+ for impage in mysite.allimages(start="Ba", total=5):
self.assertType(impage, pywikibot.ImagePage)
self.assertTrue(mysite.page_exists(impage))
self.assertTrue(impage.title(withNamespace=False) >= "Ba")
## # Bug # 15985
-## for impage in mysite.allimages(start="Da", reverse=True, limit=5):
+## for impage in mysite.allimages(start="Da", reverse=True, total=5):
## self.assertType(impage, pywikibot.ImagePage)
## self.assertTrue(mysite.page_exists(impage))
## self.assertTrue(impage.title() <= "Da")
- for impage in mysite.allimages(prefix="Ch", limit=5):
+ for impage in mysite.allimages(prefix="Ch", total=5):
self.assertType(impage, pywikibot.ImagePage)
self.assertTrue(mysite.page_exists(impage))
self.assertTrue(impage.title(withNamespace=False).startswith("Ch"))
- for impage in mysite.allimages(minsize=100, limit=5):
+ for impage in mysite.allimages(minsize=100, total=5):
self.assertType(impage, pywikibot.ImagePage)
self.assertTrue(mysite.page_exists(impage))
self.assertTrue(impage._imageinfo["size"] >= 100)
- for impage in mysite.allimages(maxsize=2000, limit=5):
+ for impage in mysite.allimages(maxsize=2000, total=5):
self.assertType(impage, pywikibot.ImagePage)
self.assertTrue(mysite.page_exists(impage))
self.assertTrue(impage._imageinfo["size"] <= 2000)
@@ -440,7 +440,7 @@
"""Test the site.blocks() method"""
props = ("id", "by", "timestamp",
"expiry", "reason")
- bl = list(mysite.blocks(limit=10))
+ bl = list(mysite.blocks(total=10))
self.assertTrue(len(bl) <= 10)
for block in bl:
self.assertType(block, dict)
@@ -451,7 +451,7 @@
for t in xrange(1, len(timestamps)):
self.assertTrue(timestamps[t] <= timestamps[t-1])
- b2 = list(mysite.blocks(limit=10, reverse=True))
+ b2 = list(mysite.blocks(total=10, reverse=True))
self.assertTrue(len(b2) <= 10)
for block in b2:
self.assertType(block, dict)
@@ -462,35 +462,35 @@
for t in xrange(1, len(timestamps)):
self.assertTrue(timestamps[t] >= timestamps[t-1])
- for block in mysite.blocks(starttime="2008-07-01T00:00:01Z", limit=5):
+ for block in mysite.blocks(starttime="2008-07-01T00:00:01Z", total=5):
self.assertType(block, dict)
for prop in props:
self.assertTrue(prop in block)
- for block in mysite.blocks(endtime="2008-07-31T23:59:59Z", limit=5):
+ for block in mysite.blocks(endtime="2008-07-31T23:59:59Z", total=5):
self.assertType(block, dict)
for prop in props:
self.assertTrue(prop in block)
for block in mysite.blocks(starttime="2008-08-02T00:00:01Z",
endtime="2008-08-02T23:59:59Z",
- reverse=True, limit=5):
+ reverse=True, total=5):
self.assertType(block, dict)
for prop in props:
self.assertTrue(prop in block)
for block in mysite.blocks(starttime="2008-08-03T23:59:59Z",
endtime="2008-08-03T00:00:01Z",
- limit=5):
+ total=5):
self.assertType(block, dict)
for prop in props:
self.assertTrue(prop in block)
# starttime earlier than endtime
self.assertRaises(pywikibot.Error, mysite.blocks,
starttime="2008-08-03T00:00:01Z",
- endtime="2008-08-03T23:59:59Z", limit=5)
+ endtime="2008-08-03T23:59:59Z", total=5)
# reverse: endtime earlier than starttime
self.assertRaises(pywikibot.Error, mysite.blocks,
starttime="2008-08-03T23:59:59Z",
- endtime="2008-08-03T00:00:01Z", reverse=True, limit=5)
- for block in mysite.blocks(users=mysite.user(), limit=5):
+ endtime="2008-08-03T00:00:01Z", reverse=True, total=5)
+ for block in mysite.blocks(users=mysite.user(), total=5):
self.assertType(block, dict)
self.assertEqual(block['user'], mysite.user())
@@ -498,119 +498,119 @@
"""Test the site.exturlusage() method"""
url = "www.google.com"
- eu = list(mysite.exturlusage(url, limit=10))
+ eu = list(mysite.exturlusage(url, total=10))
self.assertTrue(len(eu) <= 10)
self.assertTrue(all(isinstance(link, pywikibot.Page)
for link in eu))
- for link in mysite.exturlusage(url, namespaces=[2, 3], limit=5):
+ for link in mysite.exturlusage(url, namespaces=[2, 3], total=5):
self.assertType(link, pywikibot.Page)
self.assertTrue(link.namespace() in (2, 3))
def testImageusage(self):
"""Test the site.imageusage() method"""
- iu = list(mysite.imageusage(imagepage, limit=10))
+ iu = list(mysite.imageusage(imagepage, total=10))
self.assertTrue(len(iu) <= 10)
self.assertTrue(all(isinstance(link, pywikibot.Page)
for link in iu))
- for using in mysite.imageusage(imagepage, namespaces=[3,4], limit=5):
+ for using in mysite.imageusage(imagepage, namespaces=[3,4], total=5):
self.assertType(using, pywikibot.Page)
self.assertTrue(imagepage in list(using.imagelinks()))
- for using in mysite.imageusage(imagepage, filterredir=True, limit=5):
+ for using in mysite.imageusage(imagepage, filterredir=True, total=5):
self.assertType(using, pywikibot.Page)
self.assertTrue(using.isRedirectPage())
- for using in mysite.imageusage(imagepage, filterredir=True, limit=5):
+ for using in mysite.imageusage(imagepage, filterredir=True, total=5):
self.assertType(using, pywikibot.Page)
self.assertFalse(using.isRedirectPage())
def testLogEvents(self):
"""Test the site.logevents() method"""
- le = list(mysite.logevents(limit=10))
+ le = list(mysite.logevents(total=10))
self.assertTrue(len(le) <= 10)
self.assertTrue(all(isinstance(entry, dict) and "type" in entry
for entry in le))
for typ in ("block", "protect", "rights",
"delete", "upload",
"move", "import", "patrol",
"merge"):
- for entry in mysite.logevents(logtype=typ, limit=3):
+ for entry in mysite.logevents(logtype=typ, total=3):
self.assertEqual(entry["type"], typ)
- for entry in mysite.logevents(page=mainpage, limit=3):
+ for entry in mysite.logevents(page=mainpage, total=3):
self.assertTrue("title" in entry
and entry["title"] == mainpage.title())
- for entry in mysite.logevents(user=mysite.user(), limit=3):
+ for entry in mysite.logevents(user=mysite.user(), total=3):
self.assertTrue("user" in entry
and entry["user"] == mysite.user())
- for entry in mysite.logevents(start="2008-09-01T00:00:01Z", limit=5):
+ for entry in mysite.logevents(start="2008-09-01T00:00:01Z", total=5):
self.assertType(entry, dict)
self.assertTrue(entry['timestamp'] <=
"2008-09-01T00:00:01Z")
- for entry in mysite.logevents(end="2008-09-02T23:59:59Z", limit=5):
+ for entry in mysite.logevents(end="2008-09-02T23:59:59Z", total=5):
self.assertType(entry, dict)
self.assertTrue(entry['timestamp'] >=
"2008-09-02T23:59:59Z")
for entry in mysite.logevents(start="2008-02-02T00:00:01Z",
end="2008-02-02T23:59:59Z",
- reverse=True, limit=5):
+ reverse=True, total=5):
self.assertType(entry, dict)
self.assertTrue("2008-02-02T00:00:01Z" <=
entry['timestamp']
<= "2008-02-02T23:59:59Z")
for entry in mysite.logevents(start="2008-02-03T23:59:59Z",
end="2008-02-03T00:00:01Z",
- limit=5):
+ total=5):
self.assertType(entry, dict)
self.assertTrue("2008-02-03T00:00:01Z" <=
entry['timestamp']
<= "2008-02-03T23:59:59Z")
# starttime earlier than endtime
self.assertRaises(pywikibot.Error, mysite.logevents,
start="2008-02-03T00:00:01Z",
- end="2008-02-03T23:59:59Z", limit=5)
+ end="2008-02-03T23:59:59Z", total=5)
# reverse: endtime earlier than starttime
self.assertRaises(pywikibot.Error, mysite.logevents,
start="2008-02-03T23:59:59Z",
- end="2008-02-03T00:00:01Z", reverse=True, limit=5)
+ end="2008-02-03T00:00:01Z", reverse=True, total=5)
def testRecentchanges(self):
"""Test the site.recentchanges() method"""
- rc = list(mysite.recentchanges(limit=10))
+ rc = list(mysite.recentchanges(total=10))
self.assertTrue(len(rc) <= 10)
self.assertTrue(all(isinstance(change, dict)
for change in rc))
for change in mysite.recentchanges(start="2008-10-01T01:02:03Z",
- limit=5):
+ total=5):
self.assertType(change, dict)
self.assertTrue(change['timestamp'] <=
"2008-10-01T01:02:03Z")
for change in mysite.recentchanges(end="2008-04-01T02:03:04Z",
- limit=5):
+ total=5):
self.assertType(change, dict)
self.assertTrue(change['timestamp'] >=
"2008-10-01T02:03:04Z")
for change in mysite.recentchanges(start="2008-10-01T03:05:07Z",
- limit=5, reverse=True):
+ total=5, reverse=True):
self.assertType(change, dict)
self.assertTrue(change['timestamp'] >=
"2008-10-01T03:05:07Z")
for change in mysite.recentchanges(end="2008-10-01T04:06:08Z",
- limit=5, reverse=True):
+ total=5, reverse=True):
self.assertType(change, dict)
self.assertTrue(change['timestamp'] <=
"2008-10-01T04:06:08Z")
for change in mysite.recentchanges(start="2008-10-03T11:59:59Z",
end="2008-10-03T00:00:01Z",
- limit=5):
+ total=5):
self.assertType(change, dict)
self.assertTrue("2008-10-03T00:00:01Z" <=
change['timestamp']
<= "2008-10-03T11:59:59Z")
for change in mysite.recentchanges(start="2008-10-05T06:00:01Z",
end="2008-10-05T23:59:59Z",
- reverse=True, limit=5):
+ reverse=True, total=5):
self.assertType(change, dict)
self.assertTrue("2008-10-05T06:00:01Z" <=
change['timestamp']
<= "2008-10-05T23:59:59Z")
# start earlier than end
self.assertRaises(pywikibot.Error, mysite.recentchanges,
start="2008-02-03T00:00:01Z",
- end="2008-02-03T23:59:59Z", limit=5)
+ end="2008-02-03T23:59:59Z", total=5)
# reverse: end earlier than start
self.assertRaises(pywikibot.Error, mysite.recentchanges,
start="2008-02-03T23:59:59Z",
- end="2008-02-03T00:00:01Z", reverse=True, limit=5)
- for change in mysite.recentchanges(namespaces=[6,7], limit=5):
+ end="2008-02-03T00:00:01Z", reverse=True, total=5)
+ for change in mysite.recentchanges(namespaces=[6,7], total=5):
self.assertType(change, dict)
self.assertTrue("title" in change and "ns" in change)
title = change['title']
@@ -619,63 +619,63 @@
self.assertTrue(mysite.ns_index(prefix) in [6,7])
self.assertTrue(change["ns"] in [6,7])
for change in mysite.recentchanges(pagelist=[mainpage, imagepage],
- limit=5):
+ total=5):
self.assertType(change, dict)
self.assertTrue("title" in change)
self.assertTrue(change["title"] in (mainpage.title(),
imagepage.title()))
for typ in ("edit", "new", "log"):
- for change in mysite.recentchanges(changetype=typ, limit=5):
+ for change in mysite.recentchanges(changetype=typ, total=5):
self.assertType(change, dict)
self.assertTrue("type" in change)
self.assertEqual(change["type"], typ)
- for change in mysite.recentchanges(showMinor=True, limit=5):
+ for change in mysite.recentchanges(showMinor=True, total=5):
self.assertType(change, dict)
self.assertTrue("minor" in change)
- for change in mysite.recentchanges(showMinor=False, limit=5):
+ for change in mysite.recentchanges(showMinor=False, total=5):
self.assertType(change, dict)
self.assertTrue("minor" not in change)
- for change in mysite.recentchanges(showBot=True, limit=5):
+ for change in mysite.recentchanges(showBot=True, total=5):
self.assertType(change, dict)
self.assertTrue("bot" in change)
- for change in mysite.recentchanges(showBot=False, limit=5):
+ for change in mysite.recentchanges(showBot=False, total=5):
self.assertType(change, dict)
self.assertTrue("bot" not in change)
- for change in mysite.recentchanges(showAnon=True, limit=5):
+ for change in mysite.recentchanges(showAnon=True, total=5):
self.assertType(change, dict)
- for change in mysite.recentchanges(showAnon=False, limit=5):
+ for change in mysite.recentchanges(showAnon=False, total=5):
self.assertType(change, dict)
- for change in mysite.recentchanges(showRedirects=True, limit=5):
+ for change in mysite.recentchanges(showRedirects=True, total=5):
self.assertType(change, dict)
self.assertTrue("redirect" in change)
- for change in mysite.recentchanges(showRedirects=False, limit=5):
+ for change in mysite.recentchanges(showRedirects=False, total=5):
self.assertType(change, dict)
self.assertTrue("redirect" not in change)
- for change in mysite.recentchanges(showPatrolled=True, limit=5):
+ for change in mysite.recentchanges(showPatrolled=True, total=5):
self.assertType(change, dict)
self.assertTrue("patrolled" in change)
- for change in mysite.recentchanges(showPatrolled=False, limit=5):
+ for change in mysite.recentchanges(showPatrolled=False, total=5):
self.assertType(change, dict)
self.assertTrue("patrolled" not in change)
def testSearch(self):
"""Test the site.search() method"""
- se = list(mysite.search("wiki", limit=10))
+ se = list(mysite.search("wiki", total=10))
self.assertTrue(len(se) <= 10)
self.assertTrue(all(isinstance(hit, pywikibot.Page)
for hit in se))
self.assertTrue(all(hit.namespace() == 0 for hit in se))
- for hit in mysite.search("common", namespaces=4, limit=5):
+ for hit in mysite.search("common", namespaces=4, total=5):
self.assertType(hit, pywikibot.Page)
self.assertEqual(hit.namespace(), 4)
- for hit in mysite.search("word", namespaces=[5,6,7], limit=5):
+ for hit in mysite.search("word", namespaces=[5,6,7], total=5):
self.assertType(hit, pywikibot.Page)
self.assertTrue(hit.namespace() in [5,6,7])
- for hit in mysite.search("another", namespaces="8|9|10", limit=5):
+ for hit in mysite.search("another", namespaces="8|9|10", total=5):
self.assertType(hit, pywikibot.Page)
self.assertTrue(hit.namespace() in [8,9,10])
- for hit in mysite.search("wiki", namespaces=0, limit=10,
+ for hit in mysite.search("wiki", namespaces=0, total=10,
getredirects=True):
self.assertType(hit, pywikibot.Page)
self.assertEqual(hit.namespace(), 0)
@@ -683,120 +683,120 @@
def testUsercontribs(self):
"""Test the site.usercontribs() method"""
- uc = list(mysite.usercontribs(user=mysite.user(), limit=10))
+ uc = list(mysite.usercontribs(user=mysite.user(), total=10))
self.assertTrue(len(uc) <= 10)
self.assertTrue(all(isinstance(contrib, dict)
for contrib in uc))
self.assertTrue(all("user" in contrib
and contrib["user"] == mysite.user()
for contrib in uc))
- for contrib in mysite.usercontribs(userprefix="John", limit=5):
+ for contrib in mysite.usercontribs(userprefix="John", total=5):
self.assertType(contrib, dict)
for key in ("user", "title", "ns",
"pageid", "revid"):
self.assertTrue(key in contrib)
self.assertTrue(contrib["user"].startswith("John"))
for contrib in mysite.usercontribs(userprefix="Jane",
start="2008-10-06T01:02:03Z",
- limit=5):
+ total=5):
self.assertTrue(contrib['timestamp'] <=
"2008-10-06T01:02:03Z")
for contrib in mysite.usercontribs(userprefix="Jane",
end="2008-10-07T02:03:04Z",
- limit=5):
+ total=5):
self.assertTrue(contrib['timestamp'] >=
"2008-10-07T02:03:04Z")
for contrib in mysite.usercontribs(userprefix="Brion",
start="2008-10-08T03:05:07Z",
- limit=5, reverse=True):
+ total=5, reverse=True):
self.assertTrue(contrib['timestamp'] >=
"2008-10-08T03:05:07Z")
for contrib in mysite.usercontribs(userprefix="Brion",
end="2008-10-09T04:06:08Z",
- limit=5, reverse=True):
+ total=5, reverse=True):
self.assertTrue(contrib['timestamp'] <=
"2008-10-09T04:06:08Z")
for contrib in mysite.usercontribs(userprefix="Tim",
start="2008-10-10T11:59:59Z",
end="2008-10-10T00:00:01Z",
- limit=5):
+ total=5):
self.assertTrue("2008-10-10T00:00:01Z" <=
contrib['timestamp']
<= "2008-10-10T11:59:59Z")
for contrib in mysite.usercontribs(userprefix="Tim",
start="2008-10-11T06:00:01Z",
end="2008-10-11T23:59:59Z",
- reverse=True, limit=5):
+ reverse=True, total=5):
self.assertTrue("2008-10-11T06:00:01Z" <=
contrib['timestamp']
<= "2008-10-11T23:59:59Z")
# start earlier than end
self.assertRaises(pywikibot.Error, mysite.usercontribs,
userprefix="Jim",
start="2008-10-03T00:00:01Z",
- end="2008-10-03T23:59:59Z", limit=5)
+ end="2008-10-03T23:59:59Z", total=5)
# reverse: end earlier than start
self.assertRaises(pywikibot.Error, mysite.usercontribs,
userprefix="Jim",
start="2008-10-03T23:59:59Z",
- end="2008-10-03T00:00:01Z", reverse=True, limit=5)
+ end="2008-10-03T00:00:01Z", reverse=True, total=5)
for contrib in mysite.usercontribs(user=mysite.user(),
- namespaces=14, limit=5):
+ namespaces=14, total=5):
self.assertType(contrib, dict)
self.assertTrue("title" in contrib)
self.assertTrue(contrib["title"].startswith(mysite.namespace(14)))
for contrib in mysite.usercontribs(user=mysite.user(),
- namespaces=[10,11], limit=5):
+ namespaces=[10,11], total=5):
self.assertType(contrib, dict)
self.assertTrue("title" in contrib)
self.assertTrue(contrib["ns"] in (10, 11))
for contrib in mysite.usercontribs(user=mysite.user(),
- showMinor=True, limit=5):
+ showMinor=True, total=5):
self.assertType(contrib, dict)
self.assertTrue("minor" in contrib)
for contrib in mysite.usercontribs(user=mysite.user(),
- showMinor=False, limit=5):
+ showMinor=False, total=5):
self.assertType(contrib, dict)
self.assertTrue("minor" not in contrib)
def testWatchlistrevs(self):
"""Test the site.watchlist_revs() method"""
- wl = list(mysite.watchlist_revs(limit=10))
+ wl = list(mysite.watchlist_revs(total=10))
self.assertTrue(len(wl) <= 10)
self.assertTrue(all(isinstance(rev, dict)
for rev in wl))
for rev in mysite.watchlist_revs(start="2008-10-11T01:02:03Z",
- limit=5):
+ total=5):
self.assertType(rev, dict)
self.assertTrue(rev['timestamp'] <=
"2008-10-11T01:02:03Z")
for rev in mysite.watchlist_revs(end="2008-04-01T02:03:04Z",
- limit=5):
+ total=5):
self.assertType(rev, dict)
self.assertTrue(rev['timestamp'] >=
"2008-10-11T02:03:04Z")
for rev in mysite.watchlist_revs(start="2008-10-11T03:05:07Z",
- limit=5, reverse=True):
+ total=5, reverse=True):
self.assertType(rev, dict)
self.assertTrue(rev['timestamp'] >=
"2008-10-11T03:05:07Z")
for rev in mysite.watchlist_revs(end="2008-10-11T04:06:08Z",
- limit=5, reverse=True):
+ total=5, reverse=True):
self.assertType(rev, dict)
self.assertTrue(rev['timestamp'] <=
"2008-10-11T04:06:08Z")
for rev in mysite.watchlist_revs(start="2008-10-13T11:59:59Z",
end="2008-10-13T00:00:01Z",
- limit=5):
+ total=5):
self.assertType(rev, dict)
self.assertTrue("2008-10-13T00:00:01Z" <=
rev['timestamp']
<= "2008-10-13T11:59:59Z")
for rev in mysite.watchlist_revs(start="2008-10-15T06:00:01Z",
end="2008-10-15T23:59:59Z",
- reverse=True, limit=5):
+ reverse=True, total=5):
self.assertType(rev, dict)
self.assertTrue("2008-10-15T06:00:01Z" <=
rev['timestamp']
<= "2008-10-15T23:59:59Z")
# start earlier than end
self.assertRaises(pywikibot.Error, mysite.watchlist_revs,
start="2008-09-03T00:00:01Z",
- end="2008-09-03T23:59:59Z", limit=5)
+ end="2008-09-03T23:59:59Z", total=5)
# reverse: end earlier than start
self.assertRaises(pywikibot.Error, mysite.watchlist_revs,
start="2008-09-03T23:59:59Z",
- end="2008-09-03T00:00:01Z", reverse=True, limit=5)
- for rev in mysite.watchlist_revs(namespaces=[6,7], limit=5):
+ end="2008-09-03T00:00:01Z", reverse=True, total=5)
+ for rev in mysite.watchlist_revs(namespaces=[6,7], total=5):
self.assertType(rev, dict)
self.assertTrue("title" in rev and "ns" in rev)
title = rev['title']
@@ -804,21 +804,21 @@
prefix = title[ : title.index(":")]
self.assertTrue(mysite.ns_index(prefix) in [6,7])
self.assertTrue(rev["ns"] in [6,7])
- for rev in mysite.watchlist_revs(showMinor=True, limit=5):
+ for rev in mysite.watchlist_revs(showMinor=True, total=5):
self.assertType(rev, dict)
self.assertTrue("minor" in rev)
- for rev in mysite.watchlist_revs(showMinor=False, limit=5):
+ for rev in mysite.watchlist_revs(showMinor=False, total=5):
self.assertType(rev, dict)
self.assertTrue("minor" not in rev)
- for rev in mysite.watchlist_revs(showBot=True, limit=5):
+ for rev in mysite.watchlist_revs(showBot=True, total=5):
self.assertType(rev, dict)
self.assertTrue("bot" in rev)
- for rev in mysite.watchlist_revs(showBot=False, limit=5):
+ for rev in mysite.watchlist_revs(showBot=False, total=5):
self.assertType(rev, dict)
self.assertTrue("bot" not in rev)
- for rev in mysite.watchlist_revs(showAnon=True, limit=5):
+ for rev in mysite.watchlist_revs(showAnon=True, total=5):
self.assertType(rev, dict)
- for rev in mysite.watchlist_revs(showAnon=False, limit=5):
+ for rev in mysite.watchlist_revs(showAnon=False, total=5):
self.assertType(rev, dict)
def testDeletedrevs(self):
@@ -831,54 +831,54 @@
logger.warn(
"Cannot test Site.deleted_revs; no sysop account
configured.")
return
- dr = list(mysite.deletedrevs(limit=10, page=mainpage))
+ dr = list(mysite.deletedrevs(total=10, page=mainpage))
self.assertTrue(len(dr) <= 10)
self.assertTrue(all(isinstance(rev, dict)
for rev in dr))
- dr2 = list(mysite.deletedrevs(page=mainpage, limit=10))
+ dr2 = list(mysite.deletedrevs(page=mainpage, total=10))
self.assertTrue(len(dr2) <= 10)
self.assertTrue(all(isinstance(rev, dict)
for rev in dr2))
for rev in mysite.deletedrevs(start="2008-10-11T01:02:03Z",
- page=mainpage, limit=5):
+ page=mainpage, total=5):
self.assertType(rev, dict)
self.assertTrue(rev['timestamp'] <=
"2008-10-11T01:02:03Z")
for rev in mysite.deletedrevs(end="2008-04-01T02:03:04Z",
- page=mainpage, limit=5):
+ page=mainpage, total=5):
self.assertType(rev, dict)
self.assertTrue(rev['timestamp'] >=
"2008-10-11T02:03:04Z")
for rev in mysite.deletedrevs(start="2008-10-11T03:05:07Z",
- page=mainpage, limit=5,
+ page=mainpage, total=5,
reverse=True):
self.assertType(rev, dict)
self.assertTrue(rev['timestamp'] >=
"2008-10-11T03:05:07Z")
for rev in mysite.deletedrevs(end="2008-10-11T04:06:08Z",
- page=mainpage, limit=5,
+ page=mainpage, total=5,
reverse=True):
self.assertType(rev, dict)
self.assertTrue(rev['timestamp'] <=
"2008-10-11T04:06:08Z")
for rev in mysite.deletedrevs(start="2008-10-13T11:59:59Z",
end="2008-10-13T00:00:01Z",
- page=mainpage, limit=5):
+ page=mainpage, total=5):
self.assertType(rev, dict)
self.assertTrue("2008-10-13T00:00:01Z" <=
rev['timestamp']
<= "2008-10-13T11:59:59Z")
for rev in mysite.deletedrevs(start="2008-10-15T06:00:01Z",
end="2008-10-15T23:59:59Z",
page=mainpage, reverse=True,
- limit=5):
+ total=5):
self.assertType(rev, dict)
self.assertTrue("2008-10-15T06:00:01Z" <=
rev['timestamp']
<= "2008-10-15T23:59:59Z")
# start earlier than end
self.assertRaises(pywikibot.Error, mysite.deletedrevs,
page=mainpage, start="2008-09-03T00:00:01Z",
- end="2008-09-03T23:59:59Z", limit=5)
+ end="2008-09-03T23:59:59Z", total=5)
# reverse: end earlier than start
self.assertRaises(pywikibot.Error, mysite.deletedrevs,
page=mainpage, start="2008-09-03T23:59:59Z",
end="2008-09-03T00:00:01Z", reverse=True,
- limit=5)
+ total=5)
def testUsers(self):
"""Test the site.users() method"""
@@ -895,15 +895,15 @@
def testRandompages(self):
"""Test the site.randompages() method"""
- rn = list(mysite.randompages(limit=10))
+ rn = list(mysite.randompages(total=10))
self.assertTrue(len(rn) <= 10)
self.assertTrue(all(isinstance(a_page, pywikibot.Page)
for a_page in rn))
self.assertFalse(all(a_page.isRedirectPage() for a_page in rn))
- for rndpage in mysite.randompages(limit=5, redirects=True):
+ for rndpage in mysite.randompages(total=5, redirects=True):
self.assertType(rndpage, pywikibot.Page)
self.assertTrue(rndpage.isRedirectPage())
- for rndpage in mysite.randompages(limit=5, namespaces=[6, 7]):
+ for rndpage in mysite.randompages(total=5, namespaces=[6, 7]):
self.assertType(rndpage, pywikibot.Page)
self.assertTrue(rndpage.namespace() in [6, 7])