Revision: 8270
Author: xqt
Date: 2010-06-10 09:28:42 +0000 (Thu, 10 Jun 2010)
Log Message:
-----------
Compatibility work on NewpagesPageGenerator() (patch request bug #3013195 by stanlekub). Thanks!
Modified Paths:
--------------
branches/rewrite/pywikibot/pagegenerators.py
Modified: branches/rewrite/pywikibot/pagegenerators.py
===================================================================
--- branches/rewrite/pywikibot/pagegenerators.py 2010-06-09 19:19:34 UTC (rev 8269)
+++ branches/rewrite/pywikibot/pagegenerators.py 2010-06-10 09:28:42 UTC (rev 8270)
@@ -70,8 +70,8 @@
-search Work on all pages that are found in a MediaWiki search
across all namespaces.
--namespace -ns Filter the page generator to only yield pages in the
- specified namespaces. Separate multiple namespace
+-namespace Filter the page generator to only yield pages in the
+-ns specified namespaces. Separate multiple namespace
numbers with commas. Example "-ns:0,2,4"
-interwiki Work on the given page and all equivalent pages in other
@@ -160,18 +160,21 @@
class GeneratorFactory(object):
- """Process command line arguments and return appropriate page generator."""
+ """Process command line arguments and return appropriate page generator.
+ This factory is responsible for processing command line arguments
+ that are used by many scripts and that determine which pages to work on.
+ """
def __init__(self):
self.gens = []
self.namespaces = []
self.step = None
self.limit = None
+
def getCombinedGenerator(self):
"""Return the combination of all accumulated generators.
Only call this after all arguments have been parsed.
-
"""
namespaces = [int(n) for n in self.namespaces]
for i in xrange(len(self.gens)):
@@ -443,7 +446,7 @@
return False
-def AllpagesPageGenerator(start ='!', namespace=0, includeredirects=True,
+def AllpagesPageGenerator(start='!', namespace=0, includeredirects=True,
site=None, step=None, total=None):
"""
Iterate Page objects for all titles in a single namespace.
@@ -487,18 +490,23 @@
filterredir=filterredir, step=step, total=total)
@deprecate_arg("number", "total")
+@deprecate_arg("namespace", "namespaces")
+@deprecate_arg("repeat", None)
def NewpagesPageGenerator(get_redirect=False, repeat=False, site=None,
- step=None, total=None):
+ namespaces=[0,], step=None, total=None):
+ """
+ Iterate Page objects for all new titles in a single namespace.
+ """
# API does not (yet) have a newpages function, so this tries to duplicate
# it by filtering the recentchanges output
# defaults to namespace 0 because that's how Special:Newpages defaults
if site is None:
site = pywikibot.Site()
for item in site.recentchanges(showRedirects=get_redirect,
- changetype="new", namespaces=0, step=step, total=total):
+ changetype="new", namespaces=namespaces,
+ step=step, total=total):
yield pywikibot.Page(pywikibot.Link(item["title"], site))
-
def RecentChangesPageGenerator(start=None, end=None, reverse=False,
namespaces=None, pagelist=None,
changetype=None, showMinor=None,
@@ -539,7 +547,6 @@
step=step, total=total):
yield pywikibot.Page(pywikibot.Link(item["title"], site))
-
def FileLinksGenerator(referredImagePage, step=None, total=None):
return referredImagePage.usingPages(step=step, total=total)
@@ -589,7 +596,6 @@
if start is None or a.title(withNamespace=False) >= start:
yield a
-
def SubCategoriesPageGenerator(category, recurse=False, start=None,
step=None, total=None):
'''
@@ -607,7 +613,6 @@
if start is None or s.title(withNamespace=False) >= start:
yield s
-
def LinkedPageGenerator(linkingPage, step=None, total=None):
"""Yields all pages linked from a specific page."""
return linkingPage.linkedPages(step=step, total=total)
@@ -620,7 +625,7 @@
generator will yield each corresponding Page object.
@param filename: the name of the file that should be read. If no name is
- given, the generator prompts the user.
+ given, the generator prompts the user.
@param site: the default Site for which Page objects should be created
"""
@@ -797,7 +802,7 @@
for page in site.withoutinterwiki(number=number, repeat=repeat):
yield page
-def UnCategorizedCategoryGenerator(number = 100, repeat = False, site = None):
+def UnCategorizedCategoryGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page in site.uncategorizedcategories(number=number, repeat=repeat):
@@ -809,7 +814,7 @@
for page in site.uncategorizedimages(number=number, repeat=repeat):
yield page
-def UnCategorizedPageGenerator(number = 100, repeat = False, site = None):
+def UnCategorizedPageGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page in site.uncategorizedpages(number=number, repeat=repeat):
Revision: 8267
Author: xqt
Date: 2010-06-09 15:19:07 +0000 (Wed, 09 Jun 2010)
Log Message:
-----------
new methods for userlib: username, isAnonymous(), isEmailable(); mark editedPages as deprecated; remove _contributionsOld() (won't fix)
Modified Paths:
--------------
trunk/pywikipedia/userlib.py
Modified: trunk/pywikipedia/userlib.py
===================================================================
--- trunk/pywikipedia/userlib.py 2010-06-09 15:13:25 UTC (rev 8266)
+++ trunk/pywikipedia/userlib.py 2010-06-09 15:19:07 UTC (rev 8267)
@@ -10,15 +10,16 @@
__version__ = '$Id$'
import re
-import wikipedia, query
+import wikipedia as pywikibot
+import query
-class AutoblockUser(wikipedia.Error):
+class AutoblockUser(pywikibot.Error):
"""
The class AutoblockUserError is an exception that is raised whenever
an action is requested on a virtual autoblock user that's not available
for him (i.e. roughly everything except unblock).
"""
-class UserActionRefuse(wikipedia.Error): pass
+class UserActionRefuse(pywikibot.Error): pass
class BlockError(UserActionRefuse): pass
@@ -30,50 +31,61 @@
class AlreadyUnblocked(UnblockError): pass
-class InvalidUser(wikipedia.InvalidTitle):
+class InvalidUser(pywikibot.InvalidTitle):
"""The mediawiki API does not allow IP lookups."""
pass
+ip_regexp = re.compile(r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}' \
+ r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$')
+
class User(object):
+ """A class that represents a Wiki user.
"""
- A class that represents a Wiki user.
- Has getters for the user's User: an User talk: (sub-)pages,
- as well as methods for blocking and unblocking.
- """
def __init__(self, site, name):
- """
- Initializer for a User object.
+ """Initializer for a User object.
Parameters:
- site - a wikipedia.Site object
+ site - a pywikibot.Site object
name - name of the user, without the trailing User:
"""
+ if len(name) > 1 and name[0] == u'#':
+ self._isAutoblock = True
+ else:
+ self._isAutoblock = False
+ if self._isAutoblock:
+ # This user is probably being queried for purpose of lifting
+ # an autoblock.
+ pywikibot.output(
+ "This is an autoblock ID, you can only use to unblock it.")
if type(site) in [str, unicode]:
- self._site = wikipedia.getSite(site)
+ self._site = pywikibot.getSite(site)
else:
self._site = site
+ # None means not loaded
self._name = name
- self._blocked = None #None mean not loaded
- self._groups = None #None mean not loaded
- #self._editcount = -1 # -1 mean not loaded
+ self._blocked = None
+ self._groups = None
self._registrationTime = -1
#if self.site().versionnumber() >= 16:
# self._urToken = None
- if name[0] == '#':
- # This user is probably being queried for purpose of lifting an
- # autoblock.
- wikipedia.output(
- "This is an autoblock ID, you can only use to unblock it.")
+
def site(self):
return self._site
def name(self):
+ return self.username
+
+ @property
+ def username(self):
return self._name
+ def isAnonymous(self):
+ return ip_regexp.match(self.username) is not None
+
def __str__(self):
- return u'%s:%s' % (self.site() , self.name() )
+ return u'%s:%s' % (self.site() , self.name())
def __repr__(self):
return self.__str__()
@@ -82,58 +94,104 @@
getall(self.site(), [self], force=True)
return
- def registrationTime(self, force = False):
+ def registrationTime(self, force=False):
if not hasattr(self, '_registrationTime') or force:
self._load()
return self._registrationTime
- def editCount(self, force = False):
+ def editCount(self, force=False):
+ """ Return edit count for this user as int.
+
+ @param force: if True, forces reloading the data
+ @type force: bool
+ """
if not hasattr(self, '_editcount') or force:
self._load()
return self._editcount
- def isBlocked(self, force = False):
+ def isBlocked(self, force=False):
+ """ Return True if this user is currently blocked, False otherwise.
+
+ @param force: if True, forces reloading the data
+ @type force: bool
+ """
if not self._blocked or force:
self._load()
return self._blocked
- def groups(self, force = False):
+ def isEmailable(self, force=False):
+ """ Return True if emails can be sent to this user through mediawiki,
+ False otherwise.
+
+ @param force: if True, forces reloading the data
+ @type force: bool
+ """
+ if not hasattr(self, '_mailable'):
+ self._load()
+ return self._mailable
+
+ def groups(self, force=False):
+ """ Return a list of groups to which this user belongs. The return value
+ is guaranteed to be a list object, possibly empty.
+
+ @param force: if True, forces reloading the data
+ @type force: bool
+ """
if not self._groups or force:
self._load()
return self._groups
- def getUserPage(self, subpage=''):
- if self.name()[0] == '#':
+ def getUserPage(self, subpage=u''):
+ """ Return a pywikibot.Page object corresponding to this user's main
+ page, or a subpage of it if subpage is set.
+
+ @param subpage: subpage part to be appended to the main
+ page title (optional)
+ @type subpage: unicode
+ """
+ if self._isAutoblock:
#This user is probably being queried for purpose of lifting
#an autoblock, so has no user pages per se.
- raise AutoblockUser
+ raise AutoblockUser("This is an autoblock ID, you can only use to unblock it.")
if subpage:
- subpage = '/' + subpage
- return wikipedia.Page(self.site(), self.name() + subpage, defaultNamespace=2)
+ subpage = u'/' + subpage
+ return pywikibot.Page(self.site(), self.name() + subpage, defaultNamespace=2)
- def getUserTalkPage(self, subpage=''):
- if self.name()[0] == '#':
+ def getUserTalkPage(self, subpage=u''):
+ """ Return a pywikibot.Page object corresponding to this user's main
+ talk page, or a subpage of it if subpage is set.
+
+ @param subpage: subpage part to be appended to the main
+ talk page title (optional)
+ @type subpage: unicode
+ """
+ if self._isAutoblock:
#This user is probably being queried for purpose of lifting
#an autoblock, so has no user talk pages per se.
- raise AutoblockUser
+ raise AutoblockUser("This is an autoblock ID, you can only use to unblock it.")
if subpage:
- subpage = '/' + subpage
- return wikipedia.Page(self.site(), self.name() + subpage, defaultNamespace=3)
+ subpage = u'/' + subpage
+ return pywikibot.Page(self.site(), self.name() + subpage,
+ defaultNamespace=3)
- def editedPages(self, limit=500):
- """ Deprecated function that wraps 'contributions'
- for backwards compatibility
+ def sendMail(self, subject=u'', text=u'', ccMe = False):
+ """ Send an email to this user via mediawiki's email interface.
+ Return True on success, False otherwise.
+ This method can raise an UserActionRefuse exception in case this user
+ doesn't allow sending email to him or the currently logged in bot
+ doesn't have the right to send emails.
+
+ @param subject: the subject header of the mail
+ @type subject: unicode
+ @param text: mail body
+ @type text: unicode
+ @param ccMe: if True, sends a copy of this email to the bot
+ @type ccMe: bool
"""
- for page in self.contributions(limit):
- yield page[0]
-
- def sendMail(self, subject = u'', text = u'', ccMe = False):
- if not hasattr(self, '_mailable'):
- self._load()
- if not self._mailable:
- raise UserActionRefuse("This user is not mailable")
+ if not self.isEmailable():
+ raise UserActionRefuse('This user is not mailable')
if not self.site().isAllowed('sendemail'):
- raise UserActionRefuse("You don't have permission to send mail")
+ raise UserActionRefuse('You don\'t have permission to send mail')
if not self.site().has_api() or self.site().versionnumber() < 14:
return self.sendMailOld(subject, text, ccMe)
@@ -147,16 +205,14 @@
}
if ccMe:
params['ccme'] = 1
- result = query.GetData(params, self.site())
- if 'error' in result:
- code = result['error']['code']
- if code == 'usermaildisabled ':
- wikipedia.output("User mail has been disabled")
- #elif code == '':
- #
- elif 'emailuser' in result:
- if result['emailuser']['result'] == 'Success':
- wikipedia.output(u'Email sent.')
+ maildata = query.GetData(params, self.site())
+ if 'error' in maildata:
+ code = maildata['error']['code']
+ if code == u'usermaildisabled ':
+ pywikibot.output(u'User mail has been disabled')
+ elif 'emailuser' in maildata:
+ if maildata['emailuser']['result'] == u'Success':
+ pywikibot.output(u'Email sent.')
return True
return False
@@ -175,26 +231,42 @@
response, data = self.site().postForm(address, predata, sysop = False)
if data:
if 'var wgAction = "success";' in data:
- wikipedia.output(u'Email sent.')
+ pywikibot.output(u'Email sent.')
return True
else:
- wikipedia.output(u'Email not sent.')
+ pywikibot.output(u'Email not sent.')
return False
else:
- wikipedia.output(u'No data found.')
+ pywikibot.output(u'No data found.')
return False
- def contributions(self, limit = 500, namespace = []):
- """ Yields pages that the user has edited, with an upper bound of ``limit''.
- Pages returned are not guaranteed to be unique
- (straight Special:Contributions parsing, in chunks of 500 items)."""
+ @pywikibot.deprecated('contributions()')
+ def editedPages(self, limit=500):
+ """ Deprecated function that wraps 'contributions' for backwards
+ compatibility. Yields pywikibot.Page objects that this user has
+ edited, with an upper bound of 'limit'. Pages returned are not
+ guaranteed to be unique.
+
+ @param limit: limit result to this number of pages.
+ @type limit: int.
+ """
+ for item in self.contributions(limit):
+ yield item[0]
+
+ def contributions(self, limit=500, namespace=[]):
+ """ Yield tuples describing this user edits with an upper bound of
+ 'limit'. Each tuple is composed of a pywikibot.Page object,
+ the revision id (int), the edit timestamp and the comment (unicode).
+ Pages returned are not guaranteed to be unique.
+
+ @param limit: limit result to this number of pages
+ @type limit: int
+ @param namespace: only iterate links in these namespaces
+ @type namespace: list
+ """
if not self.site().has_api():
raise NotImplementedError
- # please stay this in comment until the regex is fixed
- # for pg, oldid, date, comment in self._ContributionsOld(limit):
- # yield pg, oldid, date, comment
- # return
params = {
'action': 'query',
@@ -204,8 +276,8 @@
'uclimit': int(limit),
'ucdir': 'older',
}
- if limit > wikipedia.config.special_page_limit:
- params['uclimit'] = wikipedia.config.special_page_limit
+ if limit > pywikibot.config.special_page_limit:
+ params['uclimit'] = pywikibot.config.special_page_limit
if limit > 5000 and self.site().isAllowed('apihighlimits'):
params['uclimit'] = 5000
if namespace:
@@ -216,13 +288,12 @@
while True:
result = query.GetData(params, self.site())
if 'error' in result:
- wikipedia.output('%s' % result)
- raise wikipedia.Error
- for c in result['query']['usercontribs']:
- yield (wikipedia.Page(self.site(), c['title'], defaultNamespace=c['ns']),
- c['revid'],
- wikipedia.parsetime2stamp(c['timestamp']),
- c['comment']
+ pywikibot.output('%s' % result)
+ raise pywikibot.Error
+ for contrib in result['query']['usercontribs']:
+ ts = pywikibot.parsetime2stamp(contrib['timestamp'])
+ yield (pywikibot.Page(self.site(), contrib['title'], defaultNamespace=contrib['ns']),
+ contrib['revid'], ts, contrib['comment']
)
nbresults += 1
if nbresults >= limit:
@@ -233,60 +304,25 @@
break
return
- def _contributionsOld(self, limit = 250, namespace = []):
- if self.name()[0] == '#':
- #This user is probably being queried for purpose of lifting
- #an autoblock, so has no contribs.
- raise AutoblockUser
- #
- #TODO: fix contribRX regex
- #
- offset = 0
- step = min(limit,500)
- older_str = None
- if self.site().versionnumber() <= 11:
- older_str = self.site().mediawiki_message('sp-contributions-older')
- else:
- older_str = self.site().mediawiki_message('pager-older-n')
- if older_str.startswith('{{PLURAL:$1'):
- older_str = older_str[13:]
- older_str = older_str[older_str.find('|')+1:]
- older_str = older_str[:-2]
- older_str = older_str.replace('$1',str(step))
- address = self.site().contribs_address(self.name(),limit=step)
- contribRX = re.compile(r'<li[^>]*> *<a href="(?P<url>[^"]*?)" title="[^"]+">(?P<date>[^<]+)</a>.*>%s</a>\) *(<span class="[^"]+">[A-Za-z]</span>)* *<a href="[^"]+" (class="[^"]+" )?title="[^"]+">(?P<title>[^<]+)</a> *(?P<comment>.*?)(?P<top><strong> *\(top\) *</strong>)? *(<span class="mw-rollback-link">\[<a href="[^"]+token=(?P<rollbackToken>[^"]+)%2B%5C".*%s</a>\]</span>)? *</li>' % (self.site().mediawiki_message('diff'),self.site().mediawiki_message('rollback') ) )
- while offset < limit:
- data = self.site().getUrl(address)
- for pg in contribRX.finditer(data):
- url = pg.group('url')
- oldid = url[url.find('&oldid=')+11:]
- date = pg.group('date')
- comment = pg.group('comment')
- #rollbackToken = pg.group('rollbackToken')
- top = None
- if pg.group('top'):
- top = True
- # top, new, minor, should all go in a flags field
- yield wikipedia.Page(self.site(), pg.group('title')), oldid, date, comment
+ def uploadedImages(self, number=10):
+ """ Yield tuples describing files uploaded by this user.
+ Each tuple is composed of a pywikibot.Page, the timestamp,
+ the comment (unicode) and a bool (always False...).
+ Pages returned are not guaranteed to be unique.
- offset += 1
- if offset == limit:
- break
- nextRX = re.search('\(<a href="(?P<address>[^"]+)"[^>]*>' + older_str + '</a>\)',data)
- if nextRX:
- address = nextRX.group('address').replace('&','&')
- else:
- break
-
- def uploadedImages(self, number = 10):
+ @param number: limit result to this number of pages
+ @type number: int
+ """
+ if self.isAnonymous():
+ raise StopIteration
if not self.site().has_api() or self.site().versionnumber() < 11:
for c in self._uploadedImagesOld(number):
yield c
return
- for s in self.site().logpages(number, mode = 'upload', user = self.name(), dump = True):
- yield wikipedia.ImagePage(self.site(), s['title']), s['timestamp'], s['comment'], s['pageid'] > 0
+ for item in self.site().logpages(number, mode='upload', user=self.username, dump=True):
+ yield pywikibot.ImagePage(self.site(), item['title']), item['timestamp'], item['comment'], item['pageid'] > 0
return
def _uploadedImagesOld(self, number = 10):
@@ -309,7 +345,7 @@
date = m.group('date')
comment = m.group('comment') or ''
- yield wikipedia.ImagePage(self.site(), image), date, comment, deleted
+ yield pywikibot.ImagePage(self.site(), image), date, comment, deleted
def block(self, expiry = None, reason = None, anon= True, noCreate = False,
onAutoblock = False, banMail = False, watchUser = False, allowUsertalk = True,
@@ -331,7 +367,7 @@
The default values for block options are set to as most unrestrictive
"""
- if self.name()[0] == '#':
+ if self._isAutoblock:
#This user is probably being queried for purpose of lifting
#an autoblock, so can't be blocked.
raise AutoblockUser
@@ -340,9 +376,9 @@
self.site()._getActionUser('block', sysop=True)
if not expiry:
- expiry = wikipedia.input(u'Please enter the expiry time for the block:')
+ expiry = pywikibot.input(u'Please enter the expiry time for the block:')
if not reason:
- reason = wikipedia.input(u'Please enter a reason for the block:')
+ reason = pywikibot.input(u'Please enter a reason for the block:')
if not self.site().has_api() or self.site().versionnumber() < 12:
return self._blockOld(expiry, reason, anon, noCreate,
@@ -391,7 +427,7 @@
elif 'block' in data: #success
return True
else:
- wikipedia.output("Unknown Error, result: %s" % data)
+ pywikibot.output("Unknown Error, result: %s" % data)
raise BlockError
raise False
@@ -412,7 +448,7 @@
if reason is None:
reason = input(u'Please enter a reason for the block:')
token = self.site().getToken(self, sysop = True)
- wikipedia.output(u"Blocking [[User:%s]]..." % self.name())
+ pywikibot.output(u"Blocking [[User:%s]]..." % self.name())
boolStr = ['0','1']
predata = {
'wpBlockAddress': self.name(),
@@ -452,17 +488,17 @@
self._unblock(blockID,reason)
def _getBlockID(self):
- wikipedia.output(u"Getting block id for [[User:%s]]..." % self.name())
+ pywikibot.output(u"Getting block id for [[User:%s]]..." % self.name())
address = self.site().blocksearch_address(self.name())
data = self.site().getUrl(address)
bIDre = re.search(r'action=unblock&id=(\d+)', data)
if not bIDre:
- wikipedia.output(data)
+ pywikibot.output(data)
raise BlockIDError
return bIDre.group(1)
def _unblock(self, blockID, reason):
- wikipedia.output(u"Unblocking [[User:%s]]..." % self.name())
+ pywikibot.output(u"Unblocking [[User:%s]]..." % self.name())
token = self.site().getToken(self, sysop = True)
predata = {
'id': blockID,
@@ -486,7 +522,9 @@
"""
users = list(users) # if pages is an iterator, we need to make it a list
- if len(users) > 1: wikipedia.output(u'Getting %d users data from %s...' % (len(users), site))
+ if len(users) > 1:
+ pywikibot.output(u'Getting %d users data from %s...'
+ % (len(users), site))
if len(users) > 250: # max load prevents HTTPError 400
for urg in range(0, len(users), 250):
@@ -511,8 +549,8 @@
for user in users:
if not hasattr(user, '_editcount') or force:
self.users.append(user)
- elif wikipedia.verbose:
- wikipedia.output(u"BUGWARNING: %s already done!" % user.name())
+ elif pywikibot.verbose:
+ pywikibot.output(u"BUGWARNING: %s already done!" % user.name())
def run(self):
if self.users:
@@ -536,7 +574,7 @@
else:
uj._groups = []
if x['registration']:
- uj._registrationTime = wikipedia.parsetime2stamp(x['registration'])
+ uj._registrationTime = pywikibot.parsetime2stamp(x['registration'])
else:
uj._registrationTime = 0
uj._mailable = ("emailable" in x)
@@ -560,16 +598,16 @@
if __name__ == '__main__':
"""
- Simple testing code for the [[User:Example]] on the English Wikipedia.
+ Simple testing code for the [[User:Example]] on the English Wikipedia.
"""
- wikipedia.output("""
+ pywikibot.output("""
This module is not for direct usage from the command prompt.
In code, the usage is as follows:
>>> exampleUser = User("en", 'Example')
- >>> wikipedia.output(exampleUser.getUserPage().get())
- >>> wikipedia.output(exampleUser.getUserPage('Lipsum').get())
- >>> wikipedia.output(exampleUser.getUserTalkPage().get())
+ >>> pywikibot.output(exampleUser.getUserPage().get())
+ >>> pywikibot.output(exampleUser.getUserPage('Lipsum').get())
+ >>> pywikibot.output(exampleUser.getUserTalkPage().get())
""")
# unit tests
import tests.test_userlib
Revision: 8261
Author: xqt
Date: 2010-06-09 12:40:26 +0000 (Wed, 09 Jun 2010)
Log Message:
-----------
Improved User() class (patch request bug #3011428 by stanlekub). Thanks
Modified Paths:
--------------
branches/rewrite/pywikibot/page.py
Modified: branches/rewrite/pywikibot/page.py
===================================================================
--- branches/rewrite/pywikibot/page.py 2010-06-09 07:15:01 UTC (rev 8260)
+++ branches/rewrite/pywikibot/page.py 2010-06-09 12:40:26 UTC (rev 8261)
@@ -1758,39 +1758,61 @@
return sorted(list(set(self.categories())))
+ip_regexp = re.compile(r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}' \
+ r'(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$')
+
class User(Page):
"""A class that represents a Wiki user.
"""
@deprecate_arg("insite", None)
def __init__(self, source, title=u''):
- """All parameters are the same as for Page() constructor.
+ """Initializer for a User object.
+ All parameters are the same as for Page() constructor.
"""
if len(title) > 1 and title[0] == u'#':
- self.is_autoblock = True
+ self._isAutoblock = True
title = title[1:]
else:
- self.is_autoblock = False
+ self._isAutoblock = False
Page.__init__(self, source, title, ns=2)
if self.namespace() != 2:
raise ValueError(u"'%s' is not in the user namespace!"
% title)
- if self.is_autoblock:
+ if self._isAutoblock:
# This user is probably being queried for purpose of lifting
# an autoblock.
- pywikibot.output("This is an autoblock ID, "
- "you can only use to unblock it.")
+ pywikibot.output(
+ "This is an autoblock ID, you can only use to unblock it.")
+ def name(self):
+ return self.username
+
@property
def username(self):
""" Convenience method that returns the title of the page with
namespace prefix omitted, aka the username, as a Unicode string.
"""
- if self.is_autoblock:
+ if self._isAutoblock:
return u'#' + self.title(withNamespace=False)
else:
return self.title(withNamespace=False)
+ def isRegistered(self, force=False):
+ """ Return True if a user with this name is registered on this site,
+ False otherwise.
+
+ @param force: if True, forces reloading the data from API
+ @type force: bool
+ """
+ if self.isAnonymous():
+ return False
+ else:
+ return self.getprops(force).get('missing') is None
+
+ def isAnonymous(self):
+ return ip_regexp.match(self.username) is not None
+
def getprops(self, force=False):
""" Return a Dictionnary that contains user's properties. Use cached
values if already called before, otherwise fetch data from the API.
@@ -1801,37 +1823,41 @@
if force:
del self._userprops
if not hasattr(self, '_userprops'):
- usrequest = pywikibot.data.api.Request(
- site=self.site,
- action='query',
- list='users',
- usprop='blockinfo|groups|editcount|registration|emailable',
- ususers=self.username,
- )
- usdata = usrequest.submit()
- assert 'query' in usdata, \
- "API users response lacks 'query' key"
- assert 'users' in usdata['query'], \
- "API users response lacks 'users' key"
- if u'missing' in usdata['query']['users'][0] or \
- u'invalid' in usdata['query']['users'][0]:
- raise pywikibot.Error(u'No such user or invaild username (%s)'\
- % self.username)
- self._userprops = usdata['query']['users'][0]
+ self._userprops = list(self.site.users([self.username,]))[0]
+ if self.isAnonymous():
+ r = list(self.site.blocks(users=self.username))
+ if r:
+ self._userprops['blockedby'] = r[0]['by']
+ self._userprops['blockreason'] = r[0]['reason']
return self._userprops
+ @deprecated('User.registration()')
def registrationTime(self, force=False):
- """ Return registration time for this user, as a Unicode string in
- ISO8601 format, or None if the date is unknown.
+ """ Return registration date for this user, as a long in
+ Mediawiki's internal timestamp format, or 0 if the date is unknown.
@param force: if True, forces reloading the data from API
@type force: bool
"""
- if 'registration' in self.getprops(force):
- return self.getprops()['registration']
+ if self.registration():
+ return long(self.registration().strftime('%Y%m%d%H%M%S'))
+ else:
+ return 0
+ def registration(self, force=False):
+ """ Return registration date for this user as a pywikibot.Timestamp
+ object, or None if the date is unknown.
+
+ @param force: if True, forces reloading the data from API
+ @type force: bool
+ """
+ reg = self.getprops(force).get('registration')
+ if reg:
+ return pywikibot.Timestamp.fromISOformat(reg)
+
def editCount(self, force=False):
- """ Return edit count for this user as int.
+ """ Return edit count for this user as int. This is always 0 for
+ 'anonymous' users.
@param force: if True, forces reloading the data from API
@type force: bool
@@ -1878,7 +1904,7 @@
page title (optional)
@type subpage: unicode
"""
- if self.is_autoblock:
+ if self._isAutoblock:
#This user is probably being queried for purpose of lifting
#an autoblock, so has no user pages per se.
raise AutoblockUser("This is an autoblock ID, you can only use to unblock it.")
@@ -1894,7 +1920,7 @@
talk page title (optional)
@type subpage: unicode
"""
- if self.is_autoblock:
+ if self._isAutoblock:
#This user is probably being queried for purpose of lifting
#an autoblock, so has no user talk pages per se.
raise AutoblockUser("This is an autoblock ID, you can only use to unblock it.")
@@ -1962,8 +1988,8 @@
@deprecate_arg("limit", "total") # To be consistent with rest of framework
@deprecate_arg("namespace", "namespaces")
def contributions(self, total=500, namespaces=[]):
- """ Yield tuples describing this user edits.
- Each tuple is composed of a pywikibot.Page object,
+ """ Yield tuples describing this user edits with an upper bound of
+ 'limit'. Each tuple is composed of a pywikibot.Page object,
the revision id (int), the edit timestamp (as int in mediawiki's
internal format), and the comment (unicode).
Pages returned are not guaranteed to be unique.
@@ -1977,23 +2003,25 @@
namespaces=namespaces, total=total):
ts = pywikibot.Timestamp.fromISOformat(contrib['timestamp'])
ts = int(ts.strftime("%Y%m%d%H%M%S"))
- yield Page(Link(contrib['title'], self.site,
- defaultNamespace=contrib['ns'])), \
+ yield Page(self.site, contrib['title'], contrib['ns']), \
contrib['revid'], ts, contrib['comment']
@deprecate_arg("number", "total")
def uploadedImages(self, total=10):
""" Yield tuples describing files uploaded by this user.
Each tuple is composed of a pywikibot.Page, the timestamp (str in
- ISO8601 format), comment (unicode) and a bool (always False...).
+ ISO8601 format), comment (unicode) and a bool for pageid > 0.
Pages returned are not guaranteed to be unique.
@param total: limit result to this number of pages
@type total: int
"""
+ if not self.isRegistered():
+ raise StopIteration
for item in self.site.logevents(logtype='upload', user=self.username,
total=total):
- yield item.title(), str(item.timestamp()), item.comment(), False
+ yield ImagePage(self.site, item.title().title()), \
+ unicode(item.timestamp()), item.comment(), item.pageid() > 0
class Revision(object):
"""A structure holding information about a single revision of a Page."""