[Pywikipedia-l] SVN: [6445] branches/rewrite/pywikibot

russblau at svn.wikimedia.org
Thu Feb 26 21:30:24 UTC 2009


Revision: 6445
Author:   russblau
Date:     2009-02-26 21:30:24 +0000 (Thu, 26 Feb 2009)

Log Message:
-----------
Implement the (relatively) new "prop=categoryinfo" query
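
For reference, "prop=categoryinfo" asks the MediaWiki API for the member
counts of category pages.  The block the server returns per page (and that
the hunks below cache on the Page object) looks roughly like this; the
field names follow the MediaWiki API, the values are invented for the
example:

    # Illustrative shape of pagedict["categoryinfo"]; values are made up.
    categoryinfo = {
        "size": 42,      # total number of members
        "pages": 30,     # ordinary pages in the category
        "files": 2,      # files/images in the category
        "subcats": 10,   # subcategories
    }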

Modified Paths:
--------------
    branches/rewrite/pywikibot/data/api.py
    branches/rewrite/pywikibot/page.py
    branches/rewrite/pywikibot/site.py

Modified: branches/rewrite/pywikibot/data/api.py
===================================================================
--- branches/rewrite/pywikibot/data/api.py	2009-02-26 16:03:34 UTC (rev 6444)
+++ branches/rewrite/pywikibot/data/api.py	2009-02-26 21:30:24 UTC (rev 6445)
@@ -493,9 +493,9 @@
         QueryGenerator.__init__(self, generator=generator, **kwargs)
         # get some basic information about every page generated
         if 'prop' in self.request:
-            self.request['prop'] += "|info|imageinfo"
+            self.request['prop'] += "|info|imageinfo|categoryinfo"
         else:
-            self.request['prop'] = 'info|imageinfo'
+            self.request['prop'] = 'info|imageinfo|categoryinfo'
         if "inprop" in self.request:
             if "protection" not in self.request["inprop"]:
                 self.request["inprop"] += "|protection"
@@ -671,6 +671,8 @@
         page._revid = pagedict['lastrevid']
         if page._revid in page._revisions:
             page._text = page._revisions[page._revid].text
+    if "categoryinfo" in pagedict:
+        page._catinfo = pagedict["categoryinfo"]
 
 
 if __name__ == "__main__":

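Taken together, the two api.py hunks mean that every PageGenerator query
now also requests categoryinfo by default, and update_page() caches the
returned block on the Page object as page._catinfo.  A rough sketch of the
effect (editorial illustration, not part of the commit; the generator is
hypothetical):

    import pywikibot

    # Category pages coming out of an api.PageGenerator-based iterator may
    # already carry the cached data, so no further query is needed.
    for page in some_generator:                     # any Page-yielding iterator
        if page.isCategory():
            info = getattr(page, "_catinfo", None)  # set by update_page()
            if info is not None:
                pywikibot.output(u"%s has %d members"
                                 % (page.title(), info["size"]))
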
Modified: branches/rewrite/pywikibot/page.py
===================================================================
--- branches/rewrite/pywikibot/page.py	2009-02-26 16:03:34 UTC (rev 6444)
+++ branches/rewrite/pywikibot/page.py	2009-02-26 21:30:24 UTC (rev 6445)
@@ -1169,6 +1169,20 @@
                 pywikibot.output(u"Saving page %s failed: %s"
                                  % (self.title(asLink=True), error.message))
 
+    @property
+    def categoryinfo(self):
+        """If supported, return a dict containing category content values:
+
+        Numbers of pages, subcategories, files, and total contents.
+
+        """
+        if not self.isCategory():
+            return None # should this raise an exception??
+        try:
+            return self.site().categoryinfo(self)
+        except NotImplementedError:
+            return None
+
 ######## DEPRECATED METHODS ########
 
     def encoding(self):

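A minimal usage sketch of the new property (editorial, not part of the
commit; the site/category names are examples and the constructor calls
assume the rewrite branch's usual (site, title) form):

    import pywikibot
    site = pywikibot.Site('en', 'wikipedia')
    cat = pywikibot.Category(site, u'Category:Living people')
    info = cat.categoryinfo        # None on wikis without prop=categoryinfo
    if info is not None:
        pywikibot.output(u"%(pages)d pages, %(subcats)d subcats, %(files)d files"
                         % info)
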
Modified: branches/rewrite/pywikibot/site.py
===================================================================
--- branches/rewrite/pywikibot/site.py	2009-02-26 16:03:34 UTC (rev 6444)
+++ branches/rewrite/pywikibot/site.py	2009-02-26 21:30:24 UTC (rev 6445)
@@ -1384,6 +1384,25 @@
             for linkdata in pageitem['extlinks']:
                 yield linkdata['*']
 
+    # TODO: implement a method to retrieve categoryinfo
+    def getcategoryinfo(self, category):
+        """Retrieve data on contents of category."""
+        cititle = category.title(withSection=False)
+        ciquery = api.PropertyGenerator("categoryinfo",
+                                        titles=cititle.encode(self.encoding()),
+                                        site=self)
+        for pageitem in ciquery:
+            if pageitem['title'] != cititle:
+                raise Error(
+                    u"categoryinfo: Query on %s returned data on '%s'"
+                    % (category, pageitem['title']))
+            api.update_page(category, pageitem)
+
+    def categoryinfo(self, category):
+        if not hasattr(category, "_catinfo"):
+            self.getcategoryinfo(category)
+        return category._catinfo
+
     @deprecate_arg("throttle", None)
     @deprecate_arg("includeredirects", "filterredir")
     def allpages(self, start="!", prefix="", namespace=0, filterredir=None,
@@ -2175,7 +2194,7 @@
                 self.unlock_page(page)
                 if "nochange" in result["edit"]:
                     # null edit, page not changed
-                    # TODO: do we want to notify the user of this?
+                    #TODO: do we want to notify the user of this?
                     return True
                 page._revid = result["edit"]["newrevid"]
                 # see http://www.mediawiki.org/wiki/API:Wikimania_2006_API_discussion#Notes
@@ -2302,7 +2321,7 @@
         if "move" not in result:
             pywikibot.output(u"movepage: %s" % result, level=pywikibot.ERROR)
             raise Error("movepage: unexpected response")
-        # TODO: Check for talkmove-error messages
+        #TODO: Check for talkmove-error messages
         if "talkmove-error-code" in result["move"]:
             pywikibot.output(u"movepage: Talk page %s not moved"
                               % (page.toggleTalkPage().title(asLink=True)),
@@ -2417,9 +2436,9 @@
         finally:
             self.unlock_page(page)
 
-    # TODO: implement undelete
+    #TODO: implement undelete
 
-    # TODO: implement patrol
+    #TODO: implement patrol
 
     def linksearch(self, siteurl, limit=None):
         """Backwards-compatible interface to exturlusage()"""
@@ -2452,7 +2471,7 @@
 #### METHODS NOT IMPLEMENTED YET ####
 class NotImplementedYet:
 
-    # TODO: is this needed any more? can it be obtained from the http module?
+    #TODO: is this needed any more? can it be obtained from the http module?
     def cookies(self, sysop = False):
         """Return a string containing the user's current cookies."""
         self._loadCookies(sysop = sysop)
@@ -2494,7 +2513,7 @@
                 f.close()
 
     # THESE ARE FUNCTIONS NOT YET IMPLEMENTED IN THE API
-    # TODO: avoid code duplication for the following methods
+    #TODO: avoid code duplication for the following methods
     def newpages(self, number = 10, get_redirect = False, repeat = False):
         """Yield new articles (as Page objects) from Special:Newpages.
 

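Design note on the site.py hunk above (editorial, not part of the commit):
Site.categoryinfo() only queries the API when the Category object has no
cached _catinfo attribute, so repeated lookups on the same object cost at
most one request; getcategoryinfo() also cross-checks the returned title
against the one requested and raises Error on a mismatch.  Roughly
(continuing the page.py example above):

    import pywikibot
    site = pywikibot.Site('en', 'wikipedia')
    cat = pywikibot.Category(site, u'Category:Living people')
    info1 = site.categoryinfo(cat)   # first call issues a prop=categoryinfo query
    info2 = site.categoryinfo(cat)   # answered from cat._catinfo, no new request
    assert info1 is info2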