[Pywikipedia-svn] SVN: [7203] trunk/pywikipedia

alexsh at svn.wikimedia.org
Thu Sep 3 16:47:53 UTC 2009


Revision: 7203
Author:   alexsh
Date:     2009-09-03 16:47:53 +0000 (Thu, 03 Sep 2009)

Log Message:
-----------
query.py: follow-up: read the global wikipedia.verbose instead of taking a verbose parameter

Modified Paths:
--------------
    trunk/pywikipedia/casechecker.py
    trunk/pywikipedia/query.py
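
A sketch of the signature change (illustrative, not part of the diff;
assumes wikipedia.verbose is the module-level flag toggled e.g. by the
-verbose command-line option):

    # r7202: GetData(params, site=None, verbose=False, useAPI=True, ...)
    # r7203: GetData(params, site=None, useAPI=True, ...)
    wikipedia.verbose = True      # verbosity is now a global setting
    data = query.GetData(params)  # no per-call verbose argument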

Modified: trunk/pywikipedia/casechecker.py
===================================================================
--- trunk/pywikipedia/casechecker.py	2009-09-03 16:46:52 UTC (rev 7202)
+++ trunk/pywikipedia/casechecker.py	2009-09-03 16:47:53 UTC (rev 7203)
@@ -207,7 +207,7 @@
                 'indexpageids' : '',
             }
 
-            data = query.GetData(wlparams, wikipedia.getSite(self.site.lang), wikipedia.verbose, encodeTitle=False)
+            data = query.GetData(wlparams, wikipedia.getSite(self.site.lang), encodeTitle=False)
             if len(data['query']['pageids']) == 1:
                 pageid = data['query']['pageids'][0]
                 links = data['query']['pages'][pageid]['links']
@@ -232,7 +232,7 @@
                 while True:
                     # Get data
                     self.params['gapfrom'] = self.apfrom
-                    data = query.GetData(self.params, self.site, wikipedia.verbose, True)
+                    data = query.GetData(self.params, self.site)
                     try:
                         self.apfrom = data['query-continue']['allpages']['gapfrom']
                     except:
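
Dropping the two trailing arguments above is safe (sketch, not part of
the diff) because useAPI already defaults to True in the new signature,
so the call keeps taking the API code path:

    # old: query.GetData(self.params, self.site, wikipedia.verbose, True)
    # new: relies on the default useAPI=True (and the global wikipedia.verbose)
    data = query.GetData(self.params, self.site)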

Modified: trunk/pywikipedia/query.py
===================================================================
--- trunk/pywikipedia/query.py	2009-09-03 16:46:52 UTC (rev 7202)
+++ trunk/pywikipedia/query.py	2009-09-03 16:47:53 UTC (rev 7203)
@@ -33,10 +33,10 @@
     import simplejson as json
     
 
-def GetData(params, site = None, verbose = False, useAPI = True, retryCount = 5, encodeTitle = True, sysop = False, back_response = False):
+def GetData(params, site = None, useAPI = True, retryCount = 5, encodeTitle = True, sysop = False, back_response = False):
     """Get data from the query api, and convert it into a data object
     """
-    if site is None:
+    if not site:
         site = wikipedia.getSite()
 
     for k,v in params.iteritems():
@@ -74,7 +74,7 @@
     else:
         path = site.query_address() + urllib.urlencode(params.items())
 
-    if verbose:
+    if wikipedia.verbose:
         if titlecount > 0:
             wikipedia.output(u"Requesting %d titles from %s:%s" % (titlecount, site.lang, path))
         else:
@@ -92,7 +92,7 @@
                 res = urllib2.urlopen(urllib2.Request(site.protocol() + '://' + site.hostname() + address, site.urlEncode(params)))
                 jsontext = res.read()
             elif params['action'] in postAC:
-                res, jsontext = site.postData(path, urllib.urlencode(params.items()), cookies=site.cookies(sysop=sysop), sysop=sysop)
+                res, jsontext = site.postForm(path, params, sysop, site.cookies(sysop = sysop) )
             else:
                 if back_response:
                     res, jsontext = site.getUrl( path, retry=True, data=data, sysop=sysop, back_response=True)
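
A minimal end-to-end usage sketch under the new signature (hypothetical
example; the siteinfo request is illustrative, not part of this commit):

    import wikipedia, query

    wikipedia.verbose = True            # request URLs are now logged via the global flag
    site = wikipedia.getSite()
    params = {
        'action': 'query',              # not a POST action, so the GET branch is used
        'meta': 'siteinfo',
    }
    data = query.GetData(params, site)  # verbose argument is gone
    print data['query']['general']['sitename']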
