Revision: 7114 Author: alexsh Date: 2009-08-05 21:40:30 +0000 (Wed, 05 Aug 2009)
Log Message: ----------- Remove all explicit 'useAPI=True' arguments from query.GetData() calls, since the function now defaults useAPI to True
Modified Paths: -------------- trunk/pywikipedia/casechecker.py trunk/pywikipedia/catlib.py trunk/pywikipedia/imageuncat.py trunk/pywikipedia/maintenance/family_check.py trunk/pywikipedia/welcome.py trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/casechecker.py =================================================================== --- trunk/pywikipedia/casechecker.py 2009-08-05 21:39:21 UTC (rev 7113) +++ trunk/pywikipedia/casechecker.py 2009-08-05 21:40:30 UTC (rev 7114) @@ -200,14 +200,14 @@ wlpage = self.whitelists[self.site.lang] wikipedia.output(u'Loading whitelist from %s' % wlpage) wlparams = { - 'action' : 'query', - 'prop' : 'links', - 'titles' : wlpage, - 'redirects' : '', - 'indexpageids' : '', - } + 'action' : 'query', + 'prop' : 'links', + 'titles' : wlpage, + 'redirects' : '', + 'indexpageids' : '', + }
- data = query.GetData(self.site.lang, wlparams, wikipedia.verbose, useAPI=True, encodeTitle=False) + data = query.GetData(wlparams, wikipedia.getSite(self.site.lang), wikipedia.verbose, encodeTitle=False) if len(data['query']['pageids']) == 1: pageid = data['query']['pageids'][0] links = data['query']['pages'][pageid]['links']
Modified: trunk/pywikipedia/catlib.py =================================================================== --- trunk/pywikipedia/catlib.py 2009-08-05 21:39:21 UTC (rev 7113) +++ trunk/pywikipedia/catlib.py 2009-08-05 21:40:30 UTC (rev 7114) @@ -533,8 +533,7 @@ 'cmtitle' :CatName, }
- data = query.GetData(params, site = site, - useAPI = True, encodeTitle = False) + data = query.GetData(params, site, encodeTitle = False) categories_parsed.append(CatName) try: members = data['query']['categorymembers']
Modified: trunk/pywikipedia/imageuncat.py =================================================================== --- trunk/pywikipedia/imageuncat.py 2009-08-05 21:39:21 UTC (rev 7113) +++ trunk/pywikipedia/imageuncat.py 2009-08-05 21:40:30 UTC (rev 7114) @@ -1243,7 +1243,7 @@ 'leend' :today.strftime(dateformat) }
- data = query.GetData(params, site, useAPI = True, encodeTitle = False) + data = query.GetData(params, site, encodeTitle = False) try: for item in data['query']['logevents']: result.append(item['title']) @@ -1279,7 +1279,7 @@ 'rctype' :'edit|log', }
- data = query.GetData(params, site, useAPI = True, encodeTitle = False) + data = query.GetData(params, site, encodeTitle = False) try: for item in data['query']['recentchanges']: result.append(item['title'])
Modified: trunk/pywikipedia/maintenance/family_check.py =================================================================== --- trunk/pywikipedia/maintenance/family_check.py 2009-08-05 21:39:21 UTC (rev 7113) +++ trunk/pywikipedia/maintenance/family_check.py 2009-08-05 21:40:30 UTC (rev 7114) @@ -17,7 +17,7 @@ 'meta': 'siteinfo', 'siprop': 'namespaces'} try: - data = query.GetData(predata, site = site, useAPI = True)['query']['namespaces'] + data = query.GetData(predata, site)['query']['namespaces'] except wikipedia.ServerError, e: wikipedia.output(u'Warning! %s: %s' % (site, e)) return
Modified: trunk/pywikipedia/welcome.py =================================================================== --- trunk/pywikipedia/welcome.py 2009-08-05 21:39:21 UTC (rev 7113) +++ trunk/pywikipedia/welcome.py 2009-08-05 21:40:30 UTC (rev 7114) @@ -506,8 +506,7 @@ 'usprop' :'blockinfo', }
- data = query.GetData(params, - useAPI = True, encodeTitle = False) + data = query.GetData(params, encodeTitle = False) # If there's not the blockedby parameter (that means the user isn't blocked), it will return False otherwise True. try: blockedBy = data['query']['users'][0]['blockedby']
Modified: trunk/pywikipedia/wikipedia.py =================================================================== --- trunk/pywikipedia/wikipedia.py 2009-08-05 21:39:21 UTC (rev 7113) +++ trunk/pywikipedia/wikipedia.py 2009-08-05 21:40:30 UTC (rev 7114) @@ -919,8 +919,7 @@ 'prop' :'info', 'titles' :self.title(), } - data = query.GetData(params, - useAPI = True, encodeTitle = False) + data = query.GetData(params, encodeTitle = False) pageid = data['query']['pages'].keys()[0] if data['query']['pages'][pageid].keys()[0] == 'lastrevid': return data['query']['pages'][pageid]['lastrevid'] # if ok, @@ -952,8 +951,7 @@ 'tllimit' :tllimit, }
- data = query.GetData(params, - useAPI = True, encodeTitle = False) + data = query.GetData(params, encodeTitle = False) try: pageid = data['query']['pages'].keys()[0] except KeyError: @@ -1327,7 +1325,7 @@ #if titles: # predata['titles'] = query.ListToParam(titles)
- text = query.GetData(predata, useAPI = True)['query']['pages'] + text = query.GetData(predata)['query']['pages']
for pageid in text: if text[pageid].has_key('missing'): @@ -2765,7 +2763,7 @@ 'titles' :self.title(), } try: - data = query.GetData(params, useAPI = True, encodeTitle = False)['query']['pages'] + data = query.GetData(params, encodeTitle = False)['query']['pages'] except KeyError: raise NoPage(u'API Error, nothing found in the APIs')
@@ -2829,7 +2827,7 @@ 'titles' :self.title(), 'iiprop' :'url', } - imagedata = query.GetData(params, useAPI = True, encodeTitle = False) + imagedata = query.GetData(params, encodeTitle = False) try: url=imagedata['query']['pages'].values()[0]['imageinfo'][0]['url'] # urlR = re.compile(r'<div class="fullImageLink" id="file">.*?<a href="(?P<url>[^ ]+?)"(?! class="image")|<span class="dangerousLink"><a href="(?P<url2>.+?)"', re.DOTALL) @@ -2899,7 +2897,7 @@ 'prop' :'imageinfo', 'titles' :self.title(), } - data = query.GetData(params, useAPI = True, encodeTitle = False) + data = query.GetData(params, encodeTitle = False) try: # We don't know the page's id, if any other better idea please change it pageid = data['query']['pages'].keys()[0] @@ -2921,7 +2919,7 @@ 'iiprop' :'sha1', } # First of all we need the Hash that identify an image - data = query.GetData(params, useAPI = True, encodeTitle = False) + data = query.GetData(params, encodeTitle = False) pageid = data['query']['pages'].keys()[0] try: hash_found = data['query']['pages'][pageid][u'imageinfo'][0][u'sha1'] @@ -4505,7 +4503,7 @@ 'meta': 'userinfo', 'uiprop': 'blockinfo', } - data = query.GetData(params, self, useAPI = True)['query']['userinfo'] + data = query.GetData(params, self)['query']['userinfo'] return data.has_key('blockby') except NotImplementedError: return False @@ -5121,7 +5119,7 @@ 'meta':'allmessages', } try: - datas = query.GetData(params, useAPI = True)['query']['allmessages'] + datas = query.GetData(params)['query']['allmessages'] except KeyError: raise ServerError("The APIs don't return data, the site may be down") except NotImplementedError: @@ -5222,7 +5220,7 @@ 'meta': 'userinfo', 'uiprop': 'blockinfo|groups|rights|hasmsg|ratelimits|preferencestoken', } - text = query.GetData(params, site = self, useAPI = True, sysop=sysop)['query']['userinfo'] + text = query.GetData(params, self, sysop=sysop)['query']['userinfo'] ##output('%s' % text) # for debug use only else: url = 
self.edit_address('Non-existing_page') @@ -5503,8 +5501,7 @@ if leuser is not None: params['leuser'] = leuser if letitle is not None: params['letitle'] = letitle while True: - data = query.GetData(params, - useAPI = True, encodeTitle = False) + data = query.GetData(params, encodeTitle = False) try: imagesData = data['query']['logevents'] except KeyError: @@ -5572,8 +5569,7 @@ if rcshow is not None: params['rcshow'] = rcshow if rctype is not None: params['rctype'] = rctype while True: - data = query.GetData(params, - useAPI = True, encodeTitle = False) + data = query.GetData(params, encodeTitle = False) try: rcData = data['query']['recentchanges'] except KeyError: @@ -5709,7 +5705,7 @@ 'rnlimit': '1', #'': '', } - data = query.GetData(params, useAPI = True) + data = query.GetData(params) return Page(self, data['query']['random'][0]['title']) else: """Yield random page via Special:Random""" @@ -5727,7 +5723,7 @@ 'rnlimit': '1', 'rnredirect': '1', } - data = query.GetData(params, useAPI = True) + data = query.GetData(params) return Page(self, data['query']['random'][0]['title']) else: """Yield random redirect page via Special:RandomRedirect.""" @@ -5781,7 +5777,7 @@ params['apfrom'] = start if throttle: get_throttle() - data = query.GetData(params, useAPI = True) + data = query.GetData(params)
for p in data['query']['allpages']: yield Page(self, p['title']) @@ -5973,7 +5969,7 @@ 'euquery': url, } while True: - data = query.GetData(params, useAPI = True) + data = query.GetData(params) if data['query']['exturlusage'] == []: break for pages in data['query']['exturlusage']: @@ -6586,7 +6582,7 @@ 'list' :'allimages', 'aisha1' :hash_found, } - allimages = query.GetData(params, site = getSite(self.lang, self.family), useAPI = True, encodeTitle = False)['query']['allimages'] + allimages = query.GetData(params, getSite(self.lang, self.family), encodeTitle = False)['query']['allimages'] files = list() for imagedata in allimages: image = imagedata[u'name']