Revision: 8194 Author: alexsh Date: 2010-05-18 15:27:06 +0000 (Tue, 18 May 2010)
Log Message: ----------- code cleanup, auto cleanup duplicate page object in Page().getReferences().
Modified Paths: -------------- trunk/pywikipedia/featured.py trunk/pywikipedia/userlib.py trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/featured.py
===================================================================
--- trunk/pywikipedia/featured.py	2010-05-18 01:34:52 UTC (rev 8193)
+++ trunk/pywikipedia/featured.py	2010-05-18 15:27:06 UTC (rev 8194)
@@ -54,13 +54,11 @@
 import wikipedia, catlib, config
 def CAT(site,name):
-    name = site.namespace(14) + ':' + name
-    cat=catlib.Category(site, name)
+    cat = catlib.Category(site, name)
     return cat.articles()
 def BACK(site,name):
-    name = site.namespace(10) + ':' + name
-    p=wikipedia.Page(site, name)
+    p = wikipedia.Page(site, name, defaultNamespace=10)
     return [page for page in p.getReferences(follow_redirects = False, onlyTemplateInclusion=True)]
msg = {
Modified: trunk/pywikipedia/userlib.py
===================================================================
--- trunk/pywikipedia/userlib.py	2010-05-18 01:34:52 UTC (rev 8193)
+++ trunk/pywikipedia/userlib.py	2010-05-18 15:27:06 UTC (rev 8194)
@@ -281,8 +281,8 @@
     def uploadedImages(self, number = 10):
         if not self.site().has_api() or self.site().versionnumber() < 11:
-            for p,t,c,a in self._uploadedImagesOld(number):
-                yield p,t,c,a
+            for c in self._uploadedImagesOld(number):
+                yield c
             return
for s in self.site().logpages(number, mode = 'upload', user = self.name(), dump = True):
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py	2010-05-18 01:34:52 UTC (rev 8193)
+++ trunk/pywikipedia/wikipedia.py	2010-05-18 15:27:06 UTC (rev 8194)
@@ -1251,7 +1251,7 @@
         datas = query.GetData(params, self.site())
         data = datas['query'].values()
         if len(data) == 2:
-            data = data[0] + data[1]
+            data = list(set(data[0] + data[1]))
         else:
             data = data[0]
@@ -6489,12 +6489,11 @@
         """Yield Pages from results of Special:Linksearch for 'siteurl'."""
         cache = []
         R = re.compile('title ?="([^<>]*?)">[^<>]*</a></li>')
-        api = self.has_api()
         urlsToRetrieve = [siteurl]
         if not siteurl.startswith('*.'):
             urlsToRetrieve.append('*.' + siteurl)
-        if api and self.versionnumber() >= 11:
+        if self.has_api() and self.versionnumber() >= 11:
             output(u'Querying API exturlusage...')
             for url in urlsToRetrieve:
                 params = {
pywikipedia-svn@lists.wikimedia.org