Revision: 6652
Author:   cosoleto
Date:     2009-04-21 22:04:22 +0000 (Tue, 21 Apr 2009)
Log Message:
-----------
Use iter methods where possible in 'for'. They are faster because they don't have to create a new list object (see PEP 290). Converted more of the remaining has_key() cases, as already done in r6630.
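For reference, a minimal Python 2 sketch (not part of the commit; the dictionary below is made up for illustration) of the two rewrites applied throughout this revision: dict.iteritems()/itervalues() return lazy iterators instead of building a temporary list, and the 'in' operator replaces the deprecated has_key():

    namespaces = {0: u'', 1: u'Talk'}

    # Old style: items() materializes a list of (key, value) pairs first.
    for num, val in namespaces.items():
        print num, val

    # New style: iteritems() yields the pairs lazily, without the
    # intermediate list (the pattern recommended by PEP 290).
    for num, val in namespaces.iteritems():
        print num, val

    # has_key() is deprecated in Python 2 (and removed in Python 3);
    # the 'in' operator performs the same membership test.
    if namespaces.has_key(0):    # old
        print 'found'
    if 0 in namespaces:          # new
        print 'found'

Iterating a dict directly (for p in processes:) is the same optimization applied to .keys() below; it is safe as long as the loop body does not add or remove keys.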
Modified Paths:
--------------
    trunk/pywikipedia/copyright.py
    trunk/pywikipedia/family.py
    trunk/pywikipedia/gui.py
    trunk/pywikipedia/interwiki.py
    trunk/pywikipedia/wikipedia.py
    trunk/pywikipedia/wiktionary.py
Modified: trunk/pywikipedia/copyright.py
===================================================================
--- trunk/pywikipedia/copyright.py	2009-04-21 18:45:28 UTC (rev 6651)
+++ trunk/pywikipedia/copyright.py	2009-04-21 22:04:22 UTC (rev 6652)
@@ -271,7 +271,7 @@
 def skip_section(text):
     l = list()
-    for s in sections_to_skip.values():
+    for s in sections_to_skip.itervalues():
         l.extend(s)
     sect_titles = '|'.join(l)
Modified: trunk/pywikipedia/family.py
===================================================================
--- trunk/pywikipedia/family.py	2009-04-21 18:45:28 UTC (rev 6651)
+++ trunk/pywikipedia/family.py	2009-04-21 22:04:22 UTC (rev 6652)
@@ -3087,7 +3087,7 @@
         This is supposed to be called in the constructor of the family."""
         self.langs[code] = location
-        for num, val in namespaces.items():
+        for num, val in namespaces.iteritems():
             self.namespaces[num][code]=val
 
     def get_known_families(self, site):
Modified: trunk/pywikipedia/gui.py
===================================================================
--- trunk/pywikipedia/gui.py	2009-04-21 18:45:28 UTC (rev 6651)
+++ trunk/pywikipedia/gui.py	2009-04-21 22:04:22 UTC (rev 6652)
@@ -87,7 +87,7 @@
             '<<select-all>>': ['<Control-Key-a>'],
             '<<undo>>': ['<Control-Key-z>', '<Control-Key-Z>'],
         }
-        for event, keylist in keydefs.items():
+        for event, keylist in keydefs.iteritems():
             if keylist:
                 self.event_add(event, *keylist)
Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py	2009-04-21 18:45:28 UTC (rev 6651)
+++ trunk/pywikipedia/interwiki.py	2009-04-21 22:04:22 UTC (rev 6652)
@@ -652,7 +652,7 @@
                 wikipedia.output("%s has a backlink from %s."%(page,linkingPage))
                 self.makeForcedStop(counter)
                 return False
-        if self.foundIn.has_key(page):
+        if page in self.foundIn:
             # not new
             self.foundIn[page].append(linkingPage)
             return False
@@ -670,7 +670,7 @@
        Returns True if the namespaces are different and the user has selected
        not to follow the linked page.
        """
-        if self.foundIn.has_key(linkedPage):
+        if linkedPage in self.foundIn:
             # We have seen this page before, don't ask again.
             return False
         elif self.originPage.namespace() != linkedPage.namespace():
@@ -985,7 +985,7 @@
                 new[site] = [page]
         # See if new{} contains any problematic values
         result = {}
-        for site, pages in new.items():
+        for site, pages in new.iteritems():
             if len(pages) > 1:
                 errorCount += 1
                 self.problem("Found more than one link for %s" % site)
@@ -1003,7 +1003,7 @@
             return None
 
         # First loop over the ones that have more solutions
-        for site, pages in new.items():
+        for site, pages in new.iteritems():
             if len(pages) > 1:
                 wikipedia.output(u"=" * 30)
                 wikipedia.output(u"Links to %s" % site)
@@ -1032,7 +1032,7 @@
         # Loop over the ones that have one solution, so are in principle
         # not a problem.
         acceptall = False
-        for site, pages in new.items():
+        for site, pages in new.iteritems():
             if len(pages) == 1:
                 if not acceptall:
                     wikipedia.output(u"=" * 30)
@@ -1056,7 +1056,7 @@
                     # None acceptable
                     break
         else: # errorCount <= 0, hence there are no lists longer than one.
-            for site, pages in new.items():
+            for site, pages in new.iteritems():
                 result[site] = pages[0]
         return result
@@ -1092,7 +1092,7 @@
         # Make sure new contains every page link, including the page we are processing
         # replaceLinks will skip the site it's working on.
-        if not new.has_key(self.originPage.site()):
+        if self.originPage.site() not in new:
             new[self.originPage.site()] = self.originPage
 
         #self.replaceLinks(self.originPage, new, True, bot)
@@ -1106,10 +1106,10 @@
             frgnSiteDone = False
             for siteCode in lclSite.family.languages_by_size + [s for s in lclSite.family.langs.keys() if (not s in lclSite.family.languages_by_size and not s in lclSite.family.obsolete)]:
                 site = wikipedia.getSite(code = siteCode)
-                if (not lclSiteDone and site == lclSite) or (not frgnSiteDone and site != lclSite and new.has_key(site)):
+                if (not lclSiteDone and site == lclSite) or (not frgnSiteDone and site != lclSite and site in new):
                     if site == lclSite:
                         lclSiteDone = True   # even if we fail the update
-                    if config.usernames.has_key(site.family.name) and config.usernames[site.family.name].has_key(site.lang):
+                    if site.family.name in config.usernames and site.lang in config.usernames[site.family.name]:
                         try:
                             if self.replaceLinks(new[site], new, bot):
                                 updatedSites.append(site)
@@ -1119,7 +1119,7 @@
                             notUpdatedSites.append(site)
                         except GiveUpOnPage:
                             break
-                elif not globalvar.strictlimittwo and new.has_key(site) and site != lclSite:
+                elif not globalvar.strictlimittwo and site in new and site != lclSite:
                     old={}
                     try:
                         for page in new[site].interwiki():
@@ -1141,7 +1141,7 @@
         else:
             for (site, page) in new.iteritems():
                 # if we have an account for this site
-                if config.usernames.has_key(site.family.name) and config.usernames[site.family.name].has_key(site.lang):
+                if site.family.name in config.usernames and site.lang in config.usernames[site.family.name]:
                     # Try to do the changes
                     try:
                         if self.replaceLinks(page, new, bot):
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py	2009-04-21 18:45:28 UTC (rev 6651)
+++ trunk/pywikipedia/wikipedia.py	2009-04-21 22:04:22 UTC (rev 6652)
@@ -2529,7 +2529,7 @@
             }
 
         if self._deletedRevs != None and self._deletedRevsModified:
-            for ts in self._deletedRevs.keys():
+            for ts in self._deletedRevs:
                 if self._deletedRevs[ts][4]:
                     formdata['ts'+ts] = '1'
@@ -3089,7 +3089,7 @@
             else:
                 output(u"WARNING: Missing namespace in family file %s: namespace['%s'][%i] (it is set to '%s')" % (self.site.family.name, lang, id, nshdr))
         for id in self.site.family.namespaces:
-            if self.site.family.isDefinedNSLanguage(id, lang) and not header.namespaces.has_key(id):
+            if self.site.family.isDefinedNSLanguage(id, lang) and id not in header.namespaces:
                 output(u"WARNING: Family file %s includes namespace['%s'][%i], but it should be removed (namespace doesn't exist in the site)" % (self.site.family.name, lang, id))
 
     def getData(self):
@@ -3236,7 +3236,7 @@
         self.checktime = time.time()
         processes[self.pid] = self.checktime
         f = open(self.logfn(), 'w')
-        for p in processes.keys():
+        for p in processes:
             f.write(str(p)+' '+str(processes[p])+'\n')
         f.close()
         self.process_multiplicity = count
@@ -3303,7 +3303,7 @@
             except (IndexError,ValueError):
                 pass    # Sometimes the file gets corrupted - ignore that line
         f = open(self.logfn(), 'w')
-        for p in processes.keys():
+        for p in processes:
             f.write(str(p)+' '+str(processes[p])+'\n')
         f.close()
@@ -6969,7 +6969,7 @@
 if os.path.isfile(COOKIEFILE):
     cj.load(COOKIEFILE)
 passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
-for site in config.authenticate.keys():
+for site in config.authenticate:
     passman.add_password(None, site, config.authenticate[site][0], config.authenticate[site][1])
 authhandler = urllib2.HTTPBasicAuthHandler(passman)
 authenticateURLopener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj),authhandler)
Modified: trunk/pywikipedia/wiktionary.py
===================================================================
--- trunk/pywikipedia/wiktionary.py	2009-04-21 18:45:28 UTC (rev 6651)
+++ trunk/pywikipedia/wiktionary.py	2009-04-21 22:04:22 UTC (rev 6652)
@@ -372,7 +372,7 @@
             if header.type==u'lang':
                 context['lang']=header.contents
             if header.type==u'pos':
-                if not(context.has_key('lang')):
+                if 'lang' not in context:
                     # This entry lacks a language indicator,
                     # so we assume it is the same language as the Wiktionary we're working on
                     context['lang']=self.wikilang
@@ -498,7 +498,7 @@
                 sample = plural = diminutive = label = definition = ''
                 examples = []
 
-                if not(self.entries.has_key(contentblock['context']['lang'])):
+                if contentblock['context']['lang'] not in self.entries:
                     # If no entry for this language has been foreseen yet
                     # let's create one
                     anentry = Entry(contentblock['context']['lang'])
@@ -525,7 +525,7 @@
         # Make sure we store the last definition
         if definition:
             ameaning = Meaning(term=theterm, definition=definition, label=label, examples=examples)
-            if not(self.entries.has_key(contentblock['context']['lang'])):
+            if contentblock['context']['lang'] not in self.entries:
                 # If no entry for this language has been foreseen yet
                 # let's create one
                 anentry = Entry(contentblock['context']['lang'])
@@ -1016,16 +1016,16 @@
         self.header = self.header.replace('{{','').replace('}}','').strip().lower()
 
         # Now we know the content of the header, let's try to find out what it means:
-        if pos.has_key(self.header):
+        if self.header in pos:
             self.type=u'pos'
             self.contents=pos[self.header]
-        if langnames.has_key(self.header):
+        if self.header in langnames:
             self.type=u'lang'
             self.contents=self.header
-        if invertedlangnames.has_key(self.header):
+        if self.header in invertedlangnames:
             self.type=u'lang'
             self.contents=invertedlangnames[self.header]
-        if otherheaders.has_key(self.header):
+        if self.header in otherheaders:
             self.type=u'other'
             self.contents=otherheaders[self.header]