Revision: 6630
Author: cosoleto
Date: 2009-04-19 08:47:15 +0000 (Sun, 19 Apr 2009)
Log Message:
-----------
Replaced 'has_key()' with 'key in dict'. has_key() is deprecated since Python 2.2, removed in Python 3.0, and performed 20-25% slower as it requires an attribute search and a function call (see also PEP 290).
Modified Paths:
--------------
trunk/pywikipedia/category.py
trunk/pywikipedia/copyright_clean.py
trunk/pywikipedia/family.py
trunk/pywikipedia/login.py
trunk/pywikipedia/rcsort.py
trunk/pywikipedia/replace.py
trunk/pywikipedia/solve_disambiguation.py
trunk/pywikipedia/warnfile.py
trunk/pywikipedia/watchlist.py
trunk/pywikipedia/wikipedia.py
trunk/pywikipedia/wikipediatools.py
Modified: trunk/pywikipedia/category.py
===================================================================
--- trunk/pywikipedia/category.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/category.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -234,7 +234,7 @@
server next time it's required.
'''
# if we already know which subcategories exist here
- if self.catContentDB.has_key(supercat):
+ if supercat in self.catContentDB:
return self.catContentDB[supercat][0]
else:
subcatlist = supercat.subcategoriesList()
@@ -250,7 +250,7 @@
server next time it's required.
'''
# if we already know which articles exist here
- if self.catContentDB.has_key(cat):
+ if cat in self.catContentDB:
return self.catContentDB[cat][1]
else:
subcatlist = cat.subcategoriesList()
@@ -261,7 +261,7 @@
def getSupercats(self, subcat):
# if we already know which subcategories exist here
- if self.superclassDB.has_key(subcat):
+ if subcat in self.superclassDB:
return self.superclassDB[subcat]
else:
supercatlist = subcat.supercategoriesList()
Modified: trunk/pywikipedia/copyright_clean.py
===================================================================
--- trunk/pywikipedia/copyright_clean.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/copyright_clean.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -89,14 +89,14 @@
for pageobjs in query_results_titles:
for key in pageobjs['query']['pages']:
if pageobjs['query']['pages'][key]['title'] == title:
- if pageobjs['query']['pages'][key].has_key('missing'):
+ if 'missing' in pageobjs['query']['pages'][key]:
wikipedia.output('* ' + title)
return False
return True
def revid_exist(revid):
for pageobjs in query_results_revids:
- if pageobjs['query'].has_key('badrevids'):
+ if 'badrevids' in pageobjs['query']:
for id in pageobjs['query']['badrevids']:
if id == int(revid):
# print rv
Modified: trunk/pywikipedia/family.py
===================================================================
--- trunk/pywikipedia/family.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/family.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -3094,7 +3094,7 @@
return self.known_families
def linktrail(self, code, fallback = '_default'):
- if self.linktrails.has_key(code):
+ if code in self.linktrails:
return self.linktrails[code]
elif fallback:
return self.linktrails[fallback]
@@ -3143,12 +3143,12 @@
def isDefinedNS(self, ns_number):
"""Return True if the namespace has been defined in this family.
"""
- return self.namespaces.has_key(ns_number)
+ return ns_number in self.namespaces
def isNsI18N(self, ns_number, code):
"""Return True if the namespace has been internationalized.
(it has a custom entry for a given language)"""
- return self.namespaces[ns_number].has_key(code)
+ return code in self.namespaces[ns_number]
def isDefinedNSLanguage(self, ns_number, code, fallback='_default'):
"""Return True if the namespace has been defined in this family
@@ -3227,7 +3227,7 @@
% code)
def disambig(self, code, fallback = '_default'):
- if self.disambiguationTemplates.has_key(code):
+ if code in self.disambiguationTemplates:
return self.disambiguationTemplates[code]
elif fallback:
return self.disambiguationTemplates[fallback]
Modified: trunk/pywikipedia/login.py
===================================================================
--- trunk/pywikipedia/login.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/login.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -99,7 +99,7 @@
Checks whether the bot is listed on a specific page to comply with
the policy on the respective wiki.
"""
- if botList.has_key(self.site.family.name) and botList[self.site.family.name].has_key(self.site.language()):
+ if self.site.family.name in botList and self.site.language() in botList[self.site.family.name]:
botListPageTitle = botList[self.site.family.name][self.site.language()]
botListPage = wikipedia.Page(self.site, botListPageTitle)
for linkedPage in botListPage.linkedPages():
Modified: trunk/pywikipedia/rcsort.py
===================================================================
--- trunk/pywikipedia/rcsort.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/rcsort.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -29,7 +29,7 @@
print "-->"
mysite = wikipedia.getSite()
-newbies = form.has_key('newbies')
+newbies = 'newbies' in form
if newbies:
post = 'title=Speciaal:Bijdragen&target=newbies'
Modified: trunk/pywikipedia/replace.py
===================================================================
--- trunk/pywikipedia/replace.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/replace.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -193,9 +193,9 @@
self.skipping = bool(xmlStart)
self.excsInside = []
- if self.exceptions.has_key('inside-tags'):
+ if 'inside-tags' in self.exceptions:
self.excsInside += self.exceptions['inside-tags']
- if self.exceptions.has_key('inside'):
+ if 'inside' in self.exceptions:
self.excsInside += self.exceptions['inside']
import xmlreader
self.site = wikipedia.getSite()
@@ -226,11 +226,11 @@
pass
def isTitleExcepted(self, title):
- if self.exceptions.has_key('title'):
+ if 'title' in self.exceptions:
for exc in self.exceptions['title']:
if exc.search(title):
return True
- if self.exceptions.has_key('require-title'):
+ if 'require-title' in self.exceptions:
for req in self.exceptions['require-title']:
if not req.search(title): # if not all requirements are met:
return True
@@ -238,7 +238,7 @@
return False
def isTextExcepted(self, text):
- if self.exceptions.has_key('text-contains'):
+ if 'text-contains' in self.exceptions:
for exc in self.exceptions['text-contains']:
if exc.search(text):
return True
@@ -305,11 +305,11 @@
"""
Iff one of the exceptions applies for the given title, returns True.
"""
- if self.exceptions.has_key('title'):
+ if 'title' in self.exceptions:
for exc in self.exceptions['title']:
if exc.search(title):
return True
- if self.exceptions.has_key('require-title'):
+ if 'require-title' in self.exceptions:
for req in self.exceptions['require-title']:
if not req.search(title):
return True
@@ -320,7 +320,7 @@
Iff one of the exceptions applies for the given page contents,
returns True.
"""
- if self.exceptions.has_key('text-contains'):
+ if 'text-contains' in self.exceptions:
for exc in self.exceptions['text-contains']:
if exc.search(original_text):
return True
@@ -333,9 +333,9 @@
"""
new_text = original_text
exceptions = []
- if self.exceptions.has_key('inside-tags'):
+ if 'inside-tags' in self.exceptions:
exceptions += self.exceptions['inside-tags']
- if self.exceptions.has_key('inside'):
+ if 'inside' in self.exceptions:
exceptions += self.exceptions['inside']
for old, new in self.replacements:
if self.sleep != None:
@@ -620,13 +620,13 @@
wikipedia.output(u'Available predefined fixes are: %s'
% fixes.fixes.keys())
return
- if fix.has_key('regex'):
+ if 'regex' in fix:
regex = fix['regex']
- if fix.has_key('msg'):
+ if 'msg' in fix:
editSummary = wikipedia.translate(wikipedia.getSite(), fix['msg'])
- if fix.has_key('exceptions'):
+ if 'exceptions' in fix:
exceptions = fix['exceptions']
- if fix.has_key('nocase'):
+ if 'nocase' in fix:
caseInsensitive = fix['nocase']
replacements = fix['replacements']
@@ -648,7 +648,7 @@
replacements[i] = oldR, new
for exceptionCategory in ['title', 'require-title', 'text-contains', 'inside']:
- if exceptions.has_key(exceptionCategory):
+ if exceptionCategory in exceptions:
patterns = exceptions[exceptionCategory]
if not regex:
patterns = [re.escape(pattern) for pattern in patterns]
Modified: trunk/pywikipedia/solve_disambiguation.py
===================================================================
--- trunk/pywikipedia/solve_disambiguation.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/solve_disambiguation.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -411,7 +411,7 @@
refs = [page for page in self.disambPage.getReferences(follow_redirects = False, withTemplateInclusion = False)]
wikipedia.output(u"Found %d references." % len(refs))
# Remove ignorables
- if ignore_title.has_key(self.disambPage.site().family.name) and ignore_title[self.disambPage.site().family.name].has_key(self.disambPage.site().lang):
+ if self.disambPage.site().family.name in ignore_title and self.disambPage.site().lang in ignore_title[self.disambPage.site().family.name]:
for ig in ignore_title[self.disambPage.site().family.name][self.disambPage.site().lang]:
for i in range(len(refs)-1, -1, -1):
if re.match(ig, refs[i].title()):
@@ -542,7 +542,7 @@
def setupRegexes(self):
# compile regular expressions
self.ignore_contents_regexes = []
- if self.ignore_contents.has_key(self.mylang):
+ if self.mylang in self.ignore_contents:
for ig in self.ignore_contents[self.mylang]:
self.ignore_contents_regexes.append(re.compile(ig))
@@ -789,7 +789,7 @@
def findAlternatives(self, disambPage):
if disambPage.isRedirectPage() and not self.primary:
- if self.primary_redir_template.has_key(disambPage.site().lang) and self.primary_redir_template[disambPage.site().lang] in disambPage.templates(get_redirect = True):
+ if disambPage.site().lang in self.primary_redir_template and self.primary_redir_template[disambPage.site().lang] in disambPage.templates(get_redirect = True):
baseTerm = disambPage.title()
for template in disambPage.templatesWithParams(get_redirect = True):
if template[0] == self.primary_redir_template[disambPage.site().lang] and len(template[1]) > 0:
@@ -860,7 +860,7 @@
targets = wikipedia.translate(self.mysite, unknown_msg)
# first check whether user has customized the edit comment
- if wikipedia.config.disambiguation_comment.has_key(self.mysite.family.name) and wikipedia.config.disambiguation_comment[self.mysite.family.name].has_key(self.mylang):
+ if self.mysite.family.name in wikipedia.config.disambiguation_comment and self.mylang in wikipedia.config.disambiguation_comment[self.mysite.family.name]:
try:
self.comment = wikipedia.translate(self.mysite,
wikipedia.config.disambiguation_comment[
@@ -886,9 +886,9 @@
def run(self):
if self.main_only:
- if not ignore_title.has_key(self.mysite.family.name):
+ if self.mysite.family.name not in ignore_title:
ignore_title[self.mysite.family.name] = {}
- if not ignore_title[self.mysite.family.name].has_key(self.mylang):
+ if self.mylang not in ignore_title[self.mysite.family.name]:
ignore_title[self.mysite.family.name][self.mylang] = []
ignore_title[self.mysite.family.name][self.mylang] += [
u'%s:' % namespace for namespace in self.mysite.namespaces()]
Modified: trunk/pywikipedia/warnfile.py
===================================================================
--- trunk/pywikipedia/warnfile.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/warnfile.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -46,11 +46,11 @@
targetSite = mysite.getSite(code = m.group('targetlang'))
targetPage = wikipedia.Page(targetSite, m.group('targettitle'))
if removing:
- if not removeHints.has_key(page):
+ if page not in removeHints:
removeHints[page]=[]
removeHints[page].append(targetPage)
else:
- if not hints.has_key(page):
+ if page not in hints:
hints[page]=[]
hints[page].append(targetPage)
except wikipedia.Error:
@@ -80,11 +80,11 @@
continue
new={}
new.update(old)
- if hints.has_key(page):
+ if page in hints:
for page2 in hints[page]:
site = page2.site()
new[site] = page2
- if removeHints.has_key(page):
+ if page in removeHints:
for page2 in removeHints[page]:
site = page2.site()
try:
Modified: trunk/pywikipedia/watchlist.py
===================================================================
--- trunk/pywikipedia/watchlist.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/watchlist.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -30,7 +30,7 @@
def get(site = None):
if site is None:
site = wikipedia.getSite()
- if cache.has_key(site):
+ if site in cache:
# Use cached copy if it exists.
watchlist = cache[site]
else:
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/wikipedia.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -2460,7 +2460,7 @@
"""
if self._deletedRevs == None:
self.loadDeletedRevisions()
- if not self._deletedRevs.has_key(timestamp):
+ if timestamp not in self._deletedRevs:
#TODO: Throw an exception instead?
return None
@@ -2485,7 +2485,7 @@
"""
if self._deletedRevs == None:
self.loadDeletedRevisions()
- if not self._deletedRevs.has_key(timestamp):
+ if timestamp not in self._deletedRevs:
#TODO: Throw an exception?
return None
self._deletedRevs[timestamp][4] = undelete
@@ -3417,7 +3417,7 @@
if isinstance(exc, str) or isinstance(exc, unicode):
# assume it's a reference to the exceptionRegexes dictionary
# defined above.
- if not exceptionRegexes.has_key(exc):
+ if exc not in exceptionRegexes:
raise ValueError("Unknown tag type: " + exc)
dontTouchRegexes.append(exceptionRegexes[exc])
else:
@@ -4104,7 +4104,7 @@
unicodeCodepoint = int(match.group('hex'), 16)
elif match.group('name'):
name = match.group('name')
- if htmlentitydefs.name2codepoint.has_key(name):
+ if name in htmlentitydefs.name2codepoint:
# We found a known HTML entity.
unicodeCodepoint = htmlentitydefs.name2codepoint[name]
result += text[:match.start()]
@@ -4337,7 +4337,7 @@
self.family = fam
# if we got an outdated language code, use the new one instead.
- if self.family.obsolete.has_key(self.lang):
+ if self.lang in self.family.obsolete:
if self.family.obsolete[self.lang] is not None:
self.lang = self.family.obsolete[self.lang]
else:
@@ -6126,7 +6126,7 @@
def interwiki_putfirst_doubled(self, list_of_links):
# TODO: is this even needed? No family in the framework has this
# dictionary defined!
- if self.family.interwiki_putfirst_doubled.has_key(self.lang):
+ if self.lang in self.family.interwiki_putfirst_doubled:
if len(list_of_links) >= self.family.interwiki_putfirst_doubled[self.lang][0]:
list_of_links2 = []
for lang in list_of_links:
@@ -6177,7 +6177,7 @@
# may or may not actually exist on the wiki), use
# self.family.namespaces.keys()
- if _namespaceCache.has_key(self):
+ if self in _namespaceCache:
return _namespaceCache[self]
else:
nslist = []
@@ -6276,7 +6276,7 @@
if fam == None:
fam = default_family
key = '%s:%s:%s:%s' % (fam, code, user, persistent_http)
- if not _sites.has_key(key):
+ if key not in _sites:
_sites[key] = Site(code=code, fam=fam, user=user,
persistent_http=persistent_http)
ret = _sites[key]
@@ -6537,18 +6537,18 @@
if hasattr(code,'lang'):
code = code.lang
- if xdict.has_key('wikipedia') :
- if xdict.has_key(default_family):
+ if 'wikipedia' in xdict:
+ if default_family in xdict:
xdict = xdict[default_family]
else:
xdict = xdict['wikipedia']
- if xdict.has_key(code):
+ if code in xdict:
return xdict[code]
for alt in altlang(code):
- if xdict.has_key(alt):
+ if alt in xdict:
return xdict[alt]
- if xdict.has_key('en'):
+ if 'en' in xdict:
return xdict['en']
return xdict.values()[0]
Modified: trunk/pywikipedia/wikipediatools.py
===================================================================
--- trunk/pywikipedia/wikipediatools.py 2009-04-18 17:11:21 UTC (rev 6629)
+++ trunk/pywikipedia/wikipediatools.py 2009-04-19 08:47:15 UTC (rev 6630)
@@ -28,7 +28,7 @@
sys.argv.remove(arg)
break
else:
- if os.environ.has_key("PYWIKIBOT_DIR"):
+ if "PYWIKIBOT_DIR" in os.environ:
base_dir = os.environ["PYWIKIBOT_DIR"]
else:
if os.path.exists('user-config.py'):
Revision: 6629
Author: shizhao
Date: 2009-04-18 17:11:21 +0000 (Sat, 18 Apr 2009)
Log Message:
-----------
update: "-count" only counts
Modified Paths:
--------------
trunk/pywikipedia/featured.py
Modified: trunk/pywikipedia/featured.py
===================================================================
--- trunk/pywikipedia/featured.py 2009-04-18 16:57:21 UTC (rev 6628)
+++ trunk/pywikipedia/featured.py 2009-04-18 17:11:21 UTC (rev 6629)
@@ -17,7 +17,7 @@
* -top : using -top if you want moving {{Link FA|lang}} to top of interwiki.
DEFAULT: placing {{Link FA|lang}} right next to corresponding interwiki.
-* -count : counts how many featured articles of an languages (using "-fromlang" argument)
+* -count : Only counts how many featured articles of an languages (using "-fromlang" argument)
or all wikipedias (using "-fromall" argument). (merge /archive/featuredcount.py) like:
featured.py -fromlang:en,he -count
(give counts how many featured articles of en and he wp)
Revision: 6624
Author: shizhao
Date: 2009-04-18 15:47:05 +0000 (Sat, 18 Apr 2009)
Log Message:
-----------
fix time
Modified Paths:
--------------
trunk/pywikipedia/weblinkchecker.py
Modified: trunk/pywikipedia/weblinkchecker.py
===================================================================
--- trunk/pywikipedia/weblinkchecker.py 2009-04-18 15:41:40 UTC (rev 6623)
+++ trunk/pywikipedia/weblinkchecker.py 2009-04-18 15:47:05 UTC (rev 6624)
@@ -607,7 +607,7 @@
# if the first time we found this link longer than a week ago,
# it should probably be fixed or removed. We'll list it in a file
# so that it can be removed manually.
- if timeSinceFirstFound > 60 * 60 * 2:
+ if timeSinceFirstFound > 60 * 60 * 24 * 7:
# search for archived page
iac = InternetArchiveConsulter(url)
archiveURL = iac.getArchiveURL()