[Pywikipedia-svn] SVN: [7336] trunk/pywikipedia

alexsh at svn.wikimedia.org alexsh at svn.wikimedia.org
Tue Sep 29 18:27:04 UTC 2009


Revision: 7336
Author:   alexsh
Date:     2009-09-29 18:27:04 +0000 (Tue, 29 Sep 2009)

Log Message:
-----------
remove uses of the deprecated dict.has_key() method

Modified Paths:
--------------
    trunk/pywikipedia/archive/mediawiki_messages.py
    trunk/pywikipedia/archive/translator.py
    trunk/pywikipedia/archivebot.py
    trunk/pywikipedia/category_redirect.py
    trunk/pywikipedia/catlib.py
    trunk/pywikipedia/censure.py
    trunk/pywikipedia/commonscat.py
    trunk/pywikipedia/cosmetic_changes.py
    trunk/pywikipedia/imagecopy.py
    trunk/pywikipedia/imagetransfer.py
    trunk/pywikipedia/maintcont.py
    trunk/pywikipedia/maintenance/family_check.py
    trunk/pywikipedia/makecat.py
    trunk/pywikipedia/misspelling.py
    trunk/pywikipedia/nowcommons.py
    trunk/pywikipedia/redirect.py
    trunk/pywikipedia/reflinks.py
    trunk/pywikipedia/revertbot.py
    trunk/pywikipedia/speedy_delete.py
    trunk/pywikipedia/splitwarning.py
    trunk/pywikipedia/standardize_notes.py
    trunk/pywikipedia/titletranslate.py
    trunk/pywikipedia/weblinkchecker.py
    trunk/pywikipedia/wikipedia.py
    trunk/pywikipedia/wiktionary/header.py
    trunk/pywikipedia/wiktionary/meaning.py
    trunk/pywikipedia/wiktionary/wiktionarypage.py
    trunk/pywikipedia/wiktionary/wiktionarypagetest.py

Modified: trunk/pywikipedia/archive/mediawiki_messages.py
===================================================================
--- trunk/pywikipedia/archive/mediawiki_messages.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/archive/mediawiki_messages.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -64,7 +64,7 @@
 
 def get(key, site = None, allowreload = True):
     site = site or wikipedia.getSite()
-    if loaded.has_key(site):
+    if site in loaded:
         # Use cached copy if it exists.
         dictionary = loaded[site]
     else:
@@ -84,7 +84,7 @@
         f.close()
         loaded[site] = dictionary
     key = key[0].lower() + key[1:]
-    if dictionary.has_key(key):
+    if key in dictionary:
         return dictionary[key]
     elif allowreload:
         refresh_messages(site = site)

Modified: trunk/pywikipedia/archive/translator.py
===================================================================
--- trunk/pywikipedia/archive/translator.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/archive/translator.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -425,33 +425,33 @@
     else:
         print_debug("\n Translating type " + type)
         # check if the translation database knows this type of table
-        if not types.has_key(type):
+        if not type in types:
             print "Unknown table type: " + type
             return
-        if types.get(type).has_key("translations"):
+        if "translations" in types.get(type):
             print_debug("\nDirect translations for type " + type + "\n")
             for item in types.get(type).get("translations"):
                 # check if the translation database includes the source language
-                if not item.has_key(from_lang):
+                if not from_lang in item:
                     print_debug(from_lang + " translation for item not found in translation table, skipping item")
                     continue
                 # if it's necessary to replace a substring
                 if string.find(text, item.get(from_lang)) > -1:
                      # check if the translation database includes the target language
-                     if not item.has_key(to_lang):
+                     if not to_lang in item:
                          print_debug("Can't translate \"" + item.get(from_lang) + "\". Please make sure that there is a translation in copy_table.py.")
                      else:
                          print_debug(item.get(from_lang) + " => " + item.get(to_lang))
                          # translate a substring
                          text = string.replace(text, item.get(from_lang), item.get(to_lang))
-        if types.get(type).has_key("regexes"):
+        if 'regexes' in types.get(type):
             # work on regular expressions
             print_debug("\nWorking on regular expressions for type " + type + "\n")
             regexes = types.get(type).get("regexes")
-            if regexes.has_key(from_lang):
+            if from_lang in regexes:
                 for item in regexes.get(from_lang):
                     # only work on regular expressions that have a replacement for the target language
-                    if regexes.get(from_lang).get(item).has_key(to_lang):
+                    if to_lang in regexes.get(from_lang).get(item):
                         replacement = regexes.get(from_lang).get(item).get(to_lang)
                         regex = re.compile(item)
                         # if the regular expression doesn't match anyway, we don't want it to print a debug message
@@ -459,7 +459,7 @@
                             print_debug(item + " => " + replacement)
                             text = re.sub(regex, replacement, text)
         # recursively use translation lists which are included in the current list
-        if types.get(type).has_key("includes"):
+        if "includes" in types.get(type):
             for inc in types.get(type).get("includes"):
                 text = translate(text, inc, from_lang, debug_mode, to_lang)
         return text

Modified: trunk/pywikipedia/archivebot.py
===================================================================
--- trunk/pywikipedia/archivebot.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/archivebot.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -125,7 +125,7 @@
 }
 
 def message(key, lang=Site.language()):
-    if not messages.has_key(lang):
+    if not lang in messages:
         lang = '_default'
     return messages[lang][key]
 
@@ -222,7 +222,7 @@
     for page_d in result['query']['embeddedin']:
         yield wikipedia.Page(Site, page_d['title'])
     
-    if result.has_key('query-continue'):
+    if 'query-continue' in result:
         eicontinue = result['query-continue']['embeddedin']['eicontinue']
         for page in generateTransclusions(Site, template, namespaces, eicontinue):
             yield page
@@ -458,7 +458,7 @@
             return
         if not self.force and not self.Page.title+'/' == archive[:len(self.Page.title)+1] and not self.key_ok():
             raise ArchiveSecurityError
-        if not self.archives.has_key(archive):
+        if not archive in self.archives:
             self.archives[archive] = DiscussionPage(archive,self,vars)
         return self.archives[archive].feedThread(thread,maxArchiveSize)
 

Modified: trunk/pywikipedia/category_redirect.py
===================================================================
--- trunk/pywikipedia/category_redirect.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/category_redirect.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -303,8 +303,7 @@
         querydata = {'action': 'query',
                      'maxlag': str(wikipedia.config.maxlag)}
         querydata = query.CombineParams(querydata, data)
-        if not querydata.has_key("action")\
-                or not querydata['action'] == 'query':
+        if not "action" in querydata or not querydata['action'] == 'query':
             raise ValueError(
                 "query_results: 'action' set to value other than 'query'"
                 )
@@ -330,7 +329,7 @@
                 wikipedia.output(u"Invalid API response received; retrying...")
                 time.sleep(5)
                 continue
-            if type(result) is dict and result.has_key("error"):
+            if type(result) is dict and "error" in result:
                 if result['error']['code'] == "maxlag":
                     print "Pausing due to server lag.\r",
                     time.sleep(5)
@@ -353,7 +352,7 @@
                 # query returned no results
                 return
             yield result['query']
-            if result.has_key("query-continue"):
+            if 'query-continue' in result:
                 assert len(result['query-continue'].keys()) == 1, \
                        "More than one query-continue key returned: %s" \
                        % result['query-continue'].keys()

Modified: trunk/pywikipedia/catlib.py
===================================================================
--- trunk/pywikipedia/catlib.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/catlib.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -228,7 +228,7 @@
                 else:
                     yield ARTICLE, wikipedia.Page(self.site(), memb['title'])
             # try to find a link to the next list page
-            if data.has_key('query-continue'):
+            if 'query-continue' in data:
                 currentPageOffset = data['query-continue']['categorymembers']['cmcontinue']
             else:
                 break

Modified: trunk/pywikipedia/censure.py
===================================================================
--- trunk/pywikipedia/censure.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/censure.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -32,7 +32,7 @@
     }
 
 site = wikipedia.getSite()
-if not badWordList.has_key(site.language() + '.' + site.family.name) or not logPages.has_key(site.language() + '.' + site.family.name):
+if not (site.language() + '.' + site.family.name) in badWordList or not (site.language() + '.' + site.family.name) in logPages:
     wikipedia.output('Error: your language isn\'t supported, see the source code for further details')
     sys.exit(1)
 ownWordPage = wikipedia.Page(site, badWordList[site.language() + '.' + site.family.name])

Modified: trunk/pywikipedia/commonscat.py
===================================================================
--- trunk/pywikipedia/commonscat.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/commonscat.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -177,7 +177,7 @@
     '''
     Get the template name in a language. Expects the language code, returns the translation.
     '''
-    if commonscatTemplates.has_key(lang):
+    if lang in commonscatTemplates:
         return commonscatTemplates[lang]
     else:
         return u'Commonscat'
@@ -186,7 +186,7 @@
     '''
     Do we want to skip this page?
     '''
-    if ignoreTemplates.has_key(page.site().language()):
+    if page.site().language() in ignoreTemplates:
         templatesInThePage = page.templates()
         templatesWithParams = page.templatesWithParams()
         for template in ignoreTemplates[page.site().language()]:

Modified: trunk/pywikipedia/cosmetic_changes.py
===================================================================
--- trunk/pywikipedia/cosmetic_changes.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/cosmetic_changes.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -417,7 +417,7 @@
         return text
 
     def removeDeprecatedTemplates(self, text):
-        if deprecatedTemplates.has_key(self.site.family.name) and deprecatedTemplates[self.site.family.name].has_key(self.site.lang):
+        if self.site.family.name in deprecatedTemplates and self.site.lang in deprecatedTemplates[self.site.family.name]:
             for template in deprecatedTemplates[self.site.family.name][self.site.lang]:
                 if not self.site.nocapitalize:
                     template = '[' + template[0].upper() + template[0].lower() + ']' + template[1:]

Modified: trunk/pywikipedia/imagecopy.py
===================================================================
--- trunk/pywikipedia/imagecopy.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/imagecopy.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -285,17 +285,17 @@
             imtxt=self.imagePage.get(force=True)
 
             #Remove the move to commons templates
-            if moveToCommonsTemplate.has_key(self.imagePage.site().language()):
+            if self.imagePage.site().language() in moveToCommonsTemplate:
                 for moveTemplate in moveToCommonsTemplate[self.imagePage.site().language()]:
                     imtxt = re.sub(u'(?i)\{\{' + moveTemplate + u'\}\}', u'', imtxt)
 
             #add {{NowCommons}}
-            if nowCommonsTemplate.has_key(self.imagePage.site().language()):
+            if self.imagePage.site().language() in nowCommonsTemplate:
                 addTemplate = nowCommonsTemplate[self.imagePage.site().language()] % self.newname
             else:
                 addTemplate = nowCommonsTemplate['_default'] % self.newname
 
-            if nowCommonsMessage.has_key(self.imagePage.site().language()):
+            if self.imagePage.site().language() in nowCommonsMessage:
                 commentText = nowCommonsMessage[self.imagePage.site().language()]
             else:
                 commentText = nowCommonsMessage['_default']
@@ -308,7 +308,7 @@
 
             #If the image is uploaded under a different name, replace all instances
             if self.imagePage.titleWithoutNamespace() != self.newname:
-                if imageMoveMessage.has_key(self.imagePage.site().language()):
+                if self.imagePage.site().language() in imageMoveMessage:
                     moveSummary = imageMoveMessage[self.imagePage.site().language()] % (self.imagePage.titleWithoutNamespace(), self.newname)
                 else:
                     moveSummary = imageMoveMessage['_default'] % (self.imagePage.titleWithoutNamespace(), self.newname)

Modified: trunk/pywikipedia/imagetransfer.py
===================================================================
--- trunk/pywikipedia/imagetransfer.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/imagetransfer.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -156,7 +156,7 @@
         try:
             description = sourceImagePage.get()
             # try to translate license templates
-            if licenseTemplates.has_key((sourceSite.sitename(), self.targetSite.sitename())):
+            if (sourceSite.sitename(), self.targetSite.sitename()) in licenseTemplates:
                 for old, new in licenseTemplates[(sourceSite.sitename(), self.targetSite.sitename())].iteritems():
                     new = '{{%s}}' % new
                     old = re.compile('{{%s}}' % old)
@@ -181,10 +181,10 @@
                 # upload to Commons was successful
                 reason = wikipedia.translate(sourceSite, nowCommonsMessage)
                 # try to delete the original image if we have a sysop account
-                if config.sysopnames.has_key(sourceSite.family.name) and config.sysopnames[sourceSite.family.name].has_key(sourceSite.lang):
+                if sourceSite.family.name in config.sysopnames and sourceSite.lang in config.sysopnames[sourceSite.family.name]:
                     if sourceImagePage.delete(reason):
                         return
-                if nowCommonsTemplate.has_key(sourceSite.lang) and config.usernames.has_key(sourceSite.family.name) and config.usernames[sourceSite.family.name].has_key(sourceSite.lang):
+                if sourceSite.lang in nowCommonsTemplate and sourceSite.family.name in config.usernames and sourceSite.lang in config.usernames[sourceSite.family.name]:
                     # add the nowCommons template.
                     wikipedia.output(u'Adding nowCommons template to %s' % sourceImagePage.title())
                     sourceImagePage.put(sourceImagePage.get() + '\n\n' + nowCommonsTemplate[sourceSite.lang] % targetFilename, comment = nowCommonsMessage[sourceSite.lang])

Modified: trunk/pywikipedia/maintcont.py
===================================================================
--- trunk/pywikipedia/maintcont.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/maintcont.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -79,7 +79,7 @@
                 ingroup += 1
         if ingroup == 0:
             return
-        if projtasks.has_key(group):
+        if group in projtasks:
             grt = projtasks[group]
         else:
             grt = tasks

Modified: trunk/pywikipedia/maintenance/family_check.py
===================================================================
--- trunk/pywikipedia/maintenance/family_check.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/maintenance/family_check.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -39,7 +39,7 @@
     wikipedia.output(u'Checking namespaces for %s' % family.name)
     result = {}
     for lang in family.langs:
-        if not family.obsolete.has_key(lang):
+        if not lang in family.obsolete:
             site = wikipedia.getSite(lang, family)
             wikipedia.output(u'Checking %s' % site)
             namespaces = check_namespaces(site)

Modified: trunk/pywikipedia/makecat.py
===================================================================
--- trunk/pywikipedia/makecat.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/makecat.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -71,7 +71,7 @@
     if main:
         if pl.namespace() != 0:
             return False
-    if checked.has_key(pl):
+    if pl in checked:
         return False
     if skipdates:
         if isdate(pl.title()):

Modified: trunk/pywikipedia/misspelling.py
===================================================================
--- trunk/pywikipedia/misspelling.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/misspelling.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -67,7 +67,7 @@
         solve_disambiguation.DisambiguationRobot.__init__(self, always, [], True, self.createPageGenerator(firstPageTitle), False, main_only)
 
     def createPageGenerator(self, firstPageTitle):
-        if self.misspellingCategory.has_key(wikipedia.getSite().lang):
+        if wikipedia.getSite().lang in self.misspellingCategory:
             misspellingCategoryTitle = self.misspellingCategory[wikipedia.getSite().lang]
             misspellingCategory = catlib.Category(wikipedia.getSite(), misspellingCategoryTitle)
             generator = pagegenerators.CategorizedPageGenerator(misspellingCategory, recurse = True, start = firstPageTitle)

Modified: trunk/pywikipedia/nowcommons.py
===================================================================
--- trunk/pywikipedia/nowcommons.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/nowcommons.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -189,7 +189,7 @@
         self.nowCommonsTemplate = wikipedia.Page(self.site, 'Template:' + ncList[0])
 
     def ncTemplates(self):
-        if nowCommons.has_key(self.site.lang):
+        if self.site.lang in nowCommons:
             return nowCommons[self.site.lang]
         else:
             return nowCommons['_default']

Modified: trunk/pywikipedia/redirect.py
===================================================================
--- trunk/pywikipedia/redirect.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/redirect.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -491,7 +491,7 @@
                 num += 1
                 # check if the value - that is, the redirect target - is a
                 # redirect as well
-                if num > self.offset and redict.has_key(value):
+                if num > self.offset and value in redict:
                     yield key
                     wikipedia.output(u'\nChecking redirect %i of %i...'
                                      % (num + 1, len(redict)))
@@ -602,7 +602,7 @@
                         try:
                             redir_page.delete(reason, prompt = False)
                         except wikipedia.NoUsername:
-                            if sd_template.has_key(targetPage.site().lang) and sd_tagging_sum.has_key(targetPage.site().lang):
+                            if targetPage.site().lang in sd_template and targetPage.site().lang in sd_tagging_sum:
                                 wikipedia.output("No sysop in user-config.py, put page to speedy deletion.")
                                 content = redir_page.get(get_redirect=True)
                                 content = wikipedia.translate(targetPage.site().lang,sd_template)+"\n"+content
@@ -697,8 +697,8 @@
                                           targetPage.site(),
                                           targetPage.sectionFreeTitle()
                                       ).get(get_redirect=True)
-                        if sd_template.has_key(targetPage.site().lang) \
-                                and sd_tagging_sum.has_key(targetPage.site().lang):
+                        if targetPage.site().lang in sd_template \
+                                and targetPage.site().lang in sd_tagging_sum:
                             wikipedia.output(u"Tagging redirect for deletion")
                             # Delete the two redirects
                             content = wikipedia.translate(targetPage.site().lang,

Modified: trunk/pywikipedia/reflinks.py
===================================================================
--- trunk/pywikipedia/reflinks.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/reflinks.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -264,11 +264,11 @@
 
             params = match.group('params')
             group = self.GROUPS.match(params)
-            if not foundRefs.has_key(group):
+            if not group in foundRefs:
                 foundRefs[group] = {}
 
             groupdict = foundRefs[group]
-            if groupdict.has_key(content):
+            if content in groupdict:
                 v = groupdict[content]
                 v[1].append(match.group())
             else:
@@ -285,7 +285,7 @@
 
                     if name == 'population':
                         wikipedia.output(content)
-                    if not foundRefNames.has_key(name):
+                    if not name in foundRefNames:
                         # first time ever we meet this name
                         if name == 'population':
                             print "in"
@@ -299,7 +299,7 @@
             groupdict[content] = v
 
         id = 1
-        while foundRefNames.has_key(self.autogen + str(id)):
+        while self.autogen + str(id) in foundRefNames:
             id += 1
         for (g, d) in foundRefs.iteritems():
             if g:

Modified: trunk/pywikipedia/revertbot.py
===================================================================
--- trunk/pywikipedia/revertbot.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/revertbot.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -36,9 +36,9 @@
             except StopIteration:
                 self.log(u'Fetching new batch of contributions')
                 response, data = query.GetData(predata, self.site, back_response = True)
-                if data.has_key('error'):
+                if 'error' in data:
                     raise RuntimeError(data['error'])
-                if data.has_key('query-continue'):
+                if 'query-continue' in data:
                     predata['uccontinue'] = data['query-continue']['usercontribs']
                 else:
                     never_continue = True
@@ -80,7 +80,7 @@
         }
         response, data = query.GetData(predata, self.site, back_response = True)
 
-        if data.has_key('error'):
+        if 'error' in data:
             raise RuntimeError(data['error'])
 
         pages = data['query'].get('pages', ())

Modified: trunk/pywikipedia/speedy_delete.py
===================================================================
--- trunk/pywikipedia/speedy_delete.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/speedy_delete.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -463,7 +463,7 @@
             reasons = wikipedia.translate(self.mySite, self.deletion_messages)
 
             for templateName in templateNames:
-                if reasons.has_key(templateName):
+                if templateName in reasons:
                     if type(reasons[templateName]) is not unicode:
                         #Make alias to delete_reasons
                         reason = wikipedia.translate(self.mySite, self.delete_reasons)[reasons[templateName]]
@@ -481,15 +481,15 @@
 
         # We don't use wikipedia.translate() here because for some languages the
         # entry is intentionally left out.
-        if self.delete_reasons.has_key(self.mySite.family.name):
-            if self.delete_reasons[self.mySite.family.name].has_key(page.site().lang):
+        if self.mySite.family.name in self.delete_reasons:
+            if page.site().lang in self.delete_reasons[self.mySite.family.name]:
                 localReasons = wikipedia.translate(page.site().lang, self.delete_reasons)
                 wikipedia.output(u'')
                 for key, reason in     localReasons.iteritems():
                     wikipedia.output((key + ':').ljust(8) + reason)
                 wikipedia.output(u'')
                 reason = wikipedia.input(u'Please enter the reason for deletion, choose a default reason, or press enter for the suggested message:')
-                if localReasons.has_key(reason.strip()):
+                if reason.strip() in localReasons:
                     reason = localReasons[reason]
             else:
                 reason = wikipedia.input(u'Please enter the reason for deletion, or press enter for the suggested message:')

Modified: trunk/pywikipedia/splitwarning.py
===================================================================
--- trunk/pywikipedia/splitwarning.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/splitwarning.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -26,7 +26,7 @@
         family = m.group('family')
         code = m.group('code')
         if code in wikipedia.getSite().languages():
-            if not files.has_key(code):
+            if not code in files:
                 files[code] = codecs.open(
                                   wikipedia.config.datafilepath('logs',
                                          'warning-%s-%s.log' % (family, code)),

Modified: trunk/pywikipedia/standardize_notes.py
===================================================================
--- trunk/pywikipedia/standardize_notes.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/standardize_notes.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -542,7 +542,7 @@
                     if m.group('reftype').lower() in ( 'ref', 'ref_num', 'ref_label' ):    # confirm ref
                         refkey = m.group('refname').strip()
                         if refkey != '':
-                            if refusage.has_key( refkey ):
+                            if refkey in refusage:
                                 # wikipedia.output( u'refusage[%s] = %s' % (refkey,refusage[refkey]) )
                                 if refusage[refkey][2] == 0:    # if first use of reference
                                     text_line=text_line[:m.start(0)] + '{{ref|%s}}' % (refkey) + text_line[m.end(0):]
@@ -783,7 +783,7 @@
                 if m.group('reftype').lower() in ( 'ref', 'ref_num', 'ref_label' ):    # confirm ref
                     refkey = m.group('refname').strip()
                     if refkey != '':
-                        if refusage.has_key(refkey):
+                        if refkey in refusage:
                             refusage[refkey][1] += 1    # duplicate use of reference
                             duplicatefound = True
                         else:
@@ -1100,11 +1100,11 @@
         except KeyError:
             wikipedia.output(u'Available predefined fixes are: %s' % fixes.keys())
             return
-        if fix.has_key('regex'):
+        if 'regex' in fix:
             regex = fix['regex']
-        if fix.has_key('msg'):
+        if 'msg' in fix:
             editSummary = wikipedia.translate(wikipedia.getSite(), fix['msg'])
-        if fix.has_key('exceptions'):
+        if 'exceptions' in fix:
             exceptions = fix['exceptions']
         replacements = fix['replacements']
 

Modified: trunk/pywikipedia/titletranslate.py
===================================================================
--- trunk/pywikipedia/titletranslate.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/titletranslate.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -65,13 +65,13 @@
         # search inside all dictionaries for this link
         dictName, value = date.getAutoFormat( page.site().language(), page.title() )
         if dictName:
-            if not (dictName == 'yearsBC' and date.maxyearBC.has_key(page.site().language()) and value > date.maxyearBC[page.site().language()]) or (dictName == 'yearsAD' and date.maxyearAD.has_key(page.site().language()) and value > date.maxyearAD[page.site().language()]):
+            if not (dictName == 'yearsBC' and page.site().language() in date.maxyearBC and value > date.maxyearBC[page.site().language()]) or (dictName == 'yearsAD' and page.site().language() in date.maxyearAD and value > date.maxyearAD[page.site().language()]):
                 wikipedia.output(u'TitleTranslate: %s was recognized as %s with value %d' % (page.title(),dictName,value))
                 for entryLang, entry in date.formats[dictName].iteritems():
                     if entryLang != page.site().language():
-                        if dictName == 'yearsBC' and date.maxyearBC.has_key(entryLang) and value > date.maxyearBC[entryLang]:
+                        if dictName == 'yearsBC' and entryLang in date.maxyearBC and value > date.maxyearBC[entryLang]:
                             pass
-                        elif dictName == 'yearsAD' and date.maxyearAD.has_key(entryLang) and value > date.maxyearAD[entryLang]:
+                        elif dictName == 'yearsAD' and entryLang in date.maxyearAD and value > date.maxyearAD[entryLang]:
                             pass
             else:
                             newname = entry(value)

Modified: trunk/pywikipedia/weblinkchecker.py
===================================================================
--- trunk/pywikipedia/weblinkchecker.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/weblinkchecker.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -599,7 +599,7 @@
         """
         self.semaphore.acquire()
         now = time.time()
-        if self.historyDict.has_key(url):
+        if url in self.historyDict:
             timeSinceFirstFound = now - self.historyDict[url][0][1]
             timeSinceLastFound= now - self.historyDict[url][-1][1]
             # if the last time we found this dead link is less than an hour
@@ -623,7 +623,7 @@
         If the link was previously found dead, removes it from the .dat file
         and returns True, else returns False.
         """
-        if self.historyDict.has_key(url):
+        if url in self.historyDict:
             self.semaphore.acquire()
             try:
                 del self.historyDict[url]

Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/wikipedia.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -920,11 +920,11 @@
             'titles'    :self.title(),
             }
         data = query.GetData(params, self.site(), encodeTitle = False)['query']['pages'].values()[0]
-        if data.has_key('redirect'):
+        if 'redirect' in data:
             raise IsRedirectPage
-        elif data.has_key('missing'):
+        elif 'missing' in data:
             raise NoPage
-        elif data.has_key('lastrevid'):
+        elif 'lastrevid' in data:
             return data['lastrevid'] # if ok, return the last revid
         else:
             # should not exists, OR we have problems.
@@ -960,7 +960,7 @@
                                     for tmp in data['query']['pages'][pageid].values()[0] ])
             except TypeError:
                 pass
-            if data.has_key('query-continue'):
+            if 'query-continue' in data:
                 params["tlcontinue"] = data["query-continue"]["templates"]["tlcontinue"]
             else:
                 break
@@ -1202,7 +1202,7 @@
                                  convertEntities=BeautifulSoup.HTML_ENTITIES,
                                  parseOnlyThese=content)
             next_text = body.find(text=nextpattern)
-            if next_text is not None and next_text.parent.has_key('href'):
+            if next_text is not None and next_text.parent.has_key('href'):
                path = next_text.parent['href'].replace("&amp;", "&")
             else:
                 path = ""
@@ -1336,10 +1336,10 @@
         text = query.GetData(predata, self.site())['query']['pages']
 
         for pageid in text:
-            if text[pageid].has_key('missing'):
+            if 'missing' in text[pageid]:
                 self._getexception = NoPage
                 raise NoPage('Page %s does not exist' % self.aslink())
-            elif not text[pageid].has_key('pageid'):
+            elif not 'pageid' in text[pageid]:
                 # Don't know what may happen here.
                 # We may want to have better error handling
                 raise Error("BUG> API problem.")
@@ -1579,7 +1579,7 @@
                 if retry_delay > 30:
                     retry_delay = 30
                 continue
-            if data.has_key('error'):
+            if 'error' in data:
                 #All available error key in edit mode: (from ApiBase.php)
                 # 'noimageredirect-anon':"Anonymous users can't create image redirects",
                 # 'noimageredirect':"You don't have permission to create image redirects",
@@ -1599,7 +1599,7 @@
                     output("error occured, code:%s\ninfo:%s\nstatus:%s\nresponse:%s" % (
                         data['error']['code'], data['error']['info'], response.status, response.reason))
                     faked = params
-                    if faked.has_key('text'):
+                    if 'text' in faked:
                         del faked['text']
                     output("OriginalData:%s" % faked)
                     del faked
@@ -2627,7 +2627,7 @@
                     'reason': reason,
                 }
                 datas = query.GetData(params, self.site(), sysop = True)
-                if datas.has_key('delete'):
+                if 'delete' in datas:
                     output(u'Page %s deleted' % self.aslink(forceInterwiki = True))
                     return True
                 else:
@@ -4772,7 +4772,7 @@
                 data = query.GetData(params, self)['userinfo']
             else:
                 data = query.GetData(params, self)['query']['userinfo']
-            return data.has_key('blockby')
+            return 'blockedby' in data
         except NotImplementedError:
             return False
 
@@ -4870,9 +4870,9 @@
 
     def solveCaptcha(self, data):
         if type(data) == dict: # API Mode result
-            if data.has_key('edit') and  data['edit']['result'] != u"Success":
+            if 'edit' in data and  data['edit']['result'] != u"Success":
                 data = data['edit']
-            if data.has_key("captcha"):
+            if "captcha" in data:
                 data = data['captcha']
                 captype = data['type']
                 id = data['id']
@@ -5151,17 +5151,17 @@
 
         if type(text) == dict: #text is dict, query from API
             # Check for blocks
-            if text.has_key('blockedby') and not self._isBlocked[index]:
+            if 'blockedby' in text and not self._isBlocked[index]:
                 # Write a warning if not shown earlier
                 if sysop:
                     account = 'Your sysop account'
                 else:
                     account = 'Your account'
                 output(u'WARNING: %s on %s is blocked. Editing using this account will stop the run.' % (account, self))
-            self._isBlocked[index] = text.has_key('blockedby')
+            self._isBlocked[index] = 'blockedby' in text
 
             # Check for new messages, the data must had key 'messages' in dict.
-            if text.has_key('messages'):
+            if 'messages' in text:
                 if not self._messages[index]:
                     # User has *new* messages
                     if sysop:
@@ -5179,7 +5179,7 @@
             # Get username.
             # The data in anonymous mode had key 'anon'
             # if 'anon' exist, username is IP address, not to collect it right now
-            if not text.has_key('anon'):
+            if not 'anon' in text:
                 self._isLoggedIn[index] = True
                 self._userName[index] = text['name']
             else:
@@ -5187,7 +5187,7 @@
                 self._userName[index] = None
 
             # Get user groups and rights
-            if text.has_key('groups'):
+            if 'groups' in text:
                 self._rights[index] = text['groups']
                 self._rights[index].extend(text['rights'])
                 # Warnings
@@ -5219,7 +5219,7 @@
             self._rights[index] = list(set(self._rights[index]))
 
             # Get token
-            if text.has_key('preferencestoken'):
+            if 'preferencestoken' in text:
                 self._token[index] = text['preferencestoken']
                 if self._rights[index] is not None:
                     # Token and rights are loaded - user data is now loaded
@@ -5233,7 +5233,7 @@
                     'intoken': 'edit',
                 }
                 data = query.GetData(params, self, sysop=sysop)['query']['pages'].values()[0]
-                if data.has_key('edittoken'):
+                if 'edittoken' in data:
                     self._token[index] = data['edittoken']
                     self._userData[index] = True
                 else:
@@ -5410,7 +5410,7 @@
                     try:
                         datas = query.GetData(params, self)['query']['allmessages']
                         self._mediawiki_messages = _dict([(tag['name'].lower(), tag['*'])
-                                for tag in datas if not tag.has_key('missing')])
+                                for tag in datas if not 'missing' in tag])
                     except NotImplementedError:
                         api = False
                         continue
@@ -5473,7 +5473,7 @@
 
     def has_mediawiki_message(self, key):
         """Return True iff this site defines a MediaWiki message for 'key'."""
-        #return self._mediawiki_messages.has_key(key)
+        #return key in self._mediawiki_messages
         try:
             v = self.mediawiki_message(key)
             return True
@@ -5839,7 +5839,7 @@
 
             for imageData in imagesData:
                 comment = ''
-                if imageData.has_key('comment'):
+                if 'comment' in imageData:
                     comment = imageData['comment']
                 pageid = imageData['pageid']
                 title = imageData['title']
@@ -6115,7 +6115,7 @@
                 #count += 1
                 yield Page(self, p['title'])
 
-            if data.has_key('query-continue'):
+            if 'query-continue' in data:
                 params['apfrom'] = data['query-continue']['allpages']['apfrom']
             else:
                 break
@@ -6314,7 +6314,7 @@
                             else:
                                 cache.append(pages['title'])
                                 yield Page(self, pages['title'])
-                    if data.has_key(u'query-continue'):
+                    if 'query-continue' in data:
                             params['euoffset'] = data[u'query-continue'][u'exturlusage'][u'euoffset']
                     else:
                             break

Modified: trunk/pywikipedia/wiktionary/header.py
===================================================================
--- trunk/pywikipedia/wiktionary/header.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/wiktionary/header.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -49,16 +49,16 @@
         self.header = self.header.replace('{{','').replace('}}','').strip().lower()
 
         # Now we know the content of the header, let's try to find out what it means:
-        if pos.has_key(self.header):
+        if self.header in pos:
             self.type=u'pos'
             self.contents=pos[self.header]
-        if langnames.has_key(self.header):
+        if self.header in langnames:
             self.type=u'lang'
             self.contents=self.header
-        if invertedlangnames.has_key(self.header):
+        if self.header in invertedlangnames:
             self.type=u'lang'
             self.contents=invertedlangnames[self.header]
-        if otherheaders.has_key(self.header):
+        if self.header in otherheaders:
             self.type=u'other'
             self.contents=otherheaders[self.header]
 

Modified: trunk/pywikipedia/wiktionary/meaning.py
===================================================================
--- trunk/pywikipedia/wiktionary/meaning.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/wiktionary/meaning.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -104,9 +104,9 @@
             trans = translationswikiline[colon+1:]
             # Look up lang and convert to an ISO abbreviation
             isolang=''
-            if structs.langnames.has_key(lang):
+            if lang in structs.langnames:
                 isolang=lang
-            elif structs.invertedlangnames.has_key(lang):
+            elif lang in structs.invertedlangnames:
                 isolang=structs.invertedlangnames[lang]
 
             # We need to prepare the line a bit to make it more easily parseable

Modified: trunk/pywikipedia/wiktionary/wiktionarypage.py
===================================================================
--- trunk/pywikipedia/wiktionary/wiktionarypage.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/wiktionary/wiktionarypage.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -149,7 +149,7 @@
                 if aheader.type==u'lang':
                     context['lang']=aheader.contents
                 if aheader.type==u'pos':
-                    if not(context.has_key('lang')):
+                    if not 'lang' in context:
                         # This entry lacks a language indicator,
                         # so we assume it is the same language as the Wiktionary we're working on
                         context['lang']=self.wikilang
@@ -285,7 +285,7 @@
                             sample = plural = diminutive = label = definition = ''
                             examples = []
 
-                            if not(self.entries.has_key(contentblock['context']['lang'])):
+                            if not contentblock['context']['lang'] in self.entries:
                                 # If no entry for this language has been foreseen yet
                                 # let's create one
                                 anentry = entry.Entry(contentblock['context']['lang'])
@@ -312,7 +312,7 @@
             # Make sure we store the last definition
             if definition:
                 ameaning = meaning.Meaning(term=theterm, definition=definition, label=label, examples=examples)
-                if not(self.entries.has_key(contentblock['context']['lang'])):
+                if not contentblock['context']['lang'] in self.entries:
                     # If no entry for this language has been foreseen yet
                     # let's create one
                     anentry = entry.Entry(contentblock['context']['lang'])

Modified: trunk/pywikipedia/wiktionary/wiktionarypagetest.py
===================================================================
--- trunk/pywikipedia/wiktionary/wiktionarypagetest.py	2009-09-29 17:58:52 UTC (rev 7335)
+++ trunk/pywikipedia/wiktionary/wiktionarypagetest.py	2009-09-29 18:27:04 UTC (rev 7336)
@@ -458,7 +458,7 @@
                 definitions=internalrepresentation[entrylang][3]
                 reftrans={}
                 for definition in definitions:
-                    if definition.has_key('trans') and definition['trans']!='':
+                    if 'trans' in definition and definition['trans']!='':
                         reftrans[definition['concisedef']] = definition['trans']
 
                 resulttrans={}
@@ -470,7 +470,7 @@
                         resulttrans[resultmeaning.concisedef] = resultmeaning.getTranslations()
 
                 for concisedef in resulttrans.keys():
-                    if concisedef!='' and reftrans.has_key(concisedef) and resulttrans.has_key(concisedef):
+                    if concisedef != '' and concisedef in reftrans and concisedef in resulttrans:
                         print concisedef
                         print resulttrans[concisedef]
 #                        raw_input()





More information about the Pywikipedia-svn mailing list