jenkins-bot has submitted this change and it was merged.
Change subject: [IMPROV] Partial Python 3 compatibility
......................................................................
[IMPROV] Partial Python 3 compatibility
This makes the code partially compatible with Python 3:
- using values(), items() and keys() instead of the iter* counterparts
- not testing the iter* counterparts in Python 3
- print is used with parentheses
- in api.py it prevents comparisons of None > int (but there might be others)
- 'raise X(Y)' instead of 'raise (X, Y)'
- 'except X as Y' instead of 'except X, Y'
- uses next(X) instead of X.next()
- if a class adds a next() method, it also gets an __next__ alias
- filter/map are usually replaced with a list or generator expression
- removes the usage of 'ur' as a string prefix
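The idioms listed above can be seen together in a small illustrative sketch (an editor's example, not part of the patch; the Counter class and the sample data are made up) that runs unchanged under Python 2 and Python 3:

from __future__ import print_function

# values()/items()/keys() instead of the iter* counterparts
namespaces = {0: 'main', 1: 'talk'}
for number, name in namespaces.items():
    print(number, name)                    # print as a function


class Counter(object):

    """Hypothetical iterator counting up to a limit."""

    def __init__(self, limit):
        self.current, self.limit = 0, limit

    def __iter__(self):
        return self

    def next(self):
        if self.current >= self.limit:
            raise StopIteration()          # 'raise X(Y)', not 'raise (X, Y)'
        self.current += 1
        return self.current

    __next__ = next                        # alias so Python 3 finds it


try:
    print(next(iter(Counter(0))))          # next(X) instead of X.next()
except StopIteration as error:             # 'except X as Y', not 'except X, Y'
    print('iterator exhausted: %r' % error)

# a list comprehension instead of filter()/map()
odd_squares = [i * i for i in range(10) if i % 2]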
Change-Id: I80de55ad5d22abd6c0e89d86e3c892b89253a2a4
---
M generate_user_files.py
M pywikibot/bot.py
M pywikibot/data/api.py
M pywikibot/data/wikidataquery.py
M pywikibot/interwiki_graph.py
M pywikibot/page.py
M pywikibot/site.py
M pywikibot/tools.py
M pywikibot/userinterfaces/gui.py
M pywikibot/userinterfaces/terminal_interface_base.py
M scripts/blockreview.py
M scripts/casechecker.py
M scripts/catall.py
M scripts/category_redirect.py
M scripts/checkimages.py
M scripts/claimit.py
M scripts/commons_link.py
M scripts/data_ingestion.py
M scripts/freebasemappingupload.py
M scripts/imagetransfer.py
M scripts/lonelypages.py
M scripts/maintenance/make_i18n_dict.py
M scripts/maintenance/wikimedia_sites.py
M scripts/redirect.py
M scripts/reflinks.py
M scripts/replace.py
M scripts/replicate_wiki.py
M scripts/revertbot.py
M scripts/selflink.py
M scripts/solve_disambiguation.py
M scripts/weblinkchecker.py
M scripts/welcome.py
M tests/page_tests.py
M tests/site_tests.py
34 files changed, 136 insertions(+), 127 deletions(-)
Approvals:
  John Vandenberg: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/generate_user_files.py b/generate_user_files.py index 9cf7aeb..5366687 100644 --- a/generate_user_files.py +++ b/generate_user_files.py @@ -299,8 +299,8 @@ def create_user_fixes(): _fnf = os.path.join(base_dir, "user-fixes.py") if not file_exists(_fnf): - f = codecs.open(_fnf, "w", "utf-8") - f.write(ur"""# -*- coding: utf-8 -*- + with codecs.open(_fnf, "w", "utf-8") as f: + f.write(r"""# -*- coding: utf-8 -*-
# # This is only an example. Don't use it. @@ -312,12 +312,11 @@ '_default':u'no summary specified', }, 'replacements': [ - (ur'\bword\b', u'two words'), + (r'\bword\b', u'two words'), ] }
""") - f.close() print(u"'%s' written." % _fnf)
if __name__ == "__main__": diff --git a/pywikibot/bot.py b/pywikibot/bot.py index 2417922..58dbc06 100644 --- a/pywikibot/bot.py +++ b/pywikibot/bot.py @@ -469,7 +469,7 @@ ... or alternatively: ... - except Exception, e: + except Exception as e: pywikibot.exception(e) ... """ @@ -932,7 +932,7 @@ """ page = pywikibot.Page(self.repo, u'List of wikis/python', ns=4) self.source_values = json.loads(page.get()) - for family_code, family in self.source_values.iteritems(): + for family_code, family in self.source_values.items(): for source_lang in family: self.source_values[family_code][source_lang] = pywikibot.ItemPage(self.repo, family[source_lang]) diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py index 11df17b..92a619f 100644 --- a/pywikibot/data/api.py +++ b/pywikibot/data/api.py @@ -220,6 +220,10 @@ def iteritems(self): return iter(self.params.items())
+ def items(self): + """Return a list of tuples containing the parameters in any order.""" + return list(self.params.items()) + @property def mime(self): """Return whether mime parameters are defined.""" @@ -596,10 +600,10 @@ max(login_status, pywikibot.site.LoginStatus.NOT_LOGGED_IN)) user_key = repr(user_key)
- return repr(self.site) + user_key + repr(sorted(self.iteritems())) + return repr(self.site) + user_key + repr(sorted(self.items()))
def _create_file_name(self): - self.http_params() # normalize self.iteritems() + self.http_params() # normalize self.params return hashlib.sha256( self._uniquedescriptionstr().encode('utf-8') ).hexdigest() @@ -898,7 +902,7 @@ else: count += 1 # note: self.limit could be -1 - if self.limit > 0 and count >= self.limit: + if self.limit and self.limit > 0 and count >= self.limit: return if self.module == "random" and self.limit: # "random" module does not return "query-continue" diff --git a/pywikibot/data/wikidataquery.py b/pywikibot/data/wikidataquery.py index a46b83e..82da2b2 100644 --- a/pywikibot/data/wikidataquery.py +++ b/pywikibot/data/wikidataquery.py @@ -177,7 +177,7 @@
def validateOrRaise(self, msg=None): if not self.validate(): - raise(TypeError, msg) + raise TypeError(msg)
def convertWDType(self, item): """ @@ -281,10 +281,12 @@ # check sensible things coming in, as we lose info once we do # type conversion if not self.isOrContainsOnlyTypes(item, [int, ItemPage]): - raise(TypeError, "The item paramter must contain or be integer IDs or page.ItemPages") + raise TypeError("The item parameter must contain or be integer IDs " + "or page.ItemPages") elif (not self.isOrContainsOnlyTypes(forward, [int, PropertyPage]) or not self.isOrContainsOnlyTypes(reverse, [int, PropertyPage])): - raise(TypeError, "The forward and reverse parameters must contain or be integer IDs or page.PropertyPages") + raise TypeError("The forward and reverse parameters must contain " "or be integer IDs or page.PropertyPages")
self.item = self.convertWDTypes(item) self.forward = self.convertWDTypes(forward) @@ -383,15 +385,15 @@ """
if not isinstance(claim, Claim): - raise(TypeError, "claim must be a page.Claim") + raise TypeError("claim must be a page.Claim")
if claim.type == 'wikibase-item': return HasClaim(claim.getID(numeric=True), claim.getTarget().getID(numeric=True)) if claim.type == 'string': return StringClaim(claim.getID(numeric=True), claim.getTarget()) else: - raise(TypeError, "Cannot construct a query from a claim of type %s" - % claim.type) + raise TypeError("Cannot construct a query from a claim of type %s" + % claim.type)
class WikidataQuery(): diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py index 3fec68b..1bdc6bc 100644 --- a/pywikibot/interwiki_graph.py +++ b/pywikibot/interwiki_graph.py @@ -78,9 +78,10 @@ node.set_color('green') node.set_style('filled,bold') # if we found more than one valid page for this language: - if len(filter(lambda p: p.site == page.site and p.exists() - and not p.isRedirectPage(), # noqa - list(self.subject.foundIn.keys()))) > 1: + # TODO: Only iterate through at most 2 elements + if len([p for p in self.subject.foundIn.keys() + if p.site == page.site and p.exists() and + not p.isRedirectPage()]) > 1: # noqa # mark conflict by octagonal node node.set_shape('octagon') self.graph.add_node(node) diff --git a/pywikibot/page.py b/pywikibot/page.py index e77b23e..c3966df 100644 --- a/pywikibot/page.py +++ b/pywikibot/page.py @@ -1154,7 +1154,7 @@ if include_obsolete: return self._langlinks else: - return filter(lambda i: not i.site.obsolete, self._langlinks) + return [i for i in self._langlinks if not i.site.obsolete]
def iterlanglinks(self, step=None, total=None, include_obsolete=False): """Iterate all inter-language links on this page. @@ -2786,12 +2786,12 @@ if prop not in data: continue data[prop] = WikibasePage._normalizeLanguages(data[prop]) - for key, value in data[prop].iteritems(): + for key, value in data[prop].items(): if isinstance(value, basestring): data[prop][key] = {'language': key, 'value': value}
if 'aliases' in data: - for key, values in data['aliases'].iteritems(): + for key, values in data['aliases'].items(): if (isinstance(values, list) and isinstance(values[0], basestring)): data['aliases'][key] = [{'language': key, 'value': value} diff --git a/pywikibot/site.py b/pywikibot/site.py index 13da02c..8e42f27 100644 --- a/pywikibot/site.py +++ b/pywikibot/site.py @@ -3448,6 +3448,7 @@ captcha = result["edit"]["captcha"] req['captchaid'] = captcha['id'] if captcha["type"] == "math": + # TODO: Should the input be parsed through eval in py3? req['captchaword'] = input(captcha["question"]) continue elif "url" in captcha: diff --git a/pywikibot/tools.py b/pywikibot/tools.py index 26c56fb..2cc284a 100644 --- a/pywikibot/tools.py +++ b/pywikibot/tools.py @@ -26,6 +26,13 @@ debug = warning = print
+def empty_iterator(): + # http://stackoverflow.com/a/13243870/473890 + """An iterator which does nothing.""" + return + yield + + class UnicodeMixin(object):
"""Mixin class to handle defining the proper __str__/__unicode__ @@ -159,13 +166,13 @@ Example:
>>> i = itergroup(xrange(25), 10) - >>> print i.next() + >>> print next(i) [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] - >>> print i.next() + >>> print next(i) [10, 11, 12, 13, 14, 15, 16, 17, 18, 19] - >>> print i.next() + >>> print next(i) [20, 21, 22, 23, 24] - >>> print i.next() + >>> print next(i) Traceback (most recent call last): ... StopIteration @@ -247,11 +254,9 @@ """Initialise the default as an empty string.""" str.__init__(self)
- # http://stackoverflow.com/a/13243870/473890 def _empty_iter(self): - """An iterator which does nothing.""" - return - yield + """An iterator which does nothing and drops the argument.""" + return empty_iterator()
def __getitem__(self, key): """Raise always a L{CombinedError}.""" @@ -349,8 +354,8 @@
if target_module is None: target_module = target.__module__ - if hasattr(target, 'im_class'): - target_module += '.' + target.im_class.__name__ + if hasattr(target, '__self__'): + target_module += '.' + target.__self__.__class__.__name__ if target_module and target_module[-1] != '.': target_module += '.' if source_module is '.': diff --git a/pywikibot/userinterfaces/gui.py b/pywikibot/userinterfaces/gui.py index 25f68d4..abdbe76 100644 --- a/pywikibot/userinterfaces/gui.py +++ b/pywikibot/userinterfaces/gui.py @@ -95,7 +95,7 @@ '<<undo>>': ['<Control-Key-z>', '<Control-Key-Z>'], }
- for event, keylist in keydefs.iteritems(): + for event, keylist in keydefs.items(): if keylist: self.event_add(event, *keylist)
@@ -335,7 +335,7 @@ if highlight: self.find_all(highlight) if jumpIndex: - print jumpIndex + print(jumpIndex) # lines are indexed starting at 1 line = text[:jumpIndex].count('\n') + 1 column = jumpIndex - (text[:jumpIndex].rfind('\n') + 1) diff --git a/pywikibot/userinterfaces/terminal_interface_base.py b/pywikibot/userinterfaces/terminal_interface_base.py index c1363e1..e7a38af 100755 --- a/pywikibot/userinterfaces/terminal_interface_base.py +++ b/pywikibot/userinterfaces/terminal_interface_base.py @@ -168,7 +168,10 @@ self._print(text, targetStream)
def _raw_input(self): - return raw_input() + if sys.version_info[0] >= 3: + return input() + else: + return raw_input()
def input(self, question, password=False): """ diff --git a/scripts/blockreview.py b/scripts/blockreview.py index 3e1be78..f184f57 100644 --- a/scripts/blockreview.py +++ b/scripts/blockreview.py @@ -145,7 +145,7 @@ from pywikibot import pagegenerators as pg gen = pg.PreloadingGenerator(self.SysopGenerator()) for sysop in gen: - print sysop.title() + print(sysop.title())
talkText = talkText.replace(u'{{%s}}' % unblock_tpl, u'{{%s|2}}' % unblock_tpl) @@ -227,9 +227,8 @@
def getInfo(self, user): if not self.info: - self.info = self.site.logpages(1, mode='block', - title=user.getUserPage().title(), - dump=True).next() + self.info = next(self.site.logpages( + 1, mode='block', title=user.getUserPage().title(), dump=True)) self.parts = { 'admin': self.info['user'], 'user': self.info['title'], @@ -296,7 +295,7 @@ pywikibot.output( u'Skipping %s because of edit conflict' % (page.title())) - except pywikibot.SpamfilterError, error: + except pywikibot.SpamfilterError as error: pywikibot.output( u'Cannot change %s because of spam blacklist entry ' u'%s' % (page.title(), error.url)) diff --git a/scripts/casechecker.py b/scripts/casechecker.py index a05f5f4..3c92791 100644 --- a/scripts/casechecker.py +++ b/scripts/casechecker.py @@ -358,7 +358,7 @@ return
firstItem = True - for pageID, page in data['query']['pages'].iteritems(): + for pageID, page in data['query']['pages'].items(): printed = False title = page['title'] self.currentTitle = title @@ -588,7 +588,7 @@
if len(mapLcl) + len(mapLat) - ambigBadWordsCount < count: # We cannot auto-translate - offer a list of suggested words - suggestions = mapLcl.values() + mapLat.values() + suggestions = list(mapLcl.values()) + list(mapLat.values()) if len(suggestions) > 0: infoText += u", word suggestions: " + u', '.join( [self.ColorCodeWord(t) for t in suggestions]) @@ -650,9 +650,9 @@ return pagesExist[0] elif len(pagesExist) == 0 and len(pagesRedir) > 0: if len(pagesRedir) == 1: - return pagesRedir.keys()[0] + return list(pagesRedir.keys())[0] t = None - for k, v in pagesRedir.iteritems(): + for v in pagesRedir.values(): if not t: t = v # first item elif t != v: @@ -660,7 +660,7 @@ else: # all redirects point to the same target # pick the first one, doesn't matter what it is - return pagesRedir.keys()[0] + return list(pagesRedir.keys())[0]
if not self.autonomous: pywikibot.output(u'Could not auto-decide for page %s. Which link ' diff --git a/scripts/catall.py b/scripts/catall.py index ccd9f5c..9c176a9 100755 --- a/scripts/catall.py +++ b/scripts/catall.py @@ -92,8 +92,8 @@ cats = p.categories() if not cats: pywikibot.output(u"========== %s ==========" % p.title()) - print "No categories" - print "-" * 40 + print("No categories") + print("-" * 40) newcats = choosecats(text) if newcats != [] and newcats is not None: make_categories(p, newcats, mysite) @@ -101,7 +101,7 @@ pywikibot.output(u"========== %s ==========" % p.title()) for c in cats: pywikibot.output(c.title()) - print "-" * 40 + print("-" * 40) newcats = choosecats(text) if newcats is None: make_categories(p, [], mysite) diff --git a/scripts/category_redirect.py b/scripts/category_redirect.py index d7d9b81..5eb105a 100755 --- a/scripts/category_redirect.py +++ b/scripts/category_redirect.py @@ -183,14 +183,13 @@ datafile = pywikibot.config.datafilepath("%s-catmovebot-data" % self.site.dbName()) try: - inp = open(datafile, "rb") - record = cPickle.load(inp) - inp.close() + with open(datafile, "rb") as inp: + record = cPickle.load(inp) except IOError: record = {} if record: - cPickle.dump(record, open(datafile + ".bak", "wb"), -1) - + with open(datafile + ".bak", "wb") as f: + cPickle.dump(record, f, -1) try: template_list = self.site.family.category_redirect_templates[ self.site.code] @@ -390,7 +389,8 @@ except: pass
- cPickle.dump(record, open(datafile, "wb"), -1) + with open(datafile, "wb") as f: + cPickle.dump(record, f, -1)
self.log_text.sort() problems.sort() diff --git a/scripts/checkimages.py b/scripts/checkimages.py index 77e6caf..1343c6e 100644 --- a/scripts/checkimages.py +++ b/scripts/checkimages.py @@ -971,7 +971,7 @@
pywikibot.output(u'%s is on commons!' % self.imageName) on_commons_text = self.image.getImagePageHtml() - if re.search(ur"<div class=(?:'|\")sharedUploadNotice(?:'|\")>", + if re.search(r"<div class=(?:'|\")sharedUploadNotice(?:'|\")>", on_commons_text): pywikibot.output( u"But, the file doesn't exist on your project! Skip...")
self.generator = generator - self.findTemplate = re.compile(ur'{{[Ss]isterlinks') - self.findTemplate2 = re.compile(ur'{{[Cc]ommonscat') - self.findTemplate3 = re.compile(ur'{{[Cc]ommons') + self.findTemplate = re.compile(r'{{[Ss]isterlinks') + self.findTemplate2 = re.compile(r'{{[Cc]ommonscat') + self.findTemplate3 = re.compile(r'{{[Cc]ommons')
def run(self): if not all((self.getOption('action'), self.generator)): diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py index 58a0286..a791b0d 100755 --- a/scripts/data_ingestion.py +++ b/scripts/data_ingestion.py @@ -173,7 +173,7 @@ field = field.strip() value = value.strip() configuration[field] = value - print configuration + print(configuration) return configuration
@@ -302,5 +302,5 @@ try: main() finally: - print "All done!" + print("All done!") ''' diff --git a/scripts/freebasemappingupload.py b/scripts/freebasemappingupload.py index 9aae10a..59be5ce 100644 --- a/scripts/freebasemappingupload.py +++ b/scripts/freebasemappingupload.py @@ -75,7 +75,7 @@ label = data.labels['en'] else: # Just pick up the first label - label = data.labels.values()[0] + label = list(data.labels.values())[0] pywikibot.output('Parsed: %s <--> %s' % (qid, mid)) pywikibot.output('%s is %s' % (data.getID(), label)) if data.claims and 'P646' in data.claims: diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py index dba7b0c..d4573ef 100644 --- a/scripts/imagetransfer.py +++ b/scripts/imagetransfer.py @@ -174,7 +174,7 @@ # try to translate license templates if (sourceSite.sitename(), self.targetSite.sitename()) in licenseTemplates: for old, new in licenseTemplates[(sourceSite.sitename(), - self.targetSite.sitename())].iteritems(): + self.targetSite.sitename())].items(): new = '{{%s}}' % new old = re.compile('{{%s}}' % old) description = textlib.replaceExcept(description, old, new, @@ -189,10 +189,10 @@ description += "\r\n\r\n" + unicode(sourceImagePage) except pywikibot.NoPage: description = '' - print "Image does not exist or description page is empty." + print("Image does not exist or description page is empty.") except pywikibot.IsRedirectPage: description = '' - print "Image description page is redirect." + print("Image description page is redirect.") else: bot = upload.UploadRobot(url=url, description=description, targetSite=self.targetSite, @@ -225,7 +225,7 @@ for i in range(len(imagelist)): image = imagelist[i] #sourceSite = sourceImagePage.site - print "-" * 60 + print("-" * 60) pywikibot.output(u"%s. Found image: %s" % (i, image.title(asLink=True))) try: @@ -243,7 +243,7 @@ targetImage.get(throttle=False) pywikibot.output(u"Image with this name is already on %s." % self.targetSite) - print "-" * 60 + print("-" * 60) pywikibot.output(targetImage.get(throttle=False)) sys.exit() except pywikibot.NoPage: @@ -255,7 +255,7 @@
except pywikibot.NoPage: break - print "=" * 60 + print("=" * 60)
def run(self): for page in self.generator: diff --git a/scripts/lonelypages.py b/scripts/lonelypages.py index 95b51fb..f956d2a 100644 --- a/scripts/lonelypages.py +++ b/scripts/lonelypages.py @@ -65,12 +65,12 @@
# Use regex to prevent to put the same template twice! exception_regex = { - 'ar': [ur'{{(?:قالب:|)(يتيمة)[|}]'], + 'ar': [u'\{\{(?:قالب:|)(يتيمة)[\|\}]'], 'ca': [r'{{(?:template:|)(orfe)[|}]'], 'en': [r'{{(?:template:|)(orphan)[|}]', r'{{(?:template:|)(wi)[|}]'], 'it': [r'{{(?:template:|)(o|a)[|}]'], - 'ja': [ur'{{(?:template:|)(孤立)[|}]'], + 'ja': [u'\{\{(?:template:|)(孤立)[\|\}]'], 'zh': [r'{{(?:template:|)(orphan)[|}]'], }
diff --git a/scripts/maintenance/make_i18n_dict.py b/scripts/maintenance/make_i18n_dict.py index d23e805..cd55a70 100644 --- a/scripts/maintenance/make_i18n_dict.py +++ b/scripts/maintenance/make_i18n_dict.py @@ -42,7 +42,7 @@ self.dict = dict()
def print_all(self): - keys = self.dict.keys() + keys = list(self.dict.keys()) keys.remove('qqq') keys.sort() keys.insert(0, 'qqq') @@ -50,21 +50,21 @@ keys.remove('en') keys.insert(0, 'en')
- print "# -*- coding: utf-8 -*-" - print "msg = {" + print("# -*- coding: utf-8 -*-") + print("msg = {") for code in keys: - print " '%s': {" % code + print(" '%s': {" % code) for msg in self.messages: label = "%s-%s" % (self.scriptname, msg) if label in self.dict[code]: - print " '%s': u'%s'," % (label, - self.dict[code][label]) - print " }," - print "};" + print(" '%s': u'%s'," % (label, + self.dict[code][label])) + print(" },") + print("};")
def read(self, item): msg = getattr(self.script, item) - keys = msg.keys() + keys = list(msg.keys()) keys.append('qqq') for code in keys: label = "%s-%s" % (self.scriptname, item) @@ -79,7 +79,7 @@ self.dict[code] = {} self.dict[code][label] = msg[code] if 'en' not in keys: - print 'WARNING: "en" key missing for message %s' % item + print('WARNING: "en" key missing for message %s' % item)
def run(self): for msg in self.messages: @@ -87,4 +87,4 @@ self.print_all()
if __name__ == "__main__": - print __doc__ + print(__doc__) diff --git a/scripts/maintenance/wikimedia_sites.py b/scripts/maintenance/wikimedia_sites.py index 275237b..76a9361 100644 --- a/scripts/maintenance/wikimedia_sites.py +++ b/scripts/maintenance/wikimedia_sites.py @@ -38,9 +38,7 @@
def update_family(families): - if not families: - families = familiesDict.keys() - for family in families: + for family in families or familiesDict.keys(): pywikibot.output('\nChecking family %s:' % family)
original = Family(family).languages_by_size @@ -89,7 +87,7 @@ family_file_name = 'pywikibot/families/%s_family.py' % family family_file = codecs.open(family_file_name, 'r', 'utf8') family_text = family_file.read() - old = re.findall(ur'(?msu)^ {8}self.languages_by_size.+?]', + old = re.findall(r'(?msu)^ {8}self.languages_by_size.+?]', family_text)[0] family_text = family_text.replace(old, text) family_file = codecs.open(family_file_name, 'w', 'utf8') diff --git a/scripts/redirect.py b/scripts/redirect.py index 72e64bb..d6eb487 100755 --- a/scripts/redirect.py +++ b/scripts/redirect.py @@ -292,7 +292,7 @@ ## u'Getting a list of all redirects and of all page titles...') ## redirs, pageTitles = self.get_redirects_from_dump( ## alsoGetPageTitles=True) -## for (key, value) in redirs.iteritems(): +## for (key, value) in redirs.items(): ## if value not in pageTitles: ## yield key
diff --git a/scripts/reflinks.py b/scripts/reflinks.py index a7f50f0..2d35316 100644 --- a/scripts/reflinks.py +++ b/scripts/reflinks.py @@ -203,6 +203,8 @@ if linksInRef.search(entry.text): return page
+ __next__ = next +
class RefLink:
diff --git a/scripts/replace.py b/scripts/replace.py index 79b841a..a8e6b46 100755 --- a/scripts/replace.py +++ b/scripts/replace.py @@ -598,7 +598,7 @@ fix = fixes.fixes[fix] except KeyError: pywikibot.output(u'Available predefined fixes are: %s' - % fixes.fixes.keys()) + % ', '.join(fixes.fixes.keys())) return if "regex" in fix: regex = fix['regex'] diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py index b873640..e7976da 100644 --- a/scripts/replicate_wiki.py +++ b/scripts/replicate_wiki.py @@ -31,7 +31,6 @@ #
import sys -from itertools import imap
import pywikibot from pywikibot import config, Page @@ -39,8 +38,7 @@
def namespaces(site): """dict from namespace number to prefix.""" - ns = dict(map(lambda n: (site.getNamespaceIndex(n), n), - site.namespaces())) + ns = dict((site.getNamespaceIndex(n), n) for n in site.namespaces()) ns[0] = '' return ns
@@ -79,7 +77,7 @@ pywikibot.output('%s %s' % (k, nsd[k])) sys.exit()
- self.sites = map(lambda s: pywikibot.Site(s, family), sites) + self.sites = [pywikibot.Site(s, family) for s in sites]
self.differences = {} self.user_diff = {} @@ -128,8 +126,8 @@ def check_namespace(self, namespace): """Check an entire namespace.""" pywikibot.output("\nCHECKING NAMESPACE %s" % namespace) - pages = imap(lambda p: p.title(), - self.original.allpages('!', namespace=namespace)) + pages = (p.title() for p in self.original.allpages( + '!', namespace=namespace)) for p in pages: if p not in ['MediaWiki:Sidebar', 'MediaWiki:Mainpage', 'MediaWiki:Sitenotice', 'MediaWiki:MenuSidebar']: @@ -150,15 +148,15 @@ 'User:%s/sync.py overview' % site.user()) output = "== Pages that differ from original ==\n\n" if self.differences[site]: - output += "".join(map(lambda l: '* [[:%s]]\n' % l, - self.differences[site])) + output += "".join('* [[:%s]]\n' % l for l in + self.differences[site]) else: output += "All important pages are the same"
output += "\n\n== Admins from original that are missing here ==\n\n" if self.user_diff[site]: - output += "".join(map(lambda l: '* %s\n' % l.replace('_', ' '), - self.user_diff[site])) + output += "".join('* %s\n' % l.replace('_', ' ') for l in + self.user_diff[site]) else: output += "All users from original are also present on this wiki"
diff --git a/scripts/revertbot.py b/scripts/revertbot.py index 345c72c..1b6d54e 100644 --- a/scripts/revertbot.py +++ b/scripts/revertbot.py @@ -52,11 +52,11 @@
def get_contributions(self, max=500, ns=None): count = 0 - iterator = iter(xrange(0)) + iterator = pywikibot.tools.empty_iterator() never_continue = False while count != max or never_continue: try: - item = iterator.next() + item = next(iterator) except StopIteration: self.log(u'Fetching new batch of contributions') data = list(pywikibot.Site().usercontribs(user=self.user, namespaces=ns, total=max)) diff --git a/scripts/selflink.py b/scripts/selflink.py index f618aaa..ee436bc 100644 --- a/scripts/selflink.py +++ b/scripts/selflink.py @@ -77,7 +77,7 @@ return False try: linkedPage = pywikibot.Page(page.site, title=match.group('title')) - except pywikibot.InvalidTitle, err: + except pywikibot.InvalidTitle as err: pywikibot.warning(u'%s' % err) return False
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py index 8a6cffb..a10c069 100644 --- a/scripts/solve_disambiguation.py +++ b/scripts/solve_disambiguation.py @@ -492,11 +492,9 @@ return None
def makeAlternativesUnique(self): - # remove duplicate entries - result = {} - for i in self.alternatives: - result[i] = None - self.alternatives = result.keys() + # remove duplicate entries while keeping the original order + seen = set() + self.alternatives = [alt for alt in self.alternatives + if alt not in seen and not seen.add(alt)]
def listAlternatives(self): list = u'\n' diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py index 80c9736..1ead1d9 100644 --- a/scripts/weblinkchecker.py +++ b/scripts/weblinkchecker.py @@ -214,6 +214,8 @@ except NameError: pass
+ __next__ = next +
class LinkChecker(object):
@@ -381,12 +383,12 @@ """ try: wasRedirected = self.resolveRedirect(useHEAD=useHEAD) - except UnicodeError, error: + except UnicodeError as error: return False, u'Encoding Error: %s (%s)' % ( error.__class__.__name__, unicode(error)) - except httplib.error, error: + except httplib.error as error: return False, u'HTTP Error: %s' % error.__class__.__name__ - except socket.error, error: + except socket.error as error: # https://docs.python.org/2/library/socket.html : # socket.error : # The accompanying value is either a string telling what went @@ -399,7 +401,7 @@ try: msg = error[1] except IndexError: - print u'### DEBUG information for #2972249' + print(u'### DEBUG information for #2972249') raise IndexError(type(error)) # TODO: decode msg. On Linux, it's encoded in UTF-8. # How is it encoded in Windows? Or can we somehow just @@ -447,16 +449,16 @@ else: try: conn = self.getConnection() - except httplib.error, error: + except httplib.error as error: return False, u'HTTP Error: %s' % error.__class__.__name__ try: conn.request('GET', '%s%s' % (self.path, self.query), None, self.header) - except socket.error, error: + except socket.error as error: return False, u'Socket Error: %s' % repr(error[1]) try: self.response = conn.getresponse() - except Exception, error: + except Exception as error: return False, u'Error: %s' % error # read the server's encoding, in case we need it later self.readEncodingFromResponse(self.response) @@ -706,7 +708,7 @@ 'weblinkchecker-summary')) try: talkPage.put(content, comment) - except pywikibot.SpamfilterError, error: + except pywikibot.SpamfilterError as error: pywikibot.output( u'\03{lightaqua}** SpamfilterError while trying to ' u'change %s: %s\03{default}' @@ -770,13 +772,10 @@ def RepeatPageGenerator(): history = History(None) pageTitles = set() - for (key, value) in history.historyDict.iteritems(): + for value in history.historyDict.values(): for entry in value: - pageTitle = entry[0] - pageTitles.add(pageTitle) - pageTitles = list(pageTitles) - pageTitles.sort() - for pageTitle in pageTitles: + pageTitles.add(entry[0]) + for pageTitle in sorted(pageTitles): page = pywikibot.Page(pywikibot.Site(), pageTitle) yield page
diff --git a/scripts/welcome.py b/scripts/welcome.py index b5fa34b..529984e 100644 --- a/scripts/welcome.py +++ b/scripts/welcome.py @@ -964,9 +964,8 @@ if globalvar.randomSign and globalvar.saveSignIndex and \ bot.welcomed_users: import cPickle - f = file(filename, 'w') - cPickle.dump(bot.welcomed_users, f) - f.close() + with open(filename, 'w') as f: + cPickle.dump(bot.welcomed_users, f)
if __name__ == "__main__": main() diff --git a/tests/page_tests.py b/tests/page_tests.py index ec8f6f4..c82a0e9 100644 --- a/tests/page_tests.py +++ b/tests/page_tests.py @@ -70,11 +70,11 @@ """Test that Link() normalizes namespace names""" for num in self.namespaces: for prefix in self.namespaces[num]: - l = pywikibot.page.Link(prefix + self.titles.keys()[0], + l = pywikibot.page.Link(prefix + list(self.titles.keys())[0], self.enwiki) self.assertEqual(l.namespace, num) # namespace prefixes are case-insensitive - m = pywikibot.page.Link(prefix.lower() + self.titles.keys()[1], + m = pywikibot.page.Link(prefix.lower() + list(self.titles.keys())[1], self.enwiki) self.assertEqual(m.namespace, num)
diff --git a/tests/site_tests.py b/tests/site_tests.py index bac24c9..96079a5 100644 --- a/tests/site_tests.py +++ b/tests/site_tests.py @@ -414,7 +414,7 @@ self.assertTrue(page.title(withNamespace=False) >= "From") self.assertTrue(hasattr(page, "_fromid")) errgen = mysite.alllinks(unique=True, fromids=True) - self.assertRaises(pywikibot.Error, errgen.next) + self.assertRaises(pywikibot.Error, next, errgen)
def testAllCategories(self): """Test the site.allcategories() method""" @@ -1030,9 +1030,10 @@ self.assertNotIn(not_exists, mysite.siteinfo) self.assertEqual(len(mysite.siteinfo.get(not_exists)), 0) self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists))) - self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).iteritems())) - self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).itervalues())) - self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).iterkeys())) + if sys.version_info[0] == 2: + self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).iteritems())) + self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).itervalues())) + self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).iterkeys())) self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).items())) self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).values())) self.assertFalse(entered_loop(mysite.siteinfo.get(not_exists).keys()))