jenkins-bot has submitted this change and it was merged.
Change subject: Fixed dictionary-related items
......................................................................
Fixed dictionary-related items
Change-Id: Ia3e5160409afe3e18e6f27d76f77daf7f57cbfd8
---
M pywikibot/__init__.py
M pywikibot/bot.py
M pywikibot/botirc.py
M pywikibot/config2.py
M pywikibot/data/api.py
M pywikibot/date.py
M pywikibot/families/anarchopedia_family.py
M pywikibot/families/omegawiki_family.py
M pywikibot/families/wikisource_family.py
M pywikibot/families/wiktionary_family.py
M pywikibot/family.py
M pywikibot/i18n.py
M pywikibot/interwiki_graph.py
M pywikibot/page.py
M pywikibot/site.py
M pywikibot/textlib.py
M pywikibot/titletranslate.py
M pywikibot/userinterfaces/terminal_interface_unix.py
M pywikibot/userinterfaces/terminal_interface_win32.py
19 files changed, 53 insertions(+), 53 deletions(-)
Approvals:
Merlijn van Deen: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index 92e9d63..a91fed6 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -521,7 +521,7 @@
# only need one drop() call because all throttles use the same global pid
try:
- _sites.values()[0].throttle.drop()
+ list(_sites.values())[0].throttle.drop()
pywikibot.log(u"Dropped throttle(s).")
except IndexError:
pass
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 2db129f..b571fe7 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -264,7 +264,7 @@
# imported modules
log(u'MODULES:')
- for item in sys.modules.keys():
+ for item in list(sys.modules.keys()):
ver = version.getfileversion('%s.py' % item.replace('.',
'/'))
if ver:
log(u' %s' % ver)
@@ -719,7 +719,7 @@
module = __import__('%s' % modname)
helpText = module.__doc__.decode('utf-8')
if hasattr(module, 'docuReplacements'):
- for key, value in module.docuReplacements.iteritems():
+ for key, value in module.docuReplacements.items():
helpText = helpText.replace(key, value.strip('\n\r'))
pywikibot.stdout(helpText) # output to STDOUT
except Exception:
diff --git a/pywikibot/botirc.py b/pywikibot/botirc.py
index 8a847a9..d374a93 100644
--- a/pywikibot/botirc.py
+++ b/pywikibot/botirc.py
@@ -59,7 +59,7 @@
self.site = site
        self.other_ns = re.compile(
            u'14\[\[07(' + u'|'.join([item[0] for item in
-                                     site.namespaces().values() if item[0]]) +
+                                     list(site.namespaces().values()) if item[0]]) +
            u')')
self.api_url = self.site.family.apipath(self.site.lang)
self.api_url +=
'?action=query&meta=siteinfo&siprop=statistics&format=xml'
self.api_found = re.compile(r'articles="(.*?)"')
diff --git a/pywikibot/config2.py b/pywikibot/config2.py
index 003c6e4..9457c9b 100644
--- a/pywikibot/config2.py
+++ b/pywikibot/config2.py
@@ -655,7 +655,7 @@
# Store current variables and their types.
_glv = {}
_glv.update(globals())
-_gl = _glv.keys()
+_gl = list(_glv.keys())
_tp = {}
for _key in _gl:
if _key[0] != '_':
@@ -681,7 +681,7 @@
% {'fn': _filename})
# Test for obsoleted and/or unknown variables.
-for _key, _val in globals().items():
+for _key, _val in list(globals().items()):
if _key.startswith('_'):
pass
elif _key in _gl:
@@ -740,7 +740,7 @@
_all = 0
else:
print("Unknown arg %(_arg)s ignored" % locals())
- _k = globals().keys()
+ _k = list(globals().keys())
_k.sort()
for _name in _k:
if _name[0] != '_':
@@ -750,7 +750,7 @@
print(_name, "=", repr(globals()[_name]))
# cleanup all locally-defined variables
-for __var in globals().keys():
+for __var in list(globals().keys()):
if __var.startswith("_") and not __var.startswith("__"):
del __sys.modules[__name__].__dict__[__var]
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 313f2e2..2d8c4d9 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -155,7 +155,7 @@
del self.params[key]
def keys(self):
- return self.params.keys()
+ return list(self.params.keys())
def __contains__(self, key):
return self.params.__contains__(key)
@@ -167,7 +167,7 @@
return len(self.params)
def iteritems(self):
- return self.params.iteritems()
+ return iter(self.params.items())
def http_params(self):
"""Return the parameters formatted for inclusion in an HTTP
request."""
@@ -278,7 +278,7 @@
eoh = body.find(marker)
body = body[eoh + len(marker):]
# retrieve the headers from the MIME object
- mimehead = dict(container.items())
+ mimehead = dict(list(container.items()))
rawdata = http.request(self.site, uri, ssl, method="POST",
headers=mimehead, body=body)
else:
@@ -680,7 +680,7 @@
if isinstance(resultdata, dict):
pywikibot.debug(u"%s received %s; limit=%s"
% (self.__class__.__name__,
- resultdata.keys(),
+ list(resultdata.keys()),
self.limit),
_logger)
if "results" in resultdata:
@@ -722,7 +722,7 @@
% self.continuekey)
return
update = self.data["query-continue"][self.continuekey]
- for key, value in update.iteritems():
+ for key, value in update.items():
# query-continue can return ints
if isinstance(value, int):
value = str(value)
diff --git a/pywikibot/date.py b/pywikibot/date.py
index 46ccb54..6b2e59e 100644
--- a/pywikibot/date.py
+++ b/pywikibot/date.py
@@ -2282,7 +2282,7 @@
dictName is 'YearBC', 'December', etc.
"""
- for dictName, dict in formats.iteritems():
+ for dictName, dict in formats.items():
try:
year = dict[lang](title)
return dictName, year
@@ -2352,7 +2352,7 @@
pywikibot.output((u"Processing %s with limits from %d to %d and step
%d"
% (formatName, start, stop - 1, step)))
- for code, convFunc in formats[formatName].iteritems():
+ for code, convFunc in formats[formatName].items():
## import time
## startClock = time.clock()
for value in xrange(start, stop, step):
diff --git a/pywikibot/families/anarchopedia_family.py
b/pywikibot/families/anarchopedia_family.py
index 037d015..5929098 100644
--- a/pywikibot/families/anarchopedia_family.py
+++ b/pywikibot/families/anarchopedia_family.py
@@ -19,7 +19,7 @@
for l in self.languages_by_size:
self.langs[l] = '%s.anarchopedia.org' % l
- self.nocapitalize = self.langs.keys()
+ self.nocapitalize = list(self.langs.keys())
self.obsolete = {
'ara': 'ar',
diff --git a/pywikibot/families/omegawiki_family.py
b/pywikibot/families/omegawiki_family.py
index 4722987..f2ae535 100644
--- a/pywikibot/families/omegawiki_family.py
+++ b/pywikibot/families/omegawiki_family.py
@@ -16,7 +16,7 @@
# On most Wikipedias page names must start with a capital letter, but some
# languages don't use this.
- self.nocapitalize = self.langs.keys()
+ self.nocapitalize = list(self.langs.keys())
def hostname(self, code):
return 'www.omegawiki.org'
diff --git a/pywikibot/families/wikisource_family.py
b/pywikibot/families/wikisource_family.py
index 8cde673..0e79f2f 100644
--- a/pywikibot/families/wikisource_family.py
+++ b/pywikibot/families/wikisource_family.py
@@ -87,6 +87,6 @@
'zh': [102],
}
- for key, values in self.authornamespaces.iteritems():
+ for key, values in self.authornamespaces.items():
for item in values:
self.crossnamespace[item].update({key: self.authornamespaces})
diff --git a/pywikibot/families/wiktionary_family.py
b/pywikibot/families/wiktionary_family.py
index c769951..116be5f 100644
--- a/pywikibot/families/wiktionary_family.py
+++ b/pywikibot/families/wiktionary_family.py
@@ -45,7 +45,7 @@
# Other than most Wikipedias, page names must not start with a capital
# letter on ALL Wiktionaries.
- self.nocapitalize = self.langs.keys()
+ self.nocapitalize = list(self.langs.keys())
# Which languages have a special order for putting interlanguage links,
# and what order is it? If a language is not in interwiki_putfirst,
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 8b2dd72..667309a 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -831,8 +831,8 @@
@property
def iwkeys(self):
if self.interwiki_forward:
- return pywikibot.Family(self.interwiki_forward).langs.keys()
- return self.langs.keys()
+ return list(pywikibot.Family(self.interwiki_forward).langs.keys())
+ return list(self.langs.keys())
def _addlang(self, code, location, namespaces={}):
"""Add a new language to the langs and namespaces of the family.
diff --git a/pywikibot/i18n.py b/pywikibot/i18n.py
index df64ef5..f032e2e 100644
--- a/pywikibot/i18n.py
+++ b/pywikibot/i18n.py
@@ -283,8 +283,8 @@
code = alt
break
else:
- trans = xdict.values()[0]
- code = xdict.keys()[0]
+ trans = list(xdict.values())[0]
+ code = list(xdict.keys())[0]
if not trans:
return # return None if we have no translation found
if parameters is None:
diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py
index ee043ae..7e27d01 100644
--- a/pywikibot/interwiki_graph.py
+++ b/pywikibot/interwiki_graph.py
@@ -78,7 +78,7 @@
# if we found more than one valid page for this language:
if len(filter(lambda p: p.site == page.site and p.exists()
and not p.isRedirectPage(), # noqa
- self.subject.foundIn.keys())) > 1:
+ list(self.subject.foundIn.keys()))) > 1:
# mark conflict by octagonal node
node.set_shape('octagon')
self.graph.add_node(node)
@@ -134,14 +134,14 @@
# create empty graph
self.graph = pydot.Dot()
# self.graph.set('concentrate', 'true')
- for page in self.subject.foundIn.iterkeys():
+ for page in self.subject.foundIn.keys():
# a node for each found page
self.addNode(page)
# mark start node by pointing there from a black dot.
firstLabel = self.getLabel(self.subject.originPage)
self.graph.add_node(pydot.Node('start', shape='point'))
self.graph.add_edge(pydot.Edge('start', firstLabel))
- for page, referrers in self.subject.foundIn.iteritems():
+ for page, referrers in self.subject.foundIn.items():
for refPage in referrers:
self.addDirectedEdge(page, refPage)
self.saveGraphFile()
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 049aab0..c5636bf 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -854,12 +854,12 @@
if config.cosmetic_changes_mylang_only:
cc = ((family == config.family and
self.site.lang == config.mylang) or
- family in config.cosmetic_changes_enable.keys() and
+ family in list(config.cosmetic_changes_enable.keys()) and
self.site.lang in config.cosmetic_changes_enable[family])
else:
cc = True
cc = (cc and not
- (family in config.cosmetic_changes_disable.keys() and
+ (family in list(config.cosmetic_changes_disable.keys()) and
self.site.lang in config.cosmetic_changes_disable[family]))
if not cc:
return
@@ -2389,7 +2389,7 @@
"""
if force or not hasattr(self, '_content'):
data = self.repo.loadcontent(self._defined_by(), *args)
- self.id = data.keys()[0]
+ self.id = list(data.keys())[0]
self._content = data[self.id]
if 'lastrevid' in self._content:
self.lastrevid = self._content['lastrevid']
@@ -2514,7 +2514,7 @@
value should be a list of strings.
"""
aliases = self.__normalizeLanguages(aliases)
- for (key, strings) in aliases.items():
+ for (key, strings) in list(aliases.items()):
aliases[key] = [{'language': key, 'value': i} for i in
strings]
data = {'aliases': aliases}
self.editEntity(data, **kwargs)
@@ -2832,7 +2832,7 @@
more handling.
"""
source = collections.defaultdict(list)
- for prop in data['snaks'].values():
+ for prop in list(data['snaks'].values()):
for claimsnak in prop:
claim = Claim.fromJSON(site, {'mainsnak': claimsnak,
'hash': data['hash']})
@@ -3186,7 +3186,7 @@
t = t[t.index(u":"):].lstrip(u":").lstrip(u"
")
self._namespace = ns
break
- if prefix in fam.langs.keys()\
+ if prefix in list(fam.langs.keys())\
or prefix in fam.get_known_families(site=self._site):
# looks like an interwiki link
if not firstPass:
@@ -3195,7 +3195,7 @@
"Improperly formatted interwiki link '%s'"
% self._text)
t = t[t.index(u":"):].lstrip(u":").lstrip(u"
")
- if prefix in fam.langs.keys():
+ if prefix in list(fam.langs.keys()):
newsite = pywikibot.Site(prefix, fam)
else:
otherlang = self._site.code
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 9399afa..9f36234 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -61,7 +61,7 @@
@classmethod
def name(cls, search_value):
- for key, value in cls.__dict__.iteritems():
+ for key, value in cls.__dict__.items():
if key == key.upper() and value == search_value:
return key
raise KeyError("Value %r could not be found in this enum"
@@ -135,7 +135,7 @@
raise NoSuchSite("Language %s in family %s is obsolete"
% (self.__code, self.__family.name))
if self.__code not in self.languages():
- if self.__family.name in self.__family.langs.keys() and \
+ if self.__family.name in list(self.__family.langs.keys()) and \
len(self.__family.langs) == 1:
oldcode = self.__code
self.__code = self.__family.name
@@ -258,12 +258,12 @@
def languages(self):
"""Return list of all valid language codes for this site's
Family."""
- return self.family.langs.keys()
+ return list(self.family.langs.keys())
def validLanguageLinks(self):
"""Return list of language codes that can be used in interwiki
links."""
- nsnames = [name for name in self.namespaces().itervalues()]
+ nsnames = [name for name in self.namespaces().values()]
return [lang for lang in self.languages()
if lang[:1].upper() + lang[1:] not in nsnames]
@@ -1407,7 +1407,7 @@
if target_title == title or "pages" not in result['query']:
# no "pages" element indicates a circular redirect
raise pywikibot.CircularRedirect(redirmap[title])
- pagedata = result['query']['pages'].values()[0]
+ pagedata = list(result['query']['pages'].values())[0]
# there should be only one value in 'pages', and it is the target
if self.sametitle(pagedata['title'], target_title):
target = pywikibot.Page(self, pagedata['title'],
pagedata['ns'])
@@ -1450,7 +1450,7 @@
# only use pageids if all pages have them
rvgen.request["pageids"] = "|".join(pageids)
else:
- rvgen.request["titles"] = "|".join(cache.keys())
+ rvgen.request["titles"] =
"|".join(list(cache.keys()))
rvgen.request[u"rvprop"] =
u"ids|flags|timestamp|user|comment|content"
pywikibot.output(u"Retrieving %s pages from %s."
% (len(cache), self))
@@ -1477,7 +1477,7 @@
except KeyError:
pywikibot.debug(u"No 'title' in %s" % pagedata,
_logger)
pywikibot.debug(u"pageids=%s" % pageids, _logger)
- pywikibot.debug(u"titles=%s" % cache.keys(), _logger)
+                pywikibot.debug(u"titles=%s" % list(cache.keys()), _logger)
continue
page = cache[pagedata['title']]
api.update_page(page, pagedata)
@@ -1559,7 +1559,7 @@
namespaces=namespaces,
content=content
)
- return itertools.chain(*genlist.values())
+ return itertools.chain(*list(genlist.values()))
return blgen
def page_embeddedin(self, page, filterRedirects=None, namespaces=None,
@@ -2429,7 +2429,7 @@
if where not in ("text", "titles"):
raise Error("search: unrecognized 'where' value: %s" %
where)
if namespaces == []:
- namespaces = [ns for ns in self.namespaces().keys() if ns >= 0]
+ namespaces = [ns for ns in list(self.namespaces().keys()) if ns >= 0]
if not namespaces:
pywikibot.warning(u"search: namespaces cannot be empty; using
[0].")
namespaces = [0]
@@ -2960,7 +2960,7 @@
% page.title(asLink=True))
last_rev = page._revisions[page.latestRevision()]
last_user = last_rev.user
- for rev in sorted(page._revisions.keys(), reverse=True):
+ for rev in sorted(list(page._revisions.keys()), reverse=True):
# start with most recent revision first
if rev.user != last_user:
prev_user = rev.user
@@ -3263,7 +3263,7 @@
result = result["upload"]
pywikibot.debug(result, _logger)
if "warnings" in result:
- warning = result["warnings"].keys()[0]
+ warning = list(result["warnings"].keys())[0]
message = result["warnings"][warning]
raise pywikibot.UploadWarning(upload_warnings[warning]
% {'msg': message})
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 4459b1c..0cdd3ba 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -90,7 +90,7 @@
# also finds links to foreign sites with preleading ":"
'interwiki': re.compile(r'(?i)\[\[:?(%s)\s?:[^\]]*\]\][\s]*'
% '|'.join(site.validLanguageLinks() +
- site.family.obsolete.keys())),
+ list(site.family.obsolete.keys()))),
# Wikidata property inclusions
'property':
re.compile(r'(?i)\{\{\s*#property:\s*p\d+\s*\}\}'),
# Module invocations (currently only Lua)
@@ -275,7 +275,7 @@
'syntaxhighlight': r'<syntaxhighlight
.*?</syntaxhighlight>',
}
if '*' in tags:
- tags = regexes.keys()
+ tags = list(regexes.keys())
# add alias
tags = set(tags)
if 'source' in tags:
@@ -430,7 +430,7 @@
# language, or if it's e.g. a category tag or an internal link
if lang in fam.obsolete:
lang = fam.obsolete[lang]
- if lang in fam.langs.keys():
+ if lang in list(fam.langs.keys()):
if '|' in pagetitle:
# ignore text after the pipe
pagetitle = pagetitle[:pagetitle.index('|')]
@@ -462,7 +462,7 @@
# This regular expression will find every interwiki link, plus trailing
# whitespace.
languages = '|'.join(site.validLanguageLinks() +
- site.family.obsolete.keys())
+ list(site.family.obsolete.keys()))
interwikiR = re.compile(r'\[\[(%s)\s?:[^\[\]\n]*\]\][\s]*'
% languages, re.IGNORECASE)
text = replaceExcept(text, interwikiR, '',
@@ -595,7 +595,7 @@
if not links:
return ''
- ar = interwikiSort(links.keys(), insite)
+ ar = interwikiSort(list(links.keys()), insite)
s = []
for site in ar:
try:
diff --git a/pywikibot/titletranslate.py b/pywikibot/titletranslate.py
index 9e1d15d..6ef5f0d 100644
--- a/pywikibot/titletranslate.py
+++ b/pywikibot/titletranslate.py
@@ -94,7 +94,7 @@
pywikibot.output(
u'TitleTranslate: %s was recognized as %s with value %d'
% (page.title(), dictName, value))
- for entryLang, entry in date.formats[dictName].iteritems():
+ for entryLang, entry in date.formats[dictName].items():
if entryLang != page.site.code:
if (dictName == 'yearsBC' and
entryLang in date.maxyearBC and
@@ -118,7 +118,7 @@
def appendFormatedDates(result, dictName, value):
- for code, func in date.formats[dictName].iteritems():
+ for code, func in date.formats[dictName].items():
result.append(u'[[%s:%s]]' % (code, func(value)))
diff --git a/pywikibot/userinterfaces/terminal_interface_unix.py
b/pywikibot/userinterfaces/terminal_interface_unix.py
index 0ba718c..e8e7246 100755
--- a/pywikibot/userinterfaces/terminal_interface_unix.py
+++ b/pywikibot/userinterfaces/terminal_interface_unix.py
@@ -41,7 +41,7 @@
def printColorized(self, text, targetStream):
lastColor = None
totalcount = 0
- for key, value in unixColors.iteritems():
+ for key, value in unixColors.items():
ckey = '\03{%s}' % key
totalcount += text.count(ckey)
text = text.replace(ckey, value)
diff --git a/pywikibot/userinterfaces/terminal_interface_win32.py
b/pywikibot/userinterfaces/terminal_interface_win32.py
index 3b8cea1..d315fa7 100755
--- a/pywikibot/userinterfaces/terminal_interface_win32.py
+++ b/pywikibot/userinterfaces/terminal_interface_win32.py
@@ -35,7 +35,7 @@
'white': 15,
}
-colorTagR = re.compile('\03{(?P<name>%s)}' % '|'.join(windowsColors.keys()))
+colorTagR = re.compile('\03{(?P<name>%s)}' % '|'.join(list(windowsColors.keys())))
# Compat for python <= 2.5
--
To view, visit
https://gerrit.wikimedia.org/r/97877
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ia3e5160409afe3e18e6f27d76f77daf7f57cbfd8
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Aaron1011 <aa1ronham(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: jenkins-bot