jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/423343 )
Change subject: [cleanup] enable C401, C402, C405 checks after py2.6 has been dropped
......................................................................
[cleanup] enable C401, C402, C405 checks after py2.6 has been dropped
Change-Id: I2e0beeba198c004900f70a622985b3f049524fd6
---
M pywikibot/comms/http.py
M pywikibot/config2.py
M pywikibot/data/api.py
M pywikibot/data/wikistats.py
M pywikibot/date.py
M pywikibot/diff.py
M pywikibot/family.py
M pywikibot/interwiki_graph.py
M pywikibot/page.py
M pywikibot/site.py
M pywikibot/tools/__init__.py
M pywikibot/tools/formatter.py
M pywikibot/version.py
M scripts/casechecker.py
M scripts/checkimages.py
M scripts/interwiki.py
M scripts/nowcommons.py
M scripts/patrol.py
M scripts/protect.py
M scripts/redirect.py
M scripts/replicate_wiki.py
M scripts/shell.py
M tests/api_tests.py
M tests/archivebot_tests.py
M tests/aspects.py
M tests/family_tests.py
M tests/interwiki_graph_tests.py
M tests/namespace_tests.py
M tests/page_tests.py
M tests/pagegenerators_tests.py
M tests/paraminfo_tests.py
M tests/proofreadpage_tests.py
M tests/script_tests.py
M tests/site_tests.py
M tests/sparql_tests.py
M tests/textlib_tests.py
M tests/tools_tests.py
M tests/utils.py
M tox.ini
39 files changed, 236 insertions(+), 243 deletions(-)
Approvals:
  Dalba: Looks good to me, approved
  jenkins-bot: Verified
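(Background for readers: C401, C402 and C405 come from the flake8-comprehensions plugin and flag constructs that were only necessary before Python 2.7, which introduced set literals and set/dict comprehensions. A minimal sketch of the three rewrites applied throughout this change, using hypothetical names:

    # C401: set() around a generator expression -> set comprehension
    names = set(user.name for user in users)   # flagged
    names = {user.name for user in users}      # fixed

    # C402: dict() around a generator of key/value pairs -> dict comprehension
    ages = dict((user.name, user.age) for user in users)  # flagged
    ages = {user.name: user.age for user in users}        # fixed

    # C405: set() around a list literal -> set literal
    flags = set(['watch', 'unwatch'])  # flagged
    flags = {'watch', 'unwatch'}       # fixed

Note that an empty set still has to be written as set(), because {} is an empty dict; that is why occurrences such as params = set() are left unchanged in the diff below.)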
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index d98ab8a..0ac5187 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -364,7 +364,7 @@
    body = http_request.body
    headers = http_request.headers
    if PY2 and headers:
-        headers = dict((key, str(value)) for key, value in headers.items())
+        headers = {key: str(value) for key, value in headers.items()}
    auth = get_authentication(uri)
    if auth is not None and len(auth) == 4:
        if isinstance(requests_oauthlib, ImportError):
diff --git a/pywikibot/config2.py b/pywikibot/config2.py
index 7e5436b..35ede44 100644
--- a/pywikibot/config2.py
+++ b/pywikibot/config2.py
@@ -983,8 +983,8 @@

# System-level and User-level changes.
# Store current variables and their types.
-_glv = dict((_key, _val) for _key, _val in globals().items()
-            if _key[0] != '_' and _key not in _imports)
+_glv = {_key: _val for _key, _val in globals().items()
+        if _key[0] != '_' and _key not in _imports}
_gl = list(_glv.keys())
_tp = {}
for _key in _gl:
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index e1c41ae..278ff81 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -264,8 +264,9 @@

        # v1.18 and earlier paraminfo doesnt include modules; must use 'query'
        # Assume that by v1.26, it will be desirable to prefetch 'query'
-        if _mw_ver > MediaWikiVersion('1.26') or _mw_ver < MediaWikiVersion('1.19'):
-            self.preloaded_modules |= set(['query'])
+        if _mw_ver > MediaWikiVersion('1.26') \
+                or _mw_ver < MediaWikiVersion('1.19'):
+            self.preloaded_modules |= {'query'}
self._fetch(self.preloaded_modules)
@@ -294,7 +295,7 @@
        if 'query' not in self._modules:
            assert 'query' not in self._paraminfo
-            self._fetch(set(['query']))
+            self._fetch({'query'})
            assert 'query' in self._modules

    def _emulate_pageset(self):
@@ -621,7 +622,7 @@
        if 'query' not in self._paraminfo:
            pywikibot.debug('paraminfo batch: added query', _logger)
            module_batch.append('query')
-            self.preloaded_modules |= set(['query'])
+            self.preloaded_modules |= {'query'}

        params = {
            'action': 'paraminfo',
@@ -709,9 +710,9 @@
            # Boolean submodule info added to MW API in afa153ae
            if self.site.version() < MediaWikiVersion('1.24wmf18'):
                if module == 'main':
-                    params = set(['action'])
+                    params = {'action'}
                elif module == 'query':
-                    params = set(['prop', 'list', 'meta'])
+                    params = {'prop', 'list', 'meta'}
                else:
                    params = set()
            for param in parameters:
@@ -745,11 +746,11 @@
assert(self._action_modules)
-        return set('query+' + mod if '+' not in mod and
-                   mod in self.query_modules and
-                   mod not in self._action_modules
-                   else mod
-                   for mod in modules)
+        return {'query+' + mod
+                if '+' not in mod and mod in self.query_modules
+                and mod not in self._action_modules
+                else mod
+                for mod in modules}

    def normalize_modules(self, modules):
        """
@@ -833,7 +834,7 @@
        If the key does not include a '+' and is not present in the
        top level of the API, it will fallback to looking for the
        key 'query+x'.
        """
-        self.fetch(set([key]))
+        self.fetch({key})
        if key in self._paraminfo:
            return self._paraminfo[key]
        elif '+' not in key:
@@ -959,7 +960,7 @@
    @staticmethod
    def _prefix_submodules(modules, prefix):
        """Prefix submodules with path."""
-        return set('{0}+{1}'.format(prefix, mod) for mod in modules)
+        return {'{0}+{1}'.format(prefix, mod) for mod in modules}

    @property
    @deprecated('prefix_map')
@@ -981,9 +982,10 @@
        This loads paraminfo for all modules.
        """
        if not self._prefix_map:
-            self._prefix_map = dict((module, prefix) for module, prefix in
-                                    self.attributes('prefix').items()
-                                    if prefix)
+            self._prefix_map = {module: prefix
+                                for module, prefix
+                                in self.attributes('prefix').items()
+                                if prefix}
        return self._prefix_map.copy()

    def attributes(self, attribute, modules=None):
@@ -1004,9 +1006,8 @@
            modules = self.module_paths
        self.fetch(modules)

-        return dict((mod, self[mod][attribute])
-                    for mod in modules
-                    if attribute in self[mod])
+        return {mod: self[mod][attribute]
+                for mod in modules if attribute in self[mod]}

    @deprecated('attributes')
    def module_attribute_map(self, attribute, modules=None):
@@ -1027,9 +1028,8 @@
self.fetch(modules)
-        return dict((mod, self[mod][attribute])
-                    for mod in modules
-                    if self[mod][attribute])
+        return {mod: self[mod][attribute]
+                for mod in modules if self[mod][attribute]}

    @property
    @deprecated('parameter()')
@@ -1517,21 +1517,21 @@
        else:
            raise ValueError('Request was not a super class of '
                             '{0!r}'.format(cls))
-        args -= set(['self'])
+        args -= {'self'}
        old_kwargs = set(kwargs)
        # all kwargs defined above but not in args indicate 'kwargs' mode
        if old_kwargs - args:
            # Move all kwargs into parameters
-            parameters = dict((name, value) for name, value in kwargs.items()
-                              if name not in args or name == 'parameters')
+            parameters = {name: value for name, value in kwargs.items()
+                          if name not in args or name == 'parameters'}
            if 'parameters' in parameters:
                cls._warn_both()
            # Copy only arguments and not the parameters
-            kwargs = dict((name, value) for name, value in kwargs.items()
-                          if name in args or name == 'self')
+            kwargs = {name: value for name, value in kwargs.items()
+                      if name in args or name == 'self'}
            kwargs['parameters'] = parameters
            # Make sure that all arguments have remained
-            assert(old_kwargs | set(['parameters']) ==
+            assert(old_kwargs | {'parameters'} ==
                   set(kwargs) | set(kwargs['parameters']))
            assert(('parameters' in old_kwargs) is
                   ('parameters' in kwargs['parameters']))
@@ -1901,7 +1901,7 @@
            for mod_type_name in ('list', 'prop', 'generator'):
                modules.update(self._params.get(mod_type_name, []))
        else:
-            modules = set([self.action])
+            modules = {self.action}
        if modules:
            self.site._paraminfo.fetch(modules)
        use_get = all('mustbeposted' not in self.site._paraminfo[mod]
@@ -2138,8 +2138,8 @@
        if code == 'badtoken':
            user_tokens = self.site.tokens._tokens[self.site.user()]
            # all token values mapped to their type
-            tokens = dict((token, t_type)
-                          for t_type, token in user_tokens.items())
+            tokens = {token: t_type
+                      for t_type, token in user_tokens.items()}
            # determine which tokens are bad
            invalid_param = {}
            for name, param in self._params.items():
@@ -2552,9 +2552,9 @@
self.site._paraminfo.fetch('query+' + mod for mod in self.modules)
-        limited_modules = set(
-            mod for mod in self.modules
-            if self.site._paraminfo.parameter('query+' + mod, 'limit'))
+        limited_modules = {mod for mod in self.modules
+                           if self.site._paraminfo.parameter('query+' + mod,
+                                                             'limit')}

        if not limited_modules:
            self.limited_module = None
@@ -2810,9 +2810,9 @@
                           self.limit), _logger)
        if "normalized" in self.data["query"]:
-            self.normalized = dict((item['to'], item['from'])
-                                   for item in
-                                   self.data["query"]["normalized"])
+            self.normalized = {
+                item['to']: item['from']
+                for item in self.data['query']['normalized']}
        else:
            self.normalized = {}
        for item in resultdata:
diff --git a/pywikibot/data/wikistats.py b/pywikibot/data/wikistats.py
index 44cc297..adeef39 100644
--- a/pywikibot/data/wikistats.py
+++ b/pywikibot/data/wikistats.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Objects representing WikiStats API."""
#
-# (C) Pywikibot team, 2014-2017
+# (C) Pywikibot team, 2014-2018
#
# Distributed under the terms of the MIT license.
from __future__ import absolute_import, unicode_literals
@@ -45,12 +45,12 @@
MISC_SITES_TABLE = 'mediawikis'
-    WMF_MULTILANG_TABLES = set([
+    WMF_MULTILANG_TABLES = {
        'wikipedias', 'wiktionaries', 'wikisources', 'wikinews',
        'wikibooks', 'wikiquotes', 'wikivoyage', 'wikiversity',
-    ])
+    }

-    OTHER_MULTILANG_TABLES = set([
+    OTHER_MULTILANG_TABLES = {
        'uncyclomedia',
        'rodovid',
        'wikifur',
@@ -61,9 +61,9 @@
        'lxde',
        'pardus',
        'gentoo',
-    ])
+    }

-    OTHER_TABLES = set([
+    OTHER_TABLES = {
        # Farms
        'wikia',
        'wikkii',
@@ -79,9 +79,9 @@
        'w3cwikis',
        'neoseeker',
        'sourceforge',
-    ])
+    }

-    ALL_TABLES = (set([MISC_SITES_TABLE]) | WMF_MULTILANG_TABLES |
+    ALL_TABLES = ({MISC_SITES_TABLE} | WMF_MULTILANG_TABLES |
                  OTHER_MULTILANG_TABLES | OTHER_TABLES)

    ALL_KEYS = set(FAMILY_MAPPING.keys()) | ALL_TABLES
@@ -222,8 +222,7 @@
        @type format: 'xml' or 'csv', or None to autoselect.
        @rtype: dict
        """
-        return dict((data['prefix'], data)
-                    for data in self.get(table, format))
+        return {data['prefix']: data for data in self.get(table, format)}

    def sorted(self, table, key):
        """
diff --git a/pywikibot/date.py b/pywikibot/date.py
index b6c45ab..a9db80a 100644
--- a/pywikibot/date.py
+++ b/pywikibot/date.py
@@ -7,7 +7,7 @@
# (C) Andre Engels, 2004-2005
# (C) Yuri Astrakhan, 2005-2006 (<Firstname><Lastname>@gmail.com)
#       (years/decades/centuries/millenniums str <=> int conversions)
-# (C) Pywikibot team, 2004-2017
+# (C) Pywikibot team, 2004-2018
#
# Distributed under the terms of the MIT license.
#
@@ -261,28 +261,28 @@
# Helper for KN: digits representation
_knDigits = u'೦೧೨೩೪೫೬೭೮೯'
-_knDigitsToLocal = dict((ord(unicode(i)), _knDigits[i]) for i in range(10))
-_knLocalToDigits = dict((ord(_knDigits[i]), unicode(i)) for i in range(10))
+_knDigitsToLocal = {ord(unicode(i)): _knDigits[i] for i in range(10)}
+_knLocalToDigits = {ord(_knDigits[i]): unicode(i) for i in range(10)}

# Helper for Urdu/Persian languages
_faDigits = u'۰۱۲۳۴۵۶۷۸۹'
-_faDigitsToLocal = dict((ord(unicode(i)), _faDigits[i]) for i in range(10))
-_faLocalToDigits = dict((ord(_faDigits[i]), unicode(i)) for i in range(10))
+_faDigitsToLocal = {ord(unicode(i)): _faDigits[i] for i in range(10)}
+_faLocalToDigits = {ord(_faDigits[i]): unicode(i) for i in range(10)}

# Helper for HI:, MR:
_hiDigits = u'०१२३४५६७८९'
-_hiDigitsToLocal = dict((ord(unicode(i)), _hiDigits[i]) for i in range(10))
-_hiLocalToDigits = dict((ord(_hiDigits[i]), unicode(i)) for i in range(10))
+_hiDigitsToLocal = {ord(unicode(i)): _hiDigits[i] for i in range(10)}
+_hiLocalToDigits = {ord(_hiDigits[i]): unicode(i) for i in range(10)}

# Helper for BN:
_bnDigits = u'০১২৩৪৫৬৭৮৯'
-_bnDigitsToLocal = dict((ord(unicode(i)), _bnDigits[i]) for i in range(10))
-_bnLocalToDigits = dict((ord(_bnDigits[i]), unicode(i)) for i in range(10))
+_bnDigitsToLocal = {ord(unicode(i)): _bnDigits[i] for i in range(10)}
+_bnLocalToDigits = {ord(_bnDigits[i]): unicode(i) for i in range(10)}

# Helper for GU:
_guDigits = u'૦૧૨૩૪૫૬૭૮૯'
-_guDigitsToLocal = dict((ord(unicode(i)), _guDigits[i]) for i in range(10))
-_guLocalToDigits = dict((ord(_guDigits[i]), unicode(i)) for i in range(10))
+_guDigitsToLocal = {ord(unicode(i)): _guDigits[i] for i in range(10)}
+_guLocalToDigits = {ord(_guDigits[i]): unicode(i) for i in range(10)}
def intToLocalDigitsStr(value, digitsToLocalDict):
diff --git a/pywikibot/diff.py b/pywikibot/diff.py
index 95669e9..4556c1f 100644
--- a/pywikibot/diff.py
+++ b/pywikibot/diff.py
@@ -234,7 +234,7 @@

    @property
    def reviewed(self):
-        assert len(set(hunk.reviewed for hunk in self._hunks)) == 1, \
+        assert len({hunk.reviewed for hunk in self._hunks}) == 1, \
            'All hunks should have the same review status'
        return self._hunks[0].reviewed

diff --git a/pywikibot/family.py b/pywikibot/family.py
index 632aff5..ff18c20 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -1430,8 +1430,7 @@
        @return: mapping of old codes to new codes (or None)
        @rtype: dict
        """
-        data = dict((code, None)
-                    for code in self.interwiki_removals)
+        data = {code: None for code in self.interwiki_removals}
        data.update(self.interwiki_replacements)
        return FrozenDict(data,
                          'Family.obsolete not updatable; '
@@ -1506,8 +1505,8 @@
        if hasattr(self, 'test_codes'):
            codes = codes + self.test_codes

-        self.langs = dict(
-            (code, '%s.%s' % (code, self.domain)) for code in codes)
+        self.langs = {code: '{0}.{1}'.format(code, self.domain)
+                      for code in codes}
super(SubdomainFamily, self).__init__()
diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py
index e14797b..00166ed 100644
--- a/pywikibot/interwiki_graph.py
+++ b/pywikibot/interwiki_graph.py
@@ -157,9 +157,9 @@
        each_site = [page.site for page in page_list
                     if page.exists() and not page.isRedirectPage()]

-        return set(x[0] for x in itertools.takewhile(
+        return {x[0] for x in itertools.takewhile(
            lambda x: x[1] > 1,
-            Counter(each_site).most_common()))
+            Counter(each_site).most_common())}

    def addNode(self, page):
        """Add a node for page."""
diff --git a/pywikibot/page.py b/pywikibot/page.py
index fa29cf4..6b4d8cf 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -970,7 +970,7 @@
            try:
                default = set(self.site.family.disambig('_default'))
            except KeyError:
-                default = set([u'Disambig'])
+                default = {'Disambig'}
            try:
                distl = self.site.family.disambig(self.site.code,
                                                  fallback=False)
@@ -980,25 +980,22 @@
                disambigpages = Page(self.site,
                                     "MediaWiki:Disambiguationspage")
                if disambigpages.exists():
-                    disambigs = set(link.title(withNamespace=False)
-                                    for link in disambigpages.linkedPages()
-                                    if link.namespace() == 10)
+                    disambigs = {link.title(withNamespace=False)
+                                 for link in disambigpages.linkedPages()
+                                 if link.namespace() == 10}
                elif self.site.has_mediawiki_message('disambiguationspage'):
                    message = self.site.mediawiki_message(
                        'disambiguationspage').split(':', 1)[1]
                    # add the default template(s) for default mw message
                    # only
-                    disambigs = set([first_upper(message)]) | default
+                    disambigs = {first_upper(message)} | default
                else:
                    disambigs = default
                self.site._disambigtemplates = disambigs
            else:
                # Normalize template capitalization
-                self.site._disambigtemplates = set(
-                    first_upper(t) for t in distl
-                )
-        templates = set(tl.title(withNamespace=False)
-                        for tl in self.templates())
+                self.site._disambigtemplates = {first_upper(t) for t in distl}
+        templates = {tl.title(withNamespace=False) for tl in self.templates()}
        disambigs = set()
        # always use cached disambig templates
        disambigs.update(self.site._disambigtemplates)
@@ -1118,7 +1115,7 @@

        p_types = set(self.site.protection_types())
        if not self.exists():
-            return set(['create']) if 'create' in p_types else set()
+            return {'create'} if 'create' in p_types else set()
        else:
            p_types.remove('create')  # no existing page allows that
            if not self.is_filepage():  # only file pages allow upload
@@ -2059,8 +2056,8 @@
        if unprotect:
            warn(u'"unprotect" argument of protect() is deprecated',
                 DeprecationWarning, 2)
-            protections = dict(
-                (p_type, "") for p_type in self.applicable_protections())
+            protections = {p_type: ''
+                           for p_type in self.applicable_protections()}
        answer = 'y'
        if called_using_deprecated_arg and prompt is None:
            prompt = True
@@ -2219,7 +2216,7 @@
    def getRestrictions(self):
        """DEPRECATED. Use self.protection() instead."""
        restrictions = self.protection()
-        return dict((k, list(restrictions[k])) for k in restrictions)
+        return {k: list(restrictions[k]) for k in restrictions}
    def __getattr__(self, name):
        """Generic disabled method warnings."""
@@ -5944,10 +5941,10 @@
        158: 382,   # ž
        159: 376    # Ÿ
    }
-    # ensuring that illegal &#129; and &#141;, which have no known values,
-    # don't get converted to chr(129), chr(141) or chr(157)
-    ignore = (set(map(lambda x: convertIllegalHtmlEntities.get(x, x), ignore)) |
-              set([129, 141, 157]))
+    # ensuring that illegal &#129; and &#141;, which have no known
+    # values, don't get converted to chr(129), chr(141) or chr(157)
+    ignore = (set(map(lambda x: convertIllegalHtmlEntities.get(x, x),
+                      ignore)) | {129, 141, 157})
    def handle_entity(match):
        if match.group('decimal'):
diff --git a/pywikibot/site.py b/pywikibot/site.py
index e952892..050d8be 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -419,9 +419,8 @@
    @classmethod
    def builtin_namespaces(cls, use_image_name=False, case='first-letter'):
        """Return a dict of the builtin namespaces."""
-        return dict((i, cls(i, use_image_name=use_image_name,
-                            case=cls.default_case(i, case)))
-                    for i in range(-2, 16))
+        return {i: cls(i, use_image_name=use_image_name,
+                       case=cls.default_case(i, case)) for i in range(-2, 16)}

    @staticmethod
    def normalize_name(name):
@@ -692,8 +691,8 @@
        # _iw_sites is a local cache to return a APISite instance depending
        # on the interwiki prefix of that site
        if self._map is None:
-            self._map = dict((iw['prefix'], _IWEntry('local' in iw, iw['url']))
-                             for iw in self._site.siteinfo['interwikimap'])
+            self._map = {iw['prefix']: _IWEntry('local' in iw, iw['url'])
+                         for iw in self._site.siteinfo['interwikimap']}
        return self._map

    def __getitem__(self, prefix):
@@ -710,8 +709,8 @@

    def get_by_url(self, url):
        """Return a set of prefixes applying to the URL."""
-        return set(prefix for prefix, iw_entry in self._iw_sites
-                   if iw_entry.url == url)
+        return {prefix for prefix, iw_entry in self._iw_sites
+                if iw_entry.url == url}

class BaseSite(ComparableMixin):
@@ -2252,9 +2251,9 @@
            self._useroptions['_name'] = (
                None if 'anon' in uidata['query']['userinfo'] else
                uidata['query']['userinfo']['name'])
-        return set(ns for ns in self.namespaces.values() if ns.id >= 0 and
-                   self._useroptions['searchNs{0}'.format(ns.id)]
-                   in ['1', True])
+        return {ns for ns in self.namespaces.values() if ns.id >= 0
+                and self._useroptions['searchNs{0}'.format(ns.id)]
+                in ['1', True]}

    @property
    def article_path(self):
@@ -2385,7 +2384,7 @@
            raise KeyError("Site %s has no message '%s'" % (self, key))

-        return dict((_key, self._msgcache[_key]) for _key in keys)
+        return {_key: self._msgcache[_key] for _key in keys}

    @deprecated_args(forceReload=None)
    def mediawiki_message(self, key):
@@ -2565,8 +2564,8 @@
        """Return list of localized "word" magic words for the site."""
        if not hasattr(self, "_magicwords"):
            magicwords = self.siteinfo.get("magicwords", cache=False)
-            self._magicwords = dict((item["name"], item["aliases"])
-                                    for item in magicwords)
+            self._magicwords = {item['name']: item['aliases']
+                                for item in magicwords}

        if word in self._magicwords:
            return self._magicwords[word]
@@ -2596,8 +2595,7 @@
        """
        # NOTE: this is needed, since the API can give false positives!
        try:
-            keywords = set(s.lstrip("#")
-                           for s in self.getmagicwords("redirect"))
+            keywords = {s.lstrip('#') for s in self.getmagicwords('redirect')}
            keywords.add("REDIRECT")  # just in case
            pattern = "(?:" + "|".join(keywords) + ")"
        except KeyError:
@@ -3181,12 +3179,13 @@
                "getredirtarget: No 'redirects' found for page %s."
                % title.encode(self.encoding()))

-        redirmap = dict((item['from'],
-                         {'title': item['to'],
-                          'section': u'#' + item['tofragment']
-                          if 'tofragment' in item and item['tofragment']
-                          else ''})
-                        for item in result['query']['redirects'])
+        redirmap = {item['from']: {'title': item['to'],
+                                   'section': '#'
+                                   + item['tofragment']
+                                   if 'tofragment' in item
+                                   and item['tofragment']
+                                   else ''}
+                    for item in result['query']['redirects']}
        # Normalize title
        for item in result['query'].get('normalized', []):
@@ -3491,7 +3490,7 @@
        query = api.PropertyGenerator(
            'info',
            titles='Dummy page',
-            intoken=valid_tokens - set(['patrol']),
+            intoken=valid_tokens - {'patrol'},
            site=self)
        query.request._warning_handler = warn_handler

@@ -3547,9 +3546,9 @@
            data = data['query']
        if 'tokens' in data and data['tokens']:
-            user_tokens = dict((key[:-5], val)
-                               for key, val in data['tokens'].items()
-                               if val != '+\\')
+            user_tokens = {key[:-5]: val
+                           for key, val in data['tokens'].items()
+                           if val != '+\\'}
return user_tokens
@@ -3873,7 +3872,7 @@
                "categorymembers: startsort must be less than endsort")

        if isinstance(member_type, basestring):
-            member_type = set([member_type])
+            member_type = {member_type}

        if (member_type and
                (sortby == 'timestamp' or
@@ -5029,7 +5028,7 @@
        "cascadeprotected": CascadeLockedPage,
        'titleblacklist-forbidden': TitleblacklistError,
    }
-    _ep_text_overrides = set(['appendtext', 'prependtext', 'undo'])
+    _ep_text_overrides = {'appendtext', 'prependtext', 'undo'}

    @must_be(group='user')
    def editpage(self, page, summary=None, minor=True, notminor=False,
@@ -5120,7 +5119,7 @@
        if basetimestamp and 'basetimestamp' not in kwargs:
            params['basetimestamp'] = basetimestamp

-        watch_items = set(["watch", "unwatch", "preferences", "nochange"])
+        watch_items = {'watch', 'unwatch', 'preferences', 'nochange'}
        if watch in watch_items:
            if MediaWikiVersion(self.version()) < MediaWikiVersion("1.16"):
                if watch in ['preferences', 'nochange']:
@@ -5739,11 +5738,11 @@
            raise Error('No rcid, revid or revision provided.')

        if isinstance(rcid, int) or isinstance(rcid, basestring):
-            rcid = set([rcid])
+            rcid = {rcid}
        if isinstance(revid, int) or isinstance(revid, basestring):
-            revid = set([revid])
+            revid = {revid}
        if isinstance(revision, pywikibot.page.Revision):
-            revision = set([revision])
+            revision = {revision}

        # Handle param=None.
        rcid = rcid or set()
@@ -5757,7 +5756,7 @@
                u'Support of "revid" parameter\n'
                u'is not implemented in MediaWiki version < "1.22"')
        else:
-            combined_revid = set(revid) | set(r.revid for r in revision)
+            combined_revid = set(revid) | {r.revid for r in revision}

        gen = itertools.chain(
            zip_longest(rcid, [], fillvalue='rcid'),
@@ -7507,9 +7506,9 @@
    # Only separated from get_item to avoid the deprecation message via
    # _get_propertyitem
    def _get_item(self, source, **params):
-        assert set(params) <= set(['props']), \
-            'Only "props" is a valid kwarg, not {0}'.format(set(params) -
-                                                            set(['props']))
+        assert set(params) <= {'props'}, \
+            'Only "props" is a valid kwarg, not {0}'.format(set(params) -
+                                                            {'props'})
        if isinstance(source, int) or \
           isinstance(source, basestring) and source.isdigit():
            ids = 'q' + str(source)
@@ -7902,7 +7901,7 @@
        for claim in claims:
            baserevid = self._get_baserevid(claim, baserevid)

-        items = set(claim.on_item for claim in claims if claim.on_item)
+        items = {claim.on_item for claim in claims if claim.on_item}
        assert len(items) == 1

        params = {
diff --git a/pywikibot/tools/__init__.py b/pywikibot/tools/__init__.py
index 7438303..95eae41 100644
--- a/pywikibot/tools/__init__.py
+++ b/pywikibot/tools/__init__.py
@@ -1545,8 +1545,8 @@
            deprecated.update(arg_names[:len(__args) - len(args)])
            # remove at most |arg_names| entries from the back
            new_args = tuple(__args[:max(len(args), len(__args) - len(arg_names))])
-            new_kwargs = dict((arg, val) for arg, val in __kw.items()
-                              if arg not in arg_names)
+            new_kwargs = {arg: val for arg, val in __kw.items()
+                          if arg not in arg_names}

        if deprecated:
            # sort them according to arg_names
diff --git a/pywikibot/tools/formatter.py b/pywikibot/tools/formatter.py
index b2b942b..8050725 100644
--- a/pywikibot/tools/formatter.py
+++ b/pywikibot/tools/formatter.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Module containing various formatting related utilities."""
#
-# (C) Pywikibot team, 2015-2017
+# (C) Pywikibot team, 2015-2018
#
# Distributed under the terms of the MIT license.
#
@@ -67,7 +67,7 @@
    colors = set(colors)
    # Dot.product of colors to create all possible combinations of foreground
    # and background colors.
-    colors |= set('%s;%s' % (c1, c2) for c1 in colors for c2 in colors)
+    colors |= {'{0};{1}'.format(c1, c2) for c1 in colors for c2 in colors}

    def get_value(self, key, args, kwargs):
        """Get value, filling in 'color' when it is a valid color."""
diff --git a/pywikibot/version.py b/pywikibot/version.py
index 77f4c34..9621679 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -495,11 +495,11 @@
std_lib_dir = get_python_lib(standard_lib=True)
- root_packages = set(key.split('.')[0] for key in modules) + root_packages = {key.split('.')[0] for key in modules}
- builtin_packages = set(name.split('.')[0] for name in root_packages - if name in sys.builtin_module_names or - '_' + name in sys.builtin_module_names) + builtin_packages = {name.split('.')[0] for name in root_packages + if name in sys.builtin_module_names + or '_' + name in sys.builtin_module_names}
    # Improve performance by removing builtins from the list if possible.
    if builtins is False:
diff --git a/scripts/casechecker.py b/scripts/casechecker.py
index 0ed8f82..92aa7a6 100755
--- a/scripts/casechecker.py
+++ b/scripts/casechecker.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
"""Bot to find all pages on the wiki with mixed latin and cyrilic alphabets."""
#
-# (C) Pywikibot team, 2006-2017
+# (C) Pywikibot team, 2006-2018
#
# Distributed under the terms of the MIT license.
#
@@ -171,20 +171,20 @@
            self.titleList = [self.Page(t) for t in f]
            self.failedTitles += '.failed'

-        self.lclToLatDict = dict(
-            (ord(self.localSuspects[i]), self.latinSuspects[i])
-            for i in xrange(len(self.localSuspects)))
-        self.latToLclDict = dict(
-            (ord(self.latinSuspects[i]), self.localSuspects[i])
-            for i in xrange(len(self.localSuspects)))
+        self.lclToLatDict = {
+            ord(self.localSuspects[i]): self.latinSuspects[i]
+            for i in xrange(len(self.localSuspects))}
+        self.latToLclDict = {
+            ord(self.latinSuspects[i]): self.localSuspects[i]
+            for i in xrange(len(self.localSuspects))}

        if self.localKeyboard is not None:
-            self.lclToLatKeybDict = dict(
-                (ord(self.localKeyboard[i]), self.latinKeyboard[i])
-                for i in xrange(len(self.localKeyboard)))
-            self.latToLclKeybDict = dict(
-                (ord(self.latinKeyboard[i]), self.localKeyboard[i])
-                for i in xrange(len(self.localKeyboard)))
+            self.lclToLatKeybDict = {
+                ord(self.localKeyboard[i]): self.latinKeyboard[i]
+                for i in xrange(len(self.localKeyboard))}
+            self.latToLclKeybDict = {
+                ord(self.latinKeyboard[i]): self.localKeyboard[i]
+                for i in xrange(len(self.localKeyboard))}
        else:
            self.lclToLatKeybDict = {}
            self.latToLclKeybDict = {}
@@ -464,9 +464,9 @@
        badWords = list(self.FindBadWords(title))
        if len(badWords) > 0:
            # Allow known words, allow any roman numerals with local suffixes
-            badWords = set(i for i in badWords
-                           if i not in self.knownWords and
-                           self.romanNumSfxPtrn.match(i) is not None)
+            badWords = {i for i in badWords
+                        if i not in self.knownWords
+                        and self.romanNumSfxPtrn.match(i) is not None}
            if len(badWords) == 0 or self.Page(title).is_filepage():
                return
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 02b340e..3beb321 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -558,9 +558,9 @@
        self.list_entry = '\n* [[:{0}%s]] '.format(self.image_namespace)
        self.com = i18n.translate(self.site, msg_comm10, fallback=True)
        hiddentemplatesRaw = i18n.translate(self.site, HiddenTemplate)
-        self.hiddentemplates = set(
+        self.hiddentemplates = {
            pywikibot.Page(self.site, tmp, ns=self.site.namespaces.TEMPLATE)
-            for tmp in hiddentemplatesRaw)
+            for tmp in hiddentemplatesRaw}
        self.pageHidden = i18n.translate(self.site, PageWithHiddenTemplates)
        self.pageAllowed = i18n.translate(self.site, PageWithAllowedTemplates)
        self.comment = i18n.twtranslate(self.site.lang,
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 6b9f336..5e03fd2 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1983,8 +1983,8 @@
                page = new[site]
                if not page.section():
                    try:
-                        linkedPages = set(pywikibot.Page(l)
-                                          for l in page.iterlanglinks())
+                        linkedPages = {pywikibot.Page(l)
+                                       for l in page.iterlanglinks()}
                    except pywikibot.NoPage:
                        pywikibot.warning(
                            'Page %s does no longer exist?!' % page)
diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py
index d3f0b40..538a515 100755
--- a/scripts/nowcommons.py
+++ b/scripts/nowcommons.py
@@ -45,8 +45,8 @@
#
# (C) Wikipedian, 2006-2007
# (C) Siebrand Mazeland, 2007-2008
-# (C) xqt, 2010-2017
-# (C) Pywikibot team, 2006-2017
+# (C) xqt, 2010-2018
+# (C) Pywikibot team, 2006-2018
#
# Distributed under the terms of the MIT license.
#
@@ -206,8 +206,8 @@
    def nc_templates(self):
        """A set of now commons template Page instances."""
        if not hasattr(self, '_nc_templates'):
-            self._nc_templates = set(pywikibot.Page(self.site, title, ns=10)
-                                     for title in self.ncTemplates())
+            self._nc_templates = {pywikibot.Page(self.site, title, ns=10)
+                                  for title in self.ncTemplates()}
        return self._nc_templates

    @property
diff --git a/scripts/patrol.py b/scripts/patrol.py
index 4fc9e60..659b76a 100755
--- a/scripts/patrol.py
+++ b/scripts/patrol.py
@@ -124,8 +124,8 @@
        self.patrol_counter = 0  # and how many times an action was taken
        for entry in self.site.siteinfo['specialpagealiases']:
            if entry['realname'] == 'Prefixindex':
-                self._prefixindex_aliases = set(alias.lower()
-                                                for alias in entry['aliases'])
+                self._prefixindex_aliases = {alias.lower()
+                                             for alias in entry['aliases']}
                break
        else:
            raise RuntimeError('No alias for "prefixindex"')
diff --git a/scripts/protect.py b/scripts/protect.py
index 47f7551..8791124 100755
--- a/scripts/protect.py
+++ b/scripts/protect.py
@@ -51,7 +51,7 @@
# Written by https://it.wikisource.org/wiki/Utente:Qualc1
# Created by modifying delete.py
#
-# (C) Pywikibot team, 2008-2017
+# (C) Pywikibot team, 2008-2018
#
# Distributed under the terms of the MIT license.
#
@@ -245,8 +245,8 @@
                                               protection_levels)
        # set the default value for all
        # None (not the string 'none') will be ignored by Site.protect()
-        combined_protections = dict(
-            (p_type, default_level) for p_type in protection_types)
+        combined_protections = {p_type: default_level
+                                for p_type in protection_types}
        for p_type, level in protections.items():
            level = check_protection_level(p_type, level, protection_levels,
                                           default_level)
diff --git a/scripts/redirect.py b/scripts/redirect.py
index 4fd4448..2511f46 100755
--- a/scripts/redirect.py
+++ b/scripts/redirect.py
@@ -73,8 +73,8 @@
#
# (C) Daniel Herding, 2004
# (C) Purodha Blissenbach, 2009
-# (C) xqt, 2009-2017
-# (C) Pywikibot team, 2004-2017
+# (C) xqt, 2009-2018
+# (C) Pywikibot team, 2004-2018
#
# Distributed under the terms of the MIT license.
#
@@ -267,8 +267,8 @@
            raise RuntimeError("No results given.")
        redirects = {}
        pages = {}
-        redirects = dict((x['from'], x['to'])
-                         for x in data['query']['redirects'])
+        redirects = {x['from']: x['to']
+                     for x in data['query']['redirects']}

        for pagetitle in data['query']['pages'].values():
            if 'missing' in pagetitle and 'pageid' not in pagetitle:
diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py
index 7009737..187a009 100755
--- a/scripts/replicate_wiki.py
+++ b/scripts/replicate_wiki.py
@@ -42,7 +42,7 @@
"""
#
# (C) Kasper Souren, 2012-2013
-# (C) Pywikibot team, 2013-2017
+# (C) Pywikibot team, 2013-2018
#
# Distributed under the terms of the MIT license.
#
@@ -61,7 +61,7 @@
@deprecated('BaseSite.namespaces')
def namespaces(site):
    """Return a dictionary from namespace number to prefix."""
-    return dict((n.id, n.custom_name) for n in site.namespaces)
+    return {n.id: n.custom_name for n in site.namespaces}

def multiple_replace(text, word_dict):
diff --git a/scripts/shell.py b/scripts/shell.py
index 50cff05..07b16af 100755
--- a/scripts/shell.py
+++ b/scripts/shell.py
@@ -9,7 +9,7 @@

If no arguments are given, the pywikibot library will not be loaded.
"""
-# (C) Pywikibot team, 2014-2017
+# (C) Pywikibot team, 2014-2018
#
# Distributed under the terms of the MIT license.
#
@@ -31,7 +31,7 @@
if __name__ == "__main__":
    import sys
    args = []
-    if set(sys.argv) - set(['shell', 'shell.py']):
+    if set(sys.argv) - {'shell', 'shell.py'}:
        args = sys.argv
    del sys
    main(*args)
diff --git a/tests/api_tests.py b/tests/api_tests.py
index 38015fe..aabfcb8 100644
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -190,7 +190,7 @@
            else:
                assert 'query' not in modules
            original_generate_submodules(modules)
-        pi = api.ParamInfo(self.site, set(['query', 'main']))
+        pi = api.ParamInfo(self.site, {'query', 'main'})
        self.assertEqual(len(pi), 0)
        original_generate_submodules = pi._generate_submodules
        pi._generate_submodules = patched_generate_submodules
@@ -202,7 +202,7 @@
        """Test initializing with only the pageset."""
        site = self.get_site()
        self.assertNotIn('query', api.ParamInfo.init_modules)
-        pi = api.ParamInfo(site, set(['pageset']))
+        pi = api.ParamInfo(site, {'pageset'})
        self.assertNotIn('query', api.ParamInfo.init_modules)
        self.assertEqual(len(pi), 0)
        pi._init()
@@ -232,7 +232,7 @@
    def test_generators(self):
        """Test requesting the generator parameter."""
        site = self.get_site()
-        pi = api.ParamInfo(site, set(['pageset', 'query']))
+        pi = api.ParamInfo(site, {'pageset', 'query'})
        self.assertEqual(len(pi), 0)
        pi._init()
@@ -548,7 +548,7 @@
        options['c'] = None
        self.assertCountEqual(['a', 'b'], list(options.keys()))
        self.assertCountEqual([True, False], list(options.values()))
-        self.assertEqual(set(), set(options.values()) - set([True, False]))
+        self.assertEqual(set(), set(options.values()) - {True, False})
        self.assertCountEqual([('a', True), ('b', False)],
                              list(options.items()))

@@ -808,7 +808,7 @@
            'limit': {'max': 10},
            'namespace': {'multi': True}
        }
-        mysite._paraminfo.query_modules_with_limits = set(['allpages'])
+        mysite._paraminfo.query_modules_with_limits = {'allpages'}
        self.gen = api.ListGenerator(listaction="allpages", site=mysite)

    def test_namespace_none(self):
diff --git a/tests/archivebot_tests.py b/tests/archivebot_tests.py
index 7270059..f79cd4b 100644
--- a/tests/archivebot_tests.py
+++ b/tests/archivebot_tests.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Tests for archivebot scripts."""
#
-# (C) Pywikibot team, 2016-2017
+# (C) Pywikibot team, 2016-2018
#
# Distributed under the terms of the MIT license.
#
@@ -121,8 +121,7 @@
    """Test archivebot script on 40+ Wikipedia sites."""

    family = 'wikipedia'
-    sites = dict((code, {'family': 'wikipedia', 'code': code})
-                 for code in THREADS)
+    sites = {code: {'family': 'wikipedia', 'code': code} for code in THREADS}
cached = True
@@ -188,8 +187,8 @@
    """

    family = 'wikipedia'
-    sites = dict((code, {'family': 'wikipedia', 'code': code})
-                 for code in THREADS_WITH_UPDATED_FORMAT)
+    sites = {code: {'family': 'wikipedia', 'code': code}
+             for code in THREADS_WITH_UPDATED_FORMAT}
cached = True
diff --git a/tests/aspects.py b/tests/aspects.py
index 041e51b..f51a627 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -7,7 +7,7 @@
mixin to show cache usage is included.
"""
#
-# (C) Pywikibot team, 2014-2017
+# (C) Pywikibot team, 2014-2018
#
# Distributed under the terms of the MIT license.
#
@@ -157,7 +157,7 @@
        @type namespaces: int or set of int
        """
        if isinstance(namespaces, int):
-            namespaces = set([namespaces])
+            namespaces = {namespaces}

        self.assertIn(page.namespace(), namespaces,
                      "%s not in namespace %r" % (page, namespaces))
@@ -222,7 +222,7 @@
        @type namespaces: int or set of int
        """
        if isinstance(namespaces, int):
-            namespaces = set([namespaces])
+            namespaces = {namespaces}

        for page in gen:
            self.assertPageInNamespaces(page, namespaces)
@@ -241,7 +241,7 @@
        @param skip: bool
        """
        if isinstance(namespaces, int):
-            namespaces = set([namespaces])
+            namespaces = {namespaces}
        else:
            assert isinstance(namespaces, set)

@@ -511,14 +511,16 @@
        if issubclass(cls, HttpbinTestCase):
            # If test uses httpbin, then check is pytest test runner is used
            # and pytest_httpbin module is installed.
-            httpbin_used = hasattr(sys, '_test_runner_pytest') and pytest_httpbin
+            httpbin_used = hasattr(sys,
+                                   '_test_runner_pytest') and pytest_httpbin
        else:
            httpbin_used = False

-        # If pytest_httpbin will be used during tests, then remove httpbin.org from sites.
+        # If pytest_httpbin will be used during tests, then remove httpbin.org
+        # from sites.
        if httpbin_used:
-            cls.sites = dict((k, v) for k, v in cls.sites.items()
-                             if 'httpbin.org' not in v['hostname'])
+            cls.sites = {k: v for k, v in cls.sites.items()
+                         if 'httpbin.org' not in v['hostname']}

        for key, data in cls.sites.items():
            if 'hostname' not in data:
diff --git a/tests/family_tests.py b/tests/family_tests.py
index 19a841d..36fc96e 100644
--- a/tests/family_tests.py
+++ b/tests/family_tests.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Tests for the family module."""
#
-# (C) Pywikibot team, 2014-2017
+# (C) Pywikibot team, 2014-2018
#
# Distributed under the terms of the MIT license.
#
@@ -56,8 +56,8 @@
            if isinstance(f, SingleSiteFamily):
                self.assertIsNotNone(f.code)
                self.assertIsNotNone(f.domain)
-                self.assertEqual(set(f.langs), set([f.code]))
-                self.assertEqual(set(f.codes), set([f.code]))
+                self.assertEqual(set(f.langs), {f.code})
+                self.assertEqual(set(f.codes), {f.code})

    def test_family_load_invalid(self):
        """Test that an invalid family raised UnknownFamily exception."""
diff --git a/tests/interwiki_graph_tests.py b/tests/interwiki_graph_tests.py
index c2fd0b7..cb42311 100644
--- a/tests/interwiki_graph_tests.py
+++ b/tests/interwiki_graph_tests.py
@@ -2,7 +2,7 @@
# -*- coding: utf-8 -*-
"""Test Interwiki Graph functionality."""
#
-# (C) Pywikibot team, 2015-2016
+# (C) Pywikibot team, 2015-2018
#
# Distributed under the terms of the MIT license.
#
@@ -70,7 +70,7 @@
drawer = interwiki_graph.GraphDrawer(data)
-        self.assertEqual(set([self.pages['en'].site]), drawer._octagon_site_set())
+        self.assertEqual({self.pages['en'].site}, drawer._octagon_site_set())
drawer.createGraph()
diff --git a/tests/namespace_tests.py b/tests/namespace_tests.py
index 0e658e6..48aac4f 100644
--- a/tests/namespace_tests.py
+++ b/tests/namespace_tests.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Tests for the Namespace class."""
#
-# (C) Pywikibot team, 2014
+# (C) Pywikibot team, 2014-2018
#
# Distributed under the terms of the MIT license.
#
@@ -348,7 +348,7 @@
        """Test performing set minus operation on set of Namespace objects."""
        namespaces = Namespace.builtin_namespaces(use_image_name=False)

-        excluded_namespaces = set([-1, -2])
+        excluded_namespaces = {-1, -2}
positive_namespaces = set(namespaces) - excluded_namespaces
diff --git a/tests/page_tests.py b/tests/page_tests.py
index 2563b54..cdc29af 100644
--- a/tests/page_tests.py
+++ b/tests/page_tests.py
@@ -471,7 +471,7 @@
        if not site.has_extension('Disambiguator'):
            raise unittest.SkipTest('Disambiguator extension not loaded on test site')
        pg = pywikibot.Page(site, 'Random')
-        pg._pageprops = set(['disambiguation', ''])
+        pg._pageprops = {'disambiguation', ''}
        self.assertTrue(pg.isDisambig())
        pg._pageprops = set()
        self.assertFalse(pg.isDisambig())
@@ -878,7 +878,7 @@
        self.assertIsInstance(top_two[0], tuple)
        self.assertIsInstance(top_two[0][0], basestring)
        self.assertIsInstance(top_two[0][1], int)
-        top_two_usernames = set([top_two[0][0], top_two[1][0]])
+        top_two_usernames = {top_two[0][0], top_two[1][0]}
        self.assertEqual(len(top_two_usernames), 2)
        top_two_counts = ([top_two[0][1], top_two[1][1]])
        top_two_edit_count = mp.revision_count(top_two_usernames)
@@ -1022,7 +1022,7 @@
        pp2 = p2.applicable_protections()
        pp3 = p3.applicable_protections()

-        self.assertEqual(pp1, set(['create']))
+        self.assertEqual(pp1, {'create'})
        self.assertIn('edit', pp2)
        self.assertNotIn('create', pp2)
        self.assertNotIn('upload', pp2)
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index f2e6c4a..7b2b9ff 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -398,7 +398,7 @@
                      for item in items]
        self.assertEqual(sorted(timestamps), timestamps)
        self.assertTrue(all(item['ns'] == 0 for item in items))
-        self.assertEqual(len(set(item['revid'] for item in items)), self.length)
+        self.assertEqual(len({item['revid'] for item in items}), self.length)
class TestTextfilePageGenerator(DefaultSiteTestCase):
@@ -626,35 +626,35 @@
        """Test one namespace."""
        gf = pagegenerators.GeneratorFactory(site=self.get_site())
        gf.handleArg('-ns:2')
-        self.assertEqual(gf.namespaces, set([2]))
+        self.assertEqual(gf.namespaces, {2})

    def test_two_namespaces(self):
        """Test two namespaces."""
        gf = pagegenerators.GeneratorFactory(site=self.get_site())
        gf.handleArg('-ns:2')
        gf.handleArg('-ns:Talk')
-        self.assertEqual(gf.namespaces, set([2, 1]))
+        self.assertEqual(gf.namespaces, {2, 1})

    def test_two_named_namespaces(self):
        """Test two named namespaces."""
        gf = pagegenerators.GeneratorFactory(site=self.get_site())
        gf.handleArg('-ns:Talk,File')
-        self.assertEqual(gf.namespaces, set([1, 6]))
+        self.assertEqual(gf.namespaces, {1, 6})

    def test_two_numeric_namespaces(self):
        """Test two namespaces delimited by colon."""
        gf = pagegenerators.GeneratorFactory(site=self.get_site())
        gf.handleArg('-ns:1,6')
-        self.assertEqual(gf.namespaces, set([1, 6]))
+        self.assertEqual(gf.namespaces, {1, 6})

    def test_immutable_namespaces_on_read(self):
        """Test immutable namespaces on read."""
        gf = pagegenerators.GeneratorFactory(site=self.get_site())
        gf.handleArg('-ns:1,6')
-        self.assertEqual(gf.namespaces, set([1, 6]))
+        self.assertEqual(gf.namespaces, {1, 6})
        self.assertIsInstance(gf.namespaces, frozenset)
        gf.handleArg('-ns:0')
-        self.assertEqual(gf.namespaces, set([1, 6]))
+        self.assertEqual(gf.namespaces, {1, 6})

    def test_unsupported_quality_level_filter(self):
        """Test unsupported option."""
@@ -713,7 +713,7 @@
        gf = pagegenerators.GeneratorFactory(site=self.get_site())
        gf.handleArg('-ns:1,2,3,4,5')
        gf.handleArg('-ns:not:User')
-        self.assertEqual(gf.namespaces, set([1, 3, 4, 5]))
+        self.assertEqual(gf.namespaces, {1, 3, 4, 5})

class TestItemClaimFilterPageGenerator(WikidataTestCase):
@@ -998,7 +998,7 @@
        gf.handleArg('-recentchanges:60')
        gen = gf.getCombinedGenerator()
        self.assertIsNotNone(gen)
-        self.assertPagesInNamespacesAll(gen, set([0, 1, 2]), skip=True)
+        self.assertPagesInNamespacesAll(gen, {0, 1, 2}, skip=True)

    def test_recentchanges_rctag(self):
        """Test recentchanges generator with recent changes tag."""
@@ -1006,7 +1006,7 @@
        gf.handleArg('-recentchanges:visualeditor')
        gen = gf.getCombinedGenerator()
        self.assertIsNotNone(gen)
-        self.assertPagesInNamespacesAll(gen, set([0, 1, 2]), skip=True)
+        self.assertPagesInNamespacesAll(gen, {0, 1, 2}, skip=True)

    def test_recentchanges_ns_default(self):
        """Test recentchanges generator."""
@@ -1014,7 +1014,7 @@
        gf.handleArg('-recentchanges:50')
        gen = gf.getCombinedGenerator()
        self.assertIsNotNone(gen)
-        self.assertPagesInNamespacesAll(gen, set([0, 1, 2]), skip=True)
+        self.assertPagesInNamespacesAll(gen, {0, 1, 2}, skip=True)

    def test_recentchanges_ns(self):
        """Test recentchanges generator with namespace."""
@@ -1033,7 +1033,7 @@
        gf.handleArg('-recentchanges:10')
        gen = gf.getCombinedGenerator()
        self.assertIsNotNone(gen)
-        self.assertPagesInNamespaces(gen, set([1, 3]))
+        self.assertPagesInNamespaces(gen, {1, 3})

    def test_pageid(self):
        """Test pageid parameter."""
@@ -1092,7 +1092,7 @@
        gf.handleArg('-random:10')
        gen = gf.getCombinedGenerator()
        self.assertIsNotNone(gen)
-        self.assertPagesInNamespaces(gen, set([1, 3]))
+        self.assertPagesInNamespaces(gen, {1, 3})

    def test_randomredirect_generator_default(self):
        """Test random generator."""
@@ -1120,7 +1120,7 @@
        gf.handleArg('-randomredirect:10')
        gen = gf.getCombinedGenerator()
        self.assertIsNotNone(gen)
-        self.assertPagesInNamespaces(gen, set([1, 3]))
+        self.assertPagesInNamespaces(gen, {1, 3})

    def test_pages_with_property_generator(self):
        """Test the pages_with_property_generator method."""
@@ -1193,7 +1193,7 @@
        for page in pages:
            self.assertIsInstance(page, pywikibot.Page)
            self.assertEqual(page._lintinfo['category'], 'obsolete-tag')
-        self.assertPagesInNamespaces(pages, set([1, ]))
+        self.assertPagesInNamespaces(pages, {1})
    def test_linter_generator_invalid_cat(self):
        """Test generator of pages with lint errors."""
diff --git a/tests/paraminfo_tests.py b/tests/paraminfo_tests.py
index ffe7e56..6a0fc60 100644
--- a/tests/paraminfo_tests.py
+++ b/tests/paraminfo_tests.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Test confirming paraminfo contains expected values."""
#
-# (C) Pywikibot team, 2015-2016
+# (C) Pywikibot team, 2015-2018
#
# Distributed under the terms of the MIT license.
#
@@ -129,7 +129,7 @@
        if isinstance(self.site, DataSite):
            # It is not clear when this format has been added, see T129281.
            base.append('application/vnd.php.serialized')
-        extensions = set(e['name'] for e in self.site.siteinfo['extensions'])
+        extensions = {e['name'] for e in self.site.siteinfo['extensions']}
        if 'CollaborationKit' in extensions:
            base.append('text/x-collabkit')

diff --git a/tests/proofreadpage_tests.py b/tests/proofreadpage_tests.py
index 2f8492f..02fae23 100644
--- a/tests/proofreadpage_tests.py
+++ b/tests/proofreadpage_tests.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Tests for the proofreadpage module."""
#
-# (C) Pywikibot team, 2015-2017
+# (C) Pywikibot team, 2015-2018
#
# Distributed under the terms of the MIT license.
#
@@ -491,8 +491,8 @@
        'num_pages': 804,
        'page': 'Page:Popular Science Monthly Volume 1.djvu/{0}',
        'get_label': [11, 11, '1'],
-        'get_number': [[1, set([11])],
-                       ['Cvr', set([1, 9, 10, 804])],
+        'get_number': [[1, {11}],
+                       ['Cvr', {1, 9, 10, 804}],
                       ],
        # 'get_page' is filled in setUpClass.
    },
@@ -503,7 +503,7 @@
        'num_pages': 272,
        'page': 'Seite:Schiller_Musenalmanach_1799_{0:3d}.jpg',
        'get_label': [120, 120, '120'],  # page no, title no, label
-        'get_number': [[120, set([120])],
+        'get_number': [[120, {120}],
                       ],
        # 'get_page' is filled in setUpClass.
    },
@@ -514,7 +514,7 @@
        'num_pages': 107,
        'page': 'Page:Segard - Hymnes profanes, 1894.djvu/{0}',
        'get_label': [11, 11, '8'],
-        'get_number': [[8, set([11])],
+        'get_number': [[8, {11}],
                       ['-', set(range(1, 4)) | set(range(101, 108))],
                       ],
        # 'get_page' is filled in setUpClass.
@@ -534,8 +534,8 @@
        # 'get_page' has same structure as 'get_number'.
        site_def['get_page'] = []
        for label, page_numbers in site_def['get_number']:
-            page_set = set(ProofreadPage(site, base_title.format(i))
-                           for i in page_numbers)
+            page_set = {ProofreadPage(site, base_title.format(i))
+                        for i in page_numbers}
            site_def['get_page'].append([label, page_set])

    def test_check_if_cached(self, key):
diff --git a/tests/script_tests.py b/tests/script_tests.py
index d542f92..7a70d5e 100644
--- a/tests/script_tests.py
+++ b/tests/script_tests.py
@@ -80,10 +80,9 @@
                         list_scripts(scripts_path, 'login.py') +
                         list_scripts(archive_path))

-runnable_script_list = (['login'] +
-                        sorted(set(script_list) -
-                               set(['login']) -
-                               set(unrunnable_script_list)))
+runnable_script_list = (
+    ['login'] + sorted(set(script_list)
+                       - {'login'} - set(unrunnable_script_list)))

script_input = {
    'catall': 'q\n',  # q for quit
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 1c5353e..e42c3eb 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -505,8 +505,8 @@
        for bl in backlinks_ns_0:
            self.assertIsInstance(bl, pywikibot.Page)

-        self.assertEqual(filtered & redirs, set([]))
-        self.assertEqual(indirect & redirs, set([]))
+        self.assertEqual(filtered & redirs, set())
+        self.assertEqual(indirect & redirs, set())
        self.assertLessEqual(filtered, indirect)
        self.assertLessEqual(filtered, backlinks_ns_0)
        self.assertLessEqual(redirs, backlinks_ns_0)
@@ -600,7 +600,7 @@
        gen_params = links_gen.request._params.copy()
        expected_params['gplnamespace'] = [0, 1]
        self.assertEqual(gen_params, expected_params)
-        self.assertPagesInNamespaces(links_gen, set([0, 1]))
+        self.assertPagesInNamespaces(links_gen, {0, 1})

        for target in self.site.preloadpages(
                self.site.pagelinks(self.mainpage, follow_redirects=True,
                                    total=5)):
@@ -1377,7 +1377,7 @@
        pagelist = [mainpage]
        if imagepage:
            pagelist += [imagepage]
-        titlelist = set(page.title() for page in pagelist)
+        titlelist = {page.title() for page in pagelist}
        for change in mysite.recentchanges(pagelist=pagelist,
                                           total=5):
            self.assertIsInstance(change, dict)
diff --git a/tests/sparql_tests.py b/tests/sparql_tests.py
index 838e567..44b346b 100644
--- a/tests/sparql_tests.py
+++ b/tests/sparql_tests.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""Test cases for the SPARQL API."""
#
-# (C) Pywikibot team, 2016
+# (C) Pywikibot team, 2016-2018
#
# Distributed under the terms of the MIT license.
#
@@ -144,7 +144,7 @@
                                                      ITEM_Q677525)))
        q = sparql.SparqlQuery()
        res = q.get_items('SELECT * WHERE { ?x ?y ?z }', 'cat')
-        self.assertSetEqual(res, set(['Q498787', 'Q677525']))
+        self.assertSetEqual(res, {'Q498787', 'Q677525'})
        res = q.get_items('SELECT * WHERE { ?x ?y ?z }', 'cat',
                          result_type=list)
        self.assertEqual(res, ['Q498787', 'Q677525', 'Q677525'])
diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py
index 49855fd..073373c 100644
--- a/tests/textlib_tests.py
+++ b/tests/textlib_tests.py
@@ -1480,7 +1480,7 @@
    def setUpClass(cls):
        """Define set of valid targets for the example text."""
        super(TestGetLanguageLinks, cls).setUpClass()
-        cls.sites_set = set([cls.enwp, cls.dewp])
+        cls.sites_set = {cls.enwp, cls.dewp}

    def test_getLanguageLinks(self, key):
        """Test if the function returns the correct titles and sites."""
@@ -1489,9 +1489,9 @@
            m.assert_called_once_with(
                '[getLanguageLinks] Text contains invalid interwiki link '
                '[[fr:{{PAGENAME}}]].')
-        self.assertEqual(set(page.title() for page in lang_links.values()),
-                         set(['Site']))
-        self.assertEqual(set(lang_links), self.sites_set - set([self.site]))
+        self.assertEqual({page.title() for page in lang_links.values()},
+                         {'Site'})
+        self.assertEqual(set(lang_links), self.sites_set - {self.site})

class TestUnescape(TestCase):
diff --git a/tests/tools_tests.py b/tests/tools_tests.py
index 0e1aa04..ab58205 100644
--- a/tests/tools_tests.py
+++ b/tests/tools_tests.py
@@ -457,7 +457,7 @@
        elif isinstance(deduped, collections.Mapping):
            self.assertCountEqual(list(deduped.keys()), [1, 3])
        else:
-            self.assertEqual(deduped, set([1, 3]))
+            self.assertEqual(deduped, {1, 3})

        self.assertEqual(next(deduper), 2)
        self.assertEqual(next(deduper), 4)
@@ -468,7 +468,7 @@
        elif isinstance(deduped, collections.Mapping):
            self.assertCountEqual(list(deduped.keys()), [1, 2, 3, 4])
        else:
-            self.assertEqual(deduped, set([1, 2, 3, 4]))
+            self.assertEqual(deduped, {1, 2, 3, 4})
self.assertRaises(StopIteration, next, deduper)
@@ -486,7 +486,7 @@
        if isinstance(deduped, collections.Mapping):
            self.assertEqual(deduped.keys(), [key('1'), key('3')])
        else:
-            self.assertEqual(deduped, set([key('1'), key('3')]))
+            self.assertEqual(deduped, {key('1'), key('3')})

        self.assertEqual(next(deduper), '2')
        self.assertEqual(next(deduper), '4')
@@ -495,7 +495,7 @@
        if isinstance(deduped, collections.Mapping):
            self.assertEqual(deduped.keys(), [key(i) for i in self.strs])
        else:
-            self.assertEqual(deduped, set(key(i) for i in self.strs))
+            self.assertEqual(deduped, {key(i) for i in self.strs})
self.assertRaises(StopIteration, next, deduper)
@@ -593,7 +593,7 @@
        deduper = tools.filter_unique(self.ints, container=deduped)
        deduped_out = list(deduper)
        self.assertCountEqual(deduped, deduped_out)
-        self.assertEqual(deduped, set([2, 4]))
+        self.assertEqual(deduped, {2, 4})

    def test_process_again(self):
        """Test filter_unique with an ignoring container."""
@@ -601,7 +601,7 @@
        deduper = tools.filter_unique(self.ints, container=deduped)
        deduped_out = list(deduper)
        self.assertEqual(deduped_out, [1, 3, 2, 1, 1, 4])
-        self.assertEqual(deduped, set([2, 4]))
+        self.assertEqual(deduped, {2, 4})

    def test_stop(self):
        """Test filter_unique with an ignoring container."""
@@ -610,7 +610,7 @@
        deduper = tools.filter_unique(self.ints, container=deduped)
        deduped_out = list(deduper)
        self.assertCountEqual(deduped, deduped_out)
-        self.assertEqual(deduped, set([1, 3]))
+        self.assertEqual(deduped, {1, 3})

        # And it should not resume
        self.assertRaises(StopIteration, next, deduper)
@@ -620,7 +620,7 @@
        deduper = tools.filter_unique(self.ints, container=deduped)
        deduped_out = list(deduper)
        self.assertCountEqual(deduped, deduped_out)
-        self.assertEqual(deduped, set([1, 2, 3]))
+        self.assertEqual(deduped, {1, 2, 3})

        # And it should not resume
        self.assertRaises(StopIteration, next, deduper)
diff --git a/tests/utils.py b/tests/utils.py
index 4fe6414..2f7837a 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -329,7 +329,7 @@

    def __init__(self, cache):
        """Constructor."""
-        self._cache = dict((key, (item, False)) for key, item in cache.items())
+        self._cache = {key: (item, False) for key, item in cache.items()}

    def __getitem__(self, key):
        """Get item."""
diff --git a/tox.ini b/tox.ini
index c53553a..700e857 100644
--- a/tox.ini
+++ b/tox.ini
@@ -152,7 +152,6 @@
# W503: line break before binary operator; against current PEP 8 recommendation

# The following are to be fixed
-# C401, C402, C405: does not work with py 2.6
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# E402: module level import not at top of file; see T87409
@@ -164,7 +163,7 @@
# D413: Missing blank line after last section
# D412: No blank lines allowed between a section header and its content

-ignore = C401,C402,C405,E402,D105,D211,FI10,FI12,FI13,FI15,FI16,FI17,FI5,H101,H236,H301,H404,H405,H903,I100,I101,I202,N802,N803,N806,D401,D413,D103,D412,W503
+ignore = E402,D105,D211,FI10,FI12,FI13,FI15,FI16,FI17,FI5,H101,H236,H301,H404,H405,H903,I100,I101,I202,N802,N803,N806,D401,D413,D103,D412,W503
exclude = .tox,.git,./*.egg,ez_setup.py,build,externals,user-config.py,./scripts/i18n/*,scripts/userscripts/*
min-version = 2.7
max_line_length = 100
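(The C4xx codes in the ignore list above come from the flake8-comprehensions plugin, so the tox flake8 environment already installs it. To check only the newly enabled codes locally, an invocation along these lines should work — exact paths are illustrative:

    flake8 --select=C401,C402,C405 pywikibot scripts tests

With the three codes dropped from the ignore list, a plain tox flake8 run now enforces them as well.)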
pywikibot-commits@lists.wikimedia.org