jenkins-bot has submitted this change. (
https://gerrit.wikimedia.org/r/c/pywikibot/core/+/652568 )
Change subject: [tests] add flake8-bugbear tests
......................................................................
[tests] add flake8-bugbear tests
Add flake8-bugbear tests and resolve the resulting warnings
Change-Id: Id5029582a7de343523652391e8411c497ef46bd5
---
M pywikibot/bot.py
M pywikibot/page/__init__.py
M pywikibot/site/_apisite.py
M pywikibot/specialbots/_upload.py
M pywikibot/textlib.py
M scripts/casechecker.py
M scripts/data_ingestion.py
M scripts/imagecopy_self.py
M scripts/redirect.py
M scripts/replace.py
M scripts/solve_disambiguation.py
M setup.py
M tests/flow_tests.py
M tests/i18n_tests.py
M tests/pagegenerators_tests.py
M tests/script_tests.py
M tests/ui_tests.py
M tests/utils.py
M tox.ini
19 files changed, 57 insertions(+), 50 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 7f514cf..7a966ab 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -892,19 +892,19 @@
return show_help(module_name)
-def suggest_help(missing_parameters=[], missing_generator=False,
- unknown_parameters=[], exception=None,
- missing_action=False, additional_text: str = '',
- missing_dependencies=[]) -> bool:
+def suggest_help(missing_parameters: Optional[List[str]] = None,
+ missing_generator: bool = False,
+ unknown_parameters: Optional[List[str]] = None,
+ exception=None,
+ missing_action: bool = False,
+ additional_text: str = '',
+ missing_dependencies: Optional[List[str]] = None) -> bool:
"""
Output error message to use -help with additional text before it.
@param missing_parameters: A list of parameters which are missing.
- @type missing_parameters: list of str
@param missing_generator: Whether a generator is missing.
- @type missing_generator: bool
@param unknown_parameters: A list of parameters which are unknown.
- @type unknown_parameters: list of str
@param exception: An exception thrown.
@type exception: Exception
@param missing_action: Add an entry that no action was defined.
@@ -1717,10 +1717,10 @@
if not conf.has_option(section, option):
continue
# use a convenience parser method, default to get()
- default = getattr(conf, 'get')
+ default = conf.get
value_type = type(value).__name__
if value_type == 'bool':
- method = getattr(conf, 'getboolean')
+ method = conf.getboolean
else:
method = getattr(conf, 'get' + value_type, default)
options[option] = method(section, option)
diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index 1b295fb..390dfd1 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -1915,7 +1915,7 @@
sortKey='sort_key', inPlace='in_place')
def change_category(
self, old_cat, new_cat, summary=None, sort_key=None, in_place=True,
- include=[]
+ include=None
) -> bool:
"""
Remove page from oldCat and add it to newCat.
@@ -1944,7 +1944,7 @@
# duplicates
cats = []
for cat in textlib.getCategoryLinks(self.text, site=self.site,
- include=include):
+ include=include or []):
if cat not in cats:
cats.append(cat)
diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py
index 8e142c2..680c5d1 100644
--- a/pywikibot/site/_apisite.py
+++ b/pywikibot/site/_apisite.py
@@ -995,7 +995,7 @@
mw_ver, cache_time = getattr(self, '_mw_version_time', (None, None))
if mw_ver is None or time.time() - cache_time > 60 * 60 * 24:
mw_ver = MediaWikiVersion(self.version())
- setattr(self, '_mw_version_time', (mw_ver, time.time()))
+ self._mw_version_time = mw_ver, time.time()
return mw_ver
@property
diff --git a/pywikibot/specialbots/_upload.py b/pywikibot/specialbots/_upload.py
index eebf405..622c8a3 100644
--- a/pywikibot/specialbots/_upload.py
+++ b/pywikibot/specialbots/_upload.py
@@ -46,7 +46,7 @@
verify_description: bool = True,
ignore_warning: Union[bool, list] = False,
target_site=None,
- aborts: Union[bool, list] = [],
+ aborts: Union[bool, list, None] = None,
chunk_size: int = 0,
summary: Optional[str] = None,
filename_prefix: Optional[str] = None, **kwargs):
@@ -100,7 +100,7 @@
self.keep_filename = keep_filename or self.opt.always
self.verify_description = verify_description and not self.opt.always
self.ignore_warning = ignore_warning
- self.aborts = aborts
+ self.aborts = aborts or []
self.chunk_size = chunk_size
self.summary = summary
self.filename_prefix = filename_prefix
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index dc47927..e0637d4 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -6,7 +6,7 @@
"""
#
-# (C) Pywikibot team, 2008-2020
+# (C) Pywikibot team, 2008-2021
#
# Distributed under the terms of the MIT license.
#
@@ -436,7 +436,7 @@
return text
-def removeDisabledParts(text: str, tags=None, include=[], site=None) -> str:
+def removeDisabledParts(text: str, tags=None, include=None, site=None) -> str:
"""
Return text without portions where wiki markup is disabled.
@@ -453,7 +453,7 @@
@param include: Or, in alternative, default parts that shall not
be removed.
- @type include: list, set or tuple
+ @type include: list, set, tuple or None
@param site: Site to be used for site-dependent regexes. Default
disabled parts listed above do not need it.
@@ -462,16 +462,18 @@
@return: text stripped from disabled parts.
"""
if not tags:
- tags = ('comment', 'includeonly', 'nowiki',
'pre', 'syntaxhighlight')
- tags = set(tags) - set(include)
+ tags = {'comment', 'includeonly', 'nowiki',
'pre', 'syntaxhighlight'}
+ else:
+ tags = set(tags)
+ if include:
+ tags -= set(include)
regexes = _get_regexes(tags, site)
toRemoveR = re.compile('|'.join(x.pattern for x in regexes),
re.IGNORECASE | re.DOTALL)
return toRemoveR.sub('', text)
-def removeHTMLParts(text: str,
- keeptags=['tt', 'nowiki', 'small',
'sup']) -> str:
+def removeHTMLParts(text: str, keeptags: Optional[List[str]] = None) -> str:
"""
Return text without portions where HTML markup is disabled.
@@ -485,6 +487,8 @@
# thanks to:
#
https://www.hellboundhackers.org/articles/read-article.php?article_id=841
parser = _GetDataHTML()
+ if keeptags is None:
+ keeptags = ['tt', 'nowiki', 'small', 'sup']
with parser:
parser.keeptags = keeptags
parser.feed(text)
@@ -1241,7 +1245,8 @@
# Functions dealing with category links
# -------------------------------------
-def getCategoryLinks(text: str, site=None, include: list = [],
+def getCategoryLinks(text: str, site=None,
+ include: Optional[List[str]] = None,
expand_text: bool = False) -> list:
"""Return a list of category links found in text.
@@ -1255,7 +1260,7 @@
site = pywikibot.Site()
# Ignore category links within nowiki tags, pre tags, includeonly tags,
# and HTML comments
- text = removeDisabledParts(text, include=include)
+ text = removeDisabledParts(text, include=include or [])
catNamespace = '|'.join(site.namespaces.CATEGORY)
R =
re.compile(r'\[\[\s*(?P<namespace>%s)\s*:\s*(?P<rest>.+?)\]\]'
% catNamespace, re.I)
diff --git a/scripts/casechecker.py b/scripts/casechecker.py
index 40a7c45..144f35a 100755
--- a/scripts/casechecker.py
+++ b/scripts/casechecker.py
@@ -1,7 +1,7 @@
#!/usr/bin/python
"""Bot to find all pages on the wiki with mixed latin and cyrilic
alphabets."""
#
-# (C) Pywikibot team, 2006-2020
+# (C) Pywikibot team, 2006-2021
#
# Distributed under the terms of the MIT license.
#
@@ -300,7 +300,7 @@
return
firstItem = True
- for pageID, page in data['query']['pages'].items():
+ for page in data['query']['pages'].values():
printed = False
title = page['title']
self.currentTitle = title
diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py
index 19bbbb3..10a7cb4 100755
--- a/scripts/data_ingestion.py
+++ b/scripts/data_ingestion.py
@@ -188,11 +188,11 @@
# FIXME: normalise the title so it is usable as a MediaWiki title.
return fmt % self.metadata
- def getDescription(self, template, extraparams={}):
+ def getDescription(self, template, extraparams=None):
"""Generate a description for a file."""
params = {}
params.update(self.metadata)
- params.update(extraparams)
+ params.update(extraparams or {})
description = '{{%s\n' % template
for key in sorted(params.keys()):
value = params[key]
diff --git a/scripts/imagecopy_self.py b/scripts/imagecopy_self.py
index 0d5f1f5..03e582e 100644
--- a/scripts/imagecopy_self.py
+++ b/scripts/imagecopy_self.py
@@ -384,7 +384,7 @@
return
text = imagepage.get()
- for regex, replacement in licenseTemplates[page.site.lang]:
+ for regex, _ in licenseTemplates[page.site.lang]:
match = re.search(regex, text, flags=re.IGNORECASE)
if match:
break
@@ -507,7 +507,7 @@
# Still have to filter out crap like "see below" or "yes"
if contents['permission']:
# Strip of the license template if it's in the permission section
- for (regex, repl) in licenseTemplates[imagepage.site.lang]:
+ for regex, _ in licenseTemplates[imagepage.site.lang]:
contents['permission'] = re.sub(regex, '',
contents['permission'],
flags=re.IGNORECASE)
diff --git a/scripts/redirect.py b/scripts/redirect.py
index e03832b..4ce35fd 100755
--- a/scripts/redirect.py
+++ b/scripts/redirect.py
@@ -276,9 +276,8 @@
"""Retrieve broken redirects."""
if self.opt.fullscan:
count = 0
- for pagetitle, type, target, final in self.get_redirects_via_api(
- maxlen=2):
- if type == 0:
+ for pagetitle, type_, *_ in self.get_redirects_via_api(maxlen=2):
+ if type_ == 0:
yield pagetitle
count += 1
if self.opt.limit and count >= self.opt.limit:
@@ -303,9 +302,8 @@
yield from self.get_moved_pages_redirects()
elif self.opt.fullscan:
count = 0
- for pagetitle, type_, target, final in self.get_redirects_via_api(
- maxlen=2):
- if type_ != 0 and type_ != 1:
+ for pagetitle, type_, *_ in self.get_redirects_via_api(maxlen=2):
+ if type_ not in (0, 1):
yield pagetitle
count += 1
if self.opt.limit and count >= self.opt.limit:
diff --git a/scripts/replace.py b/scripts/replace.py
index 546e037..5497f44 100755
--- a/scripts/replace.py
+++ b/scripts/replace.py
@@ -525,7 +525,7 @@
"""
@deprecated_args(acceptall='always', addedCat='addcat')
- def __init__(self, generator, replacements, exceptions={}, **kwargs):
+ def __init__(self, generator, replacements, exceptions=None, **kwargs):
"""Initializer."""
self.available_options.update({
'addcat': None,
@@ -546,7 +546,7 @@
replacements[i] = Replacement.from_compiled(replacement[0],
replacement[1])
self.replacements = replacements
- self.exceptions = exceptions
+ self.exceptions = exceptions or {}
if self.opt.addcat and isinstance(self.opt.addcat, str):
self.opt.addcat = pywikibot.Category(self.site, self.opt.addcat)
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index 3d4acc7..9a5559a 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -1146,9 +1146,10 @@
self.alternatives += links
return True
- def setSummaryMessage(self, page, new_targets=[], unlink_counter=0,
+ def setSummaryMessage(self, page, new_targets=None, unlink_counter=0,
dn=False) -> None:
"""Setup i18n summary message."""
+ new_targets = new_targets or []
# make list of new targets
comma = self.site.mediawiki_message('comma-separator')
targets = comma.join('[[{0}]]'.format(page_title)
diff --git a/setup.py b/setup.py
index fab0fd1..a694ec7 100644
--- a/setup.py
+++ b/setup.py
@@ -74,6 +74,7 @@
'flake8>=3.7.5',
'pydocstyle>=4.0.0',
'hacking',
+ 'flake8-bugbear',
'flake8-coding',
'flake8-colors>=0.1.9',
'flake8-comprehensions>=3.1.4;python_version>="3.8"',
diff --git a/tests/flow_tests.py b/tests/flow_tests.py
index 5b8fed6..0ecba55 100644
--- a/tests/flow_tests.py
+++ b/tests/flow_tests.py
@@ -142,9 +142,7 @@
def test_topiclist(self):
"""Test loading of topiclist."""
board = self._page
- i = 0
- for topic in board.topics(limit=7):
- i += 1
+ for i, _ in enumerate(board.topics(limit=7), start=1):
if i == 10:
break
self.assertEqual(i, 10)
diff --git a/tests/i18n_tests.py b/tests/i18n_tests.py
index 4d31881..031effc 100644
--- a/tests/i18n_tests.py
+++ b/tests/i18n_tests.py
@@ -33,7 +33,7 @@
"""Initializer."""
self.code = code
self.family = self.Family()
- setattr(self.family, 'name', family)
+ self.family.name = family
def __repr__(self):
return "'{site.family.name}:{site.code}'".format(site=self)
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index 0e6c92f..0f4e7e8 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -78,7 +78,7 @@
def assertFunction(self, obj):
"""Assert function test."""
self.assertTrue(hasattr(pagegenerators, obj))
- self.assertTrue(hasattr(getattr(pagegenerators, obj), '__call__'))
+ self.assertTrue(callable(getattr(pagegenerators, obj)))
def test_module_import(self):
"""Test module import."""
@@ -1661,9 +1661,9 @@
if self.site.data_repository():
self.skipTest('Site is using a Wikibase repository')
with self.assertRaises(ValueError):
- for page in pagegenerators.UnconnectedPageGenerator(self.site,
- total=5):
- assert False # this shouldn't be reached
+ for _ in pagegenerators.UnconnectedPageGenerator(self.site,
+ total=5):
+ raise AssertionError("this shouldn't be reached")
class TestLinksearchPageGenerator(TestCase):
diff --git a/tests/script_tests.py b/tests/script_tests.py
index a5e6b1f..cc2942d 100644
--- a/tests/script_tests.py
+++ b/tests/script_tests.py
@@ -207,7 +207,10 @@
def __new__(cls, name, bases, dct):
"""Create the new class."""
- def test_execution(script_name, args=[]):
+ def test_execution(script_name, args=None):
+ if args is None:
+ args = []
+
is_autorun = ('-help' not in args
and script_name in auto_run_script_list)
diff --git a/tests/ui_tests.py b/tests/ui_tests.py
index 8f21ae2..c70737f 100644
--- a/tests/ui_tests.py
+++ b/tests/ui_tests.py
@@ -692,7 +692,7 @@
def _encounter_color(self, color, target_stream):
"""Patched encounter_color method."""
- assert False, 'This method should not be invoked'
+ raise AssertionError('This method should not be invoked')
def test_no_color(self):
"""Test a string without any colors."""
diff --git a/tests/utils.py b/tests/utils.py
index 5025dbd..9564eb2 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -62,7 +62,7 @@
def entered_loop(iterable):
"""Return True if iterable contains items."""
- for iterable_item in iterable:
+ for _ in iterable:
return True
return False
diff --git a/tox.ini b/tox.ini
index 8c59284..e2f6b21 100644
--- a/tox.ini
+++ b/tox.ini
@@ -96,7 +96,8 @@
# D412: No blank lines allowed between a section header and its content
# D413: Missing blank line after last section
-ignore =
C103,D105,D211,D401,D413,D412,FI1,H101,H238,H301,H404,H405,H903,P101,P102,P103,P205,W503
+ignore =
B007,C103,D105,D211,D401,D413,D412,FI1,H101,H238,H301,H404,H405,H903,P101,P102,P103,P205,W503
+select = B901,B903
enable-extensions = H203,H204,H205
exclude = .tox,.git,./*.egg,build,scripts/archive/*,./scripts/i18n/*
classmethod-decorators = classmethod,classproperty
--
To view, visit
https://gerrit.wikimedia.org/r/c/pywikibot/core/+/652568
To unsubscribe, or for help writing mail filters, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Id5029582a7de343523652391e8411c497ef46bd5
Gerrit-Change-Number: 652568
Gerrit-PatchSet: 8
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-CC: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-MessageType: merged