jenkins-bot submitted this change.

View Change

Approvals: jenkins-bot: Verified Xqt: Looks good to me, approved
[IMPR] code improvements

- use ternary operator if the line fits
- update typings
- unpack operator in collections instead of concatenating them
as it is more readable and flexible
- return conditions directly instead of return False/True
- use dict.get() instead testing the membership first
- use explicit conversion flag in strings
- remove unnecessary keys()
- use type(self) instead of self.__class__
- use next() to get the first item of a generator instead of list(...)[0]

Change-Id: I8f1baf826bb31a24dc693b9e18af8952e5becac9
---
M pywikibot/_wbtypes.py
M pywikibot/bot.py
M pywikibot/comms/http.py
M pywikibot/config.py
M pywikibot/cosmetic_changes.py
M pywikibot/data/api/_requests.py
M pywikibot/date.py
M pywikibot/family.py
M pywikibot/i18n.py
M pywikibot/interwiki_graph.py
M pywikibot/page/_basepage.py
M pywikibot/page/_toolforge.py
M pywikibot/page/_wikibase.py
M pywikibot/proofreadpage.py
M pywikibot/scripts/wrapper.py
M pywikibot/site/_apisite.py
M pywikibot/site/_datasite.py
M pywikibot/site/_namespace.py
M pywikibot/specialbots/_unlink.py
M pywikibot/textlib.py
M pywikibot/throttle.py
M pywikibot/tools/__init__.py
M pywikibot/tools/djvu.py
M pywikibot/userinterfaces/buffer_interface.py
M pywikibot/userinterfaces/terminal_interface_base.py
M pywikibot/xmlreader.py
M scripts/__init__.py
M scripts/checkimages.py
M scripts/claimit.py
M scripts/dataextend.py
M scripts/fixing_redirects.py
M scripts/interwiki.py
M scripts/maintenance/cache.py
M scripts/pagefromfile.py
M scripts/reflinks.py
M scripts/solve_disambiguation.py
M scripts/watchlist.py
M scripts/welcome.py
M tests/aspects.py
M tests/bot_tests.py
M tests/eventstreams_tests.py
M tests/make_dist_tests.py
M tests/pwb_tests.py
M tests/replacebot_tests.py
M tests/script_tests.py
M tests/token_tests.py
M tests/wikibase_tests.py
47 files changed, 107 insertions(+), 194 deletions(-)

diff --git a/pywikibot/_wbtypes.py b/pywikibot/_wbtypes.py
index cd24060..1a9a080 100644
--- a/pywikibot/_wbtypes.py
+++ b/pywikibot/_wbtypes.py
@@ -929,10 +929,7 @@
error = None
if bounds_provided or cls._require_errors(site):
error = (upper_bound - amount, amount - lower_bound)
- if data['unit'] == '1':
- unit = None
- else:
- unit = data['unit']
+ unit = None if data['unit'] == '1' else data['unit']
return cls(amount, unit, error, site)


diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 449377a..cdb9230 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -2189,10 +2189,7 @@
"""
# This code is somewhat duplicate to user_add_claim but
# unfortunately we need the source claim here, too.
- if source:
- sourceclaim = self.getSource(source)
- else:
- sourceclaim = None
+ sourceclaim = self.getSource(source) if source else None

# Existing claims on page of same property
claims = item.get().get('claims')
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 24c4728..edda981 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -508,10 +508,7 @@
header = response.content[:100].splitlines()[0] # bytes
m = re.search(
br'encoding=(["\'])(?P<encoding>.+?)\1', header)
- if m:
- header_encoding = m['encoding'].decode('utf-8')
- else:
- header_encoding = 'utf-8'
+ header_encoding = m['encoding'].decode('utf-8') if m else 'utf-8'
else:
header_encoding = None

diff --git a/pywikibot/config.py b/pywikibot/config.py
index f24d461..e886663 100644
--- a/pywikibot/config.py
+++ b/pywikibot/config.py
@@ -371,7 +371,7 @@
raise OSError(
f'Windows version {win_version} not supported yet.')
base_dir_cand.extend([[home] + sub_dir + ['Pywikibot'],
- [home] + sub_dir + ['pywikibot']])
+ [home] + sub_dir + ['pywikibot']])
else:
base_dir_cand.append([home, '.pywikibot'])

@@ -475,9 +475,9 @@


# Get the names of all known families, and initialize with empty dictionaries.
-# ‘families/’ is a subdirectory of the directory in which config.py is found.
+# 'families/' is a subdirectory of the directory in which config.py is found.
register_families_folder(os.path.join(os.path.dirname(__file__), 'families'))
-# ‘families/’ can also be stored in the base directory
+# 'families/' can also be stored in the base directory
register_families_folder(os.path.join(base_dir, 'families'),
not_exists_ok=True)

diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index d4b4def..2a17aa5 100644
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -834,10 +834,7 @@
for template in deprecatedTemplates[
self.site.family.name][self.site.code]:
old, new = template
- if new is None:
- new = ''
- else:
- new = '{{%s}}' % new
+ new = '{{%s}}' % new if new else ''

text = textlib.replaceExcept(
text,
diff --git a/pywikibot/data/api/_requests.py b/pywikibot/data/api/_requests.py
index 8de80c4..0e84593 100644
--- a/pywikibot/data/api/_requests.py
+++ b/pywikibot/data/api/_requests.py
@@ -426,10 +426,10 @@
typep = self._params.get('type', [])
if not ('tokens' in meta and 'login' in typep):
if 'userinfo' not in meta:
- meta = set(meta + ['userinfo'])
+ meta = {*meta, 'userinfo'}
self['meta'] = sorted(meta)
uiprop = self._params.get('uiprop', [])
- uiprop = set(uiprop + ['blockinfo', 'hasmsg'])
+ uiprop = {*uiprop, 'blockinfo', 'hasmsg'}
self['uiprop'] = sorted(uiprop)
if 'prop' in self._params \
and self.site.has_extension('ProofreadPage'):
diff --git a/pywikibot/date.py b/pywikibot/date.py
index ef12fff..cbcc420 100644
--- a/pywikibot/date.py
+++ b/pywikibot/date.py
@@ -413,7 +413,7 @@
newpattern += f'([{dec[0]}]{{{subpattern[1]}}})'
# add the number of required digits as the last (4th)
# part of the tuple
- decoders.append(dec + (int(s[1]),))
+ decoders.append((*dec, int(s[1])))
else:
newpattern += f'([{dec[0]}]+)'
decoders.append(dec)
diff --git a/pywikibot/family.py b/pywikibot/family.py
index e8e8242..b5520cd 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -382,7 +382,7 @@
FamilyMaintenanceWarning,
stacklevel=2,
)
- for code in cls.langs.keys():
+ for code in cls.langs:
if not all(x in CODE_CHARACTERS for x in code):
warnings.warn(
'Family {} code {} must be ASCII lowercase letters and '
diff --git a/pywikibot/i18n.py b/pywikibot/i18n.py
index 9a5ceb7..7d1aa55 100644
--- a/pywikibot/i18n.py
+++ b/pywikibot/i18n.py
@@ -613,7 +613,7 @@
else:
codes = [code]
if fallback is True:
- codes += _altlang(code) + ['_default', 'en']
+ codes += [*_altlang(code), '_default', 'en']
elif fallback is not False:
assert not isinstance(fallback, bool)
codes.extend(fallback)
@@ -793,7 +793,7 @@
# modes are caught with the KeyError.
langs = [lang]
if fallback:
- langs += _altlang(lang) + ['en']
+ langs += [*_altlang(lang), 'en']
for alt in langs:
trans = _get_translation(alt, twtitle)
if trans:
diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py
index 40d67c4..3efacb5 100644
--- a/pywikibot/interwiki_graph.py
+++ b/pywikibot/interwiki_graph.py
@@ -1,6 +1,6 @@
"""Module with the Graphviz drawing calls."""
#
-# (C) Pywikibot team, 2006-2022
+# (C) Pywikibot team, 2006-2024
#
# Distributed under the terms of the MIT license.
#
@@ -187,7 +187,7 @@

self.octagon_sites = self._octagon_site_set()

- for page in self.subject.found_in.keys():
+ for page in self.subject.found_in:
# a node for each found page
self.addNode(page)
# mark start node by pointing there from a black dot.
diff --git a/pywikibot/page/_basepage.py b/pywikibot/page/_basepage.py
index 8f33610..0223c20 100644
--- a/pywikibot/page/_basepage.py
+++ b/pywikibot/page/_basepage.py
@@ -1568,10 +1568,7 @@
"""
# This function does not exist in the API, so it has to be
# implemented by screen-scraping
- if expand:
- text = self.expand_text()
- else:
- text = self.text
+ text = self.expand_text() if expand else self.text
for linkmatch in pywikibot.link_regex.finditer(
textlib.removeDisabledParts(text)):
linktitle = linkmatch['title']
@@ -2160,7 +2157,7 @@
"""
if hasattr(self, '_deletedRevs'):
undelete_revs = [ts for ts, rev in self._deletedRevs.items()
- if 'marked' in rev and rev['marked']]
+ if rev.get('marked')]
else:
undelete_revs = []
if reason is None:
diff --git a/pywikibot/page/_toolforge.py b/pywikibot/page/_toolforge.py
index 019717f..cd41964 100644
--- a/pywikibot/page/_toolforge.py
+++ b/pywikibot/page/_toolforge.py
@@ -172,11 +172,7 @@
raise ValueError(
'You cannot specify revid together with date argument')

- if date is None:
- show = revid or 0
- else:
- show = str(date)[:10]
-
+ show = revid or 0 if date is None else str(date)[:10]
url = '{}.wikipedia.org/{}/{}?uselang={}'.format(
self.site.code,
self.title(as_url=True, with_ns=False, with_section=False),
diff --git a/pywikibot/page/_wikibase.py b/pywikibot/page/_wikibase.py
index 951f89a..5b34dd7 100644
--- a/pywikibot/page/_wikibase.py
+++ b/pywikibot/page/_wikibase.py
@@ -584,7 +584,7 @@
There should be no need to instantiate this directly.
"""

- _cache_attrs = BasePage._cache_attrs + ('_content', )
+ _cache_attrs = (*BasePage._cache_attrs, '_content')

def __init__(self, site, title: str = '', **kwargs) -> None:
"""
@@ -925,8 +925,7 @@

class ItemPage(WikibasePage):

- """
- Wikibase entity of type 'item'.
+ """Wikibase entity of type 'item'.

A Wikibase item may be defined by either a 'Q' id (qid),
or by a site & title.
@@ -935,8 +934,8 @@
been looked up, the item is then defined by the qid.
"""

- _cache_attrs = WikibasePage._cache_attrs + (
- 'labels', 'descriptions', 'aliases', 'claims', 'sitelinks')
+ _cache_attrs = (*WikibasePage._cache_attrs, 'labels', 'descriptions',
+ 'aliases', 'claims', 'sitelinks')
entity_type = 'item'
title_pattern = r'Q[1-9]\d*'
DATA_ATTRIBUTES = {
@@ -1440,8 +1439,7 @@

class PropertyPage(WikibasePage, Property):

- """
- A Wikibase entity in the property namespace.
+ """A Wikibase entity in the property namespace.

Should be created as::

@@ -1452,8 +1450,8 @@
PropertyPage(DataSite, datatype='url')
"""

- _cache_attrs = WikibasePage._cache_attrs + (
- '_type', 'labels', 'descriptions', 'aliases', 'claims')
+ _cache_attrs = (*WikibasePage._cache_attrs, '_type', 'labels',
+ 'descriptions', 'aliases', 'claims')
entity_type = 'property'
title_pattern = r'P[1-9]\d*'
DATA_ATTRIBUTES = {
@@ -2077,10 +2075,8 @@
if (isinstance(self.target, pywikibot.Coordinate)
and isinstance(value, str)):
coord_args = [float(x) for x in value.split(',')]
- if len(coord_args) >= 3:
- precision = coord_args[2]
- else:
- precision = 0.0001 # Default value (~10 m at equator)
+ # Default value 0.0001 ~10 m at equator
+ precision = coord_args[2] if len(coord_args) >= 3 else 0.0001
with suppress(TypeError):
if self.target.precision is not None:
precision = max(precision, self.target.precision)
@@ -2169,9 +2165,8 @@
['P5137', 'P5972', 'P2888']
"""

- _cache_attrs = WikibasePage._cache_attrs + (
- 'lemmas', 'language', 'lexicalCategory', 'forms', 'senses',
- )
+ _cache_attrs = (*WikibasePage._cache_attrs, 'lemmas', 'language',
+ 'lexicalCategory', 'forms', 'senses')
entity_type = 'lexeme'
title_pattern = r'L[1-9]\d*'
DATA_ATTRIBUTES = {
diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py
index 5a14de2..61dfd45 100644
--- a/pywikibot/proofreadpage.py
+++ b/pywikibot/proofreadpage.py
@@ -160,7 +160,7 @@

def __repr__(self):
attr = 'from' if self.attr == 'ffrom' else self.attr
- return f"{self.__class__.__name__}('{attr}', {repr(self._orig_value)})"
+ return f"{type(self).__name__}('{attr}', {self._orig_value!r})"


class TagAttrDesc:
@@ -530,7 +530,7 @@
index_page, others = self._index
if others:
pywikibot.warning(f'{self} linked to several Index pages.')
- pywikibot.info(f"{' ' * 9}{[index_page] + others!s}")
+ pywikibot.info(f"{' ' * 9}{[index_page, *others]!s}")

if index_page:
pywikibot.info(
@@ -1122,11 +1122,8 @@

# Discard all inner templates as only top-level ones matter
templates = textlib.extract_templates_and_params_regex_simple(text)
- if len(templates) != 1 or templates[0][0] != self.INDEX_TEMPLATE:
- # Only a single call to the INDEX_TEMPLATE is allowed
- return False
-
- return True
+ # Only a single call to the INDEX_TEMPLATE is allowed
+ return len(templates) == 1 and templates[0][0] == self.INDEX_TEMPLATE

def purge(self) -> None: # type: ignore[override]
"""Overwrite purge method.
diff --git a/pywikibot/scripts/wrapper.py b/pywikibot/scripts/wrapper.py
index 32eaa8f..71e7a0b 100755
--- a/pywikibot/scripts/wrapper.py
+++ b/pywikibot/scripts/wrapper.py
@@ -139,8 +139,8 @@
for key, value in environ: # pragma: no cover
os.environ[key] = value

- sys.argv = [filename] + args
- pwb.argvu = [Path(filename).stem] + args
+ sys.argv = [filename, *args]
+ pwb.argvu = [Path(filename).stem, *args]
sys.path.insert(0, os.path.dirname(filename))

try:
@@ -388,7 +388,7 @@
"""Search for filename in given paths within 'root' base directory."""
for file_package in paths:
package = file_package.split('.')
- path = package + [filename]
+ path = [*package, filename]
testpath = root.joinpath(*path)
if testpath.exists():
return str(testpath)
diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py
index 8f07e39..e733495 100644
--- a/pywikibot/site/_apisite.py
+++ b/pywikibot/site/_apisite.py
@@ -325,16 +325,14 @@
"""
if not hasattr(self, '_userinfo'):
return False
+
if 'anon' in self.userinfo or not self.userinfo.get('id'):
return False

if not self.userinfo.get('name'):
return False

- if self.userinfo['name'] != self.username():
- return False
-
- return True
+ return self.userinfo['name'] == self.username()

def is_oauth_token_available(self) -> bool:
"""Check whether OAuth token is set for this site."""
@@ -473,7 +471,7 @@
pywikibot.warning('Using OAuth suppresses logout function')

# check if already logged out to avoid requiring logging in
- if not self._loginstatus == login.LoginStatus.NOT_LOGGED_IN:
+ if self._loginstatus != login.LoginStatus.NOT_LOGGED_IN:
req_params = {'action': 'logout', 'token': self.tokens['csrf']}
uirequest = self.simple_request(**req_params)
uirequest.submit()
@@ -1638,7 +1636,7 @@
item['from']: {
'title': item['to'],
'section': '#' + item['tofragment']
- if 'tofragment' in item and item['tofragment'] else ''
+ if item.get('tofragment') else ''
}
for item in result['query']['redirects']
}
@@ -1775,7 +1773,7 @@
data = data.get('query', data)

user_tokens = {}
- if 'tokens' in data and data['tokens']:
+ if data.get('tokens'):
user_tokens = {removesuffix(key, 'token'): val
for key, val in data['tokens'].items()
if val != '+\\'}
diff --git a/pywikibot/site/_datasite.py b/pywikibot/site/_datasite.py
index f5be91e..ec1e919 100644
--- a/pywikibot/site/_datasite.py
+++ b/pywikibot/site/_datasite.py
@@ -316,8 +316,10 @@
params['action'] = 'wbeditentity'
if bot:
params['bot'] = 1
- if 'baserevid' in kwargs and kwargs['baserevid']:
+
+ if kwargs.get('baserevid'):
params['baserevid'] = kwargs['baserevid']
+
params['token'] = self.tokens['csrf']

for arg in kwargs:
@@ -447,10 +449,7 @@
'summary': summary, 'bot': bot, 'token': self.tokens['csrf']}

# build up the snak
- if isinstance(source, list):
- sources = source
- else:
- sources = [source]
+ sources = source if isinstance(source, list) else [source]

snak = {}
for sourceclaim in sources:
diff --git a/pywikibot/site/_namespace.py b/pywikibot/site/_namespace.py
index 20d9ef4..1d70744 100644
--- a/pywikibot/site/_namespace.py
+++ b/pywikibot/site/_namespace.py
@@ -138,8 +138,8 @@

def _distinct(self):
if self.custom_name == self.canonical_name:
- return [self.canonical_name] + self.aliases
- return [self.custom_name, self.canonical_name] + self.aliases
+ return [self.canonical_name, *self.aliases]
+ return [self.custom_name, self.canonical_name, *self.aliases]

def _contains_lowercase_name(self, name):
"""Determine a lowercase normalised name is a name of this namespace.
@@ -442,8 +442,7 @@
result = [NotImplemented if isinstance(ns, bool)
else self._lookup_name(ns)
if isinstance(ns, str) and not ns.lstrip('-').isdigit()
- else namespaces[int(ns)] if int(ns) in namespaces
- else None
+ else namespaces.get(int(ns))
for ns in identifiers]

if NotImplemented in result:
diff --git a/pywikibot/specialbots/_unlink.py b/pywikibot/specialbots/_unlink.py
index dc65dfa..17d2910 100644
--- a/pywikibot/specialbots/_unlink.py
+++ b/pywikibot/specialbots/_unlink.py
@@ -85,10 +85,7 @@
unlink_callback.current_text,
jumpIndex=unlink_callback.current_range[0])
# if user didn't press Cancel
- if new_text:
- text = new_text
- else:
- text = unlink_callback.current_text
+ text = new_text or unlink_callback.current_text
else:
break

diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 29cd9df..af64ff7 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -1426,10 +1426,7 @@
s.append(link)
else:
raise ValueError('links dict must contain Page or Link objects')
- if insite.code in insite.family.interwiki_on_one_line:
- sep = ' '
- else:
- sep = '\n'
+ sep = ' ' if insite.code in insite.family.interwiki_on_one_line else '\n'
return sep.join(s) + '\n'


@@ -1891,10 +1888,7 @@
name, params = match[1], match[2]

# Special case for {{a}}
- if params is None:
- params = []
- else:
- params = params.split('|')
+ params = [] if params is None else params.split('|')

numbered_param_identifiers = itertools.count(1)

@@ -2190,9 +2184,7 @@
return False
if min_pos < tzinfo_pos < max_pos:
return False
- if min_pos < time_pos < max_pos:
- return False
- return True
+ return not min_pos < time_pos < max_pos

def timestripper(self, line: str) -> pywikibot.Timestamp | None:
"""
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py
index ec0cfba..d50dafb 100644
--- a/pywikibot/throttle.py
+++ b/pywikibot/throttle.py
@@ -1,6 +1,6 @@
"""Mechanics to slow down wiki read and/or write rate."""
#
-# (C) Pywikibot team, 2008-2023
+# (C) Pywikibot team, 2008-2024
#
# Distributed under the terms of the MIT license.
#
@@ -222,12 +222,8 @@

This value is the maximum wait between reads/writes, not taking
into account of how much time has elapsed since the last access.
-
"""
- if write:
- thisdelay = self.writedelay
- else:
- thisdelay = self.delay
+ thisdelay = self.writedelay if write else self.delay

# We're checking for multiple processes
if time.time() > self.checktime + self.checkdelay:
diff --git a/pywikibot/tools/__init__.py b/pywikibot/tools/__init__.py
index 32c0bf2..00ce4c3 100644
--- a/pywikibot/tools/__init__.py
+++ b/pywikibot/tools/__init__.py
@@ -641,7 +641,7 @@

.. versionadded:: 3.0
"""
- args = list(args) + [dict(kwargs)]
+ args = [*list(args), dict(kwargs)]
conflicts = set()
result = {}
for arg in args:
diff --git a/pywikibot/tools/djvu.py b/pywikibot/tools/djvu.py
index 3aba267..43b1af3 100644
--- a/pywikibot/tools/djvu.py
+++ b/pywikibot/tools/djvu.py
@@ -24,12 +24,8 @@
:return: returns a tuple (res, stdoutdata), where
res is True if dp.returncode != 0 else False
"""
- if not isinstance(args, str):
- # upcast any param in sequence args to str
- cmd = ' '.join(str(a) for a in args)
- else:
- cmd = args
-
+ # upcast any param in sequence args to str
+ cmd = ' '.join(str(a) for a in args) if not isinstance(args, str) else args
dp = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdoutdata, stderrdata = dp.communicate()

diff --git a/pywikibot/userinterfaces/buffer_interface.py b/pywikibot/userinterfaces/buffer_interface.py
index 166d8b5..97d7237 100644
--- a/pywikibot/userinterfaces/buffer_interface.py
+++ b/pywikibot/userinterfaces/buffer_interface.py
@@ -47,7 +47,7 @@
"""Ask the user a question and return the answer."""
return default

- def input_choice(self, question: str, options, default: str = None,
+ def input_choice(self, question: str, options, default: str | None = None,
return_shortcut: bool = True,
automatic_quit: bool = True, force: bool = False):
"""Ask the user and returns a value from the options."""
diff --git a/pywikibot/userinterfaces/terminal_interface_base.py b/pywikibot/userinterfaces/terminal_interface_base.py
index fb76bf3..c905a24 100644
--- a/pywikibot/userinterfaces/terminal_interface_base.py
+++ b/pywikibot/userinterfaces/terminal_interface_base.py
@@ -53,7 +53,7 @@
'white',
]

-_color_pat = '((:?{0});?(:?{0})?)'.format('|'.join(colors + ['previous']))
+_color_pat = '((:?{0});?(:?{0})?)'.format('|'.join([*colors, 'previous']))
old_colorTagR = re.compile(f'\03{{{_color_pat}}}')
new_colorTagR = re.compile(f'<<{_color_pat}>>')

@@ -465,10 +465,8 @@
if force and not default:
raise ValueError('With no default option it cannot be forced')

- if isinstance(options, Option):
- options = [options]
- else: # make a copy
- options = list(options)
+ # make a copy
+ options = [options] if isinstance(options, Option) else list(options)

if not options:
raise ValueError('No options are given.')
diff --git a/pywikibot/xmlreader.py b/pywikibot/xmlreader.py
index bb8c4d1..2788ff7 100644
--- a/pywikibot/xmlreader.py
+++ b/pywikibot/xmlreader.py
@@ -132,7 +132,7 @@
self,
filename,
*,
- allrevisions: bool | str = None,
+ allrevisions: bool | str | None = None,
# when allrevisions removed, revisions can default to 'latest'
revisions: str = 'first_found',
on_error: Callable[[ParseError], None] | None = None,
@@ -251,10 +251,7 @@
uri = self.uri
headers = self._headers(elem)
for revision in elem.findall(f'{uri}revision'):
- if with_id:
- revid = int(revision.findtext(f'{uri}id'))
- else:
- revid = 0
+ revid = int(revision.findtext(f'{uri}id')) if with_id else 0
yield RawRev(headers, revision, revid)

@staticmethod
diff --git a/scripts/__init__.py b/scripts/__init__.py
index 02c277d..9772875 100644
--- a/scripts/__init__.py
+++ b/scripts/__init__.py
@@ -24,12 +24,11 @@
- find a script even if given script name does not match a filename e.g.
due to spelling mistake
"""
-from __future__ import annotations
-
-
#
# (C) Pywikibot team, 2021-2024
#
# Distributed under the terms of the MIT license.
#
+from __future__ import annotations
+
__version__ = '9.4.0'
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 43ea20b..e06725f 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -751,7 +751,7 @@
"""Function to load the white templates."""
# A template as {{en is not a license! Adding also them in the
# whitelist template...
- for key in Family.load('wikipedia').langs.keys():
+ for key in Family.load('wikipedia').langs:
self.hiddentemplates.add(pywikibot.Page(
self.site, f'Template:{key}'))
# Hidden template loading
@@ -1000,11 +1000,10 @@
if not result:
return True # If Errors, exit (but continue the check)

- if older_image_page.title() != self.image_name:
- # The image is a duplicate, it will be deleted. So skip the
- # check-part, useless
- return False
- return True # Ok - No problem. Let's continue the checking phase
+ # If different, the image is a duplicate; it will be deleted.
+ # So skip the check-part as useless.
+ # Otherwise ok - No problem. Let's continue the checking phase
+ return older_image_page.title() == self.image_name

def report_image(self, image_to_report, rep_page=None, com=None,
rep_text=None, addings: bool = True) -> bool:
diff --git a/scripts/claimit.py b/scripts/claimit.py
index ab892fc..3f1ba9e 100755
--- a/scripts/claimit.py
+++ b/scripts/claimit.py
@@ -135,12 +135,9 @@
elif claim.type == 'string':
target = target_str
elif claim.type == 'globe-coordinate':
- coord_args = [
- float(c) for c in target_str.split(',')]
- if len(coord_args) >= 3:
- precision = coord_args[2]
- else:
- precision = 0.0001 # Default value (~10 m at equator)
+ coord_args = [float(c) for c in target_str.split(',')]
+ # Default value 0.0001 ~10 m at equator
+ precision = coord_args[2] if len(coord_args) >= 3 else 0.0001
target = pywikibot.Coordinate(
coord_args[0], coord_args[1], precision=precision)
else:
diff --git a/scripts/dataextend.py b/scripts/dataextend.py
index 4ae3dcc..1a802cb 100755
--- a/scripts/dataextend.py
+++ b/scripts/dataextend.py
@@ -860,7 +860,7 @@
aliases = item.aliases

# This can happen after reloading
- if prop not in claims.keys():
+ if prop not in claims:
continue

if self.opt.restrict:
@@ -1127,7 +1127,7 @@
claims['Data'] = [Quasiclaim(item.title())]

editdescriptions = {}
- for language in newdescriptions.keys():
+ for language in newdescriptions:
newdescription = self.definedescription(
language, descriptions.get(language),
newdescriptions.get(language))
@@ -1150,7 +1150,7 @@

@staticmethod
def definedescription(language, existingdescription, suggestions):
- possibilities = [existingdescription] + list(suggestions)
+ possibilities = [existingdescription, *list(suggestions)]

pywikibot.info(f'\nSelect a description for language {language}:')
pywikibot.info('Default is to keep the old value (0)')
@@ -1184,13 +1184,13 @@

if anythingfound:
pywikibot.info('\nNew names found:')
- for language in realnewnames.keys():
+ for language in realnewnames:
for name in realnewnames[language]:
pywikibot.info(f'{language}: {name}')
result = pywikibot.input('Add these names? (y/n/[S]elect/x) ')
if not result or result[0].upper() not in 'YNX':
chosennewnames = defaultdict(list)
- for language in realnewnames.keys():
+ for language in realnewnames:
for name in realnewnames[language]:
result = pywikibot.input(f'{language}: {name} - ')
if (not result) or result[0].upper() == 'Y':
@@ -1200,7 +1200,7 @@
realnewnames = chosennewnames
result = 'Y'
if result[0].upper() == 'X':
- for language in realnewnames.keys():
+ for language in realnewnames:
for name in realnewnames[language]:
self.noname.add(name)
elif result[0].upper() != 'N':
@@ -2228,7 +2228,7 @@
if not m:
return None
if dtype:
- alt = [dtype] + alt
+ alt = [dtype, *alt]
for alttype in alt:
if self.getdata(alttype, m[1], ask=False) \
and self.getdata(alttype, m[1], ask=False) != 'XXX':
@@ -2248,7 +2248,7 @@
if not alt:
alt = []
if dtype:
- alt = [dtype] + alt
+ alt = [dtype, *alt]
matches = re.findall(regex, html)
result = set()
for match in matches:
@@ -7541,10 +7541,7 @@
def findlongtext(self, html: str):
result = self.findbyre(
r'(?s)Présentation[^<>]*</span>\s*<span[^<>]*>(.*?)</span>', html)
- if result:
- result = [result]
- else:
- result = []
+ result = [result] if result else []
result += self.findallbyre(r'<p align="justify">(.*?)</p>', html)
return '\n'.join(result)

@@ -11325,10 +11322,7 @@

def findnames(self, html) -> list[str]:
section = self.findbyre(r'(?s)Outros nomes.*?<ul>(.*?)</ul>', html)
- if section:
- result = self.findallbyre(r'(?s)>(.*?)<', section)
- else:
- result = []
+ result = self.findallbyre(r'(?s)>(.*?)<', section) if section else []
return (result + self.findallbyre(r'title" content="(.*?)[\|"]', html)
+ self.findallbyre(r'(?s)<title>(.*?)[\|"]', html))

diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py
index 55fc3f8..d1aea72 100755
--- a/scripts/fixing_redirects.py
+++ b/scripts/fixing_redirects.py
@@ -116,10 +116,7 @@
if not link_text:
# or like this: [[page_title]]trailing_chars
link_text = page_title
- if m['section'] is None:
- section = ''
- else:
- section = m['section']
+ section = m['section'] or ''
if section and target_page.section():
pywikibot.warning(f'Source section {section} and target '
f'section {target_page} found. Skipping.')
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 81a8359..d303363 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1574,7 +1574,7 @@
del new[page.site]
# Do not add interwiki links to foreign families that page.site() does
# not forward to
- for stmp in new.keys():
+ for stmp in new:
if stmp.family != page.site.family \
and stmp.family.name != page.site.family.interwiki_forward:
del new[stmp]
diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py
index bc338a5..e3cbb68 100755
--- a/scripts/maintenance/cache.py
+++ b/scripts/maintenance/cache.py
@@ -308,10 +308,7 @@

if func is None or func(entry):
if output_func or action_func is None:
- if output_func is None:
- output = entry
- else:
- output = output_func(entry)
+ output = entry if output_func is None else output_func(entry)
if output is not None:
pywikibot.info(output)
if action_func:
@@ -471,10 +468,7 @@
cache_paths += [
os.path.join(userpath, f) for f in folders]

- if delete:
- action_func = CacheEntry._delete
- else:
- action_func = None
+ action_func = CacheEntry._delete if delete else None

if output:
output_func = _parse_command(output, 'output')
diff --git a/scripts/pagefromfile.py b/scripts/pagefromfile.py
index 8a8c1a4..c5ef416 100755
--- a/scripts/pagefromfile.py
+++ b/scripts/pagefromfile.py
@@ -60,7 +60,7 @@
can be added between them by specifying '\n' as a value.
"""
#
-# (C) Pywikibot team, 2004-2022
+# (C) Pywikibot team, 2004-2024
#
# Distributed under the terms of the MIT license.
#
@@ -254,10 +254,7 @@
def find_page(self, text) -> tuple[int, str, str]:
"""Find page to work on."""
location = self.page_regex.search(text)
- if self.opt.include:
- contents = location[0]
- else:
- contents = location[1]
+ contents = location[0] if self.opt.include else location[1]

title = self.opt.title
if not title:
diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 9dbfb42..5607c9c 100755
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -436,7 +436,7 @@
# Check
manual = 'mw:Manual:Pywikibot/refLinks'
code = None
- for alt in [self.site.code] + i18n._altlang(self.site.code):
+ for alt in [self.site.code, *i18n._altlang(self.site.code)]:
if alt in localized_msg:
code = alt
break
@@ -449,10 +449,7 @@
self.msg = i18n.twtranslate(self.site, 'reflinks-msg', locals())

local = i18n.translate(self.site, badtitles)
- if local:
- bad = f'({globalbadtitles}|{local})'
- else:
- bad = globalbadtitles
+ bad = f'({globalbadtitles}|{local})' if local else globalbadtitles

self.titleBlackList = re.compile(bad, re.I | re.S | re.X)
self.norefbot = noreferences.NoReferencesBot(verbose=False)
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index 90efbb6c..f9ba4de 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -927,10 +927,7 @@
# or like this: [[page_title]]trailing_chars
link_text = page_title

- if m['section'] is None:
- section = ''
- else:
- section = m['section']
+ section = m['section'] or ''

trailing_chars = m['linktrail']
if trailing_chars:
diff --git a/scripts/watchlist.py b/scripts/watchlist.py
index 6618d52..2be17d6 100755
--- a/scripts/watchlist.py
+++ b/scripts/watchlist.py
@@ -79,7 +79,7 @@

def refresh(site):
"""Fetch the watchlist."""
- pywikibot.info(f'Retrieving watchlist for {str(site)}.')
+ pywikibot.info(f'Retrieving watchlist for {site}.')
return list(site.watched_pages(force=True))


diff --git a/scripts/welcome.py b/scripts/welcome.py
index 7ea7954..341e6e6 100755
--- a/scripts/welcome.py
+++ b/scripts/welcome.py
@@ -663,10 +663,7 @@
if not globalvar.make_welcome_log or not self.welcomed_users:
return

- if self.site.code == 'it':
- pattern = '%d/%m/%Y'
- else:
- pattern = '%Y/%m/%d'
+ pattern = '%d/%m/%Y' if self.site.code == 'it' else '%Y/%m/%d'
target = self.log_name + '/' + time.strftime(
pattern, time.localtime(time.time()))

diff --git a/tests/aspects.py b/tests/aspects.py
index 99286e2..e29fecb 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -865,7 +865,7 @@
def add_base(bases, subclass):
"""Return a tuple of bases with the subclasses added if not already."""
if not any(issubclass(base, subclass) for base in bases):
- bases = (subclass, ) + bases
+ bases = (subclass, *bases)
return bases

@staticmethod
diff --git a/tests/bot_tests.py b/tests/bot_tests.py
index 44e57d7..0f5cb99 100755
--- a/tests/bot_tests.py
+++ b/tests/bot_tests.py
@@ -233,7 +233,7 @@
def _missing_generator(self):
"""Yield pages and the last one does not exist."""
self._count = 0 # skip_page skips one page
- self._current_page = list(self.site.allpages(total=1))[0]
+ self._current_page = next(self.site.allpages(total=1))
yield self._current_page
while self._current_page.exists():
self._count += 1
diff --git a/tests/eventstreams_tests.py b/tests/eventstreams_tests.py
index 0eacee6..ea8d7e3 100755
--- a/tests/eventstreams_tests.py
+++ b/tests/eventstreams_tests.py
@@ -59,7 +59,7 @@
self.assertEqual(e._url, e.sse_kwargs.get('url'))
self.assertIsNone(e._total)
self.assertEqual(e._streams, streams)
- site_repr = f'site={repr(site)}, ' if site != Site() else ''
+ site_repr = f'site={site!r}, ' if site != Site() else ''
self.assertEqual(repr(e),
f"EventStreams({site_repr}streams='{streams}')")

diff --git a/tests/make_dist_tests.py b/tests/make_dist_tests.py
index 699c666..8b99c1e 100755
--- a/tests/make_dist_tests.py
+++ b/tests/make_dist_tests.py
@@ -38,12 +38,12 @@
def test_main(self):
"""Test main result."""
saved_argv = sys.argv
- sys.argv = saved_argv + ['-clear']
+ sys.argv = [*saved_argv, '-clear']
self.assertTrue(make_dist.main())

# no build or twine modules
self.assertFalse(make_dist.main())
- sys.argv = saved_argv + ['-local']
+ sys.argv = [*saved_argv, '-local']
self.assertFalse(make_dist.main())
sys.argv = saved_argv

diff --git a/tests/pwb_tests.py b/tests/pwb_tests.py
index b2a39c7..4a04f12 100755
--- a/tests/pwb_tests.py
+++ b/tests/pwb_tests.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""Test pwb.py."""
#
-# (C) Pywikibot team, 2007-2023
+# (C) Pywikibot team, 2007-2024
#
# Distributed under the terms of the MIT license.
#
@@ -82,13 +82,13 @@
script_name = 'print_argv'
script_path = join_pwb_tests_path(script_name + '.py')
script_opts = ['-help']
- command = [script_path] + script_opts
+ command = [script_path, *script_opts]
without_global_args = execute_pwb(command)
- with_no_global_args = execute_pwb(['-maxlag:5'] + command)
+ with_no_global_args = execute_pwb(['-maxlag:5', *command])
self.assertEqual(without_global_args['stdout'],
with_no_global_args['stdout'])
self.assertEqual(without_global_args['stdout'].rstrip(),
- str([script_name] + script_opts))
+ str([script_name, *script_opts]))

def test_script_found(self):
"""Test pwb.py script call which is found."""
diff --git a/tests/replacebot_tests.py b/tests/replacebot_tests.py
index b9b6bfc..319871a 100755
--- a/tests/replacebot_tests.py
+++ b/tests/replacebot_tests.py
@@ -95,7 +95,7 @@
-page to not have an empty generator
-site as it will use Site() otherwise
"""
- return replace.main(*(args + ('-site:wikipedia:test', '-page:TEST')))
+ return replace.main(*args, '-site:wikipedia:test', '-page:TEST')

def test_invalid_replacements(self):
"""Test invalid command line replacement configurations."""
diff --git a/tests/script_tests.py b/tests/script_tests.py
index 6330641..bd208b3 100755
--- a/tests/script_tests.py
+++ b/tests/script_tests.py
@@ -202,7 +202,7 @@
'For global options use -help:global or run pwb'
global_args = ['-pwb_close_matches:1']

- cmd = global_args + [script_name] + args
+ cmd = [*global_args, script_name, *args]
data_in = script_input.get(script_name)
if isinstance(self._timeout, bool):
do_timeout = self._timeout
diff --git a/tests/token_tests.py b/tests/token_tests.py
index 4a86a57..11807e8 100755
--- a/tests/token_tests.py
+++ b/tests/token_tests.py
@@ -35,7 +35,7 @@
def test_tokens(self):
"""Test tokens."""
redirected_tokens = ['edit', 'move', 'delete']
- for ttype in redirected_tokens + ['patrol', 'deleteglobalaccount']:
+ for ttype in [*redirected_tokens, 'patrol', 'deleteglobalaccount']:
self.assertIsInstance(self.site.tokens[ttype], str)
self.assertIn(ttype, self.site.tokens) # test __contains__
if ttype in redirected_tokens:
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index f3aa96e..a5c3135 100755
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -2111,7 +2111,7 @@

def test_iterlinks_page_object(self):
"""Test iterlinks for page objects."""
- page = [pg for pg in self.wdp.iterlinks() if pg.site.code == 'af'][0]
+ page = next(pg for pg in self.wdp.iterlinks() if pg.site.code == 'af')
self.assertEqual(page, pywikibot.Page(self.get_site('afwiki'),
'New York Stad'))


To view, visit change 1057804. To unsubscribe, or for help writing mail filters, visit settings.

Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I8f1baf826bb31a24dc693b9e18af8952e5becac9
Gerrit-Change-Number: 1057804
Gerrit-PatchSet: 5
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: D3r1ck01 <dalangi-ctr@wikimedia.org>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot