jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/860955 )
Change subject: [IMPR] Simplify code
......................................................................
[IMPR] Simplify code
Change-Id: I9f53e97e85bce11e5015431c03e9fa3a91097d96
---
M pywikibot/page/_collections.py
M pywikibot/textlib.py
M pywikibot/tools/djvu.py
M pywikibot/version.py
M scripts/nowcommons.py
M scripts/watchlist.py
M scripts/checkimages.py
M pywikibot/site/_datasite.py
M tests/aspects.py
M pywikibot/diff.py
M pywikibot/tools/_deprecate.py
M pywikibot/site/_apisite.py
M pywikibot/flow.py
13 files changed, 61 insertions(+), 79 deletions(-)
Approvals:
  Matěj Suchánek: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/pywikibot/diff.py b/pywikibot/diff.py
index 4a8e6b7..117b6dd 100644
--- a/pywikibot/diff.py
+++ b/pywikibot/diff.py
@@ -587,9 +587,7 @@
     else:
         pywikibot.info(template.format('None.', '', ''))
-    text = ''.join(text_list)
-
-    return text
+    return ''.join(text_list)


 def html_comparator(compare_string: str) -> Dict[str, List[str]]:
diff --git a/pywikibot/flow.py b/pywikibot/flow.py
index ed55212..ff95165 100644
--- a/pywikibot/flow.py
+++ b/pywikibot/flow.py
@@ -503,8 +503,7 @@
             del self._replies
         data = self.site.reply_to_post(self.page, reply_to, content,
                                        content_format)
-        post = Post(self.page, data['post-id'])
-        return post
+        return Post(self.page, data['post-id'])
     # Moderation
     def delete(self, reason: str) -> None:
diff --git a/pywikibot/page/_collections.py b/pywikibot/page/_collections.py
index 2826fa9..77d8ad8 100644
--- a/pywikibot/page/_collections.py
+++ b/pywikibot/page/_collections.py
@@ -267,17 +267,18 @@
         if not diffto:
             return claims
-        temp = defaultdict(list)
-        props_add = set(claims.keys())
-        props_orig = set(diffto.keys())
+        diff_claims = defaultdict(list)
+        props_add = set(claims)
+        props_orig = set(diffto)
         for prop in (props_orig | props_add):
             if prop not in props_orig:
-                temp[prop].extend(claims[prop])
+                diff_claims[prop].extend(claims[prop])
                 continue
             if prop not in props_add:
-                temp[prop].extend({'id': claim['id'], 'remove': ''}
-                                  for claim in diffto[prop] if 'id' in claim)
+                diff_claims[prop].extend(
+                    {'id': claim['id'], 'remove': ''}
+                    for claim in diffto[prop] if 'id' in claim)
                 continue
             claim_ids = set()
@@ -286,24 +287,25 @@
             for claim, json in zip(self[prop], claims[prop]):
                 if 'id' not in json:
-                    temp[prop].append(json)
+                    diff_claims[prop].append(json)
                     continue
                 claim_ids.add(json['id'])
                 if json['id'] in claim_map:
                     other = pywikibot.page.Claim.fromJSON(
                         self.repo, claim_map[json['id']])
+
                     if claim.same_as(other, ignore_rank=False,
                                      ignore_refs=False):
                         continue
-                temp[prop].append(json)
+
+                diff_claims[prop].append(json)
             for claim in diffto[prop]:
                 if 'id' in claim and claim['id'] not in claim_ids:
-                    temp[prop].append({'id': claim['id'], 'remove': ''})
+                    diff_claims[prop].append({'id': claim['id'], 'remove': ''})
-        claims = temp
-        return claims
+        return diff_claims

     def set_on_item(self, item) -> None:
         """Set Claim.on_item attribute for all claims in this collection."""
diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py
index c892912..4509614 100644
--- a/pywikibot/site/_apisite.py
+++ b/pywikibot/site/_apisite.py
@@ -2652,9 +2652,7 @@
             anononly=anononly, nocreate=nocreate, autoblock=autoblock,
             noemail=noemail, reblock=reblock, allowusertalk=allowusertalk)
-
-        data = req.submit()
-        return data
+        return req.submit()

     @need_right('unblock')
     def unblockuser(
@@ -2674,9 +2672,7 @@
             user=user.username,
             token=self.tokens['csrf'],
             reason=reason)
-
-        data = req.submit()
-        return data
+        return req.submit()

     @need_right('editmywatchlist')
     def watch(
@@ -2863,5 +2859,4 @@
         req = self.simple_request(**params)
         data = req.submit()
-        comparison = data['compare']['*']
-        return comparison
+        return data['compare']['*']
diff --git a/pywikibot/site/_datasite.py b/pywikibot/site/_datasite.py
index 75dce37..054452e 100644
--- a/pywikibot/site/_datasite.py
+++ b/pywikibot/site/_datasite.py
@@ -655,8 +655,7 @@
         if bot:
             params['bot'] = 1
         req = self.simple_request(**params)
-        data = req.submit()
-        return data
+        return req.submit()

     @need_right('item-redirect')
     def set_redirect_target(self, from_item, to_item, bot: bool = True):
@@ -883,8 +882,7 @@
                           .format(arg, action), UserWarning, 2)
         req = self.simple_request(**params)
-        data = req.submit()
-        return data
+        return req.submit()

     def wbsetaliases(self, itemdef, aliases, **kwargs):
         """
@@ -945,8 +943,7 @@
         if baserevid:
             params['baserevid'] = baserevid
         req = self.simple_request(**params)
-        data = req.submit()
-        return data
+        return req.submit()

     @need_right('edit')
     @need_extension('WikibaseLexeme')
@@ -970,8 +967,7 @@
         if baserevid:
             params['baserevid'] = baserevid
         req = self.simple_request(**params)
-        data = req.submit()
-        return data
+        return req.submit()

     @need_right('edit')
     @need_extension('WikibaseLexeme')
@@ -1000,5 +996,4 @@
         if baserevid:
             params['baserevid'] = baserevid
         req = self.simple_request(**params)
-        data = req.submit()
-        return data
+        return req.submit()
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index d7e2366..7bee53a 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -467,8 +467,8 @@
             index += 1
             markerpos = match.start() + len(replacement)
             replaced += 1
-    text = text[:markerpos] + marker + text[markerpos:]
-    return text
+
+    return text[:markerpos] + marker + text[markerpos:]
 def removeDisabledParts(text: str,
@@ -1641,8 +1641,7 @@
         regex = r'(?<!\[)' + regex
     elif onlyBracketed:
         regex = r'\[' + regex
-    linkR = re.compile(regex)
-    return linkR
+    return re.compile(regex)
 # --------------------------------
@@ -1821,8 +1820,7 @@
     :return: reformatted text
     """
     isbnR = re.compile(r'(?<=ISBN )(?P<code>[\d-]+[\dXx])')
-    text = isbnR.sub(match_func, text)
-    return text
+    return isbnR.sub(match_func, text)


 # ---------------------------------------
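The first textlib.py hunk above relies on the standard Python idiom for
inserting a substring at an index: slicing splits the text around the
position and concatenation rejoins it with the marker in between. A toy
demonstration (values invented for the example):

    text = 'foobar'
    marker = '<!--X-->'
    markerpos = 3
    assert text[:markerpos] + marker + text[markerpos:] == 'foo<!--X-->bar'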
diff --git a/pywikibot/tools/_deprecate.py b/pywikibot/tools/_deprecate.py
index 691c8a8..8995c86 100644
--- a/pywikibot/tools/_deprecate.py
+++ b/pywikibot/tools/_deprecate.py
@@ -264,19 +264,16 @@
         re.IGNORECASE)
     # Add the deprecation notice to the docstring if not present
-    if not wrapper.__doc__:
+    if not (wrapper.__doc__ and deprecated_notice.search(wrapper.__doc__)):
         add_docstring(wrapper)
     else:
-        if not deprecated_notice.search(wrapper.__doc__):
-            add_docstring(wrapper)
-        else:
-            # Get docstring up to :params so deprecation notices for
-            # parameters don't disrupt it
-            trim_params = re.compile(r'^.*?((?=:param)|$)', re.DOTALL)
-            trimmed_doc = trim_params.match(wrapper.__doc__)[0]
+        # Get docstring up to :params so deprecation notices for
+        # parameters don't disrupt it
+        trim_params = re.compile(r'^.*?((?=:param)|$)', re.DOTALL)
+        trimmed_doc = trim_params.match(wrapper.__doc__)[0]
-            if not deprecated_notice.search(trimmed_doc):  # No notice
-                add_docstring(wrapper)
+        if not deprecated_notice.search(trimmed_doc):  # No notice
+            add_docstring(wrapper)

     return wrapper
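The _deprecate.py rewrite removes a level of nesting: "no docstring at all"
and "docstring without a deprecation notice" both led to add_docstring, so
the combined guard not (doc and notice.search(doc)) handles both in one
branch, and the else branch keeps only the trimmed-docstring check. A
condensed sketch of the equivalence (simplified stand-ins, not the real
wrapper code):

    import re

    notice = re.compile('deprecated', re.IGNORECASE)

    def needs_notice_nested(doc):
        # Old shape: two nested conditionals.
        if not doc:
            return True
        if not notice.search(doc):
            return True
        return False

    def needs_notice_flat(doc):
        # New shape: one combined guard.
        return not (doc and notice.search(doc))

    for doc in (None, '', 'Docstring.', 'Deprecated; use X instead.'):
        assert needs_notice_nested(doc) == needs_notice_flat(doc)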
diff --git a/pywikibot/tools/djvu.py b/pywikibot/tools/djvu.py
index 02e7c4e..2c885c6 100644
--- a/pywikibot/tools/djvu.py
+++ b/pywikibot/tools/djvu.py
@@ -92,8 +92,7 @@
         if force:
             for el in cache:
                 obj.__dict__.pop(el, None)
-        _res = fn(obj, *args, **kwargs)
-        return _res
+        return fn(obj, *args, **kwargs)
     return wrapper

 def check_page_number(fn):
@@ -108,8 +107,7 @@
             raise ValueError('Page {} not in file {} [{}-{}]'
                              .format(int(n), obj.file, int(n),
                                      int(obj.number_of_images())))
-        _res = fn(obj, *args, **kwargs)
-        return _res
+        return fn(obj, *args, **kwargs)
     return wrapper
     @check_cache
diff --git a/pywikibot/version.py b/pywikibot/version.py
index a9b030c..bfaa619 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -27,10 +27,8 @@
 from pywikibot.exceptions import VersionParseError
-def _get_program_dir():
-    _program_dir = os.path.normpath(
-        os.path.split(os.path.dirname(__file__))[0])
-    return _program_dir
+def _get_program_dir() -> str:
+    return os.path.normpath(os.path.split(os.path.dirname(__file__))[0])

 def get_toolforge_hostname() -> Optional[str]:
@@ -322,16 +320,13 @@
 def getversion_onlinerepo(path: str = 'branches/master'):
     """Retrieve current framework git hash from Gerrit."""
-    from pywikibot.comms import http
-
     # Gerrit API responses include )]}' at the beginning,
     # make sure to strip it out
-    buf = http.fetch(
+    buf = fetch(
         'https://gerrit.wikimedia.org/r/projects/pywikibot%2Fcore/' + path,
         headers={'user-agent': '{pwb}'}).text[4:]
     try:
-        hsh = json.loads(buf)['revision']
-        return hsh
+        return json.loads(buf)['revision']
     except Exception as e:
         raise VersionParseError(f'{e!r} while parsing {buf!r}')
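The version.py hunk preserves a detail that is easy to miss: Gerrit's REST
API prepends the magic prefix )]}' to every JSON response as a cross-site
script inclusion guard, which is why the body is sliced with text[4:]
before being parsed. A standalone illustration of just that stripping step
(the response text is a made-up example):

    import json

    raw = ')]}\'\n{"revision": "deadbeef"}'
    payload = raw[4:]  # drop the four-character )]}' guard
    assert json.loads(payload)['revision'] == 'deadbeef'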
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 49c3985..5eb5923 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -613,8 +613,8 @@
     results = re.findall(regex, report_page_text)
     if results:
-        luser = results[0]
-        return luser
+        return results[0]
+
     # we can't find the user, report the problem to the bot
     return upload_bot_array[0]
diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py
index daf30e1..317c08d 100755
--- a/scripts/nowcommons.py
+++ b/scripts/nowcommons.py
@@ -222,8 +222,7 @@
                    for t in self.nc_templates)
         gen = chain(*gens)
         gen = filter_unique(gen, key=lambda p: '{}:{}:{}'.format(*p._cmpkey()))
-        gen = pg.PreloadingGenerator(gen)
-        return gen
+        return pg.PreloadingGenerator(gen)
     def find_file_on_commons(self, local_file_page):
         """Find filename on Commons."""
diff --git a/scripts/watchlist.py b/scripts/watchlist.py
index 496c36d..de46781 100755
--- a/scripts/watchlist.py
+++ b/scripts/watchlist.py
@@ -33,17 +33,17 @@
 import pywikibot
 from pywikibot import config
+from pywikibot.backports import List
 from pywikibot.data.api import CachedRequest
 from pywikibot.exceptions import InvalidTitleError
 from scripts.maintenance.cache import CacheEntry
-def get(site=None):
+def get(site=None) -> List[str]:
     """Load the watchlist, fetching it if necessary."""
     if site is None:
         site = pywikibot.Site()
-    watchlist = [p.title() for p in site.watched_pages()]
-    return watchlist
+    return [p.title() for p in site.watched_pages()]


 def count_watchlist(site=None) -> None:
diff --git a/tests/aspects.py b/tests/aspects.py
index 3f12d00..e8ba489 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -25,7 +25,7 @@
 import pywikibot
 from pywikibot import Site, config
-from pywikibot.backports import removeprefix, removesuffix
+from pywikibot.backports import removeprefix, removesuffix, List
 from pywikibot.comms import http
 from pywikibot.data.api import Request as _original_Request
 from pywikibot.exceptions import (
@@ -143,7 +143,7 @@
         self.assertIn(page.namespace(), namespaces,
                       f'{page} not in namespace {namespaces!r}')
-    def _get_gen_pages(self, gen, count=None, site=None):
+    def _get_gen_pages(self, gen, count: int = None, site=None):
         """
         Get pages from gen, asserting they are Page from site.
@@ -154,7 +154,6 @@
         :param gen: Page generator
         :type gen: typing.Iterable[pywikibot.Page]
         :param count: number of pages to get
-        :type count: int
         :param site: Site of expected pages
         :type site: pywikibot.site.APISite
         """
@@ -178,10 +177,9 @@
         return gen_pages

-    def _get_gen_titles(self, gen, count, site=None):
-        gen_pages = self._get_gen_pages(gen, count, site)
-        gen_titles = [page.title() for page in gen_pages]
-        return gen_titles
+    def _get_gen_titles(self, gen, count: int, site=None) -> List[str]:
+        """Return a list of page titles of given iterable."""
+        return [page.title() for page in self._get_gen_pages(gen, count, site)]

     @staticmethod
     def _get_canonical_titles(titles, site=None):
@@ -1445,8 +1443,7 @@
     @property
     def deprecation_messages(self):
         """Return captured deprecation warnings."""
-        messages = [str(item.message) for item in self.warning_log]
-        return messages
+        return [str(item.message) for item in self.warning_log]

     @classmethod
     def _build_message(cls, deprecated, instead):
pywikibot-commits@lists.wikimedia.org