jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/767270 )
Change subject: add None return type annotations
......................................................................
add None return type annotations
Change-Id: Ia3ee041a63ad42ac58cb19b22be4cddc1b050be9
---
M pywikibot/bot.py
M pywikibot/comms/eventstreams.py
M pywikibot/comms/http.py
M pywikibot/data/api.py
M pywikibot/data/mysql.py
M pywikibot/data/sparql.py
M pywikibot/family.py
M pywikibot/page/__init__.py
M pywikibot/page/_collections.py
M pywikibot/page/_decorators.py
M pywikibot/page/_revision.py
M pywikibot/scripts/generate_family_file.py
M pywikibot/scripts/generate_user_files.py
M pywikibot/site/_apisite.py
M pywikibot/site/_basesite.py
M pywikibot/site/_datasite.py
M pywikibot/site/_extensions.py
M pywikibot/site/_interwikimap.py
M pywikibot/site/_namespace.py
M pywikibot/site/_obsoletesites.py
M pywikibot/site/_siteinfo.py
M pywikibot/site/_tokenwallet.py
M pywikibot/site_detect.py
M pywikibot/specialbots/_unlink.py
M pywikibot/textlib.py
M pywikibot/throttle.py
M pywikibot/tools/__init__.py
M pywikibot/tools/_deprecate.py
M pywikibot/tools/djvu.py
M pywikibot/tools/formatter.py
M pywikibot/userinterfaces/_interface_base.py
M pywikibot/userinterfaces/buffer_interface.py
M pywikibot/userinterfaces/gui.py
M pywikibot/userinterfaces/terminal_interface_base.py
M pywikibot/userinterfaces/terminal_interface_unix.py
M pywikibot/userinterfaces/terminal_interface_win32.py
M pywikibot/userinterfaces/transliteration.py
M pywikibot/userinterfaces/win32_unicode.py
M pywikibot/xmlreader.py
M scripts/add_text.py
M scripts/blockpageschecker.py
M scripts/category.py
M scripts/category_redirect.py
M scripts/change_pagelang.py
M scripts/commons_information.py
M scripts/commonscat.py
M scripts/data_ingestion.py
M scripts/djvutext.py
M scripts/download_dump.py
M scripts/fixing_redirects.py
M scripts/imagetransfer.py
M scripts/interwiki.py
M scripts/movepages.py
M scripts/nowcommons.py
M scripts/pagefromfile.py
M scripts/parser_function_count.py
M scripts/patrol.py
M scripts/protect.py
M scripts/redirect.py
M scripts/reflinks.py
M scripts/replace.py
M scripts/replicate_wiki.py
M scripts/solve_disambiguation.py
M scripts/speedy_delete.py
M scripts/unusedfiles.py
M scripts/watchlist.py
M scripts/weblinkchecker.py
M scripts/welcome.py
68 files changed, 394 insertions(+), 364 deletions(-)
Approvals:
  JJMC89: Looks good to me, approved
  jenkins-bot: Verified
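
The change is mechanical: every function or method that implicitly returns None (initializers, property setters and deleters, and procedures that only mutate state or emit output) gains an explicit "-> None" return annotation; parameters and bodies stay as they were, apart from reflowing a few signatures that no longer fit on one line. A minimal, illustrative sketch of the pattern (the name and body below are made up for this note, not taken from the diff):

    # before: the return type is left implicit
    def reset_counter(counter: dict):
        counter['read'] = 0

    # after: the only change is the explicit "-> None" return annotation
    def reset_counter(counter: dict) -> None:
        counter['read'] = 0

With the annotation in place, a type checker such as mypy treats these callables as returning None and can flag call sites that try to use a return value from them.
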
diff --git a/pywikibot/bot.py b/pywikibot/bot.py index 16a02bf..96d73f5 100644 --- a/pywikibot/bot.py +++ b/pywikibot/bot.py @@ -1260,7 +1260,7 @@
@_treat_counter.setter @deprecated("self.counter['read']", since='7.0.0') - def _treat_counter(self, value): + def _treat_counter(self, value) -> None: self.counter['read'] = value
@property @@ -1270,7 +1270,7 @@
@_save_counter.setter @deprecated("self.counter['write']", since='7.0.0') - def _save_counter(self, value): + def _save_counter(self, value) -> None: self.counter['write'] = value
@property @@ -1280,7 +1280,7 @@
@_skip_counter.setter @deprecated("self.counter['skip']", since='7.0.0') - def _skip_counter(self, value): + def _skip_counter(self, value) -> None: self.counter['skip'] = value
@property diff --git a/pywikibot/comms/eventstreams.py b/pywikibot/comms/eventstreams.py index 71f6d94..6db0a5c 100644 --- a/pywikibot/comms/eventstreams.py +++ b/pywikibot/comms/eventstreams.py @@ -158,7 +158,7 @@ if self._since else ''))) return self._url
- def set_maximum_items(self, value: int): + def set_maximum_items(self, value: int) -> None: """ Set the maximum number of items to be retrieved from the stream.
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py index 8fb7b03..84eb68f 100644 --- a/pywikibot/comms/http.py +++ b/pywikibot/comms/http.py @@ -78,7 +78,7 @@
# Prepare flush on quit -def flush(): # pragma: no cover +def flush() -> None: # pragma: no cover """Close the session object. This is called when the module terminates.""" log('Closing network session.') session.close() diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py index be4b0a1..ab2441d 100644 --- a/pywikibot/data/api.py +++ b/pywikibot/data/api.py @@ -51,7 +51,7 @@ r'Waiting for [\w.: ]+: (?P<lag>\d+(?:\.\d+)?) seconds? lagged')
-def _invalidate_superior_cookies(family): +def _invalidate_superior_cookies(family) -> None: """ Clear cookies for site's second level domain.
@@ -77,12 +77,12 @@
"""Workaround for bug in python 3 email handling of CTE binary."""
- def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: """Initializer.""" super().__init__(*args, **kwargs) self._writeBody = self._write_body
- def _write_body(self, msg): + def _write_body(self, msg) -> None: if msg['content-transfer-encoding'] == 'binary': self._fp.write(msg.get_payload(decode=True)) else: @@ -122,7 +122,12 @@
init_modules = frozenset(['main', 'paraminfo'])
- def __init__(self, site, preloaded_modules=None, modules_only_mode=None): + def __init__( + self, + site, + preloaded_modules=None, + modules_only_mode=None + ) -> None: """ Initializer.
@@ -154,7 +159,7 @@ if self.modules_only_mode: self.paraminfo_keys = frozenset(['modules'])
- def _add_submodules(self, name, modules): + def _add_submodules(self, name, modules) -> None: """Add the modules to the internal cache or check if equal.""" # The current implementation here doesn't support submodules inside of # submodules, because that would require to fetch all modules when only @@ -217,7 +222,7 @@ self._fetch({'query'}) assert 'query' in self._modules
- def _emulate_pageset(self): + def _emulate_pageset(self) -> None: """Emulate the pageset module, which existed until MW 1.24.""" # pageset isn't a module in the new system, so it is emulated, with # the paraminfo from the query module. @@ -378,7 +383,7 @@ if 'pageset' in modules and 'pageset' not in self._paraminfo: self._emulate_pageset()
- def _generate_submodules(self, module): + def _generate_submodules(self, module) -> None: """Check and generate submodules for the given module.""" parameters = self._paraminfo[module].get('parameters', []) submodules = set() @@ -683,7 +688,7 @@ def __init__(self, site=None, module: Optional[str] = None, param: Optional[str] = None, - dict: Optional[dict] = None): + dict: Optional[dict] = None) -> None: """ Initializer.
@@ -782,7 +787,7 @@ self._enabled = enabled | (self._enabled - disabled - removed) self._disabled = disabled | (self._disabled - enabled - removed)
- def clear(self): + def clear(self) -> None: """Clear all enabled and disabled options.""" self._enabled.clear() self._disabled.clear() @@ -826,7 +831,7 @@ return None raise KeyError('Invalid name "{}"'.format(name))
- def __delitem__(self, name): + def __delitem__(self, name) -> None: """Remove the item by setting it to None.""" self[name] = None
@@ -1059,14 +1064,14 @@ return cls(site=req_site, parameters=kwargs)
@classmethod - def _warn_both(cls): + def _warn_both(cls) -> None: """Warn that kwargs mode was used but parameters was set too.""" warn('Both kwargs and parameters are set in Request.__init__. It ' 'assumes that "parameters" is actually a parameter of the ' 'Request and is added to kwargs.', DeprecationWarning, 3)
@classmethod - def _warn_kwargs(cls): + def _warn_kwargs(cls) -> None: """Warn that kwargs was used instead of parameters.""" warn('Instead of using kwargs from Request.__init__, parameters ' 'for the request to the API should be added via the ' @@ -1146,7 +1151,7 @@ """Implement dict interface.""" return self._params[key]
- def __setitem__(self, key: str, value): + def __setitem__(self, key: str, value) -> None: """Set MediaWiki API request parameter.
:param value: param value(s) @@ -1173,7 +1178,7 @@ else: self._params[key] = list(value)
- def __delitem__(self, key): + def __delitem__(self, key) -> None: """Implement dict interface.""" del self._params[key]
@@ -1556,7 +1561,7 @@ self.wait() return None
- def _relogin(self, message=''): + def _relogin(self, message='') -> None: """Force re-login and inform user.""" pywikibot.error('{}{}Forcing re-login.'.format(message, ' ' if message else '')) @@ -1577,7 +1582,7 @@ return True return False
- def _handle_warnings(self, result): + def _handle_warnings(self, result) -> None: if 'warnings' in result: for mod, warning in result['warnings'].items(): if mod == 'info': @@ -1665,7 +1670,7 @@ self.wait() return True
- def _ratelimited(self): + def _ratelimited(self) -> None: """Handle ratelimited warning.""" ratelimits = self.site.userinfo['ratelimits'] delay = None @@ -1904,7 +1909,7 @@
"""Cached request."""
- def __init__(self, expiry, *args, **kwargs): + def __init__(self, expiry, *args, **kwargs) -> None: """Initialize a CachedRequest object.
:param expiry: either a number of days or a datetime.timedelta object @@ -2017,7 +2022,7 @@ pywikibot.output('Could not load cache: {!r}'.format(e)) return False
- def _write_cache(self, data): + def _write_cache(self, data) -> None: """Write data to self._cachefile_path().""" data = (self._uniquedescriptionstr(), data, datetime.datetime.utcnow()) with open(self._cachefile_path(), 'wb') as f: @@ -2063,8 +2068,14 @@ after iterating that many values. """
- def __init__(self, action: str, continue_name: str = 'continue', - limit_name: str = 'limit', data_name: str = 'data', **kwargs): + def __init__( + self, + action: str, + continue_name: str = 'continue', + limit_name: str = 'limit', + data_name: str = 'data', + **kwargs + ) -> None: """ Initialize an APIGenerator object.
@@ -2091,7 +2102,7 @@ self.request = self.request_class(**kwargs) self.request[self.limit_name] = self.query_increment
- def set_query_increment(self, value: int): + def set_query_increment(self, value: int) -> None: """ Set the maximum number of items to be retrieved per API query.
@@ -2106,7 +2117,7 @@ .format(self.__class__.__name__, self.query_increment), _logger)
- def set_maximum_items(self, value: Union[int, str, None]): + def set_maximum_items(self, value: Union[int, str, None]) -> None: """ Set the maximum number of items to be retrieved from the wiki.
@@ -2292,7 +2303,7 @@ self.continuekey = self.modules self._add_slots()
- def _add_slots(self): + def _add_slots(self) -> None: """Add slots to params if the site supports multi-content revisions.
On MW 1.32+ the following query parameters require slots to be given @@ -2344,7 +2355,7 @@ if not set(request) & deprecated_params: request['adrslots'] = '*'
- def set_query_increment(self, value): + def set_query_increment(self, value) -> None: """Set the maximum number of items to be retrieved per API query.
If not called, the default is to ask for "max" items and let the @@ -2361,7 +2372,7 @@ .format(self.__class__.__name__, self.query_limit), _logger)
- def set_maximum_items(self, value: Union[int, str, None]): + def set_maximum_items(self, value: Union[int, str, None]) -> None: """Set the maximum number of items to be retrieved from the wiki.
If not called, most queries will continue as long as there is @@ -2378,7 +2389,7 @@ if value is not None: self.limit = int(value)
- def _update_limit(self): + def _update_limit(self) -> None: """Set query limit for self.module based on api response.""" param = self.site._paraminfo.parameter('query+' + self.limited_module, 'limit') @@ -2477,7 +2488,7 @@ self._add_continues(self.data['continue']) return False # a new request with continue is needed
- def _add_continues(self, continue_pair): + def _add_continues(self, continue_pair) -> None: for key, value in continue_pair.items(): # query-continue can return ints (continue too?) if isinstance(value, int): @@ -2651,7 +2662,7 @@
"""
- def __init__(self, generator: str, g_content=False, **kwargs): + def __init__(self, generator: str, g_content=False, **kwargs) -> None: """ Initializer.
@@ -2664,7 +2675,7 @@
""" # If possible, use self.request after __init__ instead of appendParams - def append_params(params, key, value): + def append_params(params, key, value) -> None: if key in params: params[key] += '|' + value else: @@ -2724,7 +2735,7 @@
"""
- def __init__(self, prop: str, **kwargs): + def __init__(self, prop: str, **kwargs) -> None: """ Initializer.
@@ -2771,7 +2782,7 @@ del self._previous_dicts[prev_title]
@staticmethod - def _update_old_result_dict(old_dict, new_dict): + def _update_old_result_dict(old_dict, new_dict) -> None: """Update old result dict with new_dict.""" for k, v in new_dict.items(): if k not in old_dict: @@ -2800,7 +2811,7 @@
"""
- def __init__(self, listaction: str, **kwargs): + def __init__(self, listaction: str, **kwargs) -> None: """ Initializer.
@@ -2821,7 +2832,7 @@ Yields LogEntry objects instead of dicts. """
- def __init__(self, logtype=None, **kwargs): + def __init__(self, logtype=None, **kwargs) -> None: """Initializer.""" super().__init__('logevents', **kwargs)
@@ -3036,7 +3047,7 @@ .format(pagedict['title']))
-def _update_contentmodel(page, pagedict: dict): +def _update_contentmodel(page, pagedict: dict) -> None: """Update page content model.""" page._contentmodel = pagedict.get('contentmodel') # can be None
@@ -3047,7 +3058,7 @@ page._quality_text = pagedict['proofread']['quality_text']
-def _update_protection(page, pagedict: dict): +def _update_protection(page, pagedict: dict) -> None: """Update page protection.""" if 'restrictiontypes' in pagedict: page._applicable_protections = set(pagedict['restrictiontypes']) @@ -3057,13 +3068,13 @@ for item in pagedict['protection']}
-def _update_revisions(page, revisions): +def _update_revisions(page, revisions) -> None: """Update page revisions.""" for rev in revisions: page._revisions[rev['revid']] = pywikibot.page.Revision(**rev)
-def _update_templates(page, templates): +def _update_templates(page, templates) -> None: """Update page templates.""" templ_pages = [pywikibot.Page(page.site, tl['title']) for tl in templates] if hasattr(page, '_templates'): @@ -3072,7 +3083,7 @@ page._templates = templ_pages
-def _update_langlinks(page, langlinks): +def _update_langlinks(page, langlinks) -> None: """Update page langlinks.""" links = [pywikibot.Link.langlinkUnsafe(link['lang'], link['*'], source=page.site) @@ -3084,7 +3095,7 @@ page._langlinks = links
-def _update_coordinates(page, coordinates): +def _update_coordinates(page, coordinates) -> None: """Update page coordinates.""" coords = [] for co in coordinates: diff --git a/pywikibot/data/mysql.py b/pywikibot/data/mysql.py index 3952570..376e1ab 100644 --- a/pywikibot/data/mysql.py +++ b/pywikibot/data/mysql.py @@ -33,7 +33,7 @@ .. versionadded:: 7.0 """
- def close(self): # pragma: no cover + def close(self) -> None: # pragma: no cover """Send the quit message and close the socket.""" if self._closed or self._sock is None: super().close() diff --git a/pywikibot/data/sparql.py b/pywikibot/data/sparql.py index f4b1899..b1c7c15 100644 --- a/pywikibot/data/sparql.py +++ b/pywikibot/data/sparql.py @@ -204,7 +204,7 @@ class SparqlNode: """Base class for SPARQL nodes."""
- def __init__(self, value): + def __init__(self, value) -> None: """Create a SparqlNode.""" self.value = value
@@ -215,7 +215,7 @@ class URI(SparqlNode): """Representation of URI result type."""
- def __init__(self, data: dict, entity_url, **kwargs): + def __init__(self, data: dict, entity_url, **kwargs) -> None: """Create URI object.""" super().__init__(data.get('value')) self.entity_url = entity_url @@ -238,7 +238,7 @@ class Literal(SparqlNode): """Representation of RDF literal result type."""
- def __init__(self, data: dict, **kwargs): + def __init__(self, data: dict, **kwargs) -> None: """Create Literal object.""" super().__init__(data.get('value')) self.type = data.get('datatype') @@ -255,7 +255,7 @@ class Bnode(SparqlNode): """Representation of blank node."""
- def __init__(self, data: dict, **kwargs): + def __init__(self, data: dict, **kwargs) -> None: """Create Bnode.""" super().__init__(data.get('value'))
diff --git a/pywikibot/family.py b/pywikibot/family.py index 8ce8fb0..879be6d 100644 --- a/pywikibot/family.py +++ b/pywikibot/family.py @@ -642,7 +642,7 @@ self._get_cr_templates(code, fallback) return self._catredirtemplates[code]
- def _get_cr_templates(self, code, fallback): + def _get_cr_templates(self, code, fallback) -> None: """Build list of category redirect templates.""" if not hasattr(self, '_catredirtemplates'): self._catredirtemplates = {} @@ -967,7 +967,7 @@ return types.MappingProxyType(data)
@obsolete.setter - def obsolete(self, data): + def obsolete(self, data) -> None: """Split obsolete dict into constituent parts.""" self.interwiki_removals[:] = [old for (old, new) in data.items() if new is None] diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py index f3db6ea..4c9eb7d 100644 --- a/pywikibot/page/__init__.py +++ b/pywikibot/page/__init__.py @@ -522,7 +522,7 @@ return self._revid
@latest_revision_id.deleter - def latest_revision_id(self): + def latest_revision_id(self) -> None: """ Remove the latest revision id set for this Page.
@@ -545,7 +545,7 @@ delattr(self, attr)
@latest_revision_id.setter - def latest_revision_id(self, value): + def latest_revision_id(self, value) -> None: """Set the latest revision for this Page.""" del self.latest_revision_id self._revid = value @@ -595,7 +595,7 @@ self._text = None if value is None else str(value)
@text.deleter - def text(self): + def text(self) -> None: """Delete the current (edited) wikitext.""" if hasattr(self, '_text'): del self._text @@ -1288,7 +1288,7 @@ asynchronous: bool = False, callback=None, show_diff: bool = False, - **kwargs): + **kwargs) -> None: """ Save the page with the contents of the first argument as the text.
@@ -1319,7 +1319,7 @@ """ return self.site.watch(self, unwatch)
- def clear_cache(self): + def clear_cache(self) -> None: """Clear the cached attributes of the page.""" self._revisions = {} for attr in self._cache_attrs: @@ -1710,7 +1710,7 @@ return sum(cnt[user.username] if isinstance(user, User) else cnt[user] for user in contributors)
- def merge_history(self, dest, timestamp=None, reason=None): + def merge_history(self, dest, timestamp=None, reason=None) -> None: """ Merge revisions from this page into another page.
@@ -1753,7 +1753,7 @@ reason: Optional[str] = None, prompt: bool = True, mark: bool = False, - automatic_quit: bool = False): + automatic_quit: bool = False) -> None: """ Delete the page from the wiki. Requires administrator status.
@@ -1875,7 +1875,7 @@ .format(timestamp)) self._deletedRevs[timestamp]['marked'] = undelete
- def undelete(self, reason: Optional[str] = None): + def undelete(self, reason: Optional[str] = None) -> None: """ Undelete revisions based on the markers set by previous calls.
@@ -1913,7 +1913,7 @@ def protect(self, reason: Optional[str] = None, protections: Optional[dict] = None, - **kwargs): + **kwargs) -> None: """ Protect or unprotect a wiki page. Requires administrator status.
@@ -2288,7 +2288,7 @@ raise ValueError("'{}' is not in the file namespace!" .format(self.title()))
- def _load_file_revisions(self, imageinfo): + def _load_file_revisions(self, imageinfo) -> None: for file_rev in imageinfo: # filemissing in API response indicates most fields are missing # see https://gerrit.wikimedia.org/r/c/mediawiki/core/+/533482/ @@ -3096,7 +3096,7 @@
raise err
- def unblock(self, reason: Optional[str] = None): + def unblock(self, reason: Optional[str] = None) -> None: """ Remove the block for the user.
@@ -3450,7 +3450,7 @@ data[key] = value return data
- def editEntity(self, data=None, **kwargs): + def editEntity(self, data=None, **kwargs) -> None: """ Edit an entity using Wikibase wbeditentity API.
@@ -3754,16 +3754,16 @@ return self._revid
@latest_revision_id.setter - def latest_revision_id(self, value): + def latest_revision_id(self, value) -> None: self._revid = value
@latest_revision_id.deleter - def latest_revision_id(self): + def latest_revision_id(self) -> None: # fixme: this seems too destructive in comparison to the parent self.clear_cache()
@allow_asynchronous - def editEntity(self, data=None, **kwargs): + def editEntity(self, data=None, **kwargs) -> None: """ Edit an entity using Wikibase wbeditentity API.
@@ -3788,7 +3788,7 @@ # kept for the decorator super().editEntity(data, **kwargs)
- def editLabels(self, labels, **kwargs): + def editLabels(self, labels, **kwargs) -> None: """ Edit entity labels.
@@ -3800,7 +3800,7 @@ data = {'labels': labels} self.editEntity(data, **kwargs)
- def editDescriptions(self, descriptions, **kwargs): + def editDescriptions(self, descriptions, **kwargs) -> None: """ Edit entity descriptions.
@@ -3812,7 +3812,7 @@ data = {'descriptions': descriptions} self.editEntity(data, **kwargs)
- def editAliases(self, aliases, **kwargs): + def editAliases(self, aliases, **kwargs) -> None: """ Edit entity aliases.
@@ -3858,7 +3858,7 @@ self.repo.addClaim(self, claim, bot=bot, **kwargs) claim.on_item = self
- def removeClaims(self, claims, **kwargs): + def removeClaims(self, claims, **kwargs) -> None: """ Remove the claims from the entity.
@@ -4169,7 +4169,7 @@
return self.sitelinks[site].canonical_title()
- def setSitelink(self, sitelink, **kwargs): + def setSitelink(self, sitelink, **kwargs) -> None: """ Set sitelinks. Calls setSitelinks().
@@ -4178,7 +4178,7 @@ """ self.setSitelinks([sitelink], **kwargs)
- def removeSitelink(self, site, **kwargs): + def removeSitelink(self, site, **kwargs) -> None: """ Remove a sitelink.
@@ -4186,7 +4186,7 @@ """ self.removeSitelinks([site], **kwargs)
- def removeSitelinks(self, sites, **kwargs): + def removeSitelinks(self, sites, **kwargs) -> None: """ Remove sitelinks.
@@ -4199,7 +4199,7 @@ data.append({'site': site, 'title': ''}) self.setSitelinks(data, **kwargs)
- def setSitelinks(self, sitelinks, **kwargs): + def setSitelinks(self, sitelinks, **kwargs) -> None: """ Set sitelinks.
@@ -4210,7 +4210,7 @@ data = {'sitelinks': sitelinks} self.editEntity(data, **kwargs)
- def mergeInto(self, item, **kwargs): + def mergeInto(self, item, **kwargs) -> None: """ Merge the item into another item.
@@ -4306,7 +4306,7 @@ 'musical-notation': 'string', }
- def __init__(self, site, id: str, datatype: Optional[str] = None): + def __init__(self, site, id: str, datatype: Optional[str] = None) -> None: """ Initializer.
@@ -4506,7 +4506,7 @@ return self._on_item
@on_item.setter - def on_item(self, item): + def on_item(self, item) -> None: self._on_item = item for values in self.qualifiers.values(): for qualifier in values: @@ -4738,7 +4738,7 @@ .format(value, value_class)) self.target = value
- def changeTarget(self, value=None, snaktype='value', **kwargs): + def changeTarget(self, value=None, snaktype='value', **kwargs) -> None: """ Set the target value in the data repository.
@@ -4791,7 +4791,7 @@ """Return the rank of the Claim.""" return self.rank
- def setRank(self, rank): + def setRank(self, rank) -> None: """Set the rank of the Claim.""" self.rank = rank
@@ -4800,7 +4800,7 @@ self.rank = rank return self.repo.save_claim(self, **kwargs)
- def changeSnakType(self, value=None, **kwargs): + def changeSnakType(self, value=None, **kwargs) -> None: """ Save the new snak value.
@@ -4814,7 +4814,7 @@ """Return a list of sources, each being a list of Claims.""" return self.sources
- def addSource(self, claim, **kwargs): + def addSource(self, claim, **kwargs) -> None: """ Add the claim as a source.
@@ -4846,7 +4846,7 @@ source[claim.getID()].append(claim) self.sources.append(source)
- def removeSource(self, source, **kwargs): + def removeSource(self, source, **kwargs) -> None: """ Remove the source. Call removeSources().
@@ -4855,7 +4855,7 @@ """ self.removeSources([source], **kwargs)
- def removeSources(self, sources, **kwargs): + def removeSources(self, sources, **kwargs) -> None: """ Remove the sources.
@@ -4888,7 +4888,7 @@ else: self.qualifiers[qualifier.getID()] = [qualifier]
- def removeQualifier(self, qualifier, **kwargs): + def removeQualifier(self, qualifier, **kwargs) -> None: """ Remove the qualifier. Call removeQualifiers().
@@ -4897,7 +4897,7 @@ """ self.removeQualifiers([qualifier], **kwargs)
- def removeQualifiers(self, qualifiers, **kwargs): + def removeQualifiers(self, qualifiers, **kwargs) -> None: """ Remove the qualifiers.
@@ -5024,7 +5024,7 @@ Note: timestamp will be casted to pywikibot.Timestamp. """
- def __init__(self, file_revision): + def __init__(self, file_revision) -> None: """Initiate the class using the dict from L{APISite.loadimageinfo}.""" self.__dict__.update(file_revision) self.timestamp = pywikibot.Timestamp.fromISOformat(self.timestamp) @@ -5058,7 +5058,7 @@ # Components used for __repr__ _items = ('title', 'namespace', '_sitekey')
- def __init__(self, title: str, namespace=None, site=None): + def __init__(self, title: str, namespace=None, site=None) -> None: """ Initializer.
@@ -5688,7 +5688,7 @@ # Components used for __repr__ _items = ('_sitekey', '_rawtitle', 'badges')
- def __init__(self, title, site=None, badges=None): + def __init__(self, title, site=None, badges=None) -> None: """ Initializer.
diff --git a/pywikibot/page/_collections.py b/pywikibot/page/_collections.py index 6668a45..f4fe56b 100644 --- a/pywikibot/page/_collections.py +++ b/pywikibot/page/_collections.py @@ -29,7 +29,7 @@ specialised in subclasses. """
- def __init__(self, data=None): + def __init__(self, data=None) -> None: super().__init__() self._data = {} if data: @@ -44,11 +44,11 @@ key = self.normalizeKey(key) return self._data[key]
- def __setitem__(self, key, value): + def __setitem__(self, key, value) -> None: key = self.normalizeKey(key) self._data[key] = value
- def __delitem__(self, key): + def __delitem__(self, key) -> None: key = self.normalizeKey(key) del self._data[key]
@@ -190,7 +190,7 @@ class ClaimCollection(MutableMapping): """A structure holding claims for a Wikibase entity."""
- def __init__(self, repo): + def __init__(self, repo) -> None: """Initializer.""" super().__init__() self.repo = repo @@ -213,10 +213,10 @@ def __getitem__(self, key): return self._data[key]
- def __setitem__(self, key, value): + def __setitem__(self, key, value) -> None: self._data[key] = value
- def __delitem__(self, key): + def __delitem__(self, key) -> None: del self._data[key]
def __iter__(self): @@ -294,7 +294,7 @@ claims = temp return claims
- def set_on_item(self, item): + def set_on_item(self, item) -> None: """Set Claim.on_item attribute for all claims in this collection.""" for claims in self.values(): for claim in claims: @@ -304,7 +304,7 @@ class SiteLinkCollection(MutableMapping): """A structure holding SiteLinks for a Wikibase item."""
- def __init__(self, repo, data=None): + def __init__(self, repo, data=None) -> None: """ Initializer.
@@ -358,7 +358,7 @@ self._data[key] = val return val
- def __setitem__(self, key, val): + def __setitem__(self, key, val) -> None: """ Set the SiteLink for a given key.
@@ -377,7 +377,7 @@ assert val.site.dbName() == key self._data[key] = val
- def __delitem__(self, key): + def __delitem__(self, key) -> None: key = self.getdbName(key) del self._data[key]
diff --git a/pywikibot/page/_decorators.py b/pywikibot/page/_decorators.py index ddd6dc8..4a056e4 100644 --- a/pywikibot/page/_decorators.py +++ b/pywikibot/page/_decorators.py @@ -46,7 +46,7 @@ if callback: callback(self, err)
- def wrapper(self, *args, **kwargs): + def wrapper(self, *args, **kwargs) -> None: if kwargs.get('asynchronous'): pywikibot.async_request(handle, func, self, *args, **kwargs) else: diff --git a/pywikibot/page/_revision.py b/pywikibot/page/_revision.py index fcdc140..6594baf 100644 --- a/pywikibot/page/_revision.py +++ b/pywikibot/page/_revision.py @@ -26,14 +26,14 @@ 'Sample for Revision access' """
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initializer.""" self._data = kwargs self._upcast_dict(self._data) super().__init__()
@staticmethod - def _upcast_dict(map_): + def _upcast_dict(map_) -> None: """Upcast dictionary values.""" with suppress(KeyError): # enable doctest map_['timestamp'] = Timestamp.fromISOformat(map_['timestamp']) diff --git a/pywikibot/scripts/generate_family_file.py b/pywikibot/scripts/generate_family_file.py index 2af8ae3..16753d7 100755 --- a/pywikibot/scripts/generate_family_file.py +++ b/pywikibot/scripts/generate_family_file.py @@ -55,7 +55,7 @@ url: Optional[str] = None, name: Optional[str] = None, dointerwiki: Optional[str] = None, - verify: Optional[str] = None): + verify: Optional[str] = None) -> None: """ Parameters are optional. If not given the script asks for the values.
@@ -128,7 +128,7 @@ return w, verify return None, None
- def run(self): + def run(self) -> None: """Main method, generate family file.""" if not self.get_params(): return @@ -147,7 +147,7 @@ self.getapis() self.writefile(verify)
- def getlangs(self, w): + def getlangs(self, w) -> None: """Determine site code of a family.""" print('Determining other sites...', end='') try: @@ -193,7 +193,7 @@ 'letters and digits [a-z0-9] or underscore/dash [_-]' \ .format(self.name, wiki['prefix'])
- def getapis(self): + def getapis(self) -> None: """Load other site pages.""" print('Loading wikis... ') for lang in self.langs: @@ -208,7 +208,7 @@ else: print('in cache')
- def writefile(self, verify): + def writefile(self, verify) -> None: """Write the family file.""" fn = os.path.join(self.base_dir, 'families', '{}_family.py'.format(self.name)) @@ -285,7 +285,7 @@ """
-def main(): +def main() -> None: """Process command line arguments and generate a family file.""" if len(sys.argv) > 1 and sys.argv[1] == '-help': print(__doc__) diff --git a/pywikibot/scripts/generate_user_files.py b/pywikibot/scripts/generate_user_files.py index b918aef..064229a 100755 --- a/pywikibot/scripts/generate_user_files.py +++ b/pywikibot/scripts/generate_user_files.py @@ -406,7 +406,7 @@ return userfile, passfile
-def main(*args: str): +def main(*args: str) -> None: """ Process command line arguments and generate user-config.
diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py index 3589ad2..cd14ab2 100644 --- a/pywikibot/site/_apisite.py +++ b/pywikibot/site/_apisite.py @@ -105,7 +105,7 @@ Do not instantiate directly; use :py:obj:`pywikibot.Site` function. """
- def __init__(self, code, fam=None, user=None): + def __init__(self, code, fam=None, user=None) -> None: """Initializer.""" super().__init__(code, fam, user) self._globaluserinfo = {} @@ -123,7 +123,7 @@ del new['_interwikimap'] return new
- def __setstate__(self, attrs): + def __setstate__(self, attrs) -> None: """Restore things removed in __getstate__.""" super().__setstate__(attrs) self._interwikimap = _InterwikiMap(self) @@ -399,7 +399,7 @@
self._loginstatus = _LoginStatus.NOT_LOGGED_IN # failure
- def _relogin(self): + def _relogin(self) -> None: """Force a login sequence without logging out, using the current user.
This is an internal function which is used to re-login when @@ -410,7 +410,7 @@ self._loginstatus = _LoginStatus.NOT_LOGGED_IN self.login()
- def logout(self): + def logout(self) -> None: """ Logout of the site and load details for the logged out user.
@@ -487,7 +487,7 @@ return self._userinfo
@userinfo.deleter - def userinfo(self): + def userinfo(self) -> None: """Delete cached userinfo.
..versionadded:: 5.5 @@ -557,7 +557,7 @@ return self.get_globaluserinfo()
@globaluserinfo.deleter - def globaluserinfo(self): + def globaluserinfo(self) -> None: """Delete cached globaluserinfo of current user.
..versionadded:: 7.0 @@ -652,7 +652,7 @@
@staticmethod def assert_valid_iter_params(msg_prefix, start, end, reverse, - is_ts=True): + is_ts=True) -> None: """Validate iterating API parameters.
:param msg_prefix: The calling method name @@ -1168,7 +1168,7 @@ raise PageRelatedError( page, 'loadimageinfo: Query on {} returned no imageinfo')
- def loadpageinfo(self, page, preload=False): + def loadpageinfo(self, page, preload=False) -> None: """Load page info from api and store in page attributes.
:see: https://www.mediawiki.org/wiki/API:Info @@ -1184,7 +1184,7 @@ inprop=inprop) self._update_page(page, query)
- def loadpageprops(self, page): + def loadpageprops(self, page) -> None: """Load page props for the given page.""" title = page.title(with_section=False) query = self._generator(api.PropertyGenerator, @@ -1194,7 +1194,7 @@ self._update_page(page, query)
def loadimageinfo(self, page, history=False, - url_width=None, url_height=None, url_param=None): + url_width=None, url_height=None, url_param=None) -> None: """Load image info from api and save in page attributes.
Parameters correspond to iiprops in: @@ -1452,7 +1452,7 @@ parsed_text = data['parse']['text']['*'] return parsed_text
- def getcategoryinfo(self, category): + def getcategoryinfo(self, category) -> None: """Retrieve data on contents of category.
:see: https://www.mediawiki.org/wiki/API:Categoryinfo diff --git a/pywikibot/site/_basesite.py b/pywikibot/site/_basesite.py index db86186..ad5a8d7 100644 --- a/pywikibot/site/_basesite.py +++ b/pywikibot/site/_basesite.py @@ -172,7 +172,7 @@ del new['_iw_sites'] return new
- def __setstate__(self, attrs): + def __setstate__(self, attrs) -> None: """Restore things removed in __getstate__.""" self.__dict__.update(attrs) self._pagemutex = threading.Condition() @@ -293,7 +293,7 @@ self._pagemutex.wait() self._locked_pages.add(title)
- def unlock_page(self, page): + def unlock_page(self, page) -> None: """ Unlock page. Call as soon as a write operation has completed.
diff --git a/pywikibot/site/_datasite.py b/pywikibot/site/_datasite.py index 56756cf..cd21300 100644 --- a/pywikibot/site/_datasite.py +++ b/pywikibot/site/_datasite.py @@ -34,7 +34,7 @@
"""Wikibase data capable site."""
- def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: """Initializer.""" super().__init__(*args, **kwargs) self._item_namespace = None @@ -45,7 +45,7 @@ 'mediainfo': pywikibot.MediaInfo, }
- def _cache_entity_namespaces(self): + def _cache_entity_namespaces(self) -> None: """Find namespaces for each known wikibase entity type.""" self._entity_namespaces = {} for entity_type in self._type_to_class: @@ -298,7 +298,7 @@ return req.submit()
@need_right('edit') - def addClaim(self, entity, claim, bot=True, summary=None): + def addClaim(self, entity, claim, bot=True, summary=None) -> None: """ Add a claim.
diff --git a/pywikibot/site/_extensions.py b/pywikibot/site/_extensions.py index 98e4b75..0e5e229 100644 --- a/pywikibot/site/_extensions.py +++ b/pywikibot/site/_extensions.py @@ -75,7 +75,7 @@ """APISite mixin for ProofreadPage extension."""
@need_extension('ProofreadPage') - def _cache_proofreadinfo(self, expiry=False): + def _cache_proofreadinfo(self, expiry=False) -> None: """Retrieve proofreadinfo from site and cache response.
Applicable only to sites with ProofreadPage extension installed. @@ -144,7 +144,7 @@ """APISite mixin for GeoData extension."""
@need_extension('GeoData') - def loadcoordinfo(self, page): + def loadcoordinfo(self, page) -> None: """Load [[mw:Extension:GeoData]] info.""" title = page.title(with_section=False) query = self._generator(api.PropertyGenerator, @@ -162,7 +162,7 @@ """APISite mixin for PageImages extension."""
@need_extension('PageImages') - def loadpageimage(self, page): + def loadpageimage(self, page) -> None: """ Load [[mw:Extension:PageImages]] info.
diff --git a/pywikibot/site/_interwikimap.py b/pywikibot/site/_interwikimap.py index de81928..77bbfb6 100644 --- a/pywikibot/site/_interwikimap.py +++ b/pywikibot/site/_interwikimap.py @@ -12,7 +12,7 @@
"""An entry of the _InterwikiMap with a lazy loading site."""
- def __init__(self, local, url, prefix=None): + def __init__(self, local, url, prefix=None) -> None: self._site = None self.local = local self.url = url @@ -33,7 +33,7 @@
"""A representation of the interwiki map of a site."""
- def __init__(self, site): + def __init__(self, site) -> None: """ Create an empty uninitialized interwiki map for the given site.
@@ -44,7 +44,7 @@ self._site = site self._map = None
- def reset(self): + def reset(self) -> None: """Remove all mappings to force building a new mapping.""" self._map = None
diff --git a/pywikibot/site/_namespace.py b/pywikibot/site/_namespace.py index dbc51de..027a836 100644 --- a/pywikibot/site/_namespace.py +++ b/pywikibot/site/_namespace.py @@ -88,7 +88,7 @@ canonical_name: Optional[str] = None, custom_name: Optional[str] = None, aliases: Optional[List[str]] = None, - **kwargs): + **kwargs) -> None: """Initializer.
:param canonical_name: Canonical name @@ -323,7 +323,7 @@ APISite was callable. """
- def __init__(self, namespaces): + def __init__(self, namespaces) -> None: """Create new dict using the given namespaces.""" super().__init__() self._namespaces = namespaces diff --git a/pywikibot/site/_obsoletesites.py b/pywikibot/site/_obsoletesites.py index 3825207..40643d4 100644 --- a/pywikibot/site/_obsoletesites.py +++ b/pywikibot/site/_obsoletesites.py @@ -18,11 +18,11 @@ class ClosedSite(APISite): """Site closed to read-only mode."""
- def __init__(self, code, fam, user=None): + def __init__(self, code, fam, user=None) -> None: """Initializer.""" super().__init__(code, fam, user)
- def _closed_error(self, notice=''): + def _closed_error(self, notice='') -> None: """An error instead of pointless API call.""" pywikibot.error('Site {} has been closed. {}'.format(self.sitename, notice)) @@ -39,7 +39,7 @@ 'create': ('steward', 'infinity')} return page._protection
- def recentchanges(self, **kwargs): + def recentchanges(self, **kwargs) -> None: """An error instead of pointless API call.""" self._closed_error('No recent changes can be returned.')
@@ -49,6 +49,6 @@ self._uploaddisabled = True return self._uploaddisabled
- def newpages(self, **kwargs): + def newpages(self, **kwargs) -> None: """An error instead of pointless API call.""" self._closed_error('No new pages can be returned.') diff --git a/pywikibot/site/_siteinfo.py b/pywikibot/site/_siteinfo.py index af88274..5999e1c 100644 --- a/pywikibot/site/_siteinfo.py +++ b/pywikibot/site/_siteinfo.py @@ -58,7 +58,7 @@ ], }
- def __init__(self, site): + def __init__(self, site) -> None: """Initialise it with an empty cache.""" self._site = site self._cache = {} @@ -95,7 +95,7 @@ return EMPTY_DEFAULT
@staticmethod - def _post_process(prop, data): + def _post_process(prop, data) -> None: """Do some default handling of data. Directly modifies data.""" # Be careful with version tests inside this here as it might need to # query this method to actually get the version number diff --git a/pywikibot/site/_tokenwallet.py b/pywikibot/site/_tokenwallet.py index 53b4d06..564ec77 100644 --- a/pywikibot/site/_tokenwallet.py +++ b/pywikibot/site/_tokenwallet.py @@ -15,7 +15,7 @@
"""Container for tokens."""
- def __init__(self, site): + def __init__(self, site) -> None: """Initializer.
:type site: pywikibot.site.APISite @@ -24,7 +24,7 @@ self._tokens = {} self.failed_cache = set() # cache unavailable tokens.
- def load_tokens(self, types, all=False): + def load_tokens(self, types, all=False) -> None: """ Preload one or multiple tokens.
diff --git a/pywikibot/site_detect.py b/pywikibot/site_detect.py index 60fbe42..63e2fad 100644 --- a/pywikibot/site_detect.py +++ b/pywikibot/site_detect.py @@ -108,7 +108,7 @@ return [wiki for wiki in iw['query']['interwikimap'] if 'language' in wiki]
- def _fetch_old_version(self): + def _fetch_old_version(self) -> None: """Extract the version from API help with ?version enabled.""" if self.version is None: try: @@ -128,7 +128,7 @@ else: self.version = MediaWikiVersion(self.version)
- def _parse_site(self): + def _parse_site(self) -> None: """Parse siteinfo data.""" response = fetch(self.api + '?action=query&meta=siteinfo&format=json') check_response(response) @@ -190,7 +190,7 @@
"""Wiki HTML page parser."""
- def __init__(self, url): + def __init__(self, url) -> None: """Initializer.""" super().__init__(convert_charrefs=True) self.url = urlparse(url) @@ -200,14 +200,14 @@ self.server = None self.scriptpath = None
- def set_version(self, value): + def set_version(self, value) -> None: """Set highest version.""" if self.version and value < self.version: return
self.version = value
- def set_api_url(self, url): + def set_api_url(self, url) -> None: """Set api_url.""" url = url.split('.php', 1)[0] try: @@ -257,7 +257,7 @@ self._parsed_url.scheme, self._parsed_url.netloc) self.scriptpath = self._parsed_url.path
- def handle_starttag(self, tag, attrs): + def handle_starttag(self, tag, attrs) -> None: """Handle an opening tag.""" attrs = dict(attrs) if tag == 'meta': diff --git a/pywikibot/specialbots/_unlink.py b/pywikibot/specialbots/_unlink.py index cf61dd0..76fd66f 100644 --- a/pywikibot/specialbots/_unlink.py +++ b/pywikibot/specialbots/_unlink.py @@ -24,7 +24,7 @@
"""The text should be edited and replacement should be restarted."""
- def __init__(self): + def __init__(self) -> None: """Initializer.""" super().__init__('edit', 'e') self.stop = True @@ -34,7 +34,7 @@
"""An implementation which just allows unlinking."""
- def __init__(self, bot): + def __init__(self, bot) -> None: """Create default settings.""" super().__init__(old_link=bot.pageToUnlink, new_link=False, default='u') @@ -58,7 +58,7 @@
"""A basic bot unlinking a given link from the current page."""
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Redirect all parameters and add namespace as an available option.""" self.available_options.update({ 'namespaces': [], @@ -71,7 +71,7 @@ """Create a new callback instance for replace_links.""" return InteractiveUnlink(self)
- def unlink(self, target_page): + def unlink(self, target_page) -> None: """Unlink all links linking to the target page.""" text = self.current_page.text while True: diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py index 3fa934e..cea6c70 100644 --- a/pywikibot/textlib.py +++ b/pywikibot/textlib.py @@ -187,7 +187,7 @@
"""Build template matcher."""
- def __init__(self, site): + def __init__(self, site) -> None: """Initializer.""" self.site = site
@@ -247,7 +247,7 @@ return re.compile(_tag_pattern(tag_name))
-def _create_default_regexes(): +def _create_default_regexes() -> None: """Fill (and possibly overwrite) _regex_cache with default regexes.""" _regex_cache.update({ # categories @@ -539,22 +539,22 @@ textdata = '' keeptags = []
- def __enter__(self): + def __enter__(self) -> None: pass
- def __exit__(self, *exc_info): + def __exit__(self, *exc_info) -> None: self.close()
- def handle_data(self, data): + def handle_data(self, data) -> None: """Add data to text.""" self.textdata += data
- def handle_starttag(self, tag, attrs): + def handle_starttag(self, tag, attrs) -> None: """Add start tag to text if tag should be kept.""" if tag in self.keeptags: self.textdata += '<{}>'.format(tag)
- def handle_endtag(self, tag): + def handle_endtag(self, tag) -> None: """Add end tag to text if tag should be kept.""" if tag in self.keeptags: self.textdata += '</{}>'.format(tag) @@ -1825,7 +1825,7 @@ :param name: a string with name of the timezone """
- def __init__(self, offset: int, name: str): + def __init__(self, offset: int, name: str) -> None: """Initializer.""" self.__offset = datetime.timedelta(minutes=offset) self.__name = name @@ -1855,7 +1855,7 @@
"""Find timestamp in page and return it as pywikibot.Timestamp object."""
- def __init__(self, site=None): + def __init__(self, site=None) -> None: """Initializer.""" self.site = pywikibot.Site() if site is None else site
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py index 8144f70..c9627f1 100644 --- a/pywikibot/throttle.py +++ b/pywikibot/throttle.py @@ -57,7 +57,7 @@ def __init__(self, site: Union['pywikibot.site.BaseSite', str], *, mindelay: Optional[int] = None, maxdelay: Optional[int] = None, - writedelay: Union[int, float, None] = None): + writedelay: Union[int, float, None] = None) -> None: """Initializer.""" self.lock = threading.RLock() self.lock_write = threading.RLock() @@ -96,7 +96,7 @@
@multiplydelay.setter @deprecated(since='6.2') - def multiplydelay(self): + def multiplydelay(self) -> None: """DEPRECATED attribute setter."""
@staticmethod @@ -140,7 +140,7 @@ continue yield proc_entry
- def _write_file(self, processes): + def _write_file(self, processes) -> None: """Write process entries to file.""" if not isinstance(processes, list): processes = list(processes) @@ -150,7 +150,7 @@ for p in processes: f.write(FORMAT_LINE.format_map(p._asdict()))
- def checkMultiplicity(self): + def checkMultiplicity(self) -> None: """Count running processes for site and set process_multiplicity.
.. versionchanged:: 7.0 @@ -197,7 +197,7 @@ pywikibot.log('Found {} {} processes running, including this one.' .format(count, mysite))
- def setDelays(self, delay=None, writedelay=None, absolute=False): + def setDelays(self, delay=None, writedelay=None, absolute=False) -> None: """Set the nominal delays in seconds. Defaults to config values.""" with self.lock: delay = delay or self.mindelay @@ -246,7 +246,7 @@ ago = now - (self.last_write if write else self.last_read) return max(0.0, thisdelay - ago)
- def drop(self): + def drop(self) -> None: """Remove me from the list of running bot processes.""" # drop all throttles with this process's pid, regardless of site self.checktime = 0 @@ -257,7 +257,7 @@
self._write_file(processes)
- def wait(self, seconds): + def wait(self, seconds) -> None: """Wait for seconds seconds.
Announce the delay if it exceeds a preset limit. @@ -277,7 +277,7 @@
time.sleep(seconds)
- def __call__(self, requestsize=1, write=False): + def __call__(self, requestsize=1, write=False) -> None: """Block the calling program if the throttle time has not expired.
Parameter requestsize is the number of Pages to be read/written; @@ -304,7 +304,7 @@ else: self.last_read = time.time()
- def lag(self, lagtime: Optional[float] = None): + def lag(self, lagtime: Optional[float] = None) -> None: """Seize the throttle lock due to server lag.
Usually the `self.retry-after` value from `response_header` of the diff --git a/pywikibot/tools/__init__.py b/pywikibot/tools/__init__.py index 9e19104..6e52f2c 100644 --- a/pywikibot/tools/__init__.py +++ b/pywikibot/tools/__init__.py @@ -130,7 +130,7 @@ .. versionadded:: 3.0 """
- def __init__(self, cls_method): + def __init__(self, cls_method) -> None: """Hold the class method.""" self.method = cls_method self.__doc__ = self.method.__doc__ @@ -150,7 +150,7 @@ .. versionadded:: 3.0 """
- def __init__(self, message='', category=Warning, filename=''): + def __init__(self, message='', category=Warning, filename='') -> None: """Initialize the object.
The parameter semantics are similar to those of @@ -172,11 +172,11 @@ self.filename_match = re.compile(filename).match super().__init__(record=True)
- def __enter__(self): + def __enter__(self) -> None: """Catch all warnings and store them in `self.log`.""" self.log = super().__enter__()
- def __exit__(self, exc_type, exc_val, exc_tb): + def __exit__(self, exc_type, exc_val, exc_tb) -> None: """Stop logging warnings and show those that do not match to params.""" super().__exit__(exc_type, exc_val, exc_tb) for warning in self.log: @@ -269,7 +269,7 @@ .. versionadded:: 6.1 """
- def __init__(self, keyattr: str): + def __init__(self, keyattr: str) -> None: """Initializer.
:param keyattr: an attribute or method of the values to be hold @@ -301,7 +301,7 @@ def __repr__(self) -> str: return str(self.data).replace('defaultdict', self.__class__.__name__)
- def append(self, value): + def append(self, value) -> None: """Add a value to the collection.""" key = getattr(value, self.keyattr) if callable(key): @@ -311,7 +311,7 @@ self.data[key].append(value) self.size += 1
- def remove(self, value): + def remove(self, value) -> None: """Remove a value from the container.""" key = getattr(value, self.keyattr) if callable(key): @@ -320,13 +320,13 @@ self.data[key].remove(value) self.size -= 1
- def remove_key(self, key): + def remove_key(self, key) -> None: """Remove all values for a given key.""" with suppress(KeyError): self.size -= len(self.data[key]) del self.data[key]
- def clear(self): + def clear(self) -> None: """Remove all elements from SizedKeyCollection.""" self.data = {} # defaultdict fails (T282865) self.size = 0 @@ -545,7 +545,7 @@ .. versionadded:: 6.2 """
- def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: """Initializer.""" self._lock = threading.RLock(*args, **kwargs) self._block = threading.Lock() @@ -644,11 +644,11 @@ except KeyboardInterrupt: self.stop()
- def stop(self): + def stop(self) -> None: """Stop the background thread.""" self.finished.set()
- def run(self): + def run(self) -> None: """Run the generator and store the results on the queue.""" iterable = any(hasattr(self.generator, key) for key in ('__iter__', '__getitem__')) @@ -788,7 +788,7 @@ debug("thread {} ('{}') started".format(len(self), type(thd)), self._logger)
- def stop_all(self): + def stop_all(self) -> None: """Stop all threads the pool.""" if self: debug('EARLY QUIT: Threads: {}'.format(len(self)), self._logger) @@ -981,12 +981,12 @@
if not add: if hasattr(container, 'add'): - def container_add(x): + def container_add(x) -> None: container.add(key(x) if key else x)
add = container_add else: - def container_setitem(x): + def container_setitem(x) -> None: container.__setitem__(key(x) if key else x, True)
@@ -1026,7 +1026,7 @@ ``empty_iterator()`` was removed in favour of ``iter()``. """
- def __init__(self): + def __init__(self) -> None: """Initialise the default as an empty string.""" str.__init__(self)
diff --git a/pywikibot/tools/_deprecate.py b/pywikibot/tools/_deprecate.py index 3ae19d7..87d2135 100644 --- a/pywikibot/tools/_deprecate.py +++ b/pywikibot/tools/_deprecate.py @@ -43,7 +43,7 @@ """
-def add_decorated_full_name(obj, stacklevel=1): +def add_decorated_full_name(obj, stacklevel=1) -> None: """Extract full object name, including class, and store in __full_name__.
This must be done on all decorators that are chained together, otherwise @@ -68,7 +68,7 @@ obj.__full_name__ = '{}.{}'.format(obj.__module__, obj.__name__)
-def manage_wrapping(wrapper, obj): +def manage_wrapping(wrapper, obj) -> None: """Add attributes to wrapper and wrapped functions.
.. versionadded:: 3.0 @@ -185,7 +185,7 @@
def issue_deprecation_warning(name: str, instead: str = '', depth: int = 2, - warning_class=None, since: str = ''): + warning_class=None, since: str = '') -> None: """Issue a deprecation warning.
.. versionchanged:: 7.0 @@ -244,7 +244,7 @@ warning_class=None if future_warning else DeprecationWarning) return obj(*args, **kwargs)
- def add_docstring(wrapper): + def add_docstring(wrapper) -> None: """Add a Deprecated notice to the docstring.""" deprecation_notice = 'Deprecated' if instead: @@ -552,7 +552,7 @@
"""A wrapper for a module to deprecate classes or variables of it."""
- def __init__(self, module): + def __init__(self, module) -> None: """ Initialise the wrapper.
@@ -627,7 +627,7 @@ self._deprecated[name] = ( replacement_name, replacement, warning_message, future_warning)
- def __setattr__(self, attr, value): + def __setattr__(self, attr, value) -> None: """Set the value of the wrapped module.""" self.__dict__[attr] = value setattr(self._module, attr, value) diff --git a/pywikibot/tools/djvu.py b/pywikibot/tools/djvu.py index 012478f..cfc1c80 100644 --- a/pywikibot/tools/djvu.py +++ b/pywikibot/tools/djvu.py @@ -56,7 +56,7 @@
"""
- def __init__(self, file: str): + def __init__(self, file: str) -> None: """ Initializer.
diff --git a/pywikibot/tools/formatter.py b/pywikibot/tools/formatter.py index a3375b3..4f81afd 100644 --- a/pywikibot/tools/formatter.py +++ b/pywikibot/tools/formatter.py @@ -37,7 +37,7 @@ prefix = '\n' suffix = '\n'
- def __init__(self, sequence): + def __init__(self, sequence) -> None: """Create a new instance with a reference to the sequence.""" super().__init__() self.sequence = sequence @@ -60,7 +60,7 @@ content = '' return self.prefix + content + self.suffix
- def output(self): + def output(self) -> None: """Output the text of the current sequence.""" output(self.out)
diff --git a/pywikibot/userinterfaces/_interface_base.py b/pywikibot/userinterfaces/_interface_base.py index 0a966cd..14eb2b6 100644 --- a/pywikibot/userinterfaces/_interface_base.py +++ b/pywikibot/userinterfaces/_interface_base.py @@ -31,14 +31,14 @@ """ return list(sys.argv)
- def flush(self): + def flush(self) -> None: """Flush cached output.
May be passed to atexit.register() to flush any ui cache. """
@abstractmethod - def init_handlers(self, *args, **kwargs): + def init_handlers(self, *args, **kwargs) -> None: """Initialize the handlers for user output.
Called in bot.init_handlers(). diff --git a/pywikibot/userinterfaces/buffer_interface.py b/pywikibot/userinterfaces/buffer_interface.py index ac9795d..0c1b675 100755 --- a/pywikibot/userinterfaces/buffer_interface.py +++ b/pywikibot/userinterfaces/buffer_interface.py @@ -17,7 +17,7 @@
"""Collects output into an unseen buffer."""
- def __init__(self): + def __init__(self) -> None: """Initialize the UI.""" super().__init__()
@@ -26,7 +26,7 @@ self.log_handler = logging.handlers.QueueHandler(self._buffer) self.log_handler.setLevel(VERBOSE if config.verbose_output else INFO)
- def init_handlers(self, root_logger, *args, **kwargs): + def init_handlers(self, root_logger, *args, **kwargs) -> None: """Initialize the handlers for user output.""" root_logger.addHandler(self.log_handler)
@@ -69,6 +69,6 @@
return output
- def clear(self): + def clear(self) -> None: """Removes any buffered output.""" self.pop_output() diff --git a/pywikibot/userinterfaces/gui.py b/pywikibot/userinterfaces/gui.py index 9324f4b..cae3559 100644 --- a/pywikibot/userinterfaces/gui.py +++ b/pywikibot/userinterfaces/gui.py @@ -45,7 +45,7 @@
"""
- def __init__(self, master=None, **kwargs): + def __init__(self, master=None, **kwargs) -> None: """ Initializer.
@@ -100,7 +100,7 @@ Theme, 'cursor', fgBg='fg') return config
- def add_bindings(self): + def add_bindings(self) -> None: """Assign key and events bindings to methods.""" # due to IDLE dependencies, this can't be called from __init__ # add key and event bindings @@ -161,7 +161,7 @@ self.see('insert') return 'break'
- def remove_selection(self, event=None): + def remove_selection(self, event=None) -> None: """Perform remove operation.""" self.tag_remove('sel', '1.0', 'end') self.see('insert') @@ -253,7 +253,7 @@ self.do_highlight(found[0], found[1]) return None
- def do_highlight(self, start, end): + def do_highlight(self, start, end) -> None: """Select and show the text from index start to index end.""" self.see(start) self.tag_remove(tkinter.SEL, '1.0', tkinter.END) @@ -278,7 +278,7 @@
"""Edit box window."""
- def __init__(self, parent=None, **kwargs): + def __init__(self, parent=None, **kwargs) -> None: """Initializer.""" if parent is None: # create a new window @@ -405,23 +405,23 @@ self.parent.mainloop() return self.text
- def find_all(self, target): + def find_all(self, target) -> None: """Perform find all operation.""" self.textfield.insert(tkinter.END, target) self.editbox.find_all(target)
- def find(self): + def find(self) -> None: """Perform find operation.""" # get text to search for s = self.textfield.get() if s: self.editbox.find_all(s)
- def config_dialog(self, event=None): + def config_dialog(self, event=None) -> None: """Show config dialog.""" ConfigDialog(self, 'Settings')
- def pressedOK(self): + def pressedOK(self) -> None: """ Perform OK operation.
@@ -444,7 +444,7 @@
# called when user pushes the OK button. # closes the window. - def pressedOK(self): + def pressedOK(self) -> None: """ Perform OK operation.
@@ -452,7 +452,7 @@ """ self.parent.destroy()
- def __init__(self, parent=None): + def __init__(self, parent=None) -> None: """Initializer.""" if parent is None: # create a new window @@ -495,7 +495,7 @@
"""The dialog window for image info."""
- def __init__(self, photo_description, photo, filename): + def __init__(self, photo_description, photo, filename) -> None: """Initializer.""" self.root = tkinter.Tk() # "%dx%d%+d%+d" % (width, height, xoffset, yoffset) @@ -583,13 +583,13 @@ imageTk = ImageTk.PhotoImage(image) return imageTk
- def ok_file(self): + def ok_file(self) -> None: """The user pressed the OK button.""" self.filename = self.filename_field.get() self.photo_description = self.description_field.get(0.0, tkinter.END) self.root.destroy()
- def skip_file(self): + def skip_file(self) -> None: """The user pressed the Skip button.""" self.skip = True self.root.destroy() diff --git a/pywikibot/userinterfaces/terminal_interface_base.py b/pywikibot/userinterfaces/terminal_interface_base.py index eb1808f..8909aa7 100755 --- a/pywikibot/userinterfaces/terminal_interface_base.py +++ b/pywikibot/userinterfaces/terminal_interface_base.py @@ -65,7 +65,7 @@
split_col_pat = re.compile(r'(\w+);?(\w+)?')
- def __init__(self): + def __init__(self) -> None: """ Initialize the UI.
@@ -81,7 +81,7 @@ self.cache = [] self.lock = RLock()
- def init_handlers(self, root_logger, default_stream='stderr'): + def init_handlers(self, root_logger, default_stream='stderr') -> None: """Initialize the handlers for user output.
This method initializes handler(s) for output levels VERBOSE (if @@ -141,7 +141,7 @@ """ return cls.split_col_pat.search(color).groups()
- def _write(self, text, target_stream): + def _write(self, text, target_stream) -> None: """Optionally encode and write the text to the target stream.""" target_stream.write(text)
@@ -149,7 +149,7 @@ """Return whether the target stream does support colors.""" return False
- def _print(self, text, target_stream): + def _print(self, text, target_stream) -> None: """Write the text to the target stream handling the colors.""" colorized = (config.colorized_output and self.support_color(target_stream)) @@ -188,7 +188,7 @@ # set the new color, but only if they change self.encounter_color(color_stack[-1], target_stream)
- def output(self, text, targetStream=None): + def output(self, text, targetStream=None) -> None: """Forward text to cache and flush if output is not locked.
All input methods locks the output to a stream but collect them @@ -202,7 +202,7 @@ if not self.lock.locked(): self.flush()
- def flush(self): + def flush(self) -> None: """Output cached text.
.. versionadded:: 7.0 @@ -212,7 +212,7 @@ self.stream_output(*args, **kwargs) self.cache.clear()
- def cache_output(self, *args, **kwargs): + def cache_output(self, *args, **kwargs) -> None: """Put text to cache.
.. versionadded:: 7.0 @@ -220,7 +220,7 @@ with self.lock: self.cache.append((args, kwargs))
- def stream_output(self, text, targetStream=None): + def stream_output(self, text, targetStream=None) -> None: """Output text to a stream.
If a character can't be displayed in the encoding used by the user's @@ -388,7 +388,7 @@ default (if it's not None). Otherwise the index of the answer in options. If default is not a shortcut, it'll return -1. """ - def output_option(option, before_question): + def output_option(option, before_question) -> None: """Print an OutputOption before or after question.""" if isinstance(option, OutputOption) \ and option.before_question is before_question: @@ -523,7 +523,7 @@ # create a class-level lock that can be shared by all instances sharedlock = threading.RLock()
- def __init__(self, UI, stream=None): + def __init__(self, UI, stream=None) -> None: """Initialize the handler.
If stream is not specified, sys.stderr is used. @@ -531,7 +531,7 @@ super().__init__(stream=stream) self.UI = UI
- def createLock(self): + def createLock(self) -> None: """Acquire a thread lock for serializing access to the underlying I/O.
Replace Handler's instance-specific lock with the shared @@ -540,7 +540,7 @@ """ self.lock = TerminalHandler.sharedlock
- def emit(self, record): + def emit(self, record) -> None: """Emit the record formatted to the output.""" self.flush() if record.name == 'py.warnings': @@ -564,7 +564,7 @@
"""
- def __init__(self, level=None): + def __init__(self, level=None) -> None: """Initializer.""" self.level = level
diff --git a/pywikibot/userinterfaces/terminal_interface_unix.py b/pywikibot/userinterfaces/terminal_interface_unix.py index 8d82f32..2a56196 100755 --- a/pywikibot/userinterfaces/terminal_interface_unix.py +++ b/pywikibot/userinterfaces/terminal_interface_unix.py @@ -43,7 +43,7 @@ code = re.search(r'(?<=\[)\d+', color).group() return chr(27) + '[' + str(int(code) + 10) + 'm'
- def encounter_color(self, color, target_stream): + def encounter_color(self, color, target_stream) -> None: """Write the Unix color directly to the stream.""" fg, bg = self.divide_color(color) fg = unixColors[fg] @@ -52,6 +52,6 @@ bg = unixColors[bg] self._write(self.make_unix_bg_color(bg), target_stream)
- def _write(self, text, target_stream): + def _write(self, text, target_stream) -> None: """Optionally encode and write the text to the target stream.""" target_stream.write(text) diff --git a/pywikibot/userinterfaces/terminal_interface_win32.py b/pywikibot/userinterfaces/terminal_interface_win32.py index 05b62eb..1a0ef51 100755 --- a/pywikibot/userinterfaces/terminal_interface_win32.py +++ b/pywikibot/userinterfaces/terminal_interface_win32.py @@ -34,7 +34,7 @@
"""User interface for Win32 terminals."""
- def __init__(self): + def __init__(self) -> None: """Initializer.""" super().__init__() (stdin, stdout, stderr, argv) = win32_unicode.get_unicode_console() @@ -48,7 +48,7 @@ """Return whether the target stream supports actually color.""" return getattr(target_stream, '_hConsole', None) is not None
- def encounter_color(self, color, target_stream): + def encounter_color(self, color, target_stream) -> None: """Set the new color.""" fg, bg = self.divide_color(color) windows_color = windowsColors[fg] diff --git a/pywikibot/userinterfaces/transliteration.py b/pywikibot/userinterfaces/transliteration.py index 9e63800..d7bb3a0 100644 --- a/pywikibot/userinterfaces/transliteration.py +++ b/pywikibot/userinterfaces/transliteration.py @@ -1085,7 +1085,7 @@
"""Class to transliterating text."""
- def __init__(self, encoding: str): + def __init__(self, encoding: str) -> None: """ Initialize the transliteration mapping.
diff --git a/pywikibot/userinterfaces/win32_unicode.py b/pywikibot/userinterfaces/win32_unicode.py index cbc4864..6242832 100755 --- a/pywikibot/userinterfaces/win32_unicode.py +++ b/pywikibot/userinterfaces/win32_unicode.py @@ -67,7 +67,7 @@
"""Unicode terminal input class."""
- def __init__(self, hConsole, name, bufsize=1024): + def __init__(self, hConsole, name, bufsize=1024) -> None: """Initialize the input stream.""" self._hConsole = hConsole self.bufsize = bufsize @@ -90,7 +90,7 @@
"""Unicode terminal output class."""
- def __init__(self, hConsole, stream, fileno, name): + def __init__(self, hConsole, stream, fileno, name) -> None: """Initialize the output stream.""" self._hConsole = hConsole self._stream = stream @@ -169,12 +169,12 @@ # which makes for frustrating debugging if stderr is directed to our wrapper. # So be paranoid about catching errors and reporting them to original_stderr, # so that we can at least see them. -def _complain(message): +def _complain(message) -> None: print(isinstance(message, str) and message or repr(message), file=original_stderr)
-def force_truetype_console(h_stdout): +def force_truetype_console(h_stdout) -> None: """Force the console to use a TrueType font (Vista+).""" TMPF_TRUETYPE = 0x04 LF_FACESIZE = 32 diff --git a/pywikibot/xmlreader.py b/pywikibot/xmlreader.py index 6a03e8b..9b57a19 100644 --- a/pywikibot/xmlreader.py +++ b/pywikibot/xmlreader.py @@ -49,7 +49,7 @@
def __init__(self, title, ns, id, text, username, ipedit, timestamp, editRestriction, moveRestriction, revisionid, comment, - redirect): + redirect) -> None: """Initializer.""" # TODO: there are more tags we can read. self.title = title @@ -78,13 +78,13 @@ There surely are more elegant ways to do this. """
- def __init__(self, filename, handler): + def __init__(self, filename, handler) -> None: """Initializer.""" super().__init__() self.filename = filename self.handler = handler
- def run(self): + def run(self) -> None: """Parse the file in a single thread.""" xml.sax.parse(self.filename, self.handler)
@@ -102,7 +102,7 @@ Default: False. """
- def __init__(self, filename, allrevisions=False): + def __init__(self, filename, allrevisions=False) -> None: """Initializer.""" self.filename = filename if allrevisions: @@ -143,7 +143,7 @@ elem.clear() self.root.clear()
- def _headers(self, elem): + def _headers(self, elem) -> None: """Extract headers from XML chunk.""" self.title = elem.findtext('{%s}title' % self.uri) self.ns = elem.findtext('{%s}ns' % self.uri) diff --git a/scripts/add_text.py b/scripts/add_text.py index 903c6a5..e850f73 100755 --- a/scripts/add_text.py +++ b/scripts/add_text.py @@ -117,7 +117,7 @@ text = re.sub(r'\r?\n', ' - ', self.opt.text[:200]) return {'adding': text}
- def setup(self): + def setup(self) -> None: """Read text to be added from file.""" if self.opt.textfile: with codecs.open(self.opt.textfile, 'r', @@ -159,7 +159,7 @@
return super().skip_page(page)
- def treat_page(self): + def treat_page(self) -> None: """Add text to the page.""" text = self.current_page.text
diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py index e72f9b1..b1ce7e1 100755 --- a/scripts/blockpageschecker.py +++ b/scripts/blockpageschecker.py @@ -184,7 +184,7 @@ 'move': False, }
- def invoke_editor(self, page): + def invoke_editor(self, page) -> None: """Ask for an editor and invoke it.""" choice = pywikibot.input_choice( 'Do you want to open the page?', @@ -195,16 +195,16 @@ editor = TextEditor() editor.edit(page.text)
- def setup(self): + def setup(self) -> None: """Initialize the coroutine for parsing templates.""" self.parse_tempates = self.remove_templates() self.parse_tempates.send(None)
- def teardown(self): + def teardown(self) -> None: """Close the coroutine.""" self.parse_tempates.close()
- def treat_page(self): + def treat_page(self) -> None: """Load the given page, do some changes, and save it.""" page = self.current_page if page.isRedirectPage(): diff --git a/scripts/category.py b/scripts/category.py index cae9495..ef138cd 100755 --- a/scripts/category.py +++ b/scripts/category.py @@ -188,7 +188,7 @@ """A class to prepare a list of pages for robots."""
def __init__(self, follow_redirects=False, edit_redirects=False, - create=False, **kwargs): + create=False, **kwargs) -> None: """Initializer.""" super().__init__(**kwargs) self.follow_redirects = follow_redirects diff --git a/scripts/category_redirect.py b/scripts/category_redirect.py index 4089cb3..2b542ae 100755 --- a/scripts/category_redirect.py +++ b/scripts/category_redirect.py @@ -63,7 +63,7 @@ 'delay': 7, # cool down delay in days }
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initializer.""" super().__init__(**kwargs) self.catprefix = self.site.namespace(14) + ':' @@ -211,7 +211,7 @@ log_text += ('\n\n' + message) return log_text
- def check_hard_redirect(self): + def check_hard_redirect(self) -> None: """ Check for hard-redirected categories.
@@ -270,7 +270,7 @@ self.site, 'category_redirect-log-add-failed', params) self.log_text.append(message)
- def run(self): + def run(self) -> None: """Run the bot.""" # validate L10N self.template_list = self.site.category_redirects() @@ -451,7 +451,7 @@
self.teardown()
- def teardown(self): + def teardown(self) -> None: """Write self.record to file and save logs.""" with open(self.datafile, 'wb') as f: pickle.dump(self.record, f, protocol=config.pickle_protocol) diff --git a/scripts/change_pagelang.py b/scripts/change_pagelang.py index a8fb485..ea205cb 100755 --- a/scripts/change_pagelang.py +++ b/scripts/change_pagelang.py @@ -49,13 +49,13 @@ 'setlang': '', }
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initializer.""" super().__init__(**kwargs) assert not (self.opt.always and self.opt.never), \ 'Either "always" or "never" must be set but not both'
- def changelang(self, page): + def changelang(self, page) -> None: """Set page language.
:param page: The page to update and save @@ -73,7 +73,7 @@ 'page language to {green}{1}{default}', page.title(as_link=True), self.opt.setlang))
- def treat(self, page): + def treat(self, page) -> None: """Treat a page.
:param page: The page to treat diff --git a/scripts/commons_information.py b/scripts/commons_information.py index 1367448..baa2637 100755 --- a/scripts/commons_information.py +++ b/scripts/commons_information.py @@ -36,7 +36,7 @@ 'appropriate language template') }
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initialzer.""" super().__init__(**kwargs) lang_tmp_cat = pywikibot.Category(self.site, self.lang_tmp_cat) @@ -95,7 +95,7 @@ return False
@staticmethod - def replace_value(param, value): + def replace_value(param, value) -> None: """Replace param with given value.""" lstrip = param.value.lstrip() lspaces = param.value[:len(param.value) - len(lstrip)] @@ -107,7 +107,7 @@ if isinstance(mwparserfromhell, Exception): raise mwparserfromhell
- def treat_page(self): + def treat_page(self) -> None: """Treat current page.""" page = self.current_page code = mwparserfromhell.parse(page.text) diff --git a/scripts/commonscat.py b/scripts/commonscat.py index 8149224..b58b6ab 100755 --- a/scripts/commonscat.py +++ b/scripts/commonscat.py @@ -269,7 +269,7 @@ return True return False
- def treat_page(self): + def treat_page(self) -> None: """ Add CommonsCat template to page.
@@ -333,7 +333,7 @@
def changeCommonscat( self, page=None, oldtemplate='', oldcat='', - newtemplate='', newcat='', linktitle=''): + newtemplate='', newcat='', linktitle='') -> None: """Change the current commonscat template and target.""" if '3=S' in (oldcat, linktitle): return # TODO: handle additional param on de-wiki diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py index 084e252..b276e37 100755 --- a/scripts/data_ingestion.py +++ b/scripts/data_ingestion.py @@ -120,7 +120,7 @@ """Represents a Photo (or other file), with metadata, to be uploaded."""
def __init__(self, url: str, metadata: Dict[str, Any], - site: Optional[pywikibot.site.APISite] = None): + site: Optional[pywikibot.site.APISite] = None) -> None: """ Initializer.
@@ -216,7 +216,7 @@
"""Data ingestion bot."""
- def __init__(self, titlefmt: str, pagefmt: str, **kwargs): + def __init__(self, titlefmt: str, pagefmt: str, **kwargs) -> None: """ Initializer.
@@ -227,7 +227,7 @@ self.titlefmt = titlefmt self.pagefmt = pagefmt
- def treat(self, page): + def treat(self, page) -> None: """Process each page.
1. Check for existing duplicates on the wiki specified in self.site. @@ -282,7 +282,7 @@ return configuration
-def main(*args: str): +def main(*args: str) -> None: """ Process command line arguments and invoke bot.
diff --git a/scripts/djvutext.py b/scripts/djvutext.py index 638ea06..509ddeb 100755 --- a/scripts/djvutext.py +++ b/scripts/djvutext.py @@ -63,7 +63,13 @@ 'summary': '', }
- def __init__(self, djvu, index, pages: Optional[tuple] = None, **kwargs): + def __init__( + self, + djvu, + index, + pages: Optional[tuple] = None, + **kwargs + ) -> None: """ Initializer.
@@ -109,7 +115,7 @@ page.page_number = page_number # remember page number in djvu file yield page
- def treat(self, page): + def treat(self, page) -> None: """Process one page.""" old_text = page.text
diff --git a/scripts/download_dump.py b/scripts/download_dump.py index e33d4cf..53062e6 100755 --- a/scripts/download_dump.py +++ b/scripts/download_dump.py @@ -70,7 +70,7 @@ return dump_filepath return None
- def run(self): + def run(self) -> None: """Run bot.""" def convert_from_bytes(total_bytes): for unit in ['B', 'K', 'M', 'G', 'T']: diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py index 12d111a..8707595 100755 --- a/scripts/fixing_redirects.py +++ b/scripts/fixing_redirects.py @@ -191,7 +191,7 @@ target = None return page, target
- def treat_page(self): + def treat_page(self) -> None: """Change all redirects from the current page to actual links.""" try: newtext = self.current_page.text diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py index c25e8bc..3f60988 100755 --- a/scripts/imagetransfer.py +++ b/scripts/imagetransfer.py @@ -152,7 +152,7 @@ 'chunk_size': 0, }
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initializer.
:keyword generator: the pages to work on @@ -179,7 +179,7 @@ else: self.opt.target = pywikibot.Site(self.opt.target)
- def transfer_image(self, sourceImagePage): + def transfer_image(self, sourceImagePage) -> None: """ Download image and its description, and upload it to another site.
@@ -257,7 +257,7 @@ % target_filename, summary=reason)
- def show_image_list(self, imagelist): + def show_image_list(self, imagelist) -> None: """Print image list.""" pywikibot.output('-' * 60) for i, image in enumerate(imagelist): @@ -292,7 +292,7 @@
pywikibot.output('=' * 60)
- def treat(self, page): + def treat(self, page) -> None: """Treat a single page.""" if self.opt.interwiki: imagelist = [] diff --git a/scripts/interwiki.py b/scripts/interwiki.py index 7696438..fff4d5c 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -458,7 +458,7 @@ summary = '' repository = False
- def note(self, text): + def note(self, text) -> None: """Output a notification message with.
The text will be printed only if conf.quiet isn't set. @@ -622,7 +622,7 @@ this Object. """
- def __init__(self, origin=None, hints=None, conf=None): + def __init__(self, origin=None, hints=None, conf=None) -> None: """ Initializer.
@@ -710,7 +710,7 @@ return page return None
- def translate(self, hints=None, keephintedsites=False): + def translate(self, hints=None, keephintedsites=False) -> None: """Add the given translation hints to the todo list.""" if self.conf.same and self.origin: if hints: @@ -772,7 +772,7 @@ # If there are any, return them. Otherwise, nothing is in progress. return result
- def makeForcedStop(self, counter): + def makeForcedStop(self, counter) -> None: """End work on the page before the normal end.""" for site, count in self.todo.iter_values_len(): counter.minus(site, count) @@ -1013,7 +1013,7 @@
return False
- def reportInterwikilessPage(self, page): + def reportInterwikilessPage(self, page) -> None: """Report interwikiless page.""" self.conf.note('{} does not have any interwiki links' .format(self.origin)) @@ -1023,7 +1023,7 @@ 'a', 'utf-8') as f: f.write('# {} \n'.format(page))
- def askForHints(self, counter): + def askForHints(self, counter) -> None: """Ask for hints to other sites.""" if (not self.workonme # we don't work on it anyway or not self.untranslated and not self.conf.askhints @@ -1067,7 +1067,7 @@ if self.conf.hintsareright: self.hintedsites.add(page.site)
- def check_page(self, page, counter): + def check_page(self, page, counter) -> None: """Check whether any iw links should be added to the todo list.""" if not page.exists(): self.conf.remove.append(str(page)) @@ -1245,7 +1245,7 @@ if self.forcedStop: break
- def batchLoaded(self, counter): + def batchLoaded(self, counter) -> None: """ Notify that the promised batch of pages was loaded.
@@ -1296,14 +1296,14 @@ """Return True if all the work for this subject has completed.""" return not self.todo
- def problem(self, txt, createneed=True): + def problem(self, txt, createneed=True) -> None: """Report a problem with the resolution of this subject.""" pywikibot.error(txt) self.confirm = True if createneed: self.problemfound = True
- def whereReport(self, page, indent=4): + def whereReport(self, page, indent=4) -> None: """Report found interlanguage links with conflicts.""" for page2 in sorted(self.found_in[page]): if page2 is None: @@ -1776,7 +1776,7 @@ break return True
- def reportBacklinks(self, new, updatedSites): + def reportBacklinks(self, new, updatedSites) -> None: """ Report missing back links. This will be called from finish() if needed.
@@ -1845,7 +1845,7 @@ It controls which pages are queried from which languages when. """
- def __init__(self, conf=None): + def __init__(self, conf=None) -> None: """Initializer.""" self.subjects = [] # We count how many pages still need to be loaded per site. @@ -1859,7 +1859,7 @@ self.conf = conf self.site = pywikibot.Site()
- def add(self, page, hints=None): + def add(self, page, hints=None) -> None: """Add a single subject to the list.""" subj = Subject(page, hints=hints, conf=self.conf) self.subjects.append(subj) @@ -1867,7 +1867,7 @@ # Keep correct counters self.plus(site, count)
- def setPageGenerator(self, pageGenerator, number=None, until=None): + def setPageGenerator(self, pageGenerator, number=None, until=None) -> None: """ Add a generator of subjects.
@@ -1883,7 +1883,7 @@ """Return generator of titles for dump file.""" return (s.origin.title(as_link=True) for s in self.subjects)
- def generateMore(self, number): + def generateMore(self, number) -> None: """Generate more subjects.
This is called internally when the @@ -2054,7 +2054,7 @@ subject.batchLoaded(self) return True
- def queryStep(self): + def queryStep(self) -> None: """Delete the ones that are done now.""" self.oneQuery() for i in range(len(self.subjects) - 1, -1, -1): @@ -2067,16 +2067,16 @@ """Check whether there is still more work to do.""" return not self and self.pageGenerator is None
- def plus(self, site, count=1): + def plus(self, site, count=1) -> None: """Helper routine that the Subject class expects in a counter.""" self.counts[site] += count
- def minus(self, site, count=1): + def minus(self, site, count=1) -> None: """Helper routine that the Subject class expects in a counter.""" self.counts[site] -= count self.counts = +self.counts # remove zero and negative counts
- def run(self): + def run(self) -> None: """Start the process until finished.""" while not self.isDone(): self.queryStep() @@ -2199,7 +2199,7 @@
FILE_PATTERN = '{site.family.name}-{site.code}.txt'
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initializer.
:keyword do_continue: If true, continue alphabetically starting at the @@ -2225,7 +2225,7 @@ """Return next page namespace for continue option.""" return self._next_namespace
- def remove(self, filename: str): + def remove(self, filename: str) -> None: """Remove filename from restored files.
:param filename: A filename to be removed from restored set. @@ -2278,7 +2278,7 @@ namespace=self.next_namespace, filterredir=False)
- def write_dump(self, iterable, append: bool = True): + def write_dump(self, iterable, append: bool = True) -> None: """Write dump file.
:param iterable: an iterable of page titles to be dumped. @@ -2296,7 +2296,7 @@ .format(site=self.site, mode=mode)) self.remove(filename)
- def delete_dumps(self): + def delete_dumps(self) -> None: """Delete processed dumps.""" for filename in self.restored_files: tail = os.path.split(filename)[-1] diff --git a/scripts/movepages.py b/scripts/movepages.py index dcacaff..75f4536 100755 --- a/scripts/movepages.py +++ b/scripts/movepages.py @@ -91,7 +91,7 @@ return True return super().skip_page(page)
- def treat_page(self): + def treat_page(self) -> None: """Treat a single page.""" page = self.current_page pagetitle = page.title(with_ns=False) diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py index bbb9038..9ec0557 100755 --- a/scripts/nowcommons.py +++ b/scripts/nowcommons.py @@ -188,7 +188,7 @@ 'replaceonly': False, }
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initializer.""" super().__init__(**kwargs) self.site = pywikibot.Site() @@ -257,7 +257,7 @@ file_on_commons = val[1].strip() return file_on_commons
- def run(self): + def run(self) -> None: """Run the bot.""" commons = self.commons comment = self.summary diff --git a/scripts/pagefromfile.py b/scripts/pagefromfile.py index f5dff52..0a439d3 100755 --- a/scripts/pagefromfile.py +++ b/scripts/pagefromfile.py @@ -83,7 +83,7 @@
"""No title found."""
- def __init__(self, offset): + def __init__(self, offset) -> None: """Initializer.""" self.offset = offset
diff --git a/scripts/parser_function_count.py b/scripts/parser_function_count.py index 08ce8d9..5cdc4cd 100755 --- a/scripts/parser_function_count.py +++ b/scripts/parser_function_count.py @@ -77,7 +77,7 @@ 'upload': None, }
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initializer.""" super().__init__(**kwargs) editcomment = { @@ -121,7 +121,7 @@ gen, self.site.doc_subpage, quantifier='none') return gen
- def setup(self): + def setup(self) -> None: """Setup magic words, regex and result counter.""" pywikibot.output('Hold on, this will need some time. ' 'You will be notified by 50 templates.') @@ -131,7 +131,7 @@ self.regex = re.compile(r'#({}):'.format('|'.join(magicwords)), re.I) self.results = Counter()
- def treat(self, page): + def treat(self, page) -> None: """Process a single template.""" title = page.title() if (self.counter['read'] + 1) % 50 == 0: @@ -149,7 +149,7 @@ and len(self.results) >= self.opt.first: self.stop()
- def teardown(self): + def teardown(self) -> None: """Final processing.""" resultlist = '\n'.join( '# [[{result[0]}]] ({result[1]})' diff --git a/scripts/patrol.py b/scripts/patrol.py index 80959e3..3dacb6f 100755 --- a/scripts/patrol.py +++ b/scripts/patrol.py @@ -64,7 +64,7 @@ _logger = 'patrol'
-def verbose_output(string): +def verbose_output(string) -> None: """Verbose output.""" if pywikibot.config.verbose_output: pywikibot.output(string) @@ -85,7 +85,7 @@ 'en': 'patrol_whitelist', }
- def __init__(self, site=None, **kwargs): + def __init__(self, site=None, **kwargs) -> None: """ Initializer.
@@ -335,7 +335,7 @@
"""Matches of page site title and linked pages title."""
- def __init__(self, page_title: str): + def __init__(self, page_title: str) -> None: """Initializer.
:param page_title: The page title for this rule diff --git a/scripts/protect.py b/scripts/protect.py index a65fb4b..a01947a 100755 --- a/scripts/protect.py +++ b/scripts/protect.py @@ -84,7 +84,7 @@ 'expiry': '', }
- def __init__(self, protections, **kwargs): + def __init__(self, protections, **kwargs) -> None: """ Create a new ProtectionRobot.
@@ -95,7 +95,7 @@ super().__init__(**kwargs) self.protections = protections
- def treat_page(self): + def treat_page(self) -> None: """Run the bot's action on each page.
treat_page treats every page given by the generator and applies diff --git a/scripts/redirect.py b/scripts/redirect.py index 9f71e8a..c928e08 100755 --- a/scripts/redirect.py +++ b/scripts/redirect.py @@ -124,7 +124,7 @@ 'xml': None, }
- def __init__(self, action, **kwargs): + def __init__(self, action, **kwargs) -> None: """Initializer.""" super().__init__(**kwargs) self.site = pywikibot.Site() diff --git a/scripts/reflinks.py b/scripts/reflinks.py index 5b4183e..d42ce0d 100755 --- a/scripts/reflinks.py +++ b/scripts/reflinks.py @@ -210,7 +210,7 @@
"""Container to handle a single bare reference."""
- def __init__(self, link, name, site=None): + def __init__(self, link, name, site=None) -> None: """Initializer.""" self.name = name self.link = link @@ -239,7 +239,7 @@ dead_link = '<ref{}>{}</ref>'.format(self.name, tag) return dead_link
- def transform(self, ispdf=False): + def transform(self, ispdf=False) -> None: """Normalize the title.""" # convert html entities if not ispdf: @@ -263,7 +263,7 @@ self.title = string2html(self.title, self.site.encoding()) # TODO : remove HTML when both opening and closing tags are included
- def avoid_uppercase(self): + def avoid_uppercase(self) -> None: """ Convert to title()-case if title is 70% uppercase characters.
@@ -302,7 +302,7 @@ name the first, and remove the content of the others """
- def __init__(self, site=None): + def __init__(self, site=None) -> None: """Initializer.""" if not site: site = pywikibot.Site() @@ -438,7 +438,7 @@ 'summary': '', }
- def __init__(self, **kwargs): + def __init__(self, **kwargs) -> None: """Initializer.""" super().__init__(**kwargs) self._use_fake_user_agent = config.fake_user_agent_default.get( @@ -494,12 +494,12 @@ self.MIME = re.compile( r'application/(?:xhtml\+xml|xml)|text/(?:ht|x)ml')
- def httpError(self, err_num, link, pagetitleaslink): + def httpError(self, err_num, link, pagetitleaslink) -> None: """Log HTTP Error.""" pywikibot.stdout('HTTP error ({}) for {} on {}' .format(err_num, link, pagetitleaslink))
- def getPDFTitle(self, ref, response): + def getPDFTitle(self, ref, response) -> None: """Use pdfinfo to retrieve title from a PDF.""" # pdfinfo is Unix-only pywikibot.output('Reading PDF file...') @@ -551,7 +551,7 @@ return True return super().skip_page(page)
- def treat(self, page): + def treat(self, page) -> None: """Process one page.""" # Load the page's text from the wiki new_text = page.text diff --git a/scripts/replace.py b/scripts/replace.py index 2413c25..035ba41 100755 --- a/scripts/replace.py +++ b/scripts/replace.py @@ -164,7 +164,7 @@ }
-def precompile_exceptions(exceptions, use_regex, flags): +def precompile_exceptions(exceptions, use_regex, flags) -> None: """Compile the exceptions with the given flags.""" if not exceptions: return @@ -187,7 +187,13 @@
"""The replacement instructions."""
- def __init__(self, old, new, edit_summary=None, default_summary=True): + def __init__( + self, + old, + new, + edit_summary=None, + default_summary=True + ) -> None: """Create a basic replacement instance.""" self.old = old self.old_regex = None @@ -225,7 +231,7 @@ """ return None
- def _compile(self, use_regex, flags): + def _compile(self, use_regex, flags) -> None: """Compile the search text without modifying the flags.""" # This does not update use_regex and flags depending on this instance if not use_regex: @@ -234,7 +240,7 @@ self.old_regex = self.old self.old_regex = re.compile(self.old_regex, flags)
- def compile(self, use_regex, flags): + def compile(self, use_regex, flags) -> None: """Compile the search text.""" # Set the regular expression flags if self.case_insensitive is False: @@ -253,7 +259,7 @@
def __init__(self, old, new, use_regex=None, exceptions=None, case_insensitive=None, edit_summary=None, - default_summary=True): + default_summary=True) -> None: """Create a single replacement entry unrelated to a fix.""" super().__init__(old, new, edit_summary, default_summary) self._use_regex = use_regex @@ -279,7 +285,7 @@ """Return whether the search text is using regex.""" return self._use_regex
- def _compile(self, use_regex, flags): + def _compile(self, use_regex, flags) -> None: """Compile the search regex and exceptions.""" super()._compile(use_regex, flags) precompile_exceptions(self.exceptions, use_regex, flags) @@ -304,7 +310,7 @@ """
def __init__(self, use_regex, exceptions, case_insensitive, edit_summary, - name): + name) -> None: """Create a fix list which can contain multiple replacements.""" super().__init__() self.use_regex = use_regex @@ -314,7 +320,7 @@ self.edit_summary = edit_summary self.name = name
- def _compile_exceptions(self, use_regex, flags): + def _compile_exceptions(self, use_regex, flags) -> None: """Compile the exceptions if not already done.""" if not self.exceptions and self._exceptions is not None: self.exceptions = dict(self._exceptions) @@ -326,7 +332,7 @@ """A replacement entry for ReplacementList."""
def __init__(self, old, new, fix_set, edit_summary=None, - default_summary=True): + default_summary=True) -> None: """Create a replacement entry inside a fix set.""" super().__init__(old, new, edit_summary, default_summary) self.fix_set = fix_set @@ -366,7 +372,7 @@ """ return self.fix_set
- def _compile(self, use_regex, flags): + def _compile(self, use_regex, flags) -> None: """Compile the search regex and the fix's exceptions.""" super()._compile(use_regex, flags) self.fix_set._compile_exceptions(use_regex, flags) @@ -395,7 +401,14 @@ :type exceptions: dict """
- def __init__(self, xmlFilename, xmlStart, replacements, exceptions, site): + def __init__( + self, + xmlFilename, + xmlStart, + replacements, + exceptions, + site + ) -> None: """Initializer.""" self.xmlFilename = xmlFilename self.replacements = replacements @@ -656,7 +669,7 @@
return super().skip_page(page)
- def treat(self, page): + def treat(self, page) -> None: """Work on each page retrieved from generator.""" try: original_text = page.text @@ -746,7 +759,7 @@ self.save(page, original_text, new_text, applied, show_diff=False, asynchronous=False)
- def save(self, page, oldtext, newtext, applied, **kwargs): + def save(self, page, oldtext, newtext, applied, **kwargs) -> None: """Save the given page.""" self.userPut(page, oldtext, newtext, summary=self.generate_summary(applied), diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py index e4dd498..8f9a516 100755 --- a/scripts/replicate_wiki.py +++ b/scripts/replicate_wiki.py @@ -62,7 +62,7 @@
"""Work is done in here."""
- def __init__(self, options): + def __init__(self, options) -> None: """Initializer.""" self.options = options
@@ -96,7 +96,7 @@ pywikibot.output(str(s), newline=False) pywikibot.output('')
- def check_sysops(self): + def check_sysops(self) -> None: """Check if sysops are the same on all wikis.""" def get_users(site): userlist = [ul['name'] for ul in site.allusers(group='sysop')] @@ -109,7 +109,7 @@ diff.sort() self.user_diff[site] = diff
- def check_namespaces(self): + def check_namespaces(self) -> None: """Check all namespaces, to be ditched for clarity.""" namespaces = [ 0, # Main @@ -131,7 +131,7 @@ for ns in namespaces: self.check_namespace(ns)
- def check_namespace(self, namespace): + def check_namespace(self, namespace) -> None: """Check an entire namespace.""" pywikibot.output('\nCHECKING NAMESPACE {}'.format(namespace)) pages = (p.title() for p in self.original.allpages( @@ -149,7 +149,7 @@ 'error: Redirectpage - todo: handle gracefully') pywikibot.output('')
- def generate_overviews(self): + def generate_overviews(self) -> None: """Create page on wikis with overview of bot results.""" for site in self.sites: sync_overview_page = Page(site, @@ -180,7 +180,7 @@ return ('{} replicate_wiki.py synchronization from {}' .format(site.user(), str(self.original)))
- def check_page(self, pagename): + def check_page(self, pagename) -> None: """Check one page.""" pywikibot.output('\nChecking ' + pagename) page1 = Page(self.original, pagename) diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py index fe556f0..7d2bcd8 100755 --- a/scripts/solve_disambiguation.py +++ b/scripts/solve_disambiguation.py @@ -632,7 +632,7 @@ self.summary = None self.dn_template_str = i18n.translate(self.site, dn_template)
- def _clean_args(self, args, kwargs): + def _clean_args(self, args, kwargs) -> None: """Cleanup positional and keyword arguments.
Replace positional arguments with keyword arguments. @@ -1255,7 +1255,7 @@ 'to': targets, 'count': len(new_targets)})
- def teardown(self): + def teardown(self) -> None: """Write ignoring pages to a file.""" self.primaryIgnoreManager.ignore(self.ignores)
diff --git a/scripts/speedy_delete.py b/scripts/speedy_delete.py index ad68da2..dcbcde1 100755 --- a/scripts/speedy_delete.py +++ b/scripts/speedy_delete.py @@ -394,11 +394,11 @@
return reason or suggested_reason
- def exit(self): + def exit(self) -> None: """Just call teardown after current run.""" self.teardown()
- def run(self): + def run(self) -> None: """Start the bot's action.""" start_ts = pywikibot.Timestamp.now() self.saved_progress = None @@ -420,7 +420,7 @@ self._start_ts = start_ts super().exit()
- def treat_page(self): + def treat_page(self) -> None: """Process one page.""" page = self.current_page
@@ -464,7 +464,7 @@ else: pywikibot.output('Skipping page {}'.format(page))
- def setup(self): + def setup(self) -> None: """Refresh generator.""" generator = pagegenerators.CategorizedPageGenerator( self.csd_cat, start=self.saved_progress) diff --git a/scripts/unusedfiles.py b/scripts/unusedfiles.py index e8956b5..c891f05 100755 --- a/scripts/unusedfiles.py +++ b/scripts/unusedfiles.py @@ -104,7 +104,7 @@ 'This script is not localized for {} site;\n' 'try using -filetemplate:<template name>.'.format(self.site))
- def treat(self, image): + def treat(self, image) -> None: """Process one image page.""" # Use get_file_url() and file_is_shared() to confirm it is local media # rather than a local page with the same name as shared media. @@ -145,7 +145,7 @@ self.current_page = page self.put_current(text)
- def post_to_flow_board(self, page, post): + def post_to_flow_board(self, page, post) -> None: """Post message as a Flow topic.""" board = Board(page) header, rest = post.split('\n', 1) diff --git a/scripts/watchlist.py b/scripts/watchlist.py index 5bfef5b..cbb5444 100755 --- a/scripts/watchlist.py +++ b/scripts/watchlist.py @@ -41,7 +41,7 @@ return watchlist
-def count_watchlist(site=None): +def count_watchlist(site=None) -> None: """Count only the total number of page(s) in watchlist for this wiki.""" if site is None: site = pywikibot.Site() @@ -50,7 +50,7 @@ .format(watchlist_count))
-def count_watchlist_all(): +def count_watchlist_all() -> None: """Count only the total number of page(s) in watchlist for all wikis.""" wl_count_all = 0 pywikibot.output('Counting pages in watchlists of all wikis...') @@ -74,7 +74,7 @@ return list(site.watched_pages(force=True))
-def refresh_all(): +def refresh_all() -> None: """Reload watchlists for all wikis where a watchlist is already present.""" cache_path = CachedRequest._get_cache_dir() files = os.scandir(cache_path) @@ -89,7 +89,7 @@ seen.add(entry.site)
-def refresh_new(): +def refresh_new() -> None: """Load watchlists of all wikis for accounts set in user-config.py.""" pywikibot.output( 'Downloading all watchlists for your accounts in user-config.py') diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py index 9f11b04..f8eb86a 100755 --- a/scripts/weblinkchecker.py +++ b/scripts/weblinkchecker.py @@ -286,7 +286,7 @@ After checking the page, it will die. """
- def __init__(self, page, url, history, http_ignores, day): + def __init__(self, page, url, history, http_ignores, day) -> None: """Initializer.""" self.page = page self.url = url @@ -364,7 +364,7 @@ } """
- def __init__(self, report_thread, site=None): + def __init__(self, report_thread, site=None) -> None: """Initializer.""" self.report_thread = report_thread if not site: @@ -385,7 +385,7 @@ # no saved history exists yet, or history dump broken self.history_dict = {}
- def log(self, url, error, containing_page, archive_url): + def log(self, url, error, containing_page, archive_url) -> None: """Log an error report to a text file in the deadlinks subdirectory.""" if archive_url: error_report = '* {} ([{} archive])\n'.format(url, archive_url) @@ -414,7 +414,7 @@ self.report_thread.report(url, error_report, containing_page, archive_url)
- def set_dead_link(self, url, error, page, weblink_dead_days): + def set_dead_link(self, url, error, page, weblink_dead_days) -> None: """Add the fact that the link was found dead to the .dat file.""" with self.semaphore: now = time.time() @@ -456,7 +456,7 @@
return False
- def save(self): + def save(self) -> None: """Save the .dat file to disk.""" with open(self.datfilename, 'wb') as f: pickle.dump(self.history_dict, f, protocol=config.pickle_protocol) @@ -471,7 +471,7 @@ sure that two LinkCheckerThreads cannot access the queue at the same time. """
- def __init__(self): + def __init__(self) -> None: """Initializer.""" super().__init__() self.semaphore = threading.Semaphore() @@ -479,22 +479,22 @@ self.finishing = False self.killed = False
- def report(self, url, error_report, containing_page, archive_url): + def report(self, url, error_report, containing_page, archive_url) -> None: """Report error on talk page of the page containing the dead link.""" with self.semaphore: self.queue.append((url, error_report, containing_page, archive_url))
- def shutdown(self): + def shutdown(self) -> None: """Finish thread.""" self.finishing = True
- def kill(self): + def kill(self) -> None: """Kill thread.""" # TODO: remove if unneeded self.killed = True
- def run(self): + def run(self) -> None: """Run thread.""" while not self.killed: if not self.queue: @@ -567,7 +567,7 @@ It uses several LinkCheckThreads at once to process pages from generator. """
- def __init__(self, http_ignores=None, day=7, **kwargs): + def __init__(self, http_ignores=None, day=7, **kwargs) -> None: """Initializer.""" super().__init__(**kwargs)
@@ -585,7 +585,7 @@ self.threads = ThreadList(limit=config.max_external_links, wait_time=config.retry_wait)
- def treat_page(self): + def treat_page(self) -> None: """Process one page.""" page = self.current_page for url in weblinks_from_text(page.text): diff --git a/scripts/welcome.py b/scripts/welcome.py index 7b9b7d3..d317622 100755 --- a/scripts/welcome.py +++ b/scripts/welcome.py @@ -648,7 +648,7 @@ pywikibot.output('Reported') self.BAQueue = []
- def makelogpage(self): + def makelogpage(self) -> None: """Make log page.""" if not globalvar.make_welcome_log or not self.welcomed_users: return @@ -855,7 +855,7 @@ if welcomed_count >= globalvar.dump_to_log: self.makelogpage()
- def write_log(self): + def write_log(self) -> None: """Write logfile.""" welcomed_count = len(self.welcomed_users) if globalvar.make_welcome_log and welcomed_count > 0: @@ -874,14 +874,14 @@ self.report_bad_account()
@staticmethod - def show_status(message=Msg.DEFAULT): + def show_status(message=Msg.DEFAULT) -> None: """Output colorized status.""" msg, color = message.value pywikibot.output(color_format('{color}[{msg:5}]{default} ', msg=msg, color=color), newline=False)
- def teardown(self): + def teardown(self) -> None: """Some cleanups after run operation.""" if self.welcomed_users: self.show_status() @@ -930,7 +930,7 @@ 'script source header for documentation.'))
-def handle_args(args): +def handle_args(args) -> None: """Process command line arguments.
If args is an empty list, sys.argv is used.
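The pattern applied throughout these hunks is the standard `-> None` return annotation for functions and methods that return nothing. A minimal, hypothetical sketch (not taken from the patch; the class and names below are invented for illustration) of what the annotation looks like and how a type checker makes use of it:

    class Counter:
        """Hypothetical example, not part of the patch."""

        def __init__(self, start: int = 0) -> None:
            # __init__ never returns a value, so `-> None` is the correct
            # annotation; it also makes the whole signature explicitly typed.
            self.value = start

        def increment(self, step: int = 1) -> None:
            """Increase the counter in place; nothing is returned."""
            self.value += step

        def current(self) -> int:
            """Unlike the methods above, this one returns a value."""
            return self.value


    counter = Counter()
    counter.increment()
    # A type checker such as mypy would flag the next line, because
    # increment() is annotated as returning None:
    # result: int = counter.increment()   # error: incompatible types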