jenkins-bot submitted this change.

Approvals:
  JJMC89: Looks good to me, approved
  jenkins-bot: Verified
add parameter type of bool based on the default value

Change-Id: I71c8474594c273dcb22f5b6e6675d2d910350e5d
---
M pywikibot/data/api.py
M pywikibot/page/__init__.py
M pywikibot/scripts/generate_user_files.py
M pywikibot/site/_apisite.py
M pywikibot/site/_basesite.py
M pywikibot/site/_datasite.py
M pywikibot/site/_extensions.py
M pywikibot/site/_generators.py
M pywikibot/site/_tokenwallet.py
M pywikibot/textlib.py
M pywikibot/throttle.py
M pywikibot/titletranslate.py
M pywikibot/tools/__init__.py
M pywikibot/tools/djvu.py
M pywikibot/version.py
M pywikibot/xmlreader.py
M scripts/archivebot.py
M scripts/category.py
M scripts/checkimages.py
M scripts/interwiki.py
M scripts/patrol.py
M scripts/redirect.py
M scripts/reflinks.py
M scripts/solve_disambiguation.py
M scripts/weblinkchecker.py
M scripts/welcome.py
26 files changed, 335 insertions(+), 223 deletions(-)
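
The pattern throughout the diff is mechanical: wherever a parameter defaults to True or False, the signature gains an explicit bool annotation, and any now-redundant ':type ...: bool' field is dropped from the docstring. A condensed before/after sketch (a hypothetical standalone condensation modelled on Page.expand_text, not a literal hunk):

    # Before: the type is only implied by the default value and a
    # ':type force: bool' field in the docstring.
    def expand_text(force=False, includecomments=False) -> str:
        """Return the page text with templates expanded.

        :param force: force updating from the live site
        :type force: bool
        """
        return ''

    # After: the annotation carries the type, so the ':type' field goes.
    def expand_text(
        force: bool = False,
        includecomments: bool = False
    ) -> str:
        """Return the page text with templates expanded.

        :param force: force updating from the live site
        """
        return ''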

diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index f362a0c..7c81237 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -93,7 +93,7 @@

"""Workaround for bug in python 3 email handling of CTE binary."""

- def as_bytes(self, unixfrom=False, policy=None):
+ def as_bytes(self, unixfrom: bool = False, policy=None):
"""Return unmodified binary payload."""
policy = self.policy if policy is None else policy
fp = BytesIO()
@@ -2662,7 +2662,12 @@

"""

- def __init__(self, generator: str, g_content=False, **kwargs) -> None:
+ def __init__(
+ self,
+ generator: str,
+ g_content: bool = False,
+ **kwargs
+ ) -> None:
"""
Initializer.

diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index 15ea6ff..2fe45e3 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -263,10 +263,21 @@
self.site.loadpageinfo(self)
return self._pageid

- def title(self, *, underscore=False, with_ns=True,
- with_section=True, as_url=False, as_link=False,
- allow_interwiki=True, force_interwiki=False, textlink=False,
- as_filename=False, insite=None, without_brackets=False) -> str:
+ def title(
+ self,
+ *,
+ underscore: bool = False,
+ with_ns: bool = True,
+ with_section: bool = True,
+ as_url: bool = False,
+ as_link: bool = False,
+ allow_interwiki: bool = True,
+ force_interwiki: bool = False,
+ textlink: bool = False,
+ as_filename: bool = False,
+ insite=None,
+ without_brackets: bool = False
+ ) -> str:
"""
Return the title of this Page, as a string.

@@ -623,7 +634,7 @@
self._parsed_text = self.site.get_parsed_page(self)
return self._parsed_text

- def properties(self, force=False) -> dict:
+ def properties(self, force: bool = False) -> dict:
"""
Return the properties of the page.

@@ -634,7 +645,7 @@
self.site.loadpageprops(self)
return self._pageprops

- def defaultsort(self, force=False) -> Optional[str]:
+ def defaultsort(self, force: bool = False) -> Optional[str]:
"""
Extract value of the {{DEFAULTSORT:}} magic word from the page.

@@ -642,7 +653,11 @@
"""
return self.properties(force=force).get('defaultsort')

- def expand_text(self, force=False, includecomments=False) -> str:
+ def expand_text(
+ self,
+ force: bool = False,
+ includecomments: bool = False
+ ) -> str:
"""Return the page text with all templates and parser words expanded.

:param force: force updating from the live site
@@ -1221,8 +1236,8 @@
cc=apply_cosmetic_changes, quiet=quiet, **kwargs)

@allow_asynchronous
- def _save(self, summary=None, watch=None, minor=True, botflag=None,
- cc=None, quiet=False, **kwargs):
+ def _save(self, summary=None, watch=None, minor: bool = True, botflag=None,
+ cc=None, quiet: bool = False, **kwargs):
"""Helper function for save()."""
link = self.title(as_link=True)
if cc or (cc is None and config.cosmetic_changes):
@@ -1345,7 +1360,7 @@
self.clear_cache()
return self.site.purgepages([self], **kwargs)

- def touch(self, callback=None, botflag=False, **kwargs):
+ def touch(self, callback=None, botflag: bool = False, **kwargs):
"""
Make a touch edit for this page.

@@ -1417,15 +1432,13 @@

return self.site.pagelinks(self, **kwargs)

- def interwiki(self, expand=True):
+ def interwiki(self, expand: bool = True):
"""
Iterate interwiki links in the page text, excluding language links.

:param expand: if True (default), include interwiki links found in
templates transcluded onto this page; if False, only iterate
interwiki links found in this page's own wikitext
- :type expand: bool
-
:return: a generator that yields Link objects
:rtype: generator
"""
@@ -1453,14 +1466,12 @@
# ignore any links with invalid contents
continue

- def langlinks(self, include_obsolete=False) -> list:
+ def langlinks(self, include_obsolete: bool = False) -> list:
"""
Return a list of all inter-language Links on this page.

:param include_obsolete: if true, return even Link objects whose site
is obsolete
- :type include_obsolete: bool
-
:return: list of Link objects.
"""
# Note: We preload a list of *all* langlinks, including links to
@@ -1580,7 +1591,7 @@
"""
return self.site.page_extlinks(self, total=total)

- def coordinates(self, primary_only=False):
+ def coordinates(self, primary_only: bool = False):
"""
Return a list of Coordinate objects for points on the page.

@@ -1837,7 +1848,12 @@
self._deletedRevs[rev['timestamp']] = rev
yield rev['timestamp']

- def getDeletedRevision(self, timestamp, content=False, **kwargs) -> List:
+ def getDeletedRevision(
+ self,
+ timestamp,
+ content: bool = False,
+ **kwargs
+ ) -> List:
"""
Return a particular deleted revision by timestamp.

@@ -1860,12 +1876,11 @@
return item['revisions'][0]
return []

- def markDeletedRevision(self, timestamp, undelete=True):
+ def markDeletedRevision(self, timestamp, undelete: bool = True):
"""
Mark the revision identified by timestamp for undeletion.

:param undelete: if False, mark the revision to remain deleted.
- :type undelete: bool
"""
if not hasattr(self, '_deletedRevs'):
self.loadDeletedRevisions()
@@ -2167,8 +2182,15 @@
result.append((pywikibot.Page(link, self.site), positional))
return result

- def set_redirect_target(self, target_page, create=False, force=False,
- keep_section=False, save=True, **kwargs):
+ def set_redirect_target(
+ self,
+ target_page,
+ create: bool = False,
+ force: bool = False,
+ keep_section: bool = False,
+ save: bool = True,
+ **kwargs
+ ):
"""
Change the page's text to point to the redirect page.

@@ -2176,15 +2198,11 @@
:type target_page: pywikibot.Page or string
:param create: if true, it creates the redirect even if the page
doesn't exist.
- :type create: bool
:param force: if true, it sets the redirect target even if the page
doesn't exist or it's not a redirect.
- :type force: bool
:param keep_section: if the old redirect links to a section
and the new one doesn't, it uses the old redirect's section.
- :type keep_section: bool
:param save: if true, it saves the page immediately.
- :type save: bool
:param kwargs: Arguments which are used for saving the page directly
afterwards, like 'summary' for edit summary.
"""
@@ -2717,7 +2735,7 @@
def members(self, recurse: bool = False,
namespaces=None,
total: Optional[int] = None,
- content=False):
+ content: bool = False):
"""Yield all category contents (subcats, pages, and files).

:rtype: typing.Iterable[pywikibot.Page]
@@ -2913,13 +2931,11 @@
self._userprops['blockreason'] = r[0]['reason']
return self._userprops

- def registration(self, force=False):
+ def registration(self, force: bool = False):
"""
Fetch registration date for this user.

:param force: if True, forces reloading the data from API
- :type force: bool
-
:rtype: pywikibot.Timestamp or None
"""
if not self.isAnonymous():
@@ -3326,12 +3342,11 @@
params['ids'] = self.id
return params

- def getID(self, numeric=False):
+ def getID(self, numeric: bool = False):
"""
Get the identifier of this entity.

:param numeric: Strip the first letter and return an int
- :type numeric: bool
"""
if numeric:
return int(self.id[1:]) if self.id != '-1' else -1
@@ -3552,12 +3567,11 @@

return super().get(force=force)

- def getID(self, numeric=False):
+ def getID(self, numeric: bool = False):
"""
Get the entity identifier.

:param numeric: Strip the first letter and return an int
- :type numeric: bool
"""
if self.id == '-1':
self.get()
@@ -3823,8 +3837,15 @@
data = {'aliases': aliases}
self.editEntity(data, **kwargs)

- def set_redirect_target(self, target_page, create=False, force=False,
- keep_section=False, save=True, **kwargs):
+ def set_redirect_target(
+ self,
+ target_page,
+ create: bool = False,
+ force: bool = False,
+ keep_section: bool = False,
+ save: bool = True,
+ **kwargs
+ ):
"""
Set target of a redirect for a Wikibase page.

@@ -3833,14 +3854,13 @@
raise NotImplementedError

@allow_asynchronous
- def addClaim(self, claim, bot=True, **kwargs):
+ def addClaim(self, claim, bot: bool = True, **kwargs):
"""
Add a claim to the entity.

:param claim: The claim to add
:type claim: pywikibot.page.Claim
:param bot: Whether to flag as bot (if possible)
- :type bot: bool
:keyword asynchronous: if True, launch a separate thread to add claim
asynchronously
:type asynchronous: bool
@@ -4011,21 +4031,19 @@

return super().title(**kwargs)

- def getID(self, numeric=False, force=False):
+ def getID(self, numeric: bool = False, force: bool = False):
"""
Get the entity identifier.

:param numeric: Strip the first letter and return an int
- :type numeric: bool
:param force: Force an update of new data
- :type force: bool
"""
if not hasattr(self, 'id') or force:
self.get(force=force)
return super().getID(numeric=numeric)

@classmethod
- def fromPage(cls, page, lazy_load=False):
+ def fromPage(cls, page, lazy_load: bool = False):
"""
Get the ItemPage for a Page that links to it.

@@ -4033,7 +4051,6 @@
:type page: pywikibot.page.Page
:param lazy_load: Do not raise NoPageError if either page or
corresponding ItemPage does not exist.
- :type lazy_load: bool
:rtype: pywikibot.page.ItemPage

:raise pywikibot.exceptions.NoPageError: There is no corresponding
@@ -4099,15 +4116,19 @@

return item

- def get(self, force=False, get_redirect=False, *args, **kwargs) -> dict:
+ def get(
+ self,
+ force: bool = False,
+ get_redirect: bool = False,
+ *args,
+ **kwargs
+ ) -> Dict[str, Any]:
"""
Fetch all item data, and cache it.

:param force: override caching
- :type force: bool
:param get_redirect: return the item content, do not follow the
redirect, do not raise an exception.
- :type get_redirect: bool
:raise NotImplementedError: a value in args or kwargs
:return: actual data which entity holds
:note: dicts returned by this method are references to content of this
@@ -4151,7 +4172,7 @@
pg._item = self
yield pg

- def getSitelink(self, site, force=False) -> str:
+ def getSitelink(self, site, force: bool = False) -> str:
"""
Return the title for the specific site.

@@ -4226,8 +4247,15 @@
self._isredir = True
self._redirtarget = item

- def set_redirect_target(self, target_page, create=False, force=False,
- keep_section=False, save=True, **kwargs):
+ def set_redirect_target(
+ self,
+ target_page,
+ create: bool = False,
+ force: bool = False,
+ keep_section: bool = False,
+ save: bool = True,
+ **kwargs
+ ):
"""
Make the item redirect to another item.

@@ -4237,7 +4265,6 @@
:type target_page: pywikibot.page.ItemPage or string
:param force: if true, it sets the redirect target even if the page
is not a redirect.
- :type force: bool
"""
if isinstance(target_page, str):
target_page = pywikibot.ItemPage(self.repo, target_page)
@@ -4328,12 +4355,11 @@
self._type = self.repo.getPropertyType(self)
return self._type

- def getID(self, numeric=False):
+ def getID(self, numeric: bool = False):
"""
Get the identifier of this property.

:param numeric: Strip the first letter and return an int
- :type numeric: bool
"""
if numeric:
return int(self.id[1:])
@@ -4425,12 +4451,11 @@
return Claim(self.site, self.getID(), datatype=self.type,
*args, **kwargs)

- def getID(self, numeric=False):
+ def getID(self, numeric: bool = False):
"""
Get the identifier of this property.

:param numeric: Strip the first letter and return an int
- :type numeric: bool
"""
# enforce this parent's implementation
return WikibasePage.getID(self, numeric=numeric)
@@ -4470,8 +4495,17 @@

SNAK_TYPES = ('value', 'somevalue', 'novalue')

- def __init__(self, site, pid, snak=None, hash=None, is_reference=False,
- is_qualifier=False, rank: str = 'normal', **kwargs) -> None:
+ def __init__(
+ self,
+ site,
+ pid,
+ snak=None,
+ hash=None,
+ is_reference: bool = False,
+ is_qualifier: bool = False,
+ rank: str = 'normal',
+ **kwargs
+ ) -> None:
"""
Initializer.

@@ -4546,8 +4580,13 @@
return False
return True

- def same_as(self, other, ignore_rank=True, ignore_quals=False,
- ignore_refs=True) -> bool:
+ def same_as(
+ self,
+ other,
+ ignore_rank: bool = True,
+ ignore_quals: bool = False,
+ ignore_refs: bool = True
+ ) -> bool:
"""Check if two claims are same."""
if ignore_rank:
attributes = ['id', 'snaktype', 'target']
diff --git a/pywikibot/scripts/generate_user_files.py b/pywikibot/scripts/generate_user_files.py
index b4c09c9..7924328 100755
--- a/pywikibot/scripts/generate_user_files.py
+++ b/pywikibot/scripts/generate_user_files.py
@@ -100,9 +100,12 @@
return False


-def get_site_and_lang(default_family: Optional[str] = 'wikipedia',
- default_lang: Optional[str] = 'en',
- default_username: Optional[str] = None, force=False):
+def get_site_and_lang(
+ default_family: Optional[str] = 'wikipedia',
+ default_lang: Optional[str] = 'en',
+ default_username: Optional[str] = None,
+ force: bool = False
+):
"""
Ask the user for the family, site code and username.

@@ -264,7 +267,12 @@
return ''.join(result)


-def create_user_config(main_family, main_code, main_username, force=False):
+def create_user_config(
+ main_family,
+ main_code,
+ main_username,
+ force: bool = False
+):
"""
Create a user-config.py in base_dir.

diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py
index 8ca21bd..851ca9d 100644
--- a/pywikibot/site/_apisite.py
+++ b/pywikibot/site/_apisite.py
@@ -594,7 +594,7 @@
"""
return 'locked' in self.get_globaluserinfo(user, force)

- def get_searched_namespaces(self, force=False):
+ def get_searched_namespaces(self, force: bool = False):
"""
Retrieve the default searched namespaces for the user.

@@ -651,8 +651,8 @@
return path.replace('$1', '{}')

@staticmethod
- def assert_valid_iter_params(msg_prefix, start, end, reverse,
- is_ts=True) -> None:
+ def assert_valid_iter_params(msg_prefix, start, end, reverse: bool,
+ is_ts: bool = True) -> None:
"""Validate iterating API parameters.

:param msg_prefix: The calling method name
@@ -660,10 +660,8 @@
:param start: The start value to compare
:param end: The end value to compare
:param reverse: The reverse option
- :type reverse: bool
:param is_ts: When comparing timestamps (with is_ts=True) the start
is usually greater than end. When comparing titles, it is vice versa.
- :type is_ts: bool
:raises AssertionError: start/end values are in wrong order
"""
if reverse ^ is_ts:
@@ -1127,14 +1125,13 @@
# 'title' is expected to be URL-encoded already
return self.siteinfo['articlepath'].replace('$1', title)

- def namespace(self, num, all=False):
+ def namespace(self, num: int, all: bool = False):
"""Return string containing local name of namespace 'num'.

If optional argument 'all' is true, return all recognized
values for this namespace.

:param num: Namespace constant.
- :type num: int
:param all: If True return a Namespace object. Otherwise
return the namespace name.
:return: local name or Namespace object
@@ -1168,7 +1165,7 @@
raise PageRelatedError(
page, 'loadimageinfo: Query on {} returned no imageinfo')

- def loadpageinfo(self, page, preload=False) -> None:
+ def loadpageinfo(self, page, preload: bool = False) -> None:
"""Load page info from api and store in page attributes.

:see: https://www.mediawiki.org/wiki/API:Info
@@ -1193,7 +1190,7 @@
)
self._update_page(page, query)

- def loadimageinfo(self, page, history=False,
+ def loadimageinfo(self, page, history: bool = False,
url_width=None, url_height=None, url_param=None) -> None:
"""Load image info from api and save in page attributes.

@@ -1590,9 +1587,19 @@
_ep_text_overrides = {'appendtext', 'prependtext', 'undo'}

@need_right('edit')
- def editpage(self, page, summary=None, minor=True, notminor=False,
- bot=True, recreate=True, createonly=False, nocreate=False,
- watch=None, **kwargs) -> bool:
+ def editpage(
+ self,
+ page,
+ summary=None,
+ minor: bool = True,
+ notminor: bool = False,
+ bot: bool = True,
+ recreate: bool = True,
+ createonly: bool = False,
+ nocreate: bool = False,
+ watch=None,
+ **kwargs
+ ) -> bool:
"""Submit an edit to be saved to the wiki.

:see: https://www.mediawiki.org/wiki/API:Edit
@@ -1934,8 +1941,8 @@
}

@need_right('move')
- def movepage(self, page, newtitle: str, summary, movetalk=True,
- noredirect=False):
+ def movepage(self, page, newtitle: str, summary, movetalk: bool = True,
+ noredirect: bool = False):
"""Move a Page to a new title.

:see: https://www.mediawiki.org/wiki/API:Move
@@ -2324,9 +2331,18 @@
# TODO: implement undelete

@need_right('block')
- def blockuser(self, user, expiry, reason: str, anononly=True,
- nocreate=True, autoblock=True, noemail=False,
- reblock=False, allowusertalk=False):
+ def blockuser(
+ self,
+ user,
+ expiry,
+ reason: str,
+ anononly: bool = True,
+ nocreate: bool = True,
+ autoblock: bool = True,
+ noemail: bool = False,
+ reblock: bool = False,
+ allowusertalk: bool = False
+ ):
"""
Block a user for a certain amount of time and for a certain reason.

@@ -2350,20 +2366,14 @@
str (relative/absolute) or False ('never')
:param reason: The reason for the block.
:param anononly: Disable anonymous edits for this IP.
- :type anononly: boolean
:param nocreate: Prevent account creation.
- :type nocreate: boolean
:param autoblock: Automatically block the last used IP address and all
subsequent IP addresses from which this account logs in.
- :type autoblock: boolean
:param noemail: Prevent user from sending email through the wiki.
- :type noemail: boolean
:param reblock: If the user is already blocked, overwrite the existing
block.
- :type reblock: boolean
:param allowusertalk: Whether the user can edit their talk page while
blocked.
- :type allowusertalk: boolean
:return: The data retrieved from the API request.
:rtype: dict
"""
diff --git a/pywikibot/site/_basesite.py b/pywikibot/site/_basesite.py
index c2a02f9..19f3250 100644
--- a/pywikibot/site/_basesite.py
+++ b/pywikibot/site/_basesite.py
@@ -230,7 +230,7 @@
return [lang for lang in self.languages()
if self.namespaces.lookup_normalized_name(lang) is None]

- def _interwiki_urls(self, only_article_suffixes=False):
+ def _interwiki_urls(self, only_article_suffixes: bool = False):
base_path = self.path()
if not only_article_suffixes:
yield base_path + '{}'
@@ -272,7 +272,7 @@
"""Return list of localized PAGENAMEE tags for the site."""
return ['PAGENAMEE']

- def lock_page(self, page, block=True):
+ def lock_page(self, page, block: bool = True):
"""
Lock page for writing. Must be called before writing any page.

diff --git a/pywikibot/site/_datasite.py b/pywikibot/site/_datasite.py
index 5c2c62f..f5c8a2a 100644
--- a/pywikibot/site/_datasite.py
+++ b/pywikibot/site/_datasite.py
@@ -244,7 +244,7 @@
return dtype

@need_right('edit')
- def editEntity(self, entity, data, bot=True, **kwargs):
+ def editEntity(self, entity, data, bot: bool = True, **kwargs):
"""
Edit entity.

@@ -257,7 +257,6 @@
:param data: data updates
:type data: dict
:param bot: Whether to mark the edit as a bot edit
- :type bot: bool
:return: New entity data
:rtype: dict
"""
@@ -297,7 +296,7 @@
return req.submit()

@need_right('edit')
- def addClaim(self, entity, claim, bot=True, summary=None) -> None:
+ def addClaim(self, entity, claim, bot: bool = True, summary=None) -> None:
"""
Add a claim.

@@ -306,7 +305,6 @@
:param claim: Claim to be added
:type claim: pywikibot.Claim
:param bot: Whether to mark the edit as a bot edit
- :type bot: bool
:param summary: Edit summary
:type summary: str
"""
@@ -329,7 +327,7 @@

@need_right('edit')
def changeClaimTarget(self, claim, snaktype: str = 'value',
- bot=True, summary=None):
+ bot: bool = True, summary=None):
"""
Set the claim target to the value of the provided claim target.

@@ -338,7 +336,6 @@
:param snaktype: An optional snaktype ('value', 'novalue' or
'somevalue'). Default: 'value'
:param bot: Whether to mark the edit as a bot edit
- :type bot: bool
:param summary: Edit summary
:type summary: str
"""
@@ -359,14 +356,13 @@
return req.submit()

@need_right('edit')
- def save_claim(self, claim, summary=None, bot=True):
+ def save_claim(self, claim, summary=None, bot: bool = True):
"""
Save the whole claim to the wikibase site.

:param claim: The claim to save
:type claim: pywikibot.Claim
:param bot: Whether to mark the edit as a bot edit
- :type bot: bool
:param summary: Edit summary
:type summary: str
"""
@@ -573,7 +569,7 @@
return req.submit()

@need_right('edit')
- def linkTitles(self, page1, page2, bot=True):
+ def linkTitles(self, page1, page2, bot: bool = True):
"""
Link two pages together.

@@ -582,7 +578,6 @@
:param page2: Second page to link
:type page2: pywikibot.Page
:param bot: Whether to mark the edit as a bot edit
- :type bot: bool
:return: dict API output
:rtype: dict
"""
@@ -601,7 +596,7 @@

@need_right('item-merge')
def mergeItems(self, from_item, to_item, ignore_conflicts=None,
- summary=None, bot=True):
+ summary=None, bot: bool = True):
"""
Merge two items together.

@@ -616,7 +611,6 @@
:param summary: Edit summary
:type summary: str
:param bot: Whether to mark the edit as a bot edit
- :type bot: bool
:return: dict API output
:rtype: dict
"""
@@ -634,7 +628,7 @@
return req.submit()

@need_right('item-redirect')
- def set_redirect_target(self, from_item, to_item, bot=True):
+ def set_redirect_target(self, from_item, to_item, bot: bool = True):
"""
Make a redirect to another item.

@@ -643,7 +637,6 @@
:param from_item: Title of the item to be redirected.
:type from_item: pywikibot.ItemPage
:param bot: Whether to mark the edit as a bot edit
- :type bot: bool
"""
params = {
'action': 'wbcreateredirect',
diff --git a/pywikibot/site/_extensions.py b/pywikibot/site/_extensions.py
index 63b8df3..ed7b597 100644
--- a/pywikibot/site/_extensions.py
+++ b/pywikibot/site/_extensions.py
@@ -375,11 +375,11 @@
content_format: str = 'wikitext',
limit: int = 100,
sortby: str = 'newest',
- toconly=False,
+ toconly: bool = False,
offset=None,
offset_id=None,
- reverse=False,
- include_offset=False
+ reverse: bool = False,
+ include_offset: bool = False
):
"""
Retrieve the topiclist of a Flow board.
diff --git a/pywikibot/site/_generators.py b/pywikibot/site/_generators.py
index 5605f22..f8ab2ad 100644
--- a/pywikibot/site/_generators.py
+++ b/pywikibot/site/_generators.py
@@ -94,8 +94,15 @@
priority, page = heapq.heappop(prio_queue)
yield page

- def preloadpages(self, pagelist, *, groupsize: int = 50, templates=False,
- langlinks=False, pageprops=False):
+ def preloadpages(
+ self,
+ pagelist,
+ *,
+ groupsize: int = 50,
+ templates: bool = False,
+ langlinks: bool = False,
+ pageprops: bool = False
+ ):
"""Return a generator to a list of preloaded pages.

Pages are iterated in the same order as in the underlying pagelist.
@@ -103,16 +110,11 @@

:param pagelist: an iterable that returns Page objects
:param groupsize: how many Pages to query at a time
- :type groupsize: int
:param templates: preload pages (typically templates) transcluded in
the provided pages
- :type templates: bool
:param langlinks: preload all language links from the provided pages
to other languages
- :type langlinks: bool
:param pageprops: preload various properties defined in page content
- :type pageprops: bool
-
"""
props = 'revisions|info|categoryinfo'
if templates:
@@ -194,9 +196,9 @@
priority, page = heapq.heappop(prio_queue)
yield page

- def pagebacklinks(self, page, *, follow_redirects=False,
+ def pagebacklinks(self, page, *, follow_redirects: bool = False,
filter_redirects=None, namespaces=None, total=None,
- content=False):
+ content: bool = False):
"""Iterate all pages that link to the given page.

:see: https://www.mediawiki.org/wiki/API:Backlinks
@@ -258,7 +260,7 @@
return blgen

def page_embeddedin(self, page, *, filter_redirects=None, namespaces=None,
- total=None, content=False):
+ total=None, content: bool = False):
"""Iterate all pages that embedded the given page as a template.

:see: https://www.mediawiki.org/wiki/API:Embeddedin
@@ -321,10 +323,17 @@
namespaces=namespaces, total=total,
g_content=content, **rdargs)

- def pagereferences(self, page, *, follow_redirects=False,
- filter_redirects=None, with_template_inclusion=True,
- only_template_inclusion=False, namespaces=None,
- total=None, content=False):
+ def pagereferences(
+ self,
+ page, *,
+ follow_redirects: bool = False,
+ filter_redirects=None,
+ with_template_inclusion: bool = True,
+ only_template_inclusion: bool = False,
+ namespaces=None,
+ total=None,
+ content: bool = False
+ ):
"""
Convenience method combining pagebacklinks and page_embeddedin.

@@ -395,7 +404,7 @@
**plargs)

# Sortkey doesn't work with generator
- def pagecategories(self, page, *, total=None, content=False):
+ def pagecategories(self, page, *, total=None, content: bool = False):
"""Iterate categories to which page belongs.

:see: https://www.mediawiki.org/wiki/API:Categories
@@ -414,7 +423,7 @@
type_arg='categories', total=total,
g_content=content, **clargs)

- def pageimages(self, page, *, total=None, content=False):
+ def pageimages(self, page, *, total=None, content: bool = False):
"""Iterate images used (not just linked) on the page.

:see: https://www.mediawiki.org/wiki/API:Images
@@ -430,7 +439,7 @@
g_content=content)

def pagetemplates(self, page, *, namespaces=None, total=None,
- content=False):
+ content: bool = False):
"""Iterate templates transcluded (not just linked) on the page.

:see: https://www.mediawiki.org/wiki/API:Templates
@@ -611,7 +620,14 @@
props.append('roles')
return props

- def loadrevisions(self, page, *, content=False, section=None, **kwargs):
+ def loadrevisions(
+ self,
+ page,
+ *,
+ content: bool = False,
+ section=None,
+ **kwargs
+ ):
"""Retrieve revision information and store it in page object.

By default, retrieves the last (current) revision of the page,
@@ -630,7 +646,6 @@
:type page: pywikibot.Page
:param content: if True, retrieve the wiki-text of each revision;
otherwise, only retrieve the revision metadata (default)
- :type content: bool
:param section: if specified, retrieve only this section of the text
(content must be True); section must be given by number (top of
the article is section 0), not name
@@ -802,9 +817,9 @@
maxsize=None,
protect_type=None,
protect_level=None,
- reverse=False,
+ reverse: bool = False,
total=None,
- content=False
+ content: bool = False
):
"""Iterate pages in a single namespace.

@@ -879,8 +894,8 @@
start: str = '!',
prefix: str = '',
namespace=0,
- unique=False,
- fromids=False,
+ unique: bool = False,
+ fromids: bool = False,
total=None
):
"""Iterate all links to pages (which need not exist) in one namespace.
@@ -920,7 +935,7 @@
yield p

def allcategories(self, start: str = '!', prefix: str = '', total=None,
- reverse=False, content=False):
+ reverse: bool = False, content: bool = False):
"""Iterate categories used (which need not have a Category page).

Iterator yields Category objects. Note that, in practice, links that
@@ -1000,11 +1015,11 @@
prefix: str = '',
minsize=None,
maxsize=None,
- reverse=False,
+ reverse: bool = False,
sha1=None,
sha1base36=None,
total=None,
- content=False
+ content: bool = False
):
"""Iterate all images, ordered by image title.

@@ -1041,8 +1056,14 @@
aigen.request['gaisha1base36'] = sha1base36
return aigen

- def filearchive(self, start=None, end=None, reverse=False, total=None,
- **kwargs):
+ def filearchive(
+ self,
+ start=None,
+ end=None,
+ reverse: bool = False,
+ total=None,
+ **kwargs
+ ):
"""Iterate archived files.

Yields dicts of file archive information.
@@ -1072,7 +1093,7 @@
fagen.request['fadir'] = 'descending'
return fagen

- def blocks(self, starttime=None, endtime=None, reverse=False,
+ def blocks(self, starttime=None, endtime=None, reverse: bool = False,
blockids=None, users=None, iprange: Optional[str] = None,
total: Optional[int] = None):
"""Iterate all current blocks, in order of creation.
@@ -1091,7 +1112,6 @@
:param endtime: stop iterating at this Timestamp
:type endtime: pywikibot.Timestamp
:param reverse: if True, iterate oldest blocks first (default: newest)
- :type reverse: bool
:param blockids: only iterate blocks with these id numbers. Numbers
must be separated by '|' if given by a str.
:type blockids: str, tuple or list
@@ -1131,7 +1151,7 @@

def exturlusage(self, url: Optional[str] = None,
protocol: Optional[str] = None, namespaces=None,
- total: Optional[int] = None, content=False):
+ total: Optional[int] = None, content: bool = False):
"""Iterate Pages that contain links to the given URL.

:see: https://www.mediawiki.org/wiki/API:Exturlusage
@@ -1168,7 +1188,7 @@
total=total, g_content=content)

def imageusage(self, image, namespaces=None, filterredir=None,
- total=None, content=False):
+ total=None, content: bool = False):
"""Iterate Pages that contain links to the given FilePage.

:see: https://www.mediawiki.org/wiki/API:Imageusage
@@ -1387,8 +1407,8 @@
return srgen

def usercontribs(self, user=None, userprefix=None, start=None, end=None,
- reverse=False, namespaces=None, minor=None,
- total: Optional[int] = None, top_only=False):
+ reverse: bool = False, namespaces=None, minor=None,
+ total: Optional[int] = None, top_only: bool = False):
"""Iterate contributions by a particular user.

Iterated values are in the same format as recentchanges.
@@ -1442,7 +1462,7 @@
ucgen.request['ucshow'] = option_set
return ucgen

- def watchlist_revs(self, start=None, end=None, reverse=False,
+ def watchlist_revs(self, start=None, end=None, reverse: bool = False,
namespaces=None, minor=None, bot=None,
anon=None, total=None):
"""Iterate revisions to pages on the bot user's watchlist.
@@ -1508,7 +1528,7 @@

def deletedrevs(self, titles=None, start=None, end=None,
reverse: bool = False,
- content=False, total=None, **kwargs):
+ content: bool = False, total=None, **kwargs):
"""Iterate deleted revisions.

Each value returned by the iterator will be a dict containing the
@@ -1685,7 +1705,7 @@
return usgen

def randompages(self, total=None, namespaces=None,
- redirects=False, content=False):
+ redirects: Optional[bool] = False, content: bool = False):
"""Iterate a number of random pages.

:see: https://www.mediawiki.org/wiki/API:Random
@@ -1701,7 +1721,6 @@
:param redirects: if True, include only redirect pages in results;
False does not include redirects, and None (MW 1.26+) includes both
types. (default: False)
- :type redirects: bool or None
:param content: if True, load the current content of each iterated page
(default False)
:raises KeyError: a namespace identifier was not resolved
@@ -1821,10 +1840,20 @@

yield result['patrol']

- def newpages(self, user=None, returndict=False,
- start=None, end=None, reverse=False, bot=False,
- redirect=False, excludeuser=None,
- patrolled=None, namespaces=None, total=None):
+ def newpages(
+ self,
+ user=None,
+ returndict: bool = False,
+ start=None,
+ end=None,
+ reverse: bool = False,
+ bot: bool = False,
+ redirect: bool = False,
+ excludeuser=None,
+ patrolled=None,
+ namespaces=None,
+ total=None
+ ):
"""Yield new articles (as Page objects) from recent changes.

Starts with the newest article and fetches the number of articles
@@ -2046,8 +2075,13 @@
"""
return self.querypage('Listredirects', total)

- def protectedpages(self, namespace=0, type: str = 'edit', level=False,
- total=None):
+ def protectedpages(
+ self,
+ namespace=0,
+ type: str = 'edit',
+ level: Union[str, bool] = False,
+ total=None
+ ):
"""
Return protected pages depending on protection level and type.

@@ -2063,7 +2097,6 @@
:type type: str
:param level: The protection level (like 'autoconfirmed'). If False it
shows all protection levels.
- :type level: str or False
:return: The pages which are protected.
:rtype: typing.Iterable[pywikibot.Page]
"""
@@ -2095,14 +2128,13 @@
return self._generator(api.PageGenerator, type_arg='pageswithprop',
gpwppropname=propname, total=total)

- def watched_pages(self, force=False, total=None):
+ def watched_pages(self, force: bool = False, total=None):
"""
Return watchlist.

:see: https://www.mediawiki.org/wiki/API:Watchlistraw

:param force: Reload watchlist
- :type force: bool
:param total: if not None, limit the generator to yielding this many
items in total
:type total: int
diff --git a/pywikibot/site/_tokenwallet.py b/pywikibot/site/_tokenwallet.py
index fd49f32..12bd1a3 100644
--- a/pywikibot/site/_tokenwallet.py
+++ b/pywikibot/site/_tokenwallet.py
@@ -24,7 +24,7 @@
self._tokens = {}
self.failed_cache = set() # cache unavailable tokens.

- def load_tokens(self, types, all=False) -> None:
+ def load_tokens(self, types, all: bool = False) -> None:
"""
Preload one or multiple tokens.

@@ -32,7 +32,6 @@
:type types: iterable
:param all: load all available tokens; if None, load them only if it
can be done in one request.
- :type all: bool
"""
if self.site.user() is None:
self.site.login()
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 8f4b566..d033913 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -19,7 +19,7 @@
from typing import NamedTuple, Optional, Union

import pywikibot
-from pywikibot.backports import Container, Iterable, List
+from pywikibot.backports import Container, Dict, Iterable, List
from pywikibot.backports import OrderedDict as OrderedDictType
from pywikibot.backports import Sequence as SequenceType
from pywikibot.backports import Tuple
@@ -1006,7 +1006,11 @@
# do not find or change links of other kinds, nor any that are formatted
# as in-line interwiki links (e.g., "[[:es:Artículo]]".

-def getLanguageLinks(text: str, insite=None, template_subpage=False) -> dict:
+def getLanguageLinks(
+ text: str,
+ insite=None,
+ template_subpage: bool = False
+) -> Dict:
"""
Return a dict of inter-language links found in text.

@@ -1601,7 +1605,7 @@
# Functions dealing with external links
# -------------------------------------

-def compileLinkR(withoutBracketed=False, onlyBracketed: bool = False):
+def compileLinkR(withoutBracketed: bool = False, onlyBracketed: bool = False):
"""Return a regex that matches external links."""
# RFC 2396 says that URLs may only contain certain characters.
# For this regex we also accept non-allowed characters, so that the bot
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py
index a77ec52..7079d1a 100644
--- a/pywikibot/throttle.py
+++ b/pywikibot/throttle.py
@@ -111,7 +111,7 @@
hashobj = md5(module)
return hashobj.hexdigest()[:4] # slice for Python 3.5

- def _read_file(self, raise_exc=False):
+ def _read_file(self, raise_exc: bool = False):
"""Yield process entries from file."""
try:
with open(self.ctrlfilename) as f:
@@ -197,7 +197,12 @@
pywikibot.log('Found {} {} processes running, including this one.'
.format(count, mysite))

- def setDelays(self, delay=None, writedelay=None, absolute=False) -> None:
+ def setDelays(
+ self,
+ delay=None,
+ writedelay=None,
+ absolute: bool = False
+ ) -> None:
"""Set the nominal delays in seconds. Defaults to config values."""
with self.lock:
delay = delay or self.mindelay
@@ -211,7 +216,7 @@
# Start the delay count now, not at the next check
self.last_read = self.last_write = time.time()

- def getDelay(self, write=False):
+ def getDelay(self, write: bool = False):
"""Return the actual delay, accounting for multiple processes.

This value is the maximum wait between reads/writes, not taking
@@ -234,7 +239,7 @@
thisdelay *= self.process_multiplicity
return thisdelay

- def waittime(self, write=False):
+ def waittime(self, write: bool = False):
"""Return waiting time in seconds.

The result is for a query that would be made right now.
@@ -277,7 +282,7 @@

time.sleep(seconds)

- def __call__(self, requestsize: int = 1, write=False) -> None:
+ def __call__(self, requestsize: int = 1, write: bool = False) -> None:
"""Block the calling program if the throttle time has not expired.

Parameter requestsize is the number of Pages to be read/written;
diff --git a/pywikibot/titletranslate.py b/pywikibot/titletranslate.py
index 9812ff6..1e70a00 100644
--- a/pywikibot/titletranslate.py
+++ b/pywikibot/titletranslate.py
@@ -6,10 +6,16 @@
#
import pywikibot
from pywikibot import config, date
+from pywikibot.backports import List


-def translate(page=None, hints=(), auto=True, removebrackets=False,
- site=None) -> list:
+def translate(
+ page=None,
+ hints=(),
+ auto: bool = True,
+ removebrackets: bool = False,
+ site=None
+) -> List['pywikibot.Link']:
"""
Return a list of links to pages on other sites based on hints.

diff --git a/pywikibot/tools/__init__.py b/pywikibot/tools/__init__.py
index a5367f8..61e7046 100644
--- a/pywikibot/tools/__init__.py
+++ b/pywikibot/tools/__init__.py
@@ -1108,7 +1108,7 @@
return result


-def open_archive(filename, mode: str = 'rb', use_extension=True):
+def open_archive(filename: str, mode: str = 'rb', use_extension: bool = True):
"""
Open a file and uncompress it if needed.

@@ -1123,11 +1123,9 @@
.. versionadded:: 3.0

:param filename: The filename.
- :type filename: str
:param use_extension: Use the file extension instead of the magic number
to determine the type of compression (default True). Must be True when
writing or appending.
- :type use_extension: bool
:param mode: The mode in which the file should be opened. It may either be
'r', 'rb', 'a', 'ab', 'w' or 'wb'. All modes open the file in binary
mode. It defaults to 'rb'.
@@ -1241,8 +1239,8 @@
def file_mode_checker(
filename: str,
mode: int = 0o600,
- quiet=False,
- create=False
+ quiet: bool = False,
+ create: bool = False
):
"""Check file mode and update it, if needed.

@@ -1251,9 +1249,7 @@
:param filename: filename path
:param mode: requested file mode
:param quiet: warn about file mode change if False.
- :type quiet: bool
:param create: create the file if it does not exist already
- :type create: bool
:raise IOError: The file does not exist and `create` is False.
"""
try:
diff --git a/pywikibot/tools/djvu.py b/pywikibot/tools/djvu.py
index debb78e..c657df0 100644
--- a/pywikibot/tools/djvu.py
+++ b/pywikibot/tools/djvu.py
@@ -113,12 +113,11 @@
return wrapper

@check_cache
- def number_of_images(self, force=False):
+ def number_of_images(self, force: bool = False):
"""
Return the number of images in the djvu file.

:param force: if True, refresh the cached data
- :type force: bool
"""
if not hasattr(self, '_page_count'):
res, stdoutdata = _call_cmd(['djvused', '-e', 'n', self.file])
@@ -128,26 +127,23 @@
return self._page_count

@check_page_number
- def page_info(self, n, force=False):
+ def page_info(self, n: int, force: bool = False):
"""
Return a tuple (id, (size, dpi)) for page n of djvu file.

:param n: page n of djvu file
- :type n: int
:param force: if True, refresh the cached data
- :type force: bool
"""
if not hasattr(self, '_page_info') or force:
self._get_page_info(force=force)
return self._page_info[n]

@check_cache
- def _get_page_info(self, force=False):
+ def _get_page_info(self, force: bool = False):
"""
Return a dict of tuples (id, (size, dpi)) for all pages of djvu file.

:param force: if True, refresh the cached data
- :type force: bool
"""
if not hasattr(self, '_page_info'):
self._page_info = {}
@@ -190,12 +186,11 @@
return size, dpi

@check_cache
- def has_text(self, force=False):
+ def has_text(self, force: bool = False):
"""
Test if the djvu file has a text-layer.

:param force: if True, refresh the cached data
- :type force: bool
"""
if not hasattr(self, '_has_text'):
self._get_page_info(force=force)
@@ -221,14 +216,12 @@

@check_page_number
@check_cache
- def get_page(self, n, force=False):
+ def get_page(self, n: int, force: bool = False):
"""
Get page n for djvu file.

:param n: page n of djvu file
- :type n: int
:param force: if True, refresh the cached data
- :type force: bool
"""
if not self.has_text(force=force):
raise ValueError('Djvu file {} has no text layer.'
diff --git a/pywikibot/version.py b/pywikibot/version.py
index a84a536..fa621fb 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -22,7 +22,7 @@

import pywikibot
from pywikibot import config
-from pywikibot.backports import cache
+from pywikibot.backports import List, cache
from pywikibot.comms.http import fetch
from pywikibot.exceptions import VersionParseError

@@ -379,18 +379,19 @@
return None


-def package_versions(modules=None, builtins=False, standard_lib=None):
+def package_versions(
+ modules: Optional[List[str]] = None,
+ builtins: Optional[bool] = False,
+ standard_lib: Optional[bool] = None
+):
"""Retrieve package version information.

When builtins or standard_lib are None, they will be included only
if a version was found in the package.

:param modules: Modules to inspect
- :type modules: list of strings
:param builtins: Include builtins
- :type builtins: Boolean, or None for automatic selection
:param standard_lib: Include standard library packages
- :type standard_lib: Boolean, or None for automatic selection
"""
if not modules:
modules = sys.modules.keys()
diff --git a/pywikibot/xmlreader.py b/pywikibot/xmlreader.py
index 9b57a19..2a5bb3c 100644
--- a/pywikibot/xmlreader.py
+++ b/pywikibot/xmlreader.py
@@ -102,7 +102,7 @@
Default: False.
"""

- def __init__(self, filename, allrevisions=False) -> None:
+ def __init__(self, filename, allrevisions: bool = False) -> None:
"""Initializer."""
self.filename = filename
if allrevisions:
diff --git a/scripts/archivebot.py b/scripts/archivebot.py
index 411e430..04e365b 100755
--- a/scripts/archivebot.py
+++ b/scripts/archivebot.py
@@ -492,7 +492,7 @@
return len(self.header.encode('utf-8')) + sum(t.size()
for t in self.threads)

- def update(self, summary, sort_threads=False) -> None:
+ def update(self, summary, sort_threads: bool = False) -> None:
"""Recombine threads and save page."""
if sort_threads:
pywikibot.output('Sorting threads...')
@@ -558,7 +558,7 @@
"""Get an archiver attribute."""
return self.attributes.get(attr, [default])[0]

- def set_attr(self, attr, value, out=True) -> None:
+ def set_attr(self, attr, value, out: bool = True) -> None:
"""Set an archiver attribute."""
if attr == 'archive':
value = value.replace('_', ' ')
diff --git a/scripts/category.py b/scripts/category.py
index eb69f9b..e5c13ef 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -187,8 +187,13 @@

"""A class to prepare a list of pages for robots."""

- def __init__(self, follow_redirects=False, edit_redirects=False,
- create=False, **kwargs) -> None:
+ def __init__(
+ self,
+ follow_redirects: bool = False,
+ edit_redirects: bool = False,
+ create: bool = False,
+ **kwargs
+ ) -> None:
"""Initializer."""
super().__init__(**kwargs)
self.follow_redirects = follow_redirects
@@ -305,7 +310,7 @@

def __init__(
self,
- rebuild=False,
+ rebuild: bool = False,
filename: str = 'category.dump.bz2'
) -> None:
"""Initializer."""
@@ -833,7 +838,7 @@
self.newcat.text = self.oldcat.text
self._strip_cfd_templates(summary)

- def _strip_cfd_templates(self, summary=None, commit=True) -> None:
+ def _strip_cfd_templates(self, summary=None, commit: bool = True) -> None:
"""Private function to strip out CFD templates from the new category.

The new category is saved.
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index d097449..62d4363 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -510,9 +510,9 @@
self,
site,
log_full_number: int = 25000,
- sendemail_active=False,
- duplicates_report=False,
- log_full_error=True,
+ sendemail_active: bool = False,
+ duplicates_report: bool = False,
+ log_full_error: bool = True,
max_user_notify=None
) -> None:
"""Initializer, define some instance variables."""
@@ -571,9 +571,17 @@
self.timestamp = None
self.uploader = None

- def report(self, newtext, image_to_report, notification=None, head=None,
- notification2=None, unver=True, comm_talk=None, comm_image=None
- ) -> None:
+ def report(
+ self,
+ newtext,
+ image_to_report,
+ notification=None,
+ head=None,
+ notification2=None,
+ unver: bool = True,
+ comm_talk=None,
+ comm_image=None
+ ) -> None:
"""Function to make the reports easier."""
self.image_to_report = image_to_report
self.newtext = newtext
@@ -627,7 +635,7 @@
# we can't find the user, report the problem to the bot
return upload_bot_array[0]

- def tag_image(self, put=True) -> bool:
+ def tag_image(self, put: bool = True) -> bool:
"""Add template to the Image page and find out the uploader."""
# Get the image's description
report_page_object = pywikibot.FilePage(self.site,
@@ -1040,7 +1048,7 @@
return True # Ok - No problem. Let's continue the checking phase

def report_image(self, image_to_report, rep_page=None, com=None,
- rep_text=None, addings=True) -> bool:
+ rep_text=None, addings: bool = True) -> bool:
"""Report the files to the report page when needed."""
rep_page = rep_page or self.rep_page
com = com or self.com
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 98a4250..89e8958 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -710,7 +710,7 @@
return page
return None

- def translate(self, hints=None, keephintedsites=False) -> None:
+ def translate(self, hints=None, keephintedsites: bool = False) -> None:
"""Add the given translation hints to the todo list."""
if self.conf.same and self.origin:
if hints:
@@ -1296,7 +1296,7 @@
"""Return True if all the work for this subject has completed."""
return not self.todo

- def problem(self, txt, createneed=True) -> None:
+ def problem(self, txt, createneed: bool = True) -> None:
"""Report a problem with the resolution of this subject."""
pywikibot.error(txt)
self.confirm = True
diff --git a/scripts/patrol.py b/scripts/patrol.py
index 921df43..ab04b4e 100755
--- a/scripts/patrol.py
+++ b/scripts/patrol.py
@@ -370,8 +370,14 @@
return False


-def api_feed_repeater(gen, delay: float = 0, repeat=False, namespaces=None,
- user=None, recent_new_gen=True):
+def api_feed_repeater(
+ gen,
+ delay: float = 0,
+ repeat: bool = False,
+ namespaces=None,
+ user=None,
+ recent_new_gen: bool = True
+):
"""Generator which loads pages details to be processed."""
while True:
if recent_new_gen:
diff --git a/scripts/redirect.py b/scripts/redirect.py
index 7ed9702..8738b0a 100755
--- a/scripts/redirect.py
+++ b/scripts/redirect.py
@@ -138,8 +138,10 @@
elif action == 'both':
cls.__iter__ = lambda slf: slf.get_redirects_via_api(maxlen=2)

- def get_redirects_from_dump(self, alsoGetPageTitles=False) -> Tuple[
- Dict[str, str], Set[str]]:
+ def get_redirects_from_dump(
+ self,
+ alsoGetPageTitles: bool = False
+ ) -> Tuple[Dict[str, str], Set[str]]:
"""
Extract redirects from dump.

diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 23458ce..ed54129 100755
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -239,7 +239,7 @@
dead_link = '<ref{}>{}</ref>'.format(self.name, tag)
return dead_link

- def transform(self, ispdf=False) -> None:
+ def transform(self, ispdf: bool = False) -> None:
"""Normalize the title."""
# convert html entities
if not ispdf:
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index 83d4bb0..41286ca 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -397,16 +397,13 @@
def __init__(
self,
page,
- primary=False,
+ primary: bool = False,
minimum: int = 0,
- main_only=False
+ main_only: bool = False
) -> None:
"""Initializer.

:type page: pywikibot.Page
- :type primary: bool
- :type minimum: int
- :type main_only: bool
"""
self.page = page
# if run with the -primary argument, enable the ignore manager
@@ -451,11 +448,10 @@

"""

- def __init__(self, disamb_page, enabled=False) -> None:
+ def __init__(self, disamb_page, enabled: bool = False) -> None:
"""Initializer.

:type disamb_page: pywikibot.Page
- :type enabled: bool
:rtype: None

"""
@@ -578,7 +574,7 @@

"""An option allowing multiple aliases which also select it."""

- def __init__(self, option, shortcuts, stop=True) -> None:
+ def __init__(self, option, shortcuts, stop: bool = True) -> None:
"""Initializer."""
super().__init__(option, shortcuts[0], stop=stop)
self._aliases = frozenset(s.lower() for s in shortcuts[1:])
@@ -1199,7 +1195,7 @@
page,
new_targets=None,
unlink_counter: int = 0,
- dn=False
+ dn: bool = False
) -> None:
"""Setup i18n summary message."""
new_targets = new_targets or []
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py
index 1daa12b..8f68746 100755
--- a/scripts/weblinkchecker.py
+++ b/scripts/weblinkchecker.py
@@ -228,7 +228,11 @@
return archive


-def weblinks_from_text(text, without_bracketed=False, only_bracketed=False):
+def weblinks_from_text(
+ text,
+ without_bracketed: bool = False,
+ only_bracketed: bool = False
+):
"""
Yield web links from text.

diff --git a/scripts/welcome.py b/scripts/welcome.py
index d317622..8390524 100755
--- a/scripts/welcome.py
+++ b/scripts/welcome.py
@@ -488,7 +488,7 @@
.format(self.site))
self.welcome_text = site_netext

- def bad_name_filer(self, name, force=False) -> bool:
+ def bad_name_filer(self, name, force: bool = False) -> bool:
"""Check for bad names."""
if not globalvar.filt_bad_name:
return False
@@ -726,7 +726,7 @@
.format(globalvar.time_recur, strfstr))
pywikibot.sleep(globalvar.time_recur)

- def define_sign(self, force=False) -> List[str]:
+ def define_sign(self, force: bool = False) -> List[str]:
"""Setup signature."""
if hasattr(self, '_random_signature') and not force:
return self._random_signature

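One nuance: not every False default became a plain bool. Where False is just one of several accepted values, a wider annotation is used instead: randompages() takes redirects: Optional[bool] = False because None is also meaningful, and protectedpages() takes level: Union[str, bool] = False because a protection-level string is accepted too. A minimal sketch of that distinction (placeholder function names, not the pywikibot API):

    from typing import Optional, Union

    # Tri-state flag: True (only redirects), False (no redirects),
    # None (both kinds); plain bool would be too narrow.
    def list_random(redirects: Optional[bool] = False):
        ...

    # False is a sentinel meaning 'all levels', next to strings
    # such as 'autoconfirmed'.
    def list_protected(level: Union[str, bool] = False):
        ...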

Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I71c8474594c273dcb22f5b6e6675d2d910350e5d
Gerrit-Change-Number: 767615
Gerrit-PatchSet: 3
Gerrit-Owner: JJMC89 <JJMC89.Wikimedia@gmail.com>
Gerrit-Reviewer: JJMC89 <JJMC89.Wikimedia@gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged