jenkins-bot submitted this change.

Approvals:
  jenkins-bot: Verified
  Xqt: Looks good to me, approved

typing improvements for generators

plus others that I came across while working on those

Change-Id: Icefc70424dd2e16ae1f79b88a2c2e6d8055ae2ce
---
M pywikibot/bot.py
M pywikibot/data/api/_generators.py
M pywikibot/page/__init__.py
M pywikibot/page/_basepage.py
M pywikibot/page/_category.py
M pywikibot/page/_links.py
M pywikibot/page/_user.py
M pywikibot/pagegenerators/__init__.py
M pywikibot/pagegenerators/_factory.py
M pywikibot/pagegenerators/_filters.py
M pywikibot/pagegenerators/_generators.py
M pywikibot/site/_datasite.py
M pywikibot/site/_generators.py
M pywikibot/site/_namespace.py
M scripts/harvest_template.py
M scripts/interwiki.py
M scripts/pagefromfile.py
M tests/pagegenerators_tests.py
M tests/site_generators_tests.py
19 files changed, 789 insertions(+), 573 deletions(-)
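
The recurring change in the diff below is tightening return annotations: generator functions (anything containing ``yield``) are annotated as ``Generator[YieldType, None, None]``, while functions that merely pass along an iterable keep the weaker ``Iterable[...]``. For orientation, a minimal sketch of that distinction (illustrative names, not the pywikibot API):

    from __future__ import annotations

    from collections.abc import Generator, Iterable


    def titles(pages: Iterable[str]) -> Generator[str, None, None]:
        """A function containing ``yield`` is a generator function.

        Generator[Y, S, R] records the yield, send and return types;
        generators that are only iterated use None for S and R.
        """
        for page in pages:
            yield page.strip()


    def titles_lazy(pages: Iterable[str]) -> Iterable[str]:
        """Returning someone else's iterable: the weaker type suffices."""
        return iter(pages)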

diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 59882ea..3121ebc 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -172,7 +172,9 @@


if TYPE_CHECKING:
- from pywikibot.page import Link, Page
+ from typing_extensions import Literal
+
+ from pywikibot.page import BaseLink, Link, Page
from pywikibot.site import BaseSite

AnswerType = Iterable[tuple[str, str] | Option] | Option
@@ -611,7 +613,7 @@
selected no. If the default is not None it'll return True if default
is True or 'y' and False if default is False or 'n'.
"""
- if default in (True, False):
+ if isinstance(default, bool):
default = 'ny'[default]

return input_choice(question, [('Yes', 'y'), ('No', 'n')],
@@ -666,7 +668,7 @@

def __init__(self,
old_link: Link | Page,
- new_link: Link | Page | bool,
+ new_link: Link | Page | Literal[False],
default: str | None = None,
automatic_quit: bool = True) -> None:
"""
@@ -687,7 +689,7 @@
else:
self._old = old_link
if isinstance(new_link, pywikibot.Page):
- self._new = new_link._link
+ self._new: BaseLink | Literal[False] = new_link._link
else:
self._new = new_link
self._default = default
@@ -1035,6 +1037,7 @@
pywikibot.stdout('Sorry, no help available for ' + module_name)
pywikibot.log('show_help:', exc_info=True)
else:
+ assert module.__doc__ is not None
help_text = re.sub(r'^\.\. version(added|changed)::.+', '',
module.__doc__, flags=re.MULTILINE | re.DOTALL)
if hasattr(module, 'docuReplacements'):
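
Two of the bot.py changes benefit from spelling out. ``isinstance(default, bool)`` narrows the static type of ``default``, which the membership test ``default in (True, False)`` does not, and ``Literal[False]`` documents that ``new_link`` is either a link-like object or exactly False. A minimal sketch of both idioms, assuming only typing_extensions (plain types stand in for the pywikibot classes):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from typing_extensions import Literal


    def default_char(default: bool | None) -> str | None:
        # isinstance() narrows bool | None to bool for the checker;
        # `default in (True, False)` would leave the type unchanged.
        if isinstance(default, bool):
            return 'ny'[default]  # bool indexes as 0 or 1
        return None


    def describe(new_link: str | Literal[False]) -> str:
        # Literal[False] is stricter than bool: True is rejected statically.
        return '(unlink)' if new_link is False else new_link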
diff --git a/pywikibot/data/api/_generators.py b/pywikibot/data/api/_generators.py
index 4b0d1f0..78df271 100644
--- a/pywikibot/data/api/_generators.py
+++ b/pywikibot/data/api/_generators.py
@@ -14,10 +14,12 @@

from abc import ABC, abstractmethod
from contextlib import suppress
+from typing import Any
from warnings import warn

import pywikibot
from pywikibot import config
+from pywikibot.backports import Callable
from pywikibot.exceptions import Error, InvalidTitleError, UnsupportedPageError
from pywikibot.site import Namespace
from pywikibot.tools import deprecated
@@ -110,7 +112,7 @@
self.query_increment = config.step
else:
self.query_increment = None
- self.limit = None
+ self.limit: int | None = None
self.starting_offset = kwargs['parameters'].pop(self.continue_name, 0)
self.request = self.request_class(**kwargs)
self.request[self.limit_name] = self.query_increment
@@ -210,11 +212,11 @@
# Used if the API module does not support multiple namespaces.
# Override in subclasses by defining a function that returns True if
# the result's namespace is in self._namespaces.
- _check_result_namespace = NotImplemented
+ _check_result_namespace: Callable[[Any], bool] = NotImplemented

# Set of allowed namespaces will be assigned to _namespaces during
# set_namespace call. Only to be used by _check_result_namespace.
- _namespaces = None
+ _namespaces: set[int] | bool | None = None

def __init__(self, **kwargs) -> None:
"""Initialize a QueryGenerator object.
@@ -295,7 +297,7 @@
if self.api_limit is not None and 'generator' in parameters:
self.prefix = 'g' + self.prefix

- self.limit = None
+ self.limit: int | None = None
self.query_limit = self.api_limit
if 'generator' in parameters:
# name of the "query" subelement key to look for when iterating
@@ -398,6 +400,7 @@

def _update_limit(self) -> None:
"""Set query limit for self.module based on api response."""
+ assert self.limited_module is not None
param = self.site._paraminfo.parameter('query+' + self.limited_module,
'limit')
if self.site.logged_in() and self.site.has_right('apihighlimits'):
@@ -709,7 +712,7 @@
self.resultkey = 'pages' # element to look for in result
self.props = self.request['prop']

- def result(self, pagedata):
+ def result(self, pagedata: dict[str, Any]) -> pywikibot.Page:
"""Convert page dict entry from api to Page object.

This can be overridden in subclasses to return a different type
@@ -904,7 +907,7 @@
revid = rev['revid']
revision = pywikibot.page.Revision(**rev)
# do not overwrite an existing Revision if there is no content
- if revid in page._revisions and revision.text is None:
+ if revid in page._revisions and revision.text is None: # type: ignore[attr-defined] # noqa: E501
pass
else:
page._revisions[revid] = revision
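
The api._generators changes mostly pin down attribute types at the point of first assignment, so that later reassignments are checked against the declared type. A minimal sketch of the pattern (an illustrative class, not the real QueryGenerator):

    from __future__ import annotations


    class QueryLike:
        def __init__(self) -> None:
            # Annotate at first assignment: `self.limit = 'all'` elsewhere
            # would now be flagged, while `self.limit = 500` stays fine.
            self.limit: int | None = None

        def set_maximum_items(self, value: int | str) -> None:
            self.limit = int(value)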
diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index f5191b5..81b95ee 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -56,6 +56,6 @@

PageSourceType = Union[
BaseLink,
+ BasePage,
_BaseSite,
- Page,
]
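
Swapping Page for BasePage in PageSourceType widens the accepted sources: every Page is a BasePage, so anything that previously type-checked still does, while other BasePage subclasses become valid too. A minimal sketch of that point (illustrative classes):

    from __future__ import annotations

    from typing import Union


    class BasePageLike: ...
    class PageLike(BasePageLike): ...
    class CategoryLike(BasePageLike): ...

    SourceType = Union[str, BasePageLike]


    def load(source: SourceType) -> str:
        return type(source).__name__


    load(PageLike())      # accepted before and after the widening
    load(CategoryLike())  # newly accepted via the base class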
diff --git a/pywikibot/page/_basepage.py b/pywikibot/page/_basepage.py
index f38eece..c327813 100644
--- a/pywikibot/page/_basepage.py
+++ b/pywikibot/page/_basepage.py
@@ -12,12 +12,13 @@
from contextlib import suppress
from itertools import islice
from textwrap import shorten, wrap
+from typing import TYPE_CHECKING
from urllib.parse import quote_from_bytes
from warnings import warn

import pywikibot
from pywikibot import Timestamp, config, date, i18n, textlib, tools
-from pywikibot.backports import Generator, Iterable, Iterator
+from pywikibot.backports import Generator, Iterable
from pywikibot.cosmetic_changes import CANCEL, CosmeticChangesToolkit
from pywikibot.exceptions import (
Error,
@@ -44,6 +45,12 @@
)


+if TYPE_CHECKING:
+ from typing_extensions import Literal
+
+ from pywikibot.page import Revision
+
+
PROTOCOL_REGEX = r'\Ahttps?://'

__all__ = ['BasePage']
@@ -104,6 +111,7 @@
if title is None:
raise ValueError('Title cannot be None.')

+ self._link: BaseLink
if isinstance(source, pywikibot.site.BaseSite):
self._link = Link(title, source=source, default_namespace=ns)
self._revisions = {}
@@ -374,7 +382,7 @@
if not get_redirect:
raise

- return self.latest_revision.text
+ return self.latest_revision.text # type: ignore[attr-defined]

def has_content(self) -> bool:
"""
@@ -516,7 +524,7 @@

:return: text of the page
"""
- if getattr(self, '_text', None) is not None:
+ if hasattr(self, '_text') and self._text is not None:
return self._text

try:
@@ -690,14 +698,14 @@

def userName(self) -> str:
"""Return name or IP address of last user to edit page."""
- return self.latest_revision.user
+ return self.latest_revision.user # type: ignore[attr-defined]

def isIpEdit(self) -> bool:
"""Return True if last editor was unregistered."""
- return self.latest_revision.anon
+ return self.latest_revision.anon # type: ignore[attr-defined]

@cached
- def lastNonBotUser(self) -> str:
+ def lastNonBotUser(self) -> str | None:
"""
Return name or IP address of last human/non-bot user to edit page.

@@ -722,7 +730,7 @@
Use :attr:`latest_revision.timestamp<latest_revision>`
instead.
"""
- return self.latest_revision.timestamp
+ return self.latest_revision.timestamp # type: ignore[attr-defined]

def exists(self) -> bool:
"""Return True if page exists on the wiki, even if it's a redirect.
@@ -775,7 +783,7 @@
return False

if not hasattr(self, '_catredirect'):
- self._catredirect = False
+ self._catredirect: str | Literal[False] = False
catredirs = self.site.category_redirects()
for template, args in self.templatesWithParams():
if template.title(with_ns=False) not in catredirs:
@@ -911,7 +919,7 @@
filter_redirects: bool = False,
namespaces=None,
total: int | None = None,
- content: bool = False):
+ content: bool = False) -> Iterable[pywikibot.Page]:
"""
Return an iterator all pages that refer to or embed the page.

@@ -929,7 +937,6 @@
:param total: iterate no more than this number of pages in total
:param content: if True, retrieve the content of the current version
of each referring page (default False)
- :rtype: typing.Iterable[pywikibot.Page]
"""
# N.B.: this method intentionally overlaps with backlinks() and
# embeddedin(). Depending on the interface, it may be more efficient
@@ -952,7 +959,7 @@
filter_redirects: bool | None = None,
namespaces=None,
total: int | None = None,
- content: bool = False):
+ content: bool = False) -> Iterable[pywikibot.Page]:
"""
Return an iterator for pages that link to this page.

@@ -978,7 +985,7 @@
filter_redirects: bool | None = None,
namespaces=None,
total: int | None = None,
- content: bool = False):
+ content: bool = False) -> Iterable[pywikibot.Page]:
"""
Return an iterator for pages that embed this page as a template.

@@ -1423,7 +1430,7 @@

def linkedPages(
self, *args, **kwargs
- ) -> Generator[pywikibot.Page, None, None]:
+ ) -> Generator[pywikibot.page.BasePage, None, None]:
"""Iterate Pages that this Page links to.

Only returns pages from "normal" internal links. Embedded
@@ -1467,15 +1474,17 @@

return self.site.pagelinks(self, **kwargs)

- def interwiki(self, expand: bool = True):
+ def interwiki(
+ self,
+ expand: bool = True,
+ ) -> Generator[pywikibot.page.Link, None, None]:
"""
- Iterate interwiki links in the page text, excluding language links.
+ Yield interwiki links in the page text, excluding language links.

:param expand: if True (default), include interwiki links found in
templates transcluded onto this page; if False, only iterate
interwiki links found in this page's own wikitext
:return: a generator that yields Link objects
- :rtype: generator
"""
# This function does not exist in the API, so it has to be
# implemented by screen-scraping
@@ -1500,7 +1509,10 @@
# ignore any links with invalid contents
continue

- def langlinks(self, include_obsolete: bool = False) -> list:
+ def langlinks(
+ self,
+ include_obsolete: bool = False,
+ ) -> list[pywikibot.Link]:
"""
Return a list of all inter-language Links on this page.

@@ -1519,16 +1531,17 @@
return list(self._langlinks)
return [i for i in self._langlinks if not i.site.obsolete]

- def iterlanglinks(self,
- total: int | None = None,
- include_obsolete: bool = False):
+ def iterlanglinks(
+ self,
+ total: int | None = None,
+ include_obsolete: bool = False,
+ ) -> Iterable[pywikibot.Link]:
"""Iterate all inter-language links on this page.

:param total: iterate no more than this number of pages in total
:param include_obsolete: if true, yield even Link object whose site
is obsolete
:return: a generator that yields Link objects.
- :rtype: generator
"""
if hasattr(self, '_langlinks'):
return iter(self.langlinks(include_obsolete=include_obsolete))
@@ -1539,12 +1552,8 @@
return self.site.pagelanglinks(self, total=total,
include_obsolete=include_obsolete)

- def data_item(self):
- """
- Convenience function to get the Wikibase item of a page.
-
- :rtype: pywikibot.page.ItemPage
- """
+ def data_item(self) -> pywikibot.page.ItemPage:
+ """Convenience function to get the Wikibase item of a page."""
return pywikibot.ItemPage.fromPage(self)

def templates(self, content: bool = False) -> list[pywikibot.Page]:
@@ -1571,9 +1580,11 @@

return list(self._templates)

- def itertemplates(self,
- total: int | None = None,
- content: bool = False):
+ def itertemplates(
+ self,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""
Iterate Page objects for templates used on this Page.

@@ -1591,7 +1602,11 @@

return self.site.pagetemplates(self, total=total, content=content)

- def imagelinks(self, total: int | None = None, content: bool = False):
+ def imagelinks(
+ self,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.FilePage]:
"""
Iterate FilePage objects for images displayed on this Page.

@@ -1602,10 +1617,12 @@
"""
return self.site.pageimages(self, total=total, content=content)

- def categories(self,
- with_sort_key: bool = False,
- total: int | None = None,
- content: bool = False) -> Iterator[pywikibot.Page]:
+ def categories(
+ self,
+ with_sort_key: bool = False,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""
Iterate categories that the article is in.

@@ -1631,13 +1648,12 @@

return self.site.pagecategories(self, total=total, content=content)

- def extlinks(self, total: int | None = None):
+ def extlinks(self, total: int | None = None) -> Iterable[str]:
"""
Iterate all external URLs (not interwiki links) from this page.

:param total: iterate no more than this number of pages in total
:return: a generator that yields str objects containing URLs.
- :rtype: generator
"""
return self.site.page_extlinks(self, total=total)

@@ -1717,7 +1733,7 @@
starttime=starttime, endtime=endtime,
total=total)

- revs = self._revisions.values()
+ revs: Iterable[Revision] = self._revisions.values()

if starttime or endtime:
t_min, t_max = Timestamp.min, Timestamp.max
@@ -1729,9 +1745,9 @@
t0 = Timestamp.set_timestamp(endtime) if endtime else t_min
t1 = Timestamp.set_timestamp(starttime) if starttime else t_max

- revs = [rev for rev in revs if t0 <= rev.timestamp <= t1]
+ revs = [rev for rev in revs if t0 <= rev.timestamp <= t1] # type: ignore[attr-defined] # noqa: E501

- revs = sorted(revs, reverse=not reverse, key=lambda rev: rev.timestamp)
+ revs = sorted(revs, reverse=not reverse, key=lambda rev: rev.timestamp) # type: ignore[attr-defined] # noqa: E501

return islice(revs, total)

@@ -1810,7 +1826,7 @@
reason: str | None = None,
movetalk: bool = True,
noredirect: bool = False,
- movesubpages: bool = True) -> None:
+ movesubpages: bool = True) -> pywikibot.page.Page:
"""
Move this page to a new title.

@@ -1901,7 +1917,9 @@
# We can't add templates in a wikidata item, so let's use its
# talk page
if isinstance(self, pywikibot.ItemPage):
- target = self.toggleTalkPage()
+ trgt = self.toggleTalkPage()
+ assert trgt is not None
+ target: BasePage = trgt
else:
target = self
target.text = template + target.text
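
Two idioms recur in the _basepage.py hunks: ``assert x is not None`` to narrow an Optional before use (``module.__doc__`` in show_help, the ``toggleTalkPage()`` result), and narrowly scoped ``# type: ignore[attr-defined]`` comments where Revision exposes its attributes dynamically. A minimal sketch of the narrowing idiom:

    from __future__ import annotations


    def require_text(doc: str | None) -> str:
        # After the assert the checker treats `doc` as plain str,
        # so no cast or ignore comment is needed downstream.
        assert doc is not None
        return doc.strip()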
diff --git a/pywikibot/page/_category.py b/pywikibot/page/_category.py
index 01c8b2a..9bda6cd 100644
--- a/pywikibot/page/_category.py
+++ b/pywikibot/page/_category.py
@@ -10,7 +10,7 @@
from typing import Any

import pywikibot
-from pywikibot.backports import Generator, Iterable
+from pywikibot.backports import Generator
from pywikibot.page._page import Page


@@ -62,8 +62,8 @@

def subcategories(self, *,
recurse: int | bool = False,
- **kwargs: Any) -> Iterable[Page]:
- """Iterate all subcategories of the current category.
+ **kwargs: Any) -> Generator[Page, None, None]:
+ """Yield all subcategories of the current category.

**Usage:**

@@ -137,7 +137,7 @@
def articles(self, *,
recurse: int | bool = False,
total: int | None = None,
- **kwargs: Any) -> Iterable[Page]:
+ **kwargs: Any) -> Generator[Page, None, None]:
"""
Yield all articles in the current category.

@@ -204,7 +204,7 @@
def members(self, *,
recurse: bool = False,
total: int | None = None,
- **kwargs: Any) -> Iterable[Page]:
+ **kwargs: Any) -> Generator[Page, None, None]:
"""Yield all category contents (subcats, pages, and files).

**Usage:**
diff --git a/pywikibot/page/_links.py b/pywikibot/page/_links.py
index a9a8601..4373132 100644
--- a/pywikibot/page/_links.py
+++ b/pywikibot/page/_links.py
@@ -132,7 +132,7 @@
self._namespace = self.lookup_namespace()
return self._namespace

- def canonical_title(self):
+ def canonical_title(self) -> str:
"""Return full page title, including localized namespace."""
# Avoid that ':' will be added to the title for Main ns.
if self.namespace != Namespace.MAIN:
diff --git a/pywikibot/page/_user.py b/pywikibot/page/_user.py
index 0df8acc..4dd9aa4 100644
--- a/pywikibot/page/_user.py
+++ b/pywikibot/page/_user.py
@@ -7,7 +7,7 @@
from __future__ import annotations

import pywikibot
-from pywikibot.backports import Iterable
+from pywikibot.backports import Generator
from pywikibot.exceptions import (
APIError,
AutoblockUserError,
@@ -324,9 +324,13 @@

def contributions(
self,
- total: int = 500,
+ total: int | None = 500,
**kwargs
- ) -> tuple[Page, int, pywikibot.Timestamp, str | None]:
+ ) -> Generator[
+ tuple[Page, int, pywikibot.Timestamp, str | None],
+ None,
+ None
+ ]:
"""Yield tuples describing this user edits.

Each tuple is composed of a pywikibot.Page object, the revision
@@ -377,7 +381,7 @@
@property
def first_edit(
self
- ) -> tuple[Page, int, pywikibot.Timestamp, str] | None:
+ ) -> tuple[Page, int, pywikibot.Timestamp, str | None] | None:
"""Return first user contribution.

:return: first user contribution entry
@@ -388,7 +392,7 @@
@property
def last_edit(
self
- ) -> tuple[Page, int, pywikibot.Timestamp, str] | None:
+ ) -> tuple[Page, int, pywikibot.Timestamp, str | None] | None:
"""Return last user contribution.

:return: last user contribution entry
@@ -397,8 +401,11 @@
return next(self.contributions(total=1), None)

def deleted_contributions(
- self, *, total: int = 500, **kwargs
- ) -> Iterable[tuple[Page, Revision]]:
+ self,
+ *,
+ total: int | None = 500,
+ **kwargs,
+ ) -> Generator[tuple[Page, Revision], None, None]:
"""Yield tuples describing this user's deleted edits.

.. versionadded:: 5.5
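
contributions() and deleted_contributions() now advertise what they actually are: generator functions yielding tuples, with ``total`` allowed to be None for "no limit". A minimal sketch of that shape (plain types standing in for Page and Timestamp):

    from __future__ import annotations

    from collections.abc import Generator
    from itertools import islice


    def contributions_like(
        total: int | None = 500,
    ) -> Generator[tuple[str, int, str | None], None, None]:
        # yields (title, revid, comment) triples; comment may be absent,
        # and islice treats total=None as "yield everything"
        edits = [('Main Page', 42, None), ('Sandbox', 43, 'test edit')]
        yield from islice(edits, total)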
diff --git a/pywikibot/pagegenerators/__init__.py b/pywikibot/pagegenerators/__init__.py
index eca2326..477c3fa 100644
--- a/pywikibot/pagegenerators/__init__.py
+++ b/pywikibot/pagegenerators/__init__.py
@@ -22,7 +22,7 @@
from typing import TYPE_CHECKING, Any

import pywikibot
-from pywikibot.backports import Callable, Iterable, Iterator
+from pywikibot.backports import Callable, Generator, Iterable
from pywikibot.pagegenerators._factory import GeneratorFactory
from pywikibot.pagegenerators._filters import (
CategoryFilterPageGenerator,
@@ -544,7 +544,7 @@


def PageClassGenerator(generator: Iterable[pywikibot.page.Page]
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Yield pages from another generator as Page subclass objects.

@@ -562,9 +562,10 @@
yield page


-def PageWithTalkPageGenerator(generator: Iterable[pywikibot.page.Page],
- return_talk_only: bool = False
- ) -> Iterator[pywikibot.page.Page]:
+def PageWithTalkPageGenerator(
+ generator: Iterable[pywikibot.page.BasePage],
+ return_talk_only: bool = False,
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""Yield pages and associated talk pages from another generator.

Only yields talk pages if the original generator yields a non-talk page,
@@ -578,11 +579,13 @@
yield page.toggleTalkPage()


-def RepeatingGenerator(generator: Callable, # type: ignore[type-arg]
- key_func: Callable[[Any], Any] = lambda x: x,
- sleep_duration: int = 60,
- total: int | None = None,
- **kwargs: Any) -> Iterator[pywikibot.page.Page]:
+def RepeatingGenerator(
+ generator: Callable[..., Iterable[pywikibot.page.BasePage]],
+ key_func: Callable[[pywikibot.page.BasePage], Any] = lambda x: x,
+ sleep_duration: int = 60,
+ total: int | None = None,
+ **kwargs: Any,
+) -> Generator[pywikibot.page.Page, None, None]:
"""Yield items in live time.

The provided generator must support parameter 'start', 'end',
@@ -615,7 +618,8 @@

seen: set[Any] = set()
while total is None or len(seen) < total:
- def filtered_generator() -> Iterable[pywikibot.page.Page]:
+ def filtered_generator() -> Generator[pywikibot.page.BasePage,
+ None, None]:
for item in generator(total=None if seen else 1, **kwargs):
key = key_func(item)
if key not in seen:
@@ -633,7 +637,7 @@
def PreloadingGenerator(generator: Iterable[pywikibot.page.Page],
groupsize: int = 50,
quiet: bool = False
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""Yield preloaded pages taken from another generator.

:param generator: pages to iterate over
@@ -661,10 +665,11 @@
yield from site.preloadpages(pages, groupsize=groupsize, quiet=quiet)


-def DequePreloadingGenerator(generator: Iterable[pywikibot.page.Page],
- groupsize: int = 50,
- quiet: bool = False
- ) -> Iterator[pywikibot.page.Page]:
+def DequePreloadingGenerator(
+ generator: DequeGenerator,
+ groupsize: int = 50,
+ quiet: bool = False,
+) -> Generator[pywikibot.page.Page, None, None]:
"""Preload generator of type DequeGenerator.

:param generator: pages to iterate over
@@ -683,9 +688,10 @@
yield from PreloadingGenerator(generator, page_count, quiet)


-def PreloadingEntityGenerator(generator: Iterable[pywikibot.page.Page],
- groupsize: int = 50
- ) -> Iterator[pywikibot.page.Page]:
+def PreloadingEntityGenerator(
+ generator: Iterable[pywikibot.page.WikibaseEntity],
+ groupsize: int = 50,
+) -> Generator[pywikibot.page.WikibaseEntity, None, None]:
"""
Yield preloaded pages taken from another generator.

@@ -694,7 +700,8 @@
:param generator: pages to iterate over
:param groupsize: how many pages to preload at once
"""
- sites: PRELOAD_SITE_TYPE = {}
+ sites: dict[pywikibot.site.BaseSite,
+ list[pywikibot.page.WikibaseEntity]] = {}
for page in generator:
site = page.site
sites.setdefault(site, []).append(page)
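
PreloadingEntityGenerator shows why empty containers get explicit annotations: a bare ``sites = {}`` gives the checker nothing to work with, while spelling out the key and value types lets the setdefault/append calls be verified. A minimal sketch of the grouping pattern:

    from __future__ import annotations

    from collections.abc import Iterable


    def group_by_first_letter(titles: Iterable[str]) -> dict[str, list[str]]:
        # the annotation is needed: {} alone is untyped
        groups: dict[str, list[str]] = {}
        for title in titles:
            groups.setdefault(title[:1].upper(), []).append(title)
        return groups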
diff --git a/pywikibot/pagegenerators/_factory.py b/pywikibot/pagegenerators/_factory.py
index 4de0b08..2616ee0 100644
--- a/pywikibot/pagegenerators/_factory.py
+++ b/pywikibot/pagegenerators/_factory.py
@@ -16,13 +16,7 @@

import pywikibot
from pywikibot import i18n
-from pywikibot.backports import (
- Callable,
- Iterable,
- Iterator,
- Sequence,
- removeprefix,
-)
+from pywikibot.backports import Callable, Iterable, Sequence, removeprefix
from pywikibot.bot import ShowingListOption
from pywikibot.data import api
from pywikibot.exceptions import UnknownExtensionError
@@ -64,11 +58,13 @@


if TYPE_CHECKING:
+ from typing_extensions import Literal
+
from pywikibot.site import BaseSite, Namespace

- HANDLER_RETURN_TYPE = bool, Iterable[pywikibot.page.BasePage] | None
+ HANDLER_GEN_TYPE = Iterable[pywikibot.page.BasePage]
GEN_FACTORY_CLAIM_TYPE = list[tuple[str, str, dict[str, str], bool]]
- OPT_GENERATOR_TYPE = Iterable[pywikibot.page.Page] | None
+ OPT_GENERATOR_TYPE = HANDLER_GEN_TYPE | None


# This is the function that will be used to de-duplicate page iterators.
@@ -102,7 +98,7 @@
:param disabled_options: disable these given options and let them
be handled by scripts options handler
"""
- self.gens: list[Iterable[pywikibot.page.Page]] = []
+ self.gens: list[Iterable[pywikibot.page.BasePage]] = []
self._namespaces: list[str] | frozenset[Namespace] = []
self.limit: int | None = None
self.qualityfilter_list: list[int] = []
@@ -369,11 +365,12 @@
content=content)

@staticmethod
- def _parse_log_events(logtype: str,
- user: str | None = None,
- start: str | None = None,
- end: str | None = None
- ) -> Iterator[pywikibot.page.Page] | None:
+ def _parse_log_events(
+ logtype: str,
+ user: str | None = None,
+ start: str | None = None,
+ end: str | None = None,
+ ) -> Iterable[pywikibot.page.BasePage] | None:
"""
Parse the -logevent argument information.

@@ -390,7 +387,7 @@
:return: The generator or None if invalid 'start/total' or 'end' value.
"""
def parse_start(start: str | None
- ) -> tuple[str | None, int | None]:
+ ) -> tuple[pywikibot.Timestamp | None, int | None]:
"""Parse start and return (start, total)."""
if start is None:
return None, None
@@ -402,7 +399,7 @@

start = start or None # because start might be an empty string
try:
- start, total = parse_start(start)
+ start_, total = parse_start(start)
assert total is None or total > 0
except ValueError as err:
pywikibot.error(
@@ -413,24 +410,27 @@
'positive int.'.format(start))
return None

- if end is not None:
+ if end is None:
+ end_ = None
+ else:
try:
- end = pywikibot.Timestamp.fromtimestampformat(end)
+ end_ = pywikibot.Timestamp.fromtimestampformat(end)
except ValueError as err:
pywikibot.error(
f'{err}. End parameter has wrong format!')
return None

- if start or end:
- pywikibot.info('Fetching log events in range: {} - {}.'
- .format(end or 'beginning of time', start or 'now'))
+ if start_ or end_:
+ pywikibot.info(
+ 'Fetching log events in range: {} - {}.'
+ .format(end or 'beginning of time', start or 'now'))

# 'user or None', because user might be an empty string when
# 'foo,,bar' was used.
return LogeventsPageGenerator(logtype, user or None, total=total,
- start=start, end=end)
+ start=start_, end=end_)

- def _handle_filelinks(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_filelinks(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-filelinks` argument."""
if not value:
value = i18n.input('pywikibot-enter-file-links-processing')
@@ -439,7 +439,7 @@
file_page = pywikibot.FilePage(self.site, value)
return file_page.using_pages()

- def _handle_linter(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_linter(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-linter` argument."""
if not self.site.has_extension('Linter'):
raise UnknownExtensionError(
@@ -481,7 +481,7 @@
lint_categories='|'.join(lint_cats), namespaces=self.namespaces,
lint_from=lint_from)

- def _handle_querypage(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_querypage(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-querypage` argument."""
if value is None: # Display special pages.
pages = self.site._paraminfo.parameter('query+querypage',
@@ -501,41 +501,41 @@

return self.site.querypage(value)

- def _handle_url(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_url(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-url` argument."""
if not value:
value = pywikibot.input('Please enter the URL:')
return TextIOPageGenerator(value, site=self.site)

- def _handle_unusedfiles(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_unusedfiles(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-unusedfiles` argument."""
return self.site.unusedfiles(total=_int_none(value))

- def _handle_lonelypages(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_lonelypages(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-lonelypages` argument."""
return self.site.lonelypages(total=_int_none(value))

- def _handle_unwatched(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_unwatched(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-unwatched` argument."""
return self.site.unwatchedpage(total=_int_none(value))

- def _handle_wantedpages(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_wantedpages(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-wantedpages` argument."""
return self.site.wantedpages(total=_int_none(value))

- def _handle_wantedfiles(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_wantedfiles(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-wantedfiles` argument."""
return self.site.wantedfiles(total=_int_none(value))

- def _handle_wantedtemplates(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_wantedtemplates(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-wantedtemplates` argument."""
return self.site.wantedtemplates(total=_int_none(value))

- def _handle_wantedcategories(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_wantedcategories(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-wantedcategories` argument."""
return self.site.wantedcategories(total=_int_none(value))

- def _handle_property(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_property(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-property` argument."""
if not value:
question = 'Which property name to be used?'
@@ -547,24 +547,24 @@
ShowingListOption(pnames))
return self.site.pages_with_property(value)

- def _handle_usercontribs(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_usercontribs(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-usercontribs` argument."""
self._single_gen_filter_unique = True
return UserContributionsGenerator(
value, site=self.site, _filter_unique=None)

- def _handle_withoutinterwiki(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_withoutinterwiki(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-withoutinterwiki` argument."""
return self.site.withoutinterwiki(total=_int_none(value))

- def _handle_interwiki(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_interwiki(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-interwiki` argument."""
if not value:
value = i18n.input('pywikibot-enter-page-processing')
page = pywikibot.Page(pywikibot.Link(value, self.site))
return LanguageLinksPageGenerator(page)

- def _handle_randomredirect(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_randomredirect(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-randomredirect` argument."""
# partial workaround for bug T119940
# to use -namespace/ns with -randomredirect, -ns must be given
@@ -574,7 +574,7 @@
return self.site.randompages(total=_int_none(value),
namespaces=namespaces, redirects=True)

- def _handle_random(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_random(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-random` argument."""
# partial workaround for bug T119940
# to use -namespace/ns with -random, -ns must be given
@@ -584,7 +584,7 @@
return self.site.randompages(total=_int_none(value),
namespaces=namespaces)

- def _handle_recentchanges(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_recentchanges(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-recentchanges` argument."""
rcstart = None
rcend = None
@@ -610,18 +610,18 @@
namespaces=self.namespaces, total=total, start=rcstart, end=rcend,
site=self.site, tag=rctag)

- def _handle_liverecentchanges(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_liverecentchanges(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-liverecentchanges` argument."""
self.nopreload = True
return LiveRCPageGenerator(site=self.site, total=_int_none(value))

- def _handle_file(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_file(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-file` argument."""
if not value:
value = pywikibot.input('Please enter the local file name:')
return TextIOPageGenerator(value, site=self.site)

- def _handle_namespaces(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_namespaces(self, value: str) -> Literal[True]:
"""Handle `-namespaces` argument."""
if isinstance(self._namespaces, frozenset):
raise RuntimeError('-namespace/ns option must be provided before '
@@ -646,66 +646,66 @@
_handle_ns = _handle_namespaces
_handle_namespace = _handle_namespaces

- def _handle_limit(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_limit(self, value: str) -> Literal[True]:
"""Handle `-limit` argument."""
if not value:
value = pywikibot.input('What is the limit value?')
self.limit = _int_none(value)
return True

- def _handle_category(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_category(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-category` argument."""
return self.getCategoryGen(
value, recurse=False, gen_func=CategorizedPageGenerator)

_handle_cat = _handle_category

- def _handle_catr(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_catr(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-catr` argument."""
return self.getCategoryGen(
value, recurse=True, gen_func=CategorizedPageGenerator)

- def _handle_subcats(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_subcats(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-subcats` argument."""
return self.getCategoryGen(
value, recurse=False, gen_func=SubCategoriesPageGenerator)

- def _handle_subcatsr(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_subcatsr(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-subcatsr` argument."""
return self.getCategoryGen(
value, recurse=True, gen_func=SubCategoriesPageGenerator)

- def _handle_catfilter(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_catfilter(self, value: str) -> Literal[True]:
"""Handle `-catfilter` argument."""
cat, _ = self.getCategory(value)
self.catfilter_list.append(cat)
return True

- def _handle_page(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_page(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-page` argument."""
if not value:
value = pywikibot.input('What page do you want to use?')
return [pywikibot.Page(pywikibot.Link(value, self.site))]

- def _handle_pageid(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_pageid(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-pageid` argument."""
if not value:
value = pywikibot.input('What pageid do you want to use?')
return self.site.load_pages_from_pageids(value)

- def _handle_uncatfiles(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_uncatfiles(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-uncatfiles` argument."""
return self.site.uncategorizedimages()

- def _handle_uncatcat(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_uncatcat(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-uncatcat` argument."""
return self.site.uncategorizedcategories()

- def _handle_uncat(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_uncat(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-uncat` argument."""
return self.site.uncategorizedpages()

- def _handle_ref(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_ref(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-ref` argument."""
if not value:
value = pywikibot.input(
@@ -713,7 +713,7 @@
page = pywikibot.Page(pywikibot.Link(value, self.site))
return page.getReferences()

- def _handle_links(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_links(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-links` argument."""
if not value:
value = pywikibot.input(
@@ -721,14 +721,14 @@
page = pywikibot.Page(pywikibot.Link(value, self.site))
return page.linkedPages()

- def _handle_weblink(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_weblink(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-weblink` argument."""
if not value:
value = pywikibot.input(
'Pages with which weblink should be processed?')
return self.site.exturlusage(value)

- def _handle_transcludes(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_transcludes(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-transcludes` argument."""
if not value:
value = pywikibot.input(
@@ -738,7 +738,7 @@
source=self.site))
return page.getReferences(only_template_inclusion=True)

- def _handle_start(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_start(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-start` argument."""
if not value:
value = '!'
@@ -747,17 +747,17 @@
start=firstpagelink.title, namespace=firstpagelink.namespace,
filterredir=False)

- def _handle_prefixindex(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_prefixindex(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-prefixindex` argument."""
if not value:
value = pywikibot.input('What page names are you looking for?')
return PrefixingPageGenerator(prefix=value, site=self.site)

- def _handle_newimages(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_newimages(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-newimages` argument."""
return NewimagesPageGenerator(total=_int_none(value), site=self.site)

- def _handle_newpages(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_newpages(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-newpages` argument."""
# partial workaround for bug T69249
# to use -namespace/ns with -newpages, -ns must be given
@@ -767,11 +767,14 @@
return NewpagesPageGenerator(
namespaces=namespaces, total=_int_none(value), site=self.site)

- def _handle_unconnectedpages(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_unconnectedpages(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-unconnectedpages` argument."""
return self.site.unconnected_pages(total=_int_none(value))

- def _handle_imagesused(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_imagesused(
+ self,
+ value: str,
+ ) -> Iterable[pywikibot.FilePage]:
"""Handle `-imagesused` argument."""
if not value:
value = pywikibot.input(
@@ -779,7 +782,7 @@
page = pywikibot.Page(pywikibot.Link(value, self.site))
return page.imagelinks()

- def _handle_searchitem(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_searchitem(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-searchitem` argument."""
if not value:
value = pywikibot.input('Text to look for:')
@@ -789,7 +792,7 @@
return WikibaseSearchItemPageGenerator(
value, language=lang, site=self.site)

- def _handle_search(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_search(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-search` argument."""
if not value:
value = pywikibot.input('What do you want to search for?')
@@ -797,11 +800,11 @@
return self.site.search(value, namespaces=[])

@staticmethod
- def _handle_google(value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_google(value: str) -> HANDLER_GEN_TYPE:
"""Handle `-google` argument."""
return GoogleSearchPageGenerator(value)

- def _handle_titleregex(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_titleregex(self, value: str) -> Literal[True]:
"""Handle `-titleregex` argument."""
if not value:
value = pywikibot.input(
@@ -809,7 +812,7 @@
self.titlefilter_list.append(value)
return True

- def _handle_titleregexnot(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_titleregexnot(self, value: str) -> Literal[True]:
"""Handle `-titleregexnot` argument."""
if not value:
value = pywikibot.input(
@@ -817,21 +820,21 @@
self.titlenotfilter_list.append(value)
return True

- def _handle_grep(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_grep(self, value: str) -> Literal[True]:
"""Handle `-grep` argument."""
if not value:
value = pywikibot.input('Which pattern do you want to grep?')
self.articlefilter_list.append(value)
return True

- def _handle_grepnot(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_grepnot(self, value: str) -> Literal[True]:
"""Handle `-grepnot` argument."""
if not value:
value = pywikibot.input('Which pattern do you want to skip?')
self.articlenotfilter_list.append(value)
return True

- def _handle_ql(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_ql(self, value: str) -> Literal[True]:
"""Handle `-ql` argument."""
if not self.site.has_extension('ProofreadPage'):
raise UnknownExtensionError(
@@ -847,16 +850,16 @@
self.qualityfilter_list = int_values
return True

- def _handle_onlyif(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_onlyif(self, value: str) -> Literal[True]:
"""Handle `-onlyif` argument."""
return self._onlyif_onlyifnot_handler(value, False)

- def _handle_onlyifnot(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_onlyifnot(self, value: str) -> Literal[True]:
"""Handle `-onlyifnot` argument."""
return self._onlyif_onlyifnot_handler(value, True)

def _onlyif_onlyifnot_handler(self, value: str, ifnot: bool
- ) -> HANDLER_RETURN_TYPE:
+ ) -> Literal[True]:
"""Handle `-onlyif` and `-onlyifnot` arguments."""
if not value:
value = pywikibot.input('Which claim do you want to filter?')
@@ -869,32 +872,32 @@
(temp[0][0], temp[0][1], dict(temp[1:]), ifnot))
return True

- def _handle_sparqlendpoint(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_sparqlendpoint(self, value: str) -> Literal[True]:
"""Handle `-sparqlendpoint` argument."""
if not value:
value = pywikibot.input('SPARQL endpoint:')
self._sparql = value
return True

- def _handle_sparql(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_sparql(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-sparql` argument."""
if not value:
value = pywikibot.input('SPARQL query:')
return WikidataSPARQLPageGenerator(
value, site=self.site, endpoint=self._sparql)

- def _handle_mysqlquery(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_mysqlquery(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-mysqlquery` argument."""
if not value:
value = pywikibot.input('Mysql query string:')
return MySQLPageGenerator(value, site=self.site)

- def _handle_intersect(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_intersect(self, value: str) -> Literal[True]:
"""Handle `-intersect` argument."""
self.intersect = True
return True

- def _handle_subpage(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_subpage(self, value: str) -> Literal[True]:
"""Handle `-subpage` argument."""
if not value:
value = pywikibot.input(
@@ -902,7 +905,7 @@
self.subpage_max_depth = int(value)
return True

- def _handle_logevents(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_logevents(self, value: str) -> HANDLER_GEN_TYPE | None:
"""Handle `-logevents` argument."""
params = value.split(',')
if params[0] not in self.site.logtypes:
@@ -910,7 +913,7 @@
f'Invalid -logevents parameter "{params[0]}"')
return self._parse_log_events(*params)

- def _handle_redirect(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_redirect(self, value: str) -> Literal[True]:
"""Handle `-redirect` argument.

.. versionadded:: 8.5
@@ -921,7 +924,7 @@
self.redirectfilter = strtobool(value)
return True

- def _handle_pagepile(self, value: str) -> HANDLER_RETURN_TYPE:
+ def _handle_pagepile(self, value: str) -> HANDLER_GEN_TYPE:
"""Handle `-pagepile` argument.

.. versionadded:: 9.0
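
The factory handlers previously shared one fuzzy HANDLER_RETURN_TYPE; the diff splits them into handlers that produce a page iterable (HANDLER_GEN_TYPE) and handlers that only record state and return ``Literal[True]``. A minimal sketch of the split, assuming typing_extensions (an illustrative class, not the real GeneratorFactory):

    from __future__ import annotations

    from collections.abc import Iterable
    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from typing_extensions import Literal


    class FactoryLike:
        def __init__(self) -> None:
            self.intersect = False

        def _handle_intersect(self, value: str) -> Literal[True]:
            # state-only option: no pages are produced here
            self.intersect = True
            return True

        def _handle_page(self, value: str) -> Iterable[str]:
            # generator-producing option: yields page titles to process
            return [value or 'Main Page']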
diff --git a/pywikibot/pagegenerators/_filters.py b/pywikibot/pagegenerators/_filters.py
index f85496d..5ee3a71 100644
--- a/pywikibot/pagegenerators/_filters.py
+++ b/pywikibot/pagegenerators/_filters.py
@@ -13,7 +13,7 @@

import pywikibot
from pywikibot import config
-from pywikibot.backports import Iterable, Iterator, Pattern, Sequence
+from pywikibot.backports import Generator, Iterable, Pattern, Sequence
from pywikibot.exceptions import NoPageError
from pywikibot.proofreadpage import ProofreadPage
from pywikibot.tools.itertools import filter_unique
@@ -23,7 +23,7 @@
from pywikibot.site import BaseSite, Namespace

PRELOAD_SITE_TYPE = dict[pywikibot.site.BaseSite,
- list[pywikibot.page.Page]]
+ list[pywikibot.page.BasePage]]
PATTERN_STR_OR_SEQ_TYPE = (
str
| Pattern[str]
@@ -43,13 +43,13 @@


def NamespaceFilterPageGenerator(
- generator: Iterable[pywikibot.page.Page],
+ generator: Iterable[pywikibot.page.BasePage],
namespaces: frozenset[Namespace]
| str
| Namespace
| Sequence[str | Namespace],
site: BaseSite | None = None,
-) -> Iterator[pywikibot.page.Page]:
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""
A generator yielding pages from another generator in given namespaces.

@@ -83,9 +83,10 @@
return (page for page in generator if page.namespace() in namespaces)


-def PageTitleFilterPageGenerator(generator: Iterable[pywikibot.page.Page],
- ignore_list: dict[str, dict[str, str]]
- ) -> Iterator[pywikibot.page.Page]:
+def PageTitleFilterPageGenerator(
+ generator: Iterable[pywikibot.page.BasePage],
+ ignore_list: dict[str, dict[str, str]],
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""
Yield only those pages are not listed in the ignore list.

@@ -93,7 +94,7 @@
language codes are mapped to lists of page titles. Each title must
be a valid regex as they are compared using :py:obj:`re.search`.
"""
- def is_ignored(page: pywikibot.page.Page) -> bool:
+ def is_ignored(page: pywikibot.page.BasePage) -> bool:
try:
site_ig_list = ignore_list[page.site.family.name][page.site.code]
except KeyError:
@@ -109,10 +110,11 @@
pywikibot.info(f'Ignoring page {page.title()}')


-def RedirectFilterPageGenerator(generator: Iterable[pywikibot.page.Page],
- no_redirects: bool = True,
- show_filtered: bool = False
- ) -> Iterator[pywikibot.page.Page]:
+def RedirectFilterPageGenerator(
+ generator: Iterable[pywikibot.page.BasePage],
+ no_redirects: bool = True,
+ show_filtered: bool = False,
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""
Yield pages from another generator that are redirects or not.

@@ -144,7 +146,7 @@

@classmethod
def __filter_match(cls,
- page: pywikibot.page.BasePage,
+ page: pywikibot.page.WikibasePage,
prop: str,
claim: str,
qualifiers: dict[str, str]) -> bool:
@@ -178,12 +180,14 @@
for p_cl in page_claims)

@classmethod
- def filter(cls,
- generator: Iterable[pywikibot.page.Page],
- prop: str,
- claim: str,
- qualifiers: dict[str, str] | None = None,
- negate: bool = False) -> Iterator[pywikibot.page.Page]:
+ def filter(
+ cls,
+ generator: Iterable[pywikibot.page.WikibasePage],
+ prop: str,
+ claim: str,
+ qualifiers: dict[str, str] | None = None,
+ negate: bool = False,
+ ) -> Generator[pywikibot.page.WikibasePage, None, None]:
"""
Yield all ItemPages which contain certain claim in a property.

@@ -205,10 +209,10 @@
ItemClaimFilterPageGenerator = ItemClaimFilter.filter


-def SubpageFilterGenerator(generator: Iterable[pywikibot.page.Page],
+def SubpageFilterGenerator(generator: Iterable[pywikibot.page.BasePage],
max_depth: int = 0,
show_filtered: bool = False
- ) -> Iterable[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.BasePage, None, None]:
"""
Generator which filters out subpages based on depth.

@@ -266,11 +270,11 @@

@classmethod
def titlefilter(cls,
- generator: Iterable[pywikibot.page.Page],
+ generator: Iterable[pywikibot.page.BasePage],
regex: PATTERN_STR_OR_SEQ_TYPE,
quantifier: str = 'any',
ignore_namespace: bool = True
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.BasePage, None, None]:
"""Yield pages from another generator whose title matches regex.

Uses regex option re.IGNORECASE depending on the quantifier parameter.
@@ -302,10 +306,10 @@

@classmethod
def contentfilter(cls,
- generator: Iterable[pywikibot.page.Page],
+ generator: Iterable[pywikibot.page.BasePage],
regex: PATTERN_STR_OR_SEQ_TYPE,
quantifier: str = 'any'
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.BasePage, None, None]:
"""Yield pages from another generator whose body matches regex.

Uses regex option re.IGNORECASE depending on the quantifier parameter.
@@ -317,9 +321,10 @@
if cls.__filter_match(reg, page.text, quantifier))


-def QualityFilterPageGenerator(generator: Iterable[pywikibot.page.Page],
- quality: list[int]
- ) -> Iterator[pywikibot.page.Page]:
+def QualityFilterPageGenerator(
+ generator: Iterable[pywikibot.page.BasePage],
+ quality: list[int],
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""
Wrap a generator to filter pages according to quality levels.

@@ -338,10 +343,10 @@
yield page


-def CategoryFilterPageGenerator(generator: Iterable[pywikibot.page.Page],
- category_list:
- Sequence[pywikibot.page.Category]
- ) -> Iterator[pywikibot.page.Page]:
+def CategoryFilterPageGenerator(
+ generator: Iterable[pywikibot.page.BasePage],
+ category_list: Sequence[pywikibot.page.Category],
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""
Wrap a generator to filter pages by categories specified.

@@ -359,19 +364,19 @@


class _Edit(NamedTuple):
- do_edit: datetime.datetime
- edit_start: datetime.datetime
- edit_end: datetime.datetime
+ do_edit: datetime.datetime | None
+ edit_start: datetime.datetime | None
+ edit_end: datetime.datetime | None


def EdittimeFilterPageGenerator(
- generator: Iterable[pywikibot.page.Page],
+ generator: Iterable[pywikibot.page.BasePage],
last_edit_start: datetime.datetime | None = None,
last_edit_end: datetime.datetime | None = None,
first_edit_start: datetime.datetime | None = None,
first_edit_end: datetime.datetime | None = None,
- show_filtered: bool = False
-) -> Iterator[pywikibot.page.Page]:
+ show_filtered: bool = False,
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""
Wrap a generator to filter pages outside last or first edit range.

@@ -383,7 +388,7 @@
:param show_filtered: Output a message for each page not yielded
"""
def to_be_yielded(edit: _Edit,
- page: pywikibot.page.Page,
+ page: pywikibot.page.BasePage,
rev: pywikibot.page.Revision,
show_filtered: bool) -> bool:
if not edit.do_edit:
@@ -425,13 +430,14 @@
yield page


-def UserEditFilterGenerator(generator: Iterable[pywikibot.page.Page],
- username: str,
- timestamp: str | datetime.datetime | None = None,
- skip: bool = False,
- max_revision_depth: int | None = None,
- show_filtered: bool = False
- ) -> Iterator[pywikibot.page.Page]:
+def UserEditFilterGenerator(
+ generator: Iterable[pywikibot.page.BasePage],
+ username: str,
+ timestamp: str | datetime.datetime | None = None,
+ skip: bool = False,
+ max_revision_depth: int | None = None,
+ show_filtered: bool = False
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""
Generator which will yield Pages modified by username.

@@ -449,10 +455,10 @@
max_revision_depth
:param show_filtered: Output a message for each page not yielded
"""
- if isinstance(timestamp, str):
- ts = pywikibot.Timestamp.fromtimestampformat(timestamp)
+ if timestamp is None:
+ ts = None
else:
- ts = timestamp
+ ts = pywikibot.Timestamp.set_timestamp(timestamp)

for page in generator:
contribs = page.contributors(total=max_revision_depth, endtime=ts)
@@ -462,10 +468,11 @@
pywikibot.info(f'Skipping {page.title(as_link=True)}')


-def WikibaseItemFilterPageGenerator(generator: Iterable[pywikibot.page.Page],
- has_item: bool = True,
- show_filtered: bool = False
- ) -> Iterator[pywikibot.page.Page]:
+def WikibaseItemFilterPageGenerator(
+ generator: Iterable[pywikibot.page.BasePage],
+ has_item: bool = True,
+ show_filtered: bool = False,
+) -> Generator[pywikibot.page.BasePage, None, None]:
"""
A wrapper generator used to exclude if page has a Wikibase item or not.

diff --git a/pywikibot/pagegenerators/_generators.py b/pywikibot/pagegenerators/_generators.py
index 80cc3ee..af59e3d 100644
--- a/pywikibot/pagegenerators/_generators.py
+++ b/pywikibot/pagegenerators/_generators.py
@@ -38,7 +38,8 @@


if TYPE_CHECKING:
- from pywikibot.site import BaseSite
+ from pywikibot.site import BaseSite, NamespaceArgType
+ from pywikibot.site._namespace import SingleNamespaceType
from pywikibot.time import Timestamp


@@ -49,7 +50,7 @@

def AllpagesPageGenerator(
start: str = '!',
- namespace: int = 0,
+ namespace: SingleNamespaceType = 0,
includeredirects: str | bool = True,
site: BaseSite | None = None,
total: int | None = None, content: bool = False
@@ -77,7 +78,7 @@


def PrefixingPageGenerator(prefix: str,
- namespace: int | Namespace = None,
+ namespace: SingleNamespaceType | None = None,
includeredirects: bool | str | None = True,
site: BaseSite | None = None,
total: int | None = None,
@@ -118,12 +119,12 @@
def LogeventsPageGenerator(logtype: str | None = None,
user: str | None = None,
site: BaseSite | None = None,
- namespace: int | None = None,
+ namespace: SingleNamespaceType | None = None,
total: int | None = None,
start: Timestamp | None = None,
end: Timestamp | None = None,
reverse: bool = False
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Generate Pages for specified modes of logevents.

@@ -151,9 +152,9 @@


def NewpagesPageGenerator(site: BaseSite | None = None,
- namespaces: tuple[int] = (0, ),
+ namespaces: NamespaceArgType = (0, ),
total: int | None = None
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Iterate Page objects for all new titles in a single namespace.

@@ -198,7 +199,7 @@

ns = rc['ns']
if ns == Namespace.USER:
- pageclass = pywikibot.User
+ pageclass: type[pywikibot.Page] = pywikibot.User
elif ns == Namespace.FILE:
pageclass = pywikibot.FilePage
elif ns == Namespace.CATEGORY:
@@ -254,14 +255,14 @@


def InterwikiPageGenerator(page: pywikibot.page.Page
- ) -> Iterable[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""Iterate over all interwiki (non-language) links on a page."""
return (pywikibot.Page(link) for link in page.interwiki())


def LanguageLinksPageGenerator(page: pywikibot.page.Page,
total: int | None = None
- ) -> Iterable[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""Iterate over all interwiki language links on a page."""
return (pywikibot.Page(link) for link in page.iterlanglinks(total=total))

@@ -271,8 +272,8 @@
start: str | None = None,
total: int | None = None,
content: bool = False,
- namespaces: Sequence[int] | None = None
- ) -> Iterable[pywikibot.page.Page]:
+ namespaces: NamespaceArgType = None,
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""Yield all pages in a specific category.

:param recurse: if not False or 0, also iterate articles in
@@ -286,22 +287,21 @@
:param content: if True, retrieve the content of the current version
of each page (default False)
"""
- kwargs = {
- 'content': content,
- 'namespaces': namespaces,
- 'recurse': recurse,
- 'startprefix': start,
- 'total': total,
- }
- yield from category.articles(**kwargs)
+ yield from category.articles(
+ content=content,
+ namespaces=namespaces,
+ recurse=recurse,
+ startprefix=start,
+ total=total,
+ )


def SubCategoriesPageGenerator(category: pywikibot.page.Category,
recurse: int | bool = False,
start: str | None = None,
total: int | None = None,
- content: bool = False
- ) -> Iterable[pywikibot.page.Page]:
+ content: bool = False,
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""Yield all subcategories in a specific category.

:param recurse: if not False or 0, also iterate articles in
@@ -326,7 +326,7 @@
linkingPage: pywikibot.page.Page, # noqa: N803
total: int | None = None,
content: bool = False
-) -> Iterable[pywikibot.page.Page]:
+) -> Iterable[pywikibot.page.BasePage]:
"""Yield all pages linked from a specific page.

See :py:obj:`page.BasePage.linkedPages` for details.
@@ -342,7 +342,7 @@

def _yield_titles(f: codecs.StreamReaderWriter | io.StringIO,
site: pywikibot.site.BaseSite
- ) -> Iterable[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""Yield page titles from a text stream.

:param f: text stream object
@@ -372,8 +372,8 @@


def TextIOPageGenerator(source: str | None = None,
- site: BaseSite | None = None
- ) -> Iterable[pywikibot.page.Page]:
+ site: BaseSite | None = None,
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""Iterate pages from a list in a text file or on a webpage.

The text source must contain page links between double-square-brackets or,
@@ -402,7 +402,7 @@

def PagesFromTitlesGenerator(iterable: Iterable[str],
site: BaseSite | None = None
- ) -> Iterable[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Generate pages from the titles (strings) yielded by iterable.

@@ -437,14 +437,14 @@


def UserContributionsGenerator(username: str,
- namespaces: list[int] | None = None,
+ namespaces: NamespaceArgType = None,
site: BaseSite | None = None,
total: int | None = None,
_filter_unique: None | (Callable[
[Iterable[pywikibot.page.Page]],
Iterable[pywikibot.page.Page]]) =
_filter_unique_pages
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Iterable[pywikibot.page.Page]:
"""Yield unique pages edited by user:username.

:param total: Maximum number of pages to retrieve in total
@@ -468,7 +468,7 @@

def NewimagesPageGenerator(total: int | None = None,
site: BaseSite | None = None
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
New file generator.

@@ -482,7 +482,7 @@


def WikibaseItemGenerator(gen: Iterable[pywikibot.page.Page]
- ) -> Iterator[pywikibot.page.ItemPage]:
+ ) -> Generator[pywikibot.page.ItemPage, None, None]:
"""
A wrapper generator used to yield Wikibase items of another generator.

@@ -504,7 +504,7 @@
def AncientPagesPageGenerator(
total: int = 100,
site: BaseSite | None = None
-) -> Iterator[pywikibot.page.Page]:
+) -> Generator[pywikibot.page.Page, None, None]:
"""
Ancient page generator.

@@ -674,7 +674,7 @@

def LongPagesPageGenerator(total: int = 100,
site: BaseSite | None = None
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Long page generator.

@@ -688,7 +688,7 @@

def ShortPagesPageGenerator(total: int = 100,
site: BaseSite | None = None
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Short page generator.

@@ -703,7 +703,7 @@
def RandomPageGenerator(
total: int | None = None,
site: BaseSite | None = None,
- namespaces: Sequence[str | Namespace] | None = None
+ namespaces: NamespaceArgType = None
) -> Iterable[pywikibot.page.Page]:
"""Random page generator.

@@ -718,7 +718,7 @@
def RandomRedirectPageGenerator(
total: int | None = None,
site: BaseSite | None = None,
- namespaces: Sequence[str | Namespace] | None = None,
+ namespaces: NamespaceArgType = None,
) -> Iterable[pywikibot.page.Page]:
"""Random redirect generator.

@@ -733,7 +733,7 @@

def LinksearchPageGenerator(
url: str,
- namespaces: list[int] | None = None,
+ namespaces: NamespaceArgType = None,
total: int | None = None,
site: BaseSite | None = None,
protocol: str | None = None
@@ -758,7 +758,7 @@
def SearchPageGenerator(
query: str,
total: int | None = None,
- namespaces: Sequence[str | Namespace] | None = None,
+ namespaces: NamespaceArgType = None,
site: BaseSite | None = None
) -> Iterable[pywikibot.page.Page]:
"""Yield pages from the MediaWiki internal search engine.
@@ -773,7 +773,7 @@

def LiveRCPageGenerator(site: BaseSite | None = None,
total: int | None = None
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Yield pages from a socket.io RC stream.

@@ -797,7 +797,7 @@
if 'title' not in entry and entry['type'] == 'log':
continue
page = pywikibot.Page(site, entry['title'], entry['namespace'])
- page._rcinfo = entry
+ page._rcinfo = entry # type: ignore[attr-defined]
yield page
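
For context: the ``# type: ignore[attr-defined]`` markers added here and in later hunks silence mypy for attributes that are attached to objects dynamically and therefore never appear in the class definition. A generic illustration (not pywikibot code):

    class Box:
        """A class that declares no attributes."""

    box = Box()
    box.extra = 42  # type: ignore[attr-defined]  # set dynamically, invisible to mypy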


@@ -835,7 +835,7 @@
self._google_query = None

@staticmethod
- def queryGoogle(query: str) -> Iterator[Any]:
+ def queryGoogle(query: str) -> Generator[str, None, None]:
"""Perform a query using python package 'google'.

The terms of service as at June 2014 give two conditions that
@@ -861,7 +861,7 @@
yield from google.search(query)

@property
- def generator(self) -> Iterator[pywikibot.page.Page]:
+ def generator(self) -> Generator[pywikibot.page.Page, None, None]:
"""Yield results from :meth:`queryGoogle` query.

Google contains links in the format:
@@ -884,7 +884,7 @@

def MySQLPageGenerator(query: str, site: BaseSite | None = None,
verbose: bool | None = None
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
    Yield pages based on a MySQL query.

@@ -944,7 +944,7 @@
self,
filename: str,
start: str | None = None,
- namespaces: str | Namespace | Sequence[str | Namespace] | None = None,
+ namespaces: NamespaceArgType = None,
site: BaseSite | None = None,
text_predicate: Callable[[str], bool] | None = None,
content=False,
@@ -1000,7 +1000,7 @@

def YearPageGenerator(start: int = 1, end: int = 2050,
site: BaseSite | None = None
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Year page generator.

@@ -1020,7 +1020,7 @@

def DayPageGenerator(start_month: int = 1, end_month: int = 12,
site: BaseSite | None = None, year: int = 2000
- ) -> Iterator[pywikibot.page.Page]:
+ ) -> Generator[pywikibot.page.Page, None, None]:
"""
Day page generator.

@@ -1038,9 +1038,10 @@
pywikibot.Link(date.format_date(month, day, lang), site))


-def WikidataPageFromItemGenerator(gen: Iterable[pywikibot.page.ItemPage],
- site: pywikibot.site.BaseSite
- ) -> Iterator[pywikibot.page.Page]:
+def WikidataPageFromItemGenerator(
+ gen: Iterable[pywikibot.page.ItemPage],
+ site: pywikibot.site.BaseSite,
+) -> Generator[pywikibot.page.Page, None, None]:
"""Generate pages from site based on sitelinks of item pages.

:param gen: generator of :py:obj:`pywikibot.ItemPage`
@@ -1099,11 +1100,12 @@
return WikidataPageFromItemGenerator(entities, site)


-def WikibaseSearchItemPageGenerator(text: str,
- language: str | None = None,
- total: int | None = None,
- site: BaseSite | None = None
- ) -> Iterator[pywikibot.page.ItemPage]:
+def WikibaseSearchItemPageGenerator(
+ text: str,
+ language: str | None = None,
+ total: int | None = None,
+ site: BaseSite | None = None,
+) -> Generator[pywikibot.page.ItemPage, None, None]:
"""
Generate pages that contain the provided text.

@@ -1133,11 +1135,14 @@
subclassed from :class:`tools.collections.GeneratorWrapper`
"""

- def __init__(self, categories: Sequence[str],
- subset_combination: bool = True,
- namespaces: Sequence[str | Namespace] | None = None,
- site: BaseSite | None = None,
- extra_options: dict[Any, Any] | None = None) -> None:
+ def __init__(
+ self,
+ categories: Sequence[str],
+ subset_combination: bool = True,
+ namespaces: Iterable[int | pywikibot.site.Namespace] | None = None,
+ site: BaseSite | None = None,
+ extra_options: dict[Any, Any] | None = None
+ ) -> None:
"""
Initializer.

@@ -1159,7 +1164,7 @@
namespaces, extra_options)

def buildQuery(self, categories: Sequence[str], subset_combination: bool,
- namespaces: Sequence[str | Namespace] | None,
+ namespaces: Iterable[int | pywikibot.site.Namespace] | None,
extra_options: dict[Any, Any] | None) -> dict[str, Any]:
"""
Get the querystring options to query PetScan.
@@ -1192,7 +1197,7 @@

return query_final

- def query(self) -> Iterator[dict[str, Any]]:
+ def query(self) -> Generator[dict[str, Any], None, None]:
"""Query PetScan.

.. versionchanged:: 7.4
@@ -1221,16 +1226,15 @@
yield from raw_pages

@property
- def generator(self) -> Iterator[pywikibot.page.Page]:
+ def generator(self) -> Generator[pywikibot.page.Page, None, None]:
"""Yield results from :meth:`query`.

.. versionchanged:: 7.6
changed from iterator method to generator property
"""
for raw_page in self.query():
- page = pywikibot.Page(self.site, raw_page['title'],
- int(raw_page['namespace']))
- yield page
+ yield pywikibot.Page(self.site, raw_page['title'],
+ int(raw_page['namespace']))


class PagePilePageGenerator(GeneratorWrapper):
@@ -1262,7 +1266,7 @@

return query

- def query(self) -> Iterator[dict[str, Any]]:
+ def query(self) -> Generator[str, None, None]:
"""Query PagePile.

:raises ServerError: Either ReadTimeout or server status error
@@ -1281,7 +1285,7 @@
yield from raw_pages

@property
- def generator(self) -> Iterator[pywikibot.page.Page]:
+ def generator(self) -> Generator[pywikibot.page.Page, None, None]:
"""Yield results from :meth:`query`."""
for raw_page in self.query():
page = pywikibot.Page(self.site, raw_page)
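
Taken together, the changes in this file follow one rule: functions whose bodies contain ``yield`` are annotated ``Generator[YieldType, SendType, ReturnType]``, while functions that merely return a lazily produced sequence keep the looser ``Iterable``/``Iterator``. A minimal, generic sketch of the distinction (plain Python, not the pywikibot API):

    from collections.abc import Generator, Iterable

    def counter(limit: int) -> Generator[int, None, None]:
        """A true generator function: the body uses ``yield``."""
        for n in range(limit):
            yield n

    def numbers(limit: int) -> Iterable[int]:
        """Returns an existing iterable; no ``yield`` in the body."""
        return range(limit)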
diff --git a/pywikibot/site/_datasite.py b/pywikibot/site/_datasite.py
index 4a6627f..a61773d 100644
--- a/pywikibot/site/_datasite.py
+++ b/pywikibot/site/_datasite.py
@@ -14,7 +14,7 @@
from warnings import warn

import pywikibot
-from pywikibot.backports import batched
+from pywikibot.backports import Generator, Iterable, batched
from pywikibot.data import api
from pywikibot.exceptions import (
APIError,
@@ -205,7 +205,12 @@
raise APIError(data['errors'], '')
return data['entities']

- def preload_entities(self, pagelist, groupsize: int = 50):
+ def preload_entities(
+ self,
+ pagelist: Iterable[pywikibot.page.WikibaseEntity
+ | pywikibot.page.Page],
+ groupsize: int = 50
+ ) -> Generator[pywikibot.page.WikibaseEntity, None, None]:
"""Yield subclasses of WikibaseEntity's with content prefilled.

.. note:: Pages will be iterated in a different order than in
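
A hedged usage sketch of the newly typed signature; the site and item IDs below are placeholders:

    import pywikibot

    repo = pywikibot.Site('wikidata', 'wikidata').data_repository()
    items = [pywikibot.ItemPage(repo, qid) for qid in ('Q42', 'Q64')]
    for entity in repo.preload_entities(items, groupsize=50):
        print(entity.id)  # each value is a WikibaseEntity subclass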
diff --git a/pywikibot/site/_generators.py b/pywikibot/site/_generators.py
index 1b36d19..763b0d9 100644
--- a/pywikibot/site/_generators.py
+++ b/pywikibot/site/_generators.py
@@ -8,10 +8,9 @@

import heapq
import itertools
-import typing
from contextlib import suppress
from itertools import zip_longest
-from typing import Any
+from typing import TYPE_CHECKING, Any

import pywikibot
from pywikibot.backports import Generator, Iterable, batched
@@ -34,11 +33,18 @@
from pywikibot.tools.itertools import filter_unique


+if TYPE_CHECKING:
+ from pywikibot.site._namespace import SingleNamespaceType
+
+
class GeneratorsMixin:

"""API generators mixin to MediaWiki site."""

- def load_pages_from_pageids(self, pageids):
+ def load_pages_from_pageids(
+ self,
+ pageids: str | Iterable[int | str],
+ ) -> Generator[pywikibot.Page, None, None]:
"""
Return a page generator from pageids.

@@ -93,7 +99,7 @@

def preloadpages(
self,
- pagelist,
+ pagelist: Iterable[pywikibot.Page],
*,
groupsize: int | None = None,
templates: bool = False,
@@ -101,8 +107,8 @@
pageprops: bool = False,
categories: bool = False,
content: bool = True,
- quiet: bool = True
- ):
+ quiet: bool = True,
+ ) -> Generator[pywikibot.Page, None, None]:
"""Return a generator to a list of preloaded pages.

Pages are iterated in the same order than in the underlying
@@ -143,8 +149,8 @@
if categories:
props += '|categories'

- groupsize = min(groupsize or self.maxlimit, self.maxlimit)
- for batch in batched(pagelist, groupsize):
+ groupsize_ = min(groupsize or self.maxlimit, self.maxlimit)
+ for batch in batched(pagelist, groupsize_):
# Do not use p.pageid property as it will force page loading.
pageids = [str(p._pageid) for p in batch
if hasattr(p, '_pageid') and p._pageid > 0]
@@ -216,9 +222,16 @@
priority, page = heapq.heappop(prio_queue)
yield page

- def pagebacklinks(self, page, *, follow_redirects: bool = False,
- filter_redirects=None, namespaces=None, total=None,
- content: bool = False):
+ def pagebacklinks(
+ self,
+ page: pywikibot.Page,
+ *,
+ follow_redirects: bool = False,
+ filter_redirects=None,
+ namespaces: NamespaceArgType = None,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate all pages that link to the given page.

.. seealso:: :api:`Backlinks`
@@ -231,19 +244,15 @@
both (no filtering).
:param namespaces: If present, only return links from the namespaces
in this list.
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param total: Maximum number of pages to retrieve in total.
:param content: if True, load the current content of each iterated page
(default False)
- :rtype: typing.Iterable[pywikibot.Page]
:raises KeyError: a namespace identifier was not resolved
:raises TypeError: a namespace identifier has an inappropriate
type such as NoneType or bool
"""
bltitle = page.title(with_section=False).encode(self.encoding())
- blargs = {'gbltitle': bltitle}
+ blargs: dict[str, Any] = {'gbltitle': bltitle}
if filter_redirects is not None:
blargs['gblfilterredir'] = ('redirects' if filter_redirects
else 'nonredirects')
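
The recurring ``dict[str, Any]`` annotations on ``blargs``, ``eiargs``, ``rdargs`` and friends exist because mypy otherwise infers the value type from the first assignment and then rejects later values of other types. A generic sketch of the failure mode (keys are illustrative):

    from typing import Any

    narrow = {'gbltitle': b'Foo'}      # inferred as dict[str, bytes]
    # narrow['limit'] = 50             # mypy: int is incompatible with bytes

    wide: dict[str, Any] = {'gbltitle': b'Foo'}
    wide['limit'] = 50                 # accepted: values are Any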
@@ -279,8 +288,15 @@
return itertools.chain(*genlist.values())
return blgen

- def page_embeddedin(self, page, *, filter_redirects=None, namespaces=None,
- total=None, content: bool = False):
+ def page_embeddedin(
+ self,
+ page: pywikibot.Page,
+ *,
+ filter_redirects=None,
+ namespaces: NamespaceArgType = None,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate all pages that embedded the given page as a template.

.. seealso:: :api:`Embeddedin`
@@ -291,18 +307,15 @@
None, return both (no filtering).
:param namespaces: If present, only return links from the namespaces
in this list.
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param content: if True, load the current content of each iterated page
(default False)
- :rtype: typing.Iterable[pywikibot.Page]
:raises KeyError: a namespace identifier was not resolved
:raises TypeError: a namespace identifier has an inappropriate
type such as NoneType or bool
"""
- eiargs = {'geititle':
- page.title(with_section=False).encode(self.encoding())}
+ eiargs: dict[str, Any] = {
+ 'geititle': page.title(with_section=False).encode(self.encoding()),
+ }
if filter_redirects is not None:
eiargs['geifilterredir'] = ('redirects' if filter_redirects
else 'nonredirects')
@@ -333,7 +346,7 @@
:param total: maximum number of redirects to retrieve in total
:param content: load the current content of each redirect
"""
- rdargs = {
+ rdargs: dict[str, Any] = {
'titles': page.title(with_section=False).encode(self.encoding()),
}
if filter_fragments is not None:
@@ -346,22 +359,18 @@
self,
page, *,
follow_redirects: bool = False,
- filter_redirects=None,
+ filter_redirects: bool | None = None,
with_template_inclusion: bool = True,
only_template_inclusion: bool = False,
- namespaces=None,
- total=None,
- content: bool = False
- ):
+ namespaces: NamespaceArgType = None,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""
Convenience method combining pagebacklinks and page_embeddedin.

:param namespaces: If present, only return links from the namespaces
in this list.
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
- :rtype: typing.Iterable[pywikibot.Page]
:raises KeyError: a namespace identifier was not resolved
:raises TypeError: a namespace identifier has an inappropriate
type such as NoneType or bool
@@ -388,21 +397,20 @@
), total)

def pagelinks(
- self, page, *,
- namespaces=None,
+ self,
+ page: pywikibot.page.BasePage,
+ *,
+ namespaces: NamespaceArgType = None,
follow_redirects: bool = False,
total: int | None = None,
- content: bool = False
+ content: bool = False,
) -> Generator[pywikibot.Page, None, None]:
- """Iterate internal wikilinks contained (or transcluded) on page.
+ """Yield internal wikilinks contained (or transcluded) on page.

.. seealso:: :api:`Links`

:param namespaces: Only iterate pages in these namespaces
(default: all)
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param follow_redirects: if True, yields the target of any redirects,
rather than the redirect page
:param total: iterate no more than this number of pages in total
@@ -411,7 +419,7 @@
:raises TypeError: a namespace identifier has an inappropriate
type such as NoneType or bool
"""
- plargs = {}
+ plargs: dict[str, Any] = {}
if hasattr(page, '_pageid'):
plargs['pageids'] = str(page._pageid)
else:
@@ -423,7 +431,13 @@
**plargs)

# Sortkey doesn't work with generator
- def pagecategories(self, page, *, total=None, content: bool = False):
+ def pagecategories(
+ self,
+ page: pywikibot.Page,
+ *,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate categories to which page belongs.

.. seealso:: :api:`Categories`
@@ -432,7 +446,7 @@
(default False); note that this means the contents of the
category description page, not the pages contained in the category
"""
- clargs = {}
+ clargs: dict[str, Any] = {}
if hasattr(page, '_pageid'):
clargs['pageids'] = str(page._pageid)
else:
@@ -442,7 +456,13 @@
type_arg='categories', total=total,
g_content=content, **clargs)

- def pageimages(self, page, *, total=None, content: bool = False):
+ def pageimages(
+ self,
+ page: pywikibot.Page,
+ *,
+ total: int | None = None,
+ content: bool = False
+ ) -> Iterable[pywikibot.Page]:
"""Iterate images used (not just linked) on the page.

.. seealso:: :api:`Images`
@@ -457,16 +477,19 @@
titles=imtitle, total=total,
g_content=content)

- def pagetemplates(self, page, *, namespaces=None, total=None,
- content: bool = False):
+ def pagetemplates(
+ self,
+ page: pywikibot.Page,
+ *,
+ namespaces: NamespaceArgType = None,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate templates transcluded (not just linked) on the page.

.. seealso:: :api:`Templates`

:param namespaces: Only iterate pages in these namespaces
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param content: if True, load the current content of each iterated page
(default False)

@@ -481,8 +504,9 @@

def categorymembers(
self,
- category: pywikibot.Category, *,
- namespaces=None,
+ category: pywikibot.Category,
+ *,
+ namespaces: NamespaceArgType = None,
sortby: str | None = None,
reverse: bool = False,
starttime: pywikibot.time.Timestamp | None = None,
@@ -491,7 +515,7 @@
startprefix: str | None = None,
endprefix: str | None = None,
content: bool = False,
- member_type: str | Iterable[str] | None = None
+ member_type: str | Iterable[str] | None = None,
) -> Iterable[pywikibot.Page]:
"""Iterate members of specified category.

@@ -512,9 +536,6 @@
:param namespaces: If present, only return category members from
these namespaces. To yield subcategories or files, use
parameter member_type instead.
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param sortby: determines the order in which results are generated,
valid values are "sortkey" (default, results ordered by category
sort key) or "timestamp" (results ordered by time page was
@@ -547,7 +568,7 @@
f'categorymembers: non-Category page {category!r} specified')

cmtitle = category.title(with_section=False).encode(self.encoding())
- cmargs = {
+ cmargs: dict[str, Any] = {
'type_arg': 'categorymembers',
'gcmtitle': cmtitle,
'gcmprop': 'ids|title|sortkey'
@@ -657,12 +678,12 @@

def loadrevisions(
self,
- page,
+ page: pywikibot.Page,
*,
content: bool = False,
- section=None,
- **kwargs
- ):
+ section: int | None = None,
+ **kwargs,
+ ) -> None:
"""Retrieve revision information and store it in page object.

By default, retrieves the last (current) revision of the page,
@@ -678,13 +699,11 @@
.. seealso:: :api:`Revisions`

:param page: retrieve revisions of this Page and hold the data.
- :type page: pywikibot.Page
:param content: if True, retrieve the wiki-text of each revision;
otherwise, only retrieve the revision metadata (default)
:param section: if specified, retrieve only this section of the text
(content must be True); section must be given by number (top of
the article is section 0), not name
- :type section: int
:keyword revids: retrieve only the specified revision ids (raise
Exception if any of revids does not correspond to page)
:type revids: an int, a str or a list of ints or strings
@@ -735,7 +754,7 @@
raise ValueError(
'loadrevisions: endid > startid with rvdir=False')

- rvargs = {
+ rvargs: dict[str, Any] = {
'type_arg': 'info|revisions',
'rvprop': self._rvprops(content=content),
}
@@ -790,11 +809,15 @@
raise NoPageError(page)
api.update_page(page, pagedata, rvgen.props)

- def pagelanglinks(self, page, *,
- total: int | None = None,
- include_obsolete: bool = False,
- include_empty_titles: bool = False):
- """Iterate all interlanguage links on page, yielding Link objects.
+ def pagelanglinks(
+ self,
+ page: pywikibot.Page,
+ *,
+ total: int | None = None,
+ include_obsolete: bool = False,
+ include_empty_titles: bool = False,
+ ) -> Generator[pywikibot.Link, None, None]:
+ """Yield all interlanguage links on page, yielding Link objects.

         .. versionchanged:: 6.2
`include_empty_titles` parameter was added.
@@ -826,8 +849,13 @@
if link.title or include_empty_titles:
yield link

- def page_extlinks(self, page, *, total=None):
- """Iterate all external links on page, yielding URL strings.
+ def page_extlinks(
+ self,
+ page: pywikibot.Page,
+ *,
+ total: int | None = None,
+ ) -> Generator[str, None, None]:
+ """Yield all external links on page, yielding URL strings.

.. seealso:: :api:`Extlinks`
"""
@@ -847,17 +875,17 @@
self,
start: str = '!',
prefix: str = '',
- namespace=0,
- filterredir=None,
- filterlanglinks=None,
- minsize=None,
- maxsize=None,
- protect_type=None,
- protect_level=None,
+ namespace: SingleNamespaceType = 0,
+ filterredir: bool | None = None,
+ filterlanglinks: bool | None = None,
+ minsize: int | None = None,
+ maxsize: int | None = None,
+ protect_type: str | None = None,
+ protect_level: str | None = None,
reverse: bool = False,
- total=None,
- content: bool = False
- ):
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate pages in a single namespace.

.. seealso:: :api:`Allpages`
@@ -865,7 +893,6 @@
:param start: Start at this title (page need not exist).
:param prefix: Only yield pages starting with this string.
:param namespace: Iterate pages from this (single) namespace
- :type namespace: int or Namespace.
:param filterredir: if True, only yield redirects; if False (and not
None), only yield non-redirects (default: yield both)
:param filterlanglinks: if True, only yield pages with language links;
@@ -877,7 +904,6 @@
in size
:param protect_type: only yield pages that have a protection of the
specified type
- :type protect_type: str
:param protect_level: only yield pages that have protection at this
level; can only be used if protect_type is specified
         :param reverse: if True, iterate in reverse Unicode lexicographic
@@ -930,11 +956,11 @@
self,
start: str = '!',
prefix: str = '',
- namespace=0,
+ namespace: SingleNamespaceType = 0,
unique: bool = False,
fromids: bool = False,
- total=None
- ):
+ total: int | None = None,
+ ) -> Generator[pywikibot.Page, None, None]:
"""Iterate all links to pages (which need not exist) in one namespace.

Note that, in practice, links that were found on pages that have
@@ -946,7 +972,6 @@
:param start: Start at this title (page need not exist).
:param prefix: Only yield pages starting with this string.
:param namespace: Iterate pages from this (single) namespace
- :type namespace: int or Namespace
:param unique: If True, only iterate each link title once (default:
iterate once for each linking page)
:param fromids: if True, include the pageid of the page containing
@@ -968,11 +993,17 @@
for link in algen:
p = pywikibot.Page(self, link['title'], link['ns'])
if fromids:
- p._fromid = link['fromid']
+ p._fromid = link['fromid'] # type: ignore[attr-defined]
yield p

- def allcategories(self, start: str = '!', prefix: str = '', total=None,
- reverse: bool = False, content: bool = False):
+ def allcategories(
+ self,
+ start: str = '!',
+ prefix: str = '',
+ total: int | None = None,
+ reverse: bool = False,
+ content: bool = False,
+ ) -> Iterable[pywikibot.page.Category]:
"""Iterate categories used (which need not have a Category page).

Iterator yields Category objects. Note that, in practice, links that
@@ -998,7 +1029,10 @@
acgen.request['gacdir'] = 'descending'
return acgen

- def botusers(self, total=None):
+ def botusers(
+ self,
+ total: int | None = None,
+ ) -> Generator[dict[str, Any], None, None]:
"""Iterate bot users.

Iterated values are dicts containing 'name', 'userid', 'editcount',
@@ -1007,10 +1041,11 @@
str; all the other values are str and should always be present.
"""
if not hasattr(self, '_bots'):
- self._bots = {}
+ self._bots: dict[str, dict[str, str | list[str]]] = {}

if not self._bots:
for item in self.allusers(group='bot', total=total):
+ assert isinstance(item['name'], str)
self._bots.setdefault(item['name'], item)

yield from self._bots.values()
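
The added ``assert isinstance(item['name'], str)`` is a type-narrowing hint: ``allusers()`` now yields ``dict[str, str | list[str]]``, and the assert convinces the checker that this particular value is a plain ``str`` before it is used as a dict key. The same mechanism in isolation:

    value: str | list[str] = 'SomeBot'
    assert isinstance(value, str)   # narrows str | list[str] to str
    name: str = value               # fine after narrowing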
@@ -1019,9 +1054,9 @@
self,
start: str = '!',
prefix: str = '',
- group=None,
- total=None
- ):
+ group: str | None = None,
+ total: int | None = None,
+ ) -> Iterable[dict[str, str | list[str]]]:
"""Iterate registered users, ordered by username.

Iterated values are dicts containing 'name', 'editcount',
@@ -1035,7 +1070,6 @@
:param start: start at this username (name need not exist)
:param prefix: only iterate usernames starting with this substring
:param group: only iterate users that are members of this group
- :type group: str
"""
augen = self._generator(api.ListGenerator, type_arg='allusers',
auprop='editcount|groups|registration',
@@ -1050,14 +1084,14 @@
self,
start: str = '!',
prefix: str = '',
- minsize=None,
- maxsize=None,
+ minsize: int | None = None,
+ maxsize: int | None = None,
reverse: bool = False,
- sha1=None,
- sha1base36=None,
- total=None,
- content: bool = False
- ):
+ sha1: str | None = None,
+ sha1base36: str | None = None,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.FilePage]:
"""Iterate all images, ordered by image title.

Yields FilePages, but these pages need not exist on the wiki.
@@ -1095,12 +1129,12 @@

def filearchive(
self,
- start=None,
- end=None,
+ start: str | None = None,
+ end: str | None = None,
reverse: bool = False,
- total=None,
- **kwargs
- ):
+ total: int | None = None,
+ **kwargs,
+ ) -> Iterable[dict[str, Any]]:
"""Iterate archived files.

         Yields dicts of file archive information.
@@ -1130,9 +1164,16 @@
fagen.request['fadir'] = 'descending'
return fagen

- def blocks(self, starttime=None, endtime=None, reverse: bool = False,
- blockids=None, users=None, iprange: str | None = None,
- total: int | None = None):
+ def blocks(
+ self,
+ starttime: pywikibot.time.Timestamp | None = None,
+ endtime: pywikibot.time.Timestamp | None = None,
+ reverse: bool = False,
+ blockids: int | str | Iterable[int | str] | None = None,
+ users: str | Iterable[str] | None = None,
+ iprange: str | None = None,
+ total: int | None = None,
+ ) -> Iterable[dict[str, Any]]:
"""Iterate all current blocks, in order of creation.

The iterator yields dicts containing keys corresponding to the
@@ -1147,15 +1188,11 @@
``iprange`` parameter cannot be used together with ``users``.

:param starttime: start iterating at this Timestamp
- :type starttime: time.Timestamp
:param endtime: stop iterating at this Timestamp
- :type endtime: time.Timestamp
:param reverse: if True, iterate oldest blocks first (default: newest)
:param blockids: only iterate blocks with these id numbers. Numbers
must be separated by '|' if given by a str.
- :type blockids: str, tuple or list
:param users: only iterate blocks affecting these usernames or IPs
- :type users: str, tuple or list
:param iprange: a single IP or an IP range. Ranges broader than
IPv4/16 or IPv6/19 are not accepted.
:param total: total amount of block entries
@@ -1188,9 +1225,14 @@
bkgen.request['bkip'] = iprange
return bkgen

- def exturlusage(self, url: str | None = None,
- protocol: str | None = None, namespaces=None,
- total: int | None = None, content: bool = False):
+ def exturlusage(
+ self,
+ url: str | None = None,
+ protocol: str | None = None,
+ namespaces: list[int] | None = None,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate Pages that contain links to the given URL.

.. seealso:: :api:`Exturlusage`
@@ -1199,7 +1241,6 @@
prefix); this may include a '*' as a wildcard, only at the start
of the hostname
:param namespaces: list of namespace numbers to fetch contribs from
- :type namespaces: list of int
:param total: Maximum number of pages to retrieve in total
:param protocol: Protocol to search for, likely http or https, http by
default. Full list shown on Special:LinkSearch wikipage
@@ -1226,11 +1267,15 @@
namespaces=namespaces,
total=total, g_content=content)

- def imageusage(self, image: pywikibot.FilePage, *,
- namespaces=None,
- filterredir: bool | None = None,
- total: int | None = None,
- content: bool = False):
+ def imageusage(
+ self,
+ image: pywikibot.FilePage,
+ *,
+ namespaces: NamespaceArgType = None,
+ filterredir: bool | None = None,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate Pages that contain links to the given FilePage.

.. seealso:: :api:`Imageusage`
@@ -1240,9 +1285,6 @@
:param image: the image to search for (FilePage need not exist on
the wiki)
:param namespaces: If present, only iterate pages in these namespaces
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param filterredir: if True, only yield redirects; if False (and not
None), only yield non-redirects (default: yield both)
:param total: iterate no more than this number of pages in total
@@ -1260,11 +1302,18 @@
namespaces=namespaces,
total=total, g_content=content, **iuargs)

- def logevents(self, logtype: str | None = None,
- user: str | None = None, page=None,
- namespace=None, start=None, end=None,
- reverse: bool = False, tag: str | None = None,
- total: int | None = None):
+ def logevents(
+ self,
+ logtype: str | None = None,
+ user: str | None = None,
+ page: str | pywikibot.Page | None = None,
+ namespace: NamespaceArgType = None,
+ start: str | pywikibot.time.Timestamp | None = None,
+ end: str | pywikibot.time.Timestamp | None = None,
+ reverse: bool = False,
+ tag: str | None = None,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.logentries.LogEntry]:
"""Iterate all log entries.

.. seealso:: :api:`Logevents`
@@ -1276,9 +1325,7 @@
(see mediawiki api documentation for available types)
:param user: only iterate entries that match this user name
:param page: only iterate entries affecting this page
- :type page: pywikibot.Page or str
:param namespace: namespace(s) to retrieve logevents from
- :type namespace: int or Namespace or an iterable of them

.. note:: due to an API limitation,
if namespace param contains multiple namespaces,
@@ -1286,13 +1333,10 @@
the API and will be filtered later during iteration.

:param start: only iterate entries from and after this Timestamp
- :type start: time.Timestamp or ISO date string
:param end: only iterate entries up to and through this Timestamp
- :type end: time.Timestamp or ISO date string
:param reverse: if True, iterate oldest entries first (default: newest)
:param tag: only iterate entries tagged with this tag
:param total: maximum number of events to iterate
- :rtype: iterable

:raises KeyError: the namespace identifier was not resolved
:raises TypeError: the namespace identifier has an inappropriate
@@ -1322,22 +1366,25 @@

return legen

- def recentchanges(self, *,
- start=None,
- end=None,
- reverse: bool = False,
- namespaces=None,
- changetype: str | None = None,
- minor: bool | None = None,
- bot: bool | None = None,
- anon: bool | None = None,
- redirect: bool | None = None,
- patrolled: bool | None = None,
- top_only: bool = False,
- total: int | None = None,
- user: str | list[str] | None = None,
- excludeuser: str | list[str] | None = None,
- tag: str | None = None):
+ def recentchanges(
+ self,
+ *,
+ start: pywikibot.time.Timestamp | None = None,
+ end: pywikibot.time.Timestamp | None = None,
+ reverse: bool = False,
+ namespaces: NamespaceArgType = None,
+ changetype: str | None = None,
+ minor: bool | None = None,
+ bot: bool | None = None,
+ anon: bool | None = None,
+ redirect: bool | None = None,
+ patrolled: bool | None = None,
+ top_only: bool = False,
+ total: int | None = None,
+ user: str | list[str] | None = None,
+ excludeuser: str | list[str] | None = None,
+ tag: str | None = None,
+ ) -> Iterable[dict[str, Any]]:
"""Iterate recent changes.

.. seealso:: :api:`RecentChanges`
@@ -1348,9 +1395,6 @@
:type end: pywikibot.Timestamp
:param reverse: if True, start with oldest changes (default: newest)
:param namespaces: only iterate pages in these namespaces
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param changetype: only iterate changes of this type ("edit" for
edits to existing pages, "new" for new pages, "log" for log
entries)
@@ -1409,11 +1453,15 @@
rcgen.request['rctag'] = tag
return rcgen

- def search(self, searchstring: str, *,
- namespaces=None,
- where: str | None = None,
- total: int | None = None,
- content: bool = False):
+ def search(
+ self,
+ searchstring: str,
+ *,
+ namespaces: NamespaceArgType = None,
+ where: str | None = None,
+ total: int | None = None,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate Pages that contain the searchstring.

Note that this may include non-existing Pages if the wiki's database
@@ -1432,9 +1480,6 @@
:param where: Where to search; value must be "text", "title",
"nearmatch" or None (many wikis do not support all search types)
:param namespaces: search only in these namespaces (defaults to all)
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param content: if True, load the current content of each iterated page
(default False)
:raises KeyError: a namespace identifier was not resolved
@@ -1453,9 +1498,18 @@
total=total, g_content=content)
return srgen

- def usercontribs(self, user=None, userprefix=None, start=None, end=None,
- reverse: bool = False, namespaces=None, minor=None,
- total: int | None = None, top_only: bool = False):
+ def usercontribs(
+ self,
+ user: str | None = None,
+ userprefix: str | None = None,
+ start=None,
+ end=None,
+ reverse: bool = False,
+ namespaces: NamespaceArgType = None,
+ minor: bool | None = None,
+ total: int | None = None,
+ top_only: bool = False,
+ ) -> Iterable[dict[str, Any]]:
"""Iterate contributions by a particular user.

Iterated values are in the same format as recentchanges.
@@ -1471,9 +1525,6 @@
:param end: Iterate contributions ending at this Timestamp
:param reverse: Iterate oldest contributions first (default: newest)
:param namespaces: only iterate pages in these namespaces
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param minor: if True, iterate only minor edits; if False and
not None, iterate only non-minor edits (default: iterate both)
:param total: limit result to this number of pages
@@ -1511,9 +1562,17 @@
ucgen.request['ucshow'] = option_set
return ucgen

- def watchlist_revs(self, start=None, end=None, reverse: bool = False,
- namespaces=None, minor=None, bot=None,
- anon=None, total=None):
+ def watchlist_revs(
+ self,
+ start=None,
+ end=None,
+ reverse: bool = False,
+ namespaces: NamespaceArgType = None,
+ minor: bool | None = None,
+ bot: bool | None = None,
+ anon: bool | None = None,
+ total: int | None = None,
+ ) -> Iterable[dict[str, Any]]:
"""Iterate revisions to pages on the bot user's watchlist.

Iterated values will be in same format as recentchanges.
@@ -1524,9 +1583,6 @@
:param end: Iterate revisions ending at this Timestamp
:param reverse: Iterate oldest revisions first (default: newest)
:param namespaces: only iterate pages in these namespaces
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param minor: if True, only list minor edits; if False (and not
None), only list non-minor edits
:param bot: if True, only list bot edits; if False (and not
@@ -1574,9 +1630,20 @@
or self.has_right('undelete'))):
raise UserRightsError(err + 'deleted content.')

- def deletedrevs(self, titles=None, start=None, end=None,
- reverse: bool = False,
- content: bool = False, total=None, **kwargs):
+ def deletedrevs(
+ self,
+ titles: str
+ | pywikibot.Page
+ | Iterable[str]
+ | Iterable[pywikibot.Page]
+ | None = None,
+ start=None,
+ end=None,
+ reverse: bool = False,
+ content: bool = False,
+ total: int | None = None,
+ **kwargs,
+ ) -> Generator[dict[str, Any], None, None]:
"""Iterate deleted revisions.

Each value returned by the iterator will be a dict containing the
@@ -1589,9 +1656,6 @@
.. seealso:: :api:`Deletedrevisions`

:param titles: The page titles to check for deleted revisions
- :type titles: str (multiple titles delimited with '|')
- or pywikibot.Page or typing.Iterable[pywikibot.Page]
- or typing.Iterable[str]
:keyword revids: Get revisions by their ID

.. note:: either titles or revids must be set but not both
@@ -1647,21 +1711,18 @@
def alldeletedrevisions(
self,
*,
- namespaces=None,
+ namespaces: NamespaceArgType = None,
reverse: bool = False,
content: bool = False,
total: int | None = None,
- **kwargs
- ) -> typing.Iterable[dict[str, Any]]:
+ **kwargs,
+ ) -> Generator[dict[str, Any], None, None]:
"""
- Iterate all deleted revisions.
+ Yield all deleted revisions.

.. seealso:: :api:`Alldeletedrevisions`

:param namespaces: Only iterate pages in these namespaces
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param reverse: Iterate oldest revisions first (default: newest)
:param content: If True, retrieve the content of each revision
:param total: Number of revisions to retrieve
@@ -1700,13 +1761,15 @@
total=total,
parameters=parameters)

- def users(self, usernames):
+ def users(
+ self,
+ usernames: Iterable[str],
+ ) -> Iterable[dict[str, Any]]:
"""Iterate info about a list of users by name or IP.

.. seealso:: :api:`Users`

:param usernames: a list of user names
- :type usernames: list, or other iterable, of str
"""
usprop = ['blockinfo', 'gender', 'groups', 'editcount', 'registration',
'rights', 'emailable']
@@ -1715,8 +1778,13 @@
'ususers': usernames, 'usprop': usprop})
return usgen

- def randompages(self, total: int | None = None, namespaces=None,
- redirects: bool | None = False, content: bool = False):
+ def randompages(
+ self,
+ total: int | None = None,
+ namespaces: NamespaceArgType = None,
+ redirects: bool | None = False,
+ content: bool = False,
+ ) -> Iterable[pywikibot.Page]:
"""Iterate a number of random pages.

.. seealso: :api:`Random`
@@ -1726,9 +1794,6 @@

:param total: the maximum number of pages to iterate
:param namespaces: only iterate pages in these namespaces.
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:param redirects: if True, include only redirect pages in
results, False does not include redirects and None include
both types (default: False).
@@ -1741,10 +1806,10 @@
"""
mapping = {False: None, True: 'redirects', None: 'all'}
assert redirects in mapping
- redirects = mapping[redirects]
+ redirects_ = mapping[redirects]
params = {}
- if redirects is not None:
- params['grnfilterredir'] = redirects
+ if redirects_ is not None:
+ params['grnfilterredir'] = redirects_
return self._generator(api.PageGenerator, type_arg='random',
namespaces=namespaces, total=total,
g_content=content, **params)
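
Renaming ``redirects`` to ``redirects_`` (like ``groupsize_`` earlier) sidesteps a mypy rule: a name keeps its declared type for the whole function, so a parameter typed ``bool | None`` cannot be rebound to the ``str | None`` the API expects. A stripped-down sketch:

    def demo(redirects: bool | None = False) -> None:
        mapping = {False: None, True: 'redirects', None: 'all'}
        # redirects = mapping[redirects]   # mypy: str | None is not bool | None
        redirects_ = mapping[redirects]    # new name, new type: accepted
        if redirects_ is not None:
            print(redirects_)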
@@ -1760,7 +1825,14 @@
}

@need_right('patrol')
- def patrol(self, rcid=None, revid=None, revision=None):
+ def patrol(
+ self,
+ rcid: int | str | Iterable[int] | Iterable[str] | None = None,
+ revid: int | str | Iterable[int] | Iterable[str] | None = None,
+ revision: pywikibot.page.Revision
+ | Iterable[pywikibot.page.Revision]
+ | None = None,
+ ) -> Generator[dict[str, int | str], None, None]:
"""Return a generator of patrolled pages.

.. seealso:: :api:`Patrol`
@@ -1771,19 +1843,10 @@

:param rcid: an int/string/iterable/iterator providing rcid of pages
to be patrolled.
- :type rcid: iterable/iterator which returns a number or string which
- contains only digits; it also supports a string (as above) or int
:param revid: an int/string/iterable/iterator providing revid of pages
to be patrolled.
- :type revid: iterable/iterator which returns a number or string which
- contains only digits; it also supports a string (as above) or int.
         :param revision: a Revision/iterable/iterator providing Revision
object of pages to be patrolled.
- :type revision: iterable/iterator which returns a Revision object; it
- also supports a single Revision.
- :rtype: iterator of dict with 'rcid', 'ns' and 'title'
- of the patrolled page.
-
"""
# If patrol is not enabled, attr will be set the first time a
# request is done.
@@ -1793,22 +1856,29 @@
if all(_ is None for _ in [rcid, revid, revision]):
raise Error('No rcid, revid or revision provided.')

- if isinstance(rcid, (int, str)):
- rcid = {rcid}
- if isinstance(revid, (int, str)):
- revid = {revid}
- if isinstance(revision, pywikibot.page.Revision):
- revision = {revision}
+ if rcid is None:
+ rcid_ = set()
+ elif isinstance(rcid, (int, str)):
+ rcid_ = {rcid}
+ else:
+ rcid_ = set(rcid)
+ if revid is None:
+ revid_ = set()
+ elif isinstance(revid, (int, str)):
+ revid_ = {revid}
+ else:
+ revid_ = set(revid)
+ if revision is None:
+ revision_ = set()
+ elif isinstance(revision, pywikibot.page.Revision):
+ revision_ = {revision}
+ else:
+ revision_ = set(revision)

- # Handle param=None.
- rcid = rcid or set()
- revid = revid or set()
- revision = revision or set()
-
- combined_revid = set(revid) | {r.revid for r in revision}
+ combined_revid = revid_ | {r.revid for r in revision_} # type: ignore[attr-defined] # noqa: E501

gen = itertools.chain(
- zip_longest(rcid, [], fillvalue='rcid'),
+ zip_longest(rcid_, [], fillvalue='rcid'),
zip_longest(combined_revid, [], fillvalue='revid'))

token = self.tokens['patrol']
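
The rewritten normalization in ``patrol()`` makes the three cases explicit (``None`` becomes an empty set, a scalar a singleton set, an iterable a set) instead of mutating the parameters in place. The same idiom as a hypothetical standalone helper:

    from collections.abc import Iterable

    def to_set(value: int | str | Iterable[int | str] | None) -> set[int | str]:
        """Normalize None, a scalar, or an iterable into a set."""
        if value is None:
            return set()
        if isinstance(value, (int, str)):   # scalars, including str,
            return {value}                  # must be caught before iteration
        return set(value)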
@@ -1851,8 +1921,11 @@
redirect: bool = False,
excludeuser=None,
patrolled=None,
- namespaces=None,
- total=None
+ namespaces: NamespaceArgType = None,
+ total: int | None = None,
+ ) -> (
+ Generator[tuple[pywikibot.Page, dict[str, Any]], None, None]
+ | Generator[tuple[pywikibot.Page, str, int, str, str, str], None, None]
):
"""Yield new articles (as Page objects) from recent changes.

@@ -1866,9 +1939,6 @@
username or IP address (str), comment (str).

:param namespaces: only iterate pages in these namespaces
- :type namespaces: iterable of str or Namespace key,
- or a single instance of those types. May be a '|' separated
- list of namespace identifiers.
:raises KeyError: a namespace identifier was not resolved
:raises TypeError: a namespace identifier has an inappropriate
type such as NoneType or bool
@@ -1893,8 +1963,12 @@
yield (newpage, pageitem['timestamp'], pageitem['newlen'],
'', pageitem['user'], pageitem.get('comment', ''))

- def querypage(self, special_page, total=True):
- """Yield Page objects retrieved from Special:{special_page}.
+ def querypage(
+ self,
+ special_page: str,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
+ """Iterate Page objects retrieved from Special:{special_page}.

.. seealso:: :api:`Querypage`

@@ -1902,17 +1976,20 @@

:param special_page: Special page to query
:param total: number of pages to return
- :raise AssertionError: special_page is not supported in SpecialPages.
+ :raise ValueError: special_page is not supported in SpecialPages.
"""
param = self._paraminfo.parameter('query+querypage', 'page')
- assert special_page in param['type'], (
- f"{special_page} not in {param['type']}")
+ if special_page not in param['type']:
+ raise ValueError(f"{special_page} not in {param['type']}")

return self._generator(api.PageGenerator,
type_arg='querypage', gqppage=special_page,
total=total)

- def longpages(self, total=None):
+ def longpages(
+ self,
+ total: int | None = None,
+ ) -> Generator[tuple[pywikibot.Page, int], None, None]:
"""Yield Pages and lengths from Special:Longpages.

         Yields tuples of Page object and length (int).
@@ -1926,7 +2003,10 @@
yield (pywikibot.Page(self, pageitem['title']),
int(pageitem['value']))

- def shortpages(self, total=None):
+ def shortpages(
+ self,
+ total: int | None = None,
+ ) -> Generator[tuple[pywikibot.Page, int], None, None]:
"""Yield Pages and lengths from Special:Shortpages.

         Yields tuples of Page object and length (int).
@@ -1940,14 +2020,20 @@
yield (pywikibot.Page(self, pageitem['title']),
int(pageitem['value']))

- def deadendpages(self, total=None):
+ def deadendpages(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Page objects retrieved from Special:Deadendpages.

:param total: number of pages to return
"""
return self.querypage('Deadendpages', total)

- def ancientpages(self, total=None):
+ def ancientpages(
+ self,
+ total: int | None = None,
+ ) -> Generator[tuple[pywikibot.Page, pywikibot.Timestamp], None, None]:
"""Yield Pages, datestamps from Special:Ancientpages.

:param total: number of pages to return
@@ -1959,56 +2045,80 @@
yield (pywikibot.Page(self, pageitem['title']),
pywikibot.Timestamp.fromISOformat(pageitem['timestamp']))

- def lonelypages(self, total=None):
+ def lonelypages(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages retrieved from Special:Lonelypages.

:param total: number of pages to return
"""
return self.querypage('Lonelypages', total)

- def unwatchedpages(self, total=None):
+ def unwatchedpages(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages from Special:Unwatchedpages (requires Admin privileges).

:param total: number of pages to return
"""
return self.querypage('Unwatchedpages', total)

- def wantedpages(self, total=None):
+ def wantedpages(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages from Special:Wantedpages.

:param total: number of pages to return
"""
return self.querypage('Wantedpages', total)

- def wantedfiles(self, total=None):
+ def wantedfiles(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages from Special:Wantedfiles.

:param total: number of pages to return
"""
return self.querypage('Wantedfiles', total)

- def wantedtemplates(self, total=None):
+ def wantedtemplates(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages from Special:Wantedtemplates.

:param total: number of pages to return
"""
return self.querypage('Wantedtemplates', total)

- def wantedcategories(self, total=None):
+ def wantedcategories(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages from Special:Wantedcategories.

:param total: number of pages to return
"""
return self.querypage('Wantedcategories', total)

- def uncategorizedcategories(self, total=None):
+ def uncategorizedcategories(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Categories from Special:Uncategorizedcategories.

:param total: number of pages to return
"""
return self.querypage('Uncategorizedcategories', total)

- def uncategorizedimages(self, total=None):
+ def uncategorizedimages(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield FilePages from Special:Uncategorizedimages.

:param total: number of pages to return
@@ -2018,56 +2128,80 @@
# synonym
uncategorizedfiles = uncategorizedimages

- def uncategorizedpages(self, total=None):
+ def uncategorizedpages(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages from Special:Uncategorizedpages.

:param total: number of pages to return
"""
return self.querypage('Uncategorizedpages', total)

- def uncategorizedtemplates(self, total=None):
+ def uncategorizedtemplates(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages from Special:Uncategorizedtemplates.

:param total: number of pages to return
"""
return self.querypage('Uncategorizedtemplates', total)

- def unusedcategories(self, total=None):
+ def unusedcategories(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Category objects from Special:Unusedcategories.

:param total: number of pages to return
"""
return self.querypage('Unusedcategories', total)

- def unusedfiles(self, total=None):
+ def unusedfiles(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield FilePage objects from Special:Unusedimages.

:param total: number of pages to return
"""
return self.querypage('Unusedimages', total)

- def withoutinterwiki(self, total=None):
+ def withoutinterwiki(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages without language links from Special:Withoutinterwiki.

:param total: number of pages to return
"""
return self.querypage('Withoutinterwiki', total)

- def broken_redirects(self, total=None):
+ def broken_redirects(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages with broken redirects from Special:BrokenRedirects.

:param total: number of pages to return
"""
return self.querypage('BrokenRedirects', total)

- def double_redirects(self, total=None):
+ def double_redirects(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield Pages with double redirects from Special:DoubleRedirects.

:param total: number of pages to return
"""
return self.querypage('DoubleRedirects', total)

- def redirectpages(self, total=None):
+ def redirectpages(
+ self,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Yield redirect pages from Special:ListRedirects.

:param total: number of pages to return
@@ -2077,11 +2211,11 @@
@deprecate_arg('type', 'protect_type')
def protectedpages(
self,
- namespace=0,
+ namespace: NamespaceArgType = 0,
protect_type: str = 'edit',
level: str | bool = False,
- total=None
- ):
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
"""Return protected pages depending on protection level and type.

For protection types which aren't 'create' it uses
@@ -2093,13 +2227,11 @@
.. seealso:: :api:`Protectedtitles`

:param namespace: The searched namespace.
- :type namespace: int or Namespace or str
:param protect_type: The protection type to search for
(default 'edit').
:param level: The protection level (like 'autoconfirmed'). If False it
shows all protection levels.
:return: The pages which are protected.
- :rtype: typing.Iterable[pywikibot.Page]
"""
namespaces = self.namespaces.resolve(namespace)
         # always assert, so we can be sure that protect_type could be 'create'
@@ -2112,9 +2244,13 @@
return self.allpages(namespace=namespaces[0], protect_level=level,
protect_type=protect_type, total=total)

- def pages_with_property(self, propname: str, *,
- total: int | None = None):
- """Yield Page objects from Special:PagesWithProp.
+ def pages_with_property(
+ self,
+ propname: str,
+ *,
+ total: int | None = None,
+ ) -> Iterable[pywikibot.Page]:
+ """Iterate Page objects from Special:PagesWithProp.

.. seealso:: :api:`Pageswithprop`

@@ -2132,9 +2268,10 @@
def watched_pages(
self,
force: bool = False,
- total: int | None = None, *,
- with_talkpage: bool = True
- ) -> Generator[pywikibot.Page, Any, None]:
+ total: int | None = None,
+ *,
+ with_talkpage: bool = True,
+ ) -> Iterable[pywikibot.Page]:
"""Return watchlist.

.. note:: ``watched_pages`` is a restartable generator. See
@@ -2150,7 +2287,7 @@
pages
:return: generator of pages in watchlist
"""
- def ignore_talkpages(page):
+ def ignore_talkpages(page: pywikibot.page.BasePage) -> bool:
"""Ignore talk pages and special pages."""
ns = page.namespace()
return ns >= 0 and not page.namespace() % 2
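
``ignore_talkpages`` leans on the MediaWiki numbering convention: subject namespaces are even, their talk namespaces odd (Main 0 / Talk 1, User 2 / User talk 3, ...), and virtual namespaces such as Special are negative. Hence ``ns >= 0 and not ns % 2`` keeps exactly the even, non-virtual namespaces:

    for ns in (-1, 0, 1, 2, 3):
        print(ns, ns >= 0 and not ns % 2)
    # -1 False, 0 True, 1 False, 2 True, 3 False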
diff --git a/pywikibot/site/_namespace.py b/pywikibot/site/_namespace.py
index cbab431..20d9ef4 100644
--- a/pywikibot/site/_namespace.py
+++ b/pywikibot/site/_namespace.py
@@ -15,8 +15,12 @@
from pywikibot.tools import ComparableMixin, classproperty


-NamespaceIDType = Union[int, str, 'Namespace']
-NamespaceArgType = Union[NamespaceIDType, IterableType[NamespaceIDType], None]
+SingleNamespaceType = Union[int, str, 'Namespace']
+NamespaceArgType = Union[
+ SingleNamespaceType,
+ IterableType[SingleNamespaceType],
+ None,
+]


class BuiltinNamespace(IntEnum):
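
Splitting the alias into ``SingleNamespaceType`` and ``NamespaceArgType`` spells out what the generator methods accept: one namespace (int, str or ``Namespace``), an iterable of those, or ``None`` for no filtering. Illustrative calls (site and title are placeholders):

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')
    page = pywikibot.Page(site, 'Example')

    site.pagebacklinks(page, namespaces=0)           # single int
    site.pagebacklinks(page, namespaces='Talk')      # single str
    site.pagebacklinks(page, namespaces=[0, 1, 2])   # iterable of ids
    site.pagebacklinks(page, namespaces=None)        # default: all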
diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py
index 442f59e..6fc06c9 100755
--- a/scripts/harvest_template.py
+++ b/scripts/harvest_template.py
@@ -116,12 +116,13 @@
import re
import signal
import sys
-from typing import Any, Iterator
+from typing import Any

import pywikibot
from pywikibot import WbTime
from pywikibot import pagegenerators as pg
from pywikibot import textlib
+from pywikibot.backports import Generator
from pywikibot.bot import ConfigParserBot, OptionHandler, WikidataBot
from pywikibot.exceptions import (
APIError,
@@ -375,10 +376,13 @@
if added:
exists_arg.add('p')

- def handle_wikibase_item(self, value: str,
- site: pywikibot.site.BaseSite,
- item: pywikibot.page.ItemPage,
- field: str) -> Iterator[pywikibot.ItemPage]:
+ def handle_wikibase_item(
+ self,
+ value: str,
+ site: pywikibot.site.BaseSite,
+ item: pywikibot.page.ItemPage,
+ field: str,
+ ) -> Generator[pywikibot.ItemPage, None, None]:
"""Handle 'wikibase-item' claim type.

.. versionadded:: 7.5
@@ -409,7 +413,7 @@

def handle_time(self, value: str,
site: pywikibot.site.BaseSite,
- *args) -> Iterator[WbTime]:
+ *args) -> Generator[WbTime, None, None]:
"""Handle 'time' claim type.

.. versionadded:: 7.5
@@ -463,7 +467,7 @@
yield WbTime.fromWikibase(out, self.repo)

@staticmethod
- def handle_string(value, *args) -> Iterator[str]:
+ def handle_string(value: str, *args) -> Generator[str, None, None]:
"""Handle 'string' and 'external-id' claim type.

.. versionadded:: 7.5
@@ -472,7 +476,7 @@

handle_external_id = handle_string

- def handle_url(self, value, *args) -> Iterator[str]:
+ def handle_url(self, value, *args) -> Generator[str, None, None]:
"""Handle 'url' claim type.

.. versionadded:: 7.5
@@ -481,8 +485,11 @@
yield match['url']

@staticmethod
- def handle_commonsmedia(value, site,
- *args) -> Iterator[pywikibot.FilePage]:
+ def handle_commonsmedia(
+ value,
+ site,
+ *args,
+ ) -> Generator[pywikibot.FilePage, None, None]:
"""Handle 'commonsMedia' claim type.

.. versionadded:: 7.5
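
``pywikibot.backports.Generator`` is the project's compatibility alias for the subscriptable generator type, so the handlers can carry ``Generator[...]`` annotations on all supported Python versions. A hypothetical handler in the same shape (not the actual implementation):

    from pywikibot.backports import Generator

    def handle_example(value: str, *args) -> Generator[str, None, None]:
        """Yield the cleaned value, mirroring the handlers above."""
        yield value.strip()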
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index a85c35f..1f376c6 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1828,7 +1828,7 @@
self.generateUntil = until

@property
- def dump_titles(self):
+ def dump_titles(self) -> Iterable[str]:
"""Return generator of titles for dump file."""
return (s.origin.title(as_link=True) for s in self.subjects)

diff --git a/scripts/pagefromfile.py b/scripts/pagefromfile.py
index fdac193..8a8c1a4 100755
--- a/scripts/pagefromfile.py
+++ b/scripts/pagefromfile.py
@@ -72,7 +72,7 @@

import pywikibot
from pywikibot import config, i18n
-from pywikibot.backports import Iterator
+from pywikibot.backports import Generator
from pywikibot.bot import CurrentPageBot, OptionHandler, SingleSiteBot
from pywikibot.pagegenerators import PreloadingGenerator
from pywikibot.tools.collections import GeneratorWrapper
@@ -213,7 +213,7 @@
return page_regex, title_regex

@property
- def generator(self) -> Iterator[pywikibot.Page]:
+ def generator(self) -> Generator[pywikibot.Page, None, None]:
"""Read file and yield a page with content from file.

         Content is stored as a page attribute defined by CTX_ATTR.
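
The ``generator`` property is the hook that ``GeneratorWrapper`` subclasses implement; iterating the wrapper delegates to it. A minimal standalone sketch of the pattern, assuming only the documented ``GeneratorWrapper`` contract:

    from pywikibot.backports import Generator
    from pywikibot.tools.collections import GeneratorWrapper

    class Numbers(GeneratorWrapper):
        """Toy wrapper whose generator yields three ints."""

        @property
        def generator(self) -> Generator[int, None, None]:
            yield from range(3)

    for n in Numbers():   # iteration is delegated to .generator
        print(n)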
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index cd62aec..34b4e19 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -1338,7 +1338,7 @@
def test_querypage_generator_with_invalid_page(self):
"""Test generator of pages with lint errors."""
gf = pagegenerators.GeneratorFactory(site=self.site)
- with self.assertRaises(AssertionError):
+ with self.assertRaises(ValueError):
gf.handle_arg('-querypage:dummy')

def test_querypage_generator_with_no_page(self):
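
The test now matches the library change above: ``querypage()`` raises ``ValueError`` instead of ``AssertionError`` for an unsupported special page, so the check survives ``python -O`` and can be handled normally. Hedged usage sketch:

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')
    try:
        site.querypage('NoSuchSpecialPage', total=5)
    except ValueError as exc:
        print('unsupported special page:', exc)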
diff --git a/tests/site_generators_tests.py b/tests/site_generators_tests.py
index 8141c94..2172160 100755
--- a/tests/site_generators_tests.py
+++ b/tests/site_generators_tests.py
@@ -420,7 +420,7 @@
pages = mysite.querypage('Longpages', total=10)
for p in pages:
self.assertIsInstance(p, pywikibot.Page)
- with self.assertRaises(AssertionError):
+ with self.assertRaises(ValueError):
mysite.querypage('LongpageX')

def test_longpages(self):


Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Icefc70424dd2e16ae1f79b88a2c2e6d8055ae2ce
Gerrit-Change-Number: 987449
Gerrit-PatchSet: 9
Gerrit-Owner: JJMC89 <JJMC89.Wikimedia@gmail.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged