Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1121771?usp=email )
Change subject: [IMPR] no longer pass continue= as with api query in QueryGenerator
......................................................................
[IMPR] no longer pass continue= as with api query in QueryGenerator
- the 'continue' parameter can be omitted in mw >= 1.26
- update documentation for APIGenerator and CachedRequest
Bug: T343204
Change-Id: Iace6f15ab63c3f0b753483ca9cd1230a9bbc9b5c
---
M pywikibot/data/api/_generators.py
M pywikibot/data/api/_requests.py
M tests/site_generators_tests.py
3 files changed, 15 insertions(+), 8 deletions(-)
Approvals:
Xqt: Verified; Looks good to me, approved
diff --git a/pywikibot/data/api/_generators.py b/pywikibot/data/api/_generators.py
index 776dbeb..2e89f36 100644
--- a/pywikibot/data/api/_generators.py
+++ b/pywikibot/data/api/_generators.py
@@ -103,6 +103,12 @@
:param continue_name: Name of the continue API parameter.
:param limit_name: Name of the limit API parameter.
:param data_name: Name of the data in API response.
+ :keyword dict parameters: All parameters passed to request class
usually :class:`api.Request<data.api.Request>` or
+ :class:`api.CachedRequest<data.api.CachedRequest>`. See these
+ classes for further parameter descriptions. The *parameters*
keys can also be given here as keyword parameters but this is
+ not recommended.
"""
kwargs = self._clean_kwargs(kwargs, action=action)
@@ -243,8 +249,6 @@
parameters['indexpageids'] = True # always ask for list of pageids
self.continue_name = 'continue'
- # Explicitly enable the simplified continuation
- parameters['continue'] = True
self.request = self.request_class(**kwargs)
self.site._paraminfo.fetch('query+' + mod for mod in self.modules)
@@ -494,8 +498,8 @@
.. versionchanged:: 8.4
return *None* instead of *False*.
"""
- for key, value in self.data['continue'].items():
- # query-continue can return ints (continue too?)
+ for key, value in self.data[self.continue_name].items():
+ # old query-continue could return ints, continue too?
if isinstance(value, int):
value = str(value)
self.request[key] = value
diff --git a/pywikibot/data/api/_requests.py b/pywikibot/data/api/_requests.py
index 5a0bc9b..b1d5737 100644
--- a/pywikibot/data/api/_requests.py
+++ b/pywikibot/data/api/_requests.py
@@ -1,6 +1,6 @@
"""Objects representing API requests."""
#
-# (C) Pywikibot team, 2007-2024
+# (C) Pywikibot team, 2007-2025
#
# Distributed under the terms of the MIT license.
#
@@ -146,7 +146,8 @@
max_retries: int | None = None,
retry_wait: int | None = None,
use_get: bool | None = None,
- parameters=_PARAM_DEFAULT, **kwargs) -> None:
+ parameters=_PARAM_DEFAULT,
+ **kwargs) -> None:
"""Create a new Request instance with the given parameters.
The parameters for the request can be defined via either the
@@ -1163,7 +1164,10 @@
def __init__(self, expiry, *args, **kwargs) -> None:
"""Initialize a CachedRequest object.
- :param expiry: either a number of days or a datetime.timedelta object
+ :param expiry: either a number of days or a datetime.timedelta
+ object
:param args: Refer to :class:`Request` for positional arguments.
:param kwargs: Refer to :class:`Request` for keyword arguments.
"""
assert expiry is not None
super().__init__(*args, **kwargs)
diff --git a/tests/site_generators_tests.py b/tests/site_generators_tests.py
index 4100026..c16c219 100755
--- a/tests/site_generators_tests.py
+++ b/tests/site_generators_tests.py
@@ -28,7 +28,6 @@
global_expected_params = {
'action': ['query'],
- 'continue': [True],
'iilimit': ['max'],
'iiprop': list(pywikibot.site._IIPROP),
'indexpageids': [True],
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1121771?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Iace6f15ab63c3f0b753483ca9cd1230a9bbc9b5c
Gerrit-Change-Number: 1121771
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1121736?usp=email )
Change subject: Cleanup: Remove Structured Discussions/Flow support
......................................................................
Cleanup: Remove Structured Discussions/Flow support
Structured Discussion (previously known as Flow) support was introduced
to Pywikibot with release 3.0.20170403. But this extension is no longer
under active maintenance for 7 years and people are encouraged to no
longer use it. On most wikis structured discussions are in read-only
mode and archived as subpages. The remaining wikis cawikiquote,
fiwikimedia, gomwiki, kabwiki and ptwikibooks will be processed shortly.
Structured Discussions/Flow support is deprecated since Pywikibot 9.4.
Bug: T381551
Change-Id: I2443b48e1cf3ff77813d9ad8ddfedb1b01d946d0
---
D docs/api_ref/flow.rst
M docs/mwapi.rst
M pywikibot/CONTENT.rst
M pywikibot/data/api/_generators.py
D pywikibot/flow.py
M pywikibot/page/_basepage.py
M pywikibot/site/_apisite.py
M pywikibot/site/_extensions.py
M tests/api_tests.py
9 files changed, 22 insertions(+), 1,146 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/docs/api_ref/flow.rst b/docs/api_ref/flow.rst
deleted file mode 100644
index be9b2a7..0000000
--- a/docs/api_ref/flow.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-***********************************************
-:mod:`flow` --- Structured Discussions Entities
-***********************************************
-
-.. automodule:: flow
- :synopsis: Objects representing Structured Discussions entities, like boards, topics, and posts
diff --git a/docs/mwapi.rst b/docs/mwapi.rst
index 8852829..f8a9748 100644
--- a/docs/mwapi.rst
+++ b/docs/mwapi.rst
@@ -61,10 +61,6 @@
- :meth:`expand_text()<pywikibot.site._apisite.APISite.expand_text>`
- :meth:`BasePage.expand_text()<page.BasePage.expand_text>`
- :meth:`textlib.getCategoryLinks`
- * - :api:`flow<flow>`
- - *deprecated, see below*
- -
- -
* - :api:`login<login>`
- :meth:`login()<pywikibot.site._apisite.APISite.login>`
-
@@ -138,51 +134,3 @@
- :meth:`watch()<pywikibot.site._apisite.APISite.watch>`
- :meth:`BasePage.watch()<page.BasePage.watch>`
-
-
-Flow support
-============
-
-.. deprecated:: 9.4
- will be removed with Pywikibot 12 or earlier (:phab:`T371180`).
-
-.. list-table::
- :header-rows: 1
- :align: left
-
- * - action
- - APISite method
- - flow module method
- * - :api:`flow<flow>`
- - :meth:`load_board()<pywikibot.site._extensions.FlowMixin.load_board>` |br|
- :meth:`load_topiclist()<pywikibot.site._extensions.FlowMixin.load_topiclist>` |br|
- :meth:`load_topic()<pywikibot.site._extensions.FlowMixin.load_topic>` |br|
- :meth:`load_post_current_revision()<pywikibot.site._extensions.FlowMixin.load_post_current_revision>` |br|
- :meth:`create_new_topic()<pywikibot.site._extensions.FlowMixin.create_new_topic>` |br|
- :meth:`reply_to_post()<pywikibot.site._extensions.FlowMixin.reply_to_post>` |br|
- :meth:`lock_topic()<pywikibot.site._extensions.FlowMixin.lock_topic>` |br|
- :meth:`moderate_topic()<pywikibot.site._extensions.FlowMixin.moderate_topic>` |br|
- :meth:`delete_topic()<pywikibot.site._extensions.FlowMixin.delete_topic>` |br|
- :meth:`hide_topic()<pywikibot.site._extensions.FlowMixin.hide_topic>` |br|
- :meth:`suppress_topic()<pywikibot.site._extensions.FlowMixin.suppress_topic>` |br|
- :meth:`restore_topic()<pywikibot.site._extensions.FlowMixin.restore_topic>` |br|
- :meth:`moderate_post()<pywikibot.site._extensions.FlowMixin.moderate_post>` |br|
- :meth:`delete_post()<pywikibot.site._extensions.FlowMixin.delete_post>` |br|
- :meth:`hide_post()<pywikibot.site._extensions.FlowMixin.hide_post>` |br|
- :meth:`suppress_post()<pywikibot.site._extensions.FlowMixin.suppress_post>` |br|
- :meth:`restore_post()<pywikibot.site._extensions.FlowMixin.restore_post>` |br|
- - :meth:`flow.Board.topics` |br|
- :meth:`flow.Topic.create_topic` |br|
- :meth:`flow.Topic.lock` |br|
- :meth:`flow.Topic.unlock` |br|
- :meth:`flow.Topic.delete_mod` |br|
- :meth:`flow.Topic.hide` |br|
- :meth:`flow.Topic.suppress` |br|
- :meth:`flow.Topic.restore` |br|
- :meth:`flow.Post.reply` |br|
- :meth:`flow.Post.delete` |br|
- :meth:`flow.Post.hide` |br|
- :meth:`flow.Post.suppress` |br|
- :meth:`flow.Post.restore` |br|
- * - :api:`flowthank<flowthank>`
- - :meth:`thank_post()<pywikibot.site._extensions.ThanksFlowMixin.thank_post>`
- - :meth:`flow.Post.thank`
diff --git a/pywikibot/CONTENT.rst b/pywikibot/CONTENT.rst
index 4f02a96..2ff8924 100644
--- a/pywikibot/CONTENT.rst
+++ b/pywikibot/CONTENT.rst
@@ -40,8 +40,6 @@
| fixes.py | File containing all standard fixes, stores |
| | predefined replacements used by replace.py |
+----------------------------+------------------------------------------------------+
- | flow.py | Objects representing Flow entities |
- +----------------------------+------------------------------------------------------+
| i18n.py | Helper functions for both the internal translation |
| | system and for TranslateWiki-based translations |
+----------------------------+------------------------------------------------------+
diff --git a/pywikibot/data/api/_generators.py b/pywikibot/data/api/_generators.py
index b27abb1..776dbeb 100644
--- a/pywikibot/data/api/_generators.py
+++ b/pywikibot/data/api/_generators.py
@@ -2,11 +2,10 @@
.. versionchanged:: 7.6
All Objects were changed from Iterable object to a Generator object.
- They are subclassed from
- :class:`tools.collections.GeneratorWrapper`
+ They are subclassed from :class:`tools.collections.GeneratorWrapper`
"""
#
-# (C) Pywikibot team, 2008-2024
+# (C) Pywikibot team, 2008-2025
#
# Distributed under the terms of the MIT license.
#
@@ -1056,9 +1055,6 @@
elif 'preload' in pagedict:
page._preloadedtext = pagedict['preload']
- if 'flowinfo' in pagedict:
- page._flowinfo = pagedict['flowinfo']['flow']
-
if 'lintId' in pagedict:
page._lintinfo = pagedict
page._lintinfo.pop('pageid')
diff --git a/pywikibot/flow.py b/pywikibot/flow.py
deleted file mode 100644
index 34997dd..0000000
--- a/pywikibot/flow.py
+++ /dev/null
@@ -1,631 +0,0 @@
-"""Objects representing Structured Discussions entities.
-
-Structured Discussions was formerly known as Flow. Flow was renamed in
-2017 as the focus was scoped to user-to-user discussions.
-
-.. caution:: Structured Discussions support previously known as Flow is
- no longer tested because the test environment was disabled. Please
- use this module with care.
-.. deprecated:: 9.4
- Structured Discussions extension is not maintained and will be
- removed. Users are encouraged to stop using it. (:phab:`T371180`)
-.. versionremoved:: 10.0
- (:phab:`T381551`)
-.. seealso::
- - https://www.mediawiki.org/wiki/Extension:StructuredDiscussions
- - https://www.mediawiki.org/wiki/Structured_Discussions
- - https://www.mediawiki.org/wiki/Structured_Discussions/Wikis
- - https://www.mediawiki.org/wiki/Structured_Discussions/Deprecation
-"""
-#
-# (C) Pywikibot team, 2015-2024
-#
-# Distributed under the terms of the MIT license.
-#
-from __future__ import annotations
-
-import abc
-import datetime
-from typing import Any
-from urllib.parse import parse_qs, urlparse
-
-import pywikibot
-from pywikibot import config
-from pywikibot.backports import Iterator, Mapping
-from pywikibot.exceptions import (
- LockedPageError,
- NoPageError,
- UnknownExtensionError,
-)
-from pywikibot.page import BasePage, PageSourceType, User
-from pywikibot.tools import (
- SPHINX_RUNNING,
- ModuleDeprecationWrapper,
- cached,
- deprecated_args,
- suppress_warnings,
-)
-from pywikibot.tools._deprecate import _NotImplementedWarning
-
-
-__all__ = (
- 'Board',
- 'FlowPage',
- 'Post',
- 'Topic',
-)
-
-FLOW_WARNING = (r'pywikibot\.site\._extensions\.(Thanks)?FlowMixin\.[a-z_]+ '
- r'is deprecated since release 9\.4\.0\.')
-
-
-class FlowPage(BasePage, abc.ABC):
-
- """The base page meta class for the Flow extension.
-
- Defines Flow page-like object for :class:`Board` and :class:`Topic`.
- It cannot be instantiated directly.
- """
-
- def __init__(self, source: PageSourceType, title: str = '') -> None:
- """Initializer.
-
- :param source: A Flow-enabled site or a Link or Page on such a site
- :param title: normalized title of the page
-
- :raises TypeError: incorrect use of parameters
- :raises ValueError: use of non-Flow-enabled Site
- """
- super().__init__(source, title)
-
- if not self.site.has_extension('Flow'):
- raise UnknownExtensionError('site is not Flow-enabled')
-
- @abc.abstractmethod
- def _load(self, force: bool = False) -> dict[str, Any]:
- """Abstract method to load and cache the Flow data.
-
- Subclasses must overwrite _load() method to load and cache
- the object's internal data from the API.
- """
- raise NotImplementedError
-
- @property
- @cached
- def uuid(self) -> str:
- """Return the UUID of the page.
-
- :return: UUID of the page
- """
- return self._load()['workflowId']
-
- def get(self, force: bool = False, get_redirect: bool = False
- ) -> dict[str, Any]:
- """Get the page's content."""
- if get_redirect or force:
- raise NotImplementedError(
- "Neither 'force' nor 'get_redirect' parameter is implemented "
- f'in {self.__class__.__name__}.get()'
- )
-
- # TODO: Return more useful data
- return getattr(self, '_data', {})
-
-
-class Board(FlowPage):
-
- """A Flow discussion board."""
-
- def _load(self, force: bool = False) -> dict[str, Any]:
- """Load and cache the Board's data, derived from its topic list.
-
- :param force: Whether to force a reload if the data is already loaded
- """
- if not hasattr(self, '_data') or force:
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self._data = self.site.load_board(self)
- return self._data
-
- @staticmethod
- def _parse_url(links: Mapping[str, Any]) -> dict[str, Any]:
- """Parse a URL retrieved from the API."""
- if 'fwd' in links:
- rule = links['fwd']
- elif 'rev' in links:
- rule = links['rev']
- else:
- raise ValueError('Illegal board data (missing required data).')
- parsed_url = urlparse(rule['url'])
- params = parse_qs(parsed_url.query)
- new_params: dict[str, Any] = {}
- for key, value in params.items():
- if key != 'title':
- key = key.replace('topiclist_', '').replace('-', '_')
- if key == 'offset_dir':
- new_params['reverse'] = value == 'rev'
- else:
- new_params[key] = value
- return new_params
-
- @deprecated_args(limit='total') # since 8.0.0
- def topics(self, *,
- content_format: str = 'wikitext',
- total: int | None = None,
- sort_by: str = 'newest',
- offset: str | datetime.datetime | None = None,
- offset_uuid: str = '',
- reverse: bool = False,
- include_offset: bool = False,
- toc_only: bool = False
- ) -> Iterator[Topic]:
- """Load this board's topics.
-
- .. versionchanged:: 8.0
- The *total* parameter was added as a per request limit.
- All parameters are keyword only parameters.
- .. deprecated:: 8.0
- The *limit* parameter. Use ``-step`` global option or
- :ref:`config.step<Settings to Avoid Server Overload>` instead.
-
- :param content_format: The content format to request the data in;
- must be either 'wikitext', 'html', or 'fixed-html'
- :param total: The number of topics to fetch.
- :param sort_by: Algorithm to sort topics by;
- must be either 'newest' or 'updated'
- :param offset: The timestamp to start at (when sortby is 'updated').
- :param offset_uuid: The UUID to start at (when sortby is 'newest').
- :param reverse: Whether to reverse the topic ordering.
- :param include_offset: Whether to include the offset topic.
- :param toc_only: Whether to only include information for the TOC.
- :yield: A generator of this board's topics.
- """
- maxlimit = min(config.step, 100) if config.step > 0 else 100
- request_limit = min(total, maxlimit)
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- data = self.site.load_topiclist(
- self,
- content_format=content_format,
- limit=request_limit,
- sortby=sort_by,
- toconly=toc_only,
- offset=offset,
- offset_id=offset_uuid,
- reverse=reverse,
- include_offset=include_offset
- )
- count = 0
- while data['roots']:
- for root in data['roots']:
- topic = Topic.from_topiclist_data(self, root, data)
- yield topic
-
- count += 1
- if count >= total:
- return
-
- continue_args = self._parse_url(data['links']['pagination'])
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- data = self.site.load_topiclist(self, **continue_args)
-
- def new_topic(self, title: str, content: str,
- content_format: str = 'wikitext') -> Topic:
- """Create and return a Topic object for a new topic on this Board.
-
- :param title: The title of the new topic (must be in plaintext)
- :param content: The content of the topic's initial post
- :param content_format: The content format of the supplied content;
- either 'wikitext' or 'html'
- :return: The new topic
- """
- return Topic.create_topic(self, title, content, content_format)
-
-
-class Topic(FlowPage):
-
- """A Flow discussion topic."""
-
- def _load(self, force: bool = False, content_format: str = 'wikitext'
- ) -> dict[str, Any]:
- """Load and cache the Topic's data.
-
- :param force: Whether to force a reload if the data is already loaded
- :param content_format: The post format in which to load
- """
- if not hasattr(self, '_data') or force:
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self._data = self.site.load_topic(self, content_format)
- return self._data
-
- def _reload(self) -> None:
- """Forcibly reload the topic's root post."""
- self.root._load(load_from_topic=True)
-
- @classmethod
- def create_topic(cls, board: Board, title: str,
- content: str, content_format: str = 'wikitext'
- ) -> Topic:
- """Create and return a Topic object for a new topic on a Board.
-
- :param board: The topic's parent board
- :param title: The title of the new topic (must be in plaintext)
- :param content: The content of the topic's initial post
- :param content_format: The content format of the supplied content;
- either 'wikitext' or 'html'
- :return: The new topic
- """
- data = board.site.create_new_topic(board, title, content,
- content_format)
- return cls(board.site, data['topic-page'])
-
- @classmethod
- def from_topiclist_data(cls, board: Board,
- root_uuid: str,
- topiclist_data: dict[str, Any]) -> Topic:
- """Create a Topic object from API data.
-
- :param board: The topic's parent Flow board
- :param root_uuid: The UUID of the topic and its root post
- :param topiclist_data: The data returned by view-topiclist
- :return: A Topic object derived from the supplied data
- :raises TypeError: any passed parameters have wrong types
- :raises ValueError: the passed topiclist_data is missing required data
- """
- if not isinstance(board, Board):
- raise TypeError('board must be a pywikibot.flow.Board object.')
- if not isinstance(root_uuid, str):
- raise TypeError('Topic/root UUID must be a string.')
-
- topic = cls(board.site, 'Topic:' + root_uuid)
- topic._root = Post.fromJSON(topic, root_uuid, topiclist_data)
- topic._uuid = root_uuid
- return topic
-
- @property
- def root(self) -> Post:
- """The root post of this topic."""
- if not hasattr(self, '_root'):
- self._root = Post.fromJSON(self, self.uuid, self._data)
- return self._root
-
- @property
- def is_locked(self) -> bool:
- """Whether this topic is locked."""
- return self.root._current_revision['isLocked']
-
- @property
- def is_moderated(self) -> bool:
- """Whether this topic is moderated."""
- return self.root._current_revision['isModerated']
-
- def replies(self, content_format: str = 'wikitext', force: bool = False
- ) -> list[Post]:
- """A list of replies to this topic's root post.
-
- :param content_format: Content format to return contents in;
- must be 'wikitext', 'html', or 'fixed-html'
- :param force: Whether to reload from the API instead of using the cache
- :return: The replies of this topic's root post
- """
- return self.root.replies(content_format=content_format, force=force)
-
- def reply(self, content: str, content_format: str = 'wikitext') -> Post:
- """A convenience method to reply to this topic's root post.
-
- :param content: The content of the new post
- :param content_format: The format of the given content;
- must be 'wikitext' or 'html')
- :return: The new reply to this topic's root post
- """
- return self.root.reply(content, content_format)
-
- # Moderation
- def lock(self, reason: str) -> None:
- """Lock this topic.
-
- :param reason: The reason for locking this topic
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.lock_topic(self, True, reason)
- self._reload()
-
- def unlock(self, reason: str) -> None:
- """Unlock this topic.
-
- :param reason: The reason for unlocking this topic
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.lock_topic(self, False, reason)
- self._reload()
-
- def delete_mod(self, reason: str) -> None:
- """Delete this topic through the Flow moderation system.
-
- :param reason: The reason for deleting this topic.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.delete_topic(self, reason)
- self._reload()
-
- def hide(self, reason: str) -> None:
- """Hide this topic.
-
- :param reason: The reason for hiding this topic.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.hide_topic(self, reason)
- self._reload()
-
- def suppress(self, reason: str) -> None:
- """Suppress this topic.
-
- :param reason: The reason for suppressing this topic.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.suppress_topic(self, reason)
- self._reload()
-
- def restore(self, reason: str) -> None:
- """Restore this topic.
-
- :param reason: The reason for restoring this topic.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.restore_topic(self, reason)
- self._reload()
-
- def summary(self) -> str | None:
- """Get this topic summary, if any.
-
- :return: summary or None
- """
- if 'summary' in self.root._current_revision:
- return self.root._current_revision['summary']['revision'][
- 'content']['content']
- return None
-
- def summarize(self, summary: str) -> None:
- """Summarize this topic.
-
- :param summary: The summary that will be added to the topic.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.summarize_topic(self, summary)
- self._reload()
-
-
-class Post:
-
- """A post to a Flow discussion topic. This is a non-page-like object."""
-
- def __init__(self, page: Topic, uuid: str) -> None:
- """Initializer.
-
- :param page: Flow topic
- :param uuid: UUID of a Flow post
-
- :raises TypeError: incorrect types of parameters
- """
- if not isinstance(page, Topic):
- raise TypeError('Page must be a Topic object')
- if not page.exists():
- raise NoPageError(page, 'Topic must exist: %s')
- if not isinstance(uuid, str):
- raise TypeError('Post UUID must be a string')
-
- self._page = page
- self._uuid = uuid
-
- self._content: dict[str, Any] = {}
-
- @classmethod
- def fromJSON(cls, page: Topic, post_uuid: str, # noqa: N802
- data: dict[str, Any]) -> Post:
- """Create a Post object using the data returned from the API call.
-
- :param page: A Flow topic
- :param post_uuid: The UUID of the post
- :param data: The JSON data returned from the API
-
- :return: A Post object
- :raises TypeError: data is not a dict
- :raises ValueError: data is missing required entries
- """
- post = cls(page, post_uuid)
- post._set_data(data)
-
- return post
-
- def _set_data(self, data: dict[str, Any]) -> None:
- """Set internal data and cache content.
-
- :param data: The data to store internally
- :raises TypeError: data is not a dict
- :raises ValueError: missing data entries or post/revision not found
- """
- if not isinstance(data, dict):
- raise TypeError('Illegal post data (must be a dictionary).')
- if ('posts' not in data) or ('revisions' not in data):
- raise ValueError('Illegal post data (missing required data).')
- if self.uuid not in data['posts']:
- raise ValueError('Post not found in supplied data.')
-
- current_revision_id = data['posts'][self.uuid][0]
- if current_revision_id not in data['revisions']:
- raise ValueError('Current revision of post'
- 'not found in supplied data.')
-
- self._current_revision = data['revisions'][current_revision_id]
- if 'content' in self._current_revision:
- content = self._current_revision.pop('content')
- assert isinstance(content, dict)
- assert isinstance(content['content'], str)
- self._content[content['format']] = content['content']
-
- def _load(self, force: bool = True, content_format: str = 'wikitext',
- load_from_topic: bool = False) -> dict[str, Any]:
- """Load and cache the Post's data using the given content format.
-
- :param load_from_topic: Whether to load the post from the whole topic
- """
- if load_from_topic:
- data = self.page._load(force=force, content_format=content_format)
- else:
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- data = self.site.load_post_current_revision(
- self.page, self.uuid, content_format)
- self._set_data(data)
- return self._current_revision
-
- @property
- def uuid(self) -> str:
- """Return the UUID of the post.
-
- :return: UUID of the post
- """
- return self._uuid
-
- @property
- def site(self) -> pywikibot.site.BaseSite:
- """Return the site associated with the post.
-
- :return: Site associated with the post
- """
- return self._page.site
-
- @property
- def page(self) -> Topic:
- """Return the page associated with the post.
-
- :return: Page associated with the post
- """
- return self._page
-
- @property
- def is_moderated(self) -> bool:
- """Whether this post is moderated."""
- if not hasattr(self, '_current_revision'):
- self._load()
- return self._current_revision['isModerated']
-
- @property
- def creator(self) -> User:
- """The creator of this post."""
- if not hasattr(self, '_current_revision'):
- self._load()
- if not hasattr(self, '_creator'):
- self._creator = User(self.site,
- self._current_revision['creator']['name'])
- return self._creator
-
- def get(self, content_format: str = 'wikitext',
- force: bool = False) -> str:
- """Return the contents of the post in the given format.
-
- :param force: Whether to reload from the API instead of using the cache
- :param content_format: Content format to return contents in
- :return: The contents of the post in the given content format
- """
- if content_format not in self._content or force:
- self._load(content_format=content_format)
- return self._content[content_format]
-
- def replies(self, content_format: str = 'wikitext', force: bool = False
- ) -> list[Post]:
- """Return this post's replies.
-
- :param content_format: Content format to return contents in;
- must be 'wikitext', 'html', or 'fixed-html'
- :param force: Whether to reload from the API instead of using the cache
- :return: This post's replies
- """
- if content_format not in ('wikitext', 'html', 'fixed-html'):
- raise ValueError('Invalid content format.')
-
- if hasattr(self, '_replies') and not force:
- return self._replies # type: ignore[has-type]
-
- # load_from_topic workaround due to T106733
- # (replies not returned by view-post)
- if not hasattr(self, '_current_revision') or force:
- self._load(content_format=content_format, load_from_topic=True)
-
- reply_uuids = self._current_revision['replies']
- self._replies = [Post(self.page, uuid) for uuid in reply_uuids]
-
- return self._replies
-
- def reply(self, content: str, content_format: str = 'wikitext') -> Post:
- """Reply to this post.
-
- :param content: The content of the new post
- :param content_format: The format of the given content;
- must be 'wikitext' or 'html'
- :return: The new reply post
- """
- self._load()
- if self.page.is_locked:
- raise LockedPageError(self.page, 'Topic %s is locked.')
-
- reply_url = self._current_revision['actions']['reply']['url']
- parsed_url = urlparse(reply_url)
- params = parse_qs(parsed_url.query)
- reply_to = params['topic_postId']
- if self.uuid == reply_to:
- del self._current_revision
- del self._replies
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- data = self.site.reply_to_post(self.page, reply_to, content,
- content_format)
- return Post(self.page, data['post-id'])
-
- # Moderation
- def delete(self, reason: str) -> None:
- """Delete this post through the Flow moderation system.
-
- :param reason: The reason for deleting this post.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.delete_post(self, reason)
- self._load()
-
- def hide(self, reason: str) -> None:
- """Hide this post.
-
- :param reason: The reason for hiding this post.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.hide_post(self, reason)
- self._load()
-
- def suppress(self, reason: str) -> None:
- """Suppress this post.
-
- :param reason: The reason for suppressing this post.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.suppress_post(self, reason)
- self._load()
-
- def restore(self, reason: str) -> None:
- """Restore this post.
-
- :param reason: The reason for restoring this post.
- """
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.restore_post(self, reason)
- self._load()
-
- def thank(self) -> None:
- """Thank the user who made this post."""
- with suppress_warnings(FLOW_WARNING, _NotImplementedWarning):
- self.site.thank_post(self)
-
-
-if not SPHINX_RUNNING:
- wrapper = ModuleDeprecationWrapper(__name__)
- for cls in __all__:
- wrapper.add_deprecated_attr(
- cls,
- replacement_name='',
- since='9.4.0'
- )
diff --git a/pywikibot/page/_basepage.py b/pywikibot/page/_basepage.py
index 10b6950..f61408f 100644
--- a/pywikibot/page/_basepage.py
+++ b/pywikibot/page/_basepage.py
@@ -1,6 +1,6 @@
"""Objects representing a base object for a MediaWiki page."""
#
-# (C) Pywikibot team, 2008-2024
+# (C) Pywikibot team, 2008-2025
#
# Distributed under the terms of the MIT license.
#
@@ -64,15 +64,16 @@
reading from or writing to the wiki. All other methods are delegated
to the Site object.
- Will be subclassed by Page, WikibasePage, and FlowPage.
+ Will be subclassed by :class:`pywikibot.Page` and
+ :class:`pywikibot.page.WikibasePage`.
"""
_cache_attrs = (
- '_text', '_pageid', '_catinfo', '_templates', '_protection',
- '_contentmodel', '_langlinks', '_isredir', '_coords',
- '_preloadedtext', '_timestamp', '_applicable_protections',
- '_flowinfo', '_quality', '_pageprops', '_revid', '_quality_text',
- '_pageimage', '_item', '_lintinfo', '_imageforpage',
+ '_applicable_protections', '_catinfo', '_contentmodel', '_coords',
+ '_imageforpage', '_isredir', '_item', '_langlinks', '_lintinfo',
+ '_pageid', '_pageimage', '_pageprops', '_preloadedtext', '_protection',
+ '_quality', '_quality_text', '_revid', '_templates', '_text',
+ '_timestamp',
)
def __init__(self, source, title: str = '', ns=0) -> None:
@@ -2295,7 +2296,13 @@
return False
def is_flow_page(self) -> bool:
- """Whether a page is a Flow page."""
+ """Whether a page is a Flow page.
+
+ .. attention::
+ Structured Discussion/Flow support was deprecated in 9.4 and
+ removed in Pywikibot 10. This method is kept to detect
+ unsupported content.
+ """
return self.content_model == 'flow-board'
def create_short_link(self,
diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py
index 089adee..9480955 100644
--- a/pywikibot/site/_apisite.py
+++ b/pywikibot/site/_apisite.py
@@ -1,6 +1,6 @@
"""Objects representing API interface to MediaWiki site."""
#
-# (C) Pywikibot team, 2008-2024
+# (C) Pywikibot team, 2008-2025
#
# Distributed under the terms of the MIT license.
#
@@ -54,14 +54,12 @@
from pywikibot.site._decorators import need_right
from pywikibot.site._extensions import (
EchoMixin,
- FlowMixin,
GeoDataMixin,
GlobalUsageMixin,
LinterMixin,
PageImagesMixin,
ProofreadPageMixin,
TextExtractsMixin,
- ThanksFlowMixin,
ThanksMixin,
UrlShortenerMixin,
WikibaseClientMixin,
@@ -104,7 +102,6 @@
class APISite(
BaseSite,
EchoMixin,
- FlowMixin,
GeneratorsMixin,
GeoDataMixin,
GlobalUsageMixin,
@@ -112,7 +109,6 @@
PageImagesMixin,
ProofreadPageMixin,
TextExtractsMixin,
- ThanksFlowMixin,
ThanksMixin,
UrlShortenerMixin,
WikibaseClientMixin,
diff --git a/pywikibot/site/_extensions.py b/pywikibot/site/_extensions.py
index 22872fa..4e1bd4f 100644
--- a/pywikibot/site/_extensions.py
+++ b/pywikibot/site/_extensions.py
@@ -1,13 +1,11 @@
"""Objects representing API interface to MediaWiki site extensions."""
#
-# (C) Pywikibot team, 2008-2024
+# (C) Pywikibot team, 2008-2025
#
# Distributed under the terms of the MIT license.
#
from __future__ import annotations
-from typing import Any
-
import pywikibot
from pywikibot.data import api
from pywikibot.echo import Notification
@@ -18,8 +16,8 @@
NoPageError,
SiteDefinitionError,
)
-from pywikibot.site._decorators import need_extension, need_right
-from pywikibot.tools import deprecated, merge_unique_dicts
+from pywikibot.site._decorators import need_extension
+from pywikibot.tools import merge_unique_dicts
class EchoMixin:
@@ -351,410 +349,6 @@
return data
-class ThanksFlowMixin:
-
- """APISite mixin for Thanks and Structured Discussions extension.
-
- .. deprecated:: 9.4.0
- Structured Discussions extension formerly known as Flow
- extension is not maintained and will be removed. Users are
- encouraged to stop using it. (:phab:`T371180`)
- .. seealso:: :mod:`flow`
- """
-
- @need_extension('Flow')
- @need_extension('Thanks')
- @deprecated(since='9.4.0')
- def thank_post(self, post):
- """Corresponding method to the 'action=flowthank' API action.
-
- :param post: The post to be thanked for.
- :type post: Post
- :raise APIError: On thanking oneself or other API errors.
- :return: The API response.
- """
- post_id = post.uuid
- token = self.tokens['csrf']
- req = self.simple_request(action='flowthank', postid=post_id,
- token=token)
- data = req.submit()
- if data['result']['success'] != 1:
- raise APIError('Thanking unsuccessful', '')
- return data
-
-
-class FlowMixin:
-
- """APISite mixin for Structured Discussions extension.
-
- .. deprecated:: 9.4.0
- Structured Discussions extension formerly known as Flow
- extension is not maintained and will be removed. Users are
- encouraged to stop using it. (:phab:`T371180`)
- .. seealso:: :mod:`flow`
- """
-
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def load_board(self, page):
- """Retrieve the data for a Flow board.
-
- :param page: A Flow board
- :type page: Board
- :return: A dict representing the board's metadata.
- :rtype: dict
- """
- req = self.simple_request(action='flow', page=page,
- submodule='view-topiclist', vtllimit=1)
- data = req.submit()
- return data['flow']['view-topiclist']['result']['topiclist']
-
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def load_topiclist(self,
- page: pywikibot.flow.Board,
- *,
- content_format: str = 'wikitext',
- limit: int = 100,
- sortby: str = 'newest',
- toconly: bool = False,
- offset: pywikibot.Timestamp | str | None = None,
- offset_id: str | None = None,
- reverse: bool = False,
- include_offset: bool = False) -> dict[str, Any]:
- """Retrieve the topiclist of a Flow board.
-
- .. versionchanged:: 8.0
- All parameters except *page* are keyword only parameters.
-
- :param page: A Flow board
- :param content_format: The content format to request the data in.
- must be either 'wikitext', 'html', or 'fixed-html'
- :param limit: The number of topics to fetch in each single request.
- :param sortby: Algorithm to sort topics by ('newest' or 'updated').
- :param toconly: Whether to only include information for the TOC.
- :param offset: The timestamp to start at (when sortby is 'updated').
- :param offset_id: The topic UUID to start at (when sortby is 'newest').
- :param reverse: Whether to reverse the topic ordering.
- :param include_offset: Whether to include the offset topic.
- :return: A dict representing the board's topiclist.
- """
- if offset:
- offset = pywikibot.Timestamp.fromtimestampformat(offset)
- offset_dir = 'rev' if reverse else 'fwd'
-
- params = {'action': 'flow', 'submodule': 'view-topiclist',
- 'page': page,
- 'vtlformat': content_format, 'vtlsortby': sortby,
- 'vtllimit': limit, 'vtloffset-dir': offset_dir,
- 'vtloffset': offset, 'vtloffset-id': offset_id,
- 'vtlinclude-offset': include_offset, 'vtltoconly': toconly}
- req = self._request(parameters=params)
- data = req.submit()
- return data['flow']['view-topiclist']['result']['topiclist']
-
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def load_topic(self, page, content_format: str):
- """Retrieve the data for a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param content_format: The content format to request the data in.
- Must ne either 'wikitext', 'html', or 'fixed-html'
- :return: A dict representing the topic's data.
- :rtype: dict
- """
- req = self.simple_request(action='flow', page=page,
- submodule='view-topic',
- vtformat=content_format)
- data = req.submit()
- return data['flow']['view-topic']['result']['topic']
-
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def load_post_current_revision(self, page, post_id, content_format: str):
- """Retrieve the data for a post to a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param post_id: The UUID of the Post
- :type post_id: str
- :param content_format: The content format used for the returned
- content; must be either 'wikitext', 'html', or 'fixed-html'
- :return: A dict representing the post data for the given UUID.
- :rtype: dict
- """
- req = self.simple_request(action='flow', page=page,
- submodule='view-post', vppostId=post_id,
- vpformat=content_format)
- data = req.submit()
- return data['flow']['view-post']['result']['topic']
-
- @need_right('edit')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def create_new_topic(self, page, title, content, content_format):
- """Create a new topic on a Flow board.
-
- :param page: A Flow board
- :type page: Board
- :param title: The title of the new topic (must be in plaintext)
- :type title: str
- :param content: The content of the topic's initial post
- :type content: str
- :param content_format: The content format of the supplied content
- :type content_format: str (either 'wikitext' or 'html')
- :return: The metadata of the new topic
- :rtype: dict
- """
- token = self.tokens['csrf']
- params = {'action': 'flow', 'page': page, 'token': token,
- 'submodule': 'new-topic', 'ntformat': content_format,
- 'nttopic': title, 'ntcontent': content}
- req = self._request(parameters=params, use_get=False)
- data = req.submit()
- return data['flow']['new-topic']['committed']['topiclist']
-
- @need_right('edit')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def reply_to_post(self, page, reply_to_uuid: str, content: str,
- content_format: str) -> dict:
- """Reply to a post on a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param reply_to_uuid: The UUID of the Post to create a reply to
- :param content: The content of the reply
- :param content_format: The content format used for the supplied
- content; must be either 'wikitext' or 'html'
- :return: Metadata returned by the API
- """
- token = self.tokens['csrf']
- params = {'action': 'flow', 'page': page, 'token': token,
- 'submodule': 'reply', 'repreplyTo': reply_to_uuid,
- 'repcontent': content, 'repformat': content_format}
- req = self._request(parameters=params, use_get=False)
- data = req.submit()
- return data['flow']['reply']['committed']['topic']
-
- @need_right('flow-lock')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def lock_topic(self, page, lock, reason):
- """Lock or unlock a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param lock: Whether to lock or unlock the topic
- :type lock: bool (True corresponds to locking the topic.)
- :param reason: The reason to lock or unlock the topic
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- status = 'lock' if lock else 'unlock'
- token = self.tokens['csrf']
- params = {'action': 'flow', 'page': page, 'token': token,
- 'submodule': 'lock-topic', 'cotreason': reason,
- 'cotmoderationState': status}
- req = self._request(parameters=params, use_get=False)
- data = req.submit()
- return data['flow']['lock-topic']['committed']['topic']
-
- @need_right('edit')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def moderate_topic(self, page, state, reason):
- """Moderate a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param state: The new moderation state
- :type state: str
- :param reason: The reason to moderate the topic
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- token = self.tokens['csrf']
- params = {'action': 'flow', 'page': page, 'token': token,
- 'submodule': 'moderate-topic', 'mtreason': reason,
- 'mtmoderationState': state}
- req = self._request(parameters=params, use_get=False)
- data = req.submit()
- return data['flow']['moderate-topic']['committed']['topic']
-
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def summarize_topic(self, page, summary):
- """Add summary to Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param summary: The text of the summary
- :type symmary: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- token = self.tokens['csrf']
- params = {'action': 'flow', 'page': page, 'token': token,
- 'submodule': 'edit-topic-summary', 'etssummary': summary,
- 'etsformat': 'wikitext'}
- if 'summary' in page.root._current_revision:
- params['etsprev_revision'] = page.root._current_revision[
- 'summary']['revision']['revisionId']
- req = self._request(parameters=params, use_get=False)
- data = req.submit()
- return data['flow']['edit-topic-summary']['committed']['topicsummary']
-
- @need_right('flow-delete')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def delete_topic(self, page, reason):
- """Delete a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param reason: The reason to delete the topic
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- return self.moderate_topic(page, 'delete', reason)
-
- @need_right('flow-hide')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def hide_topic(self, page, reason):
- """Hide a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param reason: The reason to hide the topic
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- return self.moderate_topic(page, 'hide', reason)
-
- @need_right('flow-suppress')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def suppress_topic(self, page, reason):
- """Suppress a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param reason: The reason to suppress the topic
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- return self.moderate_topic(page, 'suppress', reason)
-
- @need_right('edit')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def restore_topic(self, page, reason):
- """Restore a Flow topic.
-
- :param page: A Flow topic
- :type page: Topic
- :param reason: The reason to restore the topic
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- return self.moderate_topic(page, 'restore', reason)
-
- @need_right('edit')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def moderate_post(self, post, state, reason):
- """Moderate a Flow post.
-
- :param post: A Flow post
- :type post: Post
- :param state: The new moderation state
- :type state: str
- :param reason: The reason to moderate the topic
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- page = post.page
- uuid = post.uuid
- token = self.tokens['csrf']
- params = {'action': 'flow', 'page': page, 'token': token,
- 'submodule': 'moderate-post', 'mpreason': reason,
- 'mpmoderationState': state, 'mppostId': uuid}
- req = self._request(parameters=params, use_get=False)
- data = req.submit()
- return data['flow']['moderate-post']['committed']['topic']
-
- @need_right('flow-delete')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def delete_post(self, post, reason):
- """Delete a Flow post.
-
- :param post: A Flow post
- :type post: Post
- :param reason: The reason to delete the post
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- return self.moderate_post(post, 'delete', reason)
-
- @need_right('flow-hide')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def hide_post(self, post, reason):
- """Hide a Flow post.
-
- :param post: A Flow post
- :type post: Post
- :param reason: The reason to hide the post
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- return self.moderate_post(post, 'hide', reason)
-
- @need_right('flow-suppress')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def suppress_post(self, post, reason):
- """Suppress a Flow post.
-
- :param post: A Flow post
- :type post: Post
- :param reason: The reason to suppress the post
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- return self.moderate_post(post, 'suppress', reason)
-
- @need_right('edit')
- @need_extension('Flow')
- @deprecated(since='9.4.0')
- def restore_post(self, post, reason):
- """Restore a Flow post.
-
- :param post: A Flow post
- :type post: Post
- :param reason: The reason to restore the post
- :type reason: str
- :return: Metadata returned by the API
- :rtype: dict
- """
- return self.moderate_post(post, 'restore', reason)
-
-
class UrlShortenerMixin:
"""APISite mixin for UrlShortener extension."""
diff --git a/tests/api_tests.py b/tests/api_tests.py
index 8c21f4d..0f3a7ba 100755
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""API test module."""
#
-# (C) Pywikibot team, 2007-2024
+# (C) Pywikibot team, 2007-2025
#
# Distributed under the terms of the MIT license.
#
@@ -270,32 +270,6 @@
self.assertEqual(value, '')
-class TestOtherSubmodule(TestCase):
-
- """Test handling multiple different modules having submodules."""
-
- family = 'mediawiki'
- code = 'mediawiki'
-
- def test_other_submodule(self):
- """Test another module apart from query having submodules."""
- pi = api.ParamInfo(self.site)
- self.assertFalse(pi._modules)
- pi.fetch(['query'])
- self.assertNotIn('flow', pi._modules)
- pi.fetch(['flow'])
- self.assertIn('flow', pi._modules)
- other_modules = set()
- for modules in pi._modules.values():
- self.assertIsInstance(modules, set)
- other_modules |= modules
-
- other_modules -= pi.action_modules
- other_modules -= pi.query_modules
- self.assertLessEqual(other_modules & pi.submodules('flow'),
- pi.submodules('flow'))
-
-
class TestParaminfoModules(DefaultSiteTestCase):
"""Test loading all paraminfo modules."""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1121736?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I2443b48e1cf3ff77813d9ad8ddfedb1b01d946d0
Gerrit-Change-Number: 1121736
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-CC: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-CC: Multichill <maarten(a)mdammers.nl>
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1121419?usp=email )
Change subject: [bugfix] Enable url without api, requests or script path in Site constructor
......................................................................
[bugfix] Enable url without api, requests or script path in Site constructor
Enable url with empty path of urllib.parse.ParseResult in Site
constructor and Family.from_url method. This means that
//commons.wikimedia.org is a valid url for them. The previous
implementation requires a valid path e.g. //meta.wikimedia.org/w/api.php
with apipath.
Therefore if path of the URL scheme is empty, do not check for any
site._interwiki_urls() which holds all possible paths.
Some tests added.
Bug: T386665
Change-Id: Icd538fe05005e02d1061e30b8e160018218fe986
---
M pywikibot/__init__.py
M pywikibot/family.py
M tests/basesite_tests.py
M tests/family_tests.py
4 files changed, 48 insertions(+), 21 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index 2f6c4c0..9449596 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -171,6 +171,9 @@
.. versionchanged:: 7.3
Short creation if site code is equal to family name like
`Site('commons')`, `Site('meta')` or `Site('wikidata')`.
+ .. versionchanged:: 10.0
+ *url* does not have to contain an api, requests or script path
+ any longer.
:param code: language code (override config.mylang)
code may also be a sitename like 'wikipedia:test'
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 70d1934..77e8da6 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -1,6 +1,6 @@
"""Objects representing MediaWiki families."""
#
-# (C) Pywikibot team, 2004-2024
+# (C) Pywikibot team, 2004-2025
#
# Distributed under the terms of the MIT license.
#
@@ -579,23 +579,27 @@
def from_url(self, url: str) -> str | None:
"""Return whether this family matches the given url.
- It is first checking if a domain of this family is in the domain of
- the URL. If that is the case it's checking all codes and verifies that
- a path generated via
- :py:obj:`APISite.articlepath<pywikibot.site.APISite.articlepath>` and
- :py:obj:`Family.path` matches the path of the URL together with
- the hostname for that code.
+ It is first checking if a domain of this family is in the domain
+ of the URL. If that is the case it's checking all codes and
+ verifies that a path generated via :attr:`APISite.articlepath
+ <pywikibot.site.APISite.articlepath>` and :attr:`Family.path`
+ matches the path of the URL together with the hostname for that
+ code.
- It is using :py:obj:`Family.domains` to first check if a domain
- applies and then iterates over :py:obj:`Family.codes` to actually
+ It is using :attr:`Family.domains` to first check if a domain
+ applies and then iterates over :attr:`Family.codes` to actually
determine which code applies.
- :param url: the URL which may contain a ``$1``. If it's missing it is
- assumed to be at the end.
- :return: The language code of the url. None if that url is not from
- this family.
- :raises RuntimeError: When there are multiple languages in this family
- which would work with the given URL.
+ .. versionchanged:: 10.0
+ *url* parameter does not have to contain an api/query/script
+ path
+
+ :param url: the URL which may contain a ``$1``. If it's missing
+ it is assumed to be at the end.
+ :return: The language code of the URL. None if that URL is not
+ from this family.
+ :raises RuntimeError: When there are multiple languages in this
+ family which would work with the given URL.
"""
parsed = urlparse.urlparse(url)
if parsed.scheme not in {'http', 'https', ''}:
@@ -625,6 +629,9 @@
site = pywikibot.Site(code, self.name)
pywikibot.log(f'Found candidate {site}')
+ if not path:
+ return site.code
+
for iw_url in site._interwiki_urls():
iw_url, *_ = iw_url.partition('{}')
if path.startswith(iw_url):
diff --git a/tests/basesite_tests.py b/tests/basesite_tests.py
index 1c7b497..eae3719 100755
--- a/tests/basesite_tests.py
+++ b/tests/basesite_tests.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""Tests for the site module."""
#
-# (C) Pywikibot team, 2008-2024
+# (C) Pywikibot team, 2008-2025
#
# Distributed under the terms of the MIT license.
#
@@ -129,6 +129,20 @@
self.assertTrue(mysite.sametitle('MediaWiki:Always',
'MediaWiki:always'))
+ def test_site_with_url(self):
+ """Test site constructor with url."""
+ for fam in ('commons', 'meta'):
+ with self.subTest(family=fam):
+ site = pywikibot.Site(url=f'https://{fam}.wikimedia.org')
+ self.assertEqual(site, pywikibot.Site(fam))
+ self.assertEqual(site.family.name, fam)
+ self.assertEqual(site.code, fam)
+
+ site = pywikibot.Site(url='https://fr.wikipedia.org')
+ self.assertEqual(site, pywikibot.Site('wikipedia:fr'))
+ self.assertEqual(site.family.name, 'wikipedia')
+ self.assertEqual(site.code, 'fr')
+
if __name__ == '__main__':
with suppress(SystemExit):
diff --git a/tests/family_tests.py b/tests/family_tests.py
index 68e52ba..3bfa9ac 100755
--- a/tests/family_tests.py
+++ b/tests/family_tests.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""Tests for the family module."""
#
-# (C) Pywikibot team, 2014-2024
+# (C) Pywikibot team, 2014-2025
#
# Distributed under the terms of the MIT license.
#
@@ -168,8 +168,8 @@
super().setUp()
self.articlepath = '/wiki/$1'
- def test_from_url_wikipedia_extra(self):
- """Test various URLs against wikipedia regex."""
+ def test_from_url(self):
+ """Test various URLs for Family.from_url."""
self.current_code = 'vo'
self.current_family = 'wikipedia'
@@ -181,6 +181,8 @@
self.assertEqual(f.from_url(prefix + '/w/index.php'), 'vo')
self.assertEqual(f.from_url(prefix + '/w/index.php/'), 'vo')
self.assertEqual(f.from_url(prefix + '/w/index.php?title=$1'), 'vo')
+ # url without scripts/api path
+ self.assertEqual(f.from_url(prefix), 'vo')
self.assertEqual(f.from_url(prefix + '/wiki/$1'), 'vo')
self.assertEqual(f.from_url('//vo.wikipedia.org/wiki/$1'), 'vo')
@@ -218,10 +220,11 @@
family = Family.load(family)
for code in family.codes:
self.current_code = code
- url = (f'{family.protocol(code)}://{family.hostname(code)}'
- f'{family.path(code)}/$1')
+ url = f'{family.protocol(code)}://{family.hostname(code)}'
+ url_with_path = url + f'{family.path(code)}/$1'
with self.subTest(url=url):
self.assertEqual(family.from_url(url), code)
+ self.assertEqual(family.from_url(url_with_path), code)
if __name__ == '__main__':
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1121419?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Icd538fe05005e02d1061e30b8e160018218fe986
Gerrit-Change-Number: 1121419
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-CC: Tacsipacsi <tacsipacsi(a)jnet.hu>
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1122287?usp=email )
Change subject: [bugfix] ensure that -from and -to arguments are not equal
......................................................................
[bugfix] ensure that -from and -to arguments are not equal
verify that -from and -to arguments are not equal for move action.
otherwise ask for new values. If the given answer for -from is empty,
leave the script.
Bug: T384753
Change-Id: Ib23dba9d8e69571b272ce460312c7ca79baa3922
---
M scripts/category.py
1 file changed, 19 insertions(+), 7 deletions(-)
Approvals:
Matěj Suchánek: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/category.py b/scripts/category.py
index 15200be..9b758c6 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -153,7 +153,7 @@
:mod:`pagegenerators` are supported with "move" and "remove" action.
"""
#
-# (C) Pywikibot team, 2004-2024
+# (C) Pywikibot team, 2004-2025
#
# Distributed under the terms of the MIT license.
#
@@ -1638,12 +1638,24 @@
deletion_comment=use_deletion_summary,
generator=gen)
elif action == 'move':
- if 'from' not in options:
- options['from'] = pywikibot.input(
- 'Please enter the old name of the category:')
- if 'to' not in options:
- options['to'] = pywikibot.input(
- 'Please enter the new name of the category:')
+ while True:
+ if 'from' not in options:
+ options['from'] = pywikibot.input(
+ 'Please enter the old name of the category:')
+ if not options['from']:
+ return
+
+ if 'to' not in options:
+ options['to'] = pywikibot.input(
+ 'Please enter the new name of the category:')
+
+ if options['from'] != options['to']:
+ break
+
+ pywikibot.error('-from and -to arguments are equal, please retry.')
+ del options['from']
+ del options['to']
+
if use_deletion_summary:
deletion_comment = \
CategoryMoveRobot.DELETION_COMMENT_SAME_AS_EDIT_COMMENT
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1122287?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ib23dba9d8e69571b272ce460312c7ca79baa3922
Gerrit-Change-Number: 1122287
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Vladis13 <wikimail2013(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/i18n/+/1122117?usp=email )
Change subject: Localisation updates from https://translatewiki.net.
......................................................................
Localisation updates from https://translatewiki.net.
Change-Id: I42a7689146123f553a86ee8664311e3ca9992768
---
M checkimages/diq.json
1 file changed, 1 insertion(+), 0 deletions(-)
Approvals:
L10n-bot: Looks good to me, approved
jenkins-bot: Verified
diff --git a/checkimages/diq.json b/checkimages/diq.json
index 1271665..621a912 100644
--- a/checkimages/diq.json
+++ b/checkimages/diq.json
@@ -10,6 +10,7 @@
"checkimages-doubles-file-comment": "Bot: Dosya commons ra belkiya esteriyaya",
"checkimages-doubles-head": "Dosyaya tekrarkerdiye",
"checkimages-doubles-talk-comment": "Bot: Na dosya xora commons de mewcuda",
+ "checkimages-doubles-talk-text": "Serva bar kerdış %(upload)s teşekur keme. Lakin , no dosya kopyay :%(image)s ya. Boti en dosya ciya gureynayişi de nisan kerda. Ek sima sinasiya dosya xırab izah kerên se do dosya ameyaye zeman de serva xirab gureynayisi ra tepoya be terefê boti ra do bıesteriyo. No peyami otomatik ne be terefê boti ra risiyao %(bot)s.",
"checkimages-forced-mode": "('''modo zaruri''')",
"checkimages-has-duplicates": "nê duplicates%(force)s cêrêni estê:",
"checkimages-log-comment": "Boti: Qeydan rocane kerê",
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/i18n/+/1122117?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/i18n
Gerrit-Branch: master
Gerrit-Change-Id: I42a7689146123f553a86ee8664311e3ca9992768
Gerrit-Change-Number: 1122117
Gerrit-PatchSet: 1
Gerrit-Owner: L10n-bot <l10n-bot(a)translatewiki.net>
Gerrit-Reviewer: L10n-bot <l10n-bot(a)translatewiki.net>
Gerrit-Reviewer: jenkins-bot
Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/983213?usp=email )
Change subject: sse: use requests_sse instead of unsupported sseclient for EventStreams
......................................................................
sse: use requests_sse instead of unsupported sseclient for EventStreams
Bug: T309380
Change-Id: Ibc6675eed57d939b2bfb884f55ecdd735c7e624b
---
M .github/workflows/doctest.yml
M pywikibot/comms/eventstreams.py
M requirements.txt
M setup.py
M tests/eventstreams_tests.py
M tests/pagegenerators_tests.py
6 files changed, 105 insertions(+), 78 deletions(-)
Approvals:
Xqt: Verified; Looks good to me, approved
diff --git a/.github/workflows/doctest.yml b/.github/workflows/doctest.yml
index 4898c46..ff11951 100644
--- a/.github/workflows/doctest.yml
+++ b/.github/workflows/doctest.yml
@@ -58,7 +58,7 @@
pip install packaging
pip install "PyMySQL >= 1.0.0"
pip install pytest
- pip install "sseclient<0.0.23,>=0.0.18"
+ pip install requests-sse
pip install wikitextparser
- name: Generate user files
diff --git a/pywikibot/comms/eventstreams.py b/pywikibot/comms/eventstreams.py
index 8dd2daa..2e91566 100644
--- a/pywikibot/comms/eventstreams.py
+++ b/pywikibot/comms/eventstreams.py
@@ -2,11 +2,13 @@
This file is part of the Pywikibot framework.
-This module requires sseclient to be installed::
+This module requires requests-sse to be installed::
- pip install "sseclient<0.0.23,>=0.0.18"
+ pip install "requests-sse>=0.5.0"
.. versionadded:: 3.0
+.. versionchanged:: 10.0
+ ``requests-sse`` package is required instead of ``sseclient``.
"""
#
# (C) Pywikibot team, 2017-2025
@@ -16,23 +18,32 @@
from __future__ import annotations
import json
+from datetime import timedelta
from functools import partial
+from typing import Any
from requests.packages.urllib3.exceptions import ProtocolError
from requests.packages.urllib3.util.response import httplib
from pywikibot import Site, Timestamp, config, debug, warning
from pywikibot.backports import NoneType
-from pywikibot.tools import cached
+from pywikibot.tools import cached, deprecated_args
from pywikibot.tools.collections import GeneratorWrapper
try:
- from sseclient import SSEClient as EventSource
-except ImportError as e:
+ from requests_sse import EventSource
+except ModuleNotFoundError as e:
EventSource = e
+INSTALL_MSG = """requests-sse is required for EventStreams;
+install it with
+
+ pip install "requests-sse>=0.5.0"
+"""
+
+
class EventStreams(GeneratorWrapper):
"""Generator class for Server-Sent Events (SSE) protocol.
@@ -99,47 +110,76 @@
>>> del stream
.. versionchanged:: 7.6
- subclassed from :class:`tools.collections.GeneratorWrapper`
+ subclassed from :class:`tools.collections.GeneratorWrapper`.
+ .. versionchanged:: 10.0
+ *retry* value is doubled for each consecutive connect try.
"""
+ @deprecated_args(last_id='last_event_id') # since 10.0.0
def __init__(self, **kwargs) -> None:
"""Initializer.
:keyword bool canary: if True, include canary events, see
- https://w.wiki/7$2z for more info
- :keyword APISite site: a project site object. Used if no url is
- given
- :keyword pywikibot.Timestamp or str since: a timestamp for older
+ https://w.wiki/7$2z for more info.
+ :keyword APISite site: a project site object. Used if no *url*
+ is given.
+ :keyword int retry: Number of milliseconds to wait after disconnects
+ before attempting to reconnect. The server may change this
+ by including a 'retry' line in a message. Retries are handled
+ automatically.
+
+ .. versionchanged:: 10.0
+ 5 seconds are used instead of 3 seconds as default.
+
+ :keyword pywikibot.Timestamp | str since: a timestamp for older
events; there will likely be between 7 and 31 days of
history available but is not guaranteed. It may be given as
a pywikibot.Timestamp, an ISO 8601 string or a mediawiki
timestamp string.
- :keyword Iterable[str] or str streams: event stream types.
+ :keyword Iterable[str] | str streams: event stream types.
Mandatory when no url is given. Multiple streams may be
given as a string with comma separated stream types or an
iterable of strings
- :keyword int or float or tuple[int or float, int or float] timeout:
+ :keyword int | float | tuple[int | float, int | float] timeout:
a timeout value indication how long to wait to send data
before giving up
:keyword str url: an url retrieving events from. Will be set up
to a default url using _site.family settings, stream types
and timestamp
- :param kwargs: keyword arguments passed to `SSEClient` and
- `requests` library
- :raises ImportError: sseclient is not installed
+
+ :keyword Any last_event_id: [*requests-sse*] If provided, this
+ parameter will be sent to the server to tell it to return
+ only messages more recent than this ID.
+ :keyword requests.Session session: [*requests-sse*] specifies a
+ requests.Session, if not, create a default requests.Session.
+ :keyword Callable[[], None] on_open: [*requests-sse*] event
+ handler for open event
+ :keyword Callable[[requests_sse.MessageEvent], None] on_message:
+ [*requests-sse*] event handler for message event
+ :keyword Callable[[], None] on_error: [*requests-sse*] event
+ handler for error event
+ :keyword int chunk_size: [*requests*] A maximum size of the chunk
+ for chunk-encoded requests.
+
+ .. versionchanged:: 10.0
+ None is used instead of 1024 as default value.
+
+ :param kwargs: Other keyword arguments passed to `requests_sse`
+ and `requests` library
+ :raises ModuleNotFoundError: requests-sse is not installed
:raises NotImplementedError: no stream types specified
.. seealso:: https://stream.wikimedia.org/?doc#streams for
available Wikimedia stream types to be passed with `streams`
parameter.
+ .. note:: *retry* keyword argument is used instead of the
+ underlying *reconnection_time* argument which is ignored.
"""
- if isinstance(EventSource, Exception):
- raise ImportError(
- 'sseclient is required for EventStreams;\n'
- 'install it with "pip install sseclient==0.0.22"\n'
- )
+ if isinstance(EventSource, ModuleNotFoundError):
+ raise ImportError(INSTALL_MSG) from EventSource
+
self.filter = {'all': [], 'any': [], 'none': []}
- self._total = None
+ self._total: int | None = None
self._canary = kwargs.pop('canary', False)
try:
@@ -161,6 +201,11 @@
self._url = kwargs.get('url') or self.url
kwargs.setdefault('url', self._url)
+
+ retry = kwargs.pop('retry', None)
+ if retry:
+ kwargs['reconnection_time'] = timedelta(milliseconds=retry)
+
kwargs.setdefault('timeout', config.socket_timeout)
self.sse_kwargs = kwargs
@@ -176,12 +221,12 @@
kwargs['since'] = self._since
if kwargs['timeout'] == config.socket_timeout:
kwargs.pop('timeout')
- return '{}({})'.format(self.__class__.__name__, ', '.join(
+ return '{}({})'.format(type(self).__name__, ', '.join(
f'{k}={v!r}' for k, v in kwargs.items()))
@property
@cached
- def url(self):
+ def url(self) -> str:
"""Get the EventStream's url.
:raises NotImplementedError: no stream types specified
@@ -195,7 +240,7 @@
streams=self._streams,
since=f'?since={self._since}' if self._since else '')
- def set_maximum_items(self, value: int) -> None:
+ def set_maximum_items(self, value: int | None) -> None:
"""Set the maximum number of items to be retrieved from the stream.
If not called, most queries will continue as long as there is
@@ -209,7 +254,7 @@
debug(f'{type(self).__name__}: Set limit (maximum_items) to '
f'{self._total}.')
- def register_filter(self, *args, **kwargs):
+ def register_filter(self, *args, **kwargs) -> None:
"""Register a filter.
Filter types:
@@ -252,6 +297,7 @@
register_filter(ftype='none', bot=True) # 3
Explanation for the result of the filter function:
+
1. ``return data['sever_name'] == 'de.wikipedia.org'``
2. ``return data['type'] in ('edit', 'log')``
3. ``return data['bot'] is True``
@@ -297,7 +343,7 @@
else:
self.filter[ftype].append(partial(_in, key=key, value=value))
- def streamfilter(self, data: dict):
+ def streamfilter(self, data: dict[str, Any]) -> bool:
"""Filter function for eventstreams.
See the description of register_filter() how it works.
@@ -309,10 +355,13 @@
if any(function(data) for function in self.filter['none']):
return False
+
if not all(function(data) for function in self.filter['all']):
return False
+
if not self.filter['any']:
return True
+
return any(function(data) for function in self.filter['any'])
@property
@@ -327,24 +376,20 @@
while self._total is None or n < self._total:
if not hasattr(self, 'source'):
self.source = EventSource(**self.sse_kwargs)
- # sseclient >= 0.0.18 is required for eventstreams (T184713)
- # we don't have a version string inside but the instance
- # variable 'chunk_size' was newly introduced with 0.0.18
- if not hasattr(self.source, 'chunk_size'):
- warning(
- 'You may not have the right sseclient version;\n'
- 'sseclient >= 0.0.18 is required for eventstreams.\n'
- "Install it with 'pip install \"sseclient>=0.0.18\"'")
+ self.source.connect(config.max_retries)
+
try:
event = next(self.source)
except (ProtocolError, OSError, httplib.IncompleteRead) as e:
warning(
f'Connection error: {e}.\nTry to re-establish connection.')
+ self.source.close()
del self.source
if event is not None:
- self.sse_kwargs['last_id'] = event.id
+ self.sse_kwargs['last_event_id'] = event.last_event_id
continue
- if event.event == 'message':
+
+ if event.type == 'message':
if event.data:
try:
element = json.loads(event.data)
@@ -355,13 +400,15 @@
n += 1
yield element
# else: ignore empty message
- elif event.event == 'error':
+ elif event.type == 'error':
warning(f'Encountered error: {event.data}')
else:
- warning(f'Unknown event {event.event} occurred.')
+ warning(f'Unknown event {event.type} occurred.')
debug(f'{type(self).__name__}: Stopped iterating due to exceeding item'
' limit.')
+
+ self.source.close()
del self.source
@@ -373,11 +420,10 @@
:param total: the maximum number of changes to return
:return: pywikibot.comms.eventstream.rc_listener configured for given site
- :raises ImportError: sseclient installation is required
+ :raises ModuleNotFoundError: requests-sse installation is required
"""
- if isinstance(EventSource, Exception):
- raise ImportError('sseclient is required for EventStreams;\n'
- 'install it with "pip install sseclient"\n')
+ if isinstance(EventSource, ModuleNotFoundError):
+ raise ModuleNotFoundError(INSTALL_MSG) from EventSource
stream = EventStreams(streams='recentchange', site=site)
stream.set_maximum_items(total)
diff --git a/requirements.txt b/requirements.txt
index 7a771d3..30a0c0b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -47,7 +47,7 @@
# core pagegenerators
google >= 1.7
-sseclient >= 0.0.18,< 0.0.23
+requests-sse >= 0.5.0
# The mysql generator in pagegenerators depends on PyMySQL
PyMySQL >= 1.0.0
diff --git a/setup.py b/setup.py
index 942bc94..31f1ec5 100755
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@
.. warning:: do not upload a development release to pypi.
"""
#
-# (C) Pywikibot team, 2009-2024
+# (C) Pywikibot team, 2009-2025
#
# Distributed under the terms of the MIT license.
#
@@ -37,7 +37,7 @@
# ------- setup extra_requires ------- #
extra_deps = {
# Core library dependencies
- 'eventstreams': ['sseclient<0.0.23,>=0.0.18'], # T222885
+ 'eventstreams': ['requests-sse>=0.5.0'],
'isbn': ['python-stdnum>=1.19'],
'Graphviz': ['pydot>=1.4.1'],
'Google': ['google>=1.7'],
diff --git a/tests/eventstreams_tests.py b/tests/eventstreams_tests.py
index ea8d7e3..c95e81d 100755
--- a/tests/eventstreams_tests.py
+++ b/tests/eventstreams_tests.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""Tests for the eventstreams module."""
#
-# (C) Pywikibot team, 2017-2024
+# (C) Pywikibot team, 2017-2025
#
# Distributed under the terms of the MIT license.
#
@@ -272,25 +272,18 @@
while self._total is None or n < self._total:
if not hasattr(self, 'source'):
self.source = EventSource(**self.sse_kwargs)
+ self.source.connect()
+
event = next(self.source)
- if event.event == 'message':
- if not event.data:
- continue
+ if event.type == 'message' and event.data:
n += 1
- try:
- element = json.loads(event.data)
- except ValueError as e: # pragma: no cover
- self.source.resp.close() # close SSLSocket
- del self.source
- raise ValueError(
- f'{e}\n\nEvent no {n}: '
- f'Could not load json data from source\n${event}$'
- ) from e
- yield element
+ yield json.loads(event.data)
+
+ self.source.close()
del self.source
-@require_modules('sseclient')
+@require_modules('requests_sse')
class TestEventSource(TestCase):
"""Test sseclient.EventSource."""
@@ -298,12 +291,7 @@
net = True
def test_stream(self):
- """Verify that the EventSource delivers events without problems.
-
- As found in sseclient 0.0.24 the EventSource gives randomly a
- ValueError 'Unterminated string' when json.load is processed
- if the limit is high enough.
- """
+ """Verify that the EventSource delivers events without problems."""
with skipping(NotImplementedError):
self.es = EventStreamsTestClass(streams='recentchange')
limit = 50
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index d72c392..2fdcbbd 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""Test pagegenerators module."""
#
-# (C) Pywikibot team, 2009-2024
+# (C) Pywikibot team, 2009-2025
#
# Distributed under the terms of the MIT license.
from __future__ import annotations
@@ -27,7 +27,6 @@
PreloadingGenerator,
WikibaseItemFilterPageGenerator,
)
-from pywikibot.tools import has_module
from tests import join_data_path, unittest_print
from tests.aspects import (
DefaultSiteTestCase,
@@ -35,6 +34,7 @@
RecentChangesTestCase,
TestCase,
WikidataTestCase,
+ require_modules,
)
from tests.tools_tests import GeneratorIntersectTestCase
from tests.utils import skipping
@@ -1648,17 +1648,10 @@
"""Test case for Live Recent Changes pagegenerator."""
- @classmethod
- def setUpClass(cls):
- """Setup test class."""
- super().setUpClass()
- cls.client = 'sseclient'
- if not has_module(cls.client):
- raise unittest.SkipTest(f'{cls.client} is not available')
-
+ @require_modules('requests_sse')
def test_RC_pagegenerator_result(self):
"""Test RC pagegenerator."""
- lgr = logging.getLogger(self.client)
+ lgr = logging.getLogger('requests_sse.client')
lgr.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/983213?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ibc6675eed57d939b2bfb884f55ecdd735c7e624b
Gerrit-Change-Number: 983213
Gerrit-PatchSet: 30
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot