jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/966120 )
Change subject: [cleanup] drop unused use_hard_category_redirects
......................................................................
[cleanup] drop unused use_hard_category_redirects
- deprecate BaseSite.use_hard_category_redirects (sketched below)
- remove Family.use_hard_category_redirects
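An editor's sketch (not part of this change) of what callers can expect from the deprecated property; it assumes pywikibot 8.5+ and an 'en' Wikipedia site object, and the exact warning class depends on pywikibot.tools.deprecated:

    import warnings

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        value = site.use_hard_category_redirects  # deprecated property
    assert value is False  # the property now always returns False
    assert caught          # accessing it emitted a deprecation warning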
Bug: T348953
Change-Id: Ifcbc0d8086c50d5c887f273f65f817af052a8621
---
M pywikibot/family.py
M pywikibot/site/_basesite.py
2 files changed, 27 insertions(+), 6 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 52c6d3a..93cd1df 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -118,9 +118,6 @@
'_default': []
}
- # A list of languages that use hard (not soft) category redirects
- use_hard_category_redirects = []
-
# A list of disambiguation template names in different languages
disambiguationTemplates: Dict[str, Sequence[str]] = {
'_default': []
diff --git a/pywikibot/site/_basesite.py b/pywikibot/site/_basesite.py
index ea47b94..8175ebe 100644
--- a/pywikibot/site/_basesite.py
+++ b/pywikibot/site/_basesite.py
@@ -25,6 +25,7 @@
ComparableMixin,
SelfCallString,
cached,
+ deprecated,
first_upper,
normalize_username,
)
@@ -90,14 +91,24 @@
self._username = normalize_username(user)
- self.use_hard_category_redirects = (
- self.code in self.family.use_hard_category_redirects)
-
# following are for use with lock_page and unlock_page methods
self._pagemutex = threading.Condition()
self._locked_pages = set()
@property
+ @deprecated(since='8.5.0')
+ def use_hard_category_redirects(self):
+ """Hard redirects are used for this site.
+
+ Originally created as a property for future use in a proposal to
+ replace category redirect templates with hard redirects. This
+ was never implemented and is not used inside the framework.
+
+ .. deprecated:: 8.5
+ """
+ return False
+
+ @property
@cached
def throttle(self):
"""Return this Site's throttle. Initialize a new one if needed."""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/966120
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ifcbc0d8086c50d5c887f273f65f817af052a8621
Gerrit-Change-Number: 966120
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/959178 )
Change subject: [IMPR] No longer drop site decorators for optimized code
......................................................................
[IMPR] No longer drop site decorators for optimized code
Site decorators have been dropped with -O optimized code since
https://gerrit.wikimedia.org/r/c/pywikibot/core/+/169788
but silently ignoring these decorators is not a good idea because
it leaves users guessing why their scripts fail.
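An editor's sketch with a hypothetical, simplified decorator (not the real pywikibot code) illustrating the pattern this change removes; under `python -O`, __debug__ is False, the wrapper used to be discarded, and the permission check silently disappeared:

    def need_right_sketch(right):
        """Hypothetical stand-in for pywikibot.site._decorators.need_right."""
        def decorator(fn):
            def callee(self, *args, **kwargs):
                if right not in self.rights:
                    raise PermissionError(
                        f'{fn.__name__}() requires the {right!r} right')
                return fn(self, *args, **kwargs)
            # Removed pattern: under `python -O` the undecorated fn was
            # returned, so the check above never ran and scripts failed
            # later with a much less obvious server-side error.
            # if not __debug__:
            #     return fn
            return callee
        return decorator

    class FakeSite:
        rights = {'read'}

        @need_right_sketch('delete')
        def deletepage(self, title):
            return f'deleted {title}'

    try:
        FakeSite().deletepage('Sandbox')
    except PermissionError as e:
        print(e)  # raised in both normal and -O mode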
Bug: T74424
Change-Id: I9ab0dd60f3ae9f47b7462d07733fa42482102140
---
M pywikibot/site/_decorators.py
M tests/aspects.py
M tests/site_decorators_tests.py
3 files changed, 29 insertions(+), 32 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/site/_decorators.py b/pywikibot/site/_decorators.py
index d8fc123..975573b 100644
--- a/pywikibot/site/_decorators.py
+++ b/pywikibot/site/_decorators.py
@@ -1,6 +1,6 @@
"""Decorators used by site models."""
#
-# (C) Pywikibot team, 2008-2022
+# (C) Pywikibot team, 2008-2023
#
# Distributed under the terms of the MIT license.
#
@@ -36,9 +36,6 @@
return fn(self, *args, **kwargs)
- if not __debug__:
- return fn
-
manage_wrapping(callee, fn)
return callee
@@ -60,9 +57,6 @@
.format(fn.__name__, extension))
return fn(self, *args, **kwargs)
- if not __debug__:
- return fn
-
manage_wrapping(callee, fn)
return callee
@@ -91,9 +85,6 @@
.format(self.user(), right))
return fn(self, *args, **kwargs)
- if not __debug__:
- return fn
-
manage_wrapping(callee, fn)
return callee
@@ -116,9 +107,6 @@
.format(fn.__name__, version))
return fn(self, *args, **kwargs)
- if not __debug__:
- return fn
-
manage_wrapping(callee, fn)
return callee
diff --git a/tests/aspects.py b/tests/aspects.py
index baa5414..8bb0ec3 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -1362,21 +1362,7 @@
cls.override_default_site(pywikibot.Site('en', 'wikipedia'))
-class DebugOnlyTestCase(TestCase):
-
- """Test cases that only operate in debug mode."""
-
- @classmethod
- def setUpClass(cls):
- """Set up test class."""
- if not __debug__:
- raise unittest.SkipTest(
- '{} is disabled when __debug__ is disabled.'
- .format(cls.__name__))
- super().setUpClass()
-
-
-class DeprecationTestCase(DebugOnlyTestCase, TestCase):
+class DeprecationTestCase(TestCase):
"""Test cases for deprecation function in the tools module."""
@@ -1533,6 +1519,14 @@
'expected warning filename {}; warning item: {}'
.format(filename, item))
+ @classmethod
+ def setUpClass(cls):
+ """Only operate in debug mode."""
+ if not __debug__:
+ raise unittest.SkipTest(
+ f'{cls.__name__} is disabled when __debug__ is disabled.')
+ super().setUpClass()
+
def setUp(self):
"""Set up unit test."""
super().setUp()
diff --git a/tests/site_decorators_tests.py b/tests/site_decorators_tests.py
index 3a4ce28..dfcfd3f 100755
--- a/tests/site_decorators_tests.py
+++ b/tests/site_decorators_tests.py
@@ -1,7 +1,7 @@
#!/usr/bin/env python3
"""Tests against a fake Site object."""
#
-# (C) Pywikibot team, 2012-2022
+# (C) Pywikibot team, 2012-2023
#
# Distributed under the terms of the MIT license.
#
@@ -10,10 +10,10 @@
from pywikibot.exceptions import UserRightsError
from pywikibot.site._decorators import must_be, need_right, need_version
from pywikibot.tools import deprecated
-from tests.aspects import DebugOnlyTestCase, DeprecationTestCase
+from tests.aspects import DeprecationTestCase, TestCase
-class TestMustBe(DebugOnlyTestCase):
+class TestMustBe(TestCase):
"""Test cases for the must_be decorator."""
@@ -125,7 +125,7 @@
self.call_this_user_req_function(args, kwargs)
-class TestNeedRight(DebugOnlyTestCase):
+class TestNeedRight(TestCase):
"""Test cases for the must_be decorator."""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/959178
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I9ab0dd60f3ae9f47b7462d07733fa42482102140
Gerrit-Change-Number: 959178
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/965243 )
Change subject: [IMPR] Improve handling of uninitialized MediaInfo
......................................................................
[IMPR] Improve handling of uninitialized MediaInfo
Make it possible to retrieve just the id for the non-existing
mediainfo of an existing file, and use this as an internal
assertion. An error is now also raised for invalid
mediainfo references.
Drop MediaInfo.get_data_for_new_entity because it is
no longer called.
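An editor's usage sketch mirroring tests/file_tests.py in this change; the file title is a placeholder, and it assumes a Commons file that exists but has no mediainfo content yet:

    import pywikibot
    from pywikibot.exceptions import NoWikibaseEntityError

    site = pywikibot.Site('commons', 'commons')
    page = pywikibot.FilePage(site, 'File:Example.jpg')  # placeholder title
    item = page.data_item()

    try:
        item.get()  # still raises: there is no mediainfo content yet
    except NoWikibaseEntityError:
        pass

    print(item.exists())                 # False
    print(item.id)                       # 'M<pageid>' -- just the id is known
    print(item.labels, item.statements)  # empty, editable collections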
Change-Id: I7ad7396708e9fc7fcd98274c3668f07e25a1be30
---
M pywikibot/page/_wikibase.py
M tests/file_tests.py
2 files changed, 101 insertions(+), 38 deletions(-)
Approvals:
Zache-tool: Looks good to me, but someone else must approve
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/page/_wikibase.py b/pywikibot/page/_wikibase.py
index 753dc3f..bbfd71f 100644
--- a/pywikibot/page/_wikibase.py
+++ b/pywikibot/page/_wikibase.py
@@ -130,14 +130,17 @@
def __getattr__(self, name):
if name in self.DATA_ATTRIBUTES:
if self.getID() == '-1':
- for key, cls in self.DATA_ATTRIBUTES.items():
- setattr(self, key, cls.new_empty(self.repo))
+ self._initialize_empty()
return getattr(self, name)
return self.get()[name]
raise AttributeError("'{}' object has no attribute '{}'"
.format(self.__class__.__name__, name))
+ def _initialize_empty(self):
+ for key, cls in self.DATA_ATTRIBUTES.items():
+ setattr(self, key, cls.new_empty(self.repo))
+
def _defined_by(self, singular: bool = False) -> dict:
"""
Internal function to provide the API parameters to identify the entity.
@@ -355,7 +358,7 @@
"""
Return the full concept URI.
- :raise NoWikibaseEntityError: if this entity doesn't exist
+ :raise NoWikibaseEntityError: if this entity's id is not known
"""
entity_id = self.getID()
if entity_id == '-1':
@@ -370,6 +373,7 @@
.. versionadded:: 6.5
"""
+ entity_type = 'mediainfo'
title_pattern = r'M[1-9]\d*'
DATA_ATTRIBUTES = {
'labels': LanguageDict,
@@ -378,12 +382,39 @@
def __getattr__(self, name):
if name == 'claims': # T149410
- name = 'statements'
- if hasattr(self, name):
- return getattr(self, name)
+ return self.statements
+
+ if name in self.DATA_ATTRIBUTES:
+ if not self.exists():
+ self._assert_has_id()
+ self._initialize_empty()
+ return getattr(self, name)
return super().__getattr__(name)
+ def _assert_has_id(self):
+ if self.id != '-1':
+ return
+
+ if not self.file.exists():
+ exc = NoPageError(self.file)
+ raise NoWikibaseEntityError(self) from exc
+
+ self.id = 'M' + str(self.file.pageid)
+
+ def _defined_by(self, singular: bool = False) -> dict:
+ """
+ Internal function to provide the API parameters to identify the entity.
+
+ :param singular: Whether the parameter names should use the singular
+ form
+ :raise NoWikibaseEntityError: if this entity is associated with
+ a non-existing file
+ :return: API parameters
+ """
+ self._assert_has_id()
+ return super()._defined_by(singular)
+
@property
def file(self) -> FilePage:
"""Get the file associated with the mediainfo."""
@@ -397,7 +428,8 @@
pywikibot.error(msg)
raise Error(msg)
- page_id = self.getID(numeric=True)
+ # avoid recursion with self.getID()
+ page_id = int(self.id[1:])
result = list(self.repo.load_pages_from_pageids([page_id]))
if not result:
raise Error(f'There is no existing page with id "{page_id}"')
@@ -410,27 +442,24 @@
return self._file
- def get_data_for_new_entity(self) -> dict:
- """Return data required for creation of a new mediainfo."""
- self.id = 'M' + str(self.file.pageid)
- self._content = {}
- return super().get()
-
def get(self, force: bool = False) -> dict:
"""Fetch all MediaInfo entity data and cache it.
+ .. note:: This method may raise an exception even if the associated
+ file exists, because the mediainfo may not have been initialized
+ yet. :attr:`labels` and :attr:`statements` can still be accessed
+ and modified. :meth:`exists` suppresses the exception.
+
+ .. note:: Dicts returned by this method are references to the
+ content of this entity; modifying them may indirectly cause
+ unwanted changes to the live content.
+
:param force: override caching
:raise NoWikibaseEntityError: if this entity doesn't exist
:return: actual data which entity holds
"""
if self.id == '-1':
- if force:
- if not self.file.exists():
- exc = NoPageError(self.file)
- raise NoWikibaseEntityError(self) from exc
- # get just the id for Wikibase API call
- self.id = 'M' + str(self.file.pageid)
- else:
+ if not force:
try:
data = self.file.latest_revision.slots['mediainfo']['*']
except NoPageError as exc:
@@ -443,6 +472,8 @@
self._content = jsonlib.loads(data)
self.id = self._content['id']
+ self._assert_has_id()
+
return super().get(force=force)
def getID(self, numeric: bool = False):
@@ -450,9 +481,10 @@
Get the entity identifier.
:param numeric: Strip the first letter and return an int
+ :raise NoWikibaseEntityError: if this entity is associated with
+ a non-existing file
"""
- if self.id == '-1':
- self.get()
+ self._assert_has_id()
return super().getID(numeric=numeric)
diff --git a/tests/file_tests.py b/tests/file_tests.py
index 911c595..8a252c4 100755
--- a/tests/file_tests.py
+++ b/tests/file_tests.py
@@ -13,6 +13,7 @@
import pywikibot
from pywikibot.exceptions import (
+ Error,
NoPageError,
NoWikibaseEntityError,
PageRelatedError,
@@ -351,7 +352,7 @@
self.assertTrue(item.file is page)
self.assertEqual('-1', item.id)
item.get()
- self.assertEqual('M14634781', item.getID())
+ self.assertEqual('M14634781', item.id)
self.assertIsInstance(
item.labels, pywikibot.page._collections.LanguageDict)
self.assertIsInstance(
@@ -371,6 +372,15 @@
del item._file
self.assertEqual(page, item.file)
+ def test_data_item_not_file(self):
+ """Test data item with invalid pageid."""
+ item = pywikibot.MediaInfo(self.site, 'M1') # Main Page
+ with self.assertRaises(Error):
+ item.file
+ with self.assertRaises(NoWikibaseEntityError):
+ item.get()
+ self.assertFalse(item.exists())
+
def test_data_item_when_no_file_or_data_item(self):
"""Test data item associated to file that does not exist."""
page = pywikibot.FilePage(self.site,
@@ -381,6 +391,8 @@
with self.assertRaises(NoWikibaseEntityError):
item.get()
+ with self.assertRaises(NoWikibaseEntityError):
+ item.labels
def test_data_item_when_file_exist_but_without_item(self):
"""Test if data item is missing from file."""
@@ -394,24 +406,26 @@
# Seek to first page without mediainfo.
for page in gen:
- if 'mediainfo' not in page.latest_revision.slots:
- item = page.data_item()
- self.assertIsInstance(item, pywikibot.MediaInfo)
+ if 'mediainfo' in page.latest_revision.slots:
+ continue
- # Get fails as there is no mediainfo.
- with self.assertRaises(NoWikibaseEntityError):
- item.get()
+ item = page.data_item()
+ self.assertIsInstance(item, pywikibot.MediaInfo)
- # Create new empty mediainfo.
- item.get_data_for_new_entity()
- self.assertIsInstance(
- item.labels, pywikibot.page._collections.LanguageDict)
- self.assertIsInstance(
- item.statements,
- pywikibot.page._collections.ClaimCollection)
+ # Get fails as there is no mediainfo.
+ with self.assertRaises(NoWikibaseEntityError):
+ item.get()
- # break the loop after checking first file
- break
+ self.assertFalse(item.exists())
+ self.assertEqual(f'M{page.pageid}', item.id)
+ self.assertIsInstance(
+ item.labels, pywikibot.page._collections.LanguageDict)
+ self.assertIsInstance(
+ item.statements,
+ pywikibot.page._collections.ClaimCollection)
+
+ # break the loop after checking first file
+ break
def test_data_list_to_dict_workaround(self):
"""Test that T222159 workaround converts [] to {}."""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/965243
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I7ad7396708e9fc7fcd98274c3668f07e25a1be30
Gerrit-Change-Number: 965243
Gerrit-PatchSet: 7
Gerrit-Owner: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: Zache-tool <kimmo.virtanen(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/966503 )
Change subject: [typing] solve some mypy issues
......................................................................
[typing] solve some mypy issues
Change-Id: I3048641df1a6e5d4ced1ad3688a12a3e2f72c6e7
---
M pywikibot/backports.py
M pywikibot/cosmetic_changes.py
M pywikibot/family.py
M pywikibot/textlib.py
M pywikibot/throttle.py
5 files changed, 30 insertions(+), 16 deletions(-)
Approvals:
Xqt: Verified; Looks good to me, approved
diff --git a/pywikibot/backports.py b/pywikibot/backports.py
index f2a77ec..02b59fe 100644
--- a/pywikibot/backports.py
+++ b/pywikibot/backports.py
@@ -69,6 +69,7 @@
if PYTHON_VERSION < (3, 9):
from typing import (
Container,
+ Counter,
Dict,
FrozenSet,
Generator,
@@ -92,6 +93,7 @@
Mapping,
Sequence,
)
+ from collections import Counter
from re import Match, Pattern
Dict = dict # type: ignore[misc]
FrozenSet = frozenset # type: ignore[misc]
diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index 29d8f64..05d7209 100644
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -707,7 +707,7 @@
header, sections, footer = textlib.extract_sections(text, self.site)
# iterate stripped sections and create a new page body
- new_body = []
+ new_body: List[textlib.Section] = []
for i, strip_section in enumerate(strip_sections):
current_dep = sections[i].level
try:
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 52c6d3a..ed3bba9 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -21,6 +21,7 @@
import pywikibot
from pywikibot import config
from pywikibot.backports import (
+ DefaultDict,
Dict,
FrozenSet,
List,
@@ -36,6 +37,8 @@
logger = logging.getLogger('pywiki.wiki.family')
+CrossnamespaceType = DefaultDict[str, Dict[str, List[int]]]
+
# Legal characters for Family.name and Family.langs keys
NAME_CHARACTERS = string.ascii_letters + string.digits
# nds_nl code alias requires "_"
@@ -135,8 +138,8 @@
# should be avoided
archived_page_templates: Dict[str, Tuple[str, ...]] = {}
- # A list of projects that share cross-project sessions.
- cross_projects = []
+ # A set of projects that share cross-project sessions.
+ cross_projects: Set[str] = set()
# A list with the name for cross-project cookies.
# default for wikimedia centralAuth extensions.
@@ -182,7 +185,7 @@
# is checked first, and languages are put in the order given there.
# All other languages are put after those, in code-alphabetical
# order.
- interwiki_putfirst = {}
+ interwiki_putfirst: Dict[str, str] = {}
# Some families, e. g. commons and meta, are not multilingual and
# forward interlanguage links to another family (wikipedia).
@@ -267,7 +270,7 @@
# values are dicts where:
# keys are the languages that can be linked to from the lang+ns, or
# '_default'; values are a list of namespace numbers
- crossnamespace = collections.defaultdict(dict)
+ crossnamespace: CrossnamespaceType = collections.defaultdict(dict)
##
# Examples :
#
@@ -302,7 +305,7 @@
.. versionadded:: 7.0
"""
- _families = {}
+ _families: Dict[str, 'Family'] = {}
@staticmethod
def load(fam: Optional[str] = None):
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 99ca938..f68e6a6 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -582,7 +582,7 @@
"""
textdata = ''
- keeptags = []
+ keeptags: List[str] = []
def __enter__(self) -> None:
pass
@@ -1140,15 +1140,14 @@
text: str,
insite=None,
template_subpage: bool = False
-) -> Dict:
- """
- Return a dict of inter-language links found in text.
+) -> Dict['pywikibot.site.BaseSite', 'pywikibot.Page']:
+ """Return a dict of inter-language links found in text.
- The returned dict uses the site as keys and Page objects as values. It does
- not contain its own site.
+ The returned dict uses the site as keys and Page objects as values.
+ It does not contain its own site.
- Do not call this routine directly, use Page.interwiki() method
- instead.
+ Do not call this routine directly, use
+ :meth:`page.BasePage.interwiki` method instead.
"""
if insite is None:
insite = pywikibot.Site()
@@ -1157,7 +1156,7 @@
# infos there
if fam.interwiki_forward:
fam = Family.load(fam.interwiki_forward)
- result = {}
+ result: Dict[pywikibot.site.BaseSite, pywikibot.Page] = {}
# Ignore interwiki links within nowiki tags, includeonly tags, pre tags,
# and HTML comments
include = []
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py
index 825bfa3..348e751 100644
--- a/pywikibot/throttle.py
+++ b/pywikibot/throttle.py
@@ -15,6 +15,7 @@
import pywikibot
from pywikibot import config
+from pywikibot.backports import Counter as CounterType
from pywikibot.tools import deprecated
@@ -72,7 +73,7 @@
self.retry_after = 0 # set by http.request
self.delay = 0
self.checktime = 0.0
- self.modules = Counter()
+ self.modules: CounterType[str] = Counter()
self.checkMultiplicity()
self.setDelays()
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/966503
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I3048641df1a6e5d4ced1ad3688a12a3e2f72c6e7
Gerrit-Change-Number: 966503
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged