jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/525225 )
Change subject: [PY3] Make Python 2 more Python 3 compatible
......................................................................
[PY3] Make Python 2 more Python 3 compatible
Use filter, map, zip from future_builtins to get Python 3 behaviour.
Also remove old xrange in casechecker.
Bug: T228833
Change-Id: I775215a2f82e37425f5324b525d10b6569fbab0b
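For illustration, a minimal sketch (not taken from the patch itself) of the
pattern this change applies: on Python 2, future_builtins provides filter,
map and zip with Python 3 semantics, i.e. they return single-use iterators
rather than lists.

    import sys

    if sys.version_info[0] == 2:  # no-op guard on Python 3, where zip is built in
        from future_builtins import zip

    pairs = zip('abc', 'xyz')   # an iterator on both Python 2 and 3
    first = dict(pairs)         # {'a': 'x', 'b': 'y', 'c': 'z'}
    second = dict(pairs)        # {} -- the iterator is already exhausted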
---
M generate_user_files.py
M pywikibot/site.py
M pywikibot/textlib.py
M scripts/casechecker.py
M scripts/maintenance/download_dump.py
M scripts/protect.py
M tests/pagegenerators_tests.py
7 files changed, 28 insertions(+), 17 deletions(-)
Approvals:
D3r1ck01: Looks good to me, but someone else must approve
Zhuyifei1999: Looks good to me, approved
jenkins-bot: Verified
diff --git a/generate_user_files.py b/generate_user_files.py
index 69df7df..8abb94e 100755
--- a/generate_user_files.py
+++ b/generate_user_files.py
@@ -18,6 +18,9 @@
from generate_family_file import _import_with_no_user_config
+if sys.version_info[0] == 2:
+ from future_builtins import filter
+
# DISABLED_SECTIONS cannot be copied; variables must be set manually
DISABLED_SECTIONS = {'USER INTERFACE SETTINGS', # uses sys
'EXTERNAL EDITOR SETTINGS', # uses os
diff --git a/pywikibot/site.py b/pywikibot/site.py
index bef3935..fd52460 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -86,6 +86,7 @@
from itertools import zip_longest
from urllib.parse import urlencode, urlparse
else:
+ from future_builtins import zip
from itertools import izip_longest as zip_longest
from urllib import urlencode
from urlparse import urlparse
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 4c037ee..990d40c 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -38,8 +38,8 @@
if not PY2:
from html.parser import HTMLParser
else:
+ from future_builtins import zip
from HTMLParser import HTMLParser
- from itertools import izip as zip
try:
import mwparserfromhell
diff --git a/scripts/casechecker.py b/scripts/casechecker.py
index 08bd2f4..9ab09b1 100755
--- a/scripts/casechecker.py
+++ b/scripts/casechecker.py
@@ -24,8 +24,8 @@
from scripts.category import CategoryMoveRobot as CategoryMoveBot
-if not PY2:
- xrange = range
+if PY2:
+ from future_builtins import zip
class CaseChecker(object):
@@ -172,18 +172,18 @@
self.titleList = [self.Page(t) for t in f]
self.failedTitles += '.failed'
- ziplist = zip(self.localSuspects, self.latinSuspects)
+ iterzip = zip(self.localSuspects, self.latinSuspects)
self.lclToLatDict = {
- ord(local): latin for local, latin in ziplist}
+ ord(local): latin for local, latin in iterzip}
self.latToLclDict = {
- ord(latin): local for local, latin in ziplist}
+ ord(latin): local for local, latin in iterzip}
if self.localKeyboard is not None:
- ziplist = zip(self.localKeyboard, self.latinKeyboard)
+ iterzip = zip(self.localKeyboard, self.latinKeyboard)
self.lclToLatKeybDict = {
- ord(local): latin for local, latin in ziplist}
+ ord(local): latin for local, latin in iterzip}
self.latToLclKeybDict = {
- ord(latin): local for local, latin in ziplist}
+ ord(latin): local for local, latin in iterzip}
else:
self.lclToLatKeybDict = {}
self.latToLclKeybDict = {}
@@ -513,7 +513,7 @@
# try to match one of the knownWords
bwLen = len(badWord)
kw = [w for w in self.knownWords if len(w) == bwLen]
- for p in xrange(bwLen):
+ for p in range(bwLen):
if len(kw) == 0:
break
c = badWord[p]
@@ -559,7 +559,7 @@
# combinations from the bad words list, and convert just the
# picked words to cyrilic, whereas making all other words as
# latin character.
- for itemCntToPick in xrange(len(ambigBadWords) + 1):
+ for itemCntToPick in range(len(ambigBadWords) + 1):
title2 = title
for uc in itertools.combinations(list(ambigBadWords),
itemCntToPick):
diff --git a/scripts/maintenance/download_dump.py b/scripts/maintenance/download_dump.py
index ea0276f..d0c72b3 100644
--- a/scripts/maintenance/download_dump.py
+++ b/scripts/maintenance/download_dump.py
@@ -14,7 +14,7 @@
"""
#
-# (C) Pywikibot team, 2017-2018
+# (C) Pywikibot team, 2017-2019
# (C) Yifei He, 2017
#
# Distributed under the terms of the MIT license.
@@ -22,10 +22,8 @@
from __future__ import absolute_import, division, unicode_literals
import binascii
-
import os.path
import sys
-
from os import remove, symlink, urandom
try:
@@ -45,10 +43,12 @@
from os import rename as replace
import pywikibot
-
from pywikibot import Bot
-
from pywikibot.comms.http import fetch
+from pywikibot.tools import PY2
+
+if PY2:
+ from future_builtins import map
class DownloadDumpBot(Bot):
diff --git a/scripts/protect.py b/scripts/protect.py
index 4cd088a..986e35d 100755
--- a/scripts/protect.py
+++ b/scripts/protect.py
@@ -64,6 +64,10 @@
import pywikibot
from pywikibot import i18n, pagegenerators
from pywikibot.bot import SingleSiteBot
+from pywikibot.tools import PY2
+
+if PY2:
+ from future_builtins import zip
# This is required for the text that is shown when you run this script
# with the parameter -help.
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index f5d1e6d..ba2868d 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -23,7 +23,7 @@
CategorizedPageGenerator
)
-from pywikibot.tools import has_module, suppress_warnings
+from pywikibot.tools import has_module, PY2, suppress_warnings
from tests import join_data_path, mock
from tests.aspects import (
@@ -36,6 +36,9 @@
)
from tests.thread_tests import GeneratorIntersectTestCase
+if PY2:
+ from future_builtins import zip
+
en_wp_page_titles = (
# just a bunch of randomly selected titles for English Wikipedia tests
'Eastern Sayan',
--
To view, visit https://gerrit.wikimedia.org/r/525225
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I775215a2f82e37425f5324b525d10b6569fbab0b
Gerrit-Change-Number: 525225
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Zhuyifei1999 <zhuyifei1999(a)gmail.com>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/525285 )
Change subject: Revert "[bugfix] T113120 is solved for Python 3.5+"
......................................................................
Revert "[bugfix] T113120 is solved for Python 3.5+"
This reverts change I20a42ca67ab9d9692b6bcb22e33ab740a76f03cc
Bug: T113120
Bug: T228841
Change-Id: I1c15b9e84d9a7b71a082bb6e59acdca1bf95c60c
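The revert below applies the email-package workaround on all Python 3
versions again instead of only on Python 3.4. A minimal sketch of the
resulting import gating (the workaround subclass itself is elided here;
see the diff for the real code):

    import sys

    PY2 = sys.version_info[0] == 2

    if not PY2:
        from urllib.parse import urlencode, unquote
        from email.mime.multipart import MIMEMultipart as MIMEMultipartOrig

        class CTEBinaryMIMEMultipart(MIMEMultipartOrig):
            """Placeholder for the CTE-binary workaround (see the diff)."""

        MIMEMultipart = CTEBinaryMIMEMultipart
    else:
        from urllib import urlencode, unquote
        from email.mime.multipart import MIMEMultipart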
---
M pywikibot/data/api.py
1 file changed, 10 insertions(+), 8 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 09d0648..17f5a05 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -43,9 +43,15 @@
)
from pywikibot.tools.formatter import color_format
-if PYTHON_VERSION[:2] == (3, 4): # T113120
- # Subclassing necessary to fix a bug of the email package
- # in Python 3.4: see http://bugs.python.org/issue19003
+if not PY2:
+ from urllib.parse import urlencode, unquote
+
+ # Bug: T113120, T228841
+ # Subclassing necessary to fix bug of the email package in Python 3:
+ # see https://bugs.python.org/issue19003
+ # see https://bugs.python.org/issue18886
+ # The following solution might be removed if the bug is fixed for
+ # Python versions which are supported by PWB.
from email.generator import BytesGenerator
from email.mime.multipart import MIMEMultipart as MIMEMultipartOrig
@@ -80,12 +86,8 @@
MIMEMultipart = CTEBinaryMIMEMultipart
else:
- from email.mime.multipart import MIMEMultipart
-
-if not PY2:
- from urllib.parse import urlencode, unquote
-else:
from urllib import urlencode, unquote
+ from email.mime.multipart import MIMEMultipart
_logger = 'data.api'
--
To view, visit https://gerrit.wikimedia.org/r/525285
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I1c15b9e84d9a7b71a082bb6e59acdca1bf95c60c
Gerrit-Change-Number: 525285
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/518192 )
Change subject: [bugfix] T113120 is solved for Python 3.5+
......................................................................
[bugfix] T113120 is solved for Python 3.5+
Bug: T113120
Change-Id: I20a42ca67ab9d9692b6bcb22e33ab740a76f03cc
---
M pywikibot/data/api.py
1 file changed, 11 insertions(+), 12 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 5f92b76..09d0648 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -43,20 +43,15 @@
)
from pywikibot.tools.formatter import color_format
-if not PY2:
- # Subclassing necessary to fix a possible bug of the email package
- # in py3: see http://bugs.python.org/issue19003
- # The following solution might be removed if/once the bug is fixed,
- # unless the fix is not backported to py3.x versions that should
- # instead support PWB.
- from urllib.parse import urlencode, unquote
+if PYTHON_VERSION[:2] == (3, 4): # T113120
+ # Subclassing necessary to fix a bug of the email package
+ # in Python 3.4: see http://bugs.python.org/issue19003
+ from email.generator import BytesGenerator
+ from email.mime.multipart import MIMEMultipart as MIMEMultipartOrig
from io import BytesIO
- import email.generator
- from email.mime.multipart import MIMEMultipart as MIMEMultipartOrig
-
- class CTEBinaryBytesGenerator(email.generator.BytesGenerator):
+ class CTEBinaryBytesGenerator(BytesGenerator):
"""Workaround for bug in python 3 email handling of CTE binary."""
@@ -85,9 +80,13 @@
MIMEMultipart = CTEBinaryMIMEMultipart
else:
- from urllib import urlencode, unquote
from email.mime.multipart import MIMEMultipart
+if not PY2:
+ from urllib.parse import urlencode, unquote
+else:
+ from urllib import urlencode, unquote
+
_logger = 'data.api'
lagpattern = re.compile(
--
To view, visit https://gerrit.wikimedia.org/r/518192
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I20a42ca67ab9d9692b6bcb22e33ab740a76f03cc
Gerrit-Change-Number: 518192
Gerrit-PatchSet: 4
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/524793 )
Change subject: [setup] Combine setup requirements for different python versions
......................................................................
[setup] Combine setup requirements for different python versions
There are still remaining dependencies, such as os.environ variables,
which cannot be combined in this way
Bug: T227409
Change-Id: I552f1a8a3e913904b10979a899470037d51552dc
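As a minimal, hypothetical sketch of the approach (the package name is
invented, not pywikibot's actual setup.py): PEP 508 environment markers let
a single requirement list cover all interpreter versions, so pip selects
the right variant at install time.

    from setuptools import setup

    setup(
        name='example-package',  # hypothetical name
        install_requires=[
            'requests>=2.20.0',
            'ipaddr>=2.1.10;python_version<"3"',
            'pathlib2;python_version<"3"',
        ],
        extras_require={
            'Tkinter': [
                'Pillow<7.0.0;python_version<"3"',
                'Pillow<6.0.0;python_version=="3.4"',
                'Pillow;python_version>="3.5"',
            ],
        },
    )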
---
M setup.py
1 file changed, 58 insertions(+), 59 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/setup.py b/setup.py
index 6262505..ebc6086 100644
--- a/setup.py
+++ b/setup.py
@@ -33,18 +33,7 @@
if not python_is_supported():
raise RuntimeError(versions_required_message.format(version=sys.version))
-test_deps = ['bz2file', 'mock']
-
-dependencies = ['requests>=2.20.0']
-
-pydocstyle = 'pydocstyle<=3.0.0' if PY2 else 'pydocstyle>=2.5.0,!=4.0.0'
-if PY2:
- pillow = 'Pillow<7.0.0'
-elif PYTHON_VERSION < (3, 5):
- pillow = 'Pillow<6.0.0'
-else:
- pillow = 'Pillow'
-
+# ------- setup extra_requires ------- #
extra_deps = {
# Core library dependencies
'eventstreams': ['sseclient>=0.0.18,!=0.0.23,!=0.0.24'],
@@ -52,14 +41,23 @@
'Graphviz': ['pydot>=1.2'],
'Google': ['google>=1.7'],
'mwparserfromhell': ['mwparserfromhell>=0.3.3'],
- 'Tkinter': [pillow],
- 'security': ['requests[security]', 'pycparser!=2.14'],
+ 'Tkinter': [
+ 'Pillow<7.0.0;python_version<"3"',
+ 'Pillow<6.0.0;python_version=="3.4"',
+ 'Pillow;python_version>="3.5"',
+ ],
+ 'security': [
+ 'requests[security]'
+ ';python_full_version=="2.7.7" or python_full_version=="2.7.8"',
+ 'pycparser!=2.14',
+ ],
'mwoauth': ['mwoauth>=0.2.4,!=0.3.1'],
'html': ['BeautifulSoup4'],
'http': ['fake_useragent'],
'flake8': [ # Due to incompatibilities between packages the order matters.
'flake8>=3.7.5',
- pydocstyle,
+ 'pydocstyle<=3.0.0;python_version<"3"',
+ 'pydocstyle>=2.5.0,!=4.0.0;python_version>="3.4"',
'hacking',
'flake8-coding',
'flake8-comprehensions',
@@ -73,49 +71,53 @@
'flake8-no-u-prefixed-strings>=0.2',
'pep8-naming>=0.7',
'pyflakes>=2.1.0',
- ]
+ ],
+ # Additional core library dependencies which are only available on Python 2
+ 'csv': ['unicodecsv;python_version<"3"'],
}
-if PY2:
- # Additional core library dependencies which are only available on Python 2
- extra_deps.update({
- 'csv': ['unicodecsv'],
- })
+# ------- setup extra_requires for scripts ------- #
script_deps = {
- 'flickrripper.py': ['flickrapi', pillow],
+ 'flickrripper.py': [
+ 'flickrapi',
+ 'Pillow<7.0.0;python_version<"3"',
+ 'Pillow<6.0.0;python_version=="3.4"',
+ 'Pillow;python_version>="3.5"',
+ ],
'states_redirect.py': ['pycountry'],
'weblinkchecker.py': ['memento_client>=0.5.1,!=0.6.0'],
'patrol.py': ['mwparserfromhell>=0.3.3'],
}
+script_deps['data_ingestion.py'] = extra_deps['csv']
+extra_deps.update(script_deps)
-if PY2:
- # tools.ip does not have a hard dependency on an IP address module,
- # as it falls back to using regexes if one is not available.
- # The functional backport of py3 ipaddress is acceptable:
- # https://pypi.org/project/ipaddress
- # However the Debian package python-ipaddr is also supported:
- # https://pypi.org/project/ipaddr
- # Other backports are likely broken.
- # ipaddr 2.1.10+ is distributed with Debian and Fedora. See T105443.
- dependencies.append('ipaddr>=2.1.10')
+# ------- setup install_requires ------- #
+dependencies = ['requests>=2.20.0']
+# tools.ip does not have a hard dependency on an IP address module,
+# as it falls back to using regexes if one is not available.
+# The functional backport of py3 ipaddress is acceptable:
+# https://pypi.org/project/ipaddress
+# However the Debian package python-ipaddr is also supported:
+# https://pypi.org/project/ipaddr
+# Other backports are likely broken.
+# ipaddr 2.1.10+ is distributed with Debian and Fedora. See T105443.
+dependencies.append('ipaddr>=2.1.10;python_version<"3"')
- # version.package_version() uses pathlib which is a python 3 library.
- # pathlib2 is required for python 2.7
- dependencies.append('pathlib2')
+# version.package_version() uses pathlib which is a python 3 library.
+# pathlib2 is required for python 2.7
+dependencies.append('pathlib2;python_version<"3"')
- if (2, 7, 6) < PYTHON_VERSION < (2, 7, 9):
- # Python versions before 2.7.9 will cause urllib3 to trigger
- # InsecurePlatformWarning warnings for all HTTPS requests. By
- # installing with security extras, requests will automatically set
- # them up and the warnings will stop. See
- # <https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwar…>
- # for more details.
- # There is no secure version of cryptography for Python 2.7.6 or older.
- dependencies += extra_deps['security']
+# Python versions before 2.7.9 will cause urllib3 to trigger
+# InsecurePlatformWarning warnings for all HTTPS requests. By
+# installing with security extras, requests will automatically set
+# them up and the warnings will stop. See
+# <https://urllib3.readthedocs.org/en/latest/security.html#insecureplatformwar…>
+# for more details.
+# There is no secure version of cryptography for Python 2.7.6 or older.
+dependencies += extra_deps['security']
- script_deps['data_ingestion.py'] = extra_deps['csv']
try:
import bz2
@@ -123,9 +125,11 @@
# Use bz2file if the python is not compiled with bz2 support.
dependencies.append('bz2file')
else:
- _unused = bz2
+ assert bz2
+# ------- setup tests_require ------- #
+test_deps = ['bz2file', 'mock']
# Some of the ui_tests depend on accessing the console window's menu
# to set the console font and copy and paste, achieved using pywinauto
# which depends on pywin32.
@@ -135,15 +139,12 @@
# Microsoft makes available a compiler for Python 2.7
# http://www.microsoft.com/en-au/download/details.aspx?id=44266
if os.name == 'nt' and os.environ.get('PYSETUP_TEST_NO_UI', '0') != '1':
- if PYTHON_VERSION >= (3, 5, 0) or PY2:
- pywinauto = 'pywinauto>0.6.4'
- pywin32 = 'pywin32>220'
- else: # Python 3.4
- pywinauto = 'pywinauto<=0.6.4'
- pywin32 = 'pywin32<=220'
- test_deps += [pywin32, pywinauto]
-
-extra_deps.update(script_deps)
+ test_deps += [
+ 'pywinauto>0.6.4;python_version>="3.5" or python_version<"3"',
+ 'pywinauto<=0.6.4;python_version=="3.4"',
+ 'pywin32>220;python_version>="3.5" or python_version<"3"',
+ 'pywin32<=220;python_version=="3.4"',
+ ]
# Add all dependencies as test dependencies,
# so all scripts can be compiled for script_tests, etc.
@@ -156,10 +157,8 @@
test_deps.remove('requests[security]')
# These extra dependencies are needed other unittest fails to load tests.
-if PY2:
- test_deps += extra_deps['csv']
-else:
- test_deps += ['six']
+test_deps += extra_deps['csv']
+test_deps += ['six;python_version>="3"']
def get_version():
--
To view, visit https://gerrit.wikimedia.org/r/524793
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I552f1a8a3e913904b10979a899470037d51552dc
Gerrit-Change-Number: 524793
Gerrit-PatchSet: 6
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/524741 )
Change subject: [doc] Prepare next release
......................................................................
[doc] Prepare next release
Change-Id: Iff4acf1949a7c5380f1d4c559383863312272c3f
---
M HISTORY.rst
M docs/conf.py
2 files changed, 7 insertions(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/HISTORY.rst b/HISTORY.rst
index 95ff932..f6694f9 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -4,6 +4,12 @@
Current release
---------------
+* Bugfixes and improvements
+* Localisation updates
+
+3.0.20190722
+------------
+
* Increase the throttling delay if maxlag >> retry-after (T210606)
* deprecate test_family: Site('test', 'test'), use wikipedia_family: Site('test', 'wikipedia') instead (T228375, T228300)
* Add "user_agent_description" option in config.py
diff --git a/docs/conf.py b/docs/conf.py
index cd50360..db93772 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -71,7 +71,7 @@
# The short X.Y version.
version = '3.0'
# The full version, including alpha/beta/rc tags.
-release = '3.0.20190430'
+release = '3.0.20190722'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
--
To view, visit https://gerrit.wikimedia.org/r/524741
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Iff4acf1949a7c5380f1d4c559383863312272c3f
Gerrit-Change-Number: 524741
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/522597 )
Change subject: [FEAT] Introduce WikibaseEntity class
......................................................................
[FEAT] Introduce WikibaseEntity class
This class should be the base class for all classes
which hold data on a Wikibase repository.
It is a first step towards supporting lexicographical data
and structured data on Commons.
For now, it doesn't hold any data and doesn't communicate
with the API at all.
This patch also includes several refactorings, bugfixes
and code cleanup.
Bug: T189321
Change-Id: If9661a48fa899a5544fc403d0d7c0764e2bd9413
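A minimal usage sketch, assuming a working connection to Wikidata; it only
exercises behaviour that ItemPage now inherits from the new base class
(identifier handling and the concept URI):

    import pywikibot

    repo = pywikibot.Site('wikidata', 'wikidata').data_repository()
    item = pywikibot.ItemPage(repo, 'Q42')

    print(item.getID())              # 'Q42'
    print(item.getID(numeric=True))  # 42
    print(item.concept_uri())        # 'http://www.wikidata.org/entity/Q42'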
---
M pywikibot/page.py
1 file changed, 193 insertions(+), 117 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/page.py b/pywikibot/page.py
index f93dbd9..48f6789 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -3706,7 +3706,104 @@
return self.isRegistered() and 'bot' not in self.groups()
-class WikibasePage(BasePage):
+class WikibaseEntity(object):
+
+ """
+ The base interface for Wikibase entities.
+
+ Each entity is identified by a data repository it belongs to
+ and an identifier.
+ """
+
+ def __init__(self, repo, id_=None):
+ """
+ Initializer.
+
+ @param repo: Entity repository.
+ @type repo: DataSite
+ @param id_: Entity identifier.
+ @type id_: str or None, -1 and None mean non-existing
+ """
+ self.repo = repo
+ self.id = id_ if id_ is not None else '-1'
+ if self.id != '-1' and not self.is_valid_id(self.id):
+ raise pywikibot.InvalidTitle(
+ "'%s' is not a valid %s page title"
+ % (self.id, self.entity_type))
+
+ def __repr__(self):
+ if self.id != '-1':
+ return 'pywikibot.page.{0}({1!r}, {2!r})'.format(
+ self.__class__.__name__, self.repo, self.id)
+ else:
+ return 'pywikibot.page.{0}({1!r})'.format(
+ self.__class__.__name__, self.repo)
+
+ @classmethod
+ def is_valid_id(cls, entity_id):
+ """
+ Whether the string can be a valid id of the entity type.
+
+ @param entity_id: The ID to test.
+ @type entity_id: basestring
+
+ @rtype: bool
+ """
+ if not hasattr(cls, 'title_pattern'):
+ return True
+
+ # todo: use re.fullmatch when Python 3.4+ required
+ return bool(re.match(cls.title_pattern + '$', entity_id))
+
+ def _defined_by(self, singular=False):
+ """
+ Internal function to provide the API parameters to identify the entity.
+
+ An empty dict is returned if the entity has not been created yet.
+
+ @param singular: Whether the parameter names should use the singular
+ form
+ @type singular: bool
+ @return: API parameters
+ @rtype: dict
+ """
+ params = {}
+ if self.id != '-1':
+ if singular:
+ params['id'] = self.id
+ else:
+ params['ids'] = self.id
+ return params
+
+ def getID(self, numeric=False):
+ """
+ Get the identifier of this entity.
+
+ @param numeric: Strip the first letter and return an int
+ @type numeric: bool
+ """
+ if numeric:
+ return int(self.id[1:]) if self.id != '-1' else -1
+ else:
+ return self.id
+
+ def get_data_for_new_entity(self):
+ """
+ Return data required for creation of a new entity.
+
+ Override it if you need.
+
+ @rtype: dict
+ """
+ return {}
+
+ def concept_uri(self):
+ """Return the full concept URI."""
+ # todo: raise when self.id is -1
+ return '{0}{1}'.format(self.repo.concept_base_uri, self.id)
+
+
+class WikibasePage(BasePage, WikibaseEntity):
"""
The base page for the Wikibase extension.
@@ -3784,12 +3881,12 @@
self._namespace = entity_type_ns
kwargs['ns'] = self._namespace.id
- super(WikibasePage, self).__init__(site, title, **kwargs)
+ BasePage.__init__(self, site, title, **kwargs)
# If a title was not provided,
# avoid checks which may cause an exception.
if not title:
- self.repo = site
+ WikibaseEntity.__init__(self, site)
return
if self._namespace:
@@ -3808,66 +3905,14 @@
raise ValueError('%r: Namespace "%r" is not valid'
% (self.site, ns))
- # .site forces a parse of the Link title to determine site
- self.repo = self.site
- # Link.__init__, called from Page.__init__, has cleaned the title
- # stripping whitespace and uppercasing the first letter according
- # to the namespace case=first-letter.
- self.id = self._link.title
- if not self.is_valid_id(self.id):
- raise pywikibot.InvalidTitle(
- "'%s' is not a valid %s page title"
- % (self.id, self.entity_type))
-
- def _defined_by(self, singular=False):
- """
- Internal function to provide the API parameters to identify the entity.
-
- The API parameters may be 'id' if the ItemPage has one,
- or 'site'&'title' if instantiated via ItemPage.fromPage with
- lazy_load enabled.
-
- Once an item's "p/q##" is looked up, that will be used for all future
- requests.
-
- An empty dict is returned if the ItemPage is instantiated without
- either ID (internally it has id = '-1') or site&title.
-
- @param singular: Whether the parameter names should use the singular
- form
- @type singular: bool
- @return: API parameters
- @rtype: dict
- """
- params = {}
- if singular:
- id = 'id'
- site = 'site'
- title = 'title'
- else:
- id = 'ids'
- site = 'sites'
- title = 'titles'
-
- lazy_loading_id = not hasattr(self, 'id') and hasattr(self, '_site')
-
- # id overrides all
- if hasattr(self, 'id'):
- if self.id != '-1':
- params[id] = self.id
- elif lazy_loading_id:
- params[site] = self._site.dbName()
- params[title] = self._title
- else:
- # if none of the above applies, this item is in an invalid state
- # which needs to be raise as an exception, but also logged in case
- # an exception handler is catching the generic Error.
- pywikibot.error('%s is in invalid state'
- % self.__class__.__name__)
- raise pywikibot.Error('%s is in invalid state'
- % self.__class__.__name__)
-
- return params
+ WikibaseEntity.__init__(
+ self,
+ # .site forces a parse of the Link title to determine site
+ self.site,
+ # Link.__init__, called from Page.__init__, has cleaned the title
+ # stripping whitespace and uppercasing the first letter according
+ # to the namespace case=first-letter.
+ self._link.title)
def __getattribute__(self, name):
"""Low-level attribute getter. Deprecates lastrevid."""
@@ -3951,6 +3996,7 @@
'{0}.get does not implement var args: {1!r} and {2!r}'.format(
self.__class__, args, kwargs))
+ # todo: this variable is specific to ItemPage
lazy_loading_id = not hasattr(self, 'id') and hasattr(self, '_site')
if force or not hasattr(self, '_content'):
identification = self._defined_by()
@@ -4014,16 +4060,6 @@
'claims': self.claims,
}
- def get_data_for_new_entity(self):
- """
- Return data required for creation of new page.
-
- Override it if you need.
-
- @rtype: dict
- """
- return {}
-
def _diff_to(self, type_key, key_name, value_name, diffto, data):
assert type_key not in data, 'Key type must be defined in data'
source = self._normalizeLanguages(getattr(self, type_key)).copy()
@@ -4047,7 +4083,7 @@
When diffto is provided, JSON representing differences
to the provided data is created.
- @param diffto: JSON containing claim data
+ @param diffto: JSON containing entity data
@type diffto: dict
@rtype: dict
@@ -4109,38 +4145,6 @@
data['claims'] = claims
return data
- def getID(self, numeric=False, force=False):
- """
- Get the entity identifier.
-
- @param numeric: Strip the first letter and return an int
- @type numeric: bool
- @param force: Force an update of new data
- @type force: bool
- """
- if not hasattr(self, 'id') or force:
- self.get(force=force)
- if numeric:
- return int(self.id[1:]) if self.id != '-1' else -1
-
- return self.id
-
- @classmethod
- def is_valid_id(cls, entity_id):
- """
- Whether the string can be a valid id of the entity type.
-
- @param entity_id: The ID to test.
- @type entity_id: basestring
-
- @rtype: bool
- """
- if not hasattr(cls, 'title_pattern'):
- return True
-
- # todo: use re.fullmatch when Python 3.4+ required
- return bool(re.match(cls.title_pattern + '$', entity_id))
-
@property
def latest_revision_id(self):
"""
@@ -4259,6 +4263,7 @@
baserevid=baserevid, **kwargs)
self.latest_revision_id = updates['entity']['lastrevid']
+ # todo: this variable is specific to ItemPage
lazy_loading_id = not hasattr(self, 'id') and hasattr(self, '_site')
if lazy_loading_id or self.id == '-1':
self.__init__(self.site, title=updates['entity']['id'])
@@ -4375,7 +4380,7 @@
_cache_attrs = WikibasePage._cache_attrs + ('sitelinks',)
entity_type = 'item'
- title_pattern = r'(Q[1-9]\d*|-1)'
+ title_pattern = r'Q[1-9]\d*'
def __init__(self, site, title=None, ns=None):
"""
@@ -4406,6 +4411,56 @@
assert self.id == self._link.title
+ def _defined_by(self, singular=False):
+ """
+ Internal function to provide the API parameters to identify the item.
+
+ The API parameters may be 'id' if the ItemPage has one,
+ or 'site'&'title' if instantiated via ItemPage.fromPage with
+ lazy_load enabled.
+
+ Once an item's Q## is looked up, that will be used for all future
+ requests.
+
+ An empty dict is returned if the ItemPage is instantiated without
+ either ID (internally it has id = '-1') or site&title.
+
+ @param singular: Whether the parameter names should use the singular
+ form
+ @type singular: bool
+ @return: API parameters
+ @rtype: dict
+ """
+ params = {}
+ if singular:
+ id = 'id'
+ site = 'site'
+ title = 'title'
+ else:
+ id = 'ids'
+ site = 'sites'
+ title = 'titles'
+
+ lazy_loading_id = not hasattr(self, 'id') and hasattr(self, '_site')
+
+ # id overrides all
+ if hasattr(self, 'id'):
+ if self.id != '-1':
+ params[id] = self.id
+ elif lazy_loading_id:
+ params[site] = self._site.dbName()
+ params[title] = self._title
+ else:
+ # if none of the above applies, this item is in an invalid state
+ # which needs to be raise as an exception, but also logged in case
+ # an exception handler is catching the generic Error.
+ pywikibot.error('%s is in invalid state'
+ % self.__class__.__name__)
+ raise pywikibot.Error('%s is in invalid state'
+ % self.__class__.__name__)
+
+ return params
+
def title(self, **kwargs):
"""
Return ID as title of the ItemPage.
@@ -4440,6 +4495,19 @@
return super(ItemPage, self).title(**kwargs)
+ def getID(self, numeric=False, force=False):
+ """
+ Get the entity identifier.
+
+ @param numeric: Strip the first letter and return an int
+ @type numeric: bool
+ @param force: Force an update of new data
+ @type force: bool
+ """
+ if not hasattr(self, 'id') or force:
+ self.get(force=force)
+ return super(WikibasePage, self).getID(numeric=numeric)
+
@classmethod
def fromPage(cls, page, lazy_load=False):
"""
@@ -4543,11 +4611,6 @@
data['sitelinks'] = self.sitelinks
return data
- @need_version('1.28-wmf.23')
- def concept_uri(self):
- """Return the full concept URI."""
- return '{0}{1}'.format(self.site.concept_base_uri, self.id)
-
def getRedirectTarget(self):
"""Return the redirect target for this page."""
target = super(ItemPage, self).getRedirectTarget()
@@ -4565,7 +4628,7 @@
When diffto is provided, JSON representing differences
to the provided data is created.
- @param diffto: JSON containing claim data
+ @param diffto: JSON containing entity data
@type diffto: dict
@rtype: dict
@@ -4755,9 +4818,11 @@
return self._isredir
return super(ItemPage, self).isRedirectPage()
- # alias for backwards compatibility
- concept_url = redirect_func(concept_uri, old_name='concept_url',
- class_name='ItemPage', since='20170222')
+
+# alias for backwards compatibility
+ItemPage.concept_url = redirect_func(
+ ItemPage.concept_uri, old_name='concept_url', class_name='ItemPage',
+ since='20170222')
class Property(object):
@@ -4872,7 +4937,7 @@
_cache_attrs = WikibasePage._cache_attrs + ('_type',)
entity_type = 'property'
- title_pattern = r'(P[1-9]\d*|-1)'
+ title_pattern = r'P[1-9]\d*'
def __init__(self, source, title=None, datatype=None):
"""
@@ -4932,9 +4997,20 @@
@rtype: Claim
"""
+ # todo: raise when self.id is -1
return Claim(self.site, self.getID(), datatype=self.type,
*args, **kwargs)
+ def getID(self, numeric=False):
+ """
+ Get the identifier of this property.
+
+ @param numeric: Strip the first letter and return an int
+ @type numeric: bool
+ """
+ # enforce this parent's implementation
+ return WikibasePage.getID(self, numeric=numeric)
+
def get_data_for_new_entity(self):
"""Return data required for creation of new property."""
return {'datatype': self.type}
--
To view, visit https://gerrit.wikimedia.org/r/522597
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: If9661a48fa899a5544fc403d0d7c0764e2bd9413
Gerrit-Change-Number: 522597
Gerrit-PatchSet: 6
Gerrit-Owner: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <Ladsgroup(a)gmail.com>
Gerrit-Reviewer: Lokal Profil <andre.costa(a)wikimedia.se>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Multichill <maarten(a)mdammers.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)