jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/329360 )
Change subject: Query string dictionary parameter for pwb.comms.http.fetch and friends
......................................................................
Query string dictionary parameter for pwb.comms.http.fetch and friends
This will break usages of the affected methods that rely on the order of
the parameters, rather than specifying the parameters dictionary-style.
* New "params" parameter to pass unencoded query string parameters as a
dictionary to pywikibot.comms.http.{fetch,request,_enqueue}
* PetScanPageGenerator has been updated to use this new parameter.
* "data" parameter added to fetch/request/_enqueue as an alias of "body"
to make the method parameters correspond to requests.Session.request
* Unit tests for "params" and "data" parameters
Bug: T153559
Change-Id: I96da6d4c719aba24d35e58dd5f0694e628be86a3
---
M pywikibot/comms/http.py
M pywikibot/comms/threadedhttp.py
M pywikibot/pagegenerators.py
M tests/http_tests.py
4 files changed, 105 insertions(+), 27 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 7bf6235..3006dde 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -276,8 +276,8 @@
@deprecate_arg('ssl', None)
-def request(site=None, uri=None, method='GET', body=None, headers=None,
- **kwargs):
+def request(site=None, uri=None, method='GET', params=None, body=None,
+ headers=None, data=None, **kwargs):
"""
Request to Site with default error handling and response decoding.
@@ -299,12 +299,17 @@
@return: The received data
@rtype: a unicode string
"""
+ # body and data parameters both map to the data parameter of
+ # requests.Session.request.
+ if data:
+ body = data
+
assert(site or uri)
if not site:
# +1 because of @deprecate_arg
issue_deprecation_warning(
'Invoking http.request without argument site', 'http.fetch()', 3)
- r = fetch(uri, method, body, headers, **kwargs)
+ r = fetch(uri, method, params, body, headers, **kwargs)
return r.content
baseuri = site.base_url(uri)
@@ -320,7 +325,7 @@
headers['user-agent'] = user_agent(site, format_string)
- r = fetch(baseuri, method, body, headers, **kwargs)
+ r = fetch(baseuri, method, params, body, headers, **kwargs)
return r.content
@@ -350,6 +355,7 @@
def _http_process(session, http_request):
method = http_request.method
uri = http_request.uri
+ params = http_request.params
body = http_request.body
headers = http_request.headers
if PY2 and headers:
@@ -370,8 +376,8 @@
# Note that the connections are pooled which mean that a future
# HTTPS request can succeed even if the certificate is invalid and
# verify=True, when a request with verify=False happened before
- response = session.request(method, uri, data=body, headers=headers,
- auth=auth, timeout=timeout,
+ response = session.request(method, uri, params=params, data=body,
+ headers=headers, auth=auth, timeout=timeout,
verify=not ignore_validation)
except Exception as e:
http_request.data = e
@@ -407,7 +413,8 @@
warning('Http response status {0}'.format(request.data.status_code))
-def _enqueue(uri, method="GET", body=None, headers=None, **kwargs):
+def _enqueue(uri, method="GET", params=None, body=None, headers=None, data=None,
+ **kwargs):
"""
Enqueue non-blocking threaded HTTP request with callback.
@@ -432,6 +439,11 @@
@type callbacks: list of callable
@rtype: L{threadedhttp.HttpRequest}
"""
+ # body and data parameters both map to the data parameter of
+ # requests.Session.request.
+ if data:
+ body = data
+
default_error_handling = kwargs.pop('default_error_handling', None)
callback = kwargs.pop('callback', None)
@@ -451,13 +463,14 @@
all_headers['user-agent'] = user_agent(None, user_agent_format_string)
request = threadedhttp.HttpRequest(
- uri, method, body, all_headers, callbacks, **kwargs)
+ uri, method, params, body, all_headers, callbacks, **kwargs)
_http_process(session, request)
return request
-def fetch(uri, method="GET", body=None, headers=None,
- default_error_handling=True, use_fake_user_agent=False, **kwargs):
+def fetch(uri, method="GET", params=None, body=None, headers=None,
+ default_error_handling=True, use_fake_user_agent=False, data=None,
+ **kwargs):
"""
Blocking HTTP request.
@@ -474,6 +487,11 @@
overridden by domain in config.
@rtype: L{threadedhttp.HttpRequest}
"""
+ # body and data parameters both map to the data parameter of
+ # requests.Session.request.
+ if data:
+ body = data
+
# Change user agent depending on fake UA settings.
# Set header to new UA if needed.
headers = headers or {}
@@ -489,7 +507,7 @@
elif use_fake_user_agent is True:
headers['user-agent'] = fake_user_agent()
- request = _enqueue(uri, method, body, headers, **kwargs)
+ request = _enqueue(uri, method, params, body, headers, **kwargs)
assert(request._data is not None) # if there's no data in the answer we're in trouble
# Run the error handling callback in the callers thread so exceptions
# may be caught.
diff --git a/pywikibot/comms/threadedhttp.py b/pywikibot/comms/threadedhttp.py
index a166929..03386cf 100644
--- a/pywikibot/comms/threadedhttp.py
+++ b/pywikibot/comms/threadedhttp.py
@@ -31,7 +31,7 @@
* an exception
"""
- def __init__(self, uri, method="GET", body=None, headers=None,
+ def __init__(self, uri, method="GET", params=None, body=None, headers=None,
callbacks=None, charset=None, **kwargs):
"""
Constructor.
@@ -40,6 +40,7 @@
"""
self.uri = uri
self.method = method
+ self.params = params
self.body = body
self.headers = headers
if isinstance(charset, codecs.CodecInfo):
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 3db16a7..61dacdb 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -47,7 +47,6 @@
intersect_generators,
IteratorNextMixin,
filter_unique,
- PY2,
)
from pywikibot import date, config, i18n, xmlreader
@@ -55,13 +54,6 @@
from pywikibot.exceptions import ArgumentDeprecationWarning, UnknownExtension
from pywikibot.logentries import LogEntryFactory
from pywikibot.proofreadpage import ProofreadPage
-
-if PY2:
- from urllib import urlencode
- import urlparse
-else:
- import urllib.parse as urlparse
- from urllib.parse import urlencode
if sys.version_info[0] > 2:
basestring = (str, )
@@ -2840,14 +2832,9 @@
def query(self):
"""Query PetScan."""
- url = urlparse.urlunparse(('https', # scheme
- 'petscan.wmflabs.org', # netloc
- '', # path
- '', # params
- urlencode(self.opts), # query
- '')) # fragment
+ url = 'https://petscan.wmflabs.org'
- req = http.fetch(url)
+ req = http.fetch(url, params=self.opts)
j = json.loads(req.content)
raw_pages = j['*'][0]['a']['*']
for raw_page in raw_pages:
diff --git a/tests/http_tests.py b/tests/http_tests.py
index 0cb28bd..26663f1 100644
--- a/tests/http_tests.py
+++ b/tests/http_tests.py
@@ -9,6 +9,7 @@
__version__ = '$Id$'
+import json
import re
import warnings
@@ -556,6 +557,77 @@
self.assertIs(main_module_cookie_jar, http.cookie_jar)
+class QueryStringParamsTestCase(TestCase):
+
+ """
+ Test the query string parameter of request methods.
+
+ The /get endpoint of httpbin returns JSON that can include an 'args' key with
+ urldecoded query string parameters.
+ """
+
+ sites = {
+ 'httpbin': {
+ 'hostname': 'httpbin.org',
+ },
+ }
+
+ def test_no_params(self):
+ """Test fetch method with no parameters."""
+ r = http.fetch(uri='https://httpbin.org/get', params={})
+ self.assertEqual(r.status, 200)
+
+ content = json.loads(r.content)
+ self.assertDictEqual(content['args'], {})
+
+ def test_unencoded_params(self):
+ """
+ Test fetch method with unencoded parameters, which should be encoded internally.
+
+ HTTPBin returns the args in their urldecoded form, so what we put in should be
+ the same as what we get out.
+ """
+ r = http.fetch(uri='https://httpbin.org/get', params={'fish&chips': 'delicious'})
+ self.assertEqual(r.status, 200)
+
+ content = json.loads(r.content)
+ self.assertDictEqual(content['args'], {'fish&chips': 'delicious'})
+
+ def test_encoded_params(self):
+ """
+ Test fetch method with encoded parameters, which should be re-encoded internally.
+
+ HTTPBin returns the args in their urldecoded form, so what we put in should be
+ the same as what we get out.
+ """
+ r = http.fetch(uri='https://httpbin.org/get',
+ params={'fish%26chips': 'delicious'})
+ self.assertEqual(r.status, 200)
+
+ content = json.loads(r.content)
+ self.assertDictEqual(content['args'], {'fish%26chips': 'delicious'})
+
+
+class DataBodyParameterTestCase(TestCase):
+ """Test that the data and body parameters of fetch/request methods are equivalent."""
+
+ sites = {
+ 'httpbin': {
+ 'hostname': 'httpbin.org',
+ },
+ }
+
+ def test_fetch(self):
+ """Test that using the data parameter and body parameter produce same results."""
+ r_data = http.fetch(uri='https://httpbin.org/post', method='POST',
+ data={'fish&chips': 'delicious'})
+ r_body = http.fetch(uri='https://httpbin.org/post', method='POST',
+ body={'fish&chips': 'delicious'})
+
+ self.assertDictEqual(json.loads(r_data.content),
+ json.loads(r_body.content))
+
+
if __name__ == '__main__': # pragma: no cover
try:
unittest.main()
--
To view, visit https://gerrit.wikimedia.org/r/329360
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I96da6d4c719aba24d35e58dd5f0694e628be86a3
Gerrit-PatchSet: 6
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Sn1per <geofbot(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/306640 )
Change subject: use page.pageid property instead of hidden cache variable
......................................................................
use page.pageid property instead of hidden cache variable
Change-Id: I00ff3f1d18883f956b262aa9059e55017d97cf12
Note: page.pageid is fetched from page info when needed
---
M tests/pagegenerators_tests.py
M tests/site_tests.py
2 files changed, 8 insertions(+), 9 deletions(-)
Approvals:
Magul: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index dc92beb..522f296 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -212,8 +212,7 @@
self.site)
pageids = []
for page in gen_pages:
- page.latest_revision_id # Force page info loading.
- pageids.append(page._pageid)
+ pageids.append(page.pageid)
gen = pagegenerators.PagesFromPageidGenerator(pageids, self.site)
self.assertPagelistTitles(gen, self.titles)
@@ -886,7 +885,7 @@
pages = list(gen)
self.assertEqual(len(pages), 10)
# pipe-separated used as test reference.
- pageids = '|'.join(str(page._pageid) for page in pages)
+ pageids = '|'.join(str(page.pageid) for page in pages)
# Get by pageids.
gf = pagegenerators.GeneratorFactory(site=self.get_site())
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 00695b3..daa4165 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -2474,7 +2474,7 @@
def test_load_from_pageids_iterable_of_str(self):
"""Test basic loading with pageids."""
- pageids = [str(page._pageid) for page in self.links]
+ pageids = [str(page.pageid) for page in self.links]
gen = self.site.load_pages_from_pageids(pageids)
for count, page in enumerate(gen, start=1):
self.assertIsInstance(page, pywikibot.Page)
@@ -2486,7 +2486,7 @@
def test_load_from_pageids_iterable_of_int(self):
"""Test basic loading with pageids."""
- pageids = [page._pageid for page in self.links]
+ pageids = [page.pageid for page in self.links]
gen = self.site.load_pages_from_pageids(pageids)
for count, page in enumerate(gen, start=1):
self.assertIsInstance(page, pywikibot.Page)
@@ -2498,7 +2498,7 @@
def test_load_from_pageids_iterable_in_order(self):
"""Test loading with pageids is ordered."""
- pageids = [page._pageid for page in self.links]
+ pageids = [page.pageid for page in self.links]
gen = self.site.load_pages_from_pageids(pageids)
for page in gen:
link = self.links.pop(0)
@@ -2510,7 +2510,7 @@
def test_load_from_pageids_iterable_with_duplicate(self):
"""Test loading with duplicate pageids."""
- pageids = [page._pageid for page in self.links]
+ pageids = [page.pageid for page in self.links]
pageids = pageids + pageids
gen = self.site.load_pages_from_pageids(pageids)
for count, page in enumerate(gen, start=1):
@@ -2523,7 +2523,7 @@
def test_load_from_pageids_comma_separated(self):
"""Test loading from comma-separated pageids."""
- pageids = ', '.join(str(page._pageid) for page in self.links)
+ pageids = ', '.join(str(page.pageid) for page in self.links)
gen = self.site.load_pages_from_pageids(pageids)
for count, page in enumerate(gen, start=1):
self.assertIsInstance(page, pywikibot.Page)
@@ -2535,7 +2535,7 @@
def test_load_from_pageids_pipe_separated(self):
"""Test loading from comma-separated pageids."""
- pageids = '|'.join(str(page._pageid) for page in self.links)
+ pageids = '|'.join(str(page.pageid) for page in self.links)
gen = self.site.load_pages_from_pageids(pageids)
for count, page in enumerate(gen, start=1):
self.assertIsInstance(page, pywikibot.Page)
--
To view, visit https://gerrit.wikimedia.org/r/306640
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I00ff3f1d18883f956b262aa9059e55017d97cf12
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/306082 )
Change subject: Site.search: Search all namespaces when none is given
......................................................................
Site.search: Search all namespaces when none is given
This behavior matches what had been previously described in the docstring.
Also remove the warning which was falsely triggered when the namespaces was 0.
Amend the related test accordingly.
Change-Id: I49df0a1441ec976a474a3d05448b4d09ea932a60
---
M pywikibot/site.py
M tests/site_tests.py
2 files changed, 2 insertions(+), 5 deletions(-)
Approvals:
Lokal Profil: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 31cbebd..c317f8c 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -4498,11 +4498,8 @@
where = 'titles'
else:
where = 'title'
- if namespaces == []:
+ if not namespaces and namespaces != 0:
namespaces = [ns_id for ns_id in self.namespaces if ns_id >= 0]
- if not namespaces:
- pywikibot.warning(u"search: namespaces cannot be empty; using [0].")
- namespaces = [0]
srgen = self._generator(api.PageGenerator, type_arg="search",
gsrsearch=searchstring, gsrwhat=where,
namespaces=namespaces,
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 82f1c0b..d95e41f 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -1377,7 +1377,7 @@
"""Test the site.search() method."""
mysite = self.site
try:
- se = list(mysite.search("wiki", total=100))
+ se = list(mysite.search("wiki", total=100, namespaces=0))
self.assertLessEqual(len(se), 100)
self.assertTrue(all(isinstance(hit, pywikibot.Page)
for hit in se))
--
To view, visit https://gerrit.wikimedia.org/r/306082
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I49df0a1441ec976a474a3d05448b4d09ea932a60
Gerrit-PatchSet: 5
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Lokal Profil <lokal.profil(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/328520 )
Change subject: tests/README.rst: Explain `cached` and `cacheinfo` class attributes
......................................................................
tests/README.rst: Explain `cached` and `cacheinfo` class attributes
Change-Id: I12ba77ab4b5b9a8cf7ee4ea41cd3ad6b1ad65fde
---
M tests/README.rst
1 file changed, 2 insertions(+), 0 deletions(-)
Approvals:
Magul: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/README.rst b/tests/README.rst
index 90f88db..b34ab6b 100644
--- a/tests/README.rst
+++ b/tests/README.rst
@@ -352,6 +352,8 @@
- ``net = False`` : test class does not use a site
- ``dry = True`` : test class can use a fake site object
+- ``cached = True``: test class may aggressively cache API responses
+- ``cacheinfo = True``: report cache hits and misses on tearDown
- ``user = True`` : test class needs to login to site
- ``sysop = True`` : test class needs to login to site as a sysop
- ``write = True`` : test class needs to write to a site
--
To view, visit https://gerrit.wikimedia.org/r/328520
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I12ba77ab4b5b9a8cf7ee4ea41cd3ad6b1ad65fde
Gerrit-PatchSet: 4
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/322421 )
Change subject: Remove bad default from Property.__init__
......................................................................
Remove bad default from Property.__init__
Argument id should always be given because calling None.upper() will
throw an error.
Change-Id: Ia3281a956b33cdb77bd8cfa245c51804430c62a3
---
M pywikibot/page.py
1 file changed, 3 insertions(+), 1 deletion(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 2549ff2..586e907 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -4247,12 +4247,14 @@
'external-id': 'string',
}
- def __init__(self, site, id=None, datatype=None):
+ def __init__(self, site, id, datatype=None):
"""
Constructor.
@param site: data repository
@type site: pywikibot.site.DataSite
+ @param id: id of the property
+ @type id: basestring
@param datatype: datatype of the property;
if not given, it will be queried via the API
@type datatype: basestring
--
To view, visit https://gerrit.wikimedia.org/r/322421
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ia3281a956b33cdb77bd8cfa245c51804430c62a3
Gerrit-PatchSet: 5
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/328508 )
Change subject: Add `.cache` to .gitignore
......................................................................
Add `.cache` to .gitignore
`pytest` creates a directory named `.cache` when running tests. This directory
is ignored on many python projects and is also included in github's default
gitignore file:
https://github.com/github/gitignore/blob/master/Python.gitignore#L43
Change-Id: Ic880337a12af50f0d086a0b586417472e1ef1618
---
M .gitignore
1 file changed, 1 insertion(+), 0 deletions(-)
Approvals:
Magul: Looks good to me, approved
jenkins-bot: Verified
diff --git a/.gitignore b/.gitignore
index 69fba31..eb433f7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,6 +12,7 @@
*apicache*
*pycache*
/cache/*
+.cache
.idea
pywikibot.egg-info/
pywikibot/families/
--
To view, visit https://gerrit.wikimedia.org/r/328508
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ic880337a12af50f0d086a0b586417472e1ef1618
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/328499 )
Change subject: api_tests.py: Skip test_valid_lagpattern if there is no replication
......................................................................
api_tests.py: Skip test_valid_lagpattern if there is no replication
site.siteinfo(): Issue a clearer deprecation warning by explicitly stating
that siteinfo should now be used as a dictionary instead.
Bug: T153838
Change-Id: I0e53be3c8cbbf6a533ddbb0b38855fcff8cf5e2f
---
M pywikibot/site.py
M tests/api_tests.py
2 files changed, 8 insertions(+), 1 deletion(-)
Approvals:
Magul: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/site.py b/pywikibot/site.py
index a1346ba..b3054fe 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -1730,7 +1730,9 @@
def __call__(self, key='general', force=False, dump=False):
"""DEPRECATED: Return the entry for key or dump the complete cache."""
- issue_deprecation_warning('Calling siteinfo', 'itself', 2)
+ issue_deprecation_warning(
+ 'Calling siteinfo', 'itself as a dictionary', 2
+ )
if not dump:
return self.get(key, expiry=0 if force else False)
else:
diff --git a/tests/api_tests.py b/tests/api_tests.py
index 9da8d8e..008b85e 100644
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -1047,6 +1047,11 @@
def test_valid_lagpattern(self):
"""Test whether api.lagpattern is valid."""
mysite = self.get_site()
+ if mysite.siteinfo['dbrepllag'][0]['lag'] == -1:
+ raise unittest.SkipTest(
+ '{0} is not running on a replicated database cluster.'
+ .format(mysite)
+ )
mythrottle = DummyThrottle(mysite)
mysite._throttle = mythrottle
params = {'action': 'query',
--
To view, visit https://gerrit.wikimedia.org/r/328499
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I0e53be3c8cbbf6a533ddbb0b38855fcff8cf5e2f
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/323396 )
Change subject: Recover from an allowed warning in the first chunk of an upload
......................................................................
Recover from an allowed warning in the first chunk of an upload
Bug: T151562
Change-Id: If9e946138f50100dfb752f113b87344eea366116
---
M pywikibot/site.py
1 file changed, 14 insertions(+), 0 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/site.py b/pywikibot/site.py
index f4267ca..9084daa 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -6091,10 +6091,24 @@
_file_key = data['filekey']
if 'warnings' in data and not ignore_all_warnings:
if callable(ignore_warnings):
+ restart = False
if 'offset' not in data:
+ # This is a result of a warning in the
+ # first chunk. The chunk is not actually
+ # stashed so upload must be restarted if
+ # the warning is allowed.
+ # T112416 and T112405#1637544
+ restart = True
data['offset'] = True
if ignore_warnings(create_warnings_list(data)):
# Future warnings of this run can be ignored
+ if restart:
+ return self.upload(
+ filepage, source_filename,
+ source_url, comment, text, watch,
+ True, chunk_size, None, 0,
+ report_success=False)
+
ignore_warnings = True
ignore_all_warnings = True
offset = data['offset']
--
To view, visit https://gerrit.wikimedia.org/r/323396
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: If9e946138f50100dfb752f113b87344eea366116
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Lokal Profil <lokal.profil(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Lokal Profil <lokal.profil(a)gmail.com>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/321903 )
Change subject: Refactor ID validation
......................................................................
Refactor ID validation
Created new classmethod to check whether given string can be an id of
the entity type. This check is triggered whenever a new entity is
constructed.
Change-Id: I4fe06f8d8f36100c2b183cf130e2e9e7113218b9
---
M pywikibot/page.py
M tests/wikibase_tests.py
2 files changed, 32 insertions(+), 14 deletions(-)
Approvals:
Magul: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/page.py b/pywikibot/page.py
index a105dac..797d4b2 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -3361,7 +3361,14 @@
# .site forces a parse of the Link title to determine site
self.repo = self.site
+ # Link.__init__, called from Page.__init__, has cleaned the title
+ # stripping whitespace and uppercasing the first letter according
+ # to the namespace case=first-letter.
self.id = self._link.title
+ if not self.is_valid_id(self.id):
+ raise pywikibot.InvalidTitle(
+ "'%s' is not a valid %s page title"
+ % (self.id, self.entity_type))
def _defined_by(self, singular=False):
"""
@@ -3648,6 +3655,21 @@
return self.id
+ @classmethod
+ def is_valid_id(cls, entity_id):
+ """
+ Whether the string can be a valid id of the entity type.
+
+ @param entity_id: The ID to test.
+ @type entity_id: basestring
+
+ @rtype: bool
+ """
+ if not hasattr(cls, 'title_pattern'):
+ return True
+
+ return bool(re.match(cls.title_pattern, entity_id))
+
@property
def latest_revision_id(self):
"""
@@ -3812,6 +3834,7 @@
"""
entity_type = 'item'
+ title_pattern = r'^(Q[1-9]\d*|-1)$'
def __init__(self, site, title=None, ns=None):
"""
@@ -3834,17 +3857,11 @@
assert self.id == '-1'
return
+ # we don't want empty titles
+ if not title:
+ raise pywikibot.InvalidTitle("Item's title cannot be empty")
+
super(ItemPage, self).__init__(site, title, ns=ns)
-
- # Link.__init__, called from Page.__init__, has cleaned the title
- # stripping whitespace and uppercasing the first letter according
- # to the namespace case=first-letter.
-
- # Validate the title is 'Q' and a positive integer.
- if not re.match(r'^Q[1-9]\d*$', self._link.title):
- raise pywikibot.InvalidTitle(
- u"'%s' is not a valid item page title"
- % self._link.title)
assert self.id == self._link.title
@@ -4233,6 +4250,7 @@
"""
entity_type = 'property'
+ title_pattern = r'^P[1-9]\d*$'
def __init__(self, source, title=u""):
"""
@@ -4243,11 +4261,11 @@
@param title: page name of property, like "P##"
@type title: str
"""
+ if not title:
+ raise pywikibot.InvalidTitle("Property's title cannot be empty")
+
WikibasePage.__init__(self, source, title,
ns=source.property_namespace)
- if not title or not self.id.startswith('P'):
- raise pywikibot.InvalidTitle(
- u"'%s' is not an property page title" % title)
Property.__init__(self, source, self.id)
def get(self, force=False, *args, **kwargs):
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index 8a18771..6d51472 100644
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -723,7 +723,7 @@
def test_property_empty_property(self):
"""Test creating a PropertyPage without a title."""
wikidata = self.get_repo()
- self.assertRaises(pywikibot.Error, PropertyPage, wikidata)
+ self.assertRaises(pywikibot.InvalidTitle, PropertyPage, wikidata)
def test_globe_coordinate(self):
"""Test a coordinate PropertyPage has the correct type."""
--
To view, visit https://gerrit.wikimedia.org/r/321903
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I4fe06f8d8f36100c2b183cf130e2e9e7113218b9
Gerrit-PatchSet: 9
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <Ladsgroup(a)gmail.com>
Gerrit-Reviewer: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Multichill <maarten(a)mdammers.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/326992 )
Change subject: Alias "mul" for "-" added
......................................................................
Alias "mul" for "-" added
Bug: T114574
Change-Id: I125300cb3b8800d2795b6d0511e8941ba9ad541c
---
M pywikibot/families/wikisource_family.py
M pywikibot/family.py
2 files changed, 6 insertions(+), 4 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/families/wikisource_family.py b/pywikibot/families/wikisource_family.py
index 661ca2b..8d0ec61 100644
--- a/pywikibot/families/wikisource_family.py
+++ b/pywikibot/families/wikisource_family.py
@@ -39,10 +39,10 @@
super(Family, self).__init__()
- # FIXME: '-' is invalid at the beginning of a hostname, and
- # '-' is not a valid subdomain.
- self.langs['-'] = self.domain
- self.languages_by_size.append('-')
+ # All requests to 'mul.wikisource.org/*' are redirected to
+ # the main page, so using 'wikisource.org'
+ self.langs['mul'] = self.domain
+ self.languages_by_size.append('mul')
# Global bot allowed languages on
# https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implement…
diff --git a/pywikibot/family.py b/pywikibot/family.py
index e1157cb..145d984 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -1557,6 +1557,8 @@
# Renamed; see T11823
'be-x-old': 'be-tarask',
+
+ '-': 'mul', # T114574
}
# Not open for edits; stewards can still edit.
--
To view, visit https://gerrit.wikimedia.org/r/326992
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I125300cb3b8800d2795b6d0511e8941ba9ad541c
Gerrit-PatchSet: 4
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Borisfba <borya236(a)gmail.com>
Gerrit-Reviewer: Aklapper <aklapper(a)wikimedia.org>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/206790 )
Change subject: Use user-based configuration settings in script_wui.py
......................................................................
Use user-based configuration settings in script_wui.py
And also a nice error message when it doesn't exist.
Bug: T70797
Change-Id: I7486e66df0c05b490f1e8cc95c28b74e370fe90a
---
M scripts/script_wui.py
1 file changed, 19 insertions(+), 12 deletions(-)
Approvals:
Ladsgroup: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/script_wui.py b/scripts/script_wui.py
index 616e35f..ba75271 100755
--- a/scripts/script_wui.py
+++ b/scripts/script_wui.py
@@ -73,7 +73,6 @@
import datetime
import gc
import logging
-import os
import re
import resource
import sys
@@ -107,14 +106,11 @@
bot_config = {
- 'BotName': pywikibot.config.usernames[pywikibot.config.family][pywikibot.config.mylang],
+ 'BotName': "{username}",
- # protected !!! ('CSS' or other semi-protected page is essential here)
- 'ConfCSSshell': 'User:DrTrigon/DrTrigonBot/script_wui-shell.css',
- 'ConfCSScrontab': u'User:DrTrigon/DrTrigonBot/script_wui-crontab.css',
-
- # (may be protected but not that important... 'CSS' is not needed here !!!)
- 'ConfCSSoutput': u'User:DrTrigonBot/Simulation',
+ 'ConfCSSshell': u'User:{username}/script_wui-shell.css',
+ 'ConfCSScrontab': u'User:{username}/script_wui-crontab.css',
+ 'ConfCSSoutput': u'User:{username}/Simulation',
'CRONMaxDelay': 5 * 60.0, # check all ~5 minutes
@@ -157,15 +153,20 @@
}
pywikibot.output(u'** Pre-loading all relevant page contents')
for item in self.refs:
- # security; first check if page is protected, reject any data if not
- if os.path.splitext(self.refs[item].title().lower())[1] not in ['.css', '.js']:
+ # First check if page is protected, reject any data if not
+ parts = self.refs[item].title().lower().rsplit('.')
+ if len(parts) == 1 or parts[1] not in ['.css', '.js']:
raise ValueError(u'%s config %s = %s is not a secure page; '
u'it should be a css or js userpage which are '
u'automatically semi-protected.'
% (self.__class__.__name__, item,
self.refs[item]))
- self.refs[item].get(force=True) # load all page contents
-
+ try:
+ self.refs[item].get(force=True) # load all page contents
+ except pywikibot.NoPage:
+            pywikibot.error("The configuration page %s doesn't exist"
+ % self.refs[item].title(asLink=True))
+ raise
# init background timer
pywikibot.output(u'** Starting crontab background timer thread')
self.on_timer()
@@ -326,6 +327,12 @@
site = pywikibot.Site()
site.login()
chan = '#' + site.code + '.' + site.family.name
+
+ bot_user_name = pywikibot.config.usernames[pywikibot.config.family][pywikibot.config.mylang]
+ for key, value in bot_config.items():
+ if hasattr(value, 'format'):
+ bot_config[key] = value.format(username=bot_user_name)
+
bot = ScriptWUIBot(site, chan, site.user() + "_WUI", "irc.wikimedia.org")
try:
bot.start()
--
To view, visit https://gerrit.wikimedia.org/r/206790
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I7486e66df0c05b490f1e8cc95c28b74e370fe90a
Gerrit-PatchSet: 8
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Ladsgroup <Ladsgroup(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <Ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Pppery <mapreader(a)olum.org>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/327551 )
Change subject: Fix regex for error message in siteinfo request
......................................................................
Fix regex for error message in siteinfo request
Allow the new error-message format introduced in https://phabricator.wikimedia.org/rMW4e6810e4a
Bug: T153325
Change-Id: I1e2e7dbd711f385fd6ee891f08fa016ddc8fb38c
---
M pywikibot/site.py
1 file changed, 2 insertions(+), 2 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 92245fd..12a40f4 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -1402,8 +1402,8 @@
All values of the siteinfo property 'general' are directly available.
"""
- WARNING_REGEX = re.compile(u"^Unrecognized values? for parameter "
- u"'siprop': ([^,]+(?:, [^,]+)*)$")
+ WARNING_REGEX = re.compile('^Unrecognized values? for parameter '
+ '["\']siprop["\']: (.+?)\.?$')
# Until we get formatversion=2, we have to convert empty-string properties
# into booleans so they are easier to use.
--
To view, visit https://gerrit.wikimedia.org/r/327551
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I1e2e7dbd711f385fd6ee891f08fa016ddc8fb38c
Gerrit-PatchSet: 4
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Lokal Profil <lokal.profil(a)gmail.com>
Gerrit-Reviewer: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>