jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/655199 )
Change subject: [IMPR] Add enabled_options, disabled_options to GeneratorFactory
......................................................................
[IMPR] Add enabled_options, disabled_options to GeneratorFactory
GeneratorFactory provides several page generators and page filters.
With this patch, individual options may be enabled or disabled.
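A minimal usage sketch (not part of the patch; the option names and the
argument loop are illustrative):

    import pywikibot
    from pywikibot import pagegenerators

    # Only honour the -page and -cat generator options; any other
    # argument falls through to the script's own option handling.
    factory = pagegenerators.GeneratorFactory(
        enabled_options=['page', 'cat'])

    # Alternatively, accept every option except -cat:
    # factory = pagegenerators.GeneratorFactory(disabled_options=['cat'])

    for arg in pywikibot.handle_args():
        # handle_arg was called handleArg in older releases; it also
        # returns False for options that are disabled here.
        if not factory.handle_arg(arg):
            pass  # handle script-specific options here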
Bug: T271320
Change-Id: Ib6a4b7a210da2790a5400362c933bb8eed9f0c99
---
M pywikibot/pagegenerators.py
1 file changed, 39 insertions(+), 4 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 874127d..c152abb 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -37,7 +37,7 @@
import pywikibot
from pywikibot import date, config, i18n, xmlreader
-from pywikibot.backports import List
+from pywikibot.backports import Iterable, List
from pywikibot.bot import ShowingListOption
from pywikibot.comms import http
from pywikibot.data import api
@@ -435,14 +435,21 @@
that are used by many scripts and that determine which pages to work on.
"""
- def __init__(self, site=None, positional_arg_name: Optional[str] = None):
+ def __init__(self, site=None,
+ positional_arg_name: Optional[str] = None,
+ enabled_options: Optional[Iterable[str]] = None,
+ disabled_options: Optional[Iterable[str]] = None):
"""
Initializer.
- @param site: Site for generator results.
+ @param site: Site for generator results
@type site: L{pywikibot.site.BaseSite}
@param positional_arg_name: generator to use for positional args,
which do not begin with a hyphen
+ @param enabled_options: only enable options given by this Iterable.
+ This is prioritized over disabled_options
+ @param disabled_options: disable these given options and let them
+ be handled by the script's options handler
"""
self.gens = []
self._namespaces = []
@@ -460,6 +467,27 @@
self._positional_arg_name = positional_arg_name
self._sparql = None
self.nopreload = False
+ self._validate_options(enabled_options, disabled_options)
+
+ def _validate_options(self, enable, disable):
+ """Validate option restrictions."""
+ msg = '{!r} is not a valid pagegenerators option to be '
+ enable = enable or []
+ disable = disable or []
+ self.enabled_options = set(enable)
+ self.disabled_options = set(disable)
+ for opt in enable:
+ if not hasattr(self, '_handle_' + opt):
+ pywikibot.warning((msg + 'enabled').format(opt))
+ self.enabled_options.remove(opt)
+ for opt in disable:
+ if not hasattr(self, '_handle_' + opt):
+ pywikibot.warning((msg + 'disabled').format(opt))
+ self.disabled_options.remove(opt)
+ if self.enabled_options and self.disabled_options:
+ pywikibot.warning('Ignoring disabled options because enabled '
+ 'options are set.')
+ self.disabled_options = set()
@property
def site(self):
@@ -1200,7 +1228,14 @@
if value == '':
value = None
- handler = getattr(self, '_handle_' + arg[1:], None)
+ opt = arg[1:]
+ if opt in self.disabled_options:
+ return False
+
+ if self.enabled_options and opt not in self.enabled_options:
+ return False
+
+ handler = getattr(self, '_handle_' + opt, None)
if not handler:
return False
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/655199
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ib6a4b7a210da2790a5400362c933bb8eed9f0c99
Gerrit-Change-Number: 655199
Gerrit-PatchSet: 5
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-CC: DannyS712 <DannyS712.enwiki(a)gmail.com>
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/658559 )
Change subject: [bugfix] BaseSite does not have an _interwikimap attribute
......................................................................
[bugfix] BaseSite does not have an _interwikimap attribute
Therefore, move the interwiki(), interwiki_prefix() and local_interwiki()
methods from BaseSite to APISite.
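A short usage sketch for the moved methods (not part of the patch; the
site and prefix values are illustrative):

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')  # an APISite

    # Resolve an interwiki prefix to its target site.
    target = site.interwiki('de')

    # All prefixes leading from site to target, shortest first.
    prefixes = site.interwiki_prefix(target)

    # Whether the prefix is a local interwiki prefix.
    is_local = site.local_interwiki('de')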
Change-Id: If7b9450c287ae7b108dd9b9d18fce1546c0c0afd
---
M pywikibot/site/__init__.py
M pywikibot/site/_basesite.py
2 files changed, 47 insertions(+), 47 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/site/__init__.py b/pywikibot/site/__init__.py
index 941fbd5..98b12a7 100644
--- a/pywikibot/site/__init__.py
+++ b/pywikibot/site/__init__.py
@@ -128,6 +128,53 @@
self._interwikimap = _InterwikiMap(self)
self.tokens = TokenWallet(self)
+ def interwiki(self, prefix):
+ """
+ Return the site for a corresponding interwiki prefix.
+
+ @raises pywikibot.exceptions.SiteDefinitionError: if the url given in
+ the interwiki table doesn't match any of the existing families.
+ @raises KeyError: if the prefix is not an interwiki prefix.
+ """
+ return self._interwikimap[prefix].site
+
+ def interwiki_prefix(self, site):
+ """
+ Return the interwiki prefixes going to that site.
+
+ The interwiki prefixes are ordered first by length (shortest first)
+ and then alphabetically. L{interwiki(prefix)} is not guaranteed to
+ equal C{site} (i.e. the parameter passed to this function).
+
+ @param site: The targeted site, which might be its own.
+ @type site: L{BaseSite}
+ @return: The interwiki prefixes
+ @rtype: list (guaranteed to be not empty)
+ @raises KeyError: if there is no interwiki prefix for that site.
+ """
+ assert site is not None, 'Site must not be None'
+ prefixes = set()
+ for url in site._interwiki_urls():
+ prefixes.update(self._interwikimap.get_by_url(url))
+ if not prefixes:
+ raise KeyError(
+ "There is no interwiki prefix to '{0}'".format(site))
+ return sorted(prefixes, key=lambda p: (len(p), p))
+
+ def local_interwiki(self, prefix):
+ """
+ Return whether the interwiki prefix is local.
+
+ A local interwiki prefix is handled by the target site like a normal
+ link. So if that link also contains an interwiki link, it is followed
+ as long as it's a local link.
+
+ @raises pywikibot.exceptions.SiteDefinitionError: if the url given in
+ the interwiki table doesn't match any of the existing families.
+ @raises KeyError: if the prefix is not an interwiki prefix.
+ """
+ return self._interwikimap[prefix].local
+
@classmethod
def fromDBName(cls, dbname, site=None):
"""
diff --git a/pywikibot/site/_basesite.py b/pywikibot/site/_basesite.py
index 4cf3f43..7e88701 100644
--- a/pywikibot/site/_basesite.py
+++ b/pywikibot/site/_basesite.py
@@ -243,53 +243,6 @@
yield base_path + '?title='
yield self.article_path
- def interwiki(self, prefix):
- """
- Return the site for a corresponding interwiki prefix.
-
- @raises pywikibot.exceptions.SiteDefinitionError: if the url given in
- the interwiki table doesn't match any of the existing families.
- @raises KeyError: if the prefix is not an interwiki prefix.
- """
- return self._interwikimap[prefix].site
-
- def interwiki_prefix(self, site):
- """
- Return the interwiki prefixes going to that site.
-
- The interwiki prefixes are ordered first by length (shortest first)
- and then alphabetically. L{interwiki(prefix)} is not guaranteed to
- equal C{site} (i.e. the parameter passed to this function).
-
- @param site: The targeted site, which might be it's own.
- @type site: L{BaseSite}
- @return: The interwiki prefixes
- @rtype: list (guaranteed to be not empty)
- @raises KeyError: if there is no interwiki prefix for that site.
- """
- assert site is not None, 'Site must not be None'
- prefixes = set()
- for url in site._interwiki_urls():
- prefixes.update(self._interwikimap.get_by_url(url))
- if not prefixes:
- raise KeyError(
- "There is no interwiki prefix to '{0}'".format(site))
- return sorted(prefixes, key=lambda p: (len(p), p))
-
- def local_interwiki(self, prefix):
- """
- Return whether the interwiki prefix is local.
-
- A local interwiki prefix is handled by the target site like a normal
- link. So if that link also contains an interwiki link it does follow
- it as long as it's a local link.
-
- @raises pywikibot.exceptions.SiteDefinitionError: if the url given in
- the interwiki table doesn't match any of the existing families.
- @raises KeyError: if the prefix is not an interwiki prefix.
- """
- return self._interwikimap[prefix].local
-
@deprecated('APISite.namespaces.lookup_name', since='20150703',
future_warning=True)
def ns_index(self, namespace): # pragma: no cover
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/658559
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: If7b9450c287ae7b108dd9b9d18fce1546c0c0afd
Gerrit-Change-Number: 658559
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/656997 )
Change subject: [maintenance] Add a preload_sites.py script to preload site information
......................................................................
[maintenance] Add a preload_sites.py script to preload site information
Instantiating a BaseLink causes site and user information to be loaded.
This is especially true for Page, Link, SiteLink objects and other
subclasses. The information is cached for 30 days by default. Loading
this site/user information in bulk for a wikibase object can add a long
wait time during normal bot operation, and bot operators do not expect
such a delay.
preload_sites.py preloads site/user information at once, which speeds up
normal bot operation a lot if the information isn't cached already.
To force preloading even when the cache has not expired, the global
option -API_config_expiry:0 can be set.
preload_sites.py decreases loading time by 65% due to parallel workers.
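Example invocations, based on the usage notes in the script (family
names must come from the script's supported list):

    # preload all supported families with the default number of workers
    python pwb.py preload_sites

    # preload only wikipedia and wiktionary using 8 parallel workers
    python pwb.py preload_sites wikipedia wiktionary -worker:8

    # bypass the 30-day cache and reload all site information
    python pwb.py -API_config_expiry:0 preload_sites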
Bug: T226157
Change-Id: I91f6fe7cd3257c1ec8496c74892ff006b9f86449
---
M docs/scripts/scripts.maintenance.rst
M scripts/README.rst
A scripts/maintenance/preload_sites.py
3 files changed, 90 insertions(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/docs/scripts/scripts.maintenance.rst b/docs/scripts/scripts.maintenance.rst
index c7e5bf9..c6441df 100644
--- a/docs/scripts/scripts.maintenance.rst
+++ b/docs/scripts/scripts.maintenance.rst
@@ -32,6 +32,11 @@
.. automodule:: scripts.maintenance.make_i18n_dict
+scripts.maintenance.preload_sites script
+-----------------------------------------
+
+.. automodule:: scripts.maintenance.preload_sites
+
scripts.maintenance.sorting\_order script
-----------------------------------------
@@ -42,7 +47,6 @@
.. automodule:: scripts.maintenance.update_linktrails
-
scripts.maintenance.wikimedia\_sites script
-------------------------------------------
diff --git a/scripts/README.rst b/scripts/README.rst
index e2feed6..8a2ec98 100644
--- a/scripts/README.rst
+++ b/scripts/README.rst
@@ -285,6 +285,8 @@
+------------------------+---------------------------------------------------------+
| make_i18n_dict.py | Generate an i18n file from a given script. |
+------------------------+---------------------------------------------------------+
+ | preload_sites.py | Preload and cache site information for each WM family. |
+ +------------------------+---------------------------------------------------------+
| sorting_order.py | Updates interwiki sorting order in family.py file. |
+------------------------+---------------------------------------------------------+
| update_linktrails.py | Script that updates the linktrails in family.py file. |
diff --git a/scripts/maintenance/preload_sites.py b/scripts/maintenance/preload_sites.py
new file mode 100644
index 0000000..5eb4f2a
--- /dev/null
+++ b/scripts/maintenance/preload_sites.py
@@ -0,0 +1,83 @@
+#!/usr/bin/python
+"""Script that preloads site and user info for all sites of given family.
+
+The following parameters are supported:
+
+-worker:<num> The number of parallel tasks to be run. Default is the
+ number of processors on the machine
+
+Usage:
+
+ python pwb.py preload_sites [{<family>}] [-worker:<num>]
+
+To force preloading, change the global expiry value to 0:
+
+ python pwb.py -API_config_expiry:0 preload_sites [{<family>}]
+
+"""
+#
+# (C) Pywikibot team, 2021
+#
+# Distributed under the terms of the MIT license.
+#
+from concurrent.futures import ThreadPoolExecutor, wait
+from datetime import datetime
+
+import pywikibot
+
+from pywikibot.family import Family
+
+# supported families by this script
+families_list = [
+ 'wikibooks',
+ 'wikinews',
+ 'wikipedia',
+ 'wikiquote',
+ 'wikisource',
+ 'wikiversity',
+ 'wikivoyage',
+ 'wiktionary',
+]
+
+exceptions = {
+}
+
+
+def preload_family(family):
+ """Preload all sites of a single family file."""
+ msg = 'Preloading sites of {} family{}'
+ pywikibot.output(msg.format(family, '...'))
+
+ codes = Family.load(family).languages_by_size
+ for code in exceptions.get(family, []):
+ if code in codes:
+ codes.remove(code)
+ obsolete = Family.load(family).obsolete
+
+ for code in codes:
+ if code not in obsolete:
+ site = pywikibot.Site(code, family)
+ pywikibot.Page(site, 'Main page') # the title doesn't matter
+
+ pywikibot.output(msg.format(family, ' completed.'))
+
+
+def preload_families(families, worker):
+ """Preload all sites of all given family files."""
+ start = datetime.now()
+ with ThreadPoolExecutor(worker) as executor:
+ futures = {executor.submit(preload_family, family):
+ family for family in families}
+ wait(futures)
+ pywikibot.output('Loading time used: {}'.format(datetime.now() - start))
+
+
+if __name__ == '__main__':
+ fam = set()
+ worker = None
+ for arg in pywikibot.handle_args():
+ if arg in families_list:
+ fam.add(arg)
+ elif arg.startswith('-worker'):
+ worker = int(arg.partition(':')[2])
+ preload_families(fam or families_list, worker)
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/656997
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I91f6fe7cd3257c1ec8496c74892ff006b9f86449
Gerrit-Change-Number: 656997
Gerrit-PatchSet: 4
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Bugreporter <bugreporter1(a)sina.com>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/658058 )
Change subject: [IMPR] return requests.Response with http.request
......................................................................
[IMPR] return requests.Response with http.request
- return requests.Response with http.request; this is a breaking change
but the function is usually not used directly
- modify api.Request._http_request to return the requests.Response object
- modify api.Request._json_loads to use the Response object and its
json() method directly. Remove old compat code and ignore
site.encoding()
- update getImagePageHtml
- remove api_tests.TestAPIMWException; the fetch test was never used and
was not functional. Also remove the requests tests, including
utils.DummyHttp and PatchedHttp, because neither works with the
requests.Response object and it is too heavy to fix.
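A minimal migration sketch for callers of http.request (not part of the
patch; the site and path are illustrative):

    import pywikibot
    from pywikibot.comms import http

    site = pywikibot.Site('en', 'wikipedia')
    path = '{}/index.php?title=Main_Page'.format(site.scriptpath())

    # http.request used to return the decoded body text; it now returns
    # a requests.Response, so take .text (or .json()) explicitly.
    response = http.request(site, path)
    text = response.text
    status = response.status_code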
Bug: T265206
Change-Id: I4c9e18935cbec8ef3a6f7d974498b0fb140de31b
---
M pywikibot/comms/http.py
M pywikibot/data/api.py
M pywikibot/page/__init__.py
M tests/api_tests.py
M tests/utils.py
5 files changed, 35 insertions(+), 263 deletions(-)
Approvals:
JJMC89: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 1c7f6e2..3e2748f 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -11,7 +11,7 @@
- Basic HTTP error handling
"""
#
-# (C) Pywikibot team, 2007-2020
+# (C) Pywikibot team, 2007-2021
#
# Distributed under the terms of the MIT license.
#
@@ -215,25 +215,26 @@
@deprecated_args(body='data')
-def request(site, uri: Optional[str] = None, headers=None, **kwargs) -> str:
+def request(site,
+ uri: Optional[str] = None,
+ headers: Optional[dict] = None,
+ **kwargs) -> requests.Response:
"""
Request to Site with default error handling and response decoding.
See L{requests.Session.request} for additional parameters.
- If the site argument is provided, the uri is a relative uri from
- and including the document root '/'.
-
- If the site argument is None, the uri must be absolute.
+ The optional uri is a relative uri from the site's base uri, including
+ the document root '/'.
@param site: The Site to connect to
- @type site: L{pywikibot.site.BaseSite}
+ @type site: pywikibot.site.BaseSite
@param uri: the URI to retrieve
@keyword charset: Either a valid charset (usable for str.decode()) or None
to automatically chose the charset from the returned header (defaults
to latin-1)
@type charset: CodecInfo, str, None
- @return: The received data
+ @return: The received Response object
"""
kwargs.setdefault('verify', site.verify_SSL_certificate())
old_validation = kwargs.pop('disable_ssl_certificate_validation', None)
@@ -254,7 +255,7 @@
baseuri = site.base_url(uri)
r = fetch(baseuri, headers=headers, **kwargs)
site.throttle.retry_after = int(r.headers.get('retry-after', 0))
- return r.text
+ return r
def get_authentication(uri: str) -> Optional[Tuple[str, str]]:
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index bde7b4f..2106b0a 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -7,7 +7,6 @@
import datetime
import hashlib
import inspect
-import json
import os
import pickle
import pprint
@@ -1534,16 +1533,17 @@
_logger)
return use_get, uri, body, headers
- def _http_request(self, use_get, uri, body, headers, paramstring) -> tuple:
+ def _http_request(self, use_get: bool, uri: str, data, headers,
+ paramstring) -> tuple:
"""Get or post a http request with exception handling.
- @return: a tuple containing data from request and use_get value
+ @return: a tuple containing requests.Response object from
+ http.request and use_get value
"""
try:
- data = http.request(
- self.site, uri=uri,
- method='GET' if use_get else 'POST',
- data=body, headers=headers)
+ response = http.request(self.site, uri=uri,
+ method='GET' if use_get else 'POST',
+ data=data, headers=headers)
except Server504Error:
pywikibot.log('Caught HTTP 504 error; retrying')
except Server414Error:
@@ -1564,32 +1564,28 @@
pywikibot.error(traceback.format_exc())
pywikibot.log('{}, {}'.format(uri, paramstring))
else:
- return data, use_get
+ return response, use_get
self.wait()
return None, use_get
- def _json_loads(self, data: Union[str, bytes]) -> Optional[dict]:
- """Read source text and return a dict.
+ def _json_loads(self, response) -> Optional[dict]:
+ """Return a dict from requests.Response.
- @param data: raw data string
+ @param response: a requests.Response object
+ @type response: requests.Response
@return: a data dict
@raises APIError: unknown action found
@raises APIError: unknown query result type
"""
- if not isinstance(data, str):
- data = data.decode(self.site.encoding())
- pywikibot.debug(('API response received from {}:\n'
- .format(self.site)) + data, _logger)
- if data.startswith('unknown_action'):
- raise APIError(data[:14], data[16:])
try:
- result = json.loads(data)
+ result = response.json()
except ValueError:
# if the result isn't valid JSON, there must be a server
# problem. Wait a few seconds and try again
pywikibot.warning(
'Non-JSON response received from server {}; '
- 'the server may be down.'.format(self.site))
+ 'the server may be down.\nStatus code: {}'
+ .format(self.site, response.status_code))
# there might also be an overflow, so try a smaller limit
for param in self._params:
if param.endswith('limit'):
@@ -1600,11 +1596,6 @@
pywikibot.output('Set {} = {}'
.format(param, self[param]))
else:
- if result and not isinstance(result, dict):
- raise APIError('Unknown',
- 'Unable to process query response of type {}.'
- .format(type(result)),
- data=result)
return result or {}
self.wait()
return None
@@ -1804,12 +1795,12 @@
use_get, uri, body, headers = self._get_request_params(use_get,
paramstring)
- rawdata, use_get = self._http_request(use_get, uri, body, headers,
- paramstring)
- if rawdata is None:
+ response, use_get = self._http_request(use_get, uri, body, headers,
+ paramstring)
+ if response is None:
continue
- result = self._json_loads(rawdata)
+ result = self._json_loads(response)
if result is None:
continue
diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index a803780..7970018 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -2347,17 +2347,16 @@
self.site.loadimageinfo(self, history=True)
return self._file_revisions
- def getImagePageHtml(self):
- """
- Download the file page, and return the HTML, as a string.
+ def getImagePageHtml(self) -> str:
+ """Download the file page, and return the HTML, as a string.
Caches the HTML code, so that if you run this method twice on the
same FilePage object, the page will only be downloaded once.
"""
if not hasattr(self, '_imagePageHtml'):
- path = '%s/index.php?title=%s' \
- % (self.site.scriptpath(), self.title(as_url=True))
- self._imagePageHtml = http.request(self.site, path)
+ path = '{}/index.php?title={}'.format(self.site.scriptpath(),
+ self.title(as_url=True))
+ self._imagePageHtml = http.request(self.site, path).text
return self._imagePageHtml
@deprecated('get_file_url', since='20160609', future_warning=True)
diff --git a/tests/api_tests.py b/tests/api_tests.py
index 8974259..86f36a0 100644
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -9,7 +9,6 @@
from collections import defaultdict
from contextlib import suppress
-from urllib.parse import unquote_to_bytes
import pywikibot.data.api as api
import pywikibot.family
@@ -27,105 +26,7 @@
DefaultDrySiteTestCase,
)
from tests import patch
-from tests.utils import FakeLoginManager, PatchedHttp
-
-
-class TestAPIMWException(DefaultSiteTestCase):
-
- """Test raising an APIMWException."""
-
- user = True
-
- data = {'error': {'code': 'internal_api_error_fake',
- 'info': 'Fake error message'},
- 'servedby': 'unittest',
- }
-
- def _dummy_request(self, *args, **kwargs):
- self.assertLength(args, 1) # one positional argument for http.request
- site = args[0]
- self.assertIsInstance(site, pywikibot.BaseSite)
- self.assertIn('data', kwargs)
- self.assertIn('uri', kwargs)
- if kwargs['data'] is None:
- # use uri and remove script path
- parameters = kwargs['uri']
- prefix = site.scriptpath() + '/api.php?'
- self.assertEqual(prefix, parameters[:len(prefix)])
- parameters = parameters[len(prefix):]
- else:
- parameters = kwargs['data']
- parameters = parameters.encode('ascii') # it should be bytes anyway
- # Extract parameter data from the body, it's ugly but allows us
- # to verify that we actually test the right request
- parameters = [p.split(b'=', 1) for p in parameters.split(b'&')]
- keys = [p[0].decode('ascii') for p in parameters]
- values = [unquote_to_bytes(p[1]) for p in parameters]
- values = [v.decode(site.encoding()) for v in values]
- values = [v.replace('+', ' ') for v in values]
- values = [set(v.split('|')) for v in values]
- parameters = dict(zip(keys, values))
-
- if 'fake' not in parameters:
- return False # do an actual request
- if self.assert_parameters:
- for param, value in self.assert_parameters.items():
- self.assertIn(param, parameters)
- if value is not None:
- if isinstance(value, str):
- value = value.split('|')
- self.assertLessEqual(set(value), parameters[param])
- return self.data
-
- def setUp(self):
- """Mock warning and error."""
- super().setUp()
- self.warning_patcher = patch.object(pywikibot, 'warning')
- self.error_patcher = patch.object(pywikibot, 'error')
- self.warning_patcher.start()
- self.error_patcher.start()
-
- def tearDown(self):
- """Check warning and error calls."""
- self.warning_patcher.stop()
- self.error_patcher.stop()
- super().tearDown()
-
- def _test_assert_called_with(self, req):
- with self.assertRaises(api.APIMWException):
- req.submit()
- pywikibot.warning.assert_called_with(
- 'API error internal_api_error_fake: Fake error message')
- pywikibot.error.assert_called_with(
- 'Detected MediaWiki API exception internal_api_error_fake: '
- 'Fake error message\n[servedby: unittest]; raising')
-
- def test_API_error(self):
- """Test a static request."""
- req = api.Request(site=self.site, parameters={'action': 'query',
- 'fake': True})
- with PatchedHttp(api, self.data):
- self._test_assert_called_with(req)
-
- def test_API_error_encoding_ASCII(self):
- """Test a Page instance as parameter using ASCII chars."""
- page = pywikibot.page.Page(self.site, 'ASCII')
- req = api.Request(site=self.site, parameters={'action': 'query',
- 'fake': True,
- 'titles': page})
- self.assert_parameters = {'fake': ''}
- with PatchedHttp(api, self._dummy_request):
- self._test_assert_called_with(req)
-
- def test_API_error_encoding_Unicode(self):
- """Test a Page instance as parameter using non-ASCII chars."""
- page = pywikibot.page.Page(self.site, 'Ümlä üt')
- req = api.Request(site=self.site, parameters={'action': 'query',
- 'fake': True,
- 'titles': page})
- self.assert_parameters = {'fake': ''}
- with PatchedHttp(api, self._dummy_request):
- self._test_assert_called_with(req)
+from tests.utils import FakeLoginManager
class TestApiFunctions(DefaultSiteTestCase):
diff --git a/tests/utils.py b/tests/utils.py
index 291f11d..5025dbd 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -5,18 +5,14 @@
# Distributed under the terms of the MIT license.
#
import inspect
-import json
import os
import sys
import warnings
-from collections.abc import Mapping
from contextlib import contextmanager
from subprocess import PIPE, Popen, TimeoutExpired
from types import ModuleType
-from requests import Response
-
import pywikibot
from pywikibot import config
@@ -449,122 +445,6 @@
pass
-class DummyHttp(object):
-
- """A class simulating the http module."""
-
- def __init__(self, wrapper):
- """Initializer with the given PatchedHttp instance."""
- self.__wrapper = wrapper
-
- def request(self, *args, **kwargs):
- """The patched request method."""
- result = self.__wrapper.before_request(*args, **kwargs)
- if result is False:
- result = self.__wrapper._old_http.request(*args, **kwargs)
- elif isinstance(result, Mapping):
- result = json.dumps(result)
- elif not isinstance(result, str):
- raise ValueError('The result is not a valid type '
- '"{0}"'.format(type(result)))
- response = self.__wrapper.after_request(result, *args, **kwargs)
- if response is None:
- response = result
- return response
-
- def fetch(self, *args, **kwargs):
- """The patched fetch method."""
- result = self.__wrapper.before_fetch(*args, **kwargs)
- if result is False:
- result = self.__wrapper._old_http.fetch(*args, **kwargs)
- elif not isinstance(result, Response):
- raise ValueError('The result is not a valid type "{}"'
- .format(type(result)))
- response = self.__wrapper.after_fetch(result, *args, **kwargs)
- if response is None:
- response = result
- return response
-
-
-class PatchedHttp(object):
-
- """
- A ContextWrapper to handle any data going through the http module.
-
- This patches the C{http} import in the given module to a class simulating
- C{request} and C{fetch}. It has a C{data} attribute which is either a
- static value which the requests will return or it's a callable returning
- the data. If it's a callable it'll be called with the same parameters as
- the original function in the L{http} module. For fine grained control it's
- possible to override/monkey patch the C{before_request} and C{before_fetch}
- methods. By default they just return C{data} directory or call it if it's
- callable.
-
- Even though L{http.request} is calling L{http.fetch}, it won't call the
- patched method.
-
- The data returned for C{request} may either be C{False}, a C{str} or a
- C{Mapping} which is converted into a json string. The data returned for
- C{fetch} can only be C{False} or a L{requests.Response}. For both
- variants any other types are not allowed and if it is False it'll use the
- original method and do an actual request.
-
- Afterwards it is always calling C{after_request} or C{after_fetch} with the
- response and given arguments. That can return a different response too, but
- can also return None so that the original response is forwarded.
- """
-
- def __init__(self, module, data=None):
- """
- Initializer.
-
- @param module: The given module to patch. It must have the http module
- imported as http.
- @type module: Module
- @param data: The data returned for any request or fetch.
- @type data: callable or False (or other depending on request/fetch)
- """
- super().__init__()
- self._module = module
- self.data = data
-
- def _handle_data(self, *args, **kwargs):
- """Return the data after it may have been called."""
- if self.data is None:
- raise ValueError('No handler is defined.')
-
- if callable(self.data):
- return self.data(*args, **kwargs)
-
- return self.data
-
- def before_request(self, *args, **kwargs):
- """Return the value which should is returned by request."""
- return self._handle_data(*args, **kwargs)
-
- def before_fetch(self, *args, **kwargs):
- """Return the value which should is returned by fetch."""
- return self._handle_data(*args, **kwargs)
-
- def after_request(self, response, *args, **kwargs):
- """Handle the response after request."""
- pass
-
- def after_fetch(self, response, *args, **kwargs):
- """Handle the response after fetch."""
- pass
-
- def __enter__(self):
- """Patch the http module property."""
- self._old_http = self._module.http
- self._module.http = DummyHttp(self)
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- """Reset the http module property."""
- self._module.http = self._old_http
-
-
def execute(command, data_in=None, timeout=None, error=None):
"""
Execute a command and capture outputs.
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/658058
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I4c9e18935cbec8ef3a6f7d974498b0fb140de31b
Gerrit-Change-Number: 658058
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged