jenkins-bot has submitted this change and it was merged.
Change subject: [IMPROV] tests: General patcher for http module
......................................................................
[IMPROV] tests: General patcher for http module
Instead of just having a patcher for the request method in api_tests it moves a
more general patcher into utils to also patch fetch. The new patcher also
allows to intercept calls to request and fetch before and after the request has
been done. This will allow us to get the actual response from
archive.org.
Bug: T104761
Change-Id: Icfbc14424ff61ddd31d2f346c54ccc3d11c714dc
---
M tests/api_tests.py
M tests/utils.py
M tests/weblib_tests.py
3 files changed, 137 insertions(+), 68 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/api_tests.py b/tests/api_tests.py
index c3aa17f..c311922 100644
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -10,10 +10,7 @@
__version__ = '$Id$'
import datetime
-import json
import types
-
-from collections import Mapping
import pywikibot.data.api as api
import pywikibot.family
@@ -29,72 +26,13 @@
DefaultSiteTestCase,
DefaultDrySiteTestCase,
)
-from tests.utils import allowed_failure, FakeLoginManager
+from tests.utils import allowed_failure, FakeLoginManager, PatchedHttp
if not PY2:
from urllib.parse import unquote_to_bytes
unicode = str
else:
from urllib import unquote_plus as unquote_to_bytes
-
-
-class PatchedRequest(object):
-
- """
- A ContextWrapper allowing Request to handle specific returned data.
-
- This patches the C{http} import in the L{pywikibot.data.api} module to a
- class simulating C{request}. It has a C{data} attribute which is either a
- static value which the requests will return or it's a callable returning the
- data. If it's a callable it'll be called with the same parameters as the
- original function in the L{pywikibot.comms.http} module, but with an extra
- argument C{parameters} which contains the extracted parameters.
-
- A unicode returned will be forwarded directly and a Mapping will be first
- converted into a json string. If it is False it'll use the original request
- and do an actual request. Any other types are not allowed.
- """
-
- class FakeHttp(object):
-
-        """A fake http module to have a consistent response from request."""
-
- def __init__(self, wrapper):
- self.__wrapper = wrapper
-
- def request(self, *args, **kwargs):
- result = self.__wrapper.data
- if callable(result):
- result = result(*args, **kwargs)
- if result is False:
- return self.__wrapper._old_http.request(*args, **kwargs)
- elif isinstance(result, unicode):
- return result
- elif isinstance(result, Mapping):
- return json.dumps(result)
- else:
- raise ValueError('The result is not a valid type '
- '"{0}"'.format(type(result)))
-
- def __init__(self, data=None):
- """
- Initialize the context wrapper.
-
- @param data: The data for the request which may be changed later. It
- must be either unicode or Mapping before submitting a request.
- @type data: unicode or Mapping
- """
- super(PatchedRequest, self).__init__()
- self.data = data
-
- def __enter__(self):
- """Patch the http module property."""
- self._old_http = api.http
- api.http = PatchedRequest.FakeHttp(self)
-
- def __exit__(self, exc_type, exc_value, traceback):
- """Reset the http module property."""
- api.http = self._old_http
class TestAPIMWException(DefaultSiteTestCase):
@@ -138,7 +76,7 @@
"""Test a static request."""
req = api.Request(site=self.site, parameters={'action': 'query',
'fake': True})
- with PatchedRequest(self.data):
+ with PatchedHttp(api, self.data):
self.assertRaises(api.APIMWException, req.submit)
def test_API_error_encoding_ASCII(self):
@@ -148,7 +86,7 @@
'fake': True,
'titles': page})
self.assert_parameters = {'fake': ''}
- with PatchedRequest(self._dummy_request):
+ with PatchedHttp(api, self._dummy_request):
self.assertRaises(api.APIMWException, req.submit)
def test_API_error_encoding_Unicode(self):
@@ -158,7 +96,7 @@
'fake': True,
'titles': page})
self.assert_parameters = {'fake': ''}
- with PatchedRequest(self._dummy_request):
+ with PatchedHttp(api, self._dummy_request):
self.assertRaises(api.APIMWException, req.submit)
diff --git a/tests/utils.py b/tests/utils.py
index ac5fc50..4e06af0 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -8,6 +8,7 @@
from __future__ import print_function, unicode_literals
__version__ = '$Id$'
#
+import json
import os
import re
import subprocess
@@ -15,14 +16,18 @@
import time
import traceback
+from collections import Mapping
from warnings import warn
if sys.version_info[0] > 2:
import six
+ unicode = str
+
import pywikibot
from pywikibot import config
+from pywikibot.comms import threadedhttp
from pywikibot.site import Namespace
from pywikibot.data.api import CachedRequest
from pywikibot.data.api import Request as _original_Request
@@ -307,6 +312,121 @@
pass
+class DummyHttp(object):
+
+ """A class simulating the http module."""
+
+ def __init__(self, wrapper):
+        """Constructor with the given PatchedHttp instance."""
+ self.__wrapper = wrapper
+
+ def request(self, *args, **kwargs):
+ """The patched request method."""
+ result = self.__wrapper.before_request(*args, **kwargs)
+ if result is False:
+ result = self.__wrapper._old_http.request(*args, **kwargs)
+ elif isinstance(result, Mapping):
+ result = json.dumps(result)
+ elif not isinstance(result, unicode):
+ raise ValueError('The result is not a valid type '
+ '"{0}"'.format(type(result)))
+ response = self.__wrapper.after_request(result, *args, **kwargs)
+ if response is None:
+ response = result
+ return response
+
+ def fetch(self, *args, **kwargs):
+ """The patched fetch method."""
+ result = self.__wrapper.before_fetch(*args, **kwargs)
+ if result is False:
+ result = self.__wrapper._old_http.fetch(*args, **kwargs)
+ elif not isinstance(result, threadedhttp.HttpRequest):
+ raise ValueError('The result is not a valid type '
+ '"{0}"'.format(type(result)))
+ response = self.__wrapper.after_fetch(result, *args, **kwargs)
+ if response is None:
+ response = result
+ return response
+
+
+class PatchedHttp(object):
+
+ """
+ A ContextWrapper to handle any data going through the http module.
+
+ This patches the C{http} import in the given module to a class simulating
+ C{request} and C{fetch}. It has a C{data} attribute which is either a
+ static value which the requests will return or it's a callable returning the
+ data. If it's a callable it'll be called with the same parameters as the
+ original function in the L{http} module. For fine grained control it's
+ possible to override/monkey patch the C{before_request} and C{before_fetch}
+ methods. By default they just return C{data} directory or call it if it's
+ callable.
+
+ Even though L{http.request} is calling L{http.fetch}, it won't call the
+ patched method.
+
+ The data returned for C{request} may either be C{False}, a C{unicode} or a
+ C{Mapping} which is converted into a json string. The data returned for
+ C{fetch} can only be C{False} or a L{threadedhttp.HttpRequest}. For both
+ variants any other types are not allowed and if it is False it'll use the
+ original method and do an actual request.
+
+ Afterwards it is always calling C{after_request} or C{after_fetch} with the
+ response and given arguments. That can return a different response too, but
+ can also return None so that the original response is forwarded.
+ """
+
+ def __init__(self, module, data=None):
+ """
+ Constructor.
+
+ @param module: The given module to patch. It must have the http module
+ imported as http.
+ @type module: Module
+ @param data: The data returned for any request or fetch.
+ @type data: callable or False (or other depending on request/fetch)
+ """
+ super(PatchedHttp, self).__init__()
+ self._module = module
+ self.data = data
+
+ def _handle_data(self, *args, **kwargs):
+        """Return the data after it may have been called."""
+ if self.data is None:
+ raise ValueError('No handler is defined.')
+ elif callable(self.data):
+ return self.data(*args, **kwargs)
+ else:
+ return self.data
+
+ def before_request(self, *args, **kwargs):
+        """Return the value which should be returned by request."""
+ return self._handle_data(*args, **kwargs)
+
+ def before_fetch(self, *args, **kwargs):
+        """Return the value which should be returned by fetch."""
+ return self._handle_data(*args, **kwargs)
+
+ def after_request(self, response, *args, **kwargs):
+ """Handle the response after request."""
+ pass
+
+ def after_fetch(self, response, *args, **kwargs):
+ """Handle the response after fetch."""
+ pass
+
+ def __enter__(self):
+ """Patch the http module property."""
+ self._old_http = self._module.http
+ self._module.http = DummyHttp(self)
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ """Reset the http module property."""
+ self._module.http = self._old_http
+
+
def execute(command, data_in=None, timeout=0, error=None):
"""
Execute a command and capture outputs.
diff --git a/tests/weblib_tests.py b/tests/weblib_tests.py
index d05c853..18fa710 100644
--- a/tests/weblib_tests.py
+++ b/tests/weblib_tests.py
@@ -16,7 +16,9 @@
from urlparse import urlparse
import pywikibot.weblib as weblib
+
from tests.aspects import unittest, TestCase
+from tests.utils import PatchedHttp
class TestInternetArchive(TestCase):
@@ -29,15 +31,24 @@
},
}
+ def _test_response(self, response, *args, **kwargs):
+ # for later tests this must be present, and it'll tell us the
+ # original content if that does not match
+ self.assertIn('closest', response.content)
+
def testInternetArchiveNewest(self):
- archivedversion = weblib.getInternetArchiveURL('https://google.com')
+ with PatchedHttp(weblib, False) as p:
+ p.after_fetch = self._test_response
+ archivedversion = weblib.getInternetArchiveURL('https://google.com')
parsed = urlparse(archivedversion)
self.assertIn(parsed.scheme, [u'http', u'https'])
self.assertEqual(parsed.netloc, u'web.archive.org')
self.assertTrue(parsed.path.strip('/').endswith('www.google.com'),
parsed.path)
def testInternetArchiveOlder(self):
- archivedversion = weblib.getInternetArchiveURL('https://google.com',
'200606')
+ with PatchedHttp(weblib, False) as p:
+ p.after_fetch = self._test_response
+ archivedversion = weblib.getInternetArchiveURL('https://google.com',
'200606')
parsed = urlparse(archivedversion)
self.assertIn(parsed.scheme, [u'http', u'https'])
self.assertEqual(parsed.netloc, u'web.archive.org')
--
To view, visit
https://gerrit.wikimedia.org/r/224644
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Icfbc14424ff61ddd31d2f346c54ccc3d11c714dc
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>