jenkins-bot submitted this change.

View Change

Approvals: Xqt: Looks good to me, approved jenkins-bot: Verified
[IMPR] remove unneeded explicit params in fetch() and request()

Remove those parameters that are explicit in http.fetch() and
http.request() and instead can be embedded in **kwargs.

Also clean up fetch() calls that use 'uri' as a keyword parameter.

Change-Id: I335058809bf1c1732b305eac12a70cd1b82996d4
---
M pywikibot/comms/http.py
M pywikibot/data/api.py
M pywikibot/version.py
M tests/aspects.py
M tests/http_tests.py
M tests/site_tests.py
6 files changed, 29 insertions(+), 41 deletions(-)

diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 5a103be..aa8b6fa 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -218,8 +218,8 @@
return UserAgent().random


-def request(site, uri: Optional[str] = None, method='GET', params=None,
- body=None, headers=None, data=None, **kwargs) -> str:
+@deprecated_args(body='data')
+def request(site, uri: Optional[str] = None, headers=None, **kwargs) -> str:
"""
Request to Site with default error handling and response decoding.

@@ -239,11 +239,6 @@
@type charset: CodecInfo, str, None
@return: The received data
"""
- # body and data parameters both map to the data parameter of
- # requests.Session.request.
- if data:
- body = data
-
kwargs.setdefault('verify', site.verify_SSL_certificate())
old_validation = kwargs.pop('disable_ssl_certificate_validation', None)
if old_validation is not None:
@@ -261,7 +256,7 @@
headers['user-agent'] = user_agent(site, format_string)

baseuri = site.base_url(uri)
- r = fetch(baseuri, method, params, body, headers, **kwargs)
+ r = fetch(baseuri, headers=headers, **kwargs)
site.throttle.retry_after = int(r.response_headers.get('retry-after', 0))
return r.text

@@ -321,11 +316,9 @@
warning('Http response status {}'.format(request.status_code))


-@deprecated_args(callback=True)
-def fetch(uri, method='GET', params=None, body=None, headers=None,
- default_error_handling: bool = True,
- use_fake_user_agent: Union[bool, str] = False,
- data=None, **kwargs):
+@deprecated_args(callback=True, body='data')
+def fetch(uri, method='GET', headers=None, default_error_handling: bool = True,
+ use_fake_user_agent: Union[bool, str] = False, **kwargs):
"""
HTTP request.

@@ -346,11 +339,6 @@
@type callbacks: list of callable
@rtype: L{threadedhttp.HttpRequest}
"""
- # body and data parameters both map to the data parameter of
- # requests.Session.request.
- if data:
- body = data
-
# Change user agent depending on fake UA settings.
# Set header to new UA if needed.
headers = headers or {}
@@ -416,7 +404,7 @@
# Note that the connections are pooled which mean that a future
# HTTPS request can succeed even if the certificate is invalid and
# verify=True, when a request with verify=False happened before
- response = session.request(method, uri, params=params, data=body,
+ response = session.request(method, uri,
headers=headers, auth=auth, timeout=timeout,
**kwargs)
except Exception as e:
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 4b15ec9..b5e35bc 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -1605,9 +1605,9 @@
"""
try:
data = http.request(
- site=self.site, uri=uri,
+ self.site, uri=uri,
method='GET' if use_get else 'POST',
- body=body, headers=headers)
+ data=body, headers=headers)
except Server504Error:
pywikibot.log('Caught HTTP 504 error; retrying')
except Server414Error:
diff --git a/pywikibot/version.py b/pywikibot/version.py
index 3656cd3..c308a73 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -198,8 +198,8 @@
@return: the git hash
"""
uri = 'https://github.com/wikimedia/{}/!svn/vcc/default'.format(tag)
- request = fetch(uri=uri, method='PROPFIND',
- body="<?xml version='1.0' encoding='utf-8'?>"
+ request = fetch(uri, method='PROPFIND',
+ data="<?xml version='1.0' encoding='utf-8'?>"
'<propfind xmlns=\"DAV:\"><allprop/></propfind>',
headers={'label': str(rev),
'user-agent': 'SVN/1.7.5 {pwb}'})
@@ -374,7 +374,7 @@
# Gerrit API responses include )]}' at the beginning,
# make sure to strip it out
buf = http.fetch(
- uri='https://gerrit.wikimedia.org/r/projects/pywikibot%2Fcore/' + path,
+ 'https://gerrit.wikimedia.org/r/projects/pywikibot%2Fcore/' + path,
headers={'user-agent': '{pwb}'}).text[4:]
try:
hsh = json.loads(buf)['revision']
diff --git a/tests/aspects.py b/tests/aspects.py
index 8a8d29b..de69f64 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -484,7 +484,7 @@
try:
if '://' not in hostname:
hostname = 'http://' + hostname
- r = http.fetch(uri=hostname,
+ r = http.fetch(hostname,
method='HEAD',
default_error_handling=False)
if r.exception:
diff --git a/tests/http_tests.py b/tests/http_tests.py
index b5f2fab..a6880fe 100644
--- a/tests/http_tests.py
+++ b/tests/http_tests.py
@@ -100,12 +100,12 @@
"""Test if http.fetch respects disable_ssl_certificate_validation."""
self.assertRaisesRegex(
pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE, http.fetch,
- uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
+ 'https://testssl-expire-r2i2.disig.sk/index.en.html')
http.session.close() # clear the connection

with warnings.catch_warnings(record=True) as warning_log:
response = http.fetch(
- uri='https://testssl-expire-r2i2.disig.sk/index.en.html',
+ 'https://testssl-expire-r2i2.disig.sk/index.en.html',
verify=False)
r = response.text
self.assertIsInstance(r, str)
@@ -115,7 +115,7 @@
# Verify that it now fails again
self.assertRaisesRegex(
pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE, http.fetch,
- uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
+ 'https://testssl-expire-r2i2.disig.sk/index.en.html')
http.session.close() # clear the connection

# Verify that the warning occurred
@@ -144,14 +144,14 @@
self.assertRaisesRegex(pywikibot.Server504Error,
r'Server ([^\:]+|[^\:]+:[0-9]+) timed out',
http.fetch,
- uri=self.get_httpbin_url('/status/504'))
+ self.get_httpbin_url('/status/504'))

def test_server_not_found(self):
"""Test server not found exception."""
self.assertRaisesRegex(requests.exceptions.ConnectionError,
'Max retries exceeded with url: /w/api.php',
http.fetch,
- uri='http://ru-sib.wikipedia.org/w/api.php',
+ 'http://ru-sib.wikipedia.org/w/api.php',
default_error_handling=True)

def test_invalid_scheme(self):
@@ -160,18 +160,18 @@
self.assertRaisesRegex(
requests.exceptions.InvalidSchema,
"No connection adapters were found for u?'invalid://url'",
- http.fetch, uri='invalid://url')
+ http.fetch, 'invalid://url')

def test_follow_redirects(self):
"""Test follow 301 redirects correctly."""
# The following will redirect from ' ' -> '_', and maybe to https://
- r = http.fetch(uri='http://en.wikipedia.org/wiki/Main%20Page')
+ r = http.fetch('http://en.wikipedia.org/wiki/Main%20Page')
self.assertEqual(r.status_code, 200)
self.assertIsNotNone(r.data.history)
self.assertIn('//en.wikipedia.org/wiki/Main_Page',
r.data.url)

- r = http.fetch(uri='http://en.wikia.com')
+ r = http.fetch('http://en.wikia.com')
self.assertEqual(r.status_code, 200)
self.assertEqual(r.data.url,
'https://www.fandom.com/explore')
@@ -547,7 +547,7 @@

def test_http(self):
"""Test with http, standard http interface for pywikibot."""
- r = http.fetch(uri=self.url)
+ r = http.fetch(self.url)

self.assertEqual(r.content, self.png)

@@ -585,7 +585,7 @@

def test_no_params(self):
"""Test fetch method with no parameters."""
- r = http.fetch(uri=self.url, params={})
+ r = http.fetch(self.url, params={})
if r.status_code == 503: # T203637
self.skipTest(
'503: Service currently not available for ' + self.url)
@@ -601,7 +601,7 @@
HTTPBin returns the args in their urldecoded form, so what we put in
should be the same as what we get out.
"""
- r = http.fetch(uri=self.url, params={'fish&chips': 'delicious'})
+ r = http.fetch(self.url, params={'fish&chips': 'delicious'})
if r.status_code == 503: # T203637
self.skipTest(
'503: Service currently not available for ' + self.url)
@@ -617,7 +617,7 @@
HTTPBin returns the args in their urldecoded form, so what we put in
should be the same as what we get out.
"""
- r = http.fetch(uri=self.url, params={'fish%26chips': 'delicious'})
+ r = http.fetch(self.url, params={'fish%26chips': 'delicious'})
if r.status_code == 503: # T203637
self.skipTest(
'503: Service currently not available for ' + self.url)
@@ -638,12 +638,12 @@
'X-Amzn-Trace-Id', 'X-B3-Parentspanid', 'X-B3-Spanid',
'X-B3-Traceid', 'X-Forwarded-Client-Cert',
)
- r_data_request = http.fetch(uri=self.get_httpbin_url('/post'),
+ r_data_request = http.fetch(self.get_httpbin_url('/post'),
method='POST',
data={'fish&chips': 'delicious'})
- r_body_request = http.fetch(uri=self.get_httpbin_url('/post'),
+ r_body_request = http.fetch(self.get_httpbin_url('/post'),
method='POST',
- body={'fish&chips': 'delicious'})
+ data={'fish&chips': 'delicious'})

r_data = json.loads(r_data_request.text)
r_body = json.loads(r_body_request.text)
diff --git a/tests/site_tests.py b/tests/site_tests.py
index b4758ea..e57504f 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -2982,7 +2982,7 @@
self.assertIsInstance(site.obsolete, bool)
self.assertTrue(site.obsolete)
self.assertEqual(site.hostname(), 'mh.wikipedia.org')
- r = http.fetch(uri='http://mh.wikipedia.org/w/api.php',
+ r = http.fetch('http://mh.wikipedia.org/w/api.php',
default_error_handling=False)
self.assertEqual(r.status_code, 200)
self.assertEqual(site.siteinfo['lang'], 'mh')

To view, visit change 650976. To unsubscribe, or for help writing mail filters, visit settings.

Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I335058809bf1c1732b305eac12a70cd1b82996d4
Gerrit-Change-Number: 650976
Gerrit-PatchSet: 2
Gerrit-Owner: Mpaa <mpaa.wiki@gmail.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged