jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/463612 )
Change subject: [cleanup] cleanup tests/[add_text_tests.py-bot_tests.py]
......................................................................
[cleanup] cleanup tests/[add_text_tests.py-bot_tests.py]
- use single quotes for string literals
- remove the leading "u" prefix from strings
- adjust indentation so that code lines stay within 79 characters
- use str.format(...) instead of the modulo operator for string formatting
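For illustration, a minimal before/after sketch of the conventions above
(the variable name is hypothetical; the string mirrors one of the patched
lines in the diff below):

    mw_version = '1.25wmf4'  # hypothetical value, for illustration only
    # Before: "u" prefix and modulo formatting on a single long line
    old = u"version %s doesn't support the new paraminfo api" % mw_version
    # After: no prefix, str.format(), wrapped to stay within 79 characters;
    # double quotes survive here only because the text contains a quote
    new = ("version {} doesn't support the new paraminfo api"
           .format(mw_version))
    assert old == new  # purely stylistic change; the result is identical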
Change-Id: I1de075cfae930ade2eb6131137528d99a68b6a55
---
M tests/api_tests.py
M tests/archivebot_tests.py
M tests/aspects.py
M tests/bot_tests.py
4 files changed, 166 insertions(+), 143 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/api_tests.py b/tests/api_tests.py
index 5aa5f49..76d5fc3 100644
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -155,15 +155,15 @@
'bar': 'test'})
self.assertTrue(req)
self.assertEqual(req.site, mysite)
- self.assertIn("foo", req._params)
- self.assertEqual(req["bar"], ["test"])
+ self.assertIn('foo', req._params)
+ self.assertEqual(req['bar'], ['test'])
# test item assignment
- req["one"] = "1"
- self.assertEqual(req._params['one'], ["1"])
+ req['one'] = '1'
+ self.assertEqual(req._params['one'], ['1'])
# test compliance with dict interface
- # req.keys() should contain "action", "foo", "bar", "one"
+ # req.keys() should contain 'action', 'foo', 'bar', 'one'
self.assertEqual(len(req.keys()), 4)
- self.assertIn("test", req._encoded_items().values())
+ self.assertIn('test', req._encoded_items().values())
for item in req.items():
self.assertEqual(len(item), 2, item)
@@ -483,8 +483,8 @@
site = self.get_site()
if site.mw_version < '1.25wmf4':
raise unittest.SkipTest(
- "version %s doesn't support the new paraminfo api"
- % site.mw_version)
+ "version {} doesn't support the new paraminfo api"
+ .format(site.mw_version))
pi = api.ParamInfo(site, modules_only_mode=True)
pi.fetch(['info'])
self.assertIn('query+info', pi._paraminfo)
@@ -553,7 +553,8 @@
"""Test OptionSet with initialised site."""
options = api.OptionSet(self.get_site(), 'recentchanges', 'show')
self.assertRaises(KeyError, options.__setitem__, 'invalid_name', True)
- self.assertRaises(ValueError, options.__setitem__, 'anon', 'invalid_value')
+ self.assertRaises(ValueError, options.__setitem__,
+ 'anon', 'invalid_value')
options['anon'] = True
self.assertCountEqual(['anon'], options._enabled)
self.assertEqual(set(), options._disabled)
@@ -602,7 +603,8 @@
self.assertCountEqual(['a', 'b'], list(options.keys()))
self.assertCountEqual([True, False], list(options.values()))
self.assertEqual(set(), set(options.values()) - {True, False})
- self.assertCountEqual([('a', True), ('b', False)], list(options.items()))
+ self.assertCountEqual([('a', True), ('b', False)],
+ list(options.items()))
class TestDryPageGenerator(TestCase):
@@ -616,35 +618,36 @@
# api.py sorts 'pages' using the string key, which is not a
# numeric comparison.
- titles = ("Broadcaster (definition)", "Wiktionary", "Broadcaster.com",
- "Wikipedia:Disambiguation")
+ titles = ('Broadcaster (definition)', 'Wiktionary', 'Broadcaster.com',
+ 'Wikipedia:Disambiguation')
def setUp(self):
"""Set up test case."""
super(TestDryPageGenerator, self).setUp()
mysite = self.get_site()
self.gen = api.PageGenerator(site=mysite,
- generator="links",
+ generator='links',
parameters={'titles': "User:R'n'B"})
# following test data is copied from an actual api.php response,
# but that query no longer matches this dataset.
# http://en.wikipedia.org/w/api.php?action=query&generator=links&titles=User:…
self.gen.request.submit = types.MethodType(lambda self: {
- "query": {"pages": {"296589": {"pageid": 296589,
- "ns": 0,
- "title": "Broadcaster.com"
+ 'query': {'pages': {'296589': {'pageid': 296589,
+ 'ns': 0,
+ 'title': 'Broadcaster.com'
},
- "13918157": {"pageid": 13918157,
- "ns": 0,
- "title": "Broadcaster (definition)"
+ '13918157': {'pageid': 13918157,
+ 'ns': 0,
+ 'title': 'Broadcaster '
+ '(definition)'
},
- "156658": {"pageid": 156658,
- "ns": 0,
- "title": "Wiktionary"
+ '156658': {'pageid': 156658,
+ 'ns': 0,
+ 'title': 'Wiktionary'
},
- "47757": {"pageid": 47757,
- "ns": 4,
- "title": "Wikipedia:Disambiguation"
+ '47757': {'pageid': 47757,
+ 'ns': 4,
+ 'title': 'Wikipedia:Disambiguation'
}
}
}
@@ -654,7 +657,8 @@
# Add custom_name for this site namespace, to match the live site.
if 'Wikipedia' not in self.site.namespaces:
self.site.namespaces[4].custom_name = 'Wikipedia'
- self.site.namespaces._namespace_names['wikipedia'] = self.site.namespaces[4]
+ self.site.namespaces._namespace_names['wikipedia'] = (
+ self.site.namespaces[4])
def test_results(self):
"""Test that PageGenerator yields pages with expected attributes."""
@@ -724,7 +728,7 @@
titles = [l.title(with_section=False)
for l in links]
gen = api.PropertyGenerator(site=self.site,
- prop="info",
+ prop='info',
parameters={'titles': '|'.join(titles)})
count = 0
@@ -742,7 +746,7 @@
titles = [l.title(with_section=False)
for l in links]
gen = api.PropertyGenerator(site=self.site,
- prop="revisions",
+ prop='revisions',
parameters={'titles': '|'.join(titles)})
gen.set_maximum_items(-1) # suppress use of "rvlimit" parameter
@@ -762,7 +766,7 @@
titles = [l.title(with_section=False)
for l in links]
gen = api.PropertyGenerator(site=self.site,
- prop="revisions|coordinates",
+ prop='revisions|coordinates',
parameters={'titles': '|'.join(titles)})
gen.set_maximum_items(-1) # suppress use of "rvlimit" parameter
@@ -867,7 +871,7 @@
'namespace': {'multi': True}
}
mysite._paraminfo.query_modules_with_limits = {'allpages'}
- self.gen = api.ListGenerator(listaction="allpages", site=mysite)
+ self.gen = api.ListGenerator(listaction='allpages', site=mysite)
def test_namespace_none(self):
"""Test ListGenerator set_namespace with None."""
@@ -1159,7 +1163,7 @@
for info, time in patterns.items():
lag = api.lagpattern.search(info)
self.assertIsNotNone(lag)
- self.assertEqual(int(lag.group("lag")), time)
+ self.assertEqual(int(lag.group('lag')), time)
if __name__ == '__main__': # pragma: no cover
diff --git a/tests/archivebot_tests.py b/tests/archivebot_tests.py
index a8d9d3d..66243e6 100644
--- a/tests/archivebot_tests.py
+++ b/tests/archivebot_tests.py
@@ -93,11 +93,13 @@
self.assertEqual(archivebot.str2time('7d'), archivebot.str2time('1w'))
self.assertEqual(archivebot.str2time('3y'), timedelta(1096))
self.assertEqual(archivebot.str2time('3y', date), timedelta(1095))
- self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time, '4000@')
- self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time, '$1')
+ self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time,
+ '4000@')
+ self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time,
+ '$1')
def test_checkstr(self):
- """Test for extracting key and duration from shorthand notation of durations."""
+ """Test for extracting key and duration from shorthand notation."""
self.assertEqual(archivebot.checkstr('400s'), ('s', '400'))
with suppress_warnings('Time period without qualifier', UserWarning):
self.assertEqual(archivebot.checkstr('3000'), ('s', '3000'))
@@ -143,8 +145,8 @@
self.assertIsInstance(talk.threads, list)
self.assertGreaterEqual(
len(talk.threads), THREADS[code],
- u'%d Threads found on %s,\n%d or more expected'
- % (len(talk.threads), talk, THREADS[code]))
+ '{} Threads found on {},\n{} or more expected'
+ .format(len(talk.threads), talk, THREADS[code]))
for thread in talk.threads:
self.assertIsInstance(thread, archivebot.DiscussionThread)
@@ -160,7 +162,8 @@
self.assertIsInstance(thread.timestamp, datetime)
except AssertionError:
if thread.code not in self.expected_failures:
- pywikibot.output('code %s: %s' % (thread.code, thread.content))
+ pywikibot.output('code {}: {}'
+ .format(thread.code, thread.content))
raise
expected_failures = ['ar', 'eo', 'pdc', 'th']
@@ -207,8 +210,9 @@
self.assertIsInstance(talk.threads, list)
self.assertGreaterEqual(
len(talk.threads), THREADS_WITH_UPDATED_FORMAT[code],
- u'%d Threads found on %s,\n%d or more expected'
- % (len(talk.threads), talk, THREADS_WITH_UPDATED_FORMAT[code]))
+ '{} Threads found on {},\n{} or more expected'
+ .format(len(talk.threads), talk,
+ THREADS_WITH_UPDATED_FORMAT[code]))
for thread in talk.threads:
self.assertIsInstance(thread, archivebot.DiscussionThread)
@@ -224,7 +228,8 @@
self.assertIsInstance(thread.timestamp, datetime)
except AssertionError:
if thread.code not in self.expected_failures:
- pywikibot.output('code %s: %s' % (thread.code, thread.content))
+ pywikibot.output('code {}: {}'
+ .format(thread.code, thread.content))
raise
expected_failures = []
diff --git a/tests/aspects.py b/tests/aspects.py
index 352f576..0237885 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -46,7 +46,8 @@
try:
import pytest_httpbin
- optional_pytest_httpbin_cls_decorator = pytest_httpbin.use_class_based_httpbin
+ optional_pytest_httpbin_cls_decorator = (
+ pytest_httpbin.use_class_based_httpbin)
except ImportError:
pytest_httpbin = None
@@ -138,7 +139,7 @@
namespaces = {namespaces}
self.assertIn(page.namespace(), namespaces,
- "%s not in namespace %r" % (page, namespaces))
+ '{} not in namespace {!r}'.format(page, namespaces))
def _get_gen_pages(self, gen, count=None, site=None):
"""
@@ -224,8 +225,9 @@
page_namespaces = [page.namespace() for page in gen]
if skip and set(page_namespaces) != namespaces:
- raise unittest.SkipTest('Pages in namespaces %r not found.'
- % list(namespaces - set(page_namespaces)))
+ raise unittest.SkipTest('Pages in namespaces {!r} not found.'
+ .format(list(namespaces -
+ set(page_namespaces))))
else:
self.assertEqual(set(page_namespaces), namespaces)
@@ -285,7 +287,7 @@
@type callable_obj: callable
@param args: The positional arguments forwarded to the callable object.
@param kwargs: The keyword arguments forwared to the callable object.
- @return: The context manager if callable_obj is None and None otherwise.
+ @return: Context manager if callable_obj is None and None otherwise.
@rtype: None or context manager
"""
msg = kwargs.pop('msg', None)
@@ -312,7 +314,7 @@
duration = self.test_completed - self.test_start
if duration > self.test_duration_warning_interval:
- unittest_print(' %0.3fs' % duration, end=' ')
+ unittest_print(' {0:.3f}s'.format(duration), end=' ')
sys.stdout.flush()
super(TestTimerMixin, self).tearDown()
@@ -360,8 +362,8 @@
def setUp(self):
"""Set up test."""
self.old_Site_lookup_method = pywikibot.Site
- pywikibot.Site = lambda *args: self.fail('%s: Site() not permitted'
- % self.__class__.__name__)
+ pywikibot.Site = lambda *args: self.fail(
+ '{}: Site() not permitted'.format(self.__class__.__name__))
super(DisableSiteMixin, self).setUp()
@@ -400,8 +402,8 @@
def __init__(self, code, fam=None, user=None, sysop=None):
"""Initializer."""
raise pywikibot.SiteDefinitionError(
- 'Loading site %s:%s during dry test not permitted'
- % (fam, code))
+ 'Loading site {}:{} during dry test not permitted'
+ .format(fam, code))
class DisconnectedSiteMixin(TestCaseBase):
@@ -456,9 +458,11 @@
self.cache_hits = tests.cache_hits - self.cache_hits_start
if self.cache_misses:
- unittest_print(' %d cache misses' % self.cache_misses, end=' ')
+ unittest_print(' {} cache misses'
+ .format(self.cache_misses), end=' ')
if self.cache_hits:
- unittest_print(' %d cache hits' % self.cache_hits, end=' ')
+ unittest_print(' {} cache hits'
+ .format(self.cache_hits), end=' ')
if self.cache_misses or self.cache_hits:
sys.stdout.flush()
@@ -500,19 +504,19 @@
for key, data in cls.sites.items():
if 'hostname' not in data:
- raise Exception('%s: hostname not defined for %s'
- % (cls.__name__, key))
+ raise Exception('{}: hostname not defined for {}'
+ .format(cls.__name__, key))
hostname = data['hostname']
if hostname in cls._checked_hostnames:
if isinstance(cls._checked_hostnames[hostname], Exception):
raise unittest.SkipTest(
- '%s: hostname %s failed (cached): %s'
- % (cls.__name__, hostname,
- cls._checked_hostnames[hostname]))
+ '{}: hostname {} failed (cached): {}'
+ .format(cls.__name__, hostname,
+ cls._checked_hostnames[hostname]))
elif cls._checked_hostnames[hostname] is False:
- raise unittest.SkipTest('%s: hostname %s failed (cached)'
- % (cls.__name__, hostname))
+ raise unittest.SkipTest('{}: hostname {} failed (cached)'
+ .format(cls.__name__, hostname))
else:
continue
@@ -527,18 +531,18 @@
e = r.exception
else:
if r.status not in [200, 301, 302, 303, 307, 308]:
- raise ServerError('HTTP status: %d' % r.status)
+ raise ServerError('HTTP status: {}'.format(r.status))
except Exception as e2:
- pywikibot.error('%s: accessing %s caused exception:'
- % (cls.__name__, hostname))
+ pywikibot.error('{}: accessing {} caused exception:'
+ .format(cls.__name__, hostname))
pywikibot.exception(e2, tb=True)
e = e2
if e:
cls._checked_hostnames[hostname] = e
raise unittest.SkipTest(
- '%s: hostname %s failed: %s'
- % (cls.__name__, hostname, e))
+ '{}: hostname {} failed: {}'
+ .format(cls.__name__, hostname, e))
cls._checked_hostnames[hostname] = True
@@ -569,9 +573,9 @@
"""
if issubclass(cls, ForceCacheMixin):
raise Exception(
- '%s can not be a subclass of both '
+ '{} can not be a subclass of both '
'SiteWriteMixin and ForceCacheMixin'
- % cls.__name__)
+ .format(cls.__name__))
super(SiteWriteMixin, cls).setUpClass()
@@ -584,17 +588,17 @@
if os.environ.get(env_var, '0') != '1':
raise unittest.SkipTest(
- '%r write tests disabled. '
- 'Set %s=1 to enable.'
- % (cls.__name__, env_var))
+ '{!r} write tests disabled. '
+ 'Set {}=1 to enable.'
+ .format(cls.__name__, env_var))
if (not hasattr(site.family, 'test_codes') or
site.code not in site.family.test_codes):
raise Exception(
- '%s should only be run on test sites. '
- 'To run this test, add \'%s\' to the %s family '
- 'attribute \'test_codes\'.'
- % (cls.__name__, site.code, site.family.name))
+ '{} should only be run on test sites. '
+ "To run this test, add '{}' to the {} family "
+ "attribute 'test_codes'."
+ .format(cls.__name__, site.code, site.family.name))
class RequireUserMixin(TestCaseBase):
@@ -608,10 +612,10 @@
"""Check the user config has a valid login to the site."""
if not cls.has_site_user(family, code, sysop=sysop):
raise unittest.SkipTest(
- '%s: No %susername for %s:%s'
- % (cls.__name__,
- "sysop " if sysop else "",
- family, code))
+ '{}: No {}username for {}:{}'
+ .format(cls.__name__,
+ 'sysop ' if sysop else '',
+ family, code))
@classmethod
def setUpClass(cls):
@@ -638,10 +642,10 @@
if not site['site'].user():
raise unittest.SkipTest(
- '%s: Not able to login to %s as %s'
- % (cls.__name__,
- 'sysop' if sysop else 'bot',
- site['site']))
+ '{}: Not able to login to {} as {}'
+ .format(cls.__name__,
+ 'sysop' if sysop else 'bot',
+ site['site']))
def setUp(self):
"""
@@ -731,7 +735,8 @@
for base in bases:
base_tests += [attr_name
for attr_name, attr in base.__dict__.items()
- if attr_name.startswith('test') and callable(attr)]
+ if (attr_name.startswith('test') and
+ callable(attr))]
dct['abstract_class'] = not tests and not base_tests
@@ -799,9 +804,9 @@
if 'pwb' in dct and dct['pwb']:
if 'site' not in dct:
raise Exception(
- '%s: Test classes using pwb must set "site"; add '
+ '{}: Test classes using pwb must set "site"; add '
'site=False if the test script will not use a site'
- % name)
+ .format(name))
# If the 'site' attribute is a false value,
# remove it so it matches !site in nose.
@@ -811,8 +816,8 @@
# If there isn't a site, require declaration of net activity.
if 'net' not in dct:
raise Exception(
- '%s: Test classes without a site configured must set "net"'
- % name)
+ '{}: Test classes without a site configured must set "net"'
+ .format(name))
# If the 'net' attribute is a false value,
# remove it so it matches !net in nose.
@@ -845,7 +850,8 @@
dct['user'] = True
bases = cls.add_base(bases, SiteWriteMixin)
- if ('user' in dct and dct['user']) or ('sysop' in dct and dct['sysop']):
+ if (('user' in dct and dct['user']) or
+ ('sysop' in dct and dct['sysop'])):
bases = cls.add_base(bases, RequireUserMixin)
for test in tests:
@@ -863,9 +869,9 @@
# a multi-site test method only accepts 'self' and the site-key
if test_func.__code__.co_argcount != 2:
raise Exception(
- '%s: Test method %s must accept either 1 or 2 arguments; '
- ' %d found'
- % (name, test, test_func.__code__.co_argcount))
+ '{}: Test method {} must accept either 1 or 2 arguments; '
+ ' {} found'
+ .format(name, test, test_func.__code__.co_argcount))
# create test methods processed by unittest
for (key, sitedata) in dct['sites'].items():
@@ -941,15 +947,16 @@
if ('code' in data and data['code'] in ('test', 'mediawiki') and
'PYWIKIBOT_TEST_PROD_ONLY' in os.environ and not dry):
raise unittest.SkipTest(
- 'Site code "%s" and PYWIKIBOT_TEST_PROD_ONLY is set.'
- % data['code'])
+ 'Site code "{}" and PYWIKIBOT_TEST_PROD_ONLY is set.'
+ .format(data['code']))
if 'site' not in data and 'code' in data and 'family' in data:
data['site'] = Site(data['code'], data['family'],
interface=interface)
if 'hostname' not in data and 'site' in data:
try:
- data['hostname'] = data['site'].base_url(data['site'].path())
+ data['hostname'] = (
+ data['site'].base_url(data['site'].path()))
except KeyError:
# The family has defined this as obsolete
# without a mapping to a hostname.
@@ -980,12 +987,12 @@
name = next(iter(cls.sites.keys()))
else:
raise Exception(
- '"%s.get_site(name=None)" called with multiple sites'
- % cls.__name__)
+ '"{}.get_site(name=None)" called with multiple sites'
+ .format(cls.__name__))
if name and name not in cls.sites:
- raise Exception('"%s" not declared in %s'
- % (name, cls.__name__))
+ raise Exception('"{}" not declared in {}'
+ .format(name, cls.__name__))
if isinstance(cls.site, BaseSite):
assert cls.sites[name]['site'] == cls.site
@@ -997,9 +1004,10 @@
def has_site_user(cls, family, code, sysop=False):
"""Check the user config has a user for the site."""
if not family:
- raise Exception('no family defined for %s' % cls.__name__)
+ raise Exception('no family defined for {}'.format(cls.__name__))
if not code:
- raise Exception('no site code defined for %s' % cls.__name__)
+ raise Exception('no site code defined for {}'
+ .format(cls.__name__))
usernames = config.sysopnames if sysop else config.usernames
@@ -1054,9 +1062,9 @@
if not site:
site = self.get_site()
page = pywikibot.Page(pywikibot.page.Link(
- "There is no page with this title", site))
+ 'There is no page with this title', site))
if page.exists():
- raise unittest.SkipTest("Did not find a page that does not exist.")
+ raise unittest.SkipTest('Did not find a page that does not exist.')
return page
@@ -1066,18 +1074,18 @@
"""
Capture assertion calls to do additional calls around them.
- All assertions done which start with "assert" are patched in such a way that
- after the assertion it calls C{process_assertion} with the assertion and the
- arguments.
+ All assertions done which start with "assert" are patched in such a way
+ that after the assertion it calls C{process_assertion} with the assertion
+ and the arguments.
To avoid that it patches the assertion it's possible to put the call in an
C{disable_assert_capture} with-statement.
"""
- # Is True while an assertion is running, so that assertions won't be patched
- # when they are executed while an assertion is running and only the outer
- # most assertion gets actually patched.
+ # Is True while an assertion is running, so that assertions won't be
+ # patched when they are executed while an assertion is running and only
+ # the outer most assertion gets actually patched.
_patched = False
@contextmanager
@@ -1112,7 +1120,8 @@
try:
context = self.process_assert(assertion, *args, **kwargs)
if hasattr(context, '__enter__'):
- return self._delay_assertion(context, assertion, args, kwargs)
+ return self._delay_assertion(context, assertion, args,
+ kwargs)
else:
self.after_assert(assertion, *args, **kwargs)
return context
@@ -1304,14 +1313,14 @@
site = data['site']
if not site.has_data_repository:
raise unittest.SkipTest(
- u'%s: %r does not have data repository'
- % (cls.__name__, site))
+ '{}: {!r} does not have data repository'
+ .format(cls.__name__, site))
if (hasattr(cls, 'repo') and
cls.repo != site.data_repository()):
raise Exception(
- '%s: sites do not all have the same data repository'
- % cls.__name__)
+ '{}: sites do not all have the same data repository'
+ .format(cls.__name__))
cls.repo = site.data_repository()
@@ -1348,8 +1357,8 @@
for site in cls.sites.values():
if not site['site'].has_data_repository:
raise unittest.SkipTest(
- '%s: %r does not have data repository'
- % (cls.__name__, site['site']))
+ '{}: {!r} does not have data repository'
+ .format(cls.__name__, site['site']))
class DefaultWikibaseClientTestCase(WikibaseClientTestCase,
@@ -1385,8 +1394,8 @@
if str(cls.get_repo()) != 'wikidata:wikidata':
raise unittest.SkipTest(
- u'%s: %s is not connected to Wikidata.'
- % (cls.__name__, cls.get_site()))
+ '{}: {} is not connected to Wikidata.'
+ .format(cls.__name__, cls.get_site()))
class ScriptMainTestCase(ScenarioDefinedDefaultSiteTestCase):
@@ -1473,7 +1482,8 @@
"""Set up test class."""
if not __debug__:
raise unittest.SkipTest(
- '%s is disabled when __debug__ is disabled.' % cls.__name__)
+ '{} is disabled when __debug__ is disabled.'
+ .format(cls.__name__))
super(DebugOnlyTestCase, cls).setUpClass()
@@ -1506,7 +1516,7 @@
self.warning_log = []
self.expect_warning_filename = inspect.getfile(self.__class__)
- if self.expect_warning_filename.endswith((".pyc", ".pyo")):
+ if self.expect_warning_filename.endswith(('.pyc', '.pyo')):
self.expect_warning_filename = self.expect_warning_filename[:-1]
self._do_test_warning_filename = True
@@ -1593,7 +1603,8 @@
if self._do_test_warning_filename:
self.assertDeprecationFile(self.expect_warning_filename)
- def assertOneDeprecationParts(self, deprecated=None, instead=None, count=1):
+ def assertOneDeprecationParts(self, deprecated=None, instead=None,
+ count=1):
"""
Assert that exactly one deprecation message happened and reset.
@@ -1633,15 +1644,15 @@
if item.filename != filename:
self.fail(
- 'expected warning filename %s; warning item: %s'
- % (filename, item))
+ 'expected warning filename {}; warning item: {}'
+ .format(filename, item))
def setUp(self):
"""Set up unit test."""
super(DeprecationTestCase, self).setUp()
self.warning_log = self.context_manager.__enter__()
- warnings.simplefilter("always")
+ warnings.simplefilter('always')
self._reset_messages()
@@ -1677,7 +1688,7 @@
class HttpbinTestCase(TestCase):
"""
- Custom test case class, which allows doing dry httpbin tests using pytest-httpbin.
+ Custom test case class, which allows dry httpbin tests with pytest-httpbin.
Test cases, which use httpbin, need to inherit this class.
"""
diff --git a/tests/bot_tests.py b/tests/bot_tests.py
index 6d47d22..d98c58e 100644
--- a/tests/bot_tests.py
+++ b/tests/bot_tests.py
@@ -28,8 +28,8 @@
def setUpClass(cls):
"""Verify that the translations are available."""
if not i18n.messages_available():
- raise unittest.SkipTest("i18n messages package '%s' not available."
- % i18n._messages_package_name)
+ raise unittest.SkipTest("i18n messages package '{}' not available."
+ .format(i18n._messages_package_name))
super(TWNBotTestCase, cls).setUpClass()
@@ -45,17 +45,17 @@
attribute bot is defined. It also sets the bot's 'always' option to True to
avoid user interaction.
- The C{bot_save} method compares the save counter before the call and asserts
- that it has increased by one after the call. It also stores locally in
- C{save_called} if C{page_save} has been called. If C{bot_save} or
- C{page_save} are implemented they should call super's method at some point
- to make sure these assertions work. At C{tearDown} it checks that the pages
- are saved often enough. The attribute C{default_assert_saves} defines the
- number of saves which must happen and compares it to the difference using
- the save counter. It is possible to define C{assert_saves} after C{setUp} to
- overwrite the default value for certain tests. By default the number of
- saves it asserts are 1. Additionally C{save_called} increases by 1 on each
- call of C{page_save} and should be equal to C{assert_saves}.
+ The C{bot_save} method compares the save counter before the call and
+ asserts that it has increased by one after the call. It also stores
+ locally in C{save_called} if C{page_save} has been called. If C{bot_save}
+ or C{page_save} are implemented they should call super's method at some
+ point to make sure these assertions work. At C{tearDown} it checks that
+ the pages are saved often enough. The attribute C{default_assert_saves}
+ defines the number of saves which must happen and compares it to the
+ difference using the save counter. It is possible to define C{assert_saves}
+ after C{setUp} to overwrite the default value for certain tests. By default
+ the number of saves it asserts are 1. Additionally C{save_called} increases
+ by 1 on each call of C{page_save} and should be equal to C{assert_saves}.
This means if the bot class actually does other writes, like using
L{pywikibot.page.Page.save} manually, it'll still write.
@@ -115,10 +115,11 @@
It uses pages as an iterator and compares the page given to the page
returned by pages iterator. It checks that the bot's _site and site
- attributes are set to the page's site. If _treat_site is set with a Site
- it compares it to that one too.
+ attributes are set to the page's site. If _treat_site is set with a
+ Site it compares it to that one too.
- Afterwards it calls post_treat so it's possible to do additional checks.
+ Afterwards it calls post_treat so it's possible to do additional
+ checks.
"""
def treat(page):
self.assertEqual(page, next(self._page_iter))
@@ -178,7 +179,7 @@
"""Tests for the BaseBot subclasses."""
- CANT_SET_ATTRIBUTE_RE = 'can\'t set attribute'
+ CANT_SET_ATTRIBUTE_RE = "can't set attribute"
NOT_IN_TREAT_RE = 'Requesting the site not while in treat is not allowed.'
dry = True
@@ -228,7 +229,8 @@
# Assert no specific site
self._treat_site = False
self.bot = pywikibot.bot.MultipleSitesBot(generator=self._generator())
- with self.assertRaisesRegex(AttributeError, self.CANT_SET_ATTRIBUTE_RE):
+ with self.assertRaisesRegex(AttributeError,
+ self.CANT_SET_ATTRIBUTE_RE):
self.bot.site = self.de
with self.assertRaisesRegex(ValueError, self.NOT_IN_TREAT_RE):
self.bot.site
@@ -346,8 +348,9 @@
def test_CreatingPageBot(self):
"""Test CreatingPageBot class."""
- # This doesn't verify much (e.g. it could yield the first existing page)
- # but the assertion in post_treat should verify that the page is valid
+ # This doesn't verify much (e.g. it could yield the first existing
+ # page) but the assertion in post_treat should verify that the page
+ # is valid
def treat_generator():
"""Yield just one current page (the last one)."""
yield self._current_page
--
To view, visit https://gerrit.wikimedia.org/r/463612
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I1de075cfae930ade2eb6131137528d99a68b6a55
Gerrit-Change-Number: 463612
Gerrit-PatchSet: 4
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/463608 )
Change subject: [bugfix] Fix sort key handling in category move
......................................................................
[bugfix] Fix sort key handling in category move
Bug: T192215
Change-Id: I749dede76b1878df9572a2b6e829be816f01370f
---
M category/en.json
M category/qqq.json
2 files changed, 8 insertions(+), 2 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/category/en.json b/category/en.json
index 55cff72..700628b 100644
--- a/category/en.json
+++ b/category/en.json
@@ -2,7 +2,8 @@
"@metadata": {
"authors": [
"Xqt",
- "Ben McIlwain (CydeWeys)"
+ "Ben McIlwain (CydeWeys)",
+ "Dvorapa"
]
},
"category-adding": "Bot: Adding category [[:Category:%(newcat)s|%(newcat)s]]",
@@ -14,6 +15,8 @@
"category-replacing": "Bot: Replacing category %(oldcat)s with %(newcat)s",
"category-section-title": "Page history of former %(oldcat)s",
"category-strip-cfd-templates": "Bot: Removing CFD templates for completed action",
+ "category-strip-sort-keys": "Bot: Removing sort keys for completed action",
+ "category-strip-both": "Bot: Removing CFD templates and sort keys for completed action",
"category-version-history": "Bot: Saving version history of former %(oldcat)s",
"category-was-disbanded": "Bot: Category was disbanded",
"category-was-moved": "Bot: Category was moved to [[:Category:%(newcat)s|%(title)s]]"
diff --git a/category/qqq.json b/category/qqq.json
index 0fcdc62..5ef368a 100644
--- a/category/qqq.json
+++ b/category/qqq.json
@@ -8,7 +8,8 @@
"Siebrand",
"Valhallasw",
"Xqt",
- "Ben McIlwain (CydeWeys)"
+ "Ben McIlwain (CydeWeys)",
+ "Dvorapa"
]
},
"category-adding": "{{doc-important|Do not change \":Category:%(newcat)s\" so this message will work in any language.}}",
@@ -19,6 +20,8 @@
"category-replacing": "Edit summary. Parameters:\n* %(oldcat)s - old category name\n* %(newcat)s - new category name",
"category-section-title": "Section title for keeping page history",
"category-strip-cfd-templates": "Edit summary when CFD (Categories for deletion) templates are removed from the page's text following a move/keep action",
+ "category-strip-sort-keys": "Edit summary when sort keys are removed from the page's text following a move/keep action",
+ "category-strip-both": "Edit summary when both CFD (Categories for deletion) templates and sort keys are removed from the page's text following a move/keep action",
"category-version-history": "Edit summary when the bot saves page' version history while category moving",
"category-was-disbanded": "Used as reason for deletion of the category.",
"category-was-moved": "{{doc-important|Do not translate \"[[:Category:%(newcat)s|%(title)s]]\"}}"
--
To view, visit https://gerrit.wikimedia.org/r/463608
Gerrit-Project: pywikibot/i18n
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I749dede76b1878df9572a2b6e829be816f01370f
Gerrit-Change-Number: 463608
Gerrit-PatchSet: 2
Gerrit-Owner: Dvorapa <dvorapa(a)seznam.cz>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: Framawiki <framawiki(a)tools.wmflabs.org>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Siebrand <siebrand(a)kitano.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: Zhuyifei1999 <zhuyifei1999(a)gmail.com>
Gerrit-Reviewer: Zoranzoki21 <zorandori4444(a)gmail.com>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/463518 )
Change subject: [cleanup] cleanup tests/[family_tests.py-http_tests.py]
......................................................................
[cleanup] cleanup tests/[family_tests.py-http_tests.py]
- use single quotes for string literals
- remove the leading "u" prefix from strings
- adjust indentation so that code lines stay within 79 characters
- use str.format(...) instead of the modulo operator for string formatting
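As in the earlier cleanup change, the indentation bullet amounts to
splitting over-long call lines onto continuation lines. A self-contained
sketch with made-up names (not the patched test code itself):

    def assert_raises(exc, func, *args):
        """Stand-in for unittest's assertRaises, for illustration only."""
        try:
            func(*args)
        except exc:
            return
        raise AssertionError('{} not raised'.format(exc.__name__))

    # Before (one call line running past the 79-character limit):
    # assert_raises(ValueError, int, 'not a number, clearly unparseable')
    # After (arguments moved to an aligned continuation line):
    assert_raises(ValueError, int,
                  'not a number, clearly unparseable')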
Change-Id: Id456426c1d09a095bb71cfa2cfd73c6e93ee8ce2
---
M tests/family_tests.py
M tests/file_tests.py
M tests/flow_edit_tests.py
M tests/flow_tests.py
M tests/http_tests.py
5 files changed, 89 insertions(+), 67 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/family_tests.py b/tests/family_tests.py
index 560faf7..1538693 100644
--- a/tests/family_tests.py
+++ b/tests/family_tests.py
@@ -30,7 +30,8 @@
FAMILY_TYPEERROR_RE = (
'Family.obsolete not updatable; '
'use Family.interwiki_removals and Family.interwiki_replacements')
- FROZENSET_TYPEERROR_RE = '\'frozenset\' object does not support item assignment'
+ FROZENSET_TYPEERROR_RE = ("'frozenset' object does not support item "
+ 'assignment')
net = False
def test_family_load_valid(self):
@@ -240,9 +241,9 @@
family = Family.load(family)
for code in family.codes:
self.current_code = code
- url = ('%s://%s%s/$1' % (family.protocol(code),
- family.hostname(code),
- family.path(code)))
+ url = ('{}://{}{}/$1'.format(family.protocol(code),
+ family.hostname(code),
+ family.path(code)))
# Families can switch off if they want to be detected using URL
# this applies for test:test (there is test:wikipedia)
if family._ignore_from_url or code in family._ignore_from_url:
@@ -275,7 +276,8 @@
self.assertEqual(f.name, 'i18n')
self.assertDeprecationParts('pywikibot.site.Family',
'pywikibot.family.Family.load')
- self.assertDeprecationParts('fatal argument of pywikibot.family.Family.load')
+ self.assertDeprecationParts(
+ 'fatal argument of pywikibot.family.Family.load')
def test_old_site_family_function_invalid(self):
"""Test that an invalid family raised UnknownFamily exception."""
@@ -295,7 +297,8 @@
'unknown')
self.assertDeprecationParts('pywikibot.site.Family',
'pywikibot.family.Family.load')
- self.assertDeprecationParts('fatal argument of pywikibot.family.Family.load')
+ self.assertDeprecationParts(
+ 'fatal argument of pywikibot.family.Family.load')
if __name__ == '__main__': # pragma: no cover
diff --git a/tests/file_tests.py b/tests/file_tests.py
index c187654..be16886 100644
--- a/tests/file_tests.py
+++ b/tests/file_tests.py
@@ -99,7 +99,7 @@
self.assertFalse(enwp_file.fileIsShared())
page_doesnt_exist_exc_regex = re.escape(
- 'Page [[commons:%s]] doesn\'t exist.' % title)
+ "Page [[commons:{}]] doesn't exist.".format(title))
with self.assertRaisesRegex(
pywikibot.NoPage,
page_doesnt_exist_exc_regex):
@@ -116,7 +116,7 @@
commons_file.get()
def testOnBoth(self):
- """Test fileIsShared() on file page with both local and shared file."""
+ """Test fileIsShared() on file page with local and shared file."""
title = 'File:Pulsante spam.png'
commons = self.get_site('commons')
@@ -170,19 +170,19 @@
def test_file_info_with_no_page(self):
"""FilePage:latest_file_info raises NoPage for non existing pages."""
site = self.get_site()
- image = pywikibot.FilePage(site, u'File:NoPage')
+ image = pywikibot.FilePage(site, 'File:NoPage')
self.assertFalse(image.exists())
with self.assertRaisesRegex(
pywikibot.NoPage,
(r'Page \[\[(wikipedia\:|)test:File:NoPage\]\] '
- r'doesn\'t exist\.')):
+ r"doesn't exist\.")):
image = image.latest_file_info
def test_file_info_with_no_file(self):
- """FilePage:latest_file_info raises PagerelatedError if no file is present."""
+ """FilePage:latest_file_info raises PagerelatedError if no file."""
site = self.get_site()
- image = pywikibot.FilePage(site, u'File:Test with no image')
+ image = pywikibot.FilePage(site, 'File:Test with no image')
self.assertTrue(image.exists())
with self.assertRaisesRegex(
pywikibot.PageRelatedError,
@@ -227,9 +227,10 @@
"""Get File thumburl from width."""
self.assertTrue(self.image.exists())
# url_param has no precedence over height/width.
- self.assertEqual(self.image.get_file_url(url_width=100, url_param='1000px'),
- 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
- 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
+ self.assertEqual(
+ self.image.get_file_url(url_width=100, url_param='1000px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
self.assertEqual(self.image.latest_file_info.thumbwidth, 100)
self.assertEqual(self.image.latest_file_info.thumbheight, 133)
@@ -237,9 +238,10 @@
"""Get File thumburl from height."""
self.assertTrue(self.image.exists())
# url_param has no precedence over height/width.
- self.assertEqual(self.image.get_file_url(url_height=100, url_param='1000px'),
- 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
- 'd/d3/Albert_Einstein_Head.jpg/75px-Albert_Einstein_Head.jpg')
+ self.assertEqual(
+ self.image.get_file_url(url_height=100, url_param='1000px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/75px-Albert_Einstein_Head.jpg')
self.assertEqual(self.image.latest_file_info.thumbwidth, 75)
self.assertEqual(self.image.latest_file_info.thumbheight, 100)
@@ -247,9 +249,10 @@
"""Get File thumburl from height."""
self.assertTrue(self.image.exists())
# url_param has no precedence over height/width.
- self.assertEqual(self.image.get_file_url(url_param='100px'),
- 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
- 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
+ self.assertEqual(
+ self.image.get_file_url(url_param='100px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
self.assertEqual(self.image.latest_file_info.thumbwidth, 100)
self.assertEqual(self.image.latest_file_info.thumbheight, 133)
@@ -301,13 +304,14 @@
def test_not_existing_download(self):
"""Test not existing download."""
- page = pywikibot.FilePage(self.site, 'File:Albert Einstein.jpg_notexisting')
+ page = pywikibot.FilePage(self.site,
+ 'File:Albert Einstein.jpg_notexisting')
filename = join_images_path('Albert Einstein.jpg')
with self.assertRaisesRegex(
pywikibot.NoPage,
re.escape('Page [[commons:File:Albert Einstein.jpg '
- 'notexisting]] doesn\'t exist.')):
+ "notexisting]] doesn't exist.")):
page.download(filename)
diff --git a/tests/flow_edit_tests.py b/tests/flow_edit_tests.py
index d486ee0..bc60a16 100644
--- a/tests/flow_edit_tests.py
+++ b/tests/flow_edit_tests.py
@@ -75,7 +75,8 @@
def test_reply_to_topic_root(self):
"""Test replying to the topic's root post directly."""
# Setup
- content = "I am a reply to the topic's root post. Replying still works!"
+ content = ("I am a reply to the topic's root post. "
+ 'Replying still works!')
topic = Topic(self.site, self._topic_title)
topic_root = topic.root
old_replies = topic_root.replies(force=True)[:]
@@ -114,7 +115,8 @@
"""Test replying to a previous reply to a topic."""
# Setup
first_content = 'I am a reply to the topic with my own replies. Great!'
- second_content = 'I am a nested reply. This conversation is getting pretty good!'
+ second_content = ('I am a nested reply. This conversation is '
+ 'getting pretty good!')
topic = Topic(self.site, self._topic_title)
topic_root = topic.root
# First reply
@@ -145,7 +147,8 @@
# Test reply list in first reply
# Broken due to current Flow reply structure (T105438)
# new_nested_replies = first_reply_post.replies(force=True)
- # self.assertEqual(len(new_nested_replies), len(old_nested_replies) + 1)
+ # self.assertEqual(len(new_nested_replies),
+ # len(old_nested_replies) + 1)
# Current test for nested reply list
self.assertListEqual(old_nested_replies, [])
diff --git a/tests/flow_tests.py b/tests/flow_tests.py
index 2c8ddb3..4b8df96 100644
--- a/tests/flow_tests.py
+++ b/tests/flow_tests.py
@@ -169,7 +169,8 @@
real_topic = Topic(self.site, 'Topic:Slbktgav46omarsd')
fake_topic = Topic(self.site, 'Topic:Abcdefgh12345678')
# Topic.from_topiclist_data
- self.assertRaises(TypeError, Topic.from_topiclist_data, self.site, '', {})
+ self.assertRaises(TypeError, Topic.from_topiclist_data, self.site,
+ '', {})
self.assertRaises(TypeError, Topic.from_topiclist_data, board, 521, {})
self.assertRaises(TypeError, Topic.from_topiclist_data, board,
'slbktgav46omarsd', [0, 1, 2])
diff --git a/tests/http_tests.py b/tests/http_tests.py
index bb3d3e0..3f8d4ad 100644
--- a/tests/http_tests.py
+++ b/tests/http_tests.py
@@ -143,9 +143,9 @@
def test_https_cert_error(self):
"""Test if http.fetch respects disable_ssl_certificate_validation."""
- self.assertRaisesRegex(pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE,
- http.fetch,
- uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
+ self.assertRaisesRegex(
+ pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE, http.fetch,
+ uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
http.session.close() # clear the connection
with warnings.catch_warnings(record=True) as warning_log:
@@ -158,9 +158,9 @@
http.session.close() # clear the connection
# Verify that it now fails again
- self.assertRaisesRegex(pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE,
- http.fetch,
- uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
+ self.assertRaisesRegex(
+ pywikibot.FatalServerError, self.CERT_VERIFY_FAILED_RE, http.fetch,
+ uri='https://testssl-expire-r2i2.disig.sk/index.en.html')
http.session.close() # clear the connection
# Verify that the warning occurred
@@ -202,10 +202,10 @@
def test_invalid_scheme(self):
"""Test invalid scheme."""
# A InvalidSchema is raised within requests
- self.assertRaisesRegex(requests.exceptions.InvalidSchema,
- 'No connection adapters were found for \'invalid://url\'',
- http.fetch,
- uri='invalid://url')
+ self.assertRaisesRegex(
+ requests.exceptions.InvalidSchema,
+ "No connection adapters were found for 'invalid://url'",
+ http.fetch, uri='invalid://url')
def test_follow_redirects(self):
"""Test follow 301 redirects correctly."""
@@ -256,7 +256,7 @@
self.assertEqual("'", http.user_agent_username("'"))
self.assertEqual('foo_bar', http.user_agent_username('foo bar'))
- self.assertEqual('%E2%81%82', http.user_agent_username(u'⁂'))
+ self.assertEqual('%E2%81%82', http.user_agent_username('⁂'))
def test_version(self):
"""Test http.user_agent {version}."""
@@ -266,8 +266,9 @@
http.user_agent(format_string='version does not appear')
self.assertIsNone(pywikibot.version.cache)
pywikibot.version.cache = {'rev': 'dummy'}
- self.assertEqual(http.user_agent(format_string='{version} does appear'),
- 'dummy does appear')
+ self.assertEqual(
+ http.user_agent(format_string='{version} does appear'),
+ 'dummy does appear')
self.assertIsNotNone(pywikibot.version.cache)
finally:
pywikibot.version.cache = old_cache
@@ -283,8 +284,9 @@
"""Set up unit test."""
super(DefaultUserAgentTestCase, self).setUp()
self.orig_format = config.user_agent_format
- config.user_agent_format = ('{script_product} ({script_comments}) {pwb} '
- '({revision}) {http_backend} {python}')
+ config.user_agent_format = ('{script_product} ({script_comments}) '
+ '{pwb} ({revision}) {http_backend} '
+ '{python}')
def tearDown(self):
"""Tear down unit test."""
@@ -308,9 +310,9 @@
"""Test the generation of fake user agents.
- If the method cannot import either browseragents or fake_useragent, the
- default user agent will be returned, causing tests to fail. Therefore tests
- will skip if neither is present.
+ If the method cannot import either browseragents or fake_useragent,
+ the default user agent will be returned, causing tests to fail.
+ Therefore tests will skip if neither is present.
"""
net = False
@@ -336,32 +338,39 @@
def setUp(self):
"""Set up the unit test."""
- self.orig_fake_user_agent_exceptions = config.fake_user_agent_exceptions
+ self.orig_fake_user_agent_exceptions = (
+ config.fake_user_agent_exceptions)
super(LiveFakeUserAgentTestCase, self).setUp()
def tearDown(self):
"""Tear down unit test."""
- config.fake_user_agent_exceptions = self.orig_fake_user_agent_exceptions
+ config.fake_user_agent_exceptions = (
+ self.orig_fake_user_agent_exceptions)
super(LiveFakeUserAgentTestCase, self).tearDown()
def _test_fetch_use_fake_user_agent(self):
"""Test `use_fake_user_agent` argument of http.fetch."""
# Existing headers
r = http.fetch(
- self.get_httpbin_url('/status/200'), headers={'user-agent': 'EXISTING'})
+ self.get_httpbin_url('/status/200'),
+ headers={'user-agent': 'EXISTING'})
self.assertEqual(r.headers['user-agent'], 'EXISTING')
# Argument value changes
- r = http.fetch(self.get_httpbin_url('/status/200'), use_fake_user_agent=True)
+ r = http.fetch(self.get_httpbin_url('/status/200'),
+ use_fake_user_agent=True)
self.assertNotEqual(r.headers['user-agent'], http.user_agent())
- r = http.fetch(self.get_httpbin_url('/status/200'), use_fake_user_agent=False)
+ r = http.fetch(self.get_httpbin_url('/status/200'),
+ use_fake_user_agent=False)
self.assertEqual(r.headers['user-agent'], http.user_agent())
r = http.fetch(
- self.get_httpbin_url('/status/200'), use_fake_user_agent='ARBITRARY')
+ self.get_httpbin_url('/status/200'),
+ use_fake_user_agent='ARBITRARY')
self.assertEqual(r.headers['user-agent'], 'ARBITRARY')
# Manually overridden domains
- config.fake_user_agent_exceptions = {self.get_httpbin_hostname(): 'OVERRIDDEN'}
+ config.fake_user_agent_exceptions = {
+ self.get_httpbin_hostname(): 'OVERRIDDEN'}
r = http.fetch(
self.get_httpbin_url('/status/200'), use_fake_user_agent=False)
self.assertEqual(r.headers['user-agent'], 'OVERRIDDEN')
@@ -396,7 +405,8 @@
def _test_fake_user_agent_randomness(self):
"""Test if user agent returns are randomized."""
config.fake_user_agent = True
- self.assertNotEqual(http.get_fake_user_agent(), http.get_fake_user_agent())
+ self.assertNotEqual(http.get_fake_user_agent(),
+ http.get_fake_user_agent())
def _test_config_settings(self):
"""Test if method honours configuration toggle."""
@@ -429,10 +439,10 @@
"""Test that HttpRequest correct handles the charsets given."""
- CODEC_CANT_DECODE_RE = 'codec can\'t decode byte'
+ CODEC_CANT_DECODE_RE = "codec can't decode byte"
net = False
- STR = u'äöü'
+ STR = 'äöü'
LATIN1_BYTES = STR.encode('latin1')
UTF8_BYTES = STR.encode('utf8')
@@ -516,8 +526,9 @@
req = CharsetTestCase._create_request()
resp = requests.Response()
req._data = resp
- resp._content = '<?xml version="1.0" encoding="UTF-8" someparam="ignored"?>'.encode(
- 'utf-8')
+ resp._content = (
+ '<?xml version="1.0" encoding="UTF-8" someparam="ignored"?>'
+ .encode('utf-8'))
resp.headers = {'content-type': 'text/xml'}
self.assertIsNone(req.charset)
self.assertEqual('UTF-8', req.encoding)
@@ -639,8 +650,8 @@
"""
Test the query string parameter of request methods.
- The /get endpoint of httpbin returns JSON that can include an 'args' key with
- urldecoded query string parameters.
+ The /get endpoint of httpbin returns JSON that can include an
+ 'args' key with urldecoded query string parameters.
"""
def setUp(self):
@@ -661,10 +672,10 @@
def test_unencoded_params(self):
"""
- Test fetch method with unencoded parameters, which should be encoded internally.
+ Test fetch method with unencoded parameters to be encoded internally.
- HTTPBin returns the args in their urldecoded form, so what we put in should be
- the same as what we get out.
+ HTTPBin returns the args in their urldecoded form, so what we put in
+ should be the same as what we get out.
"""
r = http.fetch(uri=self.url, params={'fish&chips': 'delicious'})
if r.status == 503: # T203637
@@ -677,10 +688,10 @@
def test_encoded_params(self):
"""
- Test fetch method with encoded parameters, which should be re-encoded internally.
+ Test fetch method with encoded parameters to be re-encoded internally.
- HTTPBin returns the args in their urldecoded form, so what we put in should be
- the same as what we get out.
+ HTTPBin returns the args in their urldecoded form, so what we put in
+ should be the same as what we get out.
"""
r = http.fetch(uri=self.url, params={'fish%26chips': 'delicious'})
if r.status == 503: # T203637
@@ -693,10 +704,10 @@
class DataBodyParameterTestCase(HttpbinTestCase):
- """Test that the data and body parameters of fetch/request methods are equivalent."""
+ """Test data and body params of fetch/request methods are equivalent."""
def test_fetch(self):
- """Test that using the data parameter and body parameter produce same results."""
+ """Test that using the data and body params produce same results."""
r_data = http.fetch(uri=self.get_httpbin_url('/post'), method='POST',
data={'fish&chips': 'delicious'})
r_body = http.fetch(uri=self.get_httpbin_url('/post'), method='POST',
--
To view, visit https://gerrit.wikimedia.org/r/463518
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Id456426c1d09a095bb71cfa2cfd73c6e93ee8ce2
Gerrit-Change-Number: 463518
Gerrit-PatchSet: 2
Gerrit-Owner: D3r1ck01 <alangiderick(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot (75)
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/463447 )
Change subject: [doc] Add todo list to phabricator
......................................................................
[doc] Add todo list to phabricator
Bug: T205696
Change-Id: I0106a6f50ea320343fa3b18d2ac83a72687ea6a8
---
M scripts/flickrripper.py
1 file changed, 0 insertions(+), 7 deletions(-)
Approvals:
Dalba: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/flickrripper.py b/scripts/flickrripper.py
index 795a45c..61310db 100755
--- a/scripts/flickrripper.py
+++ b/scripts/flickrripper.py
@@ -16,13 +16,6 @@
* Filter the categories
* Upload the image
-Todo
-----
-* Check if the image is already uploaded (SHA hash)
-* Check and prevent filename collisions
- * Initial suggestion
- * User input
-* Filter the categories
"""
#
# (C) Multichill, 2009
--
To view, visit https://gerrit.wikimedia.org/r/463447
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: I0106a6f50ea320343fa3b18d2ac83a72687ea6a8
Gerrit-Change-Number: 463447
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot (75)