jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/463612 )
Change subject: [cleanup] cleanup tests/[add_text_tests.py-bot_tests.py]
......................................................................
[cleanup] cleanup tests/[add_text_tests.py-bot_tests.py]
- use single quotes for string literals
- remove the leading "u" prefix from strings
- adjust indentation so that code lines are no longer than 79 characters
- use str.format(...) instead of modulo for type specifier arguments
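For illustration, a minimal before/after sketch of these rewrites (a composite example, not copied verbatim from the patch; fam and code are placeholder values, their names borrowed from one of the aspects.py hunks below):

    # Placeholder values used only for this sketch.
    fam, code = 'wikipedia', 'en'

    # Before: u-prefix, double-quoted literal, % (modulo) formatting.
    old = u"Loading site %s:%s during dry test not permitted" % (fam, code)

    # After: plain single-quoted literal and str.format(), wrapped so
    # every line stays within the 79-character limit.
    new = ('Loading site {}:{} during dry test not permitted'
           .format(fam, code))

    # The rewrite is purely stylistic; both strings are equal.
    assert old == new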
Change-Id: I1de075cfae930ade2eb6131137528d99a68b6a55
---
M tests/api_tests.py
M tests/archivebot_tests.py
M tests/aspects.py
M tests/bot_tests.py
4 files changed, 166 insertions(+), 143 deletions(-)
Approvals:
  Dalba: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/tests/api_tests.py b/tests/api_tests.py index 5aa5f49..76d5fc3 100644 --- a/tests/api_tests.py +++ b/tests/api_tests.py @@ -155,15 +155,15 @@ 'bar': 'test'}) self.assertTrue(req) self.assertEqual(req.site, mysite) - self.assertIn("foo", req._params) - self.assertEqual(req["bar"], ["test"]) + self.assertIn('foo', req._params) + self.assertEqual(req['bar'], ['test']) # test item assignment - req["one"] = "1" - self.assertEqual(req._params['one'], ["1"]) + req['one'] = '1' + self.assertEqual(req._params['one'], ['1']) # test compliance with dict interface - # req.keys() should contain "action", "foo", "bar", "one" + # req.keys() should contain 'action', 'foo', 'bar', 'one' self.assertEqual(len(req.keys()), 4) - self.assertIn("test", req._encoded_items().values()) + self.assertIn('test', req._encoded_items().values()) for item in req.items(): self.assertEqual(len(item), 2, item)
@@ -483,8 +483,8 @@ site = self.get_site() if site.mw_version < '1.25wmf4': raise unittest.SkipTest( - "version %s doesn't support the new paraminfo api" - % site.mw_version) + "version {} doesn't support the new paraminfo api" + .format(site.mw_version)) pi = api.ParamInfo(site, modules_only_mode=True) pi.fetch(['info']) self.assertIn('query+info', pi._paraminfo) @@ -553,7 +553,8 @@ """Test OptionSet with initialised site.""" options = api.OptionSet(self.get_site(), 'recentchanges', 'show') self.assertRaises(KeyError, options.__setitem__, 'invalid_name', True) - self.assertRaises(ValueError, options.__setitem__, 'anon', 'invalid_value') + self.assertRaises(ValueError, options.__setitem__, + 'anon', 'invalid_value') options['anon'] = True self.assertCountEqual(['anon'], options._enabled) self.assertEqual(set(), options._disabled) @@ -602,7 +603,8 @@ self.assertCountEqual(['a', 'b'], list(options.keys())) self.assertCountEqual([True, False], list(options.values())) self.assertEqual(set(), set(options.values()) - {True, False}) - self.assertCountEqual([('a', True), ('b', False)], list(options.items())) + self.assertCountEqual([('a', True), ('b', False)], + list(options.items()))
class TestDryPageGenerator(TestCase): @@ -616,35 +618,36 @@
# api.py sorts 'pages' using the string key, which is not a # numeric comparison. - titles = ("Broadcaster (definition)", "Wiktionary", "Broadcaster.com", - "Wikipedia:Disambiguation") + titles = ('Broadcaster (definition)', 'Wiktionary', 'Broadcaster.com', + 'Wikipedia:Disambiguation')
def setUp(self): """Set up test case.""" super(TestDryPageGenerator, self).setUp() mysite = self.get_site() self.gen = api.PageGenerator(site=mysite, - generator="links", + generator='links', parameters={'titles': "User:R'n'B"}) # following test data is copied from an actual api.php response, # but that query no longer matches this dataset. # http://en.wikipedia.org/w/api.php?action=query&generator=links&title... self.gen.request.submit = types.MethodType(lambda self: { - "query": {"pages": {"296589": {"pageid": 296589, - "ns": 0, - "title": "Broadcaster.com" + 'query': {'pages': {'296589': {'pageid': 296589, + 'ns': 0, + 'title': 'Broadcaster.com' }, - "13918157": {"pageid": 13918157, - "ns": 0, - "title": "Broadcaster (definition)" + '13918157': {'pageid': 13918157, + 'ns': 0, + 'title': 'Broadcaster ' + '(definition)' }, - "156658": {"pageid": 156658, - "ns": 0, - "title": "Wiktionary" + '156658': {'pageid': 156658, + 'ns': 0, + 'title': 'Wiktionary' }, - "47757": {"pageid": 47757, - "ns": 4, - "title": "Wikipedia:Disambiguation" + '47757': {'pageid': 47757, + 'ns': 4, + 'title': 'Wikipedia:Disambiguation' } } } @@ -654,7 +657,8 @@ # Add custom_name for this site namespace, to match the live site. if 'Wikipedia' not in self.site.namespaces: self.site.namespaces[4].custom_name = 'Wikipedia' - self.site.namespaces._namespace_names['wikipedia'] = self.site.namespaces[4] + self.site.namespaces._namespace_names['wikipedia'] = ( + self.site.namespaces[4])
def test_results(self): """Test that PageGenerator yields pages with expected attributes.""" @@ -724,7 +728,7 @@ titles = [l.title(with_section=False) for l in links] gen = api.PropertyGenerator(site=self.site, - prop="info", + prop='info', parameters={'titles': '|'.join(titles)})
count = 0 @@ -742,7 +746,7 @@ titles = [l.title(with_section=False) for l in links] gen = api.PropertyGenerator(site=self.site, - prop="revisions", + prop='revisions', parameters={'titles': '|'.join(titles)}) gen.set_maximum_items(-1) # suppress use of "rvlimit" parameter
@@ -762,7 +766,7 @@ titles = [l.title(with_section=False) for l in links] gen = api.PropertyGenerator(site=self.site, - prop="revisions|coordinates", + prop='revisions|coordinates', parameters={'titles': '|'.join(titles)}) gen.set_maximum_items(-1) # suppress use of "rvlimit" parameter
@@ -867,7 +871,7 @@ 'namespace': {'multi': True} } mysite._paraminfo.query_modules_with_limits = {'allpages'} - self.gen = api.ListGenerator(listaction="allpages", site=mysite) + self.gen = api.ListGenerator(listaction='allpages', site=mysite)
def test_namespace_none(self): """Test ListGenerator set_namespace with None.""" @@ -1159,7 +1163,7 @@ for info, time in patterns.items(): lag = api.lagpattern.search(info) self.assertIsNotNone(lag) - self.assertEqual(int(lag.group("lag")), time) + self.assertEqual(int(lag.group('lag')), time)
if __name__ == '__main__': # pragma: no cover diff --git a/tests/archivebot_tests.py b/tests/archivebot_tests.py index a8d9d3d..66243e6 100644 --- a/tests/archivebot_tests.py +++ b/tests/archivebot_tests.py @@ -93,11 +93,13 @@ self.assertEqual(archivebot.str2time('7d'), archivebot.str2time('1w')) self.assertEqual(archivebot.str2time('3y'), timedelta(1096)) self.assertEqual(archivebot.str2time('3y', date), timedelta(1095)) - self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time, '4000@') - self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time, '$1') + self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time, + '4000@') + self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time, + '$1')
def test_checkstr(self): - """Test for extracting key and duration from shorthand notation of durations.""" + """Test for extracting key and duration from shorthand notation.""" self.assertEqual(archivebot.checkstr('400s'), ('s', '400')) with suppress_warnings('Time period without qualifier', UserWarning): self.assertEqual(archivebot.checkstr('3000'), ('s', '3000')) @@ -143,8 +145,8 @@ self.assertIsInstance(talk.threads, list) self.assertGreaterEqual( len(talk.threads), THREADS[code], - u'%d Threads found on %s,\n%d or more expected' - % (len(talk.threads), talk, THREADS[code])) + '{} Threads found on {},\n{} or more expected' + .format(len(talk.threads), talk, THREADS[code]))
for thread in talk.threads: self.assertIsInstance(thread, archivebot.DiscussionThread) @@ -160,7 +162,8 @@ self.assertIsInstance(thread.timestamp, datetime) except AssertionError: if thread.code not in self.expected_failures: - pywikibot.output('code %s: %s' % (thread.code, thread.content)) + pywikibot.output('code {}: {}' + .format(thread.code, thread.content)) raise
expected_failures = ['ar', 'eo', 'pdc', 'th'] @@ -207,8 +210,9 @@ self.assertIsInstance(talk.threads, list) self.assertGreaterEqual( len(talk.threads), THREADS_WITH_UPDATED_FORMAT[code], - u'%d Threads found on %s,\n%d or more expected' - % (len(talk.threads), talk, THREADS_WITH_UPDATED_FORMAT[code])) + '{} Threads found on {},\n{} or more expected' + .format(len(talk.threads), talk, + THREADS_WITH_UPDATED_FORMAT[code]))
for thread in talk.threads: self.assertIsInstance(thread, archivebot.DiscussionThread) @@ -224,7 +228,8 @@ self.assertIsInstance(thread.timestamp, datetime) except AssertionError: if thread.code not in self.expected_failures: - pywikibot.output('code %s: %s' % (thread.code, thread.content)) + pywikibot.output('code {}: {}' + .format(thread.code, thread.content)) raise
expected_failures = [] diff --git a/tests/aspects.py b/tests/aspects.py index 352f576..0237885 100644 --- a/tests/aspects.py +++ b/tests/aspects.py @@ -46,7 +46,8 @@
try: import pytest_httpbin - optional_pytest_httpbin_cls_decorator = pytest_httpbin.use_class_based_httpbin + optional_pytest_httpbin_cls_decorator = ( + pytest_httpbin.use_class_based_httpbin) except ImportError: pytest_httpbin = None
@@ -138,7 +139,7 @@ namespaces = {namespaces}
self.assertIn(page.namespace(), namespaces, - "%s not in namespace %r" % (page, namespaces)) + '{} not in namespace {!r}'.format(page, namespaces))
def _get_gen_pages(self, gen, count=None, site=None): """ @@ -224,8 +225,9 @@ page_namespaces = [page.namespace() for page in gen]
if skip and set(page_namespaces) != namespaces: - raise unittest.SkipTest('Pages in namespaces %r not found.' - % list(namespaces - set(page_namespaces))) + raise unittest.SkipTest('Pages in namespaces {!r} not found.' + .format(list(namespaces - + set(page_namespaces)))) else: self.assertEqual(set(page_namespaces), namespaces)
@@ -285,7 +287,7 @@ @type callable_obj: callable @param args: The positional arguments forwarded to the callable object. @param kwargs: The keyword arguments forwared to the callable object. - @return: The context manager if callable_obj is None and None otherwise. + @return: Context manager if callable_obj is None and None otherwise. @rtype: None or context manager """ msg = kwargs.pop('msg', None) @@ -312,7 +314,7 @@ duration = self.test_completed - self.test_start
if duration > self.test_duration_warning_interval: - unittest_print(' %0.3fs' % duration, end=' ') + unittest_print(' {0:.3f}s'.format(duration), end=' ') sys.stdout.flush()
super(TestTimerMixin, self).tearDown() @@ -360,8 +362,8 @@ def setUp(self): """Set up test.""" self.old_Site_lookup_method = pywikibot.Site - pywikibot.Site = lambda *args: self.fail('%s: Site() not permitted' - % self.__class__.__name__) + pywikibot.Site = lambda *args: self.fail( + '{}: Site() not permitted'.format(self.__class__.__name__))
super(DisableSiteMixin, self).setUp()
@@ -400,8 +402,8 @@ def __init__(self, code, fam=None, user=None, sysop=None): """Initializer.""" raise pywikibot.SiteDefinitionError( - 'Loading site %s:%s during dry test not permitted' - % (fam, code)) + 'Loading site {}:{} during dry test not permitted' + .format(fam, code))
class DisconnectedSiteMixin(TestCaseBase): @@ -456,9 +458,11 @@ self.cache_hits = tests.cache_hits - self.cache_hits_start
if self.cache_misses: - unittest_print(' %d cache misses' % self.cache_misses, end=' ') + unittest_print(' {} cache misses' + .format(self.cache_misses), end=' ') if self.cache_hits: - unittest_print(' %d cache hits' % self.cache_hits, end=' ') + unittest_print(' {} cache hits' + .format(self.cache_hits), end=' ')
if self.cache_misses or self.cache_hits: sys.stdout.flush() @@ -500,19 +504,19 @@
for key, data in cls.sites.items(): if 'hostname' not in data: - raise Exception('%s: hostname not defined for %s' - % (cls.__name__, key)) + raise Exception('{}: hostname not defined for {}' + .format(cls.__name__, key)) hostname = data['hostname']
if hostname in cls._checked_hostnames: if isinstance(cls._checked_hostnames[hostname], Exception): raise unittest.SkipTest( - '%s: hostname %s failed (cached): %s' - % (cls.__name__, hostname, - cls._checked_hostnames[hostname])) + '{}: hostname {} failed (cached): {}' + .format(cls.__name__, hostname, + cls._checked_hostnames[hostname])) elif cls._checked_hostnames[hostname] is False: - raise unittest.SkipTest('%s: hostname %s failed (cached)' - % (cls.__name__, hostname)) + raise unittest.SkipTest('{}: hostname {} failed (cached)' + .format(cls.__name__, hostname)) else: continue
@@ -527,18 +531,18 @@ e = r.exception else: if r.status not in [200, 301, 302, 303, 307, 308]: - raise ServerError('HTTP status: %d' % r.status) + raise ServerError('HTTP status: {}'.format(r.status)) except Exception as e2: - pywikibot.error('%s: accessing %s caused exception:' - % (cls.__name__, hostname)) + pywikibot.error('{}: accessing {} caused exception:' + .format(cls.__name__, hostname)) pywikibot.exception(e2, tb=True) e = e2
if e: cls._checked_hostnames[hostname] = e raise unittest.SkipTest( - '%s: hostname %s failed: %s' - % (cls.__name__, hostname, e)) + '{}: hostname {} failed: {}' + .format(cls.__name__, hostname, e))
cls._checked_hostnames[hostname] = True
@@ -569,9 +573,9 @@ """ if issubclass(cls, ForceCacheMixin): raise Exception( - '%s can not be a subclass of both ' + '{} can not be a subclass of both ' 'SiteWriteMixin and ForceCacheMixin' - % cls.__name__) + .format(cls.__name__))
super(SiteWriteMixin, cls).setUpClass()
@@ -584,17 +588,17 @@
if os.environ.get(env_var, '0') != '1': raise unittest.SkipTest( - '%r write tests disabled. ' - 'Set %s=1 to enable.' - % (cls.__name__, env_var)) + '{!r} write tests disabled. ' + 'Set {}=1 to enable.' + .format(cls.__name__, env_var))
if (not hasattr(site.family, 'test_codes') or site.code not in site.family.test_codes): raise Exception( - '%s should only be run on test sites. ' - "To run this test, add '%s' to the %s family " - "attribute 'test_codes'." - % (cls.__name__, site.code, site.family.name)) + '{} should only be run on test sites. ' + "To run this test, add '{}' to the {} family " + "attribute 'test_codes'." + .format(cls.__name__, site.code, site.family.name))
class RequireUserMixin(TestCaseBase): @@ -608,10 +612,10 @@ """Check the user config has a valid login to the site.""" if not cls.has_site_user(family, code, sysop=sysop): raise unittest.SkipTest( - '%s: No %susername for %s:%s' - % (cls.__name__, - "sysop " if sysop else "", - family, code)) + '{}: No {}username for {}:{}' + .format(cls.__name__, + 'sysop ' if sysop else '', + family, code))
@classmethod def setUpClass(cls): @@ -638,10 +642,10 @@
if not site['site'].user(): raise unittest.SkipTest( - '%s: Not able to login to %s as %s' - % (cls.__name__, - 'sysop' if sysop else 'bot', - site['site'])) + '{}: Not able to login to {} as {}' + .format(cls.__name__, + 'sysop' if sysop else 'bot', + site['site']))
def setUp(self): """ @@ -731,7 +735,8 @@ for base in bases: base_tests += [attr_name for attr_name, attr in base.__dict__.items() - if attr_name.startswith('test') and callable(attr)] + if (attr_name.startswith('test') and + callable(attr))]
dct['abstract_class'] = not tests and not base_tests
@@ -799,9 +804,9 @@ if 'pwb' in dct and dct['pwb']: if 'site' not in dct: raise Exception( - '%s: Test classes using pwb must set "site"; add ' + '{}: Test classes using pwb must set "site"; add ' 'site=False if the test script will not use a site' - % name) + .format(name))
# If the 'site' attribute is a false value, # remove it so it matches !site in nose. @@ -811,8 +816,8 @@ # If there isn't a site, require declaration of net activity. if 'net' not in dct: raise Exception( - '%s: Test classes without a site configured must set "net"' - % name) + '{}: Test classes without a site configured must set "net"' + .format(name))
# If the 'net' attribute is a false value, # remove it so it matches !net in nose. @@ -845,7 +850,8 @@ dct['user'] = True bases = cls.add_base(bases, SiteWriteMixin)
- if ('user' in dct and dct['user']) or ('sysop' in dct and dct['sysop']): + if (('user' in dct and dct['user']) or + ('sysop' in dct and dct['sysop'])): bases = cls.add_base(bases, RequireUserMixin)
for test in tests: @@ -863,9 +869,9 @@ # a multi-site test method only accepts 'self' and the site-key if test_func.__code__.co_argcount != 2: raise Exception( - '%s: Test method %s must accept either 1 or 2 arguments; ' - ' %d found' - % (name, test, test_func.__code__.co_argcount)) + '{}: Test method {} must accept either 1 or 2 arguments; ' + ' {} found' + .format(name, test, test_func.__code__.co_argcount))
# create test methods processed by unittest for (key, sitedata) in dct['sites'].items(): @@ -941,15 +947,16 @@ if ('code' in data and data['code'] in ('test', 'mediawiki') and 'PYWIKIBOT_TEST_PROD_ONLY' in os.environ and not dry): raise unittest.SkipTest( - 'Site code "%s" and PYWIKIBOT_TEST_PROD_ONLY is set.' - % data['code']) + 'Site code "{}" and PYWIKIBOT_TEST_PROD_ONLY is set.' + .format(data['code']))
if 'site' not in data and 'code' in data and 'family' in data: data['site'] = Site(data['code'], data['family'], interface=interface) if 'hostname' not in data and 'site' in data: try: - data['hostname'] = data['site'].base_url(data['site'].path()) + data['hostname'] = ( + data['site'].base_url(data['site'].path())) except KeyError: # The family has defined this as obsolete # without a mapping to a hostname. @@ -980,12 +987,12 @@ name = next(iter(cls.sites.keys())) else: raise Exception( - '"%s.get_site(name=None)" called with multiple sites' - % cls.__name__) + '"{}.get_site(name=None)" called with multiple sites' + .format(cls.__name__))
if name and name not in cls.sites: - raise Exception('"%s" not declared in %s' - % (name, cls.__name__)) + raise Exception('"{}" not declared in {}' + .format(name, cls.__name__))
if isinstance(cls.site, BaseSite): assert cls.sites[name]['site'] == cls.site @@ -997,9 +1004,10 @@ def has_site_user(cls, family, code, sysop=False): """Check the user config has a user for the site.""" if not family: - raise Exception('no family defined for %s' % cls.__name__) + raise Exception('no family defined for {}'.format(cls.__name__)) if not code: - raise Exception('no site code defined for %s' % cls.__name__) + raise Exception('no site code defined for {}' + .format(cls.__name__))
usernames = config.sysopnames if sysop else config.usernames
@@ -1054,9 +1062,9 @@ if not site: site = self.get_site() page = pywikibot.Page(pywikibot.page.Link( - "There is no page with this title", site)) + 'There is no page with this title', site)) if page.exists(): - raise unittest.SkipTest("Did not find a page that does not exist.") + raise unittest.SkipTest('Did not find a page that does not exist.')
return page
@@ -1066,18 +1074,18 @@ """ Capture assertion calls to do additional calls around them.
- All assertions done which start with "assert" are patched in such a way that - after the assertion it calls C{process_assertion} with the assertion and the - arguments. + All assertions done which start with "assert" are patched in such a way + that after the assertion it calls C{process_assertion} with the assertion + and the arguments.
To avoid that it patches the assertion it's possible to put the call in an C{disable_assert_capture} with-statement.
"""
- # Is True while an assertion is running, so that assertions won't be patched - # when they are executed while an assertion is running and only the outer - # most assertion gets actually patched. + # Is True while an assertion is running, so that assertions won't be + # patched when they are executed while an assertion is running and only + # the outer most assertion gets actually patched. _patched = False
@contextmanager @@ -1112,7 +1120,8 @@ try: context = self.process_assert(assertion, *args, **kwargs) if hasattr(context, '__enter__'): - return self._delay_assertion(context, assertion, args, kwargs) + return self._delay_assertion(context, assertion, args, + kwargs) else: self.after_assert(assertion, *args, **kwargs) return context @@ -1304,14 +1313,14 @@ site = data['site'] if not site.has_data_repository: raise unittest.SkipTest( - u'%s: %r does not have data repository' - % (cls.__name__, site)) + '{}: {!r} does not have data repository' + .format(cls.__name__, site))
if (hasattr(cls, 'repo') and cls.repo != site.data_repository()): raise Exception( - '%s: sites do not all have the same data repository' - % cls.__name__) + '{}: sites do not all have the same data repository' + .format(cls.__name__))
cls.repo = site.data_repository()
@@ -1348,8 +1357,8 @@ for site in cls.sites.values(): if not site['site'].has_data_repository: raise unittest.SkipTest( - '%s: %r does not have data repository' - % (cls.__name__, site['site'])) + '{}: {!r} does not have data repository' + .format(cls.__name__, site['site']))
class DefaultWikibaseClientTestCase(WikibaseClientTestCase, @@ -1385,8 +1394,8 @@
if str(cls.get_repo()) != 'wikidata:wikidata': raise unittest.SkipTest( - u'%s: %s is not connected to Wikidata.' - % (cls.__name__, cls.get_site())) + '{}: {} is not connected to Wikidata.' + .format(cls.__name__, cls.get_site()))
class ScriptMainTestCase(ScenarioDefinedDefaultSiteTestCase): @@ -1473,7 +1482,8 @@ """Set up test class.""" if not __debug__: raise unittest.SkipTest( - '%s is disabled when __debug__ is disabled.' % cls.__name__) + '{} is disabled when __debug__ is disabled.' + .format(cls.__name__)) super(DebugOnlyTestCase, cls).setUpClass()
@@ -1506,7 +1516,7 @@ self.warning_log = []
self.expect_warning_filename = inspect.getfile(self.__class__) - if self.expect_warning_filename.endswith((".pyc", ".pyo")): + if self.expect_warning_filename.endswith(('.pyc', '.pyo')): self.expect_warning_filename = self.expect_warning_filename[:-1]
self._do_test_warning_filename = True @@ -1593,7 +1603,8 @@ if self._do_test_warning_filename: self.assertDeprecationFile(self.expect_warning_filename)
- def assertOneDeprecationParts(self, deprecated=None, instead=None, count=1): + def assertOneDeprecationParts(self, deprecated=None, instead=None, + count=1): """ Assert that exactly one deprecation message happened and reset.
@@ -1633,15 +1644,15 @@
if item.filename != filename: self.fail( - 'expected warning filename %s; warning item: %s' - % (filename, item)) + 'expected warning filename {}; warning item: {}' + .format(filename, item))
def setUp(self): """Set up unit test.""" super(DeprecationTestCase, self).setUp()
self.warning_log = self.context_manager.__enter__() - warnings.simplefilter("always") + warnings.simplefilter('always')
self._reset_messages()
@@ -1677,7 +1688,7 @@ class HttpbinTestCase(TestCase):
""" - Custom test case class, which allows doing dry httpbin tests using pytest-httpbin. + Custom test case class, which allows dry httpbin tests with pytest-httpbin.
Test cases, which use httpbin, need to inherit this class. """ diff --git a/tests/bot_tests.py b/tests/bot_tests.py index 6d47d22..d98c58e 100644 --- a/tests/bot_tests.py +++ b/tests/bot_tests.py @@ -28,8 +28,8 @@ def setUpClass(cls): """Verify that the translations are available.""" if not i18n.messages_available(): - raise unittest.SkipTest("i18n messages package '%s' not available." - % i18n._messages_package_name) + raise unittest.SkipTest("i18n messages package '{}' not available." + .format(i18n._messages_package_name)) super(TWNBotTestCase, cls).setUpClass()
@@ -45,17 +45,17 @@ attribute bot is defined. It also sets the bot's 'always' option to True to avoid user interaction.
- The C{bot_save} method compares the save counter before the call and asserts - that it has increased by one after the call. It also stores locally in - C{save_called} if C{page_save} has been called. If C{bot_save} or - C{page_save} are implemented they should call super's method at some point - to make sure these assertions work. At C{tearDown} it checks that the pages - are saved often enough. The attribute C{default_assert_saves} defines the - number of saves which must happen and compares it to the difference using - the save counter. It is possible to define C{assert_saves} after C{setUp} to - overwrite the default value for certain tests. By default the number of - saves it asserts are 1. Additionally C{save_called} increases by 1 on each - call of C{page_save} and should be equal to C{assert_saves}. + The C{bot_save} method compares the save counter before the call and + asserts that it has increased by one after the call. It also stores + locally in C{save_called} if C{page_save} has been called. If C{bot_save} + or C{page_save} are implemented they should call super's method at some + point to make sure these assertions work. At C{tearDown} it checks that + the pages are saved often enough. The attribute C{default_assert_saves} + defines the number of saves which must happen and compares it to the + difference using the save counter. It is possible to define C{assert_saves} + after C{setUp} to overwrite the default value for certain tests. By default + the number of saves it asserts are 1. Additionally C{save_called} increases + by 1 on each call of C{page_save} and should be equal to C{assert_saves}.
This means if the bot class actually does other writes, like using L{pywikibot.page.Page.save} manually, it'll still write. @@ -115,10 +115,11 @@
It uses pages as an iterator and compares the page given to the page returned by pages iterator. It checks that the bot's _site and site - attributes are set to the page's site. If _treat_site is set with a Site - it compares it to that one too. + attributes are set to the page's site. If _treat_site is set with a + Site it compares it to that one too.
- Afterwards it calls post_treat so it's possible to do additional checks. + Afterwards it calls post_treat so it's possible to do additional + checks. """ def treat(page): self.assertEqual(page, next(self._page_iter)) @@ -178,7 +179,7 @@
"""Tests for the BaseBot subclasses."""
- CANT_SET_ATTRIBUTE_RE = 'can't set attribute' + CANT_SET_ATTRIBUTE_RE = "can't set attribute" NOT_IN_TREAT_RE = 'Requesting the site not while in treat is not allowed.' dry = True
@@ -228,7 +229,8 @@ # Assert no specific site self._treat_site = False self.bot = pywikibot.bot.MultipleSitesBot(generator=self._generator()) - with self.assertRaisesRegex(AttributeError, self.CANT_SET_ATTRIBUTE_RE): + with self.assertRaisesRegex(AttributeError, + self.CANT_SET_ATTRIBUTE_RE): self.bot.site = self.de with self.assertRaisesRegex(ValueError, self.NOT_IN_TREAT_RE): self.bot.site @@ -346,8 +348,9 @@
def test_CreatingPageBot(self): """Test CreatingPageBot class.""" - # This doesn't verify much (e.g. it could yield the first existing page) - # but the assertion in post_treat should verify that the page is valid + # This doesn't verify much (e.g. it could yield the first existing + # page) but the assertion in post_treat should verify that the page + # is valid def treat_generator(): """Yield just one current page (the last one).""" yield self._current_page