jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/463372 )
Change subject: [cleanup] cleanup tests/[page_tests.py-python_tests.py] ......................................................................
[cleanup] cleanup tests/[page_tests.py-python_tests.py]
- use single quotes for string literals - remove preleading "u" fron strings - indentation to make sure code lines are less than 79 characters - use str.format(...) instead of modulo for type specifier arguments
Change-Id: I2afc60004d9b14460ef465b09a46d61cdd719fc3 --- M tests/page_tests.py M tests/pagegenerators_tests.py M tests/paraminfo_tests.py M tests/patrolbot_tests.py M tests/plural_tests.py M tests/proofreadpage_tests.py 6 files changed, 298 insertions(+), 248 deletions(-)
Approvals: Xqt: Looks good to me, approved jenkins-bot: Verified
diff --git a/tests/page_tests.py b/tests/page_tests.py index f2f3b14..d471d66 100644 --- a/tests/page_tests.py +++ b/tests/page_tests.py @@ -65,42 +65,42 @@
cached = True
- namespaces = {0: [u""], # en.wikipedia.org namespaces for testing - 1: [u"Talk:"], # canonical form first, then others - 2: [u"User:"], # must end with : - 3: [u"User talk:", u"User_talk:"], - 4: [u"Wikipedia:", u"Project:", u"WP:"], - 5: [u"Wikipedia talk:", u"Project talk:", u"Wikipedia_talk:", - u"Project_talk:", u"WT:"], - 6: [u"File:"], - 7: [u"Image talk:", u"Image_talk:"], - 8: [u"MediaWiki:"], - 9: [u"MediaWiki talk:", u"MediaWiki_talk:"], - 10: [u"Template:"], - 11: [u"Template talk:", u"Template_talk:"], - 12: [u"Help:"], - 13: [u"Help talk:", u"Help_talk:"], - 14: [u"Category:"], - 15: [u"Category talk:", u"Category_talk:"], - 100: [u"Portal:"], - 101: [u"Portal talk:", u"Portal_talk:"], + namespaces = {0: [''], # en.wikipedia.org namespaces for testing + 1: ['Talk:'], # canonical form first, then others + 2: ['User:'], # must end with : + 3: ['User talk:', 'User_talk:'], + 4: ['Wikipedia:', 'Project:', 'WP:'], + 5: ['Wikipedia talk:', 'Project talk:', 'Wikipedia_talk:', + 'Project_talk:', 'WT:'], + 6: ['File:'], + 7: ['Image talk:', 'Image_talk:'], + 8: ['MediaWiki:'], + 9: ['MediaWiki talk:', 'MediaWiki_talk:'], + 10: ['Template:'], + 11: ['Template talk:', 'Template_talk:'], + 12: ['Help:'], + 13: ['Help talk:', 'Help_talk:'], + 14: ['Category:'], + 15: ['Category talk:', 'Category_talk:'], + 100: ['Portal:'], + 101: ['Portal talk:', 'Portal_talk:'], } titles = { # just a bunch of randomly selected titles # input format : expected output format - u"Cities in Burkina Faso": u"Cities in Burkina Faso", - u"eastern Sayan": u"Eastern Sayan", - u"The_Addams_Family_(pinball)": u"The Addams Family (pinball)", - u"Hispanic (U.S. Census)": u"Hispanic (U.S. Census)", - u"Stołpce": u"Stołpce", - u"Nowy_Sącz": u"Nowy Sącz", - u"battle of Węgierska Górka": u"Battle of Węgierska Górka", + 'Cities in Burkina Faso': 'Cities in Burkina Faso', + 'eastern Sayan': 'Eastern Sayan', + 'The_Addams_Family_(pinball)': 'The Addams Family (pinball)', + 'Hispanic (U.S. 
Census)': 'Hispanic (U.S. Census)', + 'Stołpce': 'Stołpce', + 'Nowy_Sącz': 'Nowy Sącz', + 'battle of Węgierska Górka': 'Battle of Węgierska Górka', } # random bunch of possible section titles - sections = [u"", - u"#Phase_2", - u"#History", - u"#later life", + sections = ['', + '#Phase_2', + '#History', + '#later life', ]
def testNamespaces(self): @@ -157,10 +157,12 @@ # wikisource:it kept Autore as canonical name l2 = pywikibot.page.Link('Autore:Albert Einstein', source=self.itws) self.assertEqual(l2.ns_title(), 'Autore:Albert Einstein') - self.assertEqual(l2.ns_title(onsite=self.enws), 'Author:Albert Einstein') + self.assertEqual(l2.ns_title(onsite=self.enws), + 'Author:Albert Einstein')
# Translation namespace does not exist on wikisource:it - l3 = pywikibot.page.Link('Translation:Albert Einstein', source=self.enws) + l3 = pywikibot.page.Link('Translation:Albert Einstein', + source=self.enws) self.assertEqual(l3.ns_title(), 'Translation:Albert Einstein') self.assertRaisesRegex(pywikibot.Error, 'No corresponding namespace found for ' @@ -186,43 +188,43 @@
family_name = (site.family.name + ':' if pywikibot.config2.family != site.family.name - else u'') - self.assertEqual(str(mainpage), u"[[%s%s:%s]]" - % (family_name, site.code, - mainpage.title())) + else '') + self.assertEqual(str(mainpage), '[[{}{}:{}]]' + .format(family_name, site.code, + mainpage.title())) self.assertLess(mainpage, maintalk)
def testHelpTitle(self): """Test title() method options in Help namespace.""" site = self.get_site() - p1 = pywikibot.Page(site, u"Help:Test page#Testing") - ns_name = u"Help" + p1 = pywikibot.Page(site, 'Help:Test page#Testing') + ns_name = 'Help' if site.namespaces[12][0] != ns_name: ns_name = site.namespaces[12][0] self.assertEqual(p1.title(), - ns_name + u":Test page#Testing") + ns_name + ':Test page#Testing') self.assertEqual(p1.title(underscore=True), - ns_name + u":Test_page#Testing") + ns_name + ':Test_page#Testing') self.assertEqual(p1.title(with_ns=False), - u"Test page#Testing") + 'Test page#Testing') self.assertEqual(p1.title(with_section=False), - ns_name + u":Test page") + ns_name + ':Test page') self.assertEqual(p1.title(with_ns=False, with_section=False), - u"Test page") + 'Test page') self.assertEqual(p1.title(as_url=True), - ns_name + "%3ATest_page%23Testing") + ns_name + '%3ATest_page%23Testing') self.assertEqual(p1.title(as_link=True, insite=site), - u"[[" + ns_name + u":Test page#Testing]]") + '[[' + ns_name + ':Test page#Testing]]') self.assertEqual( p1.title(as_link=True, force_interwiki=True, insite=site), '[[en:' + ns_name + ':Test page#Testing]]') self.assertEqual(p1.title(as_link=True, textlink=True, insite=site), p1.title(as_link=True, textlink=False, insite=site)) self.assertEqual(p1.title(as_link=True, with_ns=False, insite=site), - u"[[" + ns_name + u":Test page#Testing|Test page]]") + '[[' + ns_name + ':Test page#Testing|Test page]]') self.assertEqual(p1.title(as_link=True, force_interwiki=True, with_ns=False, insite=site), - u"[[en:" + ns_name + ":Test page#Testing|Test page]]") + '[[en:' + ns_name + ':Test page#Testing|Test page]]') self.assertEqual(p1.title(as_link=True, textlink=True, with_ns=False, insite=site), p1.title(as_link=True, textlink=False, @@ -232,39 +234,42 @@ """Test title() method options in File namespace.""" # also test a page with non-ASCII chars and a different namespace site = self.get_site() - p2 = 
pywikibot.Page(site, u"File:Jean-Léon Gérôme 003.jpg") - ns_name = u"File" + p2 = pywikibot.Page(site, 'File:Jean-Léon Gérôme 003.jpg') + ns_name = 'File' if site.namespaces[6][0] != ns_name: ns_name = site.namespaces[6][0] self.assertEqual(p2.title(), - u"File:Jean-Léon Gérôme 003.jpg") + 'File:Jean-Léon Gérôme 003.jpg') self.assertEqual(p2.title(underscore=True), - u"File:Jean-Léon_Gérôme_003.jpg") + 'File:Jean-Léon_Gérôme_003.jpg') self.assertEqual(p2.title(with_ns=False), - u"Jean-Léon Gérôme 003.jpg") + 'Jean-Léon Gérôme 003.jpg') self.assertEqual(p2.title(with_section=False), - u"File:Jean-Léon Gérôme 003.jpg") + 'File:Jean-Léon Gérôme 003.jpg') self.assertEqual(p2.title(with_ns=False, with_section=False), - u"Jean-Léon Gérôme 003.jpg") + 'Jean-Léon Gérôme 003.jpg') self.assertEqual(p2.title(as_url=True), - u"File%3AJean-L%C3%A9on_G%C3%A9r%C3%B4me_003.jpg") + 'File%3AJean-L%C3%A9on_G%C3%A9r%C3%B4me_003.jpg') self.assertEqual(p2.title(as_link=True, insite=site), - u"[[File:Jean-Léon Gérôme 003.jpg]]") + '[[File:Jean-Léon Gérôme 003.jpg]]') self.assertEqual( p2.title(as_link=True, force_interwiki=True, insite=site), '[[en:File:Jean-Léon Gérôme 003.jpg]]') self.assertEqual(p2.title(as_link=True, textlink=True, insite=site), - u"[[:File:Jean-Léon Gérôme 003.jpg]]") + '[[:File:Jean-Léon Gérôme 003.jpg]]') self.assertEqual(p2.title(as_filename=True), - u"File_Jean-Léon_Gérôme_003.jpg") - self.assertEqual(p2.title(as_link=True, with_ns=False, insite=site), - u"[[File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]") - self.assertEqual(p2.title(as_link=True, force_interwiki=True, - with_ns=False, insite=site), - u"[[en:File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]") - self.assertEqual(p2.title(as_link=True, textlink=True, - with_ns=False, insite=site), - u"[[:File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]") + 'File_Jean-Léon_Gérôme_003.jpg') + self.assertEqual( + p2.title(as_link=True, with_ns=False, insite=site), + '[[File:Jean-Léon Gérôme 
003.jpg|Jean-Léon Gérôme 003.jpg]]') + self.assertEqual( + p2.title(as_link=True, force_interwiki=True, + with_ns=False, insite=site), + '[[en:File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]') + self.assertEqual( + p2.title(as_link=True, textlink=True, + with_ns=False, insite=site), + '[[:File:Jean-Léon Gérôme 003.jpg|Jean-Léon Gérôme 003.jpg]]')
def testImageAndDataRepository(self): """Test image_repository and data_repository page attributes.""" @@ -283,7 +288,8 @@ """Test Page.oldest_revision.""" mainpage = self.get_mainpage() self.assertEqual(mainpage.oldest_revision.user, 'TwoOneTwo') - self.assertIsInstance(mainpage.oldest_revision.timestamp, pywikibot.Timestamp) + self.assertIsInstance(mainpage.oldest_revision.timestamp, + pywikibot.Timestamp)
class TestPageObject(DefaultSiteTestCase): @@ -302,7 +308,7 @@ mainpage = self.get_mainpage() maintalk = mainpage.toggleTalkPage()
- if u':' not in mainpage.title(): + if ':' not in mainpage.title(): self.assertEqual(mainpage.namespace(), 0) self.assertEqual(maintalk.namespace(), mainpage.namespace() + 1)
@@ -318,7 +324,7 @@ # Empty string or None as title raises error. page = pywikibot.page.BasePage(site) self.assertRaisesRegex(InvalidTitle, INVALID_TITLE_RE, page.title) - page = pywikibot.page.BasePage(site, title=u'') + page = pywikibot.page.BasePage(site, title='') self.assertRaisesRegex(InvalidTitle, INVALID_TITLE_RE, page.title) self.assertRaisesRegex(ValueError, 'Title cannot be None.', pywikibot.page.BasePage, site, title=None) @@ -348,11 +354,11 @@ """Test title() method options in article namespace.""" # at last test article namespace site = self.get_site() - p2 = pywikibot.Page(site, u"Test page") + p2 = pywikibot.Page(site, 'Test page') self.assertEqual(p2.title(), - u"Test page") + 'Test page') self.assertEqual(p2.title(underscore=True), - u"Test_page") + 'Test_page') self.assertEqual(p2.title(), p2.title(with_ns=False)) self.assertEqual(p2.title(), @@ -360,7 +366,7 @@ self.assertEqual(p2.title(as_url=True), p2.title(underscore=True)) self.assertEqual(p2.title(as_link=True, insite=site), - u"[[Test page]]") + '[[Test page]]') self.assertEqual(p2.title(as_filename=True), p2.title(underscore=True)) self.assertEqual(p2.title(underscore=True), @@ -383,24 +389,24 @@ p2.title(with_ns=False, as_filename=True)) self.assertEqual(p2.title(with_ns=False, as_link=True, force_interwiki=True, insite=site), - u"[[" + site.code + u":Test page|Test page]]") + '[[' + site.code + ':Test page|Test page]]')
def testSection(self): """Test section() method.""" # use same pages as in previous test site = self.get_site() - p1 = pywikibot.Page(site, u"Help:Test page#Testing") - p2 = pywikibot.Page(site, u"File:Jean-Léon Gérôme 003.jpg") - self.assertEqual(p1.section(), u"Testing") + p1 = pywikibot.Page(site, 'Help:Test page#Testing') + p2 = pywikibot.Page(site, 'File:Jean-Léon Gérôme 003.jpg') + self.assertEqual(p1.section(), 'Testing') self.assertEqual(p2.section(), None)
def testIsTalkPage(self): """Test isTalkPage() method.""" site = self.get_site() - p1 = pywikibot.Page(site, u"First page") - p2 = pywikibot.Page(site, u"Talk:First page") - p3 = pywikibot.Page(site, u"User:Second page") - p4 = pywikibot.Page(site, u"User talk:Second page") + p1 = pywikibot.Page(site, 'First page') + p2 = pywikibot.Page(site, 'Talk:First page') + p3 = pywikibot.Page(site, 'User:Second page') + p4 = pywikibot.Page(site, 'User talk:Second page') self.assertEqual(p1.isTalkPage(), False) self.assertEqual(p2.isTalkPage(), True) self.assertEqual(p3.isTalkPage(), False) @@ -409,9 +415,9 @@ def testIsCategory(self): """Test is_categorypage method.""" site = self.get_site() - p1 = pywikibot.Page(site, u"First page") - p2 = pywikibot.Page(site, u"Category:Second page") - p3 = pywikibot.Page(site, u"Category talk:Second page") + p1 = pywikibot.Page(site, 'First page') + p2 = pywikibot.Page(site, 'Category:Second page') + p3 = pywikibot.Page(site, 'Category talk:Second page') self.assertEqual(p1.is_categorypage(), False) self.assertEqual(p2.is_categorypage(), True) self.assertEqual(p3.is_categorypage(), False) @@ -419,9 +425,9 @@ def testIsFile(self): """Test C{Page.is_filepage} check.""" site = self.get_site() - p1 = pywikibot.Page(site, u"First page") - p2 = pywikibot.Page(site, u"File:Second page") - p3 = pywikibot.Page(site, u"Image talk:Second page") + p1 = pywikibot.Page(site, 'First page') + p2 = pywikibot.Page(site, 'File:Second page') + p3 = pywikibot.Page(site, 'Image talk:Second page') self.assertEqual(p1.is_filepage(), False) self.assertEqual(p2.is_filepage(), True) self.assertEqual(p3.is_filepage(), False) @@ -455,8 +461,8 @@ mainpage = self.get_mainpage() maintalk = mainpage.toggleTalkPage() if not maintalk.exists(): - raise unittest.SkipTest("No talk page for %s's main page" - % self.get_site()) + raise unittest.SkipTest("No talk page for {}'s main page" + .format(self.get_site())) self.assertIsInstance(maintalk.get(get_redirect=True), unicode) 
self.assertEqual(mainpage.toggleTalkPage(), maintalk) self.assertEqual(maintalk.toggleTalkPage(), mainpage) @@ -540,7 +546,8 @@ for page in site.allpages(filterredir=True, total=1): break else: - raise unittest.SkipTest('No redirect pages on site {0!r}'.format(site)) + raise unittest.SkipTest('No redirect pages on site {0!r}' + .format(site)) # This page is already initialised self.assertTrue(hasattr(page, '_isredir')) # call api.update_page without prop=info @@ -681,27 +688,28 @@ super(TestPageRepr, self).tearDown()
def test_mainpage_type(self): - u"""Test the return type of repr(Page(<main page>)) is str.""" + """Test the return type of repr(Page(<main page>)) is str.""" mainpage = self.get_mainpage() self.assertIsInstance(repr(mainpage), str)
def test_unicode_type(self): - """Test the return type of repr(Page(u'<non-ascii>')) is str.""" - page = pywikibot.Page(self.get_site(), u'Ō') + """Test the return type of repr(Page('<non-ascii>')) is str.""" + page = pywikibot.Page(self.get_site(), 'Ō') self.assertIsInstance(repr(page), str)
@unittest.skipIf(not PY2, 'Python 2 specific test') def test_unicode_value(self): - """Test repr(Page(u'<non-ascii>')) is represented simply as utf8.""" - page = pywikibot.Page(self.get_site(), u'Ō') + """Test repr(Page('<non-ascii>')) is represented simply as utf8.""" + page = pywikibot.Page(self.get_site(), 'Ō') self.assertEqual(repr(page), b'Page(\xc5\x8c)')
@unittest.skipIf(not PY2, 'Python 2 specific test') def test_unicode_percent_r_failure(self): - """Test u'{x!r}'.format(Page(u'<non-ascii>')) raises exception on Python 2.""" + """Test '{x!r}'.format() raises exception for non-ASCII Page.""" # This raises an exception on Python 2, but passes on Python 3 - page = pywikibot.Page(self.get_site(), u'Ō') - self.assertRaisesRegex(UnicodeDecodeError, '', unicode.format, u'{0!r}', page) + page = pywikibot.Page(self.get_site(), 'Ō') + self.assertRaisesRegex(UnicodeDecodeError, '', unicode.format, + '{0!r}', page)
@unittest.skipIf(PY2, 'Python 3+ specific test') def test_unicode_value_py3(self): @@ -736,7 +744,7 @@
@unittest.skipIf(not PY2, 'Python 2 specific test') def test_incapable_encoding(self): - """Test that repr still works even if the console encoding does not.""" + """Test that repr works even if console encoding does not.""" self.assertEqual(repr(self.page), b'Page(\u014c)')
@@ -759,8 +767,8 @@ page = pywikibot.Page(site, 'not_existent_page_for_pywikibot_tests') if page.exists(): raise unittest.SkipTest( - "Page %s exists! Change page name in tests/page_tests.py" - % page.title()) + 'Page {} exists! Change page name in tests/page_tests.py' + .format(page.title()))
# Ban all compliant bots (shortcut). page.text = '{{nobots}}' @@ -770,20 +778,20 @@ # Ban all compliant bots not in the list, syntax for de wp. page.text = '{{nobots|HagermanBot,Werdnabot}}' self.assertTrue(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Ban all compliant bots not in the list, syntax for de wp. - page.text = '{{nobots|%s, HagermanBot,Werdnabot}}' % user + page.text = '{{nobots|{}, HagermanBot,Werdnabot}}'.format(user) self.assertFalse(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Ban all bots, syntax for de wp. page.text = '{{nobots|all}}' self.assertFalse(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Allow all bots (shortcut). page.text = '{{bots}}' @@ -793,50 +801,50 @@ # Ban all compliant bots not in the list. page.text = '{{bots|allow=HagermanBot,Werdnabot}}' self.assertFalse(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Ban all compliant bots in the list. page.text = '{{bots|deny=HagermanBot,Werdnabot}}' self.assertTrue(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Ban all compliant bots not in the list. - page.text = '{{bots|allow=%s, HagermanBot}}' % user + page.text = '{{bots|allow={}, HagermanBot}}'.format(user) self.assertTrue(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Ban all compliant bots in the list. - page.text = '{{bots|deny=%s, HagermanBot}}' % user + page.text = '{{bots|deny={}, HagermanBot}}'.format(user) self.assertFalse(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Allow all bots. page.text = '{{bots|allow=all}}' self.assertTrue(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Ban all compliant bots. page.text = '{{bots|allow=none}}' self.assertFalse(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Ban all compliant bots. page.text = '{{bots|deny=all}}' self.assertFalse(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Allow all bots. page.text = '{{bots|deny=none}}' self.assertTrue(page.botMayEdit(), - u'%s: %s but user=%s' - % (page.text, page.botMayEdit(), user)) + '{}: {} but user={}' + .format(page.text, page.botMayEdit(), user))
# Ban all users including bots. page.text = '{{in use}}' @@ -905,32 +913,34 @@ def testIsRedirect(self): """Test C{Page.isRedirectPage()} and C{Page.getRedirectTarget}.""" site = self.get_site() - p1 = pywikibot.Page(site, u'User:Legoktm/R1') - p2 = pywikibot.Page(site, u'User:Legoktm/R2') + p1 = pywikibot.Page(site, 'User:Legoktm/R1') + p2 = pywikibot.Page(site, 'User:Legoktm/R2') self.assertTrue(p1.isRedirectPage()) self.assertEqual(p1.getRedirectTarget(), p2)
def testPageGet(self): """Test C{Page.get()} on different types of pages.""" site = self.get_site() - p1 = pywikibot.Page(site, u'User:Legoktm/R2') - p2 = pywikibot.Page(site, u'User:Legoktm/R1') - p3 = pywikibot.Page(site, u'User:Legoktm/R3') + p1 = pywikibot.Page(site, 'User:Legoktm/R2') + p2 = pywikibot.Page(site, 'User:Legoktm/R1') + p3 = pywikibot.Page(site, 'User:Legoktm/R3')
- text = u'This page is used in the [[mw:Manual:Pywikipediabot]] testing suite.' + text = ('This page is used in the [[mw:Manual:Pywikipediabot]] ' + 'testing suite.') self.assertEqual(p1.get(), text) self.assertRaisesRegex(pywikibot.exceptions.IsRedirectPage, r'{0} is a redirect page.' .format(re.escape(str(p2))), p2.get) - self.assertRaisesRegex(pywikibot.exceptions.NoPage, NO_PAGE_RE, p3.get) + self.assertRaisesRegex( + pywikibot.exceptions.NoPage, NO_PAGE_RE, p3.get)
def test_set_redirect_target(self): """Test set_redirect_target method.""" # R1 redirects to R2 and R3 doesn't exist. site = self.get_site() - p1 = pywikibot.Page(site, u'User:Legoktm/R2') - p2 = pywikibot.Page(site, u'User:Legoktm/R1') - p3 = pywikibot.Page(site, u'User:Legoktm/R3') + p1 = pywikibot.Page(site, 'User:Legoktm/R2') + p2 = pywikibot.Page(site, 'User:Legoktm/R1') + p3 = pywikibot.Page(site, 'User:Legoktm/R3')
text = p2.get(get_redirect=True) self.assertRaisesRegex(pywikibot.exceptions.IsNotRedirectPage, @@ -953,7 +963,8 @@ """Test purging the mainpage.""" mainpage = self.get_mainpage() self.assertIsInstance(mainpage.purge(), bool) - self.assertEqual(mainpage.purge(), mainpage.purge(forcelinkupdate=None)) + self.assertEqual(mainpage.purge(), + mainpage.purge(forcelinkupdate=None))
def test_watch(self): """Test Page.watch, with and without unwatch enabled.""" @@ -981,7 +992,7 @@ def test_delete(self): """Test the site.delete and site.undelete method.""" site = self.get_site() - p = pywikibot.Page(site, u'User:Unicodesnowman/DeleteTest') + p = pywikibot.Page(site, 'User:Unicodesnowman/DeleteTest') # Ensure the page exists p.text = 'pywikibot unit test page' p.save('#redirect[[unit test]]', botflag=True) @@ -990,10 +1001,11 @@ p.delete(reason='pywikibot unit test', prompt=False, mark=False) self.assertEqual(p._pageid, 0) self.assertEqual(p.isRedirectPage(), False) - self.assertRaisesRegex(pywikibot.NoPage, NO_PAGE_RE, p.get, force=True) + self.assertRaisesRegex(pywikibot.NoPage, + NO_PAGE_RE, p.get, force=True) # Test undeleting last two revisions del_revs = list(p.loadDeletedRevisions()) - revid = p.getDeletedRevision(del_revs[-1])[u'revid'] + revid = p.getDeletedRevision(del_revs[-1])['revid'] p.markDeletedRevision(del_revs[-1]) p.markDeletedRevision(del_revs[-2]) self.assertRaisesRegex(ValueError, 'is not a deleted revision', @@ -1014,9 +1026,9 @@ def test_applicable_protections(self): """Test Page.applicable_protections.""" site = self.get_site() - p1 = pywikibot.Page(site, u'User:Unicodesnowman/NonexistentPage') - p2 = pywikibot.Page(site, u'User:Unicodesnowman/ProtectTest') - p3 = pywikibot.Page(site, u'File:Wiki.png') + p1 = pywikibot.Page(site, 'User:Unicodesnowman/NonexistentPage') + p2 = pywikibot.Page(site, 'User:Unicodesnowman/ProtectTest') + p3 = pywikibot.Page(site, 'File:Wiki.png')
# from the API, since 1.25wmf14 pp1 = p1.applicable_protections() @@ -1049,13 +1061,13 @@ def test_protect(self): """Test Page.protect.""" site = self.get_site() - p1 = pywikibot.Page(site, u'User:Unicodesnowman/ProtectTest') + p1 = pywikibot.Page(site, 'User:Unicodesnowman/ProtectTest')
p1.protect(protections={'edit': 'sysop', 'move': 'autoconfirmed'}, reason=u'Pywikibot unit test') self.assertEqual(p1.protection(), - {u'edit': (u'sysop', u'infinity'), - u'move': (u'autoconfirmed', u'infinity')}) + {'edit': ('sysop', 'infinity'), + 'move': ('autoconfirmed', 'infinity')})
p1.protect(protections={'edit': '', 'move': ''}, reason=u'Pywikibot unit test') @@ -1064,17 +1076,17 @@ def test_protect_alt(self): """Test of Page.protect that works around T78522.""" site = self.get_site() - p1 = pywikibot.Page(site, u'User:Unicodesnowman/ProtectTest') + p1 = pywikibot.Page(site, 'User:Unicodesnowman/ProtectTest')
p1.protect(protections={'edit': 'sysop', 'move': 'autoconfirmed'}, - reason=u'Pywikibot unit test') + reason='Pywikibot unit test') self.assertEqual(p1.protection(), - {u'edit': (u'sysop', u'infinity'), - u'move': (u'autoconfirmed', u'infinity')}) + {'edit': ('sysop', 'infinity'), + 'move': ('autoconfirmed', 'infinity')}) # workaround - p1 = pywikibot.Page(site, u'User:Unicodesnowman/ProtectTest') + p1 = pywikibot.Page(site, 'User:Unicodesnowman/ProtectTest') p1.protect(protections={'edit': '', 'move': ''}, - reason=u'Pywikibot unit test') + reason='Pywikibot unit test') self.assertEqual(p1.protection(), {})
@@ -1093,18 +1105,25 @@ """Test valid entities.""" self.assertEqual(pywikibot.page.html2unicode('A&O'), 'A&O') self.assertEqual(pywikibot.page.html2unicode('py'), 'py') - self.assertEqual(pywikibot.page.html2unicode('𐀀'), u'\U00010000') - self.assertEqual(pywikibot.page.html2unicode('p&y'), 'p&y') + self.assertEqual(pywikibot.page.html2unicode('𐀀'), + '\U00010000') + self.assertEqual(pywikibot.page.html2unicode('p&y'), + 'p&y') self.assertEqual(pywikibot.page.html2unicode('€'), '€')
def test_ignore_entities(self): """Test ignore entities.""" - self.assertEqual(pywikibot.page.html2unicode('A&O', [38]), 'A&O') - self.assertEqual(pywikibot.page.html2unicode('A&O', [38]), 'A&O') - self.assertEqual(pywikibot.page.html2unicode('A&O', [38]), 'A&O') + self.assertEqual(pywikibot.page.html2unicode('A&O', [38]), + 'A&O') + self.assertEqual(pywikibot.page.html2unicode('A&O', [38]), + 'A&O') + self.assertEqual(pywikibot.page.html2unicode('A&O', [38]), + 'A&O') self.assertEqual(pywikibot.page.html2unicode('A&O', [37]), 'A&O') - self.assertEqual(pywikibot.page.html2unicode('€', [128]), '€') - self.assertEqual(pywikibot.page.html2unicode('€', [8364]), '€') + self.assertEqual(pywikibot.page.html2unicode('€', [128]), + '€') + self.assertEqual(pywikibot.page.html2unicode('€', [8364]), + '€') self.assertEqual(pywikibot.page.html2unicode(''), '')
@@ -1114,7 +1133,8 @@
def test_invalid_entities(self): """Test texts with invalid entities.""" - self.assertEqual(pywikibot.page.html2unicode('A¬aname;O'), 'A¬aname;O') + self.assertEqual(pywikibot.page.html2unicode('A¬aname;O'), + 'A¬aname;O') self.assertEqual(pywikibot.page.html2unicode('Af;O'), 'Af;O') self.assertEqual(pywikibot.page.html2unicode('f'), 'f') self.assertEqual(pywikibot.page.html2unicode('py'), 'py') diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py index 39b8fec..63ca9b0 100755 --- a/tests/pagegenerators_tests.py +++ b/tests/pagegenerators_tests.py @@ -38,22 +38,22 @@
en_wp_page_titles = ( # just a bunch of randomly selected titles for English Wikipedia tests - u"Eastern Sayan", - u"The Addams Family (pinball)", - u"Talk:Nowy Sącz", - u"Talk:Battle of Węgierska Górka", - u"Template:!", - u"Template:Template", + 'Eastern Sayan', + 'The Addams Family (pinball)', + 'Talk:Nowy Sącz', + 'Talk:Battle of Węgierska Górka', + 'Template:!', + 'Template:Template', )
en_wp_nopage_titles = ( - u"Cities in Burkina Faso", - u"Talk:Hispanic (U.S. Census)", - u"Talk:Stołpce", - u"Template:!/Doc", - u"Template:!/Meta", - u"Template:Template/Doc", - u"Template:Template/Meta", + 'Cities in Burkina Faso', + 'Talk:Hispanic (U.S. Census)', + 'Talk:Stołpce', + 'Template:!/Doc', + 'Template:!/Meta', + 'Template:Template/Doc', + 'Template:Template/Meta', )
@@ -80,11 +80,11 @@
def test_module_import(self): """Test module import.""" - self.assertIn("pywikibot.pagegenerators", sys.modules) + self.assertIn('pywikibot.pagegenerators', sys.modules)
def test_PagesFromTitlesGenerator(self): """Test PagesFromTitlesGenerator.""" - self.assertFunction("PagesFromTitlesGenerator") + self.assertFunction('PagesFromTitlesGenerator') gen = pagegenerators.PagesFromTitlesGenerator(self.titles, self.site) self.assertPagelistTitles(gen, self.titles)
@@ -105,14 +105,13 @@ gen = pagegenerators.NamespaceFilterPageGenerator(gen, (1, 10), site) self.assertEqual(len(tuple(gen)), 10) gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site) - gen = pagegenerators.NamespaceFilterPageGenerator(gen, - ('Talk', 'Template'), - site) + gen = pagegenerators.NamespaceFilterPageGenerator( + gen, ('Talk', 'Template'), site) self.assertEqual(len(tuple(gen)), 10)
def test_RegexFilterPageGenerator(self): """Test RegexFilterPageGenerator.""" - self.assertFunction("RegexFilterPageGenerator") + self.assertFunction('RegexFilterPageGenerator') gen = pagegenerators.PagesFromTitlesGenerator(self.titles, self.site) gen = pagegenerators.RegexFilterPageGenerator(gen, '/doc') self.assertPagelistTitles(gen, @@ -137,12 +136,12 @@ quantifier='all') self.assertPagelistTitles(gen, []) gen = pagegenerators.PagesFromTitlesGenerator(self.titles, self.site) - gen = pagegenerators.RegexFilterPageGenerator(gen, ['Template', '/meta'], - quantifier='all') + gen = pagegenerators.RegexFilterPageGenerator( + gen, ['Template', '/meta'], quantifier='all') self.assertPagelistTitles(gen, ('Template:Template/Meta', )) gen = pagegenerators.PagesFromTitlesGenerator(self.titles, self.site) - gen = pagegenerators.RegexFilterPageGenerator(gen, ['template', '/meta'], - quantifier='any') + gen = pagegenerators.RegexFilterPageGenerator( + gen, ['template', '/meta'], quantifier='any') self.assertPagelistTitles(gen, ('Template:Template', 'Template:!/Meta', @@ -150,33 +149,33 @@ 'Template:Template/Meta')) gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site=self.site) - gen = pagegenerators.RegexFilterPageGenerator(gen, ['template', '/meta'], - quantifier='any', - ignore_namespace=False) + gen = pagegenerators.RegexFilterPageGenerator( + gen, ['template', '/meta'], quantifier='any', + ignore_namespace=False) self.assertEqual(len(tuple(gen)), 6) gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site=self.site) - gen = pagegenerators.RegexFilterPageGenerator(gen, ['template', '/meta'], - quantifier='all', - ignore_namespace=False) + gen = pagegenerators.RegexFilterPageGenerator( + gen, ['template', '/meta'], quantifier='all', + ignore_namespace=False) self.assertPagelistTitles(gen, ('Template:!/Meta', 'Template:Template/Meta')) gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site=self.site) - gen = 
pagegenerators.RegexFilterPageGenerator(gen, ['template', '/meta'], - quantifier='none', - ignore_namespace=False) + gen = pagegenerators.RegexFilterPageGenerator( + gen, ['template', '/meta'], quantifier='none', + ignore_namespace=False) self.assertEqual(len(tuple(gen)), 7)
def test_RegexBodyFilterPageGenerator(self): """Test RegexBodyFilterPageGenerator.""" - self.assertFunction("RegexBodyFilterPageGenerator") + self.assertFunction('RegexBodyFilterPageGenerator') gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site=self.site) pages = [] for p in gen: - p.text = u"This is the content of %s as a sample" % p.title() + p.text = 'This is the content of {} as a sample'.format(p.title()) pages.append(p) gen = pagegenerators.RegexBodyFilterPageGenerator(iter(pages), '/doc') self.assertPagelistTitles(gen, @@ -195,7 +194,8 @@ family = 'wikisource' code = 'en'
- base_title = 'Page:06-24-1920 -The Story of the Jones County Calf Case.pdf/%s' + base_title = ('Page:06-24-1920 -The Story of the Jones County Calf ' + 'Case.pdf/%s')
def setUp(self): """Setup tests.""" @@ -222,7 +222,8 @@ family = 'wikisource' code = 'en'
- base_title = 'Page:06-24-1920 -The Story of the Jones County Calf Case.pdf/%s' + base_title = ('Page:06-24-1920 -The Story of the Jones County ' + 'Calf Case.pdf/%s') category_list = ['Category:Validated']
def setUp(self): @@ -230,13 +231,15 @@ super(TestCategoryFilterPageGenerator, self).setUp() self.site = self.get_site() self.titles = [self.base_title % i for i in range(1, 11)] - self.catfilter_list = [pywikibot.Category(self.site, cat) for cat in self.category_list] + self.catfilter_list = [pywikibot.Category( + self.site, cat) for cat in self.category_list]
def test_CategoryFilterPageGenerator(self): """Test CategoryFilterPageGenerator.""" site = self.site gen = pagegenerators.PagesFromTitlesGenerator(self.titles, site) - gen = pagegenerators.CategoryFilterPageGenerator(gen, self.catfilter_list, site) + gen = pagegenerators.CategoryFilterPageGenerator( + gen, self.catfilter_list, site) self.assertEqual(len(tuple(gen)), 10)
@@ -281,9 +284,9 @@ def test_first_edit(self): """Test first edit.""" expect = ( - u'The Addams Family (pinball)', - u'Talk:Nowy Sącz', - u'Template:Template', + 'The Addams Family (pinball)', + 'Talk:Nowy Sącz', + 'Template:Template', ) gen = PagesFromTitlesGenerator(self.titles, self.site) gen = pagegenerators.EdittimeFilterPageGenerator( @@ -361,7 +364,8 @@ def test_petscan(self): """Test PetScanPageGenerator.""" site = self.get_site() - gen = pagegenerators.PetScanPageGenerator(['Pywikibot Protect Test'], True, None, site) + gen = pagegenerators.PetScanPageGenerator(['Pywikibot Protect Test'], + True, None, site) try: self.assertPagelistTitles(gen, titles=( 'User:Sn1per/ProtectTest1', 'User:Sn1per/ProtectTest2'), @@ -369,14 +373,18 @@ except ServerError as e: self.skipTest(e)
- gen = pagegenerators.PetScanPageGenerator(['Pywikibot Protect Test'], False, None, site) + gen = pagegenerators.PetScanPageGenerator(['Pywikibot Protect Test'], + False, None, site) self.assertPagelistTitles(gen, titles=('User:Sn1per/ProtectTest1', - 'User:Sn1per/ProtectTest2'), site=site) + 'User:Sn1per/ProtectTest2'), + site=site)
- gen = pagegenerators.PetScanPageGenerator(['Pywikibot PetScan Test', - 'Pywikibot Category That Needs&ToBe!Encoded', - 'Test'], True, None, site) - self.assertPagelistTitles(gen, titles=('User:Sn1per/PetScanTest1',), site=site) + gen = pagegenerators.PetScanPageGenerator( + ['Pywikibot PetScan Test', + 'Pywikibot Category That Needs&ToBe!Encoded', + 'Test'], True, None, site) + self.assertPagelistTitles(gen, titles=('User:Sn1per/PetScanTest1',), + site=site)
class TestRepeatingGenerator(RecentChangesTestCase): @@ -427,7 +435,8 @@ titles = list(pagegenerators.TextfilePageGenerator(filename, site)) self.assertEqual(len(titles), len(self.expected_titles)) expected_titles = [ - expected_title[self.title_columns[site.namespaces[page.namespace()].case]] + expected_title[self.title_columns[site.namespaces[page.namespace()] + .case]] for expected_title, page in zip(self.expected_titles, titles)] self.assertPageTitlesEqual(titles, expected_titles)
@@ -438,7 +447,8 @@ titles = list(pagegenerators.TextfilePageGenerator(filename, site)) self.assertEqual(len(titles), len(self.expected_titles)) expected_titles = [ - expected_title[self.title_columns[site.namespaces[page.namespace()].case]] + expected_title[self.title_columns[site.namespaces[page.namespace()] + .case]] for expected_title, page in zip(self.expected_titles, titles)] self.assertPageTitlesEqual(titles, expected_titles)
@@ -451,16 +461,19 @@ """Test YearPageGenerator.""" site = self.get_site() # Some languages are missing (T85681) - if (site.lang not in date.formats['YearBC']) or (site.lang not in date.formats['YearAD']): - raise unittest.SkipTest('Date formats for this language are missing from date.py') + if ((site.lang not in date.formats['YearBC']) or + (site.lang not in date.formats['YearAD'])): + raise unittest.SkipTest( + 'Date formats for this language are missing from date.py') start = -20 end = 2026
i = 0 for page in pagegenerators.YearPageGenerator(start, end, site): self.assertIsInstance(page, pywikibot.Page) - self.assertEqual(date.formatYear(site.lang, start + i), page.title()) - self.assertNotEqual(page.title(), "0") + self.assertEqual(date.formatYear(site.lang, start + i), + page.title()) + self.assertNotEqual(page.title(), '0') i += 1 if start + i == 0: i += 1 @@ -593,7 +606,8 @@ if not page.isTalkPage(): pages.extend([page.toggleTalkPage()])
- self.assertTrue(all(isinstance(page, pywikibot.Page) for page in pages_out)) + self.assertTrue(all(isinstance(page, + pywikibot.Page) for page in pages_out)) self.assertIn(mainpage, pages_out) self.assertIn(mainpage.toggleTalkPage(), pages_out) self.assertEqual(len(pages_out), 2) @@ -608,9 +622,11 @@ """Test TestPreloadingEntityGenerator with ReferringPageGenerator.""" site = self.get_site() instance_of_page = pywikibot.Page(site, 'Property:P31') - ref_gen = pagegenerators.ReferringPageGenerator(instance_of_page, total=5) + ref_gen = pagegenerators.ReferringPageGenerator(instance_of_page, + total=5) gen = pagegenerators.PreloadingEntityGenerator(ref_gen) - self.assertTrue(all(isinstance(item, pywikibot.ItemPage) for item in gen)) + self.assertTrue(all(isinstance(item, + pywikibot.ItemPage) for item in gen))
class DryFactoryGeneratorTest(TestCase): @@ -761,7 +777,7 @@ return pywikibot.Page(site, 'Q37470')
def test_valid_qualifiers(self): - """Test ItemClaimFilterPageGenerator on sample page using valid qualifiers.""" + """Test ItemClaimFilterPageGenerator using valid qualifiers.""" qualifiers = { 'P580': pywikibot.WbTime(1950, 1, 1, precision=9, site=self.get_site()), @@ -771,7 +787,7 @@ True)
def test_invalid_qualifiers(self): - """Test ItemClaimFilterPageGenerator on sample page using invalid qualifiers.""" + """Test ItemClaimFilterPageGenerator with invalid qualifiers.""" qualifiers = { 'P580': 1950, 'P582': pywikibot.WbTime(1960, 1, 1, precision=9, @@ -796,7 +812,7 @@ False)
def test_no_qualifiers(self): - """Test ItemClaimFilterPageGenerator on sample page without qualifiers.""" + """Test ItemClaimFilterPageGenerator without qualifiers.""" self._simple_claim_test('P474', '+91', None, True) self._simple_claim_test('P463', 'Q37470', None, True) self._simple_claim_test('P625', '21,77', None, True) @@ -968,7 +984,7 @@ gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-ns:1') gf.handleArg('-prefixindex:a') - gf.handleArg("-limit:10") + gf.handleArg('-limit:10') gen = gf.getCombinedGenerator() self.assertIsNotNone(gen) self.assertPagesInNamespaces(gen, 1) @@ -1043,7 +1059,7 @@
# Get by pageids. gf = pagegenerators.GeneratorFactory(site=self.get_site()) - gf.handleArg('-pageid:%s' % pageids) + gf.handleArg('-pageid:{}'.format(pageids)) gen = gf.getCombinedGenerator() self.assertIsNotNone(gen) pages_from_pageid = list(gen) @@ -1175,7 +1191,8 @@ """Test generator of pages with lint errors.""" if not self.site.has_extension('Linter'): raise unittest.SkipTest( - 'The site {0} does not use Linter extension'.format(self.site)) + 'The site {0} does not use Linter extension' + .format(self.site)) gf = pagegenerators.GeneratorFactory(site=self.site) gf.handleArg('-ns:1') gf.handleArg('-limit:3') @@ -1193,7 +1210,8 @@ """Test generator of pages with lint errors.""" if not self.site.has_extension('Linter'): raise unittest.SkipTest( - 'The site {0} does not use Linter extension'.format(self.site)) + 'The site {0} does not use Linter extension' + .format(self.site)) gf = pagegenerators.GeneratorFactory(site=self.site) self.assertRaises(ValueError, gf.handleArg, '-linter:dummy')
@@ -1412,8 +1430,8 @@ """Test wrong logevents option.""" factory = pagegenerators.GeneratorFactory gf = factory() - self.assertFalse(gf.handleArg("-log")) - self.assertFalse(gf.handleArg("-log:text_here")) + self.assertFalse(gf.handleArg('-log')) + self.assertFalse(gf.handleArg('-log:text_here')) self.assertRaises(NotImplementedError, gf.handleArg, '-logevents:anyevent') # test that old format log option is not handled by any handler method. @@ -1530,8 +1548,8 @@ self.assertEqual(rcinfo['server_name'], site.hostname()) self.assertEqual(rcinfo['wiki'], site.dbName())
- for key in ["id", "type", "namespace", "title", "comment", "timestamp", - "user", "bot"]: + for key in ['id', 'type', 'namespace', 'title', 'comment', + 'timestamp', 'user', 'bot']: self.assertIn(key, rcinfo.keys())
diff --git a/tests/paraminfo_tests.py b/tests/paraminfo_tests.py index dff6d65..92e273d 100644 --- a/tests/paraminfo_tests.py +++ b/tests/paraminfo_tests.py @@ -83,7 +83,7 @@ if self.site.mw_version >= '1.24': types.append('unread')
- known = types + ['!%s' % item for item in types] + known = types + ['!{}'.format(item) for item in types]
self._check_param_subset(self.site, 'query+watchlist', 'show', known)
diff --git a/tests/patrolbot_tests.py b/tests/patrolbot_tests.py index 1f9fca1..17be8a1 100644 --- a/tests/patrolbot_tests.py +++ b/tests/patrolbot_tests.py @@ -24,7 +24,8 @@ * [[User:Test_2]]: [[Page 2]], [[Page 4]], [[Page 6]]
== Others == -* [[User:Prefixed]]: [[Special:PrefixIndex/Page 1]], [[Special:PREFIXINDEX/Page 2]] +* [[User:Prefixed]]: [[Special:PrefixIndex/Page 1]], + [[Special:PREFIXINDEX/Page 2]]
== More test 1 == * [[User:Test_1]]: [[Page 3]] @@ -57,13 +58,16 @@ """Test the method which returns whether a page is in the list.""" # Return the title if there is an exact match self.assertEqual(self.bot.in_list(['Foo', 'Foobar'], 'Foo'), 'Foo') - self.assertEqual(self.bot.in_list(['Foo', 'Foobar'], 'Foobar'), 'Foobar') + self.assertEqual(self.bot.in_list(['Foo', 'Foobar'], 'Foobar'), + 'Foobar')
# Return the first entry which starts with the title if there is no # exact match self.assertEqual(self.bot.in_list(['Foo', 'Foobar'], 'Foob'), 'Foo') - self.assertEqual(self.bot.in_list(['Foo', 'Foobar'], 'Foobarz'), 'Foo') - self.assertEqual(self.bot.in_list(['Foo', 'Foobar', 'Bar'], 'Barz'), 'Bar') + self.assertEqual(self.bot.in_list(['Foo', 'Foobar'], 'Foobarz'), + 'Foo') + self.assertEqual(self.bot.in_list(['Foo', 'Foobar', 'Bar'], 'Barz'), + 'Bar')
# '' returns .* if there is no exact match self.assertEqual(self.bot.in_list([''], 'Foo'), '.*') diff --git a/tests/plural_tests.py b/tests/plural_tests.py index 8ec053d..f89ca8c 100644 --- a/tests/plural_tests.py +++ b/tests/plural_tests.py @@ -36,10 +36,11 @@ index = rule['plural'](num) self.assertLess(index, rule['nplurals'], msg='Plural for {0} created an index {1} ' - '(greater than {2})'.format(num, index, - rule['nplurals'])) + '(greater than {2})' + .format(num, index, rule['nplurals'])) num_plurals.add(index) - self.assertCountEqual(num_plurals, list(range(rule['nplurals']))) + self.assertCountEqual(num_plurals, + list(range(rule['nplurals'])))
# Don't already fail on creation if callable(rule.get('plural')): diff --git a/tests/proofreadpage_tests.py b/tests/proofreadpage_tests.py index ff8fe3b..0f69c73 100644 --- a/tests/proofreadpage_tests.py +++ b/tests/proofreadpage_tests.py @@ -55,7 +55,8 @@ self._test_return_datatypes()
-class TestLoadRevisionsCachingProofreadPage(BasePageLoadRevisionsCachingTestBase): +class TestLoadRevisionsCachingProofreadPage( + BasePageLoadRevisionsCachingTestBase):
"""Test site.loadrevisions() caching."""
@@ -143,11 +144,12 @@ 'index': 'Index:Popular Science Monthly Volume 1.djvu', 'ql': 4, 'user': 'T. Mazzei', - 'header': u"{{rh|2|''THE POPULAR SCIENCE MONTHLY.''}}", - 'footer': u'\n{{smallrefs}}', - 'url_image': ('https://upload.wikimedia.org/wikipedia/commons/thumb/a/ac/' - 'Popular_Science_Monthly_Volume_1.djvu/' - 'page12-1024px-Popular_Science_Monthly_Volume_1.djvu.jpg'), + 'header': "{{rh|2|''THE POPULAR SCIENCE MONTHLY.''}}", + 'footer': '\n{{smallrefs}}', + 'url_image': ('https://upload.wikimedia.org/wikipedia/commons/' + 'thumb/a/ac/Popular_Science_Monthly_Volume_1.djvu/' + 'page12-1024px-Popular_Science_Monthly_Volume_1.djvu' + '.jpg'), }
valid_redlink = { @@ -200,7 +202,7 @@ self.assertEqual(page.title(), fixed_source.title())
def test_invalid_not_existing_page_source_wrong_ns(self): - """Test ProofreadPage from Page not existing in non-Page ns as source.""" + """Test ProofreadPage from Page not existing in non-Page ns.""" source = pywikibot.Page(self.site, self.not_existing_invalid['title1']) self.assertRaises(ValueError, ProofreadPage, source) @@ -242,7 +244,8 @@ # Fetch page text to instantiate page._full_header, in order to allow # for proper test result preparation. page.text - class_pagetext, div = self.class_pagetext_fmt[page._full_header._has_div] + class_pagetext, div = self.class_pagetext_fmt[ + page._full_header._has_div] self.assertEqual(page.text, self.fmt.format(user=self.site.username(), class_pagetext=class_pagetext, @@ -253,7 +256,8 @@ """Test ProofreadPage page decomposing/composing text.""" page = ProofreadPage(self.site, 'Page:dummy test page') page.text = '' - class_pagetext, div = self.class_pagetext_fmt[page._full_header._has_div] + class_pagetext, div = self.class_pagetext_fmt[ + page._full_header._has_div] self.assertEqual(page.text, self.fmt.format(user=self.site.username(), class_pagetext=class_pagetext, @@ -361,8 +365,10 @@
# Page without Index. page = ProofreadPage(self.site, self.existing_multilinked['title']) - index_page_1 = IndexPage(self.site, self.existing_multilinked['index_1']) - index_page_2 = IndexPage(self.site, self.existing_multilinked['index_2']) + index_page_1 = IndexPage(self.site, + self.existing_multilinked['index_1']) + index_page_2 = IndexPage(self.site, + self.existing_multilinked['index_2']) self.assertEqual(page.index, index_page_1) self.assertNotEqual(page.index, index_page_2) self.assertEqual(page._index, (index_page_1, [index_page_2])) @@ -610,7 +616,8 @@ page_set)
# Error if label does not exists. - self.assertRaises(KeyError, index_page.get_page_from_label, 'dummy label') + self.assertRaises(KeyError, index_page.get_page_from_label, + 'dummy label')
# Test get_page. for n in num_set:
pywikibot-commits@lists.wikimedia.org