jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/626847 )
Change subject: [4.0] Remove Python 2 code in casechecker.py
......................................................................
[4.0] Remove Python 2 code in casechecker.py
Also make some code improvements (modern string formatting, simpler truthiness checks, minor cleanups)
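A minimal sketch of the idioms this touches, assuming a working pywikibot
environment (the whitelist message is quoted from the diff below; the
surrounding scaffolding and the n_items value are made up):

    import pywikibot

    # Removed together with Python 2 support:
    #   from __future__ import absolute_import, division, unicode_literals
    #   class CaseChecker(object): ...
    #   pywikibot.output('Loaded whitelist with %i items' % n_items)

    # Python 3 only:
    class CaseChecker:
        """Case checker."""

    n_items = 0  # placeholder
    pywikibot.output('Loaded whitelist with {} items'.format(n_items))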
Change-Id: If0806454255d75b153eada209f40a7963fd0bc99
---
M scripts/casechecker.py
1 file changed, 76 insertions(+), 74 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/casechecker.py b/scripts/casechecker.py
index f744990..548e222 100755
--- a/scripts/casechecker.py
+++ b/scripts/casechecker.py
@@ -6,29 +6,25 @@
#
# Distributed under the terms of the MIT license.
#
-from __future__ import absolute_import, division, unicode_literals
-
import codecs
-from itertools import chain, combinations
import os
import re
-from string import ascii_letters
import sys
+from itertools import chain, combinations
+from string import ascii_letters
+
import pywikibot
from pywikibot import i18n
from pywikibot.data import api
-from pywikibot.tools import first_lower, first_upper, formatter, PY2
+from pywikibot.tools import first_lower, first_upper, formatter
from scripts.category import CategoryMoveRobot as CategoryMoveBot
-if PY2:
- from future_builtins import zip
-
-class CaseChecker(object):
+class CaseChecker:
"""Case checker."""
@@ -90,7 +86,7 @@
def __init__(self):
"""Initializer with arg parsing."""
for arg in pywikibot.handle_args():
- arg, sep, value = arg.partition(':')
+ arg, _, value = arg.partition(':')
if arg == '-from':
self.apfrom = value or pywikibot.input(
'Which page to start from: ')
@@ -124,14 +120,14 @@
pywikibot.showHelp()
sys.exit()
- if self.namespaces == [] and not self.doFailed:
- if self.apfrom == '':
+ if not self.namespaces and not self.doFailed:
+ if not self.apfrom:
# 0 should be after templates ns
self.namespaces = [14, 10, 12, 0]
else:
self.namespaces = [0]
- if self.aplimit is None:
+ if not self.aplimit:
self.aplimit = 200 if self.links else 'max'
if not self.doFailed:
@@ -141,7 +137,7 @@
'gapfilterredir': self.filterredir}
else:
self.queryParams = {'action': 'query'}
- if self.apfrom != '':
+ if self.apfrom:
pywikibot.output('Argument "-from" is ignored with "-failed"')
propParam = 'info'
@@ -156,11 +152,13 @@
if len(self.localSuspects) != len(self.latinSuspects):
raise ValueError('Suspects must be the same size')
+
if len(self.localKeyboard) != len(self.latinKeyboard):
raise ValueError('Keyboard info must be the same size')
if not os.path.isabs(self.wikilogfile):
self.wikilogfile = pywikibot.config.datafilepath(self.wikilogfile)
+
self.wikilog = self.OpenLogFile(self.wikilogfile)
if not os.path.isabs(self.failedTitles):
@@ -173,17 +171,15 @@
self.failedTitles += '.failed'
iterzip = zip(self.localSuspects, self.latinSuspects)
- self.lclToLatDict = {
- ord(local): latin for local, latin in iterzip}
- self.latToLclDict = {
- ord(latin): local for local, latin in iterzip}
+ self.lclToLatDict = {ord(local): latin for local, latin in iterzip}
+ self.latToLclDict = {ord(latin): local for local, latin in iterzip}
if self.localKeyboard is not None:
iterzip = zip(self.localKeyboard, self.latinKeyboard)
- self.lclToLatKeybDict = {
- ord(local): latin for local, latin in iterzip}
- self.latToLclKeybDict = {
- ord(latin): local for local, latin in iterzip}
+ self.lclToLatKeybDict = {ord(local): latin
+ for local, latin in iterzip}
+ self.latToLclKeybDict = {ord(latin): local
+ for local, latin in iterzip}
else:
self.lclToLatKeybDict = {}
self.latToLclKeybDict = {}
@@ -218,22 +214,21 @@
pageid = data['query']['pageids'][0]
links = data['query']['pages'][pageid]['links']
- allWords = [nn for n in links
- for nn in self.FindBadWords(n['title'])]
+ self.knownWords = {nn for n in links
+ for nn in self.FindBadWords(n['title'])}
- self.knownWords = set(allWords)
else:
raise ValueError('The number of pageids is not 1')
- pywikibot.output('Loaded whitelist with %i items'
- % len(self.knownWords))
- if len(self.knownWords) > 0:
+ pywikibot.output('Loaded whitelist with {} items'
+ .format(len(self.knownWords)))
+ if self.knownWords:
pywikibot.log('Whitelist: '
+ ', '.join(self.MakeLink(i, False)
for i in self.knownWords))
else:
- pywikibot.output('Whitelist is not known for language %s'
- % self.site.code)
+ pywikibot.output(
+ 'Whitelist is not known for language ' + self.site.code)
def RunQuery(self, params):
"""API query."""
@@ -268,7 +263,6 @@
else:
raise ValueError('Unexpected query-continue values: {}'
.format(qc))
- continue
def Run(self):
"""Run the bot."""
@@ -291,13 +285,9 @@
for data in self.RunQuery(self.queryParams):
self.ProcessDataBlock(data)
except Exception:
- pywikibot.output('Exception at Title = %s, Next = %s'
- % (self.currentTitle, self.apfrom))
- try:
- import traceback
- pywikibot.output(traceback.format_exc())
- except Exception:
- pywikibot.output('Unable to print exception info')
+ pywikibot.output('Exception at Title = {}, Next = {}'
+ .format(self.currentTitle, self.apfrom))
+ pywikibot.exception()
raise
def ProcessDataBlock(self, data):
@@ -310,13 +300,16 @@
printed = False
title = page['title']
self.currentTitle = title
+
if 'missing' in page:
continue
+
if firstItem:
if self.lastLetter != title[0]:
- pywikibot.ui.output('Processing %s\n' % title)
+ pywikibot.output('Processing {}\n'.format(title))
self.lastLetter = title[0]
firstItem = False
+
if self.titles:
err = self.ProcessTitle(title)
if err:
@@ -344,14 +337,16 @@
follow_redirects=False):
if p.namespace() == 2:
continue
+
oldText = p.text
newText = self.ReplaceLink(oldText, title,
newTitle)
if not self.PutNewPage(
- p, newText, [
- self.MakeMoveSummary(title,
- newTitle)]):
+ p, newText,
+ [self.MakeMoveSummary(title,
+ newTitle)]):
replErrors = True
+
if not replErrors:
editSummary = i18n.twtranslate(
self.site,
@@ -384,7 +379,7 @@
changed = True
if not changed:
- if len(err[1]) > 0:
+ if err[1]:
self.AppendLineToLog(self.failedTitles, title)
else:
self.AddNoSuggestionTitle(title)
@@ -393,14 +388,11 @@
printed = True
if self.links:
- allLinks = None
+ allLinks = []
if 'links' in page:
- allLinks = page['links']
+ allLinks += page['links']
if 'categories' in page:
- if allLinks:
- allLinks = allLinks + page['categories']
- else:
- allLinks = page['categories']
+ allLinks += page['categories']
if allLinks:
pageObj = None
@@ -412,7 +404,7 @@
ltxt = link['title']
err = self.ProcessTitle(ltxt)
if err:
- if len(err[1]) > 0:
+ if err[1]:
foundSuggestions = True
elif self.AddNoSuggestionTitle(ltxt):
continue
@@ -447,7 +439,8 @@
if foundSuggestions:
self.AppendLineToLog(self.failedTitles, title)
- if self.stopAfter > 0:
+
+ if self.stopAfter:
self.stopAfter -= 1
if self.stopAfter == 0:
raise ValueError('Stopping because we are done')
@@ -466,14 +459,15 @@
def ProcessTitle(self, title):
"""Process title."""
badWords = list(self.FindBadWords(title))
- if len(badWords) > 0:
+ if badWords:
# Allow known words, allow any roman numerals with local suffixes
badWords = {i for i in badWords
if i not in self.knownWords
and self.romanNumSfxPtrn.match(i) is not None}
- if len(badWords) == 0 or self.Page(title).is_filepage():
- return
+ if not badWords or self.Page(title).is_filepage():
+ return None
+
count = 0
ambigBadWords = set()
ambigBadWordsCount = 0
@@ -509,12 +503,13 @@
ambigBadWords.add(badWord)
# Cannot do len(ambigBadWords) because they might be duplicates
ambigBadWordsCount += 1
+
if not mightBeLcl and not mightBeLat:
# try to match one of the knownWords
bwLen = len(badWord)
kw = [w for w in self.knownWords if len(w) == bwLen]
for p in range(bwLen):
- if len(kw) == 0:
+ if not kw:
break
c = badWord[p]
co = ord(c)
@@ -539,18 +534,17 @@
if len(mapLcl) + len(mapLat) - ambigBadWordsCount < count:
# We cannot auto-translate - offer a list of suggested words
suggestions = list(mapLcl.values()) + list(mapLat.values())
- if len(suggestions) > 0:
+ if suggestions:
infoText += ', word suggestions: ' + ', '.join(
self.ColorCodeWord(t) for t in suggestions)
else:
infoText += ', no suggestions'
else:
-
# Replace all unambiguous bad words
for k, v in dict(chain(mapLat.items(), mapLcl.items())).items():
if k not in ambigBadWords:
title = title.replace(k, v)
- if len(ambigBadWords) == 0:
+ if not ambigBadWords:
# There are no ambiguity, we can safelly convert
possibleAlternatives.append(title)
infoText += ', convert to ' + self.MakeLink(title)
@@ -570,7 +564,7 @@
title2 = title2.replace(bw, mapLat[bw])
possibleAlternatives.append(title2)
- if len(possibleAlternatives) > 0:
+ if possibleAlternatives:
infoText += ', can be converted to ' + ', '.join(
self.MakeLink(t) for t in possibleAlternatives)
else:
@@ -579,8 +573,9 @@
def PickTarget(self, title, original, candidates):
"""Pick target from candidates."""
- if len(candidates) == 0:
- return
+ if not candidates:
+ return None
+
if len(candidates) == 1:
return candidates[0]
@@ -596,11 +591,14 @@
pagesRedir[newTitle] = dst.getRedirectTarget().title()
else:
pagesExist.append(newTitle)
+
if len(pagesExist) == 1:
return pagesExist[0]
- elif len(pagesExist) == 0 and len(pagesRedir) > 0:
+
+ if not pagesExist and pagesRedir:
if len(pagesRedir) == 1:
return list(pagesRedir.keys())[0]
+
t = None
for v in pagesRedir.values():
if not t:
@@ -613,8 +611,9 @@
return list(pagesRedir.keys())[0]
if not self.autonomous:
- pywikibot.output('Could not auto-decide for page %s. Which link '
- 'should be chosen?' % self.MakeLink(title, False))
+ pywikibot.output('Could not auto-decide for page {}. Which link '
+ 'should be chosen?'
+ .format(self.MakeLink(title, False)))
pywikibot.output('Original title: ', newline=False)
self.ColorCodeWord(original + '\n', True)
for count, t in enumerate(candidates, 1):
@@ -645,6 +644,7 @@
res += self.lclClrFnt
else:
res += self.latClrFnt
+
for letter in word:
if letter in self.localLtr:
if not lastIsCyr:
@@ -655,6 +655,7 @@
res += self.suffixClr + self.latClrFnt
lastIsCyr = False
res += letter
+
return res + self.suffixClr + '</b>'
def _ColorCodeWordScreen(self, word):
@@ -664,6 +665,7 @@
res += self.colorFormatLocalColor
else:
res += self.colorFormatLatinColor
+
for letter in word:
if letter in self.localLtr:
if not lastIsCyr:
@@ -674,14 +676,15 @@
res += self.colorFormatLatinColor
lastIsCyr = False
res += letter
+
return formatter.color_format(res + self.colorFormatSuffix)
def AddNoSuggestionTitle(self, title):
"""Add backlinks to log."""
if title in self.seenUnresolvedLinks:
return True
- self.seenUnresolvedLinks.add(title)
+ self.seenUnresolvedLinks.add(title)
params = {
'action': 'query',
'list': 'backlinks',
@@ -698,10 +701,10 @@
cl = len(bl)
redirs = len([i for i in bl if 'redirect' in i])
- if cl > 0 and 'query-continue' in data:
+ if cl and 'query-continue' in data:
count = '50+'
else:
- count = str(cl if cl > 0 else 'no backlinks')
+ count = str(cl or 'no backlinks')
self.AppendLineToLog(self.nosuggestions, '* {} ({}{})'
.format(self.MakeLink(title), count,
@@ -714,8 +717,8 @@
title = pageObj.title(as_link=True, textlink=True)
coloredMsg = ', '.join(self.ColorCodeWord(m) for m in msg)
if pageObj.text == pageTxt:
- self.WikiLog('* Error: Text replacement failed in %s (%s)'
- % (self.MakeLink(title, False), coloredMsg))
+ self.WikiLog('* Error: Text replacement failed in {} ({})'
+ .format(self.MakeLink(title, False), coloredMsg))
else:
pywikibot.output('Case Replacements: {}'.format(', '.join(msg)))
pageObj.text = pageTxt
@@ -727,11 +730,9 @@
self.site.mediawiki_message(
'comma-separator').join(msg)))
return True
- except KeyboardInterrupt:
- raise
except (pywikibot.LockedPage, pywikibot.PageNotSaved):
- self.WikiLog('* Error: Could not save updated page %s (%s)'
- % (self.MakeLink(title, False), coloredMsg))
+ self.WikiLog('* Error: Could not save updated page {} ({})'
+ .format(self.MakeLink(title, False), coloredMsg))
return False
def MakeMoveSummary(self, fromTitle, toTitle):
@@ -744,7 +745,7 @@
prf = '' if self.Page(title).namespace() == 0 else ':'
cc = '|««« {} »»»'.format(
self.ColorCodeWord(title) if colorcode else '')
- return '[[%s%s%s]]' % (prf, title, cc)
+ return '[[{}{}{}]]'.format(prf, title, cc)
def OpenLogFile(self, filename):
"""Open logfile."""
@@ -771,6 +772,7 @@
if len(frmParts) != len(toParts):
raise ValueError('Splitting parts do not match counts')
+
for i, part in enumerate(frmParts):
if part != len(toParts[i]):
raise ValueError('Splitting parts do not match word length')
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/626847
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: If0806454255d75b153eada209f40a7963fd0bc99
Gerrit-Change-Number: 626847
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Withoutaname <drevitchi(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/632368 )
Change subject: [IMPR] Give a more informative assert message in package_versions
......................................................................
[IMPR] Give a more informative assert message in package_versions
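The pattern in a minimal, self-contained sketch (the duplicate-path data is
invented purely to trigger the message; version.py builds the paths dict from
the loaded packages):

    # A bare assert only reports the failing line:
    #   assert path not in paths, 'Path of the package is in defined paths'
    # Interpolating the offending values makes the failure self-explaining:
    paths = {}
    for name, path in [('foo', '/site-packages/pkg/'),
                       ('bar', '/site-packages/pkg/')]:
        try:
            assert path not in paths, \
                'Path {} of the package {} is in defined paths as {}' \
                .format(path, name, paths[path])
        except AssertionError as error:
            print(error)  # ... of the package bar is in defined paths as foo
            continue
        paths[path] = name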
Change-Id: Ibdde29f519c97fffe5d75909f334585a8ff6f727
---
M pywikibot/bot.py
M pywikibot/version.py
2 files changed, 4 insertions(+), 3 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 96ea8c4..f03ed48 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -387,9 +387,8 @@
if config.verbose_output:
check_package_list += all_modules
- packages = version.package_versions(check_package_list)
-
log('PACKAGES:')
+ packages = version.package_versions(check_package_list)
for name in sorted(packages.keys()):
info = packages[name]
info.setdefault('path',
diff --git a/pywikibot/version.py b/pywikibot/version.py
index 6e5557c..45a77d0 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -530,7 +530,9 @@
path = path[0:path.index('__init__.py')]
info['path'] = path
- assert path not in paths, 'Path of the package is in defined paths'
+ assert path not in paths, \
+ 'Path {} of the package {} is in defined paths as {}' \
+ .format(path, name, paths[path])
paths[path] = name
if '__version__' in package.__dict__:
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/632368
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ibdde29f519c97fffe5d75909f334585a8ff6f727
Gerrit-Change-Number: 632368
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Hazard-SJ <hazardsjwiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/630595 )
Change subject: [cleanup] remove desupported setAction method
......................................................................
[cleanup] remove desupported setAction method
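A minimal migration sketch for callers of the removed helper, assuming a
configured pywikibot environment (the summary text is a placeholder):

    from pywikibot import config2 as config

    # Before (deprecated since 2014, now removed):
    #   pywikibot.setAction('Bot: updating file description')
    # After: assign the default edit summary on the config module directly,
    # as checkimages.py now does.
    config.default_edit_summary = 'Bot: updating file description'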
Change-Id: I324551feeb5c78e99504b7b88cad6939f858c279
---
M pywikibot/__init__.py
M scripts/checkimages.py
M tests/dry_site_tests.py
3 files changed, 31 insertions(+), 50 deletions(-)
Approvals:
Hazard-SJ: Looks good to me, but someone else must approve
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index df21568..a988141 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -57,9 +57,7 @@
from pywikibot.site import BaseSite
import pywikibot.textlib as textlib
from pywikibot.tools import (
- # __ to avoid conflict with ModuleDeprecationWrapper._deprecated
classproperty,
- deprecated as __deprecated,
deprecate_arg as _deprecate_arg,
issue_deprecation_warning,
normalize_username,
@@ -1274,13 +1272,6 @@
link_regex = re.compile(r'\[\[(?P<title>[^\]|[<>{}]*)(\|.*?)?\]\]')
-@__deprecated('comment parameter for page saving method', since='20140604',
- future_warning=True)
-def setAction(s):
- """Set a summary to use for changed page submissions."""
- config.default_edit_summary = s
-
-
def showDiff(oldtext, newtext, context=0):
"""
Output a string showing the differences between oldtext and newtext.
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 1d612fa..84a8adb 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -92,6 +92,7 @@
import pywikibot
from pywikibot.bot import suggest_help
+from pywikibot import config2 as config
from pywikibot.exceptions import NotEmailableError
from pywikibot.family import Family
from pywikibot import i18n
@@ -591,6 +592,7 @@
except pywikibot.NoPage:
pywikibot.output(self.imageName + ' has been deleted...')
return False
+
# You can use this function also to find only the user that
# has upload the image (FixME: Rewrite a bit this part)
if put:
@@ -603,6 +605,7 @@
except pywikibot.LockedPage:
pywikibot.output('File is locked. Skipping.')
return False
+
# paginetta it's the image page object.
try:
if reportPageObject == self.image and self.uploader:
@@ -617,6 +620,7 @@
self.report_image(self.image_to_report, self.rep_page, self.com,
repme)
return False
+
upBots = i18n.translate(self.site, uploadBots)
user = pywikibot.User(self.site, nick)
luser = user.title(as_url=True)
@@ -641,6 +645,7 @@
else:
self.notification2 = self.notification
second_text = False
+
# Getting the talk page's history, to check if there is another
# advise...
try:
@@ -669,6 +674,7 @@
pywikibot.output('The user page is blank')
second_text = False
testoattuale = i18n.translate(self.site, empty)
+
if self.commTalk:
commentox = self.commTalk
else:
@@ -677,8 +683,8 @@
if second_text:
newText = '{}\n\n{}'.format(testoattuale, self.notification2)
else:
- newText = '{0}\n\n== {1} ==\n{2}'.format(testoattuale, self.head,
- self.notification)
+ newText = '{}\n\n== {} ==\n{}'.format(testoattuale, self.head,
+ self.notification)
# Check maximum number of notifications for this talk page
if (self.num_notify is not None
@@ -1446,11 +1452,13 @@
pywikibot.output("Skipping {} because it's a redirect."
.format(self.imageName))
return
+
# Delete the fields where the templates cannot be loaded
regex_nowiki = re.compile(r'<nowiki>(.*?)</nowiki>', re.DOTALL)
regex_pre = re.compile(r'<pre>(.*?)</pre>', re.DOTALL)
self.imageCheckText = regex_nowiki.sub('', self.imageCheckText)
self.imageCheckText = regex_pre.sub('', self.imageCheckText)
+
# Deleting the useless template from the description (before adding
# sth in the image the original text will be reloaded, don't worry).
if self.isTagged():
@@ -1462,31 +1470,39 @@
if a_word in self.imageCheckText:
# There's a template, probably a license
brackets = True
+
# Is the extension allowed? (is it an image or f.e. a .xls file?)
if allowed_formats and extension.lower() not in allowed_formats:
delete = True
+
(license_found, hiddenTemplateFound) = self.smartDetection()
+
# Here begins the check block.
if brackets and license_found:
return
- elif delete:
+
+ if delete:
pywikibot.output('{} is not a file!'.format(self.imageName))
if not di:
pywikibot.output('No localized message given for '
"'delete_immediately'. Skipping.")
return
+
# Some formatting for delete immediately template
dels = dels % {'adding': di}
di = '\n' + di
+
# Modify summary text
- pywikibot.setAction(dels)
+ config.default_edit_summary = dels
+
canctext = di % extension
notification = din % {'file': self.image.title(as_link=True,
textlink=True)}
head = dih
self.report(canctext, self.imageName, notification, head)
return
- elif not self.imageCheckText.strip(): # empty image description
+
+ if not self.imageCheckText.strip(): # empty image description
pywikibot.output(
"The file's description for {} does not contain a license "
' template!'.format(self.imageName))
@@ -1498,17 +1514,15 @@
self.report(self.unvertext, self.imageName, notification, head,
smwl)
return
- else:
- pywikibot.output('{} has only text and not the specific '
- 'license...'.format(self.imageName))
- if hiddenTemplateFound and HiddenTN:
- notification = HiddenTN % self.imageName
- elif nn:
- notification = nn % self.imageName
- head = nh
- self.report(self.unvertext, self.imageName, notification, head,
- smwl)
- return
+
+ pywikibot.output('{} has only text and not the specific '
+ 'license...'.format(self.imageName))
+ if hiddenTemplateFound and HiddenTN:
+ notification = HiddenTN % self.imageName
+ elif nn:
+ notification = nn % self.imageName
+ head = nh
+ self.report(self.unvertext, self.imageName, notification, head, smwl)
def main(*args) -> bool:
diff --git a/tests/dry_site_tests.py b/tests/dry_site_tests.py
index dd3434a..2a71321 100644
--- a/tests/dry_site_tests.py
+++ b/tests/dry_site_tests.py
@@ -9,7 +9,7 @@
from pywikibot.comms.http import user_agent
-from tests.aspects import unittest, DefaultDrySiteTestCase, DeprecationTestCase
+from tests.aspects import unittest, DefaultDrySiteTestCase
class TestDrySite(DefaultDrySiteTestCase):
@@ -107,29 +107,5 @@
format_string='Foo ({script_comments})'))
-class TestSetAction(DeprecationTestCase):
-
- """Test the deprecated setAction function."""
-
- net = False
-
- def setUp(self):
- """Backup the original configuration."""
- super().setUp()
- self._old_config = pywikibot.config.default_edit_summary
-
- def tearDown(self):
- """Restore the original configuration."""
- pywikibot.config.default_edit_summary = self._old_config
- super().tearDown()
-
- def test_set_action(self):
- """Test deprecated setAction function."""
- pywikibot.setAction('{0}X{0}'.format(self._old_config))
- self.assertOneDeprecation(self.INSTEAD)
- self.assertEqual(pywikibot.config.default_edit_summary,
- '{0}X{0}'.format(self._old_config))
-
-
if __name__ == '__main__': # pragma: no cover
unittest.main()
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/630595
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I324551feeb5c78e99504b7b88cad6939f858c279
Gerrit-Change-Number: 630595
Gerrit-PatchSet: 6
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Hazard-SJ <hazardsjwiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/516527 )
Change subject: [IMPR] use namedtuple as result of textlib.extract_sections
......................................................................
[IMPR] use namedtuple as result of textlib.extract_sections
Modify tests accordingly
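A minimal usage sketch of the new result type, assuming a configured default
site (the sample wikitext is made up; the field access mirrors the updated
tests):

    import pywikibot
    from pywikibot.textlib import extract_sections

    text = ("'''A''' is a thing.\n\n"
            '== History of A ==\n'
            'Some history...\n\n'
            '[[Category:Things starting with A]]')
    result = extract_sections(text, pywikibot.Site())
    print(result.header)    # text above the first heading
    print(result.sections)  # list of (heading, content) tuples
    print(result.footer)    # trailing part, e.g. categories

Because the result is still a tuple, existing callers that unpack it as
(header, body, footer) keep working unchanged.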
Change-Id: Ie3e3e3f1891365178ad0b93fe1dfb2f8b1a0f0f4
---
M pywikibot/textlib.py
M tests/textlib_tests.py
2 files changed, 69 insertions(+), 60 deletions(-)
Approvals:
Hazard-SJ: Looks good to me, but someone else must approve
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 972c290..e2a177c 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -18,7 +18,7 @@
from collections import OrderedDict, namedtuple
from contextlib import suppress
from html.parser import HTMLParser
-from typing import Optional, Union
+from typing import List, NamedTuple, Optional, Tuple, Union
import pywikibot
from pywikibot.exceptions import InvalidTitle, SiteDefinitionError
@@ -834,6 +834,7 @@
# -------------------------------
_Heading = namedtuple('_Heading', ('text', 'start', 'end'))
_Section = namedtuple('_Section', ('title', 'content'))
+_Content = namedtuple('_Content', ('header', 'sections', 'footer'))
def _extract_headings(text: str, site) -> list:
@@ -864,16 +865,21 @@
return []
-def extract_sections(text: str, site=None) -> tuple:
+def extract_sections(
+ text: str, site=None
+) -> NamedTuple('_Content', [('header', str), # noqa: F821
+ ('body', List[Tuple[str, str]]), # noqa: F821
+ ('footer', str)]): # noqa: F821
"""
Return section headings and contents found in text.
- @return: The returned tuple contains the text parsed into three
- parts: The first part is a string containing header part above
- the first heading. The last part is also a string containing
- footer part after the last section. The middle part is a list
- of tuples, each tuple containing a string with section heading
- and a string with section content. Example article::
+ @return: The returned namedtuple contains the text parsed into
+ header, contents and footer parts: The header part is a string
+ containing text part above the first heading. The footer part
+ is also a string containing text part after the last section.
+ The section part is a list of tuples, each tuple containing a
+ string with section heading and a string with section content.
+ Example article::
'''A''' is a thing.
@@ -885,15 +891,14 @@
[[Category:Things starting with A]]
- ...is parsed into the following tuple::
+ ...is parsed into the following namedtuple::
- (header, body, footer)
- header = "'''A''' is a thing."
- body = [('== History of A ==', 'Some history...'),
- ('== Usage of A ==', 'Some usage...')]
- footer = '[[Category:Things starting with A]]'
+ result = extract_sections(text, site)
+ result.header = "'''A''' is a thing."
+ result.body = [('== History of A ==', 'Some history...'),
+ ('== Usage of A ==', 'Some usage...')]
+ result.footer = '[[Category:Things starting with A]]'
- @rtype: tuple of (str, list of tuples, str)
"""
headings = _extract_headings(text, site)
sections = _extract_sections(text, headings)
@@ -912,7 +917,7 @@
sections[-1].title, last_section_content[:-len(footer)])
else:
header = header[:-len(footer)]
- return header, sections, footer
+ return _Content(header, sections, footer)
# -----------------------------------------------
diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py
index b647ab6..d6db787 100644
--- a/tests/textlib_tests.py
+++ b/tests/textlib_tests.py
@@ -1628,71 +1628,77 @@
"""Test the extract_sections function."""
+ def _extract_sections_tests(self, result, header, sections, footer):
+ """Test extract_sections function."""
+ self.assertIsInstance(result, tuple)
+ self.assertIsInstance(result.sections, list)
+ self.assertEqual(result, (header, sections, footer))
+ self.assertEqual(result.header, header)
+ self.assertEqual(result.sections, sections)
+ self.assertEqual(result.footer, footer)
+ if result.sections:
+ for section in sections:
+ self.assertIsInstance(section, tuple)
+ self.assertLength(section, 2)
+
def test_no_sections_no_footer(self):
"""Test for text having no sections or footer."""
- self.assertEqual(
- extract_sections('text', self.site),
- ('text', [], '')
- )
+ text = 'text'
+ result = extract_sections(text, self.site)
+ self._extract_sections_tests(result, text, [], '')
def test_no_sections_with_footer(self):
"""Test for text having footer but no section."""
- self.assertEqual(
- extract_sections('text\n\n[[Category:A]]', self.site),
- ('text\n\n', [], '[[Category:A]]')
- )
+ text = 'text\n\n[[Category:A]]'
+ result = extract_sections(text, self.site)
+ self._extract_sections_tests(result, 'text\n\n', [], '[[Category:A]]')
def test_with_section_no_footer(self):
"""Test for text having sections but no footer."""
- self.assertEqual(
- extract_sections(
- 'text\n\n'
+ text = ('text\n\n'
'==title==\n'
- 'content',
- self.site),
- ('text\n\n', [('==title==', '\ncontent')], '')
- )
+ 'content')
+ result = extract_sections(text, self.site)
+ self._extract_sections_tests(
+ result, 'text\n\n', [('==title==', '\ncontent')], '')
def test_with_section_with_footer(self):
"""Test for text having sections and footer."""
- self.assertEqual(
- extract_sections(
- 'text\n\n'
+ text = ('text\n\n'
'==title==\n'
'content\n'
- '[[Category:A]]\n',
- self.site),
- ('text\n\n', [('==title==', '\ncontent\n')], '[[Category:A]]\n')
- )
+ '[[Category:A]]\n')
+ result = extract_sections(text, self.site)
+ self._extract_sections_tests(
+ result,
+ 'text\n\n', [('==title==', '\ncontent\n')], '[[Category:A]]\n')
def test_with_h1_and_h2_sections(self):
"""Test for text having h1 and h2 sections."""
- self.assertEqual(
- extract_sections(
- 'text\n\n'
+ text = ('text\n\n'
'=first level=\n'
'foo\n'
'==title==\n'
- 'bar',
- self.site),
- ('text\n\n',
- [('=first level=', '\nfoo\n'), ('==title==', '\nbar')],
- '')
- )
+ 'bar')
+ result = extract_sections(text, self.site)
+ self._extract_sections_tests(
+ result,
+ 'text\n\n',
+ [('=first level=', '\nfoo\n'), ('==title==', '\nbar')],
+ '')
def test_with_h4_and_h2_sections(self):
"""Test for text having h4 and h2 sections."""
- self.assertEqual(
- extract_sections(
- 'text\n\n'
+ text = ('text\n\n'
'====title====\n'
'==title 2==\n'
- 'content',
- self.site),
- ('text\n\n',
- [('====title====', '\n'), ('==title 2==', '\ncontent')],
- '')
- )
+ 'content')
+ result = extract_sections(text, self.site)
+ self._extract_sections_tests(
+ result,
+ 'text\n\n',
+ [('====title====', '\n'), ('==title 2==', '\ncontent')],
+ '')
def test_long_comment(self):
r"""Test for text having a long expanse of white space.
@@ -1705,10 +1711,8 @@
https://www.regular-expressions.info/catastrophic.html
"""
text = '<!-- -->'
- self.assertEqual(
- extract_sections(text, self.site),
- (text, [], '')
- )
+ result = extract_sections(text, self.site)
+ self._extract_sections_tests(result, text, [], '')
if __name__ == '__main__': # pragma: no cover
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/516527
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ie3e3e3f1891365178ad0b93fe1dfb2f8b1a0f0f4
Gerrit-Change-Number: 516527
Gerrit-PatchSet: 9
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Hazard-SJ <hazardsjwiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/i18n/+/632233 )
Change subject: Localisation updates from https://translatewiki.net.
......................................................................
Localisation updates from https://translatewiki.net.
Change-Id: Ic715ddd371d891fd1eb35064091fe7873118a306
---
M unprotect/ar.json
M unusedfiles/ar.json
M weblinkchecker/ar.json
3 files changed, 3 insertions(+), 3 deletions(-)
Approvals:
L10n-bot: Looks good to me, approved
jenkins-bot: Verified
diff --git a/unprotect/ar.json b/unprotect/ar.json
index ba5b26c..ea50ce6 100644
--- a/unprotect/ar.json
+++ b/unprotect/ar.json
@@ -9,5 +9,5 @@
"unprotect-images": "بوت: إزالة حماية كل الملفات على الصفحة %(page)s",
"unprotect-links": "بوت: رفع حماية كل الصفحات الموصولة من %(page)s",
"unprotect-ref": "بوت: رفع حماية كل الصفحات التي ترجع من %(page)s",
- "unprotect-simple": "بوت: رفع حماية قائمة من الملفات."
+ "unprotect-simple": "بوت: رفع حماية قائمة من الملفات"
}
diff --git a/unusedfiles/ar.json b/unusedfiles/ar.json
index e282624..bf6b128 100644
--- a/unusedfiles/ar.json
+++ b/unusedfiles/ar.json
@@ -5,5 +5,5 @@
"ديفيد"
]
},
- "unusedfiles-comment": "بوت: تعليم الملف كيتم"
+ "unusedfiles-comment": "بوت: وسم الملف كيتيم"
}
diff --git a/weblinkchecker/ar.json b/weblinkchecker/ar.json
index 6b1dc56..c6fd310 100644
--- a/weblinkchecker/ar.json
+++ b/weblinkchecker/ar.json
@@ -10,5 +10,5 @@
"weblinkchecker-badurl": "الوصلة الموفرة لا تبدو كأنها وصلة URL صحيحة",
"weblinkchecker-caption": "وصلة مكسورة",
"weblinkchecker-report": "خلال عدة عمليات أوتوماتيكية من البوت الوصلة الخارجية التالية كانت غير متوفرة، من فضلك تحقق من أن الوصلة لا تعمل وأزلها أو أصلحها في هذه الحالة!",
- "weblinkchecker-summary": "بوت: الإبلاغ عن وصلات خارجية غير متوفرة"
+ "weblinkchecker-summary": "بوت: الإبلاغ عن وصلة خارجية غير متوفرة"
}
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/i18n/+/632233
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/i18n
Gerrit-Branch: master
Gerrit-Change-Id: Ic715ddd371d891fd1eb35064091fe7873118a306
Gerrit-Change-Number: 632233
Gerrit-PatchSet: 1
Gerrit-Owner: L10n-bot <l10n-bot(a)translatewiki.net>
Gerrit-Reviewer: L10n-bot <l10n-bot(a)translatewiki.net>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged