jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/752129 )
Change subject: blockpageschecker: resolve pep8-naming issues
......................................................................
blockpageschecker: resolve pep8-naming issues
Change-Id: I56a175813da96fcd83a89dc47029e54d4a4e6e8d
---
M scripts/blockpageschecker.py
M tox.ini
2 files changed, 87 insertions(+), 89 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py
index b48f6a2..4ef04c6 100755
--- a/scripts/blockpageschecker.py
+++ b/scripts/blockpageschecker.py
@@ -44,7 +44,7 @@
"""
#
-# (C) Pywikibot team, 2007-2021
+# (C) Pywikibot team, 2007-2022
#
# Distributed under the terms of the MIT license.
#
@@ -65,13 +65,13 @@
# This is required for the text that is shown when you run this script
# with the parameter -help.
-docuReplacements = {
+docuReplacements = { # noqa: N816
'¶ms;': pagegenerators.parameterHelp,
}
# PREFERENCES
-templateSemiProtection = {
+template_semi_protection = {
'ar': [r'\{\{(?:[Tt]emplate:|قالب:|)(حماية\sجزئية)\}\}'],
'cs': [r'\{\{(?:[Tt]emplate:|[Šš]ablona:|)([Dd]louhodobě[ _]p|[Pp])'
r'olozamčeno(|[^\}]*)\}\}\s*'],
@@ -86,7 +86,7 @@
'ur': [r'\{\{(?:[Tt]emplate:|سانچہ:|)(نیم\sمحفوظ)\}\}']
}
# Regex to get the total-protection template
-templateTotalProtection = {
+template_total_protection = {
'ar': [r'\{\{(?:[Tt]emplate:|قالب:|)(حماية\sكاملة)\}\}'],
'cs': [r'\{\{(?:[Tt]emplate:|[Šš]ablona:|)([Dd]louhodobě[ _]z|[Zz])'
r'amčeno(|[^\}]*)\}\}\s*'],
@@ -105,7 +105,7 @@
}
# Regex to get the semi-protection move template
-templateSemiMoveProtection = {
+template_semi_move_protection = {
'it': [r'\{\{(?:[Tt]emplate:|)[Aa]vvisobloccospostamento(?:|[ _]scad\|.*?'
r'|\|.*?)\}\}'],
'ja': [r'(?<!\<nowiki\>)\{\{(?:[Tt]emplate:|)移動半保護'
@@ -113,7 +113,7 @@
}
# Regex to get the total-protection move template
-templateTotalMoveProtection = {
+template_total_move_protection = {
'it': [r'\{\{(?:[Tt]emplate:|)[Aa]vvisobloccospostamento(?:|[ _]scad\|.*?'
r'|\|.*?)\}\}'],
'ja': [r'(?<!\<nowiki\>)\{\{(?:[Tt]emplate:|)移動保護'
@@ -123,14 +123,14 @@
# If you use only one template for all the type of protection, put it here.
# You may use only one template or an unique template and some other "old"
# template that the script should still check (as on it.wikipedia)
-templateUnique = {
+template_unique = {
'ar': [r'\{\{(?:[Tt]emplate:|قالب:|)(محمية)\}\}'],
'it': [r'\{\{(?:[Tt]emplate:|)[Pp]rotetta\}\}'],
}
# Array: 0 => Semi-block, 1 => Total Block, 2 => Semi-Move, 3 => Total-Move,
# 4 => template-unique
-templateNoRegex = {
+template_no_regex = {
'ar': ['{{حماية جزئية}}', '{{حماية كاملة}}', None, None, '{{محمية}}'],
'cs': ['{{Polozamčeno}}', '{{Zamčeno}}', None, None, None],
'fr': ['{{Semi-protection}}', '{{Protection}}', None, None, None],
@@ -142,7 +142,7 @@
}
# Category where the bot will check
-categoryToCheck = {
+category_to_check = {
'ar': ['تصنيف:صفحات محمية'],
'cs': ['Kategorie:Wikipedie:Zamčené stránky',
'Kategorie:Wikipedie:Polozamčené stránky',
@@ -228,7 +228,7 @@
def skip_page(self, page):
"""Skip if the user has not permission to edit."""
# FIXME: This check does not work :
- # PreloadingGenerator cannot set correctly page.editRestriction
+    # PreloadingGenerator cannot set correctly page.edit_restriction
# (see bug T57322)
# if not page.has_permission():
# pywikibot.output(
@@ -250,39 +250,39 @@
def understand_block():
"""Understand if the page is blocked has the right template."""
results = 'sysop-total', 'autoconfirmed-total', 'unique'
- for index, template in enumerate((TTP, TSP, TU)):
+ for index, template in enumerate((ttp, tsp, tu)):
if not template:
continue
- for catchRegex in template:
- resultCatch = re.findall(catchRegex, text)
- if resultCatch:
+ for catch_regex in template:
+ result_catch = re.findall(catch_regex, text)
+ if result_catch:
return ParsedTemplate(
- results[index], catchRegex, 'modifying')
+ results[index], catch_regex, 'modifying')
- if TSMP and TTMP and TTP != TTMP and TSP != TSMP:
- for catchRegex in TTMP:
- resultCatch = re.findall(catchRegex, text)
- if resultCatch:
+ if tsmp and ttmp and ttp != ttmp and tsp != tsmp:
+ for catch_regex in ttmp:
+ result_catch = re.findall(catch_regex, text)
+ if result_catch:
return ParsedTemplate(
- 'sysop-move', catchRegex, 'modifying')
+ 'sysop-move', catch_regex, 'modifying')
- for catchRegex in TSMP:
- resultCatch = re.findall(catchRegex, text)
- if resultCatch:
+ for catch_regex in tsmp:
+ result_catch = re.findall(catch_regex, text)
+ if result_catch:
return ParsedTemplate(
- 'autoconfirmed-move', catchRegex, 'modifying')
+ 'autoconfirmed-move', catch_regex, 'modifying')
# If editable means that we have no regex, won't change anything
# with this regex
return ParsedTemplate('editable', r'\A', 'adding')
- TSP = i18n.translate(self.site, templateSemiProtection)
- TTP = i18n.translate(self.site, templateTotalProtection)
- TSMP = i18n.translate(self.site, templateSemiMoveProtection)
- TTMP = i18n.translate(self.site, templateTotalMoveProtection)
- TNR = i18n.translate(self.site, templateNoRegex)
- TU = i18n.translate(self.site, templateUnique)
+ tsp = i18n.translate(self.site, template_semi_protection)
+ ttp = i18n.translate(self.site, template_total_protection)
+ tsmp = i18n.translate(self.site, template_semi_move_protection)
+ ttmp = i18n.translate(self.site, template_total_move_protection)
+ tnr = i18n.translate(self.site, template_no_regex)
+ tu = i18n.translate(self.site, template_unique)
while True:
text, restrictions = yield
@@ -291,7 +291,7 @@
# Understand, according to the template in the page, what should
# be the protection and compare it with what there really is.
- TemplateInThePage = understand_block()
+ template_in_page = understand_block()
# Only to see if the text is the same or not...
oldtext = text
@@ -299,84 +299,84 @@
changes = -1
msg_type = None # type: Optional[str]
- editRestr = restrictions.get('edit')
- if not editRestr:
+ edit_restriction = restrictions.get('edit')
+ if not edit_restriction:
# page is not edit-protected
# Deleting the template because the page doesn't need it.
- if not (TTP or TSP):
+ if not (ttp or tsp):
raise Error(
'This script is not localized to use it on {}.\n'
- 'Missing "templateSemiProtection" or'
- '"templateTotalProtection"'.format(self.site.sitename))
+ 'Missing "template_semi_protection" or'
+ '"template_total_protection"'
+ .format(self.site.sitename))
- if TU:
- replaceToPerform = '|'.join(TTP + TSP + TU)
+ if tu:
+ replacement = '|'.join(ttp + tsp + tu)
else:
- replaceToPerform = '|'.join(TTP + TSP)
+ replacement = '|'.join(ttp + tsp)
text, changes = re.subn(
- '<noinclude>({})</noinclude>'.format(replaceToPerform),
+ '<noinclude>({})</noinclude>'.format(replacement),
'', text)
if not changes:
text, changes = re.subn(
- '({})'.format(replaceToPerform), '', text)
+ '({})'.format(replacement), '', text)
msg = 'The page is editable for all'
if not self.opt.move:
msg += ', deleting the template..'
pywikibot.output(msg + '.')
msg_type = 'deleting'
- elif editRestr[0] == 'sysop':
+ elif edit_restriction[0] == 'sysop':
# total edit protection
- if TemplateInThePage.blocktype == 'sysop-total' and TTP \
- or TemplateInThePage.blocktype == 'unique' and TU:
+ if template_in_page.blocktype == 'sysop-total' and ttp \
+ or template_in_page.blocktype == 'unique' and tu:
msg = 'The page is protected to the sysop'
if not self.opt.move:
msg += ', skipping...'
pywikibot.output(msg)
else:
- if not TNR or TU and not TNR[4] or not (TU or TNR[1]):
+ if not tnr or tu and not tnr[4] or not (tu or tnr[1]):
raise Error(
'This script is not localized to use it on \n{}. '
- 'Missing "templateNoRegex"'
+ 'Missing "template_no_regex"'
.format(self.site.sitename))
pywikibot.output(
'The page is protected to the sysop, but the template '
'seems not correct. Fixing...')
- if TU:
+ if tu:
text, changes = re.subn(
- TemplateInThePage.regex, TNR[4], text)
+ template_in_page.regex, tnr[4], text)
else:
text, changes = re.subn(
- TemplateInThePage.regex, TNR[1], text)
- msg_type = TemplateInThePage.msgtype
+ template_in_page.regex, tnr[1], text)
+ msg_type = template_in_page.msgtype
- elif TSP or TU:
- # implicitly
- # editRestr[0] = 'autoconfirmed', edit-Semi-protection
- if TemplateInThePage.blocktype in ('autoconfirmed-total',
- 'unique'):
+ elif tsp or tu:
+ # implicitly edit semi-protection
+ if template_in_page.blocktype in ('autoconfirmed-total',
+ 'unique'):
msg = ('The page is editable only for the autoconfirmed '
'users')
if not self.opt.move:
msg += ', skipping...'
pywikibot.output(msg)
else:
- if not TNR or TU and not TNR[4] or not (TU or TNR[1]):
+ if not tnr or tu and not tnr[4] or not (tu or tnr[1]):
raise Error(
'This script is not localized to use it on \n'
- '{}. Missing "templateNoRegex"'
+ '{}. Missing "template_no_regex"'
.format(self.site.sitename))
pywikibot.output(
'The page is editable only for the autoconfirmed '
'users, but the template seems not correct. Fixing...')
- if TU:
+ if tu:
text, changes = re.subn(
- TemplateInThePage.regex, TNR[4], text)
+ template_in_page.regex, tnr[4], text)
else:
text, changes = re.subn(
- TemplateInThePage.regex, TNR[0], text)
- msg_type = TemplateInThePage.msgtype
+ template_in_page.regex, tnr[0], text)
+ msg_type = template_in_page.msgtype
if not changes:
# We tried to fix edit-protection templates, but it did
@@ -385,53 +385,52 @@
if self.opt.move and changes > -1:
# checking move protection now
- moveRestr = restrictions.get('move')
+ move_restriction = restrictions.get('move')
changes = -1
- if not moveRestr:
+ if not move_restriction:
pywikibot.output('The page is movable for all, deleting '
'the template...')
# Deleting the template because the page doesn't need it.
- if TU:
- replaceToPerform = '|'.join(TSMP + TTMP + TU)
+ if tu:
+ replacement = '|'.join(tsmp + ttmp + tu)
else:
- replaceToPerform = '|'.join(TSMP + TTMP)
+ replacement = '|'.join(tsmp + ttmp)
text, changes = re.subn(
- '<noinclude>({})</noinclude>'.format(replaceToPerform),
+ '<noinclude>({})</noinclude>'.format(replacement),
'', text)
if not changes:
text, changes = re.subn(
- '({})'.format(replaceToPerform), '', text)
+ '({})'.format(replacement), '', text)
msg_type = 'deleting'
- elif moveRestr[0] == 'sysop':
+ elif move_restriction[0] == 'sysop':
# move-total-protection
- if TemplateInThePage.blocktype == 'sysop-move' and TTMP \
- or TemplateInThePage.blocktype == 'unique' and TU:
+ if template_in_page.blocktype == 'sysop-move' and ttmp \
+ or template_in_page.blocktype == 'unique' and tu:
pywikibot.output('The page is protected from moving '
'to the sysop, skipping...')
- if TU:
+ if tu:
# no changes needed, better to revert the old text.
text = oldtext
else:
pywikibot.output(
'The page is protected from moving to the sysop, '
'but the template seems not correct. Fixing...')
- if TU:
+ if tu:
text, changes = re.subn(
- TemplateInThePage.regex, TNR[4], text)
+ template_in_page.regex, tnr[4], text)
else:
text, changes = re.subn(
- TemplateInThePage.regex, TNR[3], text)
- msg_type = TemplateInThePage.msgtype
+ template_in_page.regex, tnr[3], text)
+ msg_type = template_in_page.msgtype
- elif TSMP or TU:
- # implicitly
- # moveRestr[0] = 'autoconfirmed', move-semi-protection
- if TemplateInThePage.blocktype in ('autoconfirmed-move',
- 'unique'):
+ elif tsmp or tu:
+ # implicitly move semi-protection
+ if template_in_page.blocktype in ('autoconfirmed-move',
+ 'unique'):
pywikibot.output('The page is movable only for the '
'autoconfirmed users, skipping...')
- if TU:
+ if tu:
# no changes needed, better to revert the old text.
text = oldtext
else:
@@ -439,13 +438,13 @@
'The page is movable only for the autoconfirmed '
'users, but the template seems not correct. '
'Fixing...')
- if TU:
+ if tu:
text, changes = re.subn(
- TemplateInThePage.regex, TNR[4], text)
+ template_in_page.regex, tnr[4], text)
else:
text, changes = re.subn(
- TemplateInThePage.regex, TNR[2], text)
- msg_type = TemplateInThePage.msgtype
+ template_in_page.regex, tnr[2], text)
+ msg_type = template_in_page.msgtype
if not changes:
# We tried to fix move-protection templates
@@ -465,7 +464,7 @@
:param args: command line arguments
"""
# Loading the comments
- global categoryToCheck, project_inserted
+ global category_to_check, project_inserted
options = {}
generator = None
@@ -500,7 +499,7 @@
if not generator:
# Define the category if no other generator has been set
gen = []
- categories = i18n.translate(site, categoryToCheck)
+ categories = i18n.translate(site, category_to_check)
for category_name in categories:
cat = pywikibot.Category(site, category_name)
# Define the generator
diff --git a/tox.ini b/tox.ini
index 5c9e621..94a9cc3 100644
--- a/tox.ini
+++ b/tox.ini
@@ -169,7 +169,6 @@
pywikibot/xmlreader.py: N802, N803, N806
scripts/archive/*: N802, N803, N806, N815, N816
scripts/add_text.py: N803, N806
- scripts/blockpageschecker.py : N806, N816
scripts/category.py: N802, N803, N806
scripts/category_redirect.py: N802, N803, N806
scripts/checkimages.py: N801, N802, N803, N806, N816
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/752129
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I56a175813da96fcd83a89dc47029e54d4a4e6e8d
Gerrit-Change-Number: 752129
Gerrit-PatchSet: 1
Gerrit-Owner: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/752050 )
Change subject: welcome: resolve pep8-naming issues
......................................................................
welcome: resolve pep8-naming issues
Change-Id: I7f291e25dd70813b689c691a4b277053dd8faa9a
---
M scripts/welcome.py
M tox.ini
2 files changed, 61 insertions(+), 61 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/welcome.py b/scripts/welcome.py
index e79bdc1..c0e7747 100755
--- a/scripts/welcome.py
+++ b/scripts/welcome.py
@@ -156,7 +156,7 @@
badwords at all but can be used for some bad-nickname.
"""
#
-# (C) Pywikibot team, 2006-2021
+# (C) Pywikibot team, 2006-2022
#
# Distributed under the terms of the MIT license.
#
@@ -441,22 +441,22 @@
"""Container class for global settings."""
- attachEditCount = 1 # edit count that an user required to be welcomed
- dumpToLog = 15 # number of users that are required to add the log
- offset = None # skip users newer than that timestamp
- timeoffset = 0 # skip users newer than # minutes
- recursive = True # define if the Bot is recursive or not
- timeRecur = 3600 # how much time (sec.) the bot waits before restart
- makeWelcomeLog = True # create the welcome log or not
- confirm = False # should bot ask to add user to bad-username list
- welcomeAuto = False # should bot welcome auto-created users
- filtBadName = False # check if the username is ok or not
- randomSign = False # should signature be random or not
- saveSignIndex = False # should save the signature index or not
- signFileName = None # File name, default: None
- defaultSign = '--~~~~' # default signature
- queryLimit = 50 # number of users that the bot load to check
- quiet = False # Users without contributions aren't displayed
+ attach_edit_count = 1  # edit count that a user requires to be welcomed
+ dump_to_log = 15 # number of users that are required to add the log
+ offset = None # skip users newer than that timestamp
+ timeoffset = 0 # skip users newer than # minutes
+ recursive = True # define if the Bot is recursive or not
+ time_recur = 3600 # seconds the bot waits before restart
+ make_welcome_log = True # create the welcome log or not
+ confirm = False # should bot ask to add user to bad-username list
+ welcome_auto = False # should bot welcome auto-created users
+ filt_bad_name = False # check if the username is ok or not
+ random_sign = False # should signature be random or not
+ save_sign_index = False # should save the signature index or not
+ sign_file_name = None # File name, default: None
+ default_sign = '--~~~~' # default signature
+ query_limit = 50 # number of users that the bot load to check
+ quiet = False # Users without contributions aren't displayed
class WelcomeBot(SingleSiteBot):
@@ -473,9 +473,9 @@
self.log_name = i18n.translate(self.site, logbook)
if not self.log_name:
- globalvar.makeWelcomeLog = False
- if globalvar.randomSign:
- self.defineSign(True)
+ globalvar.make_welcome_log = False
+ if globalvar.random_sign:
+ self.define_sign(True)
def check_managed_sites(self) -> None:
"""Check that site is managed by welcome.py."""
@@ -487,9 +487,9 @@
.format(self.site))
self.welcome_text = site_netext
- def badNameFilter(self, name, force=False) -> bool:
+ def bad_name_filer(self, name, force=False) -> bool:
"""Check for bad names."""
- if not globalvar.filtBadName:
+ if not globalvar.filt_bad_name:
return False
# initialize blacklist
@@ -605,7 +605,7 @@
else:
self._BAQueue = [name]
- if len(self._BAQueue) >= globalvar.dumpToLog:
+ if len(self._BAQueue) >= globalvar.dump_to_log:
self.report_bad_account()
def report_bad_account(self) -> None:
@@ -648,7 +648,7 @@
def makelogpage(self):
"""Make log page."""
- if not globalvar.makeWelcomeLog or not self.welcomed_users:
+ if not globalvar.make_welcome_log or not self.welcomed_users:
return
if self.site.code == 'it':
@@ -702,10 +702,10 @@
else:
start = globalvar.offset
for ue in self.site.logevents('newusers',
- total=globalvar.queryLimit,
+ total=globalvar.query_limit,
start=start):
if ue.action() == 'create' \
- or ue.action() == 'autocreate' and globalvar.welcomeAuto:
+ or ue.action() == 'autocreate' and globalvar.welcome_auto:
try:
user = ue.page()
except HiddenKeyError:
@@ -721,24 +721,24 @@
self.show_status()
strfstr = time.strftime('%d %b %Y %H:%M:%S (UTC)', time.gmtime())
pywikibot.output('Sleeping {} seconds before rerun. {}'
- .format(globalvar.timeRecur, strfstr))
- pywikibot.sleep(globalvar.timeRecur)
+ .format(globalvar.time_recur, strfstr))
+ pywikibot.sleep(globalvar.time_recur)
- def defineSign(self, force=False) -> List[str]:
+ def define_sign(self, force=False) -> List[str]:
"""Setup signature."""
- if hasattr(self, '_randomSignature') and not force:
- return self._randomSignature
+ if hasattr(self, '_random_signature') and not force:
+ return self._random_signature
sign_text = ''
creg = re.compile(r'^\* ?(.*?)$', re.M)
- if not globalvar.signFileName:
+ if not globalvar.sign_file_name:
sign_page_name = i18n.translate(self.site, random_sign)
if not sign_page_name:
self.show_status(Msg.WARN)
pywikibot.output(
"{} doesn't allow random signature, force disable."
.format(self.site))
- globalvar.randomSign = False
+ globalvar.random_sign = False
return []
sign_page = pywikibot.Page(self.site, sign_page_name)
@@ -748,23 +748,24 @@
else:
pywikibot.output('The signature list page does not exist, '
'random signature will be disabled.')
- globalvar.randomSign = False
+ globalvar.random_sign = False
else:
try:
f = codecs.open(
- pywikibot.config.datafilepath(globalvar.signFileName), 'r',
+ pywikibot.config.datafilepath(globalvar.sign_file_name),
+ 'r',
encoding=config.console_encoding)
except LookupError:
f = codecs.open(pywikibot.config.datafilepath(
- globalvar.signFileName), 'r', encoding='utf-8')
+ globalvar.sign_file_name), 'r', encoding='utf-8')
except IOError:
pywikibot.error('No fileName!')
raise FilenameNotSet('No signature filename specified.')
sign_text = f.read()
f.close()
- self._randomSignature = creg.findall(sign_text)
- return self._randomSignature
+ self._random_signature = creg.findall(sign_text)
+ return self._random_signature
def skip_page(self, user) -> bool:
"""Check whether the user is to be skipped.
@@ -785,7 +786,7 @@
pywikibot.output('{} might be a global bot!'
.format(user.username))
- elif user.editCount() < globalvar.attachEditCount:
+ elif user.editCount() < globalvar.attach_edit_count:
if not user.editCount() == 0:
self.show_status(Msg.IGNORE)
pywikibot.output('{} has only {} contributions.'
@@ -811,18 +812,18 @@
.format(user.username))
return
- if self.badNameFilter(user.username):
+ if self.bad_name_filer(user.username):
self.collect_bad_accounts(user.username)
return
welcome_text = self.welcome_text
- if globalvar.randomSign:
+ if globalvar.random_sign:
if self.site.family.name != 'wikinews':
- welcome_text = welcome_text % choice(self.defineSign())
+ welcome_text = welcome_text % choice(self.define_sign())
if self.site.sitename != 'wiktionary:it':
welcome_text += timeselected
elif self.site.sitename != 'wikinews:it':
- welcome_text = welcome_text % globalvar.defaultSign
+ welcome_text = welcome_text % globalvar.default_sign
final_text = i18n.translate(self.site, final_new_text_additions)
if final_text:
@@ -839,7 +840,7 @@
self.welcomed_users.append(user)
welcomed_count = len(self.welcomed_users)
- if globalvar.makeWelcomeLog:
+ if globalvar.make_welcome_log:
self.show_status(Msg.DONE)
if welcomed_count == 0:
count = 'No users have'
@@ -849,13 +850,13 @@
count = '{} users have'.format(welcomed_count)
pywikibot.output(count + ' been welcomed.')
- if welcomed_count >= globalvar.dumpToLog:
+ if welcomed_count >= globalvar.dump_to_log:
self.makelogpage()
def write_log(self):
"""Write logfile."""
welcomed_count = len(self.welcomed_users)
- if globalvar.makeWelcomeLog and welcomed_count > 0:
+ if globalvar.make_welcome_log and welcomed_count > 0:
self.show_status()
if welcomed_count == 1:
pywikibot.output('Putting the log of the latest user...')
@@ -886,7 +887,7 @@
self.makelogpage()
# If there is the savedata, the script must save the number_user.
- if globalvar.randomSign and globalvar.saveSignIndex \
+ if globalvar.random_sign and globalvar.save_sign_index \
and self.welcomed_users:
# Filename and Pywikibot path
# file where is stored the random signature index
@@ -938,19 +939,19 @@
mapping = {
# option: (attribute, value),
'-break': ('recursive', False),
- '-nlog': ('makeWelcomeLog', False),
+ '-nlog': ('make_welcome_log', False),
'-ask': ('confirm', True),
- '-filter': ('filtBadName', True),
- '-savedata': ('saveSignIndex', True),
- '-random': ('randomSign', True),
- '-sul': ('welcomeAuto', True),
+ '-filter': ('filt_bad_name', True),
+ '-savedata': ('save_sign_index', True),
+ '-random': ('random_sign', True),
+ '-sul': ('welcome_auto', True),
'-quiet': ('quiet', True),
}
for arg in pywikibot.handle_args(args):
arg, _, val = arg.partition(':')
if arg == '-edit':
- globalvar.attachEditCount = int(
+ globalvar.attach_edit_count = int(
val if val.isdigit() else pywikibot.input(
'After how many edits would you like to welcome new users?'
' (0 is allowed)'))
@@ -960,27 +961,27 @@
'Which time offset (in minutes) for new users would you '
'like to use?'))
elif arg == '-time':
- globalvar.timeRecur = int(
+ globalvar.time_recur = int(
val if val.isdigit() else pywikibot.input(
'For how many seconds would you like to bot to sleep '
'before checking again?'))
elif arg == '-offset':
_handle_offset(val)
elif arg == '-file':
- globalvar.randomSign = True
- globalvar.signFileName = val or pywikibot.input(
+ globalvar.random_sign = True
+ globalvar.sign_file_name = val or pywikibot.input(
'Where have you saved your signatures?')
elif arg == '-sign':
- globalvar.defaultSign = val or pywikibot.input(
+ globalvar.default_sign = val or pywikibot.input(
'Which signature to use?')
- globalvar.defaultSign += timeselected
+ globalvar.default_sign += timeselected
elif arg == '-limit':
- globalvar.queryLimit = int(
+ globalvar.query_limit = int(
val if val.isdigit() else pywikibot.input(
'How many of the latest new users would you like to '
'load?'))
elif arg == '-numberlog':
- globalvar.dumpToLog = int(
+ globalvar.dump_to_log = int(
val if val.isdigit() else pywikibot.input(
'After how many welcomed users would you like to update '
'the welcome log?'))
diff --git a/tox.ini b/tox.ini
index 5c9e621..1e97c8a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -190,7 +190,6 @@
scripts/solve_disambiguation.py: N802, N806
scripts/templatecount.py: N802
scripts/weblinkchecker.py: N802, N803, N806
- scripts/welcome.py: N802, N815
setup.py: T001
tests/archive/*: N802
tests/api_tests.py: N802
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/752050
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I7f291e25dd70813b689c691a4b277053dd8faa9a
Gerrit-Change-Number: 752050
Gerrit-PatchSet: 1
Gerrit-Owner: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/752131 )
Change subject: [IMPR] Show a warning if a pagegenerator option is not enabled
......................................................................
[IMPR] Show a warning if a pagegenerator option is not enabled
This reverts commit 6ec9e1308df9ea4abfa1f2e4a47f555f13681922.
Change-Id: I515c2d3254a897ba9ea6350168ffffbeed8c65d7
---
M scripts/category.py
1 file changed, 16 insertions(+), 12 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/category.py b/scripts/category.py
index e6523d1..681c9de 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -23,9 +23,6 @@
-create - If a page doesn't exist, do not skip it, create it instead.
-redirect - Follow redirects.
-If action is "add", the following options are supported:
-
-¶ms;
Options for "listify" action:
@@ -94,6 +91,10 @@
-depth: - The max depth limit beyond which no subcategories will be
listed.
+If action is "add", the following additional options are supported:
+
+¶ms;
+
For the actions tidy and tree, the bot will store the category structure
locally in category.dump. This saves time and server load, but if it uses
these data later, they may be outdated; use the -rebuild parameter in this
@@ -112,7 +113,7 @@
This will move all pages in the category US to the category United States.
"""
#
-# (C) Pywikibot team, 2004-2021
+# (C) Pywikibot team, 2004-2022
#
# Distributed under the terms of the MIT license.
#
@@ -1347,7 +1348,6 @@
# Process global args and prepare generator args parser
local_args = pywikibot.handle_args(args)
- gen_factory = pagegenerators.GeneratorFactory()
# When this is True then the custom edit summary given for removing
# categories from articles will also be used as the deletion reason.
@@ -1359,6 +1359,7 @@
follow_redirects = False
delete_empty_cat = True
unknown = []
+ pg_options = []
for arg in local_args:
if arg in ('add', 'remove', 'move', 'tidy', 'tree', 'listify'):
action = arg
@@ -1423,17 +1424,23 @@
elif option == 'prefix':
prefix = value
else:
- gen_factory.handle_arg(arg)
+ pg_options.append(arg)
- bot = None # type: Optional[BaseBot]
+ enabled = ['namespace'] if action in ('tidy', 'listify') else None
+ if action in ('add', 'listify', 'tidy'):
+ gen_factory = pagegenerators.GeneratorFactory(enabled_options=enabled)
+ unknown += gen_factory.handle_args(pg_options)
+ else:
+ unknown += pg_options
+ suggest_help(unknown_parameters=unknown)
cat_db = CategoryDatabase(rebuild=rebuild)
- gen = gen_factory.getCombinedGenerator()
if action == 'add':
if not to_given:
new_cat_title = pywikibot.input(
'Category to add (do not give namespace):')
+ gen = gen_factory.getCombinedGenerator()
if not gen:
# default for backwards compatibility
gen_factory.handle_arg('-links')
@@ -1510,9 +1517,8 @@
prefix=prefix,
namespaces=gen_factory.namespaces)
- if bot:
+ if not suggest_help(missing_action=not action):
pywikibot.Site().login()
- suggest_help(unknown_parameters=unknown)
try:
bot.run()
except Error:
@@ -1520,8 +1526,6 @@
finally:
if cat_db:
cat_db.dump()
- else:
- suggest_help(missing_action=True, unknown_parameters=unknown)
if __name__ == '__main__':
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/752131
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I515c2d3254a897ba9ea6350168ffffbeed8c65d7
Gerrit-Change-Number: 752131
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/752009 )
Change subject: Revert "[IMPR] Show a warning if a pagegenerator option is not enabled"
......................................................................
Revert "[IMPR] Show a warning if a pagegenerator option is not enabled"
This reverts commit 5c2715dadca6e44ae5532d5fe6fc6d47baaf9df0.
Reason for revert: failing tests
Change-Id: Ibf92d9477d9462ad8479cb5725572fc332843afb
---
M scripts/category.py
1 file changed, 12 insertions(+), 16 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/category.py b/scripts/category.py
index a14b477..e6523d1 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -23,6 +23,9 @@
-create - If a page doesn't exist, do not skip it, create it instead.
-redirect - Follow redirects.
+If action is "add", the following options are supported:
+
+¶ms;
Options for "listify" action:
@@ -91,10 +94,6 @@
-depth: - The max depth limit beyond which no subcategories will be
listed.
-If action is "add", the following additional options are supported:
-
-¶ms;
-
For the actions tidy and tree, the bot will store the category structure
locally in category.dump. This saves time and server load, but if it uses
these data later, they may be outdated; use the -rebuild parameter in this
@@ -113,7 +112,7 @@
This will move all pages in the category US to the category United States.
"""
#
-# (C) Pywikibot team, 2004-2022
+# (C) Pywikibot team, 2004-2021
#
# Distributed under the terms of the MIT license.
#
@@ -1348,6 +1347,7 @@
# Process global args and prepare generator args parser
local_args = pywikibot.handle_args(args)
+ gen_factory = pagegenerators.GeneratorFactory()
# When this is True then the custom edit summary given for removing
# categories from articles will also be used as the deletion reason.
@@ -1359,7 +1359,6 @@
follow_redirects = False
delete_empty_cat = True
unknown = []
- pg_options = []
for arg in local_args:
if arg in ('add', 'remove', 'move', 'tidy', 'tree', 'listify'):
action = arg
@@ -1424,23 +1423,17 @@
elif option == 'prefix':
prefix = value
else:
- pg_options.append(arg)
+ gen_factory.handle_arg(arg)
- enabled = ['namespace'] if action in ('tidy', 'listify') else None
- if action in ('add', 'listify', 'tidy'):
- gen_factory = pagegenerators.GeneratorFactory(enabled_options=enabled)
- unknown += gen_factory.handle_args(pg_options)
- else:
- unknown += pg_options
- suggest_help(unknown_parameters=unknown)
+ bot = None # type: Optional[BaseBot]
cat_db = CategoryDatabase(rebuild=rebuild)
+ gen = gen_factory.getCombinedGenerator()
if action == 'add':
if not to_given:
new_cat_title = pywikibot.input(
'Category to add (do not give namespace):')
- gen = gen_factory.getCombinedGenerator()
if not gen:
# default for backwards compatibility
gen_factory.handle_arg('-links')
@@ -1517,8 +1510,9 @@
prefix=prefix,
namespaces=gen_factory.namespaces)
- if suggest_help(missing_action=True):
+ if bot:
pywikibot.Site().login()
+ suggest_help(unknown_parameters=unknown)
try:
bot.run()
except Error:
@@ -1526,6 +1520,8 @@
finally:
if cat_db:
cat_db.dump()
+ else:
+ suggest_help(missing_action=True, unknown_parameters=unknown)
if __name__ == '__main__':
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/752009
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ibf92d9477d9462ad8479cb5725572fc332843afb
Gerrit-Change-Number: 752009
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Arjunaraoc <arjunaraoc(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-CC: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/751733 )
Change subject: [IMPR] Show a warning if a pagegenerator option is not enabled
......................................................................
[IMPR] Show a warning if a pagegenerator option is not enabled
Bug: T298522
Change-Id: Ib38b0b01a75330279728f9953bc5bbb8b93cfa04
---
M scripts/category.py
1 file changed, 16 insertions(+), 12 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/category.py b/scripts/category.py
index e6523d1..a14b477 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -23,9 +23,6 @@
-create - If a page doesn't exist, do not skip it, create it instead.
-redirect - Follow redirects.
-If action is "add", the following options are supported:
-
-&params;
Options for "listify" action:
@@ -94,6 +91,10 @@
-depth: - The max depth limit beyond which no subcategories will be
listed.
+If action is "add", the following additional options are supported:
+
+&params;
+
For the actions tidy and tree, the bot will store the category structure
locally in category.dump. This saves time and server load, but if it uses
these data later, they may be outdated; use the -rebuild parameter in this
@@ -112,7 +113,7 @@
This will move all pages in the category US to the category United States.
"""
#
-# (C) Pywikibot team, 2004-2021
+# (C) Pywikibot team, 2004-2022
#
# Distributed under the terms of the MIT license.
#
@@ -1347,7 +1348,6 @@
# Process global args and prepare generator args parser
local_args = pywikibot.handle_args(args)
- gen_factory = pagegenerators.GeneratorFactory()
# When this is True then the custom edit summary given for removing
# categories from articles will also be used as the deletion reason.
@@ -1359,6 +1359,7 @@
follow_redirects = False
delete_empty_cat = True
unknown = []
+ pg_options = []
for arg in local_args:
if arg in ('add', 'remove', 'move', 'tidy', 'tree', 'listify'):
action = arg
@@ -1423,17 +1424,23 @@
elif option == 'prefix':
prefix = value
else:
- gen_factory.handle_arg(arg)
+ pg_options.append(arg)
- bot = None # type: Optional[BaseBot]
+ enabled = ['namespace'] if action in ('tidy', 'listify') else None
+ if action in ('add', 'listify', 'tidy'):
+ gen_factory = pagegenerators.GeneratorFactory(enabled_options=enabled)
+ unknown += gen_factory.handle_args(pg_options)
+ else:
+ unknown += pg_options
+ suggest_help(unknown_parameters=unknown)
cat_db = CategoryDatabase(rebuild=rebuild)
- gen = gen_factory.getCombinedGenerator()
if action == 'add':
if not to_given:
new_cat_title = pywikibot.input(
'Category to add (do not give namespace):')
+ gen = gen_factory.getCombinedGenerator()
if not gen:
# default for backwards compatibility
gen_factory.handle_arg('-links')
@@ -1510,9 +1517,8 @@
prefix=prefix,
namespaces=gen_factory.namespaces)
- if bot:
+ if suggest_help(missing_action=True):
pywikibot.Site().login()
- suggest_help(unknown_parameters=unknown)
try:
bot.run()
except Error:
@@ -1520,8 +1526,6 @@
finally:
if cat_db:
cat_db.dump()
- else:
- suggest_help(missing_action=True, unknown_parameters=unknown)
if __name__ == '__main__':
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/751733
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ib38b0b01a75330279728f9953bc5bbb8b93cfa04
Gerrit-Change-Number: 751733
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Arjunaraoc <arjunaraoc(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-CC: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/749160 )
Change subject: [bugfix] Allow title placeholder $1 in the middle of an url
......................................................................
[bugfix] Allow title placeholder $1 in the middle of an url
- add a new articlepath property which gives a nice article path with
placeholder where $1 is replaced by {} for format string usage
- deprecate the old article_path property
- yield urls with placeholder as format strings
BaseSite._interwiki_urls generator
- update Family.from_url method to ignore placeholder
- update Page.full_url to format the full article path
- update fixSyntaxSave method of CosmeticChangesToolkit
- Remove workaround patches in pagegenerators.py, fixing_redirects.py
solve_disambiguation.py
- update family_tests.py
- update cosmetic_changes_tests.py
Bug: T298078
Bug: T111513
Change-Id: I03387845ee37f378f09359f32371e0581462467b
---
M pywikibot/cosmetic_changes.py
M pywikibot/family.py
M pywikibot/page/__init__.py
M pywikibot/pagegenerators.py
M pywikibot/site/_apisite.py
M pywikibot/site/_basesite.py
M scripts/fixing_redirects.py
M scripts/solve_disambiguation.py
M tests/cosmetic_changes_tests.py
M tests/family_tests.py
10 files changed, 217 insertions(+), 172 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index 2f674f7..b3bb6f8 100755
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -51,7 +51,7 @@
'your_script_name_2']
"""
#
-# (C) Pywikibot team, 2006-2021
+# (C) Pywikibot team, 2006-2022
#
# Distributed under the terms of the MIT license.
#
@@ -59,6 +59,7 @@
from enum import IntEnum
from typing import Any, Union
+from urllib.parse import urlparse, urlunparse
import pywikibot
from pywikibot import textlib
@@ -519,11 +520,7 @@
trailingChars = match.group('linktrail')
newline = match.group('newline')
- try:
- is_interwiki = self.site.isInterwikiLink(titleWithSection)
- except ValueError: # T111513
- is_interwiki = True
-
+ is_interwiki = self.site.isInterwikiLink(titleWithSection)
if is_interwiki:
return match.group()
@@ -826,6 +823,7 @@
exceptions = ['comment', 'math', 'nowiki', 'pre', 'startspace',
'syntaxhighlight']
+
# link to the wiki working on
# Only use suffixes for article paths
for suffix in self.site._interwiki_urls(True):
@@ -834,31 +832,43 @@
https_url = None
else:
https_url = self.site.base_url(suffix, 'https')
+
# compare strings without the protocol, if they are empty support
# also no prefix (//en.wikipedia.org/…)
- if https_url is not None and http_url[4:] == https_url[5:]:
- urls = ['(?:https?:)?' + re.escape(http_url[5:])]
+ http = urlparse(http_url)
+ https = urlparse(https_url)
+ if https_url is not None and http.netloc == https.netloc:
+ urls = ['(?:https?:)?'
+ + re.escape(urlunparse(('', *http[1:])))]
else:
urls = [re.escape(url) for url in (http_url, https_url)
if url is not None]
+
for url in urls:
- # Only include links which don't include the separator as
- # the wikilink won't support additional parameters
- separator = '?'
- if '?' in suffix:
- separator += '&'
+ # unescape {} placeholder
+ url = url.replace(r'\{\}', '{title}')
+
+ # Only include links which don't include the separator
+ # as the wikilink won't support additional parameters
+ separator = '?&' if '?' in suffix else '?'
+
# Match first a non space in the title to prevent that multiple
# spaces at the end without title will be matched by it
+ title_regex = (r'(?P<link>[^{sep}]+?)'
+ r'(\s+(?P<title>[^\s].*?))'
+ .format(sep=separator))
+ url_regex = r'\[\[?{url}?\s*\]\]?'.format(url=url)
text = textlib.replaceExcept(
text,
- r'\[\[?' + url + r'(?P<link>[^' + separator + r']+?)'
- r'(\s+(?P<title>[^\s].*?))?\s*\]\]?',
+ url_regex.format(title=title_regex),
replace_link, exceptions, site=self.site)
+
# external link in/starting with double brackets
text = textlib.replaceExcept(
text,
r'\[\[(?P<url>https?://[^\]]+?)\]\]?',
r'[\g<url>]', exceptions, site=self.site)
+
# external link and description separated by a pipe, with
# whitespace in front of the pipe, so that it is clear that
# the dash is not a legitimate part of the URL.
@@ -866,6 +876,7 @@
text,
r'\[(?P<url>https?://[^\|\] \r\n]+?) +\| *(?P<label>[^\|\]]+?)\]',
r'[\g<url> \g<label>]', exceptions)
+
# dash in external link, where the correct end of the URL can
# be detected from the file extension. It is very unlikely that
# this will cause mistakes.
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 56a0ad5..e20d90f 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -1,6 +1,6 @@
"""Objects representing MediaWiki families."""
#
-# (C) Pywikibot team, 2004-2021
+# (C) Pywikibot team, 2004-2022
#
# Distributed under the terms of the MIT license.
#
@@ -821,12 +821,12 @@
return config.site_interface
def from_url(self, url: str) -> Optional[str]:
- """
- Return whether this family matches the given url.
+ """Return whether this family matches the given url.
It is first checking if a domain of this family is in the domain of
the URL. If that is the case it's checking all codes and verifies that
- a path generated via :py:obj:`APISite.article_path` and
+ a path generated via
+ :py:obj:`APISite.articlepath<pywikibot.site.APISite.articlepath>` and
:py:obj:`Family.path` matches the path of the URL together with
the hostname for that code.
@@ -835,13 +835,11 @@
determine which code applies.
:param url: the URL which may contain a ``$1``. If it's missing it is
- assumed to be at the end and if it's present nothing is allowed
- after it.
+ assumed to be at the end.
:return: The language code of the url. None if that url is not from
this family.
:raises RuntimeError: When there are multiple languages in this family
which would work with the given URL.
- :raises ValueError: When text is present after $1.
"""
parsed = urlparse.urlparse(url)
if not re.match('(https?)?$', parsed.scheme):
@@ -852,10 +850,7 @@
path += '?' + parsed.query
# Discard $1 and everything after it
- path, _, suffix = path.partition('$1')
- if suffix:
- raise ValueError('Url: {}\nText {} after the $1 placeholder is '
- 'not supported (T111513).'.format(url, suffix))
+ path, *_ = path.partition('$1')
for domain in self.domains:
if domain in parsed.netloc:
@@ -875,6 +870,7 @@
pywikibot.log('Found candidate {}'.format(site))
for iw_url in site._interwiki_urls():
+ iw_url, *_ = iw_url.partition('{}')
if path.startswith(iw_url):
matched_sites.add(site)
break
diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index 346698d..bd13ca7 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -385,8 +385,8 @@
def full_url(self):
"""Return the full URL."""
- return self.site.base_url(self.site.article_path
- + self.title(as_url=True))
+ return self.site.base_url(
+ self.site.articlepath.format(self.title(as_url=True)))
def autoFormat(self):
"""
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 54653cc..84f2643 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -2764,17 +2764,20 @@
yield from google.search(query)
def __iter__(self):
- """Iterate results."""
+ """Iterate results.
+
+ Google contains links in the format:
+ https://de.wikipedia.org/wiki/en:Foobar
+ """
# restrict query to local site
localQuery = '{} site:{}'.format(self.query, self.site.hostname())
base = 'http://{}{}'.format(self.site.hostname(),
- self.site.article_path)
+ self.site.articlepath)
+ pattern = base.replace('{}', '(.+)')
for url in self.queryGoogle(localQuery):
- if url[:len(base)] == base:
- title = url[len(base):]
- page = pywikibot.Page(pywikibot.Link(title, self.site))
- # Google contains links in the format
- # https://de.wikipedia.org/wiki/en:Foobar
+ m = re.search(pattern, url)
+ if m:
+ page = pywikibot.Page(pywikibot.Link(m.group(1), self.site))
if page.site == self.site:
yield page
diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py
index 74b45cd..eecca96 100644
--- a/pywikibot/site/_apisite.py
+++ b/pywikibot/site/_apisite.py
@@ -1,6 +1,6 @@
"""Objects representing API interface to MediaWiki site."""
#
-# (C) Pywikibot team, 2008-2021
+# (C) Pywikibot team, 2008-2022
#
# Distributed under the terms of the MIT license.
#
@@ -624,12 +624,26 @@
in ['1', True]}
@property
+ @deprecated('articlepath', since='7.0.0')
def article_path(self):
- """Get the nice article path without $1."""
- # Assert and remove the trailing $1 and assert that it'll end in /
- assert self.siteinfo['general']['articlepath'].endswith('/$1'), \
- 'articlepath must end with /$1'
- return self.siteinfo['general']['articlepath'][:-2]
+ """Get the nice article path without $1.
+
+ .. deprecated:: 7.0
+ Replaced by :py:meth:`articlepath`
+ """
+ return self.articlepath[:-2]
+
+ @property
+ def articlepath(self):
+ """Get the nice article path with placeholder.
+
+ .. versionadded:: 7.0
+ Replaces :py:meth:`article_path`
+ """
+ # Assert $1 placeholder is present
+ path = self.siteinfo['general']['articlepath']
+ assert '$1' in path, 'articlepath must contain "$1" placeholder'
+ return path.replace('$1', '{}')
@staticmethod
def assert_valid_iter_params(msg_prefix, start, end, reverse,
diff --git a/pywikibot/site/_basesite.py b/pywikibot/site/_basesite.py
index 8856d35..db86186 100644
--- a/pywikibot/site/_basesite.py
+++ b/pywikibot/site/_basesite.py
@@ -1,6 +1,6 @@
"""Objects with site methods independent of the communication interface."""
#
-# (C) Pywikibot team, 2008-2021
+# (C) Pywikibot team, 2008-2022
#
# Distributed under the terms of the MIT license.
#
@@ -233,10 +233,10 @@
def _interwiki_urls(self, only_article_suffixes=False):
base_path = self.path()
if not only_article_suffixes:
- yield base_path
- yield base_path + '/'
- yield base_path + '?title='
- yield self.article_path
+ yield base_path + '{}'
+ yield base_path + '/{}'
+ yield base_path + '?title={}'
+ yield self.articlepath
def _build_namespaces(self):
"""Create default namespaces."""
diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py
index 6fc4ef1..42e0df7 100755
--- a/scripts/fixing_redirects.py
+++ b/scripts/fixing_redirects.py
@@ -77,12 +77,9 @@
break
# Make sure that next time around we will not find this same hit.
curpos = m.start() + 1
- # T283403
- try:
- is_interwikilink = mysite.isInterwikiLink(m.group('title'))
- except ValueError:
- pywikibot.exception()
- continue
+
+ is_interwikilink = mysite.isInterwikiLink(m.group('title'))
+
# ignore interwiki links, links in the disabled area
# and links to sections of the same page
if (m.group('title').strip() == ''
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index 16506b1..fac2821 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -899,7 +899,7 @@
foundlink = pywikibot.Link(m.group('title'),
disamb_page.site)
foundlink.parse()
- except (Error, ValueError): # T111513
+ except Error:
continue
# ignore interwiki links
diff --git a/tests/cosmetic_changes_tests.py b/tests/cosmetic_changes_tests.py
index 61d1996..b0f8fed 100644
--- a/tests/cosmetic_changes_tests.py
+++ b/tests/cosmetic_changes_tests.py
@@ -1,6 +1,6 @@
"""Test cosmetic_changes module."""
#
-# (C) Pywikibot team, 2015-2021
+# (C) Pywikibot team, 2015-2022
#
# Distributed under the terms of the MIT license.
#
@@ -9,6 +9,8 @@
from pywikibot import Page
from pywikibot.cosmetic_changes import CosmeticChangesToolkit
+from pywikibot.site._namespace import NamespacesDict
+
from tests.aspects import TestCase
@@ -22,7 +24,7 @@
@classmethod
def setUpClass(cls):
"""Setup class for all tests."""
- super(TestCosmeticChanges, cls).setUpClass()
+ super().setUpClass()
cls.cct = CosmeticChangesToolkit(Page(cls.site, 'Test'))
@@ -137,100 +139,6 @@
'{{Quellen_fehlen|foo}}'
))
- def test_fixSyntaxSave(self):
- """Test fixSyntaxSave method."""
- # necessary as the fixer needs the article path to fix it
- self.cct.site._siteinfo._cache['general'] = (
- {'articlepath': '/wiki/$1'}, True)
- self.cct.site._namespaces = {
- 6: ['Datei', 'File'],
- 14: ['Kategorie', 'Category'],
- }
- self.assertEqual(
- '[[Example|Page]]\n[[Example|Page]]\n[[Example|Page]]\n'
- '[[Example]]\n[[Example]]\n[[Example]]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example&'
- 'oldid=68181978 Page]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example&'
- 'oldid=68181978&diff=next Page]\n'
- '[https://en.wikipedia.org/w/index.php?title=Example]\n'
- '[https://de.wiktionary.org/w/index.php?title=Example]\n',
- self.cct.fixSyntaxSave(
- '[https://de.wikipedia.org/w/index.php?title=Example Page]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example Page ]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example Page ]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example ]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example ]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example&'
- 'oldid=68181978 Page]\n'
- '[https://de.wikipedia.org/w/index.php?title=Example&'
- 'oldid=68181978&diff=next Page]\n'
- '[https://en.wikipedia.org/w/index.php?title=Example]\n'
- '[https://de.wiktionary.org/w/index.php?title=Example]\n'
- ))
- self.assertEqual(
- '[[Example]]\n[[Example]]\n[[Example]]\n'
- '[https://de.wikipedia.org/wiki/Example?oldid=68181978 Page]\n'
- '[https://de.wikipedia.org/wiki/Example?'
- 'oldid=68181978&diff=next Page]\n'
- '[[Example]]\n[[Example]]\n[[Example]]\n'
- '[https://de.wikipedia.org/w/index.php/Example?'
- 'oldid=68181978 Page]\n'
- '[https://de.wikipedia.org/w/index.php/Example?'
- 'oldid=68181978&diff=next Page]\n'
- '[[&]]\n[[&]]\n',
- self.cct.fixSyntaxSave(
- '[https://de.wikipedia.org/wiki/Example]\n'
- '[https://de.wikipedia.org/wiki/Example ]\n'
- '[https://de.wikipedia.org/wiki/Example ]\n'
- '[https://de.wikipedia.org/wiki/Example?oldid=68181978 Page]\n'
- '[https://de.wikipedia.org/wiki/Example?'
- 'oldid=68181978&diff=next Page]\n'
- '[https://de.wikipedia.org/w/index.php/Example]\n'
- '[https://de.wikipedia.org/w/index.php/Example ]\n'
- '[https://de.wikipedia.org/w/index.php/Example ]\n'
- '[https://de.wikipedia.org/w/index.php/Example?'
- 'oldid=68181978 Page]\n'
- '[https://de.wikipedia.org/w/index.php/Example?'
- 'oldid=68181978&diff=next Page]\n'
- '[https://de.wikipedia.org/wiki/&]\n'
- '[https://de.wikipedia.org/w/index.php/&]\n'
- ))
- self.assertEqual(
- '[https://de.wikipedia.org]',
- self.cct.fixSyntaxSave('[[https://de.wikipedia.org]]'))
- self.assertEqual(
- '[https://de.wikipedia.org]',
- self.cct.fixSyntaxSave('[[https://de.wikipedia.org]'))
- self.assertEqual(
- '[https://de.wikipedia.org/w/api.php API]',
- self.cct.fixSyntaxSave('[https://de.wikipedia.org/w/api.php|API]'))
- self.assertEqual(
- '[[:Kategorie:Example]]\n'
- '[[:Category:Example|Description]]\n'
- '[[:Datei:Example.svg]]\n'
- '[[:File:Example.svg|Description]]\n'
- '[[:Category:Example]]\n'
- '[[:Kategorie:Example|Description]]\n'
- '[[:File:Example.svg]]\n'
- '[[:Datei:Example.svg|Description]]\n',
- self.cct.fixSyntaxSave(
- '[https://de.wikipedia.org/wiki/Kategorie:Example]\n'
- '[https://de.wikipedia.org/wiki/Category:Example '
- 'Description]\n'
- '[https://de.wikipedia.org/wiki/Datei:Example.svg]\n'
- '[https://de.wikipedia.org/wiki/File:Example.svg '
- 'Description]\n'
- '[[https://de.wikipedia.org/wiki/Category:Example]]\n'
- '[[https://de.wikipedia.org/wiki/Kategorie:Example '
- 'Description]]\n'
- '[[https://de.wikipedia.org/wiki/File:Example.svg]]\n'
- '[[https://de.wikipedia.org/wiki/Datei:Example.svg '
- 'Description]]\n'
- ))
- del self.cct.site._namespaces
-
def test_fixHtml(self):
"""Test fixHtml method."""
self.assertEqual("'''Foo''' bar",
@@ -273,6 +181,126 @@
self.assertEqual(text, self.cct.fixArabicLetters(text))
+class TestDryFixSyntaxSave(TestCosmeticChanges):
+
+ """Test fixSyntaxSave not requiring a live wiki."""
+
+ dry = True
+
+ @classmethod
+ def setUpClass(cls):
+ """Setup class for all tests."""
+ super().setUpClass()
+ cls.cct.site._siteinfo._cache['general'] = (
+ {'articlepath': '/wiki/$1'}, True)
+ cls.cct.site._namespaces = NamespacesDict({
+ 6: ['Datei', 'File'],
+ 14: ['Kategorie', 'Category'],
+ })
+
+ def test_title_param(self):
+ """Test fixing url with title parameter."""
+ # necessary as the fixer needs the article path to fix it
+ self.assertEqual(
+ '[[Example|Page]]\n[[Example|Page]]\n[[Example|Page]]\n'
+ '[[Example]]\n[[Example]]\n[[Example]]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example&'
+ 'oldid=68181978 Page]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example&'
+ 'oldid=68181978&diff=next Page]\n'
+ '[https://en.wikipedia.org/w/index.php?title=Example]\n'
+ '[https://de.wiktionary.org/w/index.php?title=Example]\n',
+ self.cct.fixSyntaxSave(
+ '[https://de.wikipedia.org/w/index.php?title=Example Page]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example Page ]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example Page ]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example ]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example ]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example&'
+ 'oldid=68181978 Page]\n'
+ '[https://de.wikipedia.org/w/index.php?title=Example&'
+ 'oldid=68181978&diff=next Page]\n'
+ '[https://en.wikipedia.org/w/index.php?title=Example]\n'
+ '[https://de.wiktionary.org/w/index.php?title=Example]\n'
+ ))
+
+ def test_fix_url(self):
+ """Test fixing urls."""
+ self.assertEqual(
+ '[[Example]]\n[[Example]]\n[[Example]]\n'
+ '[https://de.wikipedia.org/wiki/Example?oldid=68181978 Page]\n'
+ '[https://de.wikipedia.org/wiki/Example?'
+ 'oldid=68181978&diff=next Page]\n'
+ '[[Example]]\n[[Example]]\n[[Example]]\n'
+ '[https://de.wikipedia.org/w/index.php/Example?'
+ 'oldid=68181978 Page]\n'
+ '[https://de.wikipedia.org/w/index.php/Example?'
+ 'oldid=68181978&diff=next Page]\n'
+ '[[&]]\n[[&]]\n',
+ self.cct.fixSyntaxSave(
+ '[https://de.wikipedia.org/wiki/Example]\n'
+ '[https://de.wikipedia.org/wiki/Example ]\n'
+ '[https://de.wikipedia.org/wiki/Example ]\n'
+ '[https://de.wikipedia.org/wiki/Example?oldid=68181978 Page]\n'
+ '[https://de.wikipedia.org/wiki/Example?'
+ 'oldid=68181978&diff=next Page]\n'
+ '[https://de.wikipedia.org/w/index.php/Example]\n'
+ '[https://de.wikipedia.org/w/index.php/Example ]\n'
+ '[https://de.wikipedia.org/w/index.php/Example ]\n'
+ '[https://de.wikipedia.org/w/index.php/Example?'
+ 'oldid=68181978 Page]\n'
+ '[https://de.wikipedia.org/w/index.php/Example?'
+ 'oldid=68181978&diff=next Page]\n'
+ '[https://de.wikipedia.org/wiki/&]\n'
+ '[https://de.wikipedia.org/w/index.php/&]\n'
+ ))
+
+ def test_fix_brackets(self):
+ """Test fixing brackets."""
+ self.assertEqual(
+ '[https://de.wikipedia.org]',
+ self.cct.fixSyntaxSave('[[https://de.wikipedia.org]]'))
+
+ def test_fix_missing_bracket(self):
+ """Test fixing missing bracket."""
+ self.assertEqual(
+ '[https://de.wikipedia.org]',
+ self.cct.fixSyntaxSave('[[https://de.wikipedia.org]'))
+
+ def test_fix_link_text(self):
+ """Test fixing link text."""
+ self.assertEqual(
+ '[https://de.wikipedia.org/w/api.php API]',
+ self.cct.fixSyntaxSave('[https://de.wikipedia.org/w/api.php|API]'))
+
+ def test_fix_files_and_categories(self):
+ """Test files and categories fix."""
+ self.assertEqual(
+ '[[:Kategorie:Example]]\n'
+ '[[:Category:Example|Description]]\n'
+ '[[:Datei:Example.svg]]\n'
+ '[[:File:Example.svg|Description]]\n'
+ '[[:Category:Example]]\n'
+ '[[:Kategorie:Example|Description]]\n'
+ '[[:File:Example.svg]]\n'
+ '[[:Datei:Example.svg|Description]]\n',
+ self.cct.fixSyntaxSave(
+ '[https://de.wikipedia.org/wiki/Kategorie:Example]\n'
+ '[https://de.wikipedia.org/wiki/Category:Example '
+ 'Description]\n'
+ '[https://de.wikipedia.org/wiki/Datei:Example.svg]\n'
+ '[https://de.wikipedia.org/wiki/File:Example.svg '
+ 'Description]\n'
+ '[[https://de.wikipedia.org/wiki/Category:Example]]\n'
+ '[[https://de.wikipedia.org/wiki/Kategorie:Example '
+ 'Description]]\n'
+ '[[https://de.wikipedia.org/wiki/File:Example.svg]]\n'
+ '[[https://de.wikipedia.org/wiki/Datei:Example.svg '
+ 'Description]]\n'
+ ))
+
+
class TestLiveCosmeticChanges(TestCosmeticChanges):
"""Test cosmetic_changes requiring a live wiki."""
diff --git a/tests/family_tests.py b/tests/family_tests.py
index 6f5cb63..bcaea49 100644
--- a/tests/family_tests.py
+++ b/tests/family_tests.py
@@ -1,6 +1,6 @@
"""Tests for the family module."""
#
-# (C) Pywikibot team, 2014-2021
+# (C) Pywikibot team, 2014-2022
#
# Distributed under the terms of the MIT license.
#
@@ -160,14 +160,14 @@
self.assertEqual(code, self.current_code)
self.assertEqual(fam, self.current_family)
site = DrySite(code, fam, None)
- site._siteinfo._cache['general'] = ({'articlepath': self.article_path},
+ site._siteinfo._cache['general'] = ({'articlepath': self.articlepath},
True)
return site
def setUp(self):
"""Setup default article path."""
super().setUp()
- self.article_path = '/wiki/$1'
+ self.articlepath = '/wiki/$1'
def test_from_url_wikipedia_extra(self):
"""Test various URLs against wikipedia regex."""
@@ -191,12 +191,8 @@
self.assertEqual(f.from_url(prefix + '/wiki/Main_page'), 'vo')
self.assertEqual(f.from_url(prefix + '/w/index.php?title=Foo'), 'vo')
- # Text after $1 is not allowed
- with self.assertRaisesRegex(
- ValueError,
- 'Url: .+\nText /foo after the '
- r'\$1 placeholder is not supported \(T111513\)\.'):
- f.from_url('//vo.wikipedia.org/wiki/$1/foo')
+ # Text after $1 is allowed
+ self.assertEqual(f.from_url('//vo.wikipedia.org/wiki/$1/foo'), 'vo')
# the IWM may contain the wrong protocol, but it's only used to
# determine a site so using HTTP or HTTPS is not an issue
@@ -216,20 +212,20 @@
def test_each_family(self):
"""Test each family builds a working regex."""
for family in pywikibot.config.family_files:
- with self.subTest(family=family):
- if family == 'wowwiki':
- self.skipTest(
- 'Family.from_url() does not work for {} (T215077)'
- .format(family))
- self.current_family = family
- family = Family.load(family)
- for code in family.codes:
- self.current_code = code
- url = ('{}://{}{}/$1'.format(family.protocol(code),
- family.hostname(code),
- family.path(code)))
- # Families can switch off if they want to be detected using
- # URL. This applies for test:test (there is test:wikipedia)
+ if family == 'wowwiki':
+ self.skipTest(
+ 'Family.from_url() does not work for {} (T215077)'
+ .format(family))
+ self.current_family = family
+ family = Family.load(family)
+ for code in family.codes:
+ self.current_code = code
+ url = ('{}://{}{}/$1'.format(family.protocol(code),
+ family.hostname(code),
+ family.path(code)))
+ # Families can switch off if they want to be detected using
+ # URL. This applies for test:test (there is test:wikipedia)
+ with self.subTest(url=url):
self.assertEqual(family.from_url(url), code)
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/749160
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I03387845ee37f378f09359f32371e0581462467b
Gerrit-Change-Number: 749160
Gerrit-PatchSet: 11
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97(a)gmail.com>
Gerrit-Reviewer: Meno25 <meno25mail(a)gmail.com>
Gerrit-Reviewer: Wesalius
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged