jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/675988 )
Change subject: [bugfix] Fix isDone() method
......................................................................
[bugfix] Fix isDone() method
Change-Id: Ifa368f65ccee3320697892b9f41f9db5454150fc
---
M scripts/interwiki.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 0096eab..684e2fd 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -2123,7 +2123,7 @@
def isDone(self):
"""Check whether there is still more work to do."""
- return self and self.pageGenerator is None
+ return not self and self.pageGenerator is None
def plus(self, site, count=1):
"""Helper routine that the Subject class expects in a counter."""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/675988
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ifa368f65ccee3320697892b9f41f9db5454150fc
Gerrit-Change-Number: 675988
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/675844 )
Change subject: [IMPR] Improvements for interwiki.py
......................................................................
[IMPR] Improvements for interwiki.py
- simplify InterwikiBotConfig arg parsing for two items
- [bugfix] exceptions must derive from BaseException
- shorten len() comparisons
- reduce nesting of flow statements
- use defaultdict in interwiki_graph.Subject
Change-Id: Ie559eb9063b52acf4d531819a4474c908b7d032e
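Two of the bullets in a short, runnable sketch (toy site/page strings, not
the real pywikibot objects): in Python 3 `raise "some string"` is itself a
TypeError, so the hunk below wraps the message in RuntimeError; and
collections.defaultdict(list) removes the `if site in new ... else
new[site] = [page]` bootstrapping.

    from collections import defaultdict

    # exceptions must derive from BaseException: raising a bare string
    # is a TypeError in Python 3 and the intended message is lost.
    def start_work(site, pending):
        if pending:
            raise RuntimeError(
                "BUG: Can't start to work on {}; still working on {}"
                .format(site, pending))

    try:
        start_work('enwiki', pending='dewiki')
    except RuntimeError as error:
        print(error)

    # use defaultdict: a missing key springs into existence as an empty
    # list, so appending never needs a membership check first.
    new = defaultdict(list)
    for site, page in [('de', 'Berlin'), ('de', 'Hamburg'), ('fr', 'Paris')]:
        new[site].append(page)
    print(dict(new))  # {'de': ['Berlin', 'Hamburg'], 'fr': ['Paris']}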
---
M scripts/interwiki.py
1 file changed, 162 insertions(+), 156 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 8cac302..0096eab 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -516,12 +516,10 @@
self.parenthesesonly = True
elif arg == 'localright':
self.followinterwiki = False
- elif arg == 'array':
- if value.isdigit():
- self.minsubjects = int(value)
- elif arg == 'query':
- if value.isdigit():
- self.maxquerysize = int(value)
+ elif arg == 'array' and value.isdigit():
+ self.minsubjects = int(value)
+ elif arg == 'query' and value.isdigit():
+ self.maxquerysize = int(value)
elif arg == 'back':
self.nobackonly = True
elif arg == 'async':
@@ -811,8 +809,9 @@
# Bug-check: Isn't there any work still in progress? We can't work on
# different sites at a time!
if self.pending:
- raise "BUG: Can't start to work on {}; still working on {}".format(
- site, self.pending)
+ raise RuntimeError(
+ "BUG: Can't start to work on {}; still working on {}"
+ .format(site, self.pending))
# Prepare a list of suitable pages
result = []
for page in self.todo.filter(site):
@@ -1262,7 +1261,7 @@
break
if self.origin == page:
- self.untranslated = (len(iw) == 0)
+ self.untranslated = not iw
if self.conf.untranslatedonly:
# Ignore the interwiki links.
iw = ()
@@ -1307,13 +1306,12 @@
for link in iw:
linkedPage = pywikibot.Page(link)
- if self.conf.hintsareright:
- if linkedPage.site in self.hintedsites:
- pywikibot.output(
- 'NOTE: {}: {} extra interwiki on hinted site '
- 'ignored {}'
- .format(self.origin, page, linkedPage))
- break
+ if self.conf.hintsareright \
+ and linkedPage.site in self.hintedsites:
+ pywikibot.output(
+ 'NOTE: {}: {} extra interwiki on hinted site '
+ 'ignored {}'.format(self.origin, page, linkedPage))
+ break
if not self.skipPage(page, linkedPage, counter):
if self.conf.followinterwiki or page == self.origin:
@@ -1372,7 +1370,7 @@
errorCount = self.problemfound
# Build up a dictionary of all pages found, with the site as key.
# Each value will be a list of pages.
- new = {}
+ new = defaultdict(list)
for page in self.done:
if page.exists() and not page.isRedirectPage() \
and not page.isCategoryRedirect():
@@ -1382,16 +1380,13 @@
# inhibit the forwarding families pages to be updated.
continue
- if site == self.origin.site:
- if page != self.origin:
- self.problem('Found link to ' + page)
- self.whereReport(page)
- errorCount += 1
- else:
- if site in new:
- new[site].append(page)
- else:
- new[site] = [page]
+
+ if site != self.origin.site:
+ new[site].append(page)
+ elif page != self.origin:
+ self.problem('Found link to ' + page)
+ self.whereReport(page)
+ errorCount += 1
# See if new{} contains any problematic values
result = {}
@@ -1519,10 +1514,10 @@
# processing
# TODO: should be move to assemble()
# replaceLinks will skip the site it's working on.
- if self.origin.site not in new:
# TODO: make this possible as well.
- if not self.origin.site.family.interwiki_forward:
- new[self.origin.site] = self.origin
+ if self.origin.site not in new \
+ and not self.origin.site.family.interwiki_forward:
+ new[self.origin.site] = self.origin
updatedSites = []
notUpdatedSites = []
@@ -1607,21 +1602,23 @@
def replaceLinks(self, page, newPages):
"""Return True if saving was successful."""
- if self.conf.localonly:
- # In this case only continue on the Page we started with
- if page != self.origin:
- raise SaveError('-localonly and page != origin')
+ # In this case only continue on the Page we started with
+ if self.conf.localonly and page != self.origin:
+ raise SaveError('-localonly and page != origin')
+
if page.section():
# This is not a page, but a subpage. Do not edit it.
pywikibot.output('Not editing {}: not doing interwiki on subpages'
.format(page))
raise SaveError('Link has a #section')
+
try:
pagetext = page.get()
except pywikibot.NoPage:
pywikibot.output('Not editing {}: page does not exist'
.format(page))
raise SaveError("Page doesn't exist")
+
if page_empty_check(page):
pywikibot.output('Not editing {}: page is empty'.format(page))
raise SaveError('Page is empty.')
@@ -1667,9 +1664,9 @@
# Do not add interwiki links to foreign families that page.site() does
# not forward to
for stmp in new.keys():
- if stmp.family != page.site.family:
- if stmp.family.name != page.site.family.interwiki_forward:
- del new[stmp]
+ if stmp.family != page.site.family \
+ and stmp.family.name != page.site.family.interwiki_forward:
+ del new[stmp]
# Put interwiki links into a map
old = {}
@@ -1751,87 +1748,86 @@
ask = False
if self.conf.confirm and not self.conf.always:
ask = True
- # If we need to ask, do so
- if ask:
- if self.conf.autonomous:
- # If we cannot ask, deny permission
- answer = 'n'
- else:
- answer = pywikibot.input_choice('Submit?',
- [('Yes', 'y'), ('No', 'n'),
- ('open in Browser', 'b'),
- ('Give up', 'g'),
- ('Always', 'a')],
- automatic_quit=False)
- if answer == 'b':
- pywikibot.bot.open_webbrowser(page)
- return True
- if answer == 'a':
- # don't ask for the rest of this subject
- self.conf.always = True
- answer = 'y'
- else:
+
+ if not ask:
# If we do not need to ask, allow
answer = 'y'
-
- # If we got permission to submit, do so
- if answer == 'y':
- self.conf.note('Updating live wiki...')
- timeout = 60
- page.text = newtext
- while True:
- try:
- page.save(summary=mcomment,
- asynchronous=self.conf.asynchronous,
- nocreate=True)
- except pywikibot.NoCreateError:
- pywikibot.exception()
- return False
- except pywikibot.LockedPage:
- pywikibot.output('Page {} is locked. Skipping.'
- .format(page))
- raise SaveError('Locked')
- except pywikibot.EditConflict:
- pywikibot.output(
- 'ERROR putting page: An edit conflict occurred. '
- 'Giving up.')
- raise SaveError('Edit conflict')
- except pywikibot.SpamblacklistError as error:
- pywikibot.output(
- 'ERROR putting page: {0} blacklisted by spamfilter. '
- 'Giving up.'.format(error.url))
- raise SaveError('Spam filter')
- except pywikibot.PageSaveRelatedError as error:
- pywikibot.output('ERROR putting page: {}'
- .format(error.args,))
- raise SaveError('PageSaveRelatedError')
- except (socket.error, IOError) as error:
- if timeout > 3600:
- raise
- pywikibot.output('ERROR putting page: {}'
- .format(error.args,))
- pywikibot.output('Sleeping {} seconds before trying again.'
- .format(timeout,))
- timeout *= 2
- pywikibot.sleep(timeout)
- except pywikibot.ServerError:
- if timeout > 3600:
- raise
- pywikibot.output('ERROR putting page: ServerError.')
- pywikibot.output('Sleeping {} seconds before trying again.'
- .format(timeout,))
- timeout *= 2
- pywikibot.sleep(timeout)
- else:
- break
- return True
+ elif self.conf.autonomous:
+ # If we cannot ask, deny permission
+ answer = 'n'
+ else: # If we need to ask, do so
+ answer = pywikibot.input_choice('Submit?',
+ [('Yes', 'y'), ('No', 'n'),
+ ('open in Browser', 'b'),
+ ('Give up', 'g'),
+ ('Always', 'a')],
+ automatic_quit=False)
+ if answer == 'b':
+ pywikibot.bot.open_webbrowser(page)
+ return True
+ if answer == 'a':
+ # don't ask for the rest of this subject
+ self.conf.always = True
+ answer = 'y'
if answer == 'g':
raise GiveUpOnPage('User asked us to give up')
- raise LinkMustBeRemoved('Found incorrect link to {} in {}'
- .format(', '.join(x.code for x in removing),
- page))
+ # If we got permission to submit, do so
+ if answer != 'y':
+ raise LinkMustBeRemoved(
+ 'Found incorrect link to {} in {}'
+ .format(', '.join(x.code for x in removing), page))
+
+ self.conf.note('Updating live wiki...')
+ timeout = 60
+ page.text = newtext
+ while True:
+ try:
+ page.save(summary=mcomment,
+ asynchronous=self.conf.asynchronous,
+ nocreate=True)
+ except pywikibot.NoCreateError:
+ pywikibot.exception()
+ return False
+ except pywikibot.LockedPage:
+ pywikibot.output('Page {} is locked. Skipping.'
+ .format(page))
+ raise SaveError('Locked')
+ except pywikibot.EditConflict:
+ pywikibot.output(
+ 'ERROR putting page: An edit conflict occurred. '
+ 'Giving up.')
+ raise SaveError('Edit conflict')
+ except pywikibot.SpamblacklistError as error:
+ pywikibot.output(
+ 'ERROR putting page: {0} blacklisted by spamfilter. '
+ 'Giving up.'.format(error.url))
+ raise SaveError('Spam filter')
+ except pywikibot.PageSaveRelatedError as error:
+ pywikibot.output('ERROR putting page: {}'
+ .format(error.args,))
+ raise SaveError('PageSaveRelatedError')
+ except (socket.error, IOError) as error:
+ if timeout > 3600:
+ raise
+ pywikibot.output('ERROR putting page: {}'
+ .format(error.args,))
+ pywikibot.output('Sleeping {} seconds before trying again.'
+ .format(timeout,))
+ timeout *= 2
+ pywikibot.sleep(timeout)
+ except pywikibot.ServerError:
+ if timeout > 3600:
+ raise
+ pywikibot.output('ERROR putting page: ServerError.')
+ pywikibot.output('Sleeping {} seconds before trying again.'
+ .format(timeout,))
+ timeout *= 2
+ pywikibot.sleep(timeout)
+ else:
+ break
+ return True
def reportBacklinks(self, new, updatedSites):
"""
@@ -1847,45 +1843,49 @@
try:
for site in expectedSites - set(updatedSites):
page = new[site]
- if not page.section():
+ if page.section():
+ continue
+
+ try:
+ linkedPages = {pywikibot.Page(link)
+ for link in page.iterlanglinks()}
+ except pywikibot.NoPage:
+ pywikibot.warning(
+ 'Page {} does no longer exist?!'.format(page))
+ break
+
+ # To speed things up, create a dictionary which maps sites
+ # to pages. This assumes that there is only one interwiki
+ # link per language.
+ linkedPagesDict = {}
+ for linkedPage in linkedPages:
+ linkedPagesDict[linkedPage.site] = linkedPage
+ for expectedPage in expectedPages - linkedPages:
+ if expectedPage == page:
+ continue
try:
- linkedPages = {pywikibot.Page(link)
- for link in page.iterlanglinks()}
- except pywikibot.NoPage:
+ linkedPage = linkedPagesDict[expectedPage.site]
pywikibot.warning(
- 'Page {} does no longer exist?!'.format(page))
- break
- # To speed things up, create a dictionary which maps sites
- # to pages. This assumes that there is only one interwiki
- # link per language.
- linkedPagesDict = {}
- for linkedPage in linkedPages:
- linkedPagesDict[linkedPage.site] = linkedPage
- for expectedPage in expectedPages - linkedPages:
- if expectedPage != page:
- try:
- linkedPage = linkedPagesDict[expectedPage.site]
- pywikibot.warning(
- '{}: {} does not link to {} but to {}'
- .format(page.site.family.name,
- page, expectedPage, linkedPage))
- except KeyError:
- if not expectedPage.site.is_data_repository():
- pywikibot.warning(
- '{}: {} does not link to {}'
- .format(page.site.family.name,
- page, expectedPage))
- # Check for superfluous links
- for linkedPage in linkedPages:
- if linkedPage not in expectedPages:
- # Check whether there is an alternative page on
- # that language.
- # In this case, it was already reported above.
- if linkedPage.site not in expectedSites:
- pywikibot.warning(
- '{}: {} links to incorrect {}'
- .format(page.site.family.name,
- page, linkedPage))
+ '{}: {} does not link to {} but to {}'
+ .format(page.site.family.name,
+ page, expectedPage, linkedPage))
+ except KeyError:
+ if not expectedPage.site.is_data_repository():
+ pywikibot.warning('{}: {} does not link to {}'
+ .format(page.site.family.name,
+ page, expectedPage))
+ # Check for superfluous links
+ for linkedPage in linkedPages:
+ if linkedPage in expectedPages:
+ continue
+ # Check whether there is an alternative page on
+ # that language.
+ # In this case, it was already reported above.
+ if linkedPage.site not in expectedSites:
+ pywikibot.warning('{}: {} links to incorrect {}'
+ .format(page.site.family.name,
+ page, linkedPage))
+
except (socket.error, IOError):
pywikibot.output('ERROR: could not report backlinks')
@@ -2006,6 +2006,7 @@
"""Return the first subject that is still being worked on."""
if self.subjects:
return self.subjects[0]
+ return None
def maxOpenSite(self):
"""
@@ -2014,7 +2015,7 @@
If there is nothing left, return None.
Only languages that are TODO for the first Subject are returned.
"""
- max = 0
+ max_ = 0
maxlang = None
if not self.firstSubject():
return None
@@ -2024,12 +2025,15 @@
# because we have to wait before submitting another modification to
# go live. Select any language from counts.
oc = self.counts
- if pywikibot.Site() in oc:
- return pywikibot.Site()
+
+ default_site = pywikibot.Site()
+ if default_site in oc:
+ return default_site
+
for lang in oc:
count = self.counts[lang]
- if count > max:
- max = count
+ if count > max_:
+ max_ = count
maxlang = lang
return maxlang
@@ -2091,9 +2095,11 @@
if len(pageGroup) >= self.conf.maxquerysize:
# We have found enough pages to fill the bandwidth.
break
- if len(pageGroup) == 0:
+
+ if not pageGroup:
pywikibot.output('NOTE: Nothing left to do 2')
return False
+
# Get the content of the assembled list in one blow
gen = site.preloadpages(pageGroup, templates=True, langlinks=True,
pageprops=True)
@@ -2117,7 +2123,7 @@
def isDone(self):
"""Check whether there is still more work to do."""
- return len(self) == 0 and self.pageGenerator is None
+ return self and self.pageGenerator is None
def plus(self, site, count=1):
"""Helper routine that the Subject class expects in a counter."""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/675844
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ie559eb9063b52acf4d531819a4474c908b7d032e
Gerrit-Change-Number: 675844
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki@aol.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/673555 )
Change subject: [IMPR] use sys.exit() instead of exit()
......................................................................
[IMPR] use sys.exit() instead of exit()
Change-Id: I4a6611ead6d361cd407567afeb022d6bf471af14
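Background for the substitution: the bare exit() (like quit()) is injected
by the site module as an interactive convenience and is not guaranteed to
exist, e.g. under `python -S` or in frozen binaries; sys.exit() is the
portable spelling and raises SystemExit. Given a string, it writes the
message to stderr and exits with status 1, which is why the cache.py hunks
below can fold the pywikibot.error(...) call into the sys.exit(...)
argument. A minimal sketch (hypothetical main, not the real script):

    import sys

    def main(argv):
        # With a string argument, sys.exit() writes it to stderr and
        # exits with status 1, so no separate error() call is needed.
        if len(argv) > 2:
            sys.exit('Only one command may be executed.')

    if __name__ == '__main__':
        main(sys.argv)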
---
M scripts/archive/compat2core.py
M scripts/harvest_template.py
M scripts/maintenance/cache.py
3 files changed, 9 insertions(+), 10 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/archive/compat2core.py b/scripts/archive/compat2core.py
index 8fde419..f9ee6bc 100755
--- a/scripts/archive/compat2core.py
+++ b/scripts/archive/compat2core.py
@@ -25,13 +25,14 @@
python pwb.py compat2core <scriptname> -warnonly
"""
#
-# (C) Pywikibot team, 2014-2020
+# (C) Pywikibot team, 2014-2021
#
# Distributed under the terms of the MIT license.
#
import codecs
import os
import re
+import sys
import pywikibot
@@ -155,7 +156,7 @@
'Please input the .py file to convert '
'(no input to leave):')
if not self.source:
- exit()
+ sys.exit()
if not self.source.endswith('.py'):
self.source += '.py'
if os.path.exists(self.source):
@@ -174,7 +175,7 @@
'Destination file is {}.'.format(self.dest),
default=True, automatic_quit=False):
pywikibot.output('Quitting...')
- exit()
+ sys.exit()
def convert(self):
"""Convert script."""
diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py
index 85665c7..81f83de 100755
--- a/scripts/harvest_template.py
+++ b/scripts/harvest_template.py
@@ -92,6 +92,7 @@
# Distributed under the terms of MIT License.
#
import signal
+import sys
from typing import Any, Optional
@@ -182,9 +183,7 @@
"""Fetch redirects of the title, so we can check against them."""
temp = pywikibot.Page(pywikibot.Site(), title, ns=10)
if not temp.exists():
- pywikibot.error('Template {} does not exist.'
- .format(temp.title()))
- exit()
+ sys.exit('Template {} does not exist.'.format(temp.title()))
# Put some output here since it can take a while
pywikibot.output('Finding redirects...')
diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py
index 6a8b1b2..f7f4212 100755
--- a/scripts/maintenance/cache.py
+++ b/scripts/maintenance/cache.py
@@ -72,6 +72,7 @@
import hashlib
import os
import pickle
+import sys
import pywikibot
@@ -418,13 +419,11 @@
command = 'has_password(entry)'
elif arg == '-c':
if command:
- pywikibot.error('Only one command may be executed.')
- exit(1)
+ sys.exit('Only one command may be executed.')
command = ''
elif arg == '-o':
if output:
- pywikibot.error('Only one output may be defined.')
- exit(1)
+ sys.exit('Only one output may be defined.')
output = ''
else:
if not cache_paths:
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/673555
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I4a6611ead6d361cd407567afeb022d6bf471af14
Gerrit-Change-Number: 673555
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki@aol.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/675168 )
Change subject: [cleanup] remove dict entries of archived scripts in script_tests.py
......................................................................
[cleanup] remove dict entries of archived scripts in script_tests.py
Change-Id: Ic3cb1e1cf9022b8b797fb1ee78d67926efa3ae62
---
M tests/script_tests.py
1 file changed, 0 insertions(+), 17 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/script_tests.py b/tests/script_tests.py
index b168df3..b07c1a7 100644
--- a/tests/script_tests.py
+++ b/tests/script_tests.py
@@ -22,12 +22,6 @@
# These dependencies are not always the package name which is in setup.py.
# Here, the name given to the module which will be imported is required.
script_deps = {
- 'flickrripper': ['flickrapi', 'PIL'],
- 'imageharvest': ['bs4'],
- 'isbn': ['stdnum'],
- 'match_images': ['PIL.Image'],
- 'states_redirect': ['pycountry'],
- 'patrol': ['mwparserfromhell'],
'weblinkchecker': ['memento_client'],
}
@@ -67,12 +61,6 @@
['login'] + sorted(set(script_list) - {'login'} - unrunnable_script_set))
script_input = {
- 'catall': 'q\n', # q for quit
- 'editarticle': 'Test page\n',
- 'imageuncat': 'q\n',
- 'imageharvest':
- 'https://upload.wikimedia.org/wikipedia/commons/'
- '8/80/Wikipedia-logo-v2.svg\n\n',
'interwiki': 'Test page that should not exist\n',
'misspelling': 'q\n',
'pagefromfile': 'q\n',
@@ -107,14 +95,9 @@
# TODO: until done here, remember to set editor = None in user-config.py
'change_pagelang': 'No -setlang parameter given',
'checkimages': 'Execution time: 0 seconds',
- 'editarticle': 'Nothing changed',
- 'freebasemappingupload': 'Cannot find ',
'harvest_template': 'ERROR: Please specify',
- 'imageuncat':
- 'WARNING: This script is primarily written for Wikimedia Commons',
# script_input['interwiki'] above lists a title that should not exist
'interwiki': 'does not exist. Skipping.',
- 'imageharvest': 'From what URL should I get the images',
'login': 'Logged in on ',
'pagefromfile': 'Please enter the file name',
'parser_function_count': 'Hold on, this will need some time.',
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/675168
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ic3cb1e1cf9022b8b797fb1ee78d67926efa3ae62
Gerrit-Change-Number: 675168
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/675765 )
Change subject: [scripts] Recover interwiki.py
......................................................................
[scripts] Recover interwiki.py
Bug: T278675
Change-Id: I24e93a4cad87064e58fb6e326ef3a40fa0d3daf1
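One hunk below also drops a `noqa: D205,D400` by rewriting the
LinkMustBeRemoved docstring. Those pydocstyle rules want a one-line summary
ending in a period (D400), then a blank line before the description (D205).
A minimal illustration of the convention with a toy class:

    class LinkMustBeRemoved(Exception):
        """An interwiki link has to be removed manually.

        The one-line summary above ends with a period (D400) and is
        separated from this description by a blank line (D205).
        """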
---
M docs/scripts/scripts.rst
M scripts/README.rst
R scripts/interwiki.py
M scripts/interwikidata.py
M tests/pwb_tests.py
M tox.ini
6 files changed, 18 insertions(+), 13 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/docs/scripts/scripts.rst b/docs/scripts/scripts.rst
index 291246f..1fa7a03 100644
--- a/docs/scripts/scripts.rst
+++ b/docs/scripts/scripts.rst
@@ -106,6 +106,11 @@
.. automodule:: scripts.imagetransfer
+scripts.interwiki script
+------------------------
+
+.. automodule:: scripts.interwiki
+
scripts.interwikidata script
----------------------------
diff --git a/scripts/README.rst b/scripts/README.rst
index 36f1217..9f54ffe 100644
--- a/scripts/README.rst
+++ b/scripts/README.rst
@@ -61,6 +61,9 @@
| | images, and let the user choose among them for |
| | images to upload. |
+------------------------+---------------------------------------------------------+
+ | interwiki.py | A robot to check interwiki links on all pages (or |
+ | | a range of pages) of a wiki. |
+ +------------------------+---------------------------------------------------------+
| interwikidata.py | Script to handle interwiki links based on Wikibase. |
+------------------------+---------------------------------------------------------+
| listpages.py | listpages: report number of pages found |
@@ -190,9 +193,6 @@
| imageuncat.py | Adds uncat template to images without categories at |
| | Commons |
+------------------------+---------------------------------------------------------+
- | interwiki.py | A robot to check interwiki links on all pages (or |
- | | a range of pages) of a wiki. |
- +------------------------+---------------------------------------------------------+
| isbn.py | Bot to convert all ISBN-10 codes to the ISBN-13 |
| | format. |
+------------------------+---------------------------------------------------------+
diff --git a/scripts/archive/interwiki.py b/scripts/interwiki.py
similarity index 99%
rename from scripts/archive/interwiki.py
rename to scripts/interwiki.py
index bd70ad0..c46ef2d 100755
--- a/scripts/archive/interwiki.py
+++ b/scripts/interwiki.py
@@ -328,7 +328,7 @@
"""
#
-# (C) Pywikibot team, 2003-2020
+# (C) Pywikibot team, 2003-2021
#
# Distributed under the terms of the MIT license.
#
@@ -363,11 +363,12 @@
"""An attempt to save a page with changed interwiki has failed."""
-class LinkMustBeRemoved(SaveError): # noqa: D205,D400
+class LinkMustBeRemoved(SaveError):
- """
- An interwiki link has to be removed, but this can't be done because of user
- preferences or because the user chose not to change the page.
+ """An interwiki link has to be removed manually.
+
+ An interwiki link has to be removed, but this can't be done because
+ of user preferences or because the user chose not to change the page.
"""
@@ -375,8 +376,6 @@
"""User chose not to work on this page and its linked pages any more."""
- pass
-
# A list of template names in different languages.
# Pages which contain these shouldn't be changed.
diff --git a/scripts/interwikidata.py b/scripts/interwikidata.py
index 073ca0d..22f118c 100755
--- a/scripts/interwikidata.py
+++ b/scripts/interwikidata.py
@@ -61,8 +61,8 @@
})
super().__init__(**kwargs)
if not self.site.has_data_repository:
- raise ValueError('{site} does not have a data repository.'
- .format(site=self.site))
+ raise ValueError('{site} does not have a data repository, use '
+ 'interwiki.py instead.'.format(site=self.site))
self.repo = self.site.data_repository()
if not self.opt.summary:
self.opt.summary = pywikibot.i18n.twtranslate(
diff --git a/tests/pwb_tests.py b/tests/pwb_tests.py
index 2a59374..f542a67 100644
--- a/tests/pwb_tests.py
+++ b/tests/pwb_tests.py
@@ -112,7 +112,7 @@
'',
'The most similar scripts are:',
'1 - interwikidata',
- '2 - illustrate_wikidata',
+ '2 - interwiki',
]
stderr = io.StringIO(
execute_pwb(['inter_wikidata'], data_in='q')['stderr'])
diff --git a/tox.ini b/tox.ini
index 04b4e0c..13cc35a 100644
--- a/tox.ini
+++ b/tox.ini
@@ -157,6 +157,7 @@
scripts/cosmetic_changes.py : N816
scripts/fixing_redirects.py : N803, N806
scripts/harvest_template.py : N802, N816
+ scripts/interwiki.py : N802, N803, N806, N815, N816
scripts/imagetransfer.py : N803, N806, N816
scripts/maintenance/make_i18n_dict.py : T001
scripts/misspelling.py : N802, N806
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/675765
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I24e93a4cad87064e58fb6e326ef3a40fa0d3daf1
Gerrit-Change-Number: 675765
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki@aol.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged