jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/781963 )
Change subject: [W0048] Merge 'if' statements
......................................................................
[W0048] Merge 'if' statements
Change-Id: I054e3070ae5fceee7d014d723dd328f7fef16731
---
M pywikibot/comms/http.py
M pywikibot/page/_pages.py
M pywikibot/scripts/generate_user_files.py
M pywikibot/site/_upload.py
M pywikibot/site_detect.py
M pywikibot/specialbots/_upload.py
M scripts/checkimages.py
M scripts/coordinate_import.py
M scripts/download_dump.py
M scripts/imagetransfer.py
M scripts/interwiki.py
M scripts/interwikidata.py
12 files changed, 92 insertions(+), 95 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 84eb68f..465ef29 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -268,9 +268,9 @@
:type response: :py:obj:`requests.Response`
"""
# TODO: do some error correcting stuff
- if isinstance(response, requests.exceptions.SSLError):
- if SSL_CERT_VERIFY_FAILED_MSG in str(response):
- raise FatalServerError(str(response))
+ if isinstance(response, requests.exceptions.SSLError) \
+ and SSL_CERT_VERIFY_FAILED_MSG in str(response):
+ raise FatalServerError(str(response))
if isinstance(response, requests.ConnectionError):
msg = str(response)
diff --git a/pywikibot/page/_pages.py b/pywikibot/page/_pages.py
index d398b87..893b236 100644
--- a/pywikibot/page/_pages.py
+++ b/pywikibot/page/_pages.py
@@ -1907,11 +1907,10 @@
been retrieved earlier). If timestamp is not found, returns
empty list.
"""
- if hasattr(self, '_deletedRevs'):
- if timestamp in self._deletedRevs and (
- not content
- or 'content' in self._deletedRevs[timestamp]):
- return self._deletedRevs[timestamp]
+ if hasattr(self, '_deletedRevs') \
+ and timestamp in self._deletedRevs \
+ and (not content or 'content' in self._deletedRevs[timestamp]):
+ return self._deletedRevs[timestamp]
for item in self.site.deletedrevs(self, start=timestamp,
content=content, total=1, **kwargs):
diff --git a/pywikibot/scripts/generate_user_files.py b/pywikibot/scripts/generate_user_files.py
index 0281985..304b766 100755
--- a/pywikibot/scripts/generate_user_files.py
+++ b/pywikibot/scripts/generate_user_files.py
@@ -157,12 +157,12 @@
mycode = None
while not mycode:
mycode = pywikibot.input(message, default=default_lang, force=force)
- if known_langs and mycode and mycode not in known_langs:
- if not pywikibot.input_yn(
- fill('The site code {!r} is not in the list of known '
- 'sites. Do you want to continue?'.format(mycode)),
- default=False, automatic_quit=False):
- mycode = None
+ if known_langs and mycode and mycode not in known_langs \
+ and not pywikibot.input_yn(
+ fill('The site code {!r} is not in the list of known sites. '
+ 'Do you want to continue?'.format(mycode)),
+ default=False, automatic_quit=False):
+ mycode = None
message = 'Username on {}:{}'.format(mycode, fam.name)
username = pywikibot.input(message, default=default_username, force=force)
diff --git a/pywikibot/site/_upload.py b/pywikibot/site/_upload.py
index c85a397..73e9e7d 100644
--- a/pywikibot/site/_upload.py
+++ b/pywikibot/site/_upload.py
@@ -170,10 +170,9 @@
if report_success is None:
report_success = isinstance(ignore_warnings, bool)
- if report_success is True:
- if not isinstance(ignore_warnings, bool):
- raise ValueError('report_success may only be set to True when '
- 'ignore_warnings is a boolean')
+ if report_success is True and not isinstance(ignore_warnings, bool):
+ raise ValueError('report_success may only be set to True when '
+ 'ignore_warnings is a boolean')
if isinstance(ignore_warnings, Iterable):
ignored_warnings = ignore_warnings
diff --git a/pywikibot/site_detect.py b/pywikibot/site_detect.py
index b236b16..8bd4a78 100644
--- a/pywikibot/site_detect.py
+++ b/pywikibot/site_detect.py
@@ -240,9 +240,9 @@
else:
if self._parsed_url:
# allow upgrades to https, but not downgrades
- if self._parsed_url.scheme == 'https':
- if new_parsed_url.scheme != self._parsed_url.scheme:
- return
+ if self._parsed_url.scheme == 'https' \
+ and new_parsed_url.scheme != self._parsed_url.scheme:
+ return
# allow http://www.brickwiki.info/ vs http://brickwiki.info/
if (new_parsed_url.netloc in self._parsed_url.netloc
diff --git a/pywikibot/specialbots/_upload.py b/pywikibot/specialbots/_upload.py
index 4bfb79f..9fa7dc2 100644
--- a/pywikibot/specialbots/_upload.py
+++ b/pywikibot/specialbots/_upload.py
@@ -190,9 +190,8 @@
:return: False if this warning should cause an abort, True if it should
be ignored or None if this warning has no default handler.
"""
- if self.aborts is not True:
- if warning in self.aborts:
- return False
+ if self.aborts is not True and warning in self.aborts:
+ return False
if self.ignore_warning is True or (self.ignore_warning is not False
and warning in self.ignore_warning):
return True
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index d8598de..33adbe2 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -1266,12 +1266,13 @@
# Found the templates ONLY in the image's description
for template_selected in templates_in_the_image_raw:
tp = pywikibot.Page(self.site, template_selected)
+ page_title = tp.title(as_url=True, with_ns=False).lower()
for template_real in self.licenses_found:
- if (tp.title(as_url=True, with_ns=False).lower()
- == template_real.title(as_url=True,
- with_ns=False).lower()):
- if template_real not in self.all_licenses:
- self.all_licenses.append(template_real)
+ template_title = template_real.title(as_url=True,
+ with_ns=False).lower()
+ if page_title == template_title \
+ and template_real not in self.all_licenses:
+ self.all_licenses.append(template_real)
break
if self.licenses_found:
@@ -1448,19 +1449,8 @@
if find_tipe.lower() == 'findonly':
search_results = re.findall(r'{}'.format(k.lower()),
self.image_check_text.lower())
- if search_results:
- if search_results[0] == self.image_check_text.lower():
- self.some_problem = True
- self.text_used = text
- self.head_used = head_2
- self.imagestatus_used = imagestatus
- self.name_used = name
- self.summary_used = summary
- self.mex_used = mex_catched
- break
- elif find_tipe.lower() == 'find':
- if re.findall(r'{}'.format(k.lower()),
- self.image_check_text.lower()):
+ if search_results \
+ and search_results[0] == self.image_check_text.lower():
self.some_problem = True
self.text_used = text
self.head_used = head_2
@@ -1468,7 +1458,18 @@
self.name_used = name
self.summary_used = summary
self.mex_used = mex_catched
- continue
+ break
+ elif find_tipe.lower() == 'find' \
+ and re.findall(r'{}'.format(k.lower()),
+ self.image_check_text.lower()):
+ self.some_problem = True
+ self.text_used = text
+ self.head_used = head_2
+ self.imagestatus_used = imagestatus
+ self.name_used = name
+ self.summary_used = summary
+ self.mex_used = mex_catched
+ continue
def check_step(self) -> None:
"""Check a single file page."""
@@ -1752,14 +1753,14 @@
continue
# Check on commons if there's already an image with the same name
- if commons_active and site.family.name != 'commons':
- if not bot.check_image_on_commons():
- continue
+ if commons_active and site.family.name != 'commons' \
+ and not bot.check_image_on_commons():
+ continue
# Check if there are duplicates of the image on the project
- if duplicates_active:
- if not bot.check_image_duplicated(duplicates_rollback):
- continue
+ if duplicates_active \
+ and not bot.check_image_duplicated(duplicates_rollback):
+ continue
bot.check_step()
diff --git a/scripts/coordinate_import.py b/scripts/coordinate_import.py
index d5cfe39..08f843e 100755
--- a/scripts/coordinate_import.py
+++ b/scripts/coordinate_import.py
@@ -118,9 +118,9 @@
if page is None:
# running over items, search in linked pages
for page in item.iterlinks():
- if page.site.has_extension('GeoData'):
- if self.try_import_coordinates_from_page(page, item):
- break
+ if page.site.has_extension('GeoData') \
+ and self.try_import_coordinates_from_page(page, item):
+ break
return
self.try_import_coordinates_from_page(page, item)
diff --git a/scripts/download_dump.py b/scripts/download_dump.py
index cc977b8..66626d8 100755
--- a/scripts/download_dump.py
+++ b/scripts/download_dump.py
@@ -102,9 +102,8 @@
if toolforge_dump_filepath:
pywikibot.output('Symlinking file from '
+ toolforge_dump_filepath)
- if non_atomic:
- if os.path.exists(file_final_storepath):
- remove(file_final_storepath)
+ if non_atomic and os.path.exists(file_final_storepath):
+ remove(file_final_storepath)
symlink(toolforge_dump_filepath, file_current_storepath)
else:
url = 'https://dumps.wikimedia.org/{}/{}/{}'.format(
diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py
index e8e08d9..96c653b 100755
--- a/scripts/imagetransfer.py
+++ b/scripts/imagetransfer.py
@@ -242,9 +242,9 @@
reason = i18n.twtranslate(sourceSite,
'imagetransfer-nowcommons_notice')
# try to delete the original image if we have a sysop account
- if sourceSite.has_right('delete'):
- if sourceImagePage.delete(reason):
- return
+ if sourceSite.has_right('delete') \
+ and sourceImagePage.delete(reason):
+ return
if sourceSite.lang in nowCommonsTemplate \
and sourceSite.family.name in config.usernames \
and sourceSite.lang in \
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 8d6a194..2ca1cd4 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1103,12 +1103,12 @@
elif page.isStaticRedirect():
self.conf.note('not following static {}redirects.'.format(redir))
elif (page.site.family == redirect_target.site.family
- and not self.skipPage(page, redirect_target, counter)):
- if self.addIfNew(redirect_target, counter, page):
- if config.interwiki_shownew:
- pywikibot.output('{}: {} gives new {}redirect {}'
- .format(self.origin, page, redir,
- redirect_target))
+ and not self.skipPage(page, redirect_target, counter)
+ and self.addIfNew(redirect_target, counter, page)
+ and config.interwiki_shownew):
+ pywikibot.output('{}: {} gives new {}redirect {}'
+ .format(self.origin, page, redir,
+ redirect_target))
return True
def check_page(self, page, counter) -> None:
@@ -1181,10 +1181,10 @@
if self.conf.untranslatedonly:
# Ignore the interwiki links.
iw = ()
- if self.conf.lacklanguage:
- if self.conf.lacklanguage in (link.site.lang for link in iw):
- iw = ()
- self.workonme = False
+ if self.conf.lacklanguage \
+ and self.conf.lacklanguage in (link.site.lang for link in iw):
+ iw = ()
+ self.workonme = False
if len(iw) < self.conf.minlinks:
iw = ()
self.workonme = False
@@ -1223,28 +1223,26 @@
.format(self.origin, page, linkedPage))
break
- if not self.skipPage(page, linkedPage, counter):
- if self.conf.followinterwiki or page == self.origin:
- if self.addIfNew(linkedPage, counter, page):
- # It is new. Also verify whether it is the second
- # on the same site
- lpsite = linkedPage.site
- for prevPage in self.found_in:
- if prevPage != linkedPage and \
- prevPage.site == lpsite:
- # Still, this could be "no problem" as
- # either may be a redirect to the other.
- # No way to find out quickly!
- pywikibot.output(
- 'NOTE: {}: {} gives duplicate '
- 'interwiki on same site {}'
- .format(self.origin, page, linkedPage))
- break
- else:
- if config.interwiki_shownew:
- pywikibot.output(
- '{}: {} gives new interwiki {}'
- .format(self.origin, page, linkedPage))
+ if not self.skipPage(page, linkedPage, counter) \
+ and self.conf.followinterwiki or page == self.origin \
+ and self.addIfNew(linkedPage, counter, page):
+ # It is new. Also verify whether it is the second on the
+ # same site
+ lpsite = linkedPage.site
+ for prevPage in self.found_in:
+ if prevPage != linkedPage and prevPage.site == lpsite:
+ # Still, this could be "no problem" as
+ # either may be a redirect to the other.
+ # No way to find out quickly!
+ pywikibot.output(
+ 'NOTE: {}: {} gives duplicate interwiki on same '
+ 'site {}'.format(self.origin, page, linkedPage))
+ break
+ else:
+ if config.interwiki_shownew:
+ pywikibot.output(
+ '{}: {} gives new interwiki {}'
+ .format(self.origin, page, linkedPage))
if self.forcedStop:
break
diff --git a/scripts/interwikidata.py b/scripts/interwikidata.py
index 3628b2b..fc068b4 100755
--- a/scripts/interwikidata.py
+++ b/scripts/interwikidata.py
@@ -161,12 +161,14 @@
"""Clean interwiki links from the page."""
if not self.iwlangs:
return
+
dbnames = [iw_site.dbName() for iw_site in self.iwlangs]
- if set(dbnames) - set(self.current_item.sitelinks.keys()):
- if not self.handle_complicated():
- warning('Interwiki conflict in {}, skipping...'
- .format(self.current_page.title(as_link=True)))
- return
+ if set(dbnames) - set(self.current_item.sitelinks.keys()) \
+ and not self.handle_complicated():
+ warning('Interwiki conflict in {}, skipping...'
+ .format(self.current_page.title(as_link=True)))
+ return
+
output('Cleaning up the page')
new_text = pywikibot.textlib.removeLanguageLinks(
self.current_page.text, site=self.current_page.site)
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/781963
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I054e3070ae5fceee7d014d723dd328f7fef16731
Gerrit-Change-Number: 781963
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/785119 )
Change subject: [IMPR] Reword add_text argument descriptions to be more succinct.
......................................................................
[IMPR] Reword add_text argument descriptions to be more succinct.
Also position add_text -help arguments above '¶ms;'
Change-Id: I7bdb0d52facaa93eac41feb11b438f0cb3423af9
---
M scripts/add_text.py
M tests/add_text_tests.py
2 files changed, 21 insertions(+), 22 deletions(-)
Approvals:
jenkins-bot: Verified
Xqt: Looks good to me, approved
diff --git a/scripts/add_text.py b/scripts/add_text.py
index b8b2e0e..f221e39 100755
--- a/scripts/add_text.py
+++ b/scripts/add_text.py
@@ -1,22 +1,18 @@
#!/usr/bin/python3
r"""
-This is a Bot to add text to the top or bottom of a page.
+Append text to the top or bottom of a page.
By default this adds the text to the bottom above the categories and interwiki.
-These command line parameters can be used to specify which pages to work on:
+Use the following command line parameters to specify what to add:
-¶ms;
+-text Text to append. "\n" are interpreted as newlines.
-Furthermore, the following command line parameters are supported:
+-textfile Path to a file with text to append
--text Define what text to add. "\n" are interpreted as newlines.
+-summary Change summary to use
--textfile Define a texfile name which contains the text to add
-
--summary Define the summary to use
-
--up If used, put the text at the top of the page
+-up Append text to the top of the page rather than the bottom
-create Create the page if necessary. Note that talk pages are
created already without of this option.
@@ -31,13 +27,16 @@
-talkpage Put the text onto the talk page instead
-talk
--excepturl Use the html page as text where you want to see if there's
- the text, not the wiki-page.
+-excepturl Skip pages with a url that matches this regular expression
--noreorder Avoid reordering cats and interwiki
+-noreorder Place the text beneath the categories and interwiki
-Example
--------
+Furthermore, the following can be used to specify which pages to process...
+
+¶ms;
+
+Examples
+--------
1. Append 'hello world' to the bottom of the sandbox:
@@ -93,7 +92,6 @@
'-excepturl': 'What url pattern should we skip?',
}
-
docuReplacements = {'¶ms;': pagegenerators.parameterHelp} # noqa: N816
@@ -199,12 +197,13 @@
generator_factory: pagegenerators.GeneratorFactory
) -> ARGS_TYPE:
"""
- Parses our arguments and provide a named tuple with their values.
+ Parses our arguments and provide a dictionary with their values.
:param argv: input arguments to be parsed
- :param generator_factory: factory that will determine the page to edit
+ :param generator_factory: factory that will determine what pages to
+ process
:return: dictionary with our parsed arguments
- :raise ValueError: invalid arguments received
+ :raise ValueError: if we receive invalid arguments
"""
args = dict(DEFAULT_ARGS)
argv = pywikibot.handle_args(argv)
diff --git a/tests/add_text_tests.py b/tests/add_text_tests.py
index adbfe21..0e7bc62 100755
--- a/tests/add_text_tests.py
+++ b/tests/add_text_tests.py
@@ -1,5 +1,5 @@
#!/usr/bin/python3
-"""Test add_text bot module."""
+"""Test add_text script."""
#
# (C) Pywikibot team, 2016-2022
#
@@ -29,9 +29,9 @@
return page
-class TestAdding(TestCase):
+class TestAddTextScript(TestCase):
- """Test adding text."""
+ """Test add_text script."""
family = 'wikipedia'
code = 'en'
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/785119
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I7bdb0d52facaa93eac41feb11b438f0cb3423af9
Gerrit-Change-Number: 785119
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Damian <atagar1(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/780808 )
Change subject: [IMPR] add -quiet option to omit message when no change was made
......................................................................
[IMPR] add -quiet option to omit message when no change was made
with -quiet option the message "No changes were necessary in [[page]]"
will be omitted because it can be too noisy.
Bug: T306134
Change-Id: I65d6269001920679878adcf5fb691ed3f6a643dd
---
M scripts/replace.py
1 file changed, 7 insertions(+), 3 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/replace.py b/scripts/replace.py
index 7b748d3..bfd599a 100755
--- a/scripts/replace.py
+++ b/scripts/replace.py
@@ -91,6 +91,8 @@
-always Don't prompt you for each replacement
+-quiet Don't prompt a message if a page keeps unchanged
+
-recursive Recurse replacement as long as possible. Be careful, this
might lead to an infinite loop.
@@ -532,6 +534,7 @@
self.available_options.update({
'addcat': None,
'allowoverlap': False,
+ 'quiet': False,
'recursive': False,
'sleep': 0.0,
'summary': None,
@@ -686,8 +689,9 @@
break
if new_text == original_text:
- pywikibot.output('No changes were necessary in '
- + page.title(as_link=True))
+ if not self.opt.quiet:
+ pywikibot.output('No changes were necessary in '
+ + page.title(as_link=True))
return
if self.opt.addcat:
@@ -933,7 +937,7 @@
fixes_set.append(value)
elif opt == '-sleep':
options['sleep'] = float(value)
- elif opt in ('-allowoverlap', '-always', '-recursive'):
+ elif opt in ('-allowoverlap', '-always', '-quiet', '-recursive'):
options[opt[1:]] = True
elif opt == '-nocase':
flags |= re.IGNORECASE
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/780808
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I65d6269001920679878adcf5fb691ed3f6a643dd
Gerrit-Change-Number: 780808
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/783817 )
Change subject: [IMPR] use_disambig BaseBot attribute to handle disambig skipping
......................................................................
[IMPR] use_disambig BaseBot attribute to handle disambig skipping
The use_disambigs attribute is introduced to determine whether to process
disambiguation pages. If set to True, only disambigs are used; if set to False,
disambigs are skipped. If None, both are processed.
Change-Id: I244c4a1c40c7337e9a4f35d5708af79b838fa336
---
M pywikibot/bot.py
M scripts/commonscat.py
M scripts/noreferences.py
3 files changed, 21 insertions(+), 12 deletions(-)
Approvals:
DannyS712: Looks good to me, but someone else must approve
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index b70f2f3..98dc354 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -1227,6 +1227,14 @@
The default counters are 'read', 'write' and 'skip'.
"""
+ use_disambigs = None # type: Optional[bool]
+ """Attribute to determine whether to use disambiguation pages. Set
+ it to True to use disambigs only, set it to False to skip disambigs.
+ If None both are processed.
+
+ .. versionadded:: 7.2
+ """
+
use_redirects = None # type: Optional[bool]
"""Attribute to determine whether to use redirect pages. Set it to
True to use redirects only, set it to False to skip redirects. If
@@ -1527,7 +1535,8 @@
.. versionadded:: 3.0
.. versionchanged:: 7.2
- use :attr:`use_redirects` to handle redirects
+ use :attr:`use_redirects` to handle redirects,
+ use :attr:`use_disambigs` to handle disambigs
:param page: Page object to be processed
"""
@@ -1539,6 +1548,14 @@
.format(page=page, not_='not ' if self.use_redirects else ''))
return True
+ if isinstance(self.use_disambigs, bool) \
+ and page.isDisambig() is not self.use_disambigs:
+ pywikibot.warning(
+ 'Page {page} on {page.site} is skipped because it is {not_}'
+ 'a disambig'
+ .format(page=page, not_='not ' if self.use_disambigs else ''))
+ return True
+
return False
def treat(self, page: Any) -> None:
diff --git a/scripts/commonscat.py b/scripts/commonscat.py
index 4f4bf22..d2b0915 100755
--- a/scripts/commonscat.py
+++ b/scripts/commonscat.py
@@ -233,21 +233,17 @@
CommonscatBot is a ConfigParserBot
"""
+ use_disambigs = False
use_redirects = False
update_options = {'summary': ''}
def skip_page(self, page):
- """Skip category redirects or disambigs."""
+ """Skip category redirects."""
if page.isCategoryRedirect():
pywikibot.warning(
'Page {page} on {page.site} is a category redirect. '
'Skipping.'.format(page=page))
return True
- if page.isDisambig():
- pywikibot.warning(
- 'Page {page} on {page.site} is a disambiguation. '
- 'Skipping.'.format(page=page))
- return True
return super().skip_page(page)
@staticmethod
diff --git a/scripts/noreferences.py b/scripts/noreferences.py
index 34ffb9e..fa0a37e 100755
--- a/scripts/noreferences.py
+++ b/scripts/noreferences.py
@@ -515,6 +515,7 @@
"""References section bot."""
+ use_disambigs = False
use_redirects = False
def __init__(self, **kwargs) -> None:
@@ -708,11 +709,6 @@
def skip_page(self, page):
"""Check whether the page could be processed."""
- if page.isDisambig():
- pywikibot.output('Page {} is a disambig; skipping.'
- .format(page.title(as_link=True)))
- return True
-
if self.site.sitename == 'wikipedia:en' and page.isIpEdit():
pywikibot.warning(
'Page {} is edited by IP. Possible vandalized'
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/783817
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I244c4a1c40c7337e9a4f35d5708af79b838fa336
Gerrit-Change-Number: 783817
Gerrit-PatchSet: 5
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: DannyS712 <dannys712.wiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/775347 )
Change subject: [IMPR] Deprecate RedirectPageBot and NoRedirectPageBot
......................................................................
[IMPR] Deprecate RedirectPageBot and NoRedirectPageBot
Instead of having multiple classes to determine the pages to be processed,
just use an attribute. As first step deprecate RedirectPageBot and
NoRedirectPageBot and use 'use_redirects' attribute instead.
Change-Id: Ibbc97a3ade4c04e881afcd7a27e631197f0db730
---
M pywikibot/bot.py
M pywikibot/specialbots/_unlink.py
M scripts/add_text.py
M scripts/basic.py
M scripts/commonscat.py
M scripts/cosmetic_changes.py
M scripts/fixing_redirects.py
M scripts/newitem.py
M scripts/noreferences.py
M scripts/parser_function_count.py
M scripts/redirect.py
M scripts/reflinks.py
12 files changed, 107 insertions(+), 69 deletions(-)
Approvals:
DannyS712: Looks good to me, but someone else must approve
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 7401883..b70f2f3 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -1202,28 +1202,39 @@
"""
Generic Bot to be subclassed.
- This class provides a run() method for basic processing of a
+ This class provides a :meth:`run` method for basic processing of a
generator one page at a time.
- If the subclass places a page generator in self.generator,
- Bot will process each page in the generator, invoking the method treat()
- which must then be implemented by subclasses.
+ If the subclass places a page generator in
+ :attr:`self.generator<generator>`, Bot will process each page in the
+ generator, invoking the method :meth:`treat` which must then be
+ implemented by subclasses.
- Each item processed by treat() must be a :py:obj:`pywikibot.page.BasePage`
- type. Use init_page() to upcast the type. To enable other types, set
- BaseBot.treat_page_type to an appropriate type; your bot should
- derive from BaseBot in that case and handle site properties.
+ Each item processed by :meth:`treat` must be a
+ :class:`pywikibot.page.BasePage` type. Use :meth:`init_page` to
+ upcast the type. To enable other types, set
+ :attr:`BaseBot.treat_page_type` to an appropriate type; your bot
+ should derive from :class:`BaseBot` in that case and handle site
+ properties.
If the subclass does not set a generator, or does not override
- treat() or run(), NotImplementedError is raised.
+ :meth:`treat` or :meth:`run`, NotImplementedError is raised.
- For bot options handling refer OptionHandler class above.
+ For bot options handling refer :class:`OptionHandler` class above.
.. versionchanged:: 7.0
- A counter attribute is provided which is a collections.Counter;
+ A counter attribute is provided which is a `collections.Counter`;
The default counters are 'read', 'write' and 'skip'.
"""
+ use_redirects = None # type: Optional[bool]
+ """Attribute to determine whether to use redirect pages. Set it to
+ True to use redirects only, set it to False to skip redirects. If
+ None both are processed.
+
+ .. versionadded:: 7.2
+ """
+
# Handler configuration.
# The values are the default values
# Extend this in subclasses!
@@ -1232,10 +1243,13 @@
'always': False, # By default ask for confirmation when putting a page
}
- # update_options can be used to update available_options;
- # do not use it if the bot class is to be derived but use
- # self.available_options.update(<dict>) initializer in such case
update_options = {} # type: Dict[str, Any]
+ """update_options can be used to update available_options;
+ do not use it if the bot class is to be derived but use
+ self.available_options.update(<dict>) initializer in such case.
+
+ .. versionadded:: 6.4
+ """
_current_page = None # type: Optional[pywikibot.page.BasePage]
@@ -1243,13 +1257,14 @@
"""Only accept 'generator' and options defined in available_options.
:param kwargs: bot options
- :keyword generator: a generator processed by run method
+ :keyword generator: a :attr:`generator` processed by :meth:`run` method
"""
if 'generator' in kwargs:
if hasattr(self, 'generator'):
pywikibot.warn('{} has a generator already. Ignoring argument.'
.format(self.__class__.__name__))
else:
+ #: generator processed by :meth:`run` method
self.generator = kwargs.pop('generator')
self.available_options.update(self.update_options)
@@ -1257,7 +1272,8 @@
self.counter = Counter()
self._generator_completed = False
- self.treat_page_type = pywikibot.page.BasePage # default type
+ #: instance variable to hold the default page type
+ self.treat_page_type = pywikibot.page.BasePage # type: Any
@property
@deprecated("self.counter['read']", since='7.0.0')
@@ -1510,14 +1526,27 @@
.. versionadded:: 3.0
+ .. versionchanged:: 7.2
+ use :attr:`use_redirects` to handle redirects
+
:param page: Page object to be processed
"""
+ if isinstance(self.use_redirects, bool) \
+ and page.isRedirectPage() is not self.use_redirects:
+ pywikibot.warning(
+ 'Page {page} on {page.site} is skipped because it is {not_}'
+ 'a redirect'
+ .format(page=page, not_='not ' if self.use_redirects else ''))
+ return True
+
return False
- def treat(self, page: 'pywikibot.page.BasePage') -> None:
+ def treat(self, page: Any) -> None:
"""Process one page (abstract method).
- :param page: Page object to be processed
+ :param page: Object to be processed, usually a
+ :class:`pywikibot.page.BasePage`. For other page types the
+ :attr:`treat_page_type` must be set.
"""
raise NotImplementedError('Method {}.treat() not implemented.'
.format(self.__class__.__name__))
@@ -1940,7 +1969,18 @@
class RedirectPageBot(CurrentPageBot):
- """A RedirectPageBot class which only treats redirects."""
+ """A RedirectPageBot class which only treats redirects.
+
+ .. deprecated:: 7.2
+ use BaseBot attribute 'use_redirects = True' instead
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Deprecate RedirectPageBot."""
+ issue_deprecation_warning('RedirectPageBot',
+ "BaseBot attribute 'use_redirects = True'",
+ since='7.2.0')
+ super().__init__(*args, **kwargs)
def skip_page(self, page: 'pywikibot.page.BasePage') -> bool:
"""Treat only redirect pages and handle IsNotRedirectPageError."""
@@ -1954,7 +1994,18 @@
class NoRedirectPageBot(CurrentPageBot):
- """A NoRedirectPageBot class which only treats non-redirects."""
+ """A NoRedirectPageBot class which only treats non-redirects.
+
+ .. deprecated:: 7.2
+ use BaseBot attribute 'use_redirects = False' instead
+ """
+
+ def __init__(self, *args, **kwargs):
+ """Deprecate NoRedirectPageBot."""
+ issue_deprecation_warning('RedirectPageBot',
+ "BaseBot attribute 'use_redirects = False'",
+ since='7.2.0')
+ super().__init__(*args, **kwargs)
def skip_page(self, page: 'pywikibot.page.BasePage') -> bool:
"""Treat only non-redirect pages and handle IsRedirectPageError."""
diff --git a/pywikibot/specialbots/_unlink.py b/pywikibot/specialbots/_unlink.py
index 76fd66f..70fe226 100644
--- a/pywikibot/specialbots/_unlink.py
+++ b/pywikibot/specialbots/_unlink.py
@@ -13,7 +13,6 @@
ChoiceException,
ExistingPageBot,
InteractiveReplace,
- NoRedirectPageBot,
)
from pywikibot.bot_choice import UnhandledAnswer
from pywikibot.editor import TextEditor
@@ -54,10 +53,12 @@
return answer
-class BaseUnlinkBot(ExistingPageBot, NoRedirectPageBot, AutomaticTWSummaryBot):
+class BaseUnlinkBot(ExistingPageBot, AutomaticTWSummaryBot):
"""A basic bot unlinking a given link from the current page."""
+ use_redirects = False
+
def __init__(self, **kwargs) -> None:
"""Redirect all parameters and add namespace as an available option."""
self.available_options.update({
diff --git a/scripts/add_text.py b/scripts/add_text.py
index 3cc2a96..b8b2e0e 100755
--- a/scripts/add_text.py
+++ b/scripts/add_text.py
@@ -68,11 +68,7 @@
import pywikibot
from pywikibot import config, pagegenerators, textlib
from pywikibot.backports import Dict, Sequence
-from pywikibot.bot import (
- AutomaticTWSummaryBot,
- ExistingPageBot,
- NoRedirectPageBot,
-)
+from pywikibot.bot import AutomaticTWSummaryBot, ExistingPageBot
ARGS_TYPE = Dict[str, Union[bool, str]]
@@ -101,10 +97,11 @@
docuReplacements = {'&params;': pagegenerators.parameterHelp} # noqa: N816
-class AddTextBot(AutomaticTWSummaryBot, ExistingPageBot, NoRedirectPageBot):
+class AddTextBot(AutomaticTWSummaryBot, ExistingPageBot):
"""A bot which adds a text to a page."""
+ use_redirects = False
summary_key = 'add_text-adding'
update_options = DEFAULT_ARGS
diff --git a/scripts/basic.py b/scripts/basic.py
index 9c07582..1bcd7ba 100755
--- a/scripts/basic.py
+++ b/scripts/basic.py
@@ -54,7 +54,6 @@
AutomaticTWSummaryBot,
ConfigParserBot,
ExistingPageBot,
- NoRedirectPageBot,
SingleSiteBot,
)
@@ -71,7 +70,6 @@
# CurrentPageBot, # Sets 'current_page'. Process it in treat_page method.
# # Not needed here because we have subclasses
ExistingPageBot, # CurrentPageBot which only treats existing pages
- NoRedirectPageBot, # CurrentPageBot which only treats non-redirects
AutomaticTWSummaryBot, # Automatically defines summary; needs summary_key
):
@@ -86,6 +84,7 @@
:type summary_key: str
"""
+ use_redirects = False # treats non-redirects only
summary_key = 'basic-changing'
update_options = {
diff --git a/scripts/commonscat.py b/scripts/commonscat.py
index aac502a..4f4bf22 100755
--- a/scripts/commonscat.py
+++ b/scripts/commonscat.py
@@ -46,7 +46,7 @@
import pywikibot
from pywikibot import i18n, pagegenerators
-from pywikibot.bot import ConfigParserBot, ExistingPageBot, NoRedirectPageBot
+from pywikibot.bot import ConfigParserBot, ExistingPageBot
from pywikibot.exceptions import InvalidTitleError
from pywikibot.textlib import add_text
@@ -225,7 +225,7 @@
}
-class CommonscatBot(ConfigParserBot, ExistingPageBot, NoRedirectPageBot):
+class CommonscatBot(ConfigParserBot, ExistingPageBot):
"""Commons categorisation bot.
@@ -233,6 +233,7 @@
CommonscatBot is a ConfigParserBot
"""
+ use_redirects = False
update_options = {'summary': ''}
def skip_page(self, page):
diff --git a/scripts/cosmetic_changes.py b/scripts/cosmetic_changes.py
index 296ea02..f432a61 100755
--- a/scripts/cosmetic_changes.py
+++ b/scripts/cosmetic_changes.py
@@ -32,17 +32,13 @@
For further information see pywikibot/cosmetic_changes.py
"""
#
-# (C) Pywikibot team, 2006-2021
+# (C) Pywikibot team, 2006-2022
#
# Distributed under the terms of the MIT license.
#
import pywikibot
from pywikibot import config, pagegenerators
-from pywikibot.bot import (
- AutomaticTWSummaryBot,
- ExistingPageBot,
- NoRedirectPageBot,
-)
+from pywikibot.bot import AutomaticTWSummaryBot, ExistingPageBot
from pywikibot.cosmetic_changes import CANCEL, CosmeticChangesToolkit
from pywikibot.exceptions import InvalidPageError
@@ -59,12 +55,11 @@
}
-class CosmeticChangesBot(AutomaticTWSummaryBot,
- ExistingPageBot,
- NoRedirectPageBot):
+class CosmeticChangesBot(AutomaticTWSummaryBot, ExistingPageBot):
"""Cosmetic changes bot."""
+ use_redirects = False
summary_key = 'cosmetic_changes-standalone'
update_options = {
'async': False,
diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py
index 3814a1a..bdb3700 100755
--- a/scripts/fixing_redirects.py
+++ b/scripts/fixing_redirects.py
@@ -30,7 +30,6 @@
from pywikibot.bot import (
AutomaticTWSummaryBot,
ExistingPageBot,
- NoRedirectPageBot,
SingleSiteBot,
suggest_help,
)
@@ -54,11 +53,11 @@
FEATURED_ARTICLES = 'Q4387444'
-class FixingRedirectBot(SingleSiteBot, ExistingPageBot, NoRedirectPageBot,
- AutomaticTWSummaryBot):
+class FixingRedirectBot(SingleSiteBot, ExistingPageBot, AutomaticTWSummaryBot):
"""Run over pages and resolve redirect links."""
+ use_redirects = False
ignore_save_related_errors = True
ignore_server_errors = True
summary_key = 'fixing_redirects-fixing'
diff --git a/scripts/newitem.py b/scripts/newitem.py
index d828cad..693a79b 100755
--- a/scripts/newitem.py
+++ b/scripts/newitem.py
@@ -20,7 +20,7 @@
"""
#
-# (C) Pywikibot team, 2014-2021
+# (C) Pywikibot team, 2014-2022
#
# Distributed under the terms of the MIT license.
#
@@ -30,7 +30,7 @@
import pywikibot
from pywikibot import pagegenerators
from pywikibot.backports import Set
-from pywikibot.bot import NoRedirectPageBot, WikidataBot
+from pywikibot.bot import WikidataBot
from pywikibot.exceptions import (
LockedPageError,
NoCreateError,
@@ -42,10 +42,11 @@
DELETION_TEMPLATES = ('Q4847311', 'Q6687153', 'Q21528265')
-class NewItemRobot(WikidataBot, NoRedirectPageBot):
+class NewItemRobot(WikidataBot):
"""A bot to create new items."""
+ use_redirects = False
treat_missing_item = True
update_options = {
'always': True,
diff --git a/scripts/noreferences.py b/scripts/noreferences.py
index 5805928..34ffb9e 100755
--- a/scripts/noreferences.py
+++ b/scripts/noreferences.py
@@ -38,7 +38,7 @@
import pywikibot
from pywikibot import i18n, pagegenerators, textlib
-from pywikibot.bot import ExistingPageBot, NoRedirectPageBot, SingleSiteBot
+from pywikibot.bot import ExistingPageBot, SingleSiteBot
from pywikibot.exceptions import LockedPageError
from pywikibot.pagegenerators import XMLDumpPageGenerator
@@ -511,10 +511,12 @@
XMLDumpPageGenerator, text_predicate=_match_xml_page_text)
-class NoReferencesBot(SingleSiteBot, ExistingPageBot, NoRedirectPageBot):
+class NoReferencesBot(SingleSiteBot, ExistingPageBot):
"""References section bot."""
+ use_redirects = False
+
def __init__(self, **kwargs) -> None:
"""Initializer."""
self.available_options.update({
diff --git a/scripts/parser_function_count.py b/scripts/parser_function_count.py
index 5cdc4cd..83d4566 100755
--- a/scripts/parser_function_count.py
+++ b/scripts/parser_function_count.py
@@ -60,14 +60,15 @@
import pywikibot
from pywikibot import pagegenerators
-from pywikibot.bot import ExistingPageBot, NoRedirectPageBot, SingleSiteBot
+from pywikibot.bot import ExistingPageBot, SingleSiteBot
-class ParserFunctionCountBot(SingleSiteBot,
- ExistingPageBot, NoRedirectPageBot):
+class ParserFunctionCountBot(SingleSiteBot, ExistingPageBot):
"""Bot class used for obtaining Parser function Count."""
+ use_redirects = False
+
update_options = {
'atleast': None,
'first': None,
diff --git a/scripts/redirect.py b/scripts/redirect.py
index 9be2554..277acf7 100755
--- a/scripts/redirect.py
+++ b/scripts/redirect.py
@@ -78,12 +78,7 @@
import pywikibot.data
from pywikibot import i18n, pagegenerators, xmlreader
from pywikibot.backports import Dict, List, Set, Tuple
-from pywikibot.bot import (
- ExistingPageBot,
- OptionHandler,
- RedirectPageBot,
- suggest_help,
-)
+from pywikibot.bot import ExistingPageBot, OptionHandler, suggest_help
from pywikibot.exceptions import (
CircularRedirectError,
InterwikiRedirectPageError,
@@ -388,10 +383,12 @@
continue
-class RedirectRobot(ExistingPageBot, RedirectPageBot):
+class RedirectRobot(ExistingPageBot):
"""Redirect bot."""
+ use_redirects = True
+
update_options = {
'limit': float('inf'),
'delete': False,
diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 06492d5..67283a7 100755
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -63,12 +63,7 @@
import pywikibot
from pywikibot import comms, config, i18n, pagegenerators, textlib
from pywikibot.backports import Match, removeprefix
-from pywikibot.bot import (
- ConfigParserBot,
- ExistingPageBot,
- NoRedirectPageBot,
- SingleSiteBot,
-)
+from pywikibot.bot import ConfigParserBot, ExistingPageBot, SingleSiteBot
from pywikibot.exceptions import (
FatalServerError,
Server414Error,
@@ -421,10 +416,7 @@
return text
-class ReferencesRobot(SingleSiteBot,
- ConfigParserBot,
- ExistingPageBot,
- NoRedirectPageBot):
+class ReferencesRobot(SingleSiteBot, ConfigParserBot, ExistingPageBot):
"""References bot.
@@ -432,6 +424,8 @@
ReferencesRobot is a ConfigParserBot
"""
+ use_redirects = False
+
update_options = {
'ignorepdf': False,
'limit': 0, # stop after n modified pages
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/775347
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ibbc97a3ade4c04e881afcd7a27e631197f0db730
Gerrit-Change-Number: 775347
Gerrit-PatchSet: 10
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: DannyS712 <dannys712.wiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged