jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1062743?usp=email )
Change subject: unusedfiles: remove Flow support
......................................................................
unusedfiles: remove Flow support
Flow support never worked with template substitution.
Anyway, Flow support is to be dropped, so there is no need to fix it.
Recently, a Flow talk page was only found on urwiki.
Bug: T372477
Change-Id: Icf51219ff7125ea3298f3f012a2540ec79aa3173
---
M scripts/unusedfiles.py
1 file changed, 30 insertions(+), 31 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
Meno25: Looks good to me, but someone else must approve
diff --git a/scripts/unusedfiles.py b/scripts/unusedfiles.py
index bf831b3..c424b82 100755
--- a/scripts/unusedfiles.py
+++ b/scripts/unusedfiles.py
@@ -18,7 +18,7 @@
-usertemplate: Use a custom template to warn the uploader.
"""
#
-# (C) Pywikibot team, 2007-2022
+# (C) Pywikibot team, 2007-2024
#
# Distributed under the terms of the MIT license.
#
@@ -35,7 +35,6 @@
SingleSiteBot,
)
from pywikibot.exceptions import Error, NoPageError, TranslationError
-from pywikibot.flow import Board
template_to_the_image = {
@@ -108,30 +107,39 @@
f'This script is not localized for {self.site} site;\n'
'try using -filetemplate:<template name>.')
+ def skip_page(self, image: pywikibot.page.FilePage) -> bool:
+ """Skip processing on repository images or if image is already tagged.
+
+ Use get_file_url() and file_is_shared() to confirm it is local
+ media rather than a local page with the same name as shared
+ media.
+ """
+ if not image.get_file_url() or image.file_is_shared() \
+ or 'http://' in image.text:
+ return True
+
+ if self.opt.filetemplate in image.text:
+ pywikibot.info(f'{image} done already')
+ return True
+
+ return super().skip_page(image)
+
def treat(self, image) -> None:
"""Process one image page."""
- # Use get_file_url() and file_is_shared() to confirm it is local media
- # rather than a local page with the same name as shared media.
- if (image.get_file_url() and not image.file_is_shared()
- and 'http://' not in image.text):
- if self.opt.filetemplate in image.text:
- pywikibot.info(f'{image} done already')
- return
+ self.append_text(image, '\n\n' + self.opt.filetemplate)
- self.append_text(image, '\n\n' + self.opt.filetemplate)
- if self.opt.nouserwarning:
- return
+ if self.opt.nouserwarning:
+ return
- uploader = image.oldest_file_info.user
- user = pywikibot.User(image.site, uploader)
- usertalkpage = user.getUserTalkPage()
- template2uploader = self.opt.usertemplate \
- % {'title': image.title()}
- msg2uploader = self.site.expand_text(template2uploader)
- if usertalkpage.is_flow_page():
- self.post_to_flow_board(usertalkpage, msg2uploader)
- else:
- self.append_text(usertalkpage, '\n\n' + msg2uploader + ' ~~~~')
+ uploader = image.oldest_file_info.user
+ user = pywikibot.User(image.site, uploader)
+ usertalkpage = user.getUserTalkPage()
+ msg2uploader = self.opt.usertemplate % {'title': image.title()}
+ if usertalkpage.is_flow_page():
+ pywikibot.warning(f'Unsupported Flow talkpage {usertalkpage};'
+ '\n uploader cannot be informed.')
+ else:
+ self.append_text(usertalkpage, '\n\n' + msg2uploader + ' ~~~~')
def append_text(self, page, apptext):
"""Append apptext to the page."""
@@ -149,15 +157,6 @@
self.current_page = page
self.put_current(text)
- @staticmethod
- def post_to_flow_board(page, post) -> None:
- """Post message as a Flow topic."""
- board = Board(page)
- header, rest = post.split('\n', 1)
- title = header.strip('=')
- content = rest.lstrip()
- board.new_topic(title, content)
-
def main(*args: str) -> None:
"""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1062743?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Icf51219ff7125ea3298f3f012a2540ec79aa3173
Gerrit-Change-Number: 1062743
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Aram <arambakr1620(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <dalangi-ctr(a)wikimedia.org>
Gerrit-Reviewer: Meno25 <meno25mail(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1062698?usp=email )
Change subject: [doc] use :wiki: role for documentation
......................................................................
[doc] use :wiki: role for documentation
Change-Id: Ifbd7462232c96b855a9faa20dbab9f5754050b16
---
M docs/glossary.rst
M docs/scripts/archive.rst
M pywikibot/family.py
M pywikibot/logging.py
M scripts/category.py
M scripts/create_isbn_edition.py
M tox.ini
7 files changed, 14 insertions(+), 16 deletions(-)
Approvals:
jenkins-bot: Verified
Xqt: Looks good to me, approved
diff --git a/docs/glossary.rst b/docs/glossary.rst
index f88a02e..73b0a5a 100644
--- a/docs/glossary.rst
+++ b/docs/glossary.rst
@@ -16,8 +16,7 @@
compat
The first Pywikibot package formerly known as *Pywikipediabot*
also called :term:`trunk` was started in 2003. MediaWiki didn't
- have an API so a `screen scrapping
- <https://en.wikipedia.org/wiki/Screen_scraper>`_ was used.
+    have an API so a :wiki:`screen scraping<Screen_scraper>` was used.
core
In 2007 a new branch of Pywikibot formerly known as
diff --git a/docs/scripts/archive.rst b/docs/scripts/archive.rst
index ff7adb6..5779860 100644
--- a/docs/scripts/archive.rst
+++ b/docs/scripts/archive.rst
@@ -119,7 +119,7 @@
**Script to copy self published files from English Wikipedia to Commons**
This bot is based on imagecopy.py and intended to be used to empty out
-https://en.wikipedia.org/wiki/Category:Self-published_work
+:wiki:`Category:Self-published_work`
This bot uses a graphical interface and may not work from commandline
only environment.
diff --git a/pywikibot/family.py b/pywikibot/family.py
index b5520cd..26cd685 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -715,8 +715,8 @@
"""
Do a conversion on the retrieved text from the Wiki.
- For example a X-conversion in Esperanto
- https://en.wikipedia.org/wiki/Esperanto_orthography#X-system.
+ For example a :wiki:`X-conversion in Esperanto
+ <Esperanto_orthography#X-system>`.
"""
return getText
@@ -724,8 +724,8 @@
"""
Do a conversion on the text to insert on the Wiki.
- For example a X-conversion in Esperanto
- https://en.wikipedia.org/wiki/Esperanto_orthography#X-system.
+ For example a :wiki:`X-conversion in Esperanto
+ <Esperanto_orthography#X-system>`.
"""
return putText
diff --git a/pywikibot/logging.py b/pywikibot/logging.py
index 1091e45..02bbce9 100644
--- a/pywikibot/logging.py
+++ b/pywikibot/logging.py
@@ -220,7 +220,7 @@
parameter *layer* was added.
.. seealso::
- :python:`Logger.log()<library/logging.html#logging.Logger.log>`
- - https://en.wikipedia.org/wiki/Pipeline_%28Unix%29
+ - :wiki:`Pipeline (Unix)`
"""
logoutput(msg, *args, level=STDOUT, **kwargs)
diff --git a/scripts/category.py b/scripts/category.py
index 756a800..a430341 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -1426,9 +1426,8 @@
.. note:: For details please read:
- - https://en.wikipedia.org/wiki/WP:SUBCAT
-
- - https://en.wikipedia.org/wiki/WP:DIFFUSE
+ - :wiki:`WP:SUBCAT`
+ - :wiki:`WP:DIFFUSE`
.. versionadded:: 7.0
"""
diff --git a/scripts/create_isbn_edition.py b/scripts/create_isbn_edition.py
index f5dcb5c..53a4b36 100755
--- a/scripts/create_isbn_edition.py
+++ b/scripts/create_isbn_edition.py
@@ -119,8 +119,8 @@
GNU General Public License v3.0, User:Geertivp
**Documentation:**
- * https://en.wikipedia.org/wiki/ISBN
- * https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes
+ * :wiki:`ISBN`
+ * :wiki:`List_of_ISO_639-1_codes`
* https://www.geeksforgeeks.org/searching-books-with-python/
* https://www.freecodecamp.org/news/python-json-how-to-convert-a-string-to-js…
* https://pypi.org/project/isbnlib/
@@ -213,12 +213,12 @@
* :phab:`T208134`
* :phab:`T138911`
* :phab:`T20814`
- * https://en.wikipedia.org/wiki/User:Citation_bot
+ * :wiki:`User:Citation_bot`
* https://meta.wikimedia.org/wiki/Community_Wishlist_Survey_2021/Wikidata/Bib…
* https://zenodo.org/record/55004#.YvwO4hTP1D8
**Other systems:**
- * https://en.wikipedia.org/wiki/bibliographic_database
+ * :wiki:`bibliographic_database`
* https://www.titelbank.nl/pls/ttb/f?p=103:4012:::NO::P4012_TTEL_ID:3496019&c…
.. versionadded:: 7.7
diff --git a/tox.ini b/tox.ini
index 1faab65..62d61a8 100644
--- a/tox.ini
+++ b/tox.ini
@@ -104,7 +104,7 @@
basepython = python3.12
commands =
rstcheck --version
- rstcheck -r --report-level WARNING --ignore-roles api,phab,source .
+ rstcheck -r --report-level WARNING --ignore-roles api,phab,source,wiki .
deps =
-rrequirements.txt
-rdocs/requirements.txt
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1062698?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ifbd7462232c96b855a9faa20dbab9f5754050b16
Gerrit-Change-Number: 1062698
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <dalangi-ctr(a)wikimedia.org>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1061456?usp=email )
Change subject: [IMPR] add -category option to delinker.py
......................................................................
[IMPR] add -category option to delinker.py
The -category option works as follows:
- retrieve pages from "Pages with missing files" listed on wikibase with
item Q4989282. Any other category can be given with this option.
- for every page found in this category process their image links
- skip further processing if the FilePage exists locally or in the
image repository
- also skip further processing if the file was not deleted. In that case
there is an invalid link found on the source page.
- finally delink the found image link
Bug: T372206
Change-Id: I49d9260f2cbcb7e98f1916da82b191119a0bf127
---
M scripts/delinker.py
1 file changed, 119 insertions(+), 36 deletions(-)
Approvals:
jenkins-bot: Verified
Aram: Looks good to me, but someone else must approve
Xqt: Looks good to me, approved
diff --git a/scripts/delinker.py b/scripts/delinker.py
index 975d424..cfb83a4 100755
--- a/scripts/delinker.py
+++ b/scripts/delinker.py
@@ -7,6 +7,11 @@
The following parameters are supported:
+-category: Retrieve pages to delink from "Pages with missing files"
+ category. Usually the category is found on Q4989282 wikibase
+ item but can be overwritten by giving the category title
+ with that option. *-since* option is ignored.
+
-exclude: If the deletion log contains this pattern, the file is not
delinked (default is 'no-delink').
@@ -27,6 +32,8 @@
.. versionadded:: 7.2
   This script is completely rewritten from compat branch.
+.. versionchanged:: 9.4
+ *-category* option was added.
"""
#
# (C) Pywikibot team, 2006-2024
@@ -52,14 +59,120 @@
class CommonsDelinker(SingleSiteBot, ConfigParserBot, AutomaticTWSummaryBot):
- """Bot to delink deleted images."""
+ """Base Delinker Bot."""
+
+ summary_key = 'delinker-delink'
+
+ def skip_page(self, page) -> bool:
+        """Skip pages which exist neither locally nor on the shared repo."""
+ pywikibot.info('.', newline=False)
+ if page.exists() or page.file_is_shared():
+ return True
+ return super().skip_page(page)
+
+ def treat(self, file_page):
+ """Set page to current page and delink that page."""
+ # use image_regex from image.py
+ namespace = file_page.site.namespaces[6]
+ escaped = case_escape(namespace.case,
+ file_page.title(with_ns=False),
+ underscore=True)
+ self.image_regex = re.compile(
+ r'\[\[ *(?:{})\s*:\s*{} *(?P<parameters>\|'
+ r'(?:[^\[\]]|\[\[[^\]]+\]\]|\[[^\]]+\])*|) *\]\]'
+ .format('|'.join(ignore_case(s) for s in namespace), escaped))
+
+ shown = False
+ for page in file_page.using_pages(
+ content=True, namespaces=self.site.namespaces.MAIN):
+ if not shown:
+ pywikibot.info('\n>>> Delinking <<lightgreen>>'
+ f'{file_page.title()}<<default>> <<<')
+ shown = True
+ super().treat(page)
+
+ def treat_page(self):
+ """Delink a single page."""
+ new = re.sub(self.image_regex, '', self.current_page.text)
+ self.put_current(new)
+
+
+class DelinkerFromCategory(CommonsDelinker):
+
+ """Bot to delink deleted images from pages found in category."""
+
+ pages_with_missing_files = 'Q4989282'
+
+ update_options = {
+ 'exclude': 'no-delink',
+ 'localonly': False,
+ 'category': True,
+ }
+
+ @property
+ def generator(self):
+        """Retrieve pages with missing files and yield their image links."""
+ if self.opt.category is True:
+ cat = self.site.page_from_repository(self.pages_with_missing_files)
+ else:
+ cat = pywikibot.Category(self.site, self.opt.category)
+ if not cat.exists():
+ cat = None
+
+ if not cat:
+ pywikibot.warning('No valid category given for generator')
+ return
+
+ for article in cat.articles(namespaces=self.site.namespaces.MAIN):
+ yield from article.imagelinks()
+
+ def init_page(self, item) -> pywikibot.page.FilePage:
+ """Upcast logevent to FilePage and combine edit summary."""
+ return pywikibot.FilePage(item, ignore_extension=True)
+
+ def skip_page(self, page) -> pywikibot.page.FilePage:
+ """Skip pages which aren't deleted on any repository."""
+ if super().skip_page(page):
+ return True
+
+ params = {
+ 'logtype': 'delete',
+ 'reverse': True,
+ 'page': 'File:' + page.title(underscore=True, with_ns=False),
+ }
+ try:
+ entry = next(self.site.logevents(**params))
+ except StopIteration:
+ try:
+ entry = next(self.site.image_repository().logevents(**params))
+ except StopIteration:
+ pywikibot.info()
+ pywikibot.warning(
+ f'unable to delink missing {page.title(as_link=True)}')
+ found = list(self.site.search(
+ page.title(),
+ namespaces=self.site.namespaces.MAIN,
+ total=1
+ ))
+ if found:
+ pywikibot.info('probably <<lightblue>>'
+ f'{found[0].title(as_link=True)}'
+ '<<default>> is meant')
+ return True
+
+ self.summary_parameters = dict(entry)
+ return False
+
+
+class DelinkerFromLog(CommonsDelinker):
+
+ """Bot to delink deleted images from deletion log."""
update_options = {
'exclude': 'no-delink',
'localonly': False,
'since': '',
}
- summary_key = 'delinker-delink'
@property
def generator(self):
@@ -90,38 +203,6 @@
self.summary_parameters = dict(item)
return pywikibot.FilePage(item.page(), ignore_extension=True)
- def skip_page(self, page) -> bool:
- """Skip pages which neither exists locally nor on shared repository."""
- pywikibot.info('.', newline=False)
- if page.exists() or page.file_is_shared():
- return True
- return super().skip_page(page)
-
- def treat(self, file_page):
- """Set page to current page and delink that page."""
- # use image_regex from image.py
- namespace = file_page.site.namespaces[6]
- escaped = case_escape(namespace.case,
- file_page.title(with_ns=False),
- underscore=True)
- self.image_regex = re.compile(
- r'\[\[ *(?:{})\s*:\s*{} *(?P<parameters>\|'
- r'(?:[^\[\]]|\[\[[^\]]+\]\]|\[[^\]]+\])*|) *\]\]'
- .format('|'.join(ignore_case(s) for s in namespace), escaped))
-
- shown = False
- for page in file_page.using_pages(content=True, namespaces=0):
- if not shown:
- pywikibot.info('\n>>> <<lightgreen>>Delinking '
- f'{file_page.title()}<<default>> <<<')
- shown = True
- super().treat(page)
-
- def treat_page(self):
- """Delink a single page."""
- new = re.sub(self.image_regex, '', self.current_page.text)
- self.put_current(new)
-
def teardown(self):
"""Save the last used logevent timestamp."""
if not hasattr(self, 'last_ts'):
@@ -153,11 +234,13 @@
opt = removeprefix(opt, '-')
if opt == 'localonly':
options[opt] = True
+ elif opt == 'category':
+ options[opt] = value or True
else:
options[opt] = value
- bot = CommonsDelinker(site=pywikibot.Site(), **options)
- bot.run()
+ bot = DelinkerFromCategory if options.get('category') else DelinkerFromLog
+ bot(**options).run()
if __name__ == '__main__':
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1061456?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I49d9260f2cbcb7e98f1916da82b191119a0bf127
Gerrit-Change-Number: 1061456
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Aram <arambakr1620(a)gmail.com>
Gerrit-Reviewer: D3r1ck01 <dalangi-ctr(a)wikimedia.org>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1061154?usp=email )
Change subject: [IMPR] Recover unlink.py
......................................................................
[IMPR] Recover unlink.py
- update and improve restored unlink.py
- update documentation
- update tests after adding unlink messages
Bug: T223826
Change-Id: Id93308289f398e664d6fd777b53b5bcaa1394b6c
---
M docs/scripts/archive.rst
M docs/scripts/unsorted.rst
M docs/scripts_ref/scripts.rst
M pywikibot/i18n.py
M scripts/CHANGELOG.rst
M scripts/README.rst
M scripts/unlink.py
7 files changed, 54 insertions(+), 33 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/docs/scripts/archive.rst b/docs/scripts/archive.rst
index d745d97..ff7adb6 100644
--- a/docs/scripts/archive.rst
+++ b/docs/scripts/archive.rst
@@ -254,12 +254,6 @@
**Nifty script to convert HTML-tables to MediaWiki's own syntax**
-unlink script
-=============
-
-**This bot unlinks a page on every page that links to it**
-
-
wikisourcetext script
=====================
diff --git a/docs/scripts/unsorted.rst b/docs/scripts/unsorted.rst
index a543041..7826315 100644
--- a/docs/scripts/unsorted.rst
+++ b/docs/scripts/unsorted.rst
@@ -84,6 +84,13 @@
:no-members:
:noindex:
+unlink script
+=============
+
+.. automodule:: scripts.unlink
+ :no-members:
+ :noindex:
+
watchlist script
================
diff --git a/docs/scripts_ref/scripts.rst b/docs/scripts_ref/scripts.rst
index 977a844..10397e5 100644
--- a/docs/scripts_ref/scripts.rst
+++ b/docs/scripts_ref/scripts.rst
@@ -259,6 +259,11 @@
.. automodule:: scripts.transwikiimport
+unlink script
+=============
+
+.. automodule:: scripts.unlink
+
unusedfiles script
==================
diff --git a/pywikibot/i18n.py b/pywikibot/i18n.py
index 7d1aa55..f3ac0fb 100644
--- a/pywikibot/i18n.py
+++ b/pywikibot/i18n.py
@@ -876,11 +876,11 @@
>>> from pywikibot import i18n
>>> bundles = sorted(i18n.bundles(stem=True))
>>> len(bundles)
- 38
+ 39
>>> bundles[:4]
['add_text', 'archivebot', 'basic', 'blockpageschecker']
>>> bundles[-5:]
- ['undelete', 'unprotect', 'unusedfiles', 'weblinkchecker', 'welcome']
+ ['unlink', 'unprotect', 'unusedfiles', 'weblinkchecker', 'welcome']
>>> 'pywikibot' in bundles
True
diff --git a/scripts/CHANGELOG.rst b/scripts/CHANGELOG.rst
index 21852ca..6c6f23b 100644
--- a/scripts/CHANGELOG.rst
+++ b/scripts/CHANGELOG.rst
@@ -1,6 +1,15 @@
Scripts Changelog
=================
+9.4.0
+-----
+
+unlink
+^^^^^^
+
+* unlink script was recovered
+
+
9.3.1
-----
diff --git a/scripts/README.rst b/scripts/README.rst
index 3815f29..881889d 100644
--- a/scripts/README.rst
+++ b/scripts/README.rst
@@ -151,6 +151,8 @@
| unusedfiles.py | Bot appends some text to all unused images and other |
| | text to the respective uploaders. |
+--------------------------+---------------------------------------------------------+
+| unlink.py | This bot unlinks a page on every page that links to it. |
++--------------------------+---------------------------------------------------------+
| upload.py | Upload an image to a wiki. |
+--------------------------+---------------------------------------------------------+
| watchlists.py | Allows access to the account's watchlist. |
diff --git a/scripts/unlink.py b/scripts/unlink.py
index 0d73d0c..ea8ed3f 100755
--- a/scripts/unlink.py
+++ b/scripts/unlink.py
@@ -1,14 +1,12 @@
-#!/usr/bin/python
-"""
-This bot unlinks a page on every page that links to it.
+#!/usr/bin/env python3
+"""This bot unlinks a page on every page that links to it.
This script understands this command-line argument:
- -namespace:n Number of namespace to process. The parameter can be used
- multiple times. It works in combination with all other
- parameters, except for the -start parameter. If you e.g.
- want to iterate over all user pages starting at User:M, use
- -start:User:M.
+-always Don't prompt you for each replacement.
+
+-namespace:n Number of namespace to process. The parameter can be used
+ multiple times.
Any other parameter will be regarded as the title of the page
that should be unlinked.
@@ -20,12 +18,21 @@
descriptions:
python pwb.py unlink "Foo bar" -namespace:0 -namespace:6
+
+
+.. versionchanged:: 6.0
+ script was archived.
+.. versionchanged:: 7.0
+ script was deleted.
+.. versionchanged:: 9.4
+ script was recovered.
"""
#
-# (C) Pywikibot team, 2007-2020
+# (C) Pywikibot team, 2007-2024
#
# Distributed under the terms of the MIT license.
#
+from __future__ import annotations
import pywikibot
from pywikibot.bot import SingleSiteBot
@@ -38,11 +45,11 @@
summary_key = 'unlink-unlinking'
- def __init__(self, pageToUnlink, **kwargs):
+ def __init__(self, page_title: str, **kwargs):
"""Initialize a UnlinkBot instance with the given page to unlink."""
super().__init__(**kwargs)
- self.pageToUnlink = pageToUnlink
- self.generator = pageToUnlink.getReferences(
+ self.pageToUnlink = pywikibot.Page(self.site, page_title) # noqa: N803
+ self.generator = self.pageToUnlink.getReferences(
namespaces=self.opt.namespaces, content=True)
@property
@@ -55,36 +62,33 @@
self.unlink(self.pageToUnlink)
-def main(*args):
- """
- Process command line arguments and invoke bot.
+def main(*args: str) -> None:
+ """Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
- @param args: command line arguments
- @type args: str
+ :param args: command line arguments
"""
# This temporary string is used to read the title
# of the page that should be unlinked.
- page_title = None
+ page_title: str = ''
options = {}
for arg in pywikibot.handle_args(args):
- if arg.startswith('-namespace:'):
- if 'namespaces' not in options:
- options['namespaces'] = []
+ opt, _, value = arg.partition(':')
+ if opt == '-namespace':
+ options.setdefault('namespaces', [])
try:
- options['namespaces'].append(int(arg[11:]))
+ options['namespaces'].append(int(value))
except ValueError:
- options['namespaces'].append(arg[11:])
+ options['namespaces'].append(value)
elif arg == '-always':
options['always'] = True
else:
page_title = arg
if page_title:
- page = pywikibot.Page(pywikibot.Site(), page_title)
- bot = UnlinkBot(page, **options)
+ bot = UnlinkBot(page_title, **options)
bot.run()
else:
pywikibot.bot.suggest_help(missing_parameters=['page title'])
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1061154?usp=email
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings?usp=email
Gerrit-MessageType: merged
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Id93308289f398e664d6fd777b53b5bcaa1394b6c
Gerrit-Change-Number: 1061154
Gerrit-PatchSet: 6
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <dalangi-ctr(a)wikimedia.org>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot