jenkins-bot submitted this change.

Approvals:
  Xqt: Looks good to me, approved
  jenkins-bot: Verified
[6.0] Archive unused scripts and remove already archived scripts

Any archived script may be unarchived upon request on the task.

Tests updated/archived accordingly.

Bug: T223826
Change-Id: Ic694a413623b7e2ffed9cc7e8d64d191df3c1248
---
M docs/scripts/scripts.rst
M scripts/README.rst
R scripts/archive/blockpageschecker.py
R scripts/archive/capitalize_redirects.py
R scripts/archive/casechecker.py
R scripts/archive/catall.py
D scripts/archive/cfd.py
R scripts/archive/commons_link.py
R scripts/archive/commonscat.py
R scripts/archive/create_categories.py
R scripts/archive/data_ingestion.py
R scripts/archive/disambredir.py
R scripts/archive/djvutext.py
R scripts/archive/editarticle.py
D scripts/archive/featured.py
R scripts/archive/flickrripper.py
R scripts/archive/followlive.py
R scripts/archive/freebasemappingupload.py
R scripts/archive/image.py
R scripts/archive/imagecopy.py
R scripts/archive/imagecopy_self.py
R scripts/archive/imageharvest.py
R scripts/archive/imagerecat.py
R scripts/archive/imageuncat.py
R scripts/archive/interwiki.py
R scripts/archive/isbn.py
R scripts/archive/lonelypages.py
R scripts/archive/makecat.py
R scripts/archive/match_images.py
R scripts/archive/ndashredir.py
R scripts/archive/nowcommons.py
R scripts/archive/patrol.py
R scripts/archive/piper.py
R scripts/archive/reflinks.py
R scripts/archive/replicate_wiki.py
R scripts/archive/selflink.py
R scripts/archive/spamremove.py
R scripts/archive/standardize_interwiki.py
R scripts/archive/states_redirect.py
R scripts/archive/surnames_redirects.py
R scripts/archive/table2wiki.py
R scripts/archive/unlink.py
R scripts/archive/unusedfiles.py
R scripts/archive/watchlist.py
R scripts/archive/wikisourcetext.py
M scripts/interwikidata.py
M tests/__init__.py
R tests/archive/data_ingestion_tests.py
R tests/archive/disambredir_tests.py
R tests/archive/imagecopy_tests.py
R tests/archive/isbn_tests.py
R tests/archive/patrolbot_tests.py
R tests/archive/reflinks_tests.py
M tests/pwb_tests.py
M tests/script_tests.py
M tox.ini
56 files changed, 111 insertions(+), 1,223 deletions(-)
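
Downstream code that imported one of the moved modules needs the new scripts.archive package path; the modules themselves are unchanged (similarity index 100%). A minimal sketch, assuming scripts.archive is importable as a package and using the archived isbn script as a hypothetical example:

    # before this change:
    #     from scripts.isbn import main
    # after this change (assumes scripts.archive is an importable package):
    from scripts.archive.isbn import main

    main()  # behaviour is unchanged; only the module path moved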

diff --git a/docs/scripts/scripts.rst b/docs/scripts/scripts.rst
index af4bab2..0877531 100644
--- a/docs/scripts/scripts.rst
+++ b/docs/scripts/scripts.rst
@@ -31,26 +31,6 @@

.. automodule:: scripts.basic

-scripts.blockpageschecker script
---------------------------------
-
-.. automodule:: scripts.blockpageschecker
-
-scripts.capitalize\_redirects script
-------------------------------------
-
-.. automodule:: scripts.capitalize_redirects
-
-scripts.casechecker script
---------------------------
-
-.. automodule:: scripts.casechecker
-
-scripts.catall script
----------------------
-
-.. automodule:: scripts.catall
-
scripts.category script
-----------------------

@@ -81,16 +61,6 @@

.. automodule:: scripts.clean_sandbox

-scripts.commons\_link script
-----------------------------
-
-.. automodule:: scripts.commons_link
-
-scripts.commonscat script
--------------------------
-
-.. automodule:: scripts.commonscat
-
scripts.coordinate\_import script
---------------------------------

@@ -101,61 +71,21 @@

.. automodule:: scripts.cosmetic_changes

-scripts.create\_categories script
----------------------------------
-
-.. automodule:: scripts.create_categories
-
-scripts.data\_ingestion script
-------------------------------
-
-.. automodule:: scripts.data_ingestion
-
scripts.delete script
---------------------

.. automodule:: scripts.delete

-scripts.disambredir script
---------------------------
-
-.. automodule:: scripts.disambredir
-
-scripts.djvutext script
------------------------
-
-.. automodule:: scripts.djvutext
-
scripts.download\_dump script
-----------------------------

.. automodule:: scripts.download_dump

-scripts.editarticle script
---------------------------
-
-.. automodule:: scripts.editarticle
-
scripts.fixing\_redirects script
--------------------------------

.. automodule:: scripts.fixing_redirects

-scripts.flickrripper script
----------------------------
-
-.. automodule:: scripts.flickrripper
-
-scripts.followlive script
--------------------------
-
-.. automodule:: scripts.followlive
-
-scripts.freebasemappingupload script
-------------------------------------
-
-.. automodule:: scripts.freebasemappingupload
-
scripts.harvest\_template script
--------------------------------

@@ -166,81 +96,21 @@

.. automodule:: scripts.illustrate_wikidata

-scripts.image script
---------------------
-
-.. automodule:: scripts.image
-
-scripts.imagecopy script
-------------------------
-
-.. automodule:: scripts.imagecopy
-
-scripts.imagecopy\_self script
-------------------------------
-
-.. automodule:: scripts.imagecopy_self
-
-scripts.imageharvest script
----------------------------
-
-.. automodule:: scripts.imageharvest
-
-scripts.imagerecat script
--------------------------
-
-.. automodule:: scripts.imagerecat
-
scripts.imagetransfer script
----------------------------

.. automodule:: scripts.imagetransfer

-scripts.imageuncat script
--------------------------
-
-.. automodule:: scripts.imageuncat
-
-scripts.interwiki script
-------------------------
-
-.. automodule:: scripts.interwiki
-
scripts.interwikidata script
----------------------------

.. automodule:: scripts.interwikidata

-scripts.isbn script
--------------------
-
-.. automodule:: scripts.isbn
-
scripts.listpages script
------------------------

.. automodule:: scripts.listpages

-scripts.login script
---------------------
-
-.. automodule:: scripts.login
-
-scripts.lonelypages script
---------------------------
-
-.. automodule:: scripts.lonelypages
-
-scripts.makecat script
-----------------------
-
-.. automodule:: scripts.makecat
-
-scripts.match\_images script
-----------------------------
-
-.. automodule:: scripts.match_images
-
scripts.misspelling script
--------------------------

@@ -251,11 +121,6 @@

.. automodule:: scripts.movepages

-scripts.ndashredir script
--------------------------
-
-.. automodule:: scripts.ndashredir
-
scripts.newitem script
----------------------

@@ -266,11 +131,6 @@

.. automodule:: scripts.noreferences

-scripts.nowcommons script
--------------------------
-
-.. automodule:: scripts.nowcommons
-
scripts.pagefromfile script
---------------------------

@@ -281,16 +141,6 @@

.. automodule:: scripts.parser_function_count

-scripts.patrol script
----------------------
-
-.. automodule:: scripts.patrol
-
-scripts.piper script
---------------------
-
-.. automodule:: scripts.piper
-
scripts.protect script
----------------------

@@ -300,31 +150,16 @@
-----------------------
.. automodule:: scripts.redirect

-scripts.reflinks script
------------------------
-
-.. automodule:: scripts.reflinks
-
scripts.replace script
----------------------

.. automodule:: scripts.replace

-scripts.replicate\_wiki script
-------------------------------
-
-.. automodule:: scripts.replicate_wiki
-
scripts.revertbot script
------------------------

.. automodule:: scripts.revertbot

-scripts.selflink script
------------------------
-
-.. automodule:: scripts.selflink
-
scripts.shell script
--------------------

@@ -335,36 +170,11 @@

.. automodule:: scripts.solve_disambiguation

-scripts.spamremove script
--------------------------
-
-.. automodule:: scripts.spamremove
-
scripts.speedy\_delete script
-----------------------------

.. automodule:: scripts.speedy_delete

-scripts.standardize\_interwiki script
--------------------------------------
-
-.. automodule:: scripts.standardize_interwiki
-
-scripts.states\_redirect script
--------------------------------
-
-.. automodule:: scripts.states_redirect
-
-scripts.surnames\_redirects script
-----------------------------------
-
-.. automodule:: scripts.surnames_redirects
-
-scripts.table2wiki script
--------------------------
-
-.. automodule:: scripts.table2wiki
-
scripts.template script
-----------------------

@@ -385,16 +195,6 @@

.. automodule:: scripts.transferbot

-scripts.unlink script
----------------------
-
-.. automodule:: scripts.unlink
-
-scripts.unusedfiles script
---------------------------
-
-.. automodule:: scripts.unusedfiles
-
scripts.upload script
---------------------

@@ -405,11 +205,6 @@

.. automodule:: scripts.version

-scripts.watchlist script
-------------------------
-
-.. automodule:: scripts.watchlist
-
scripts.weblinkchecker script
-----------------------------

@@ -420,9 +215,4 @@

.. automodule:: scripts.welcome

-scripts.wikisourcetext script
------------------------------
-
-.. automodule:: scripts.wikisourcetext
-

diff --git a/scripts/README.rst b/scripts/README.rst
index 8a2ec98..e9e6d69 100644
--- a/scripts/README.rst
+++ b/scripts/README.rst
@@ -27,17 +27,6 @@
+------------------------+---------------------------------------------------------+
| basic.py | Is a template from which simple bots can be made. |
+------------------------+---------------------------------------------------------+
- | blockpagechecker.py | Deletes any protection templates that are on pages |
- | | which aren't actually protected. |
- +------------------------+---------------------------------------------------------+
- | capitalize_redirects.py| Script to create a redirect of capitalize articles. |
- +------------------------+---------------------------------------------------------+
- | casechecker.py | Script to enumerate all pages in the wikipedia and |
- | | find all titles with mixed Latin and Cyrillic |
- | | alphabets. |
- +------------------------+---------------------------------------------------------+
- | catall.py | Add or change categories on a number of pages. |
- +------------------------+---------------------------------------------------------+
| category.py | Add a category link to all pages mentioned on a page, |
| | change or remove category tags |
+------------------------+---------------------------------------------------------+
@@ -56,13 +45,6 @@
+------------------------+---------------------------------------------------------+
| clean_sandbox.py | This bot resets a (user) sandbox with predefined text. |
+------------------------+---------------------------------------------------------+
- | commons_link.py | This robot include commons template to linking Commons |
- | | and your wiki project. |
- +------------------------+---------------------------------------------------------+
- | commonscat.py | Adds {{commonscat}} to Wikipedia categories (or |
- | | articles), if other language wikipedia already has such |
- | | a template |
- +------------------------+---------------------------------------------------------+
| coordinate_import.py | Coordinate importing script. |
+------------------------+---------------------------------------------------------+
| #copyright.py | This robot checks copyright text in Google, Yahoo! and |
@@ -76,93 +58,41 @@
| cosmetic_changes.py | Can do slight modifications to a wiki page source code |
| | such that the code looks cleaner. |
+------------------------+---------------------------------------------------------+
- | create_categories.py | Program to batch create categories. |
- +------------------------+---------------------------------------------------------+
- | data_ingestion.py | A generic bot to do batch uploading to Commons. |
- +------------------------+---------------------------------------------------------+
| delete.py | This script can be used to delete pages en masse. |
+------------------------+---------------------------------------------------------+
- | disambredir.py | Changing redirect names in disambiguation pages. |
- +------------------------+---------------------------------------------------------+
- | djvutext.py | Extracts OCR text from djvu files and uploads onto |
- | | pages in the "Page" namespace on Wikisource. |
- +------------------------+---------------------------------------------------------+
| download_dump.py | Downloads dumps from dumps.wikimedia.org |
+------------------------+---------------------------------------------------------+
- | editarticle.py | Edit a Wikipedia article with your favourite editor |
- +------------------------+---------------------------------------------------------+
| fixing_redirects.py | Correct all redirect links of processed pages. |
+------------------------+---------------------------------------------------------+
- | flickrripper.py | Upload images from Flickr easily. |
- +------------------------+---------------------------------------------------------+
- | followlive.py | follow new articles on a wikipedia and flag them |
- | | with a template. |
- +------------------------++--------------------------------------------------------+
- | freebasemappingupload.py| Docstring fixes in scripts |
- +------------------------++--------------------------------------------------------+
| harvest_template.py | Template harvesting script. |
+------------------------+---------------------------------------------------------+
| illustrate_wikidata.py | Bot to add images to Wikidata items. |
+------------------------+---------------------------------------------------------+
- | image.py | This script can be used to change one image to another |
- | | or remove an image entirely. |
- +------------------------+---------------------------------------------------------+
- | imagecopy.py | Copies images from a Wikimedia wiki to Commons |
- +------------------------+---------------------------------------------------------+
- | imagecopy_self.py | Copy self published files from the English Wikipedia to |
- | | Commons. |
- +------------------------+---------------------------------------------------------+
- | imageharvest.py | Bot for getting multiple images from an external site. |
- +------------------------+---------------------------------------------------------+
- | iamgerecat.py | Try to find categories for media on Commons. |
- +------------------------+---------------------------------------------------------+
| imagetransfer.py | Given a wiki page, check the interwiki links for |
| | images, and let the user choose among them for |
| | images to upload. |
+------------------------+---------------------------------------------------------+
- | imageuncat.py | Adds uncat template to images without categories at |
- | | Commons |
- +------------------------+---------------------------------------------------------+
| #inline_images.py | This bot looks for images that are linked inline |
| | (i.e., they are hosted from an external server and |
| | hotlinked). |
+------------------------+---------------------------------------------------------+
- | interwiki.py | A robot to check interwiki links on all pages (or |
- | | a range of pages) of a wiki. |
- +------------------------+---------------------------------------------------------+
| interwikidata.py | Script to handle interwiki links based on Wikibase. |
+------------------------+---------------------------------------------------------+
- | isbn.py | Bot to convert all ISBN-10 codes to the ISBN-13 |
- | | format. |
- +------------------------+---------------------------------------------------------+
| listpages.py | Print a list of pages, as defined by page generator |
| | parameters. |
+------------------------+---------------------------------------------------------+
| login.py | Script to log the bot in to a wiki account. |
+------------------------+---------------------------------------------------------+
- | lonelypages.py | Place a template on pages which are not linked to by |
- | | other pages, and are therefore lonely |
- +------------------------+---------------------------------------------------------+
- | makecat.py | Given an existing or new category, find pages for that |
- | | category. |
- +------------------------+---------------------------------------------------------+
- | match_images.py | Match two images based on histograms. |
- +------------------------+---------------------------------------------------------+
| misspelling.py | Similar to solve_disambiguation.py. It is supposed to |
| | fix links that contain common spelling mistakes. |
+------------------------+---------------------------------------------------------+
| movepages.py | Bot to move pages to another title. |
+------------------------+---------------------------------------------------------+
- | ndashredir.py | Creates hyphenated redirects to articles with n dash |
- | | or m dash in their title. |
- +------------------------+---------------------------------------------------------+
| newitem.py | Script creates new items on Wikidata based on criteria. |
+------------------------+---------------------------------------------------------+
| noreferences.py | Searches for pages where <references /> is missing |
| | although a <ref> tag is present, and in that case adds |
| | a new references section. |
+------------------------+---------------------------------------------------------+
- | nowcommons.py | This bot can delete images with NowCommons template. |
- +------------------------+---------------------------------------------------------+
| pagefromfile.py | This bot takes its input from a file that contains a |
| | number of pages to be put on the wiki. |
+------------------------+---------------------------------------------------------+
@@ -174,46 +104,21 @@
| patrol.py | Obtains a list of pages and marks the edits as patrolled|
| | based on a whitelist. |
+------------------------+---------------------------------------------------------+
- | piper.py | Pipes article text through external program(s) on |
- | | STDIN and collects its STDOUT which is used as the |
- | | new article text if it differs from the original. |
- +------------------------+---------------------------------------------------------+
- | protect.py | Protect and unprotect pages en masse. |
- +------------------------+---------------------------------------------------------+
| redirect.py | Fix double redirects and broken redirects. Note: |
| | solve_disambiguation also has functions which treat |
| | redirects. |
+------------------------+---------------------------------------------------------+
- | reflinks.py | Search for references which are only made of a link |
- | | without title and fetch the html title from the link to |
- | | use it as the title of the wiki link in the reference. |
- +------------------------+---------------------------------------------------------+
| replace.py | Search articles for a text and replace it by another |
| | text. Both texts are set in two configurable |
| | text files. The bot can either work on a set of given |
| | pages or crawl an SQL dump. |
+------------------------+---------------------------------------------------------+
- | replicate_wiki.py | Replicates pages in wiki to a second wiki within family|
- +------------------------+---------------------------------------------------------+
| revertbot.py | Revert edits. |
+------------------------+---------------------------------------------------------+
- | selflink.py | This bot goes over multiple pages of the home wiki, |
- | | searches for selflinks, and allows removing them. |
- +------------------------+---------------------------------------------------------+
| shell.py | Spawns an interactive Python shell |
+------------------------+---------------------------------------------------------+
| solve_disambiguation.py| Interactive robot doing disambiguation. |
+------------------------+---------------------------------------------------------+
- | spamremove.py | Remove links that are being or have been spammed. |
- +------------------------+--+------------------------------------------------------+
- | standardize_interwiki.py | A robot that downloads a page, and reformats the |
- | | interwiki links in a standard way (i.e. move all |
- | | of them to the bottom or the top, with the same |
- | | separator, in the right order). |
- +------------------------+--+------------------------------------------------------+
- | states-redirect.py | A robot to add redirects to cities for state |
- | | abbreviations. |
- +------------------------+---------------------------------------------------------+
| speedy_delete.py | Help sysops to quickly check and/or delete pages listed |
| | for speedy deletion. |
+------------------------+---------------------------------------------------------+
@@ -226,10 +131,6 @@
| | [[Special:Statistics]] in a table on a wiki page. |
| | Thus it creates and updates a statistics wikitable. |
+------------------------+---+-----------------------------------------------------+
- | surnames_redirects.py | Bot to create redirects based on name order. |
- +------------------------+---------------------------------------------------------+
- | table2wiki.py | Semi-automatic converting HTML-tables to wiki-tables |
- +------------------------+---------------------------------------------------------+
| template.py | change one template (that is {{...}}) into another. |
+------------------------+---------------------------------------------------------+
| templatecount.py | Display the list of pages transcluding a given list |
@@ -240,34 +141,127 @@
+------------------------+---------------------------------------------------------+
| transferbot.py | Transfers pages from a source wiki to a target wiki |
+------------------------+---------------------------------------------------------+
- | unlink.py | This bot unlinks a page on every page that links to it. |
- +------------------------+---------------------------------------------------------+
- | unusedfiles.py | Bot appends some text to all unused images and other |
- | | text to the respective uploaders. |
- +------------------------+---------------------------------------------------------+
| upload.py | upload an image to a wiki. |
+------------------------+---------------------------------------------------------+
| version.py | Outputs Pywikibot's revision number, Python's version |
| | and OS used. |
+------------------------+---------------------------------------------------------+
- | watchlists.py | Information retrieved by watchlist.py will be stored |
- +------------------------+---------------------------------------------------------+
| weblinkchecker.py | Check if external links are still working. |
+------------------------+---------------------------------------------------------+
| welcome.py | Script to welcome new users. |
+------------------------+---------------------------------------------------------+
- | wikisourcetext.py | This bot applies to Wikisource sites to upload text. |
- +------------------------+---------------------------------------------------------+


+------------------------+---------------------------------------------------------+
| archive | Scripts no longer maintained. |
+========================+=========================================================+
- | cfd.py | Processes the categories for discussion working page. |
- | | It parses out the actions that need to be taken as a |
- | | result of CFD discussions and performs them. |
+ | blockpageschecker.py | Deletes any protection templates that are on pages |
+ | | which aren't actually protected. |
+------------------------+---------------------------------------------------------+
- | featured.py | A robot to check feature articles. |
+ | capitalize_redirects.py| Script to create redirects for capitalized article |
+ | | titles. |
+ +------------------------+---------------------------------------------------------+
+ | casechecker.py | Script to enumerate all pages of a wiki and find |
+ | | all titles with mixed Latin and Cyrillic |
+ | | alphabets. |
+ +------------------------+---------------------------------------------------------+
+ | catall.py | Add or change categories on a number of pages. |
+ +------------------------+---------------------------------------------------------+
+ | commons_link.py | This bot adds a commons template to link Commons |
+ | | and your wiki project. |
+ +------------------------+---------------------------------------------------------+
+ | commonscat.py | Adds {{commonscat}} to Wikipedia categories (or |
+ | | articles) if another language Wikipedia already has |
+ | | such a template. |
+ +------------------------+---------------------------------------------------------+
+ | create_categories.py | Program to batch create categories. |
+ +------------------------+---------------------------------------------------------+
+ | data_ingestion.py | A generic bot to do batch uploading to Commons. |
+ +------------------------+---------------------------------------------------------+
+ | disambredir.py | Changing redirect names in disambiguation pages. |
+ +------------------------+---------------------------------------------------------+
+ | djvutext.py | Extracts OCR text from djvu files and uploads onto |
+ | | pages in the "Page" namespace on Wikisource. |
+ +------------------------+---------------------------------------------------------+
+ | editarticle.py | Edit a Wikipedia article with your favourite editor |
+ +------------------------+---------------------------------------------------------+
+ | flickrripper.py | Upload images from Flickr easily. |
+ +------------------------+---------------------------------------------------------+
+ | followlive.py | Follow new articles on a wiki and flag them |
+ | | with a template. |
+ +------------------------++--------------------------------------------------------+
+ | freebasemappingupload.py| Script to upload the mappings of Freebase to Wikidata. |
+ +------------------------++--------------------------------------------------------+
+ | image.py | This script can be used to change one image to another |
+ | | or remove an image entirely. |
+ +------------------------+---------------------------------------------------------+
+ | imagecopy.py | Copies images from a Wikimedia wiki to Commons |
+ +------------------------+---------------------------------------------------------+
+ | imagecopy_self.py | Copy self-published files from the English Wikipedia to |
+ | | Commons. |
+ +------------------------+---------------------------------------------------------+
+ | imageharvest.py | Bot for getting multiple images from an external site. |
+ +------------------------+---------------------------------------------------------+
+ | imagerecat.py | Try to find categories for media on Commons. |
+ +------------------------+---------------------------------------------------------+
+ | imageuncat.py | Adds uncat template to images without categories at |
+ | | Commons |
+ +------------------------+---------------------------------------------------------+
+ | interwiki.py | A robot to check interwiki links on all pages (or |
+ | | a range of pages) of a wiki. |
+ +------------------------+---------------------------------------------------------+
+ | isbn.py | Bot to convert all ISBN-10 codes to the ISBN-13 |
+ | | format. |
+ +------------------------+---------------------------------------------------------+
+ | lonelypages.py | Place a template on pages which are not linked to by |
+ | | other pages, and are therefore lonely |
+ +------------------------+---------------------------------------------------------+
+ | makecat.py | Given an existing or new category, find pages for that |
+ | | category. |
+ +------------------------+---------------------------------------------------------+
+ | match_images.py | Match two images based on histograms. |
+ +------------------------+---------------------------------------------------------+
+ | ndashredir.py | Creates hyphenated redirects to articles with n dash |
+ | | or m dash in their title. |
+ +------------------------+---------------------------------------------------------+
+ | nowcommons.py | This bot can delete images with NowCommons template. |
+ +------------------------+---------------------------------------------------------+
+ | piper.py | Pipes article text through external program(s) on |
+ | | STDIN and collects its STDOUT which is used as the |
+ | | new article text if it differs from the original. |
+ +------------------------+---------------------------------------------------------+
+ | protect.py | Protect and unprotect pages en masse. |
+ +------------------------+---------------------------------------------------------+
+ | reflinks.py | Search for references which are only made of a link |
+ | | without title and fetch the html title from the link to |
+ | | use it as the title of the wiki link in the reference. |
+ +------------------------+---------------------------------------------------------+
+ | replicate_wiki.py | Replicates pages of a wiki to a second wiki within one |
+ | | family. |
+ +------------------------+---------------------------------------------------------+
+ | selflink.py | This bot goes over multiple pages of the home wiki, |
+ | | searches for selflinks, and allows removing them. |
+ +------------------------+---------------------------------------------------------+
+ | spamremove.py | Remove links that are being or have been spammed. |
+ +------------------------+--+------------------------------------------------------+
+ | standardize_interwiki.py | A robot that downloads a page, and reformats the |
+ | | interwiki links in a standard way (i.e. move all |
+ | | of them to the bottom or the top, with the same |
+ | | separator, in the right order). |
+ +------------------------+--+------------------------------------------------------+
+ | states_redirect.py | A robot to add redirects to cities for state |
+ | | abbreviations. |
+ +------------------------+---------------------------------------------------------+
+ | surnames_redirects.py | Bot to create redirects based on name order. |
+ +------------------------+---------------------------------------------------------+
+ | table2wiki.py | Semi-automatically converts HTML tables to wiki tables. |
+ +------------------------+---------------------------------------------------------+
+ | unlink.py | This bot unlinks a page on every page that links to it. |
+ +------------------------+---------------------------------------------------------+
+ | unusedfiles.py | Bot appends some text to all unused images and other |
+ | | text to the respective uploaders. |
+ +------------------------+---------------------------------------------------------+
+ | watchlist.py | Allows access to the bot account's watchlist. |
+ +------------------------+---------------------------------------------------------+
+ | wikisourcetext.py | Bot to upload text to Wikisource sites. |
+------------------------+---------------------------------------------------------+


diff --git a/scripts/blockpageschecker.py b/scripts/archive/blockpageschecker.py
similarity index 100%
rename from scripts/blockpageschecker.py
rename to scripts/archive/blockpageschecker.py
diff --git a/scripts/capitalize_redirects.py b/scripts/archive/capitalize_redirects.py
similarity index 100%
rename from scripts/capitalize_redirects.py
rename to scripts/archive/capitalize_redirects.py
diff --git a/scripts/casechecker.py b/scripts/archive/casechecker.py
similarity index 100%
rename from scripts/casechecker.py
rename to scripts/archive/casechecker.py
diff --git a/scripts/catall.py b/scripts/archive/catall.py
similarity index 100%
rename from scripts/catall.py
rename to scripts/archive/catall.py
diff --git a/scripts/archive/cfd.py b/scripts/archive/cfd.py
deleted file mode 100755
index 287c907..0000000
--- a/scripts/archive/cfd.py
+++ /dev/null
@@ -1,229 +0,0 @@
-#!/usr/bin/python
-"""
-This script processes the Categories for discussion working page.
-
-It parses out the actions that need to be taken as a result of CFD discussions
-(as posted to the working page by an administrator) and performs them.
-
-Syntax:
-
- python pwb.py cfd
-
-"""
-#
-# (C) Pywikibot team, 2008-2020
-#
-# Distributed under the terms of the MIT license.
-#
-import re
-import sys
-
-import pywikibot
-
-from scripts.category import CategoryMoveRobot as CategoryMoveBot
-
-
-DEFAULT_CFD_PAGE = 'Wikipedia:Categories for discussion/Working'
-
-# A list of templates that are used on category pages as part of the CFD
-# process that contain information such as the link to the per-day discussion
-# page.
-cfd_templates = ['Cfd full', 'Cfr full']
-
-# Regular expression declarations
-# See the en-wiki CFD working page at
-# [[Wikipedia:Categories for discussion/Working]]
-# to see how these work in context. To get this bot working on other wikis you
-# will need to adjust these regular expressions at the very least.
-nobots = re.compile(r'NO\s*BOTS', re.IGNORECASE)
-example = re.compile(r'\[\[:Category:(.)\1\1\1\1\]\]', re.IGNORECASE)
-speedymode = re.compile(r'^===*\s*Speedy Moves\s*===*\s*$', re.IGNORECASE)
-movemode = re.compile(r'^===*\s*Move/Merge then delete\s*===*\s*$',
- re.IGNORECASE)
-emptymode = re.compile(r'^===*\s*Empty then delete\s*===*\s*$', re.IGNORECASE)
-deletemode = re.compile(r'^===*\s*Ready for deletion\s*===*\s*$',
- re.IGNORECASE)
-maintenance = re.compile(
- r'^===*\s*Old by month categories with entries\s*===*\s*$', re.IGNORECASE)
-dateheader = re.compile(
- r'(\[\[Wikipedia:Categories[_ ]for[_ ]'
- r'(?:discussion|deletion)/Log/([^\]]*?)\]\])', re.IGNORECASE)
-movecat = re.compile(
- (r'\[\[:Category:([^\]]*?)\]\][^\]]*?\[\[:Category:([^\]]*?)\]\]'),
- re.IGNORECASE)
-deletecat = re.compile(r'\[\[:Category:([^\]]*?)\]\]', re.IGNORECASE)
-findday = re.compile(r'\[\[(Wikipedia:Categories for '
- r'(?:discussion|deletion)/Log/\d{4} \w+ \d+)#',
- re.IGNORECASE)
-
-
-class ReCheck:
-
- """Helper class."""
-
- def __init__(self):
- """Initializer."""
- self.result = None
-
- def check(self, pattern, text):
- """Search pattern."""
- self.result = pattern.search(text)
- return self.result
-
-
-def main(*args):
- """
- Process command line arguments and perform task.
-
- If args is an empty list, sys.argv is used.
-
- @param args: command line arguments
- @type args: str
- """
- cfd_page = DEFAULT_CFD_PAGE
- local_args = pywikibot.handle_args(args)
-
- for arg in local_args:
- if arg.startswith('-page'):
- if len(arg) == len('-page'):
- cfd_page = pywikibot.input(
- 'Enter the CFD working page to use:')
- else:
- cfd_page = arg[len('-page:'):]
-
- page = pywikibot.Page(pywikibot.Site(), cfd_page)
- try:
- page.get()
- except pywikibot.NoPage:
- pywikibot.error(
- 'CFD working page "{0}" does not exist!'.format(cfd_page))
- sys.exit(1)
-
- # Variable declarations
- day = 'None'
- mode = 'None'
- summary = ''
- robot = None
-
- m = ReCheck()
- for line in page.text.split('\n'):
- if nobots.search(line) or example.search(line):
- # NO BOTS or example line
- continue
- if speedymode.search(line):
- mode = 'Speedy'
- day = 'None'
- elif movemode.search(line):
- mode = 'Move'
- day = 'None'
- elif emptymode.search(line):
- mode = 'Empty'
- day = 'None'
- elif deletemode.search(line):
- mode = 'Delete'
- day = 'None'
- elif maintenance.search(line):
- # It's probably best not to try to handle these in an automated
- # fashion.
- mode = 'None'
- day = 'None'
- elif m.check(dateheader, line):
- day = m.result.group(1)
- pywikibot.output('Found day header: {}'.format(day))
- elif m.check(movecat, line):
- src = m.result.group(1)
- dest = m.result.group(2)
- thisDay = findDay(src, day)
- if mode == 'Move' and thisDay != 'None':
- summary = (
- 'Robot - Moving category ' + src + ' to [[:Category:'
- + dest + ']] per [[WP:CFD|CFD]] at ' + thisDay + '.')
- action_summary = \
- 'Robot - Result of [[WP:CFD|CFD]] at ' + thisDay + '.'
- elif mode == 'Speedy':
- summary = (
- 'Robot - Speedily moving category ' + src
- + ' to [[:Category:' + dest + ']] per [[WP:CFDS|CFDS]].')
- action_summary = 'Robot - Speedily moved per [[WP:CFDS|CFDS]].'
- else:
- continue
- # If the category is redirect, we do NOT want to move articles to
- # it. The safest thing to do here is abort and wait for human
- # intervention.
- destpage = pywikibot.Page(page.site, dest, ns=14)
- if destpage.isCategoryRedirect():
- summary = 'CANCELED. Destination is redirect: ' + summary
- pywikibot.stdout(summary)
- robot = None
- else:
- deletion_comment_same = (
- CategoryMoveBot.DELETION_COMMENT_SAME_AS_EDIT_COMMENT)
- robot = CategoryMoveBot(oldcat=src, newcat=dest, batch=True,
- comment=summary, inplace=True,
- move_oldcat=True, delete_oldcat=True,
- deletion_comment=deletion_comment_same,
- move_comment=action_summary)
- elif m.check(deletecat, line):
- src = m.result.group(1)
- # I currently don't see any reason to handle these two cases
- # separately, though if are guaranteed that the category in the
- # "Delete" case is empty, it might be easier to call delete.py on
- # it.
- thisDay = findDay(src, day)
- if (mode == 'Empty' or mode == 'Delete') and thisDay != 'None':
- summary = (
- 'Robot - Removing category {0} per [[WP:CFD|CFD]] '
- 'at {1}.'.format(src, thisDay))
- action_summary = \
- 'Robot - Result of [[WP:CFD|CFD]] at ' + thisDay + '.'
- else:
- continue
- robot = CategoryMoveBot(oldcat=src, batch=True, comment=summary,
- deletion_comment=action_summary,
- inplace=True)
- if summary and robot is not None:
- pywikibot.stdout(summary)
- # Run, robot, run!
- robot.run()
- summary = ''
- robot = None
-
-
-def findDay(pageTitle, oldDay):
- """
- Find day link from CFD template.
-
- This function grabs the wiki source of a category page and attempts to
- extract a link to the CFD per-day discussion page from the CFD template.
- If the CFD template is not there, it will return the value of the second
- parameter, which is essentially a fallback that is extracted from the
- per-day subheadings on the working page.
- """
- page = pywikibot.Page(pywikibot.Site(), 'Category:' + pageTitle)
- try:
- pageSrc = page.text
- except pywikibot.NoPage:
- m = None
- else:
- m = findday.search(pageSrc)
-
- if m is not None:
- return '[[{}]]'.format(m.group(1))
-
- # Try to parse day link from CFD template parameters.
- templates = page.templatesWithParams()
- for template, params in templates:
- if template.title() in cfd_templates:
- period = {'year': None, 'day': None, 'month': None}
- for param in params:
- name, _, val = param.partition('=')
- if name in period:
- period[name] = val
- if all(period.values()):
- return ('[[Wikipedia:Categories for discussion/Log/'
- '{year} {month} {day}]]'.format(**period))
- return oldDay
-
-
-if __name__ == '__main__':
- main()
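The ReCheck helper in the removed cfd.py above exists so the line-parsing loop can test a pattern and immediately reuse the match object for group() access. A minimal standalone sketch of that pattern, using one of the script's own regexes:

    import re

    # pattern copied from the removed cfd.py
    deletecat = re.compile(r'\[\[:Category:([^\]]*?)\]\]', re.IGNORECASE)

    class ReCheck:
        def __init__(self):
            self.result = None

        def check(self, pattern, text):
            self.result = pattern.search(text)
            return self.result

    m = ReCheck()
    if m.check(deletecat, '* [[:Category:Foo]] ready for deletion'):
        print(m.result.group(1))  # prints: Foo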
diff --git a/scripts/commons_link.py b/scripts/archive/commons_link.py
similarity index 100%
rename from scripts/commons_link.py
rename to scripts/archive/commons_link.py
diff --git a/scripts/commonscat.py b/scripts/archive/commonscat.py
similarity index 100%
rename from scripts/commonscat.py
rename to scripts/archive/commonscat.py
diff --git a/scripts/create_categories.py b/scripts/archive/create_categories.py
similarity index 100%
rename from scripts/create_categories.py
rename to scripts/archive/create_categories.py
diff --git a/scripts/data_ingestion.py b/scripts/archive/data_ingestion.py
similarity index 100%
rename from scripts/data_ingestion.py
rename to scripts/archive/data_ingestion.py
diff --git a/scripts/disambredir.py b/scripts/archive/disambredir.py
similarity index 100%
rename from scripts/disambredir.py
rename to scripts/archive/disambredir.py
diff --git a/scripts/djvutext.py b/scripts/archive/djvutext.py
similarity index 100%
rename from scripts/djvutext.py
rename to scripts/archive/djvutext.py
diff --git a/scripts/editarticle.py b/scripts/archive/editarticle.py
similarity index 100%
rename from scripts/editarticle.py
rename to scripts/archive/editarticle.py
diff --git a/scripts/archive/featured.py b/scripts/archive/featured.py
deleted file mode 100755
index 2e24ff8..0000000
--- a/scripts/archive/featured.py
+++ /dev/null
@@ -1,630 +0,0 @@
-#!/usr/bin/python
-"""
-Manage featured/good article/list status template.
-
-*** This script understands various command-line arguments: ***
-
-Task commands:
-
- -featured use this script for featured articles. Default task
- if no task command is specified
-
- -good use this script for good articles.
-
- -lists use this script for featured lists.
-
- -former use this script for removing {{Link FA|xx}} from former
- fearured articles
-
- NOTE: you may have all of these commands in one run
-
-Option commands:
-
- -interactive: ask before changing each page
-
- -nocache doesn't include cache files file to remember if the article
- already was verified.
-
- -nocache:xx,yy you may ignore language codes xx,yy,... from cache file
-
- -fromlang:xx,yy xx,yy,zz,.. are the languages to be verified.
- -fromlang:ar--fi Another possible with range the languages
-
- -fromall to verify all languages.
-
- -tolang:xx,yy xx,yy,zz,.. are the languages to be updated
-
- -after:zzzz process pages after and including page zzzz
- (sorry, not implemented yet)
-
- -side use -side if you want to move all {{Link FA|lang}} next
- to the corresponding interwiki links. Default is placing
- {{Link FA|lang}} on top of the interwiki links.
- (This option is deprecated with wikidata)
-
- -count Only counts how many featured/good articles exist
- on all wikis (given with the "-fromlang" argument) or
- on several language(s) (when using the "-fromall" argument).
- Example: python pwb.py featured -fromlang:en,he -count
- counts how many featured articles exist in the en and he
- wikipedias.
-
- -quiet no corresponding pages are displayed.
-
-"""
-#
-# (C) Pywikibot team, 2005-2020
-#
-# Distributed under the terms of the MIT license.
-#
-import pickle
-import re
-
-import pywikibot
-
-from pywikibot import i18n, textlib, config
-
-from pywikibot.pagegenerators import PreloadingGenerator
-from pywikibot.tools.formatter import color_format
-from pywikibot.tools import issue_deprecation_warning
-
-
-def CAT(site, name, hide):
- name = site.namespace(14) + ':' + name
- cat = pywikibot.Category(site, name)
- yield from cat.articles(endsort=hide)
- if hide:
- yield from cat.articles(startFrom=chr(ord(hide) + 1))
-
-
-def BACK(site, name, hide):
- p = pywikibot.Page(site, name, ns=10)
- return [page for page in p.getReferences(follow_redirects=False,
- only_template_inclusion=True)]
-
-
-def DATA(site, name, hide):
- dp = pywikibot.ItemPage(site.data_repository(), name)
- try:
- title = dp.getSitelink(site)
- except pywikibot.NoPage:
- return
- cat = pywikibot.Category(site, title)
- if isinstance(hide, dict):
- hide = hide.get(site.code)
- yield from cat.articles(endsort=hide)
- if hide:
- yield from cat.articles(startsort=chr(ord(hide) + 1))
-
-
-# not implemented yet
-def TMPL(site, name, hide):
- return
-
-
-# ALL wikis use 'Link FA', and sometimes other localized templates.
-# We use _default AND the localized ones
-template = {
- '_default': ['Link FA'],
- 'als': ['LinkFA'],
- 'an': ['Destacato', 'Destacau'],
- 'ar': ['وصلة مقالة مختارة'],
- 'ast': ['Enllaz AD'],
- 'az': ['Link FM'],
- 'br': ['Liamm PuB', 'Lien AdQ'],
- 'ca': ['Enllaç AD', 'Destacat'],
- 'cy': ['Cyswllt erthygl ddethol', 'Dolen ED'],
- 'eo': ['LigoElstara'],
- 'en': ['Link FA', 'FA link'],
- 'es': ['Destacado'],
- 'eu': ['NA lotura'],
- 'fr': ['Lien AdQ'],
- 'fur': ['Leam VdC'],
- 'ga': ['Nasc AR'],
- 'gl': ['Ligazón AD', 'Destacado'],
- 'hi': ['Link FA', 'Lien AdQ'],
- 'is': ['Tengill ÚG'],
- 'it': ['Link V', 'Link AdQ'],
- 'no': ['Link UA'],
- 'oc': ['Ligam AdQ', 'Lien AdQ'],
- 'ro': ['Legătură AC', 'Legătură AF'],
- 'sv': ['UA', 'Link UA'],
- 'tr': ['Link SM'],
- 'vi': ['Liên kết chọn lọc'],
- 'vo': ['Yüm YG'],
- 'yi': ['רא'],
-}
-
-template_good = {
- '_default': ['Link GA'],
- 'ar': ['وصلة مقالة جيدة'],
- 'ca': ['Enllaç AB', 'Lien BA', 'Abo'],
- 'da': ['Link GA', 'Link AA'],
- 'eo': ['LigoLeginda'],
- 'es': ['Bueno'],
- 'fr': ['Lien BA'],
- 'gl': ['Ligazón AB'],
- 'is': ['Tengill GG'],
- 'it': ['Link VdQ'],
- 'nn': ['Link AA'],
- 'no': ['Link AA'],
- 'pt': ['Bom interwiki'],
- # 'tr': ['Link GA', 'Link KM'],
- 'vi': ['Liên kết bài chất lượng tốt'],
- 'wo': ['Lien BA'],
-}
-
-template_lists = {
- '_default': ['Link FL'],
- 'no': ['Link GL'],
-}
-
-featured_name = {
- 'wikidata': (DATA, 'Q4387444'),
-}
-
-good_name = {
- 'wikidata': (DATA, 'Q7045856'),
-}
-
-lists_name = {
- 'wikidata': (TMPL, 'Q5857568'),
- 'ar': (BACK, 'قائمة مختارة'),
- 'da': (BACK, 'FremragendeListe'),
- 'de': (BACK, 'Informativ'),
- 'en': (BACK, 'Featured list'),
- 'fa': (BACK, 'فهرست برگزیده'),
- 'id': (BACK, 'Featured list'),
- 'ja': (BACK, 'Featured List'),
- 'ksh': (CAT, 'Joode Leß'),
- 'no': (BACK, 'God liste'),
- 'pl': (BACK, 'Medalista'),
- 'pt': (BACK, 'Anexo destacado'),
- 'ro': (BACK, 'Listă de calitate'),
- 'ru': (BACK, 'Избранный список или портал'),
- 'tr': (BACK, 'Seçkin liste'),
- 'uk': (BACK, 'Вибраний список'),
- 'vi': (BACK, 'Sao danh sách chọn lọc'),
- 'zh': (BACK, '特色列表'),
-}
-
-# Third parameter is the sort key indicating articles to hide from the given
-# list
-former_name = {
- 'wikidata': (DATA, 'Q7045853', {'en': '#'})
-}
-
-
-class FeaturedBot(pywikibot.Bot):
-
- """Featured article bot."""
-
- # Bot configuration.
- # Only the keys of the dict can be passed as init options
- # The values are the default values
-
- def __init__(self, **kwargs):
- """Only accepts options defined in availableOptions."""
- self.availableOptions.update({
- 'async': False, # True for asynchronously putting a page
- 'afterpage': '!',
- 'count': False, # featuredcount
- 'featured': False,
- 'former': False,
- 'fromall': False,
- 'fromlang': None,
- 'good': False,
- 'lists': False,
- 'nocache': [],
- 'side': False, # not template_on_top
- 'quiet': False,
- 'interactive': False,
- })
-
- super().__init__(**kwargs)
- self.cache = {}
- self.filename = None
- self.site = pywikibot.Site()
- self.repo = self.site.data_repository()
-
- # if no source site is given, give up
- if self.getOption('fromlang') is True:
- self.options['fromlang'] = False
-
- # setup tasks running
- self.tasks = []
- for task in ('featured', 'good', 'lists', 'former'):
- if self.getOption(task):
- self.tasks.append(task)
- if not self.tasks:
- self.tasks = ['featured']
-
- def itersites(self, task):
- """Generator for site codes to be processed."""
- def _generator():
- if task == 'good':
- item_no = good_name['wikidata'][1]
- elif task == 'featured':
- item_no = featured_name['wikidata'][1]
- elif task == 'former':
- item_no = former_name['wikidata'][1]
- dp = pywikibot.ItemPage(self.repo, item_no)
- dp.get()
- for key in sorted(dp.sitelinks.keys()):
- try:
- site = self.site.fromDBName(key)
- except pywikibot.SiteDefinitionError:
- pywikibot.output('"%s" is not a valid site. Skipping...'
- % key)
- else:
- if site.family == self.site.family:
- yield site
-
- generator = _generator()
-
- if self.getOption('fromall'):
- return generator
- elif self.getOption('fromlang'):
- fromlang = self.getOption('fromlang')
- if len(fromlang) == 1 and fromlang[0].find('--') >= 0:
- start, end = fromlang[0].split('--', 1)
- if not start:
- start = ''
- if not end:
- end = 'zzzzzzz'
- return (site for site in generator
- if site.code >= start and site.code <= end)
- else:
- return (site for site in generator if site.code in fromlang)
- else:
- pywikibot.warning('No sites given to verify %s articles.\n'
- 'Please use -fromlang: or fromall option\n'
- % task)
- return ()
-
- def hastemplate(self, task):
- add_tl, remove_tl = self.getTemplateList(self.site.code, task)
- for i, tl in enumerate(add_tl):
- tp = pywikibot.Page(self.site, tl, ns=10)
- if tp.exists():
- return True
- else:
- pywikibot.output(tl + ' does not exist')
- # The first item is the default template to be added.
- # It must exist. Otherwise the script must not run.
- if i == 0:
- return
- else:
- return
-
- def readcache(self, task):
- if self.getOption('count') or self.getOption('nocache') is True:
- return
- self.filename = pywikibot.config.datafilepath('cache', task)
- try:
- f = open(self.filename, 'rb')
- self.cache = pickle.load(f)
- f.close()
- pywikibot.output('Cache file %s found with %d items.'
- % (self.filename, len(self.cache)))
- except IOError:
- pywikibot.output('Cache file %s not found.' % self.filename)
-
- def writecache(self):
- if self.getOption('count'):
- return
- if not self.getOption('nocache') is True:
- pywikibot.output('Writing %d items to cache file %s.'
- % (len(self.cache), self.filename))
- with open(self.filename, 'wb') as f:
- pickle.dump(self.cache, f, protocol=config.pickle_protocol)
- self.cache = {}
-
- def run(self):
- for task in self.tasks:
- self.run_task(task)
- pywikibot.output('%d pages written.' % self._save_counter)
-
- def run_task(self, task):
- if not self.hastemplate(task):
- pywikibot.output('\nNOTE: %s articles are not implemented at %s.'
- % (task, self.site))
- return
-
- self.readcache(task)
- for site in self.itersites(task):
- try:
- self.treat(site, task)
- except KeyboardInterrupt:
- pywikibot.output('\nQuitting %s treat...' % task)
- break
- self.writecache()
-
- def treat(self, fromsite, task):
- if fromsite != self.site:
- self.featuredWithInterwiki(fromsite, task)
-
- def featuredArticles(self, site, task, cache):
- articles = []
- info = globals()[task + '_name']
- if task == 'lists':
- code = site.code
- else:
- code = 'wikidata'
- try:
- method = info[code][0]
- except KeyError:
- pywikibot.error(
- "language %s doesn't has %s category source."
- % (code, task))
- return
- name = info[code][1]
- # hide #-sorted items on en-wiki
- try:
- hide = info[code][2]
- except IndexError:
- hide = None
- for p in method(site, name, hide):
- if p.namespace() == 0: # Article
- articles.append(p)
- # Article talk (like in English)
- elif p.namespace() == 1 and site.code != 'el':
- articles.append(pywikibot.Page(p.site,
- p.title(with_ns=False)))
- pywikibot.output(color_format(
- '{lightred}** {0} has {1} {2} articles{default}',
- site, len(articles), task))
- while articles:
- p = articles.pop(0)
- if p.title() < self.getOption('afterpage'):
- continue
-
- if '/' in p.title() and p.namespace() != 0:
- pywikibot.output('%s is a subpage' % p.title())
- continue
-
- if p.title() in cache:
- pywikibot.output('(cached) %s -> %s' % (
- p.title(), cache[p.title()]))
- continue
- yield p
-
- def findTranslated(self, page, oursite=None):
- quiet = self.getOption('quiet')
- if not oursite:
- oursite = self.site
- if page.isRedirectPage():
- page = page.getRedirectTarget()
-
- ourpage = None
- for link in page.iterlanglinks():
- if link.site == oursite:
- ourpage = pywikibot.Page(link)
- break
-
- if not ourpage:
- if not quiet:
- pywikibot.output('%s -> no corresponding page in %s'
- % (page.title(), oursite))
- elif ourpage.section():
- pywikibot.output('%s -> our page is a section link: %s'
- % (page.title(), ourpage.title()))
- elif not ourpage.exists():
- pywikibot.output("%s -> our page doesn't exist: %s"
- % (page.title(), ourpage.title()))
- else:
- if ourpage.isRedirectPage():
- ourpage = ourpage.getRedirectTarget()
-
- pywikibot.output('%s -> corresponding page is %s'
- % (page.title(), ourpage.title()))
- if ourpage.namespace() != 0:
- pywikibot.output('%s -> not in the main namespace, skipping'
- % page.title())
- elif ourpage.isRedirectPage():
- pywikibot.output(
- '%s -> double redirect, skipping' % page.title())
- elif not ourpage.exists():
- pywikibot.output("%s -> page doesn't exist, skipping"
- % ourpage.title())
- else:
- backpage = None
- for link in ourpage.iterlanglinks():
- if link.site == page.site:
- backpage = pywikibot.Page(link)
- break
- if not backpage:
- pywikibot.output(
- '%s -> no back interwiki ref' % page.title())
- elif backpage == page:
- # everything is ok
- yield ourpage
- elif backpage.isRedirectPage():
- backpage = backpage.getRedirectTarget()
- if backpage == page:
- # everything is ok
- yield ourpage
- else:
- pywikibot.output(
- '%s -> back interwiki ref target is redirect to %s'
- % (page.title(), backpage.title()))
- else:
- pywikibot.output('%s -> back interwiki ref target is %s'
- % (page.title(), backpage.title()))
-
- def getTemplateList(self, code, task):
- add_templates = []
- remove_templates = []
- if task == 'featured':
- try:
- add_templates = template[code]
- add_templates += template['_default']
- except KeyError:
- add_templates = template['_default']
- try:
- remove_templates = template_good[code]
- remove_templates += template_good['_default']
- except KeyError:
- remove_templates = template_good['_default']
- elif task == 'good':
- try:
- add_templates = template_good[code]
- add_templates += template_good['_default']
- except KeyError:
- add_templates = template_good['_default']
- try:
- remove_templates = template[code]
- remove_templates += template['_default']
- except KeyError:
- remove_templates = template['_default']
- elif task == 'lists':
- try:
- add_templates = template_lists[code]
- add_templates += template_lists['_default']
- except KeyError:
- add_templates = template_lists['_default']
- else: # task == 'former'
- try:
- remove_templates = template[code]
- remove_templates += template['_default']
- except KeyError:
- remove_templates = template['_default']
- return add_templates, remove_templates
-
- def featuredWithInterwiki(self, fromsite, task):
- """Read featured articles and find the corresponding pages.
-
- Find corresponding pages on other sites, place the template and
- remember the page in the cache dict.
-
- """
- tosite = self.site
- if fromsite.code not in self.cache:
- self.cache[fromsite.code] = {}
- if tosite.code not in self.cache[fromsite.code]:
- self.cache[fromsite.code][tosite.code] = {}
- cc = self.cache[fromsite.code][tosite.code]
- if self.getOption('nocache') is True or \
- fromsite.code in self.getOption('nocache'):
- cc = {}
-
- gen = self.featuredArticles(fromsite, task, cc)
- if self.getOption('count'):
- next(gen, None)
- return # count only, we are ready here
- gen = PreloadingGenerator(gen)
-
- for source in gen:
- if source.isRedirectPage():
- source = source.getRedirectTarget()
-
- if not source.exists():
- pywikibot.output("source page doesn't exist: %s"
- % source)
- continue
-
- for dest in self.findTranslated(source, tosite):
- self.add_template(source, dest, task, fromsite)
- cc[source.title()] = dest.title()
-
- def add_template(self, source, dest, task, fromsite):
- """Place or remove the Link_GA/FA template on/from a page."""
- def compile_link(site, templates):
- """Compile one link template list."""
- findtemplate = '(%s)' % '|'.join(templates)
- return re.compile(r'\{\{%s\|%s\}\}'
- % (findtemplate.replace(' ', '[ _]'),
- site.code), re.IGNORECASE)
-
- tosite = dest.site
- add_tl, remove_tl = self.getTemplateList(tosite.code, task)
- re_link_add = compile_link(fromsite, add_tl)
- re_link_remove = compile_link(fromsite, remove_tl)
-
- text = dest.text
- m1 = add_tl and re_link_add.search(text)
- m2 = remove_tl and re_link_remove.search(text)
- changed = False
- interactive = self.getOption('interactive')
- if add_tl:
- if m1:
- pywikibot.output('(already added)')
- else:
- # insert just before interwiki
- if (not interactive
- or pywikibot.input_yn(
- 'Connecting %s -> %s. Proceed?'
- % (source.title(), dest.title()),
- default=False, automatic_quit=False)):
- if self.getOption('side'):
- # Placing {{Link FA|xx}} right next to
- # corresponding interwiki
- text = (text[:m1.end()]
- + ' {{%s|%s}}' % (add_tl[0], fromsite.code)
- + text[m1.end():])
- else:
- # Moving {{Link FA|xx}} to top of interwikis
- iw = textlib.getLanguageLinks(text, tosite)
- text = textlib.removeLanguageLinks(text, tosite)
- text += '%s{{%s|%s}}%s' % (
- config.LS, add_tl[0], fromsite.code, config.LS)
- text = textlib.replaceLanguageLinks(text,
- iw, tosite)
- changed = True
- if remove_tl:
- if m2:
- if (changed # Don't force the user to say "Y" twice
- or not interactive
- or pywikibot.input_yn(
- 'Connecting %s -> %s. Proceed?'
- % (source.title(), dest.title()),
- default=False, automatic_quit=False)):
- text = re.sub(re_link_remove, '', text)
- changed = True
- elif task == 'former':
- pywikibot.output('(already removed)')
- if changed:
- comment = i18n.twtranslate(tosite, 'featured-' + task,
- {'page': source})
- try:
- dest.put(text, comment)
- self._save_counter += 1
- except pywikibot.LockedPage:
- pywikibot.output('Page %s is locked!'
- % dest.title())
- except pywikibot.PageSaveRelatedError:
- pywikibot.output('Page not saved')
-
-
-def main(*args):
- """
- Process command line arguments and invoke bot.
-
- If args is an empty list, sys.argv is used.
-
- @param args: command line arguments
- @type args: str
- """
- options = {}
- local_args = pywikibot.handle_args(args)
-
- issue_deprecation_warning(
- 'featured.py script', 'Wikibase Client extension',
- 0, UserWarning, since='20160307')
-
- for arg in local_args:
- if arg.startswith('-fromlang:'):
- options[arg[1:9]] = arg[10:].split(',')
- elif arg.startswith('-after:'):
- options['afterpage'] = arg[7:]
- elif arg.startswith('-nocache:'):
- options[arg[1:8]] = arg[9:].split(',')
- else:
- options[arg[1:].lower()] = True
-
- bot = FeaturedBot(**options)
- bot.run()
-
-
-if __name__ == '__main__':
- main()
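
For reference, the heart of the removed add_template method above is the pattern builder compile_link, which detects an existing {{Link FA|xx}} or {{Link GA|xx}} marker in the target page's text. A standalone sketch of the same approach, should anyone unarchive featured.py later (the template list and site code below are illustrative):

    import re

    def compile_link(site_code, templates):
        # Spaces in template names also match underscores, mirroring
        # MediaWiki title normalization ('Link FA' ~ 'Link_FA').
        findtemplate = '(%s)' % '|'.join(templates)
        return re.compile(r'\{\{%s\|%s\}\}'
                          % (findtemplate.replace(' ', '[ _]'), site_code),
                          re.IGNORECASE)

    # Illustrative values; any template list and language code work.
    pattern = compile_link('de', ['Link FA', 'Link GA'])
    assert pattern.search('text {{link_fa|de}} text')
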
diff --git a/scripts/flickrripper.py b/scripts/archive/flickrripper.py
similarity index 100%
rename from scripts/flickrripper.py
rename to scripts/archive/flickrripper.py
diff --git a/scripts/followlive.py b/scripts/archive/followlive.py
similarity index 100%
rename from scripts/followlive.py
rename to scripts/archive/followlive.py
diff --git a/scripts/freebasemappingupload.py b/scripts/archive/freebasemappingupload.py
similarity index 100%
rename from scripts/freebasemappingupload.py
rename to scripts/archive/freebasemappingupload.py
diff --git a/scripts/image.py b/scripts/archive/image.py
similarity index 100%
rename from scripts/image.py
rename to scripts/archive/image.py
diff --git a/scripts/imagecopy.py b/scripts/archive/imagecopy.py
similarity index 100%
rename from scripts/imagecopy.py
rename to scripts/archive/imagecopy.py
diff --git a/scripts/imagecopy_self.py b/scripts/archive/imagecopy_self.py
similarity index 100%
rename from scripts/imagecopy_self.py
rename to scripts/archive/imagecopy_self.py
diff --git a/scripts/imageharvest.py b/scripts/archive/imageharvest.py
similarity index 100%
rename from scripts/imageharvest.py
rename to scripts/archive/imageharvest.py
diff --git a/scripts/imagerecat.py b/scripts/archive/imagerecat.py
similarity index 100%
rename from scripts/imagerecat.py
rename to scripts/archive/imagerecat.py
diff --git a/scripts/imageuncat.py b/scripts/archive/imageuncat.py
similarity index 100%
rename from scripts/imageuncat.py
rename to scripts/archive/imageuncat.py
diff --git a/scripts/interwiki.py b/scripts/archive/interwiki.py
similarity index 100%
rename from scripts/interwiki.py
rename to scripts/archive/interwiki.py
diff --git a/scripts/isbn.py b/scripts/archive/isbn.py
similarity index 100%
rename from scripts/isbn.py
rename to scripts/archive/isbn.py
diff --git a/scripts/lonelypages.py b/scripts/archive/lonelypages.py
similarity index 100%
rename from scripts/lonelypages.py
rename to scripts/archive/lonelypages.py
diff --git a/scripts/makecat.py b/scripts/archive/makecat.py
similarity index 100%
rename from scripts/makecat.py
rename to scripts/archive/makecat.py
diff --git a/scripts/match_images.py b/scripts/archive/match_images.py
similarity index 100%
rename from scripts/match_images.py
rename to scripts/archive/match_images.py
diff --git a/scripts/ndashredir.py b/scripts/archive/ndashredir.py
similarity index 100%
rename from scripts/ndashredir.py
rename to scripts/archive/ndashredir.py
diff --git a/scripts/nowcommons.py b/scripts/archive/nowcommons.py
similarity index 100%
rename from scripts/nowcommons.py
rename to scripts/archive/nowcommons.py
diff --git a/scripts/patrol.py b/scripts/archive/patrol.py
similarity index 100%
rename from scripts/patrol.py
rename to scripts/archive/patrol.py
diff --git a/scripts/piper.py b/scripts/archive/piper.py
similarity index 100%
rename from scripts/piper.py
rename to scripts/archive/piper.py
diff --git a/scripts/reflinks.py b/scripts/archive/reflinks.py
similarity index 100%
rename from scripts/reflinks.py
rename to scripts/archive/reflinks.py
diff --git a/scripts/replicate_wiki.py b/scripts/archive/replicate_wiki.py
similarity index 100%
rename from scripts/replicate_wiki.py
rename to scripts/archive/replicate_wiki.py
diff --git a/scripts/selflink.py b/scripts/archive/selflink.py
similarity index 100%
rename from scripts/selflink.py
rename to scripts/archive/selflink.py
diff --git a/scripts/spamremove.py b/scripts/archive/spamremove.py
similarity index 100%
rename from scripts/spamremove.py
rename to scripts/archive/spamremove.py
diff --git a/scripts/standardize_interwiki.py b/scripts/archive/standardize_interwiki.py
similarity index 100%
rename from scripts/standardize_interwiki.py
rename to scripts/archive/standardize_interwiki.py
diff --git a/scripts/states_redirect.py b/scripts/archive/states_redirect.py
similarity index 100%
rename from scripts/states_redirect.py
rename to scripts/archive/states_redirect.py
diff --git a/scripts/surnames_redirects.py b/scripts/archive/surnames_redirects.py
similarity index 100%
rename from scripts/surnames_redirects.py
rename to scripts/archive/surnames_redirects.py
diff --git a/scripts/table2wiki.py b/scripts/archive/table2wiki.py
similarity index 100%
rename from scripts/table2wiki.py
rename to scripts/archive/table2wiki.py
diff --git a/scripts/unlink.py b/scripts/archive/unlink.py
similarity index 100%
rename from scripts/unlink.py
rename to scripts/archive/unlink.py
diff --git a/scripts/unusedfiles.py b/scripts/archive/unusedfiles.py
similarity index 100%
rename from scripts/unusedfiles.py
rename to scripts/archive/unusedfiles.py
diff --git a/scripts/watchlist.py b/scripts/archive/watchlist.py
similarity index 100%
rename from scripts/watchlist.py
rename to scripts/archive/watchlist.py
diff --git a/scripts/wikisourcetext.py b/scripts/archive/wikisourcetext.py
similarity index 100%
rename from scripts/wikisourcetext.py
rename to scripts/archive/wikisourcetext.py
diff --git a/scripts/interwikidata.py b/scripts/interwikidata.py
index acf398d..073ca0d 100755
--- a/scripts/interwikidata.py
+++ b/scripts/interwikidata.py
@@ -61,9 +61,8 @@
})
super().__init__(**kwargs)
if not self.site.has_data_repository:
- raise ValueError('{site} does not have a data repository, '
- 'use interwiki.py instead.'.format(
- site=self.site))
+ raise ValueError('{site} does not have a data repository.'
+ .format(site=self.site))
self.repo = self.site.data_repository()
if not self.opt.summary:
self.opt.summary = pywikibot.i18n.twtranslate(
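
The shortened error message drops the hint to fall back to interwiki.py, which this change moves to the archive. In effect the constructor guard now behaves like this (a minimal sketch; the site is illustrative):

    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')
    if not site.has_data_repository:
        # No pointer to interwiki.py any more; it is archived.
        raise ValueError('{site} does not have a data repository.'
                         .format(site=site))
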
diff --git a/tests/__init__.py b/tests/__init__.py
index 1da2957..aad974c 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -136,21 +136,15 @@
'category_bot',
'checkimages',
'compat2core',
- 'data_ingestion',
'deletionbot',
- 'disambredir',
'fixing_redirects',
'generate_family_file',
'generate_user_files',
- 'imagecopy',
'interwikidata',
- 'isbn',
'l10n',
- 'patrolbot',
'protectbot',
'pwb',
'redirect_bot',
- 'reflinks',
'replacebot',
'script',
'template_bot',
diff --git a/tests/data_ingestion_tests.py b/tests/archive/data_ingestion_tests.py
similarity index 100%
rename from tests/data_ingestion_tests.py
rename to tests/archive/data_ingestion_tests.py
diff --git a/tests/disambredir_tests.py b/tests/archive/disambredir_tests.py
similarity index 100%
rename from tests/disambredir_tests.py
rename to tests/archive/disambredir_tests.py
diff --git a/tests/imagecopy_tests.py b/tests/archive/imagecopy_tests.py
similarity index 100%
rename from tests/imagecopy_tests.py
rename to tests/archive/imagecopy_tests.py
diff --git a/tests/isbn_tests.py b/tests/archive/isbn_tests.py
similarity index 100%
rename from tests/isbn_tests.py
rename to tests/archive/isbn_tests.py
diff --git a/tests/patrolbot_tests.py b/tests/archive/patrolbot_tests.py
similarity index 100%
rename from tests/patrolbot_tests.py
rename to tests/archive/patrolbot_tests.py
diff --git a/tests/reflinks_tests.py b/tests/archive/reflinks_tests.py
similarity index 100%
rename from tests/reflinks_tests.py
rename to tests/archive/reflinks_tests.py
diff --git a/tests/pwb_tests.py b/tests/pwb_tests.py
index 89ccc1b..07baafe 100644
--- a/tests/pwb_tests.py
+++ b/tests/pwb_tests.py
@@ -104,6 +104,7 @@
with self.subTest(line=2):
self.assertEqual(stderr.readline().strip(), result[2])

+ @unittest.skip('No multiple results currently')
def test_similar_scripts_found(self):
"""Test script call which gives multiple similar results."""
result = [
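
The skip keeps this test in place for when scripts are unarchived: per the skip reason, with so few scripts remaining pwb.py currently finds no name that yields multiple similar matches. A minimal sketch of the standard-library mechanism used (test body hypothetical):

    import unittest

    class PwbScriptLookupTest(unittest.TestCase):

        @unittest.skip('No multiple results currently')
        def test_similar_scripts_found(self):
            # Would assert that pwb.py suggests several candidate
            # scripts for an ambiguous name; unreachable while skipped.
            self.fail('not reached')

    if __name__ == '__main__':
        unittest.main()
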
diff --git a/tests/script_tests.py b/tests/script_tests.py
index cc2942d..774b605 100644
--- a/tests/script_tests.py
+++ b/tests/script_tests.py
@@ -86,29 +86,16 @@
}

auto_run_script_list = [
- 'blockpageschecker',
- 'casechecker',
- 'catall',
'category_redirect',
'checkimages',
'clean_sandbox',
- 'disambredir',
- 'followlive',
- 'imagerecat',
'login',
- 'lonelypages',
'misspelling',
'revertbot',
'noreferences',
- 'nowcommons',
'parser_function_count',
- 'patrol',
'shell',
- 'standardize_interwiki',
- 'states_redirect',
- 'unusedfiles',
'upload',
- 'watchlist',
'welcome',
]

diff --git a/tox.ini b/tox.ini
index e2f6b21..2ba602f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -34,7 +34,7 @@

fasttest: python -W error::UserWarning -m generate_user_files -family:wikipedia -lang:test -v
fasttest: nosetests --version
- fasttest: nosetests --with-detecthttp -v -a '!net' tests
+ fasttest: nosetests --with-detecthttp --ignore-files="archive/.+" -v -a '!net' tests
deps =
flake8: .[flake8]

@@ -99,7 +99,7 @@
ignore = B007,C103,D105,D211,D401,D413,D412,FI1,H101,H238,H301,H404,H405,H903,P101,P102,P103,P205,W503
select = B901,B903
enable-extensions = H203,H204,H205
-exclude = .tox,.git,./*.egg,build,scripts/archive/*,./scripts/i18n/*
+exclude = .tox,.git,./*.egg,build,scripts/archive/*,./scripts/i18n/*,./tests/archive/*
classmethod-decorators = classmethod,classproperty

# flake8-coding
@@ -166,43 +166,25 @@
pywikibot/userinterfaces/win32_unicode.py : N801, N812, T001, N803, N806
pywikibot/xmlreader.py : N803, N806, N802
scripts/add_text.py : N803, N806
- scripts/blockpageschecker.py : N802, N803, N806, N816
- scripts/casechecker.py : N802, N803, N806, N815
scripts/category.py : N803, N806, N802
scripts/category_redirect.py : N803, N806, N802
scripts/checkimages.py : N801, N802, N803, N806, N816
scripts/clean_sandbox.py : N815, N816
- scripts/commonscat.py : N802, N803, N806, N816
scripts/cosmetic_changes.py : N816
- scripts/data_ingestion.py : N803, N806, N802
scripts/fixing_redirects.py : N803, N806
- scripts/flickrripper.py : N803, N806, N802
- scripts/freebasemappingupload.py: N802
scripts/harvest_template.py : N802, N815, N816
- scripts/imagecopy.py : N801, N802, N803, N806, N816
- scripts/imagecopy_self.py : N801, N802, N803, N806, N816
- scripts/imagerecat.py : N803, N806, N802
scripts/imagetransfer.py : N803, N806, N816
- scripts/imageuncat.py: N802, N816
- scripts/interwiki.py : N802, N803, N806, N815, N816
- scripts/isbn.py : N802, N803, N806, N816
scripts/maintenance/* : T001
scripts/maintenance/download_dump.py : N815
- scripts/match_images.py : N803, N806
scripts/misspelling.py : N802, N803, N806, N815
scripts/movepages.py : N803, N806, N802
scripts/noreferences.py : N802, N803, N806, N816
- scripts/nowcommons.py : N802, N803, N806, N816
scripts/pagefromfile.py : N815
scripts/redirect.py : N803, N806, N815
- scripts/reflinks.py: N802, N816
scripts/replace.py : N802, N803, N806, N816
scripts/revertbot.py: N815
scripts/solve_disambiguation.py : N803, N806, N802
- scripts/table2wiki.py: N802
scripts/templatecount.py: N802
- scripts/unlink.py : N803
- scripts/watchlist.py : N803, N802
scripts/weblinkchecker.py : N803, N806, N802
scripts/welcome.py: N802, N815
setup.py : T001


Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ic694a413623b7e2ffed9cc7e8d64d191df3c1248
Gerrit-Change-Number: 660644
Gerrit-PatchSet: 10
Gerrit-Owner: JJMC89 <JJMC89.Wikimedia@gmail.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged