jenkins-bot has submitted this change and it was merged.
Change subject: Remove old code dealing with starsList
......................................................................
Remove old code dealing with starsList
- stars have been replaced with Wikidata's sitelinks and the code is no longer
needed. Remove these code parts from add_text.py and cosmetic_changes.py (but
keep featured.py unchanged, which is in the archive folder now).
- put code parts into textlib which can be used with version history contents
- change add_text_tests
- change test_standardizePageFooter test in cosmetic_changes_tests
- add several tests to textlib_tests
Bug: T123150
Change-Id: I06ca86805693f8ce57e78c34b2ee5ace2659a3ba
---
M pywikibot/cosmetic_changes.py
M pywikibot/textlib.py
M scripts/add_text.py
M tests/add_text_tests.py
M tests/cosmetic_changes_tests.py
M tests/textlib_tests.py
6 files changed, 172 insertions(+), 117 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index 6afa7b0..72b74d2 100755
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -49,7 +49,7 @@
cosmetic_changes_deny_script += ['your_script_name_1',
'your_script_name_2']
"""
#
-# (C) xqt, 2009-2015
+# (C) xqt, 2009-2016
# (C) Pywikibot team, 2006-2016
#
# Distributed under the terms of the MIT license.
@@ -296,51 +296,18 @@
"""
Standardize page footer.
- Makes sure that interwiki links, categories and star templates are
- put to the correct position and into the right order. This combines the
- old instances standardizeInterwiki and standardizeCategories
+ Makes sure that interwiki links and categories are put to the correct
+ position and into the right order. This combines the old instances
+ standardizeInterwiki and standardizeCategories.
The page footer has the following section in that sequence:
1. categories
2. ## TODO: template beyond categories ##
3. additional information depending on local site policy
- 4. stars templates for featured and good articles
- 5. interwiki links
+ 4. interwiki links
"""
- # TODO: T123150
- starsList = [
- u'bueno',
- u'bom interwiki',
- u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
- u'destacado', u'destaca[tu]',
- u'enllaç[ _]ad',
- u'enllaz[ _]ad',
- u'leam[ _]vdc',
- u'legătură[ _]a[bcf]',
- u'liamm[ _]pub',
- u'lien[ _]adq',
- u'lien[ _]ba',
- u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
- u'liên[ _]kết[ _]chọn[ _]lọc',
- u'ligam[ _]adq',
- u'ligazón[ _]a[bd]',
- u'ligoelstara',
- u'ligoleginda',
- u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
u'link[ _]km',
- u'link[ _]sm', u'linkfa',
- u'na[ _]lotura',
- u'nasc[ _]ar',
- u'tengill[ _][úg]g',
- u'ua',
- u'yüm yg',
- u'רא',
- u'وصلة مقالة جيدة',
- u'وصلة مقالة مختارة',
- ]
-
categories = None
interwikiLinks = None
- allstars = []
# Pywikibot is no longer allowed to touch categories on the
# German Wikipedia. See
@@ -367,15 +334,6 @@
# Removing the interwiki
text = textlib.removeLanguageLinks(text, site=self.site)
- # Removing the stars' issue
- starstext = textlib.removeDisabledParts(text)
- for star in starsList:
- regex = re.compile(r'(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
- % star, re.I)
- found = regex.findall(starstext)
- if found != []:
- text = regex.sub('', text)
- allstars += found
# Adding categories
if categories:
@@ -390,13 +348,6 @@
# categories.insert(0, name)
text = textlib.replaceCategoryLinks(text, categories,
site=self.site)
- # Adding stars templates
- if allstars:
- text = text.strip() + self.site.family.interwiki_text_separator
- allstars.sort()
- for element in allstars:
- text += '%s%s' % (element.strip(), config.line_separator)
- pywikibot.log(u'%s' % element.strip())
# Adding the interwiki
if interwikiLinks:
text = textlib.replaceLanguageLinks(text, interwikiLinks,
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 511f03e..32f8fe9 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -7,7 +7,7 @@
"""
#
-# (C) Pywikibot team, 2008-2015
+# (C) Pywikibot team, 2008-2016
#
# Distributed under the terms of the MIT license.
#
@@ -1669,6 +1669,108 @@
return u'{{%s\n%s}}' % (template, text)
+# ---------------------------------
+# functions dealing with stars list
+# ---------------------------------
+
+starsList = [
+ 'bueno',
+ 'bom interwiki',
+ 'cyswllt[ _]erthygl[ _]ddethol', 'dolen[ _]ed',
+ 'destacado', 'destaca[tu]',
+ 'enllaç[ _]ad',
+ 'enllaz[ _]ad',
+ 'leam[ _]vdc',
+ 'legătură[ _]a[bcf]',
+ 'liamm[ _]pub',
+ 'lien[ _]adq',
+ 'lien[ _]ba',
+ 'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
+ 'liên[ _]kết[ _]chọn[ _]lọc',
+ 'ligam[ _]adq',
+ 'ligazón[ _]a[bd]',
+ 'ligoelstara',
+ 'ligoleginda',
+ 'link[ _][afgu]a', 'link[ _]adq', 'link[ _]f[lm]', 'link[
_]km',
+ 'link[ _]sm', 'linkfa',
+ 'na[ _]lotura',
+ 'nasc[ _]ar',
+ 'tengill[ _][úg]g',
+ 'ua',
+ 'yüm yg',
+ 'רא',
+ 'وصلة مقالة جيدة',
+ 'وصلة مقالة مختارة',
+]
+
+
+def get_stars(text):
+ """
+ Extract stars templates from wikitext.
+
+ @param text: a wiki text
+ @type text: str
+ @return: list of stars templates
+ @rtype: list
+ """
+ allstars = []
+ starstext = removeDisabledParts(text)
+ for star in starsList:
+ regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
+ % star, re.I)
+ found = regex.findall(starstext)
+ if found:
+ allstars += found
+ return allstars
+
+
+def remove_stars(text, stars_list):
+ """
+ Remove stars templates from text.
+
+ @param text: a wiki text
+ @type text: str
+ @param stars_list: list of stars templates previously found in text
+ @return: modified text
+ @rtype: str
+ """
+ for star in stars_list:
+ text = text.replace(star, '')
+ return text
+
+
+def append_stars(text, stars_list, site=None):
+ """
+ Append stars templates to text.
+
+ @param text: a wiki text
+ @type text: str
+ @param stars_list: list of stars templates previously found in text
+ @type stars_list: list
+ @param site: a site where the given text is used.
+ interwiki_text_separator is used when a site object is given.
+ Otherwise line_separator is used twice to separate stars list.
+ @type site: BaseSite
+ @return: modified text
+ @rtype: str
+ """
+ LS = (config.line_separator * 2
+ if not site else site.family.interwiki_text_separator)
+ text = text.strip() + LS
+ stars = stars_list[:]
+ stars.sort()
+ for element in stars:
+ text += element.strip() + config.line_separator
+ return text
+
+
+def standardize_stars(text):
+ """Make sure that star templates are in the right
order."""
+ allstars = get_stars(text)
+ text = remove_stars(text, allstars)
+ return append_stars(text, allstars)
+
+
# --------------------------
# Page parsing functionality
# --------------------------
diff --git a/scripts/add_text.py b/scripts/add_text.py
index a5b03f1..9beb31a 100755
--- a/scripts/add_text.py
+++ b/scripts/add_text.py
@@ -3,8 +3,7 @@
r"""
This is a Bot to add a text at the end of the content of the page.
-By default it adds the text above categories, interwiki and template
-for the stars of the interwiki.
+By default it adds the text above categories and interwiki.
Alternatively it may also add a text at the top of the page.
These command line parameters can be used to specify which pages to work on:
@@ -80,37 +79,6 @@
}
-starsList = [
- u'bueno',
- u'bom interwiki',
- u'cyswllt[ _]erthygl[ _]ddethol', u'dolen[ _]ed',
- u'destacado', u'destaca[tu]',
- u'enllaç[ _]ad',
- u'enllaz[ _]ad',
- u'leam[ _]vdc',
- u'legătură[ _]a[bcf]',
- u'liamm[ _]pub',
- u'lien[ _]adq',
- u'lien[ _]ba',
- u'liên[ _]kết[ _]bài[ _]chất[ _]lượng[ _]tốt',
- u'liên[ _]kết[ _]chọn[ _]lọc',
- u'ligam[ _]adq',
- u'ligazón[ _]a[bd]',
- u'ligoelstara',
- u'ligoleginda',
- u'link[ _][afgu]a', u'link[ _]adq', u'link[ _]f[lm]',
u'link[ _]km',
- u'link[ _]sm', u'linkfa',
- u'na[ _]lotura',
- u'nasc[ _]ar',
- u'tengill[ _][úg]g',
- u'ua',
- u'yüm yg',
- u'רא',
- u'وصلة مقالة جيدة',
- u'وصلة مقالة مختارة',
-]
-
-
def add_text(page, addText, summary=None, regexSkip=None,
regexSkipUrl=None, always=False, up=False, putText=True,
oldTextGiven=None, reorderEnabled=True, create=False):
@@ -123,10 +91,6 @@
if not summary:
summary = i18n.twtranslate(site, 'add_text-adding',
{'adding': addText[:200]})
-
- # When a page is tagged as "really well written" it has a star in the
- # interwiki links. This is a list of all the templates used (in regex
- # format) to make the stars appear.
errorCount = 0
@@ -188,22 +152,6 @@
newtext = textlib.replaceCategoryLinks(newtext,
categoriesInside, site,
True)
- # Dealing the stars' issue
- # TODO: T123150
- allstars = []
- starstext = textlib.removeDisabledParts(text)
- for star in starsList:
- regex = re.compile('(\{\{(?:template:|)%s\|.*?\}\}[\s]*)'
- % star, re.I)
- found = regex.findall(starstext)
- if found != []:
- newtext = regex.sub('', newtext)
- allstars += found
- if allstars != []:
- newtext = newtext.strip() + config.line_separator * 2
- allstars.sort()
- for element in allstars:
- newtext += '%s%s' % (element.strip(), config.LS)
# Adding the interwiki
newtext = textlib.replaceLanguageLinks(newtext, interwikiInside,
site)
diff --git a/tests/add_text_tests.py b/tests/add_text_tests.py
index 5430858..02d9bc8 100644
--- a/tests/add_text_tests.py
+++ b/tests/add_text_tests.py
@@ -16,9 +16,9 @@
from tests.aspects import unittest, TestCase
-class TestStarList(TestCase):
+class TestAdding(TestCase):
- """Test starlist."""
+ """Test adding text."""
family = 'wikipedia'
code = 'en'
@@ -26,7 +26,7 @@
dry = True
def test_basic(self):
- """Test adding text before {{linkfa}} without
parameters."""
+ """Test adding text."""
page = pywikibot.Page(self.site, 'foo')
(text, newtext, always) = add_text(
page, 'bar', putText=False,
@@ -35,14 +35,14 @@
'foo\n{{linkfa}}\nbar',
newtext)
- def test_with_params(self):
- """Test adding text before {{linkfa|...}}."""
+ def test_with_category(self):
+ """Test adding text before categories."""
page = pywikibot.Page(self.site, 'foo')
(text, newtext, always) = add_text(
page, 'bar', putText=False,
- oldTextGiven='foo\n{{linkfa|...}}')
+ oldTextGiven='foo\n[[Category:Foo]]')
self.assertEqual(
- 'foo\nbar\n\n{{linkfa|...}}\n',
+ 'foo\nbar\n\n[[Category:Foo]]',
newtext)
diff --git a/tests/cosmetic_changes_tests.py b/tests/cosmetic_changes_tests.py
index 94c13c2..ad795a0 100644
--- a/tests/cosmetic_changes_tests.py
+++ b/tests/cosmetic_changes_tests.py
@@ -44,9 +44,9 @@
def test_standardizePageFooter(self):
"""Test standardizePageFooter method."""
- self.assertEqual('Foo\n{{link fa}}\n\n[[Category:Foo]]',
+ self.assertEqual('Foo\n{{any template}}\n\n[[Category:Foo]]',
self.cct.standardizePageFooter(
- 'Foo [[category:foo]] {{link fa}}'))
+ 'Foo\n[[category:foo]]\n{{any template}}'))
def test_resolveHtmlEntities(self):
"""Test resolveHtmlEntities method."""
diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py
index e6d67ff..89093ac 100644
--- a/tests/textlib_tests.py
+++ b/tests/textlib_tests.py
@@ -1455,6 +1455,60 @@
self.assertEqual(textlib.unescape('!23<>'"&&'),
'!23<>\'"&&')
+
+class TestStarList(TestCase):
+
+ """Test starlist."""
+
+ net = False
+
+ def test_basic(self):
+ """Test standardizing {{linkfa}} without
parameters."""
+ self.assertEqual(
+ 'foo\n{{linkfa}}\nbar\n\n',
+ textlib.standardize_stars('foo\n{{linkfa}}\nbar'))
+
+ def test_with_params(self):
+ """Test standardizing text with {{linkfa|...}}."""
+ self.assertEqual(
+ 'foo\nbar\n\n{{linkfa|...}}\n',
+ textlib.standardize_stars('foo\n{{linkfa|...}}\nbar'))
+
+ def test_with_sorting_params(self):
+ """Test standardizing text with sorting
parameters."""
+ self.assertEqual(
+ 'foo\n\n{{linkfa|bar}}\n{{linkfa|de}}\n'
+ '{{linkfa|en}}\n{{linkfa|fr}}\n',
+ textlib.standardize_stars(
+ 'foo\n{{linkfa|en}}\n{{linkfa|de}}\n'
+ '{{linkfa|fr}}\n{{linkfa|bar}}'))
+
+ def test_get_stars(self):
+ """Test get_stars method."""
+ self.assertEqual(
+ ['{{linkfa|en}}\n', '{{linkfa|de}}\n',
+ '{{linkfa|fr}}\n', '{{linkfa|bar}}'],
+ textlib.get_stars(
+ 'foo\n{{linkfa|en}}\n{{linkfa|de}}\n'
+ '{{linkfa|fr}}\n{{linkfa|bar}}'))
+
+ def test_remove_stars(self):
+ """Test remove_stars method."""
+ self.assertEqual(
+ 'foo\n{{linkfa|en}}\n{{linkfa|fr}}\n{{linkfa|bar}}',
+ textlib.remove_stars(
+ 'foo\n{{linkfa|en}}\n{{linkfa|de}}\n'
+ '{{linkfa|fr}}\n{{linkfa|bar}}', ['{{linkfa|de}}\n']))
+
+ def test_append_stars(self):
+ """Test append_stars method."""
+ self.assertEqual(
+ 'foo\n\n{{linkfa|bar}}\n{{linkfa|de}}\n'
+ '{{linkfa|en}}\n{{linkfa|fr}}\n',
+ textlib.append_stars(
+ 'foo', ['{{linkfa|en}}\n', '{{linkfa|de}}\n',
+ '{{linkfa|fr}}\n', '{{linkfa|bar}}']))
+
if __name__ == '__main__': # pragma: no cover
try:
unittest.main()
--
To view, visit
https://gerrit.wikimedia.org/r/282352
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I06ca86805693f8ce57e78c34b2ee5ace2659a3ba
Gerrit-PatchSet: 8
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>