jenkins-bot has submitted this change and it was merged.
Change subject: [IMPROV] APISite: Deprecate language method
......................................................................
[IMPROV] APISite: Deprecate language method
The APISite.language() method and the APISite.lang property return the exact
same thing, so it makes sense to remove one of them. As BaseSite already has a
lang property, it makes sense to deprecate the language() method.
This makes the lang property in APISite more visible, and it is clearer that
APISite actually overrides the implementation from BaseSite. Searching for
'def lang(', for example, would only yield BaseSite's implementation.
Also fixed several usages to use the code instead of the language.
Change-Id: I938c6b587cc020ba401fb09883520bbb2a5367c0
---
M pywikibot/families/wikipedia_family.py
M pywikibot/interwiki_graph.py
M pywikibot/page.py
M pywikibot/site.py
M pywikibot/textlib.py
M scripts/imagerecat.py
M scripts/interwiki.py
M scripts/makecat.py
M scripts/noreferences.py
M scripts/script_wui.py
M scripts/solve_disambiguation.py
M tests/aspects.py
M tests/pagegenerators_tests.py
M tests/site_tests.py
M tests/wikibase_tests.py
15 files changed, 49 insertions(+), 39 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/families/wikipedia_family.py b/pywikibot/families/wikipedia_family.py
index e726b71..fb7893d 100644
--- a/pywikibot/families/wikipedia_family.py
+++ b/pywikibot/families/wikipedia_family.py
@@ -523,7 +523,7 @@
"""Override the family interwiki prefixes for each site."""
# In Swedish Wikipedia 's:' is part of page title not a family
# prefix for 'wikisource'.
- if site.language() == 'sv':
+ if site.code == 'sv':
d = self.known_families.copy()
d.pop('s')
d['src'] = 'wikisource'
diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py
index 6f2c49a..91aa181 100644
--- a/pywikibot/interwiki_graph.py
+++ b/pywikibot/interwiki_graph.py
@@ -134,7 +134,7 @@
def getLabel(self, page):
"""Get label for page."""
- return '"%s:%s"' % (page.site.language(), page.title())
+ return '"%s:%s"' % (page.site.code, page.title())
def addNode(self, page):
"""Add a node for page."""
@@ -242,7 +242,7 @@
@rtype: str
"""
filename = '%s-%s-%s' % (page.site.family.name,
- page.site.language(),
+ page.site.code,
page.titleForFilename())
if extension:
filename += '.%s' % extension
diff --git a/pywikibot/page.py b/pywikibot/page.py
index b47badd..6fc0ed8 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -3401,7 +3401,7 @@
"""
for key in data:
if isinstance(key, pywikibot.site.BaseSite):
- data[key.language()] = data[key]
+ data[key.lang] = data[key]
del data[key]
return data
diff --git a/pywikibot/site.py b/pywikibot/site.py
index c4d53ce..f89156a 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -2333,11 +2333,15 @@
"""Return this site's internal id."""
return self.siteinfo['wikiid']
+ @deprecated('APISite.lang')
def language(self):
"""Return the code for the language of this Site."""
- return self.siteinfo['lang']
+ return self.lang
- lang = property(fget=language, doc=language.__doc__)
+ @property
+ def lang(self):
+ """Return the code for the language of this Site."""
+ return self.siteinfo['lang']
def version(self):
"""
@@ -6249,8 +6253,8 @@
}
}
labels = {
- page.site.language(): {
- 'language': page.site.language(),
+ page.site.lang: {
+ 'language': page.site.lang,
'value': page.title(),
}
}
@@ -6259,8 +6263,8 @@
'site': link.site.dbName(),
'title': link.title,
}
- labels[link.site.language()] = {
- 'language': link.site.language(),
+ labels[link.site.lang] = {
+ 'language': link.site.lang,
'value': link.title,
}
data = {
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index cffc04c..d3ed4c6 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -811,10 +811,10 @@
separator=separatorstripped)
s = interwikiFormat(new, insite=site)
if s:
- if site.language() in site.family.interwiki_attop or \
+ if site.code in site.family.interwiki_attop or \
u'<!-- interwiki at top -->' in oldtext:
# do not add separator if interwiki links are on one line
- newtext = s + (u'' if site.language()
+ newtext = s + (u'' if site.code
in site.family.interwiki_on_one_line
else separator) + s2.replace(marker, '').strip()
else:
@@ -831,7 +831,7 @@
newtext = (s2[:firstafter].replace(marker, '') +
s +
s2[firstafter:])
- elif site.language() in site.family.categories_last:
+ elif site.code in site.family.categories_last:
cats = getCategoryLinks(s2, site=site)
s2 = removeCategoryLinksAndSeparator(
s2.replace(marker, cseparatorstripped).strip(), site) + \
@@ -903,7 +903,7 @@
except AttributeError:
s.append(pywikibot.Site(site, insite.family).linkto(
links[site], othersite=insite))
- if insite.lang in insite.family.interwiki_on_one_line:
+ if insite.code in insite.family.interwiki_on_one_line:
sep = u' '
else:
sep = config.line_separator
@@ -1097,7 +1097,7 @@
separator=separatorstripped)
s = categoryFormat(new, insite=site)
if s:
- if site.language() in site.family.category_attop:
+ if site.code in site.family.category_attop:
newtext = s + separator + s2
else:
# calculate what was after the categories links on the page
@@ -1114,7 +1114,7 @@
newtext = (s2[:firstafter].replace(marker, '') +
s +
s2[firstafter:])
- elif site.language() in site.family.categories_last:
+ elif site.code in site.family.categories_last:
newtext = s2.replace(marker, '').strip() + separator + s
else:
interwiki = getLanguageLinks(s2, insite=site)
diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py
index ae0c5c1..657bcbd 100755
--- a/scripts/imagerecat.py
+++ b/scripts/imagerecat.py
@@ -128,7 +128,7 @@
global search_wikis
global hint_wiki
site = imagepage.site
- lang = site.language()
+ lang = site.code
family = site.family.name
if lang == u'commons' and family == u'commons':
parameters = urlencode(
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 574e843..151f21d 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1050,9 +1050,9 @@
# Allow for a mapping between different namespaces
crossFrom = self.originPage.site.family.crossnamespace.get(
self.originPage.namespace(), {})
- crossTo = crossFrom.get(self.originPage.site.language(),
+ crossTo = crossFrom.get(self.originPage.site.lang,
crossFrom.get('_default', {}))
- nsmatch = crossTo.get(linkedPage.site.language(),
+ nsmatch = crossTo.get(linkedPage.site.lang,
crossTo.get('_default', []))
if linkedPage.namespace() in nsmatch:
return False
@@ -1090,7 +1090,7 @@
newHint = pywikibot.input(
u'Give the alternative for language %s, not '
u'using a language code:'
- % linkedPage.site.language())
+ % linkedPage.site.lang)
if newHint:
alternativePage = pywikibot.Page(
linkedPage.site, newHint)
@@ -1190,7 +1190,7 @@
elif choice == 'a':
newHint = pywikibot.input(
u'Give the alternative for language %s, not using a '
- u'language code:' % page.site.language())
+ u'language code:' % page.site.lang)
alternativePage = pywikibot.Page(page.site, newHint)
return (True, alternativePage)
elif choice == 'g':
@@ -1200,7 +1200,7 @@
return (False, None)
def isIgnored(self, page):
- if page.site.language() in globalvar.neverlink:
+ if page.site.lang in globalvar.neverlink:
pywikibot.output(u"Skipping link %s to an ignored language" % page)
return True
if page in globalvar.ignore:
@@ -1286,8 +1286,8 @@
pywikibot.output(
u'WARNING: %s:%s relates to %s:%s, which is an '
u'auto entry %s(%s)'
- % (self.originPage.site.language(), self.originPage,
- page.site.language(), page, dictName, year))
+ % (self.originPage.site.lang, self.originPage,
+ page.site.lang, page, dictName, year))
# Abort processing if the bot is running in autonomous mode.
if globalvar.autonomous:
@@ -1419,7 +1419,7 @@
# Ignore the interwiki links.
iw = ()
if globalvar.lacklanguage:
- if globalvar.lacklanguage in [link.site.language()
+ if globalvar.lacklanguage in [link.site.lang
for link in iw]:
iw = ()
self.workonme = False
@@ -2440,7 +2440,7 @@
# expects a list of strings, so we convert it back.
# TODO: This is a quite ugly hack, in the future we should maybe make
# titletranslate expect a list of pagelinks.
- hintStrings = ['%s:%s' % (hintedPage.site.language(),
+ hintStrings = ['%s:%s' % (hintedPage.site.lang,
hintedPage.title()) for hintedPage in pagelist]
bot.add(page, hints=hintStrings)
diff --git a/scripts/makecat.py b/scripts/makecat.py
index d1a80f9..ded4a20 100755
--- a/scripts/makecat.py
+++ b/scripts/makecat.py
@@ -45,14 +45,8 @@
import sys
import codecs
import pywikibot
-from pywikibot import date, pagegenerators, i18n, textlib
+from pywikibot import pagegenerators, i18n, textlib
from pywikibot.tools import DequeGenerator
-
-
-def isdate(s):
- """Return true if s is a date or year."""
- dict, val = date.getAutoFormat(pywikibot.Site().language(), s)
- return dict is not None
def needcheck(pl):
@@ -62,7 +56,7 @@
if pl in checked:
return False
if skipdates:
- if isdate(pl.title()):
+ if pl.autoFormat()[0] is not None:
return False
return True
diff --git a/scripts/noreferences.py b/scripts/noreferences.py
index a8f12b6..11f03b0 100755
--- a/scripts/noreferences.py
+++ b/scripts/noreferences.py
@@ -638,7 +638,7 @@
def createReferenceSection(self, oldText, index, ident='=='):
"""Create a reference section and insert it into the given text."""
- if self.site.language() in noTitleRequired:
+ if self.site.code in noTitleRequired:
newSection = u'\n%s\n' % (self.referencesText)
else:
newSection = u'\n%s %s %s\n%s\n' % (ident,
diff --git a/scripts/script_wui.py b/scripts/script_wui.py
index 052d721..aa87b53 100755
--- a/scripts/script_wui.py
+++ b/scripts/script_wui.py
@@ -315,7 +315,7 @@
site = pywikibot.Site()
site.login()
- chan = '#' + site.language() + '.' + site.family.name
+ chan = '#' + site.code + '.' + site.family.name
bot = ScriptWUIBot(site, chan, site.user() + "_WUI", "irc.wikimedia.org")
try:
bot.start()
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index 03ed627..4ac3b0d 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -568,7 +568,7 @@
self.minimum = minimum
self.mysite = pywikibot.Site()
- self.mylang = self.mysite.language()
+ self.mylang = self.mysite.lang
self.comment = None
self.dn_template_str = i18n.translate(self.mysite, dn_template)
diff --git a/tests/aspects.py b/tests/aspects.py
index 3e6f6c2..d6160f4 100644
--- a/tests/aspects.py
+++ b/tests/aspects.py
@@ -1273,6 +1273,13 @@
if self._do_test_warning_filename:
self.assertDeprecationFile(self.expect_warning_filename)
+ def assertOneDeprecation(self, msg=None, reset=True):
+ """Assert that exactly one deprecation happened and reset if wished."""
+ self.assertEqual(len(self.deprecation_messages), 1)
+ self.assertDeprecation(msg)
+ if reset:
+ self._reset_messages()
+
def assertNoDeprecation(self, msg=None):
if msg:
self.assertNotIn(msg, self.deprecation_messages)
diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py
index d845b84..5e20097 100755
--- a/tests/pagegenerators_tests.py
+++ b/tests/pagegenerators_tests.py
@@ -298,7 +298,7 @@
def test_basic(self):
site = self.get_site()
# Some languages are missing (T85681)
- if (site.language() not in date.formats['YearBC']) or (site.language() not in date.formats['YearAD']):
+ if (site.lang not in date.formats['YearBC']) or (site.lang not in date.formats['YearAD']):
raise unittest.SkipTest('Date formats for this language are missing from date.py')
start = -20
end = 2026
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 7b25577..f3d6dae 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -109,6 +109,11 @@
self.assertIn('statistics', self.site.siteinfo('statistics', dump=True))
self.assertDeprecation('Calling siteinfo is deprecated, use itself instead.')
+ def test_language_method(self):
+ """Test if the language method returns the same as the lang property."""
+ self.assertEqual(self.site.language(), self.site.lang)
+ self.assertOneDeprecation()
+
def test_allpages_filterredir_True(self):
"""Test that filterredir set to 'only' is deprecated to True."""
for page in self.site.allpages(filterredir='only', total=1):
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index bbac998..bcd1f70 100644
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -654,7 +654,7 @@
self.wdp.get()
def test_iterlinks_page_object(self):
- page = [pg for pg in self.wdp.iterlinks() if pg.site.language() == 'af'][0]
+ page = [pg for pg in self.wdp.iterlinks() if pg.site.code == 'af'][0]
self.assertEqual(page, pywikibot.Page(self.get_site('afwiki'), u'New York Stad'))
def test_iterlinks_filtering(self):
--
To view, visit https://gerrit.wikimedia.org/r/221593
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I938c6b587cc020ba401fb09883520bbb2a5367c0
Gerrit-PatchSet: 4
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ricordisamoa <ricordisamoa(a)openmailbox.org>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Move UserEditFilterGenerator into pagegenerators
......................................................................
Move UserEditFilterGenerator into pagegenerators
Bug: T104265
Change-Id: I175f220955cd4ceeeb0576dbb7773dd21f6f3634
---
M pywikibot/pagegenerators.py
M scripts/template.py
2 files changed, 41 insertions(+), 32 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index df3382c..f00acd1 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -1481,6 +1481,45 @@
yield page
+def UserEditFilterGenerator(generator, username, timestamp=None, skip=False,
+ max_revision_depth=None):
+ """
+ Generator which will yield Pages modified by username.
+
+ It only looks at the last editors given by max_revision_depth.
+ If timestamp is set in MediaWiki format JJJJMMDDhhmmss, older edits are
+ ignored.
+ If skip is set, pages edited by the given user are ignored otherwise only
+ pages edited by this user are given back.
+
+ @param generator: A generator object
+ @param username: user name which edited the page
+ @type username: str
+ @param timestamp: ignore edits which are older than this timestamp
+ @type timestamp: str (MediaWiki format JJJJMMDDhhmmss) or None
+ @param skip: Ignore pages edited by the given user
+ @type skip: bool
+ @param max_revision_depth: It only looks at the last editors given by
+ max_revision_depth
+ @type max_revision_depth: int or None
+ """
+ if timestamp:
+ ts = pywikibot.Timestamp.fromtimestampformat(timestamp)
+ else:
+ ts = pywikibot.Timestamp.min
+ for page in generator:
+ found = False
+ for ed in page.revisions(total=max_revision_depth):
+ if ed.timestamp >= ts:
+ if username == ed.user:
+ found = True
+ break
+ else:
+ break
+ if found != bool(skip): # xor operation
+ yield page
+
+
def CombinedPageGenerator(generators):
"""Yield from each iterable until exhausted, then proceed with the next."""
return itertools.chain(*generators)
diff --git a/scripts/template.py b/scripts/template.py
index dd28369..849f50f 100755
--- a/scripts/template.py
+++ b/scripts/template.py
@@ -121,36 +121,6 @@
from scripts.replace import ReplaceRobot as ReplaceBot
-def UserEditFilterGenerator(generator, username, timestamp=None, skip=False,
- max_revision_depth=None):
- """
- Generator which will yield Pages modified by username.
-
- It only looks at the last editors given by max_revision_depth.
- If timestamp is set in MediaWiki format JJJJMMDDhhmmss, older edits are
- ignored.
- If skip is set, pages edited by the given user are ignored otherwise only
- pages edited by this user are given back.
- """
- if timestamp:
- ts = pywikibot.Timestamp.fromtimestampformat(timestamp)
- else:
- ts = pywikibot.Timestamp.min
- for page in generator:
- found = False
- for ed in page.revisions(total=max_revision_depth):
- if ed.timestamp >= ts:
- if username == ed.user:
- found = True
- break
- else:
- break
- if found != bool(skip): # xor operation
- yield page
- else:
- pywikibot.output(u'Skipping %s' % page.title(asLink=True))
-
-
class XmlDumpTemplatePageGenerator(object):
"""
@@ -385,8 +355,8 @@
gen = pagegenerators.CombinedPageGenerator(gens)
gen = pagegenerators.DuplicateFilterPageGenerator(gen)
if user:
- gen = UserEditFilterGenerator(gen, user, timestamp, skip,
- max_revision_depth=100)
+ gen = pagegenerators.UserEditFilterGenerator(gen, user, timestamp, skip,
+ max_revision_depth=100)
if not genFactory.gens:
# make sure that proper namespace filtering etc. is handled
--
To view, visit https://gerrit.wikimedia.org/r/221660
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I175f220955cd4ceeeb0576dbb7773dd21f6f3634
Gerrit-PatchSet: 4
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>