jenkins-bot submitted this change.

View Change

Approvals: Mpaa: Looks good to me, approved jenkins-bot: Verified
[cleanup] Drop support of MediaWiki 1.14-1.18

We already announced that MediaWiki support prior to 1.19 will be dropped.
Remove those parts from the repository, excluding logentries, which
should be checked first; Fandom probably still uses the old style.

Bug: T245350
Change-Id: I307febd7fadb3dd33ab72231718cf2f46b621bbb
---
M README.rst
M pywikibot/cosmetic_changes.py
M pywikibot/data/api.py
M pywikibot/page/__init__.py
M pywikibot/pagegenerators.py
M pywikibot/site/__init__.py
M pywikibot/site_detect.py
M tests/dry_api_tests.py
M tests/dry_site_tests.py
M tests/page_tests.py
M tests/site_decorators_tests.py
M tests/site_detect_tests.py
M tests/site_tests.py
13 files changed, 104 insertions(+), 359 deletions(-)

diff --git a/README.rst b/README.rst
index 73b73e6..d06a268 100644
--- a/README.rst
+++ b/README.rst
@@ -22,7 +22,7 @@

The Pywikibot framework is a Python library that interfaces with the
`MediaWiki API <https://www.mediawiki.org/wiki/API:Main_page>`_
-version 1.14 or higher.
+version 1.19 or higher.

Also included are various general function scripts that can be adapted for
different tasks.
diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index 3d85630..2bd8d34 100755
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -74,6 +74,7 @@
except ImportError:
stdnum_isbn = None

+
# Subpage templates. Must be in lower case,
# whereas subpage itself must be case sensitive
# This is also used by interwiki.py
@@ -389,8 +390,7 @@
# a clone is needed. Won't change the namespace dict
namespaces = list(namespace)
if namespace == 6 and self.site.family.name == 'wikipedia':
- if self.site.code in ('en', 'fr') \
- and self.site.mw_version >= '1.14':
+ if self.site.code in ('en', 'fr'):
# do not change "Image" on en-wiki and fr-wiki
assert 'Image' in namespaces
namespaces.remove('Image')
@@ -549,7 +549,7 @@
titleWithSection = titleWithSection.rstrip()
hadTrailingSpaces = len(titleWithSection) != titleLength

- # Convert URL-encoded characters to unicode
+ # Convert URL-encoded characters to str
titleWithSection = url2unicode(titleWithSection,
encodings=self.site)

@@ -637,7 +637,7 @@
return text

def resolveHtmlEntities(self, text):
- """Replace HTML entities with unicode."""
+ """Replace HTML entities with string."""
ignore = [
38, # Ampersand (&amp;)
39, # Single quotation mark (&quot;) per T26093
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 9bd5791..d79300c 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -240,9 +240,6 @@
return
mw_ver = self.site.mw_version

- if mw_ver < '1.15':
- self._parse_help()
-
# The paraminfo api deprecated the old request syntax of
# querymodules='info'; to avoid warnings sites with 1.25wmf4+
# must only use 'modules' parameter.
@@ -251,17 +248,13 @@
if self.modules_only_mode:
self.paraminfo_keys = frozenset(['modules'])

- # v1.18 and earlier paraminfo doesn't include modules; must use 'query'
# Assume that by v1.26, it will be desirable to prefetch 'query'
- if mw_ver > '1.26' or mw_ver < '1.19':
+ if mw_ver > '1.26':
self.preloaded_modules |= {'query'}

self._fetch(self.preloaded_modules)

- # paraminfo 'mainmodule' was added 1.15
- assert('main' in self._paraminfo)
main_modules_param = self.parameter('main', 'action')
-
assert(main_modules_param)
assert('type' in main_modules_param)
assert(isinstance(main_modules_param['type'], list))
@@ -277,9 +270,8 @@
self._limit = query_modules_param['limit']

if query_modules_param and 'type' in query_modules_param:
- # 1.19+ 'type' is the list of modules; on 1.18, it is 'string'
- if isinstance(query_modules_param['type'], list):
- self._add_submodules('query', query_modules_param['type'])
+ # 'type' is the list of modules
+ self._add_submodules('query', query_modules_param['type'])

if 'query' not in self._modules:
assert 'query' not in self._paraminfo
@@ -287,7 +279,7 @@
assert 'query' in self._modules

def _emulate_pageset(self):
- """Emulate the pageset module, which existed in MW 1.15-1.24."""
+ """Emulate the pageset module, which existed until MW 1.24."""
# pageset isn't a module in the new system, so it is emulated, with
# the paraminfo from the query module.
assert('query' in self._paraminfo)
@@ -302,56 +294,6 @@
'parameters': self._paraminfo['query']['parameters']
}

- def _parse_help(self):
- """Emulate paraminfo['main'] data using help for mw 1.14."""
- # Request need ParamInfo to determine use_get
- request = self.site._request(expiry=config.API_config_expiry,
- use_get=True,
- parameters={'action': 'help'})
- result = request.submit()
-
- assert('help' in result)
- assert(isinstance(result['help'], dict))
- assert('mime' in result['help'])
- assert(result['help']['mime'] == 'text/plain')
- assert('help' in result['help'])
- assert(isinstance(result['help']['help'], str))
-
- help_text = result['help']['help']
-
- start = help_text.find('What action you would like to perform')
- start = help_text.find('One value: ', start) + len('One value: ')
- end = help_text.find('\n', start)
-
- action_modules = help_text[start:end].split(', ')
-
- start = help_text.find('The format of the output')
- start = help_text.find('One value: ', start) + len('One value: ')
- end = help_text.find('\n', start)
-
- format_modules = help_text[start:end].split(', ')
-
- self._paraminfo['main'] = {
- 'name': 'main',
- 'path': 'main',
- 'classname': 'ApiMain',
- 'prefix': '',
- 'readrights': '',
- 'helpurls': [],
- 'parameters': [
- {
- 'name': 'action',
- 'type': action_modules,
- 'submodules': '',
- },
- {
- 'name': 'format',
- 'type': format_modules,
- 'submodules': '',
- },
- ],
- }
-
@staticmethod
def _modules_to_set(modules) -> set:
"""Return modules as a set.
diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index bee913d..58aa798 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -25,6 +25,7 @@

from collections import Counter, defaultdict, OrderedDict
from collections.abc import MutableMapping
+from contextlib import suppress
from html.entities import name2codepoint
from itertools import chain
from typing import Any, Dict, List, Optional, Union
@@ -317,7 +318,6 @@
-family and -lang option i.e. config.family and config.mylang
@param without_brackets: (cannot be used with as_link) if true, remove
the last pair of brackets(usually removes disambiguation brackets).
- @rtype: str
"""
title = self._link.canonical_title()
label = self._link.title
@@ -705,7 +705,7 @@
"""Return True if last editor was unregistered."""
return self.latest_revision.anon

- def lastNonBotUser(self):
+ def lastNonBotUser(self) -> str:
"""
Return name or IP address of last human/non-bot user to edit page.

@@ -715,8 +715,6 @@
If the edit was done by a bot which is no longer flagged as 'bot',
i.e. which is not returned by Site.botusers(), it will be returned
as a non-bot edit.
-
- @rtype: str
"""
if hasattr(self, '_lastNonBotUser'):
return self._lastNonBotUser
@@ -745,8 +743,6 @@
Return the revision id for the previous revision of this Page.

If the page has only one revision, it shall return -1.
-
- @raise AssertionError: Use on MediaWiki prior to v1.16.
"""
return self.latest_revision.parent_id or -1

@@ -757,8 +753,6 @@
Return the revision id for the previous revision.

DEPRECATED: Use latest_revision.parent_id instead.
-
- @raise AssertionError: Use on MediaWiki prior to v1.16.
"""
return self.latest_revision.parent_id or -1

@@ -792,16 +786,14 @@

@param force: Bypass local caching
"""
- found = False
if self.isRedirectPage():
static_keys = self.site.getmagicwords('staticredirect')
text = self.get(get_redirect=True, force=force)
if static_keys:
for key in static_keys:
if key in text:
- found = True
- break
- return found
+ return True
+ return False

def isCategoryRedirect(self) -> bool:
"""Return True if this is a category redirect page, False otherwise."""
@@ -1358,10 +1350,8 @@
"""Clear the cached attributes of the page."""
self._revisions = {}
for attr in self._cache_attrs:
- try:
+ with suppress(AttributeError):
delattr(self, attr)
- except AttributeError:
- pass

def purge(self, **kwargs) -> bool:
"""
@@ -1996,7 +1986,7 @@
protections[arg_name] = value
warn('"protections" argument of protect() replaces "{0}"'
.format(arg_name),
- DeprecationWarning)
+ FutureWarning)
else:
if value:
warn('"protections" argument of protect() replaces "{0}";'
@@ -2018,7 +2008,7 @@
reason = pywikibot.input('Please enter a reason for the action:')
if unprotect:
warn('"unprotect" argument of protect() is deprecated',
- DeprecationWarning, 2)
+ FutureWarning, 2)
protections = {p_type: ''
for p_type in self.applicable_protections()}
answer = 'y'
@@ -2026,7 +2016,7 @@
prompt = True
if prompt:
warn('"prompt" argument of protect() is deprecated',
- DeprecationWarning, 2)
+ FutureWarning, 2)
if prompt and not hasattr(self.site, '_noProtectPrompt'):
answer = pywikibot.input_choice(
'Do you want to change the protection level of %s?'
@@ -2743,10 +2733,8 @@
(Deprecated in MW 1.24)
@param startprefix: if provided, only generate pages >= this title
lexically; not valid if sortby="timestamp"; overrides "startsort"
- (requires MW 1.18+)
@param endprefix: if provided, only generate pages < this title
lexically; not valid if sortby="timestamp"; overrides "endsort"
- (requires MW 1.18+)
@rtype: typing.Iterable[pywikibot.Page]
"""
seen = set()
@@ -2901,8 +2889,7 @@
'Namespace of the page is not consistent'
cached = check_cache(pywikibot.Timestamp.fromISOformat(
member['timestamp']))
- for cached_page in cached:
- yield cached_page
+ yield from cached
if total is not None:
total -= len(cached)
if total <= 0:
@@ -2912,8 +2899,7 @@
# clear cache
assert total is None or total > 0, \
'As many items as given in total already returned'
- for cached_page in check_cache(pywikibot.Timestamp.min):
- yield cached_page
+ yield from check_cache(pywikibot.Timestamp.min)


class User(Page):
@@ -3210,7 +3196,7 @@
return next(iter(self.logevents(total=1)), None)

@deprecated_args(limit='total', namespace='namespaces')
- def contributions(self, total=500, **kwargs):
+ def contributions(self, total=500, **kwargs) -> tuple:
"""
Yield tuples describing this user edits.

@@ -3233,7 +3219,6 @@
@keyword top_only: if True, iterate only edits which are the latest
revision (default: False)
@return: tuple of pywikibot.Page, revid, pywikibot.Timestamp, comment
- @rtype: tuple
"""
for contrib in self.site.usercontribs(
user=self.username, total=total, **kwargs):
@@ -4753,12 +4738,8 @@
self._type = datatype

@property
- def type(self):
- """
- Return the type of this property.
-
- @rtype: str
- """
+ def type(self) -> str:
+ """Return the type of this property."""
if not hasattr(self, '_type'):
self._type = self.repo.getPropertyType(self)
return self._type
@@ -5391,11 +5372,9 @@
precision = coord_args[2]
else:
precision = 0.0001 # Default value (~10 m at equator)
- try:
+ with suppress(TypeError):
if self.target.precision is not None:
precision = max(precision, self.target.precision)
- except TypeError:
- pass

return (abs(self.target.lat - coord_args[0]) <= precision
and abs(self.target.lon - coord_args[1]) <= precision)
@@ -5487,11 +5466,11 @@
@type minor: bool
@param rollbacktoken: rollback token
@type rollbacktoken: str
- @param parentid: id of parent Revision (v1.16+)
+ @param parentid: id of parent Revision
@type parentid: int
@param contentmodel: content model label (v1.21+)
@type contentmodel: str
- @param sha1: sha1 of revision text (v1.19+)
+ @param sha1: sha1 of revision text
@type sha1: str
@param slots: revision slots (v1.32+)
@type slots: dict
@@ -5921,7 +5900,7 @@
if source_is_page:
self._text = source.title(with_section=False) + self._text

- def parse_site(self):
+ def parse_site(self) -> tuple:
"""
Parse only enough text to determine which site the link points to.

@@ -5932,7 +5911,6 @@
@return: The family name and site code for the linked site. If the site
is not supported by the configured families it returns None instead
of a str.
- @rtype: tuple
"""
t = self._text
fam = self._source.family
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 4f19dc6..b9b163e 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -1498,21 +1498,12 @@

"""
kwargs = {
- 'recurse': recurse, 'total': total,
- 'content': content, 'namespaces': namespaces,
+ 'content': content,
+ 'namespaces': namespaces,
+ 'recurse': recurse,
+ 'startprefix': start,
+ 'total': total,
}
- if start:
- if category.site.mw_version < '1.18':
- kwargs.pop('total', None)
- count = 0
- for article in category.articles(**kwargs):
- if article.title() >= start:
- count += 1
- yield article
- if count == total:
- return
- return
- kwargs['startprefix'] = start
yield from category.articles(**kwargs)


diff --git a/pywikibot/site/__init__.py b/pywikibot/site/__init__.py
index 6b1c978..f28d123 100644
--- a/pywikibot/site/__init__.py
+++ b/pywikibot/site/__init__.py
@@ -31,7 +31,6 @@
from itertools import zip_longest
from textwrap import fill
from typing import Optional
-from urllib.parse import urlparse
from warnings import warn

import pywikibot
@@ -1290,12 +1289,6 @@
# Be careful with version tests inside this here as it might need to
# query this method to actually get the version number

- if prop == 'general':
- if 'articlepath' not in data: # Introduced in 1.16.0
- # Old version of MediaWiki, extract from base
- path = urlparse(data['base'])[2].rsplit('/', 1)[0] + '/$1'
- data['articlepath'] = path
-
# Convert boolean props from empty strings to actual boolean values
if prop in Siteinfo.BOOLEAN_PROPS.keys():
# siprop=namespaces and
@@ -2277,14 +2270,6 @@
raise NotImplementedError(
'MediaWiki messages missing: {0}'.format(needed_mw_messages))

- if self.mw_version < '1.16':
- for key, value in msgs.items():
- if key == 'and' and value == ',&#32;and':
- # v1.14 defined and as ',&#32;and'; fixed in v1.15
- msgs['and'] = ' and'
- else:
- msgs[key] = pywikibot.html2unicode(value)
-
args = list(args)
concat = msgs['and'] + msgs['word-separator']
return msgs['comma-separator'].join(
@@ -2338,20 +2323,14 @@
"""
Return a Timestamp object representing the current server time.

- For wikis with a version newer than 1.16 it uses the 'time' property
- of the siteinfo 'general'. It'll force a reload before returning the
- time. It requests to expand the text '{{CURRENTTIMESTAMP}}' for older
- wikis.
+ It uses the 'time' property of the siteinfo 'general'. It'll force a
+ reload before returning the time.

@return: the current server time
@rtype: L{Timestamp}
"""
- if self.mw_version >= '1.16':
- return pywikibot.Timestamp.fromISOformat(
- self.siteinfo.get('time', expiry=True))
- else:
- return pywikibot.Timestamp.fromtimestampformat(
- self.expand_text('{{CURRENTTIMESTAMP}}'))
+ return pywikibot.Timestamp.fromISOformat(
+ self.siteinfo.get('time', expiry=True))

def getmagicwords(self, word):
"""Return list of localized "word" magic words for the site."""
@@ -2418,8 +2397,6 @@
custom_name = nsdata.pop('*')
canonical_name = nsdata.pop('canonical')

- nsdata.setdefault('content', ns == 0) # mw < 1.16
-
default_case = Namespace.default_case(ns)
if 'case' not in nsdata:
nsdata['case'] = default_case or self.siteinfo['case']
@@ -2502,17 +2479,10 @@
version = self.family.version(self.code)

if MediaWikiVersion(version) < MediaWikiVersion('1.19'):
- warn('\n'
- + fill('Support of MediaWiki {mw_version} will be dropped. '
- 'It is recommended to use MediaWiki 1.19 or above. '
- 'You may use every Pywikibot 3.0.X release, Pywikibot '
- '"{py_version}" or the "python2" release from the '
- 'repository for older MediaWiki versions. '
- 'See T245350 for further information.'
- .format(mw_version=version,
- py_version=pywikibot.__version__)),
- FutureWarning)
-
+ raise RuntimeError(
+ 'Pywikibot "{}" does not support MediaWiki "{}".\n'
+ 'Use Pywikibot prior to "5.0" or "python2" branch '
+ 'instead.'.format(pywikibot.__version__, version))
return version

@property
@@ -3208,11 +3178,6 @@
types_wiki.append('patrol')
valid_types = [token for token in types if token in types_wiki]

- # Pre 1.17, preload token was the same as the edit token.
- if mw_ver < '1.17':
- if 'patrol' in types and 'edit' not in valid_types:
- valid_types.append('edit')
-
elif mw_ver < '1.24wmf19':
types_wiki = self._paraminfo.parameter('tokens',
'type')['type']
@@ -3235,8 +3200,7 @@
def get_tokens(self, types, all=False):
"""Preload one or multiple tokens.

- For all MediaWiki versions prior to 1.20, only one token can be
- retrieved at once.
+ For MediaWiki version 1.19, only one token can be retrieved at once.
For MediaWiki versions since 1.24wmfXXX a new token
system was introduced which reduced the amount of tokens available.
Most of them were merged into the 'csrf' token. If the token type in
@@ -3250,9 +3214,7 @@
- userrights
- watch

- (*) Patrol was added in v1.14.
- Until v1.16, the patrol token is same as the edit token.
- For v1.17-19, the patrol token must be obtained from the query
+ (*) For v1.19, the patrol token must be obtained from the query
list recentchanges.

@see: U{https://www.mediawiki.org/wiki/API:Tokens}
@@ -3298,24 +3260,20 @@
# patrol token require special handling.
# TODO: try to catch exceptions?
if 'patrol' in valid_tokens:
- if mw_ver < '1.17':
- if 'edit' in user_tokens:
- user_tokens['patrol'] = user_tokens['edit']
- else:
- req = self._simple_request(action='query',
- list='recentchanges',
- rctoken='patrol', rclimit=1)
+ req = self._simple_request(action='query',
+ list='recentchanges',
+ rctoken='patrol', rclimit=1)

- req._warning_handler = warn_handler
- data = req.submit()
+ req._warning_handler = warn_handler
+ data = req.submit()

- if 'query' in data:
- data = data['query']
- if 'recentchanges' in data:
- item = data['recentchanges'][0]
- pywikibot.debug(str(item), _logger)
- if 'patroltoken' in item:
- user_tokens['patrol'] = item['patroltoken']
+ if 'query' in data:
+ data = data['query']
+ if 'recentchanges' in data:
+ item = data['recentchanges'][0]
+ pywikibot.debug(str(item), _logger)
+ if 'patroltoken' in item:
+ user_tokens['patrol'] = item['patroltoken']
else:
if mw_ver < '1.24wmf19':
if all is not False:
@@ -3645,11 +3603,9 @@
@type endsort: str
@param startprefix: if provided, only generate pages >= this title
lexically; not valid if sortby="timestamp"; overrides "startsort"
- (requires MW 1.18+)
@type startprefix: str
@param endprefix: if provided, only generate pages < this title
lexically; not valid if sortby="timestamp"; overrides "endsort"
- (requires MW 1.18+)
@type endprefix: str
@param content: if True, load the current content of each iterated page
(default False)
@@ -3661,8 +3617,6 @@
values: page, subcat, file
@rtype: typing.Iterable[pywikibot.Page]
@raises KeyError: a namespace identifier was not resolved
- @raises NotImplementedError: startprefix or endprefix parameters are
- given but site.version is less than 1.18.
@raises TypeError: a namespace identifier has an inappropriate
type such as NoneType or bool
"""
@@ -3747,9 +3701,6 @@
raise ValueError('categorymembers: '
"invalid combination of 'sortby' and 'endtime'")
if startprefix and sortby != 'timestamp':
- if self.mw_version < '1.18':
- raise NotImplementedError(
- 'categorymembers: "startprefix" requires MW 1.18+')
cmargs['gcmstartsortkeyprefix'] = startprefix
elif startprefix:
raise ValueError('categorymembers: invalid combination of '
@@ -3760,9 +3711,6 @@
raise ValueError('categorymembers: '
"invalid combination of 'sortby' and 'startsort'")
if endprefix and sortby != 'timestamp':
- if self.mw_version < '1.18':
- raise NotImplementedError(
- 'categorymembers: "endprefix" requires MW 1.18+')
cmargs['gcmendsortkeyprefix'] = endprefix
elif endprefix:
raise ValueError('categorymembers: '
@@ -3852,11 +3800,10 @@

rvargs = {'type_arg': 'info|revisions'}

- rvargs['rvprop'] = ['ids', 'timestamp', 'flags', 'comment', 'user']
+ rvargs['rvprop'] = ['comment', 'flags', 'ids', 'sha1', 'timestamp',
+ 'user']
if self.mw_version >= '1.21':
rvargs['rvprop'].append('contentmodel')
- if self.mw_version >= '1.19':
- rvargs['rvprop'].append('sha1')
if content:
rvargs['rvprop'].append('content')
if section is not None:
@@ -4213,7 +4160,6 @@
aigen.request['gaisha1base36'] = sha1base36
return aigen

- @need_version('1.17')
@deprecated_args(limit='total') # ignore falimit setting
def filearchive(self, start=None, end=None, reverse=False, total=None,
**kwargs):
@@ -4258,7 +4204,6 @@

@note: logevents only logs user blocks, while this method
iterates all blocks including IP ranges.
- @note: C{userid} key will be given for mw 1.18+ only
@note: C{iprange} parameter cannot be used together with C{users}.

@param starttime: start iterating at this Timestamp
@@ -4284,9 +4229,7 @@
bkgen = self._generator(api.ListGenerator, type_arg='blocks',
total=total)
bkgen.request['bkprop'] = ['id', 'user', 'by', 'timestamp', 'expiry',
- 'reason', 'range', 'flags']
- if self.mw_version >= '1.18':
- bkgen.request['bkprop'] += ['userid']
+ 'reason', 'range', 'flags', 'userid']
if starttime:
bkgen.request['bkstart'] = starttime
if endtime:
@@ -4440,7 +4383,6 @@
if namespace is not None:
legen.set_namespace(namespace)
if tag:
- # Supported in version 1.16+; earlier sites will cause APIError
legen.request['letag'] = tag

return legen
@@ -4452,11 +4394,10 @@
rcnamespace='namespaces', number='total', rclimit='total',
showMinor='minor', showBot='bot', showAnon='anon',
showRedirects='redirect', showPatrolled='patrolled',
- topOnly='top_only')
+ topOnly='top_only', pagelist=None)
def recentchanges(self, start=None, end=None, reverse=False,
- namespaces=None, pagelist=None, changetype=None,
- minor=None, bot=None, anon=None,
- redirect=None, patrolled=None, top_only=False,
+ namespaces=None, changetype=None, minor=None, bot=None,
+ anon=None, redirect=None, patrolled=None, top_only=False,
total=None, user=None, excludeuser=None, tag=None):
"""Iterate recent changes.

@@ -4472,8 +4413,6 @@
@type namespaces: iterable of basestring or Namespace key,
or a single instance of those types. May be a '|' separated
list of namespace identifiers.
- @param pagelist: iterate changes to pages in this list only
- @param pagelist: list of Pages
@param changetype: only iterate changes of this type ("edit" for
edits to existing pages, "new" for new pages, "log" for log
entries)
@@ -4520,13 +4459,6 @@
rcgen.request['rcend'] = end
if reverse:
rcgen.request['rcdir'] = 'newer'
- if pagelist:
- if self.mw_version > '1.14':
- pywikibot.warning(
- 'recentchanges: pagelist option is disabled; ignoring.')
- else:
- rcgen.request['rctitles'] = (p.title(with_section=False)
- for p in pagelist)
if changetype:
rcgen.request['rctype'] = changetype
filters = {'minor': minor,
@@ -4576,9 +4508,7 @@
@raises TypeError: a namespace identifier has an inappropriate
type such as NoneType or bool
"""
- where_types = ['text', 'title', 'titles']
- if self.mw_version >= '1.17':
- where_types.append('nearmatch')
+ where_types = ['nearmatch', 'text', 'title', 'titles']
if not searchstring:
raise Error('search: searchstring cannot be empty')
if where not in where_types:
@@ -4840,12 +4770,8 @@
@param usernames: a list of user names
@type usernames: list, or other iterable, of str
"""
- usprop = ['blockinfo', 'groups', 'editcount', 'registration',
- 'emailable']
- if self.mw_version >= '1.16':
- usprop.append('gender')
- if self.mw_version >= '1.17':
- usprop.append('rights')
+ usprop = ['blockinfo', 'gender', 'groups', 'editcount', 'registration',
+ 'rights', 'emailable']
usgen = api.ListGenerator(
'users', site=self, parameters={
'ususers': usernames, 'usprop': usprop})
@@ -4954,7 +4880,6 @@
to one of "watch", "unwatch", "preferences", "nochange":
* watch: add the page to the watchlist
* unwatch: remove the page from the watchlist
- The following settings are supported by mw >= 1.16 only
* preferences: use the preference settings (default)
* nochange: don't change the watchlist
@param bot: if True, mark edit with bot flag
@@ -5024,14 +4949,7 @@

watch_items = {'watch', 'unwatch', 'preferences', 'nochange'}
if watch in watch_items:
- if self.mw_version < '1.16':
- if watch in ['preferences', 'nochange']:
- pywikibot.warning('The watch value {0} is not supported '
- 'by {1}'.format(watch, self))
- else:
- params[watch] = True
- else:
- params['watchlist'] = watch
+ params['watchlist'] = watch
elif watch:
pywikibot.warning(
"editpage: Invalid watch value '%(watch)s' ignored."
@@ -5560,9 +5478,9 @@
@see: U{https://www.mediawiki.org/wiki/API:Protect}

@param protections: A dict mapping type of protection to protection
- level of that type. Valid types of protection are 'edit', 'move',
- 'create', and 'upload'. Valid protection levels (in MediaWiki 1.12)
- are '' (equivalent to 'none'), 'autoconfirmed', and 'sysop'.
+ level of that type. Valid restriction types are 'edit', 'create',
+ 'move' and 'upload'. Valid restriction levels are '' (equivalent
+ to 'none' or 'all'), 'autoconfirmed', and 'sysop'.
If None is given, however, that protection will be skipped.
@type protections: dict
@param reason: Reason for the action
@@ -6503,7 +6421,6 @@
"""
return self.querypage('Unwatchedpages', total)

- @need_version('1.18')
@deprecated_args(step=None)
def wantedpages(self, total=None):
"""Yield Pages from Special:Wantedpages.
@@ -6512,7 +6429,6 @@
"""
return self.querypage('Wantedpages', total)

- @need_version('1.18')
def wantedfiles(self, total=None):
"""Yield Pages from Special:Wantedfiles.

@@ -6520,7 +6436,6 @@
"""
return self.querypage('Wantedfiles', total)

- @need_version('1.18')
def wantedtemplates(self, total=None):
"""Yield Pages from Special:Wantedtemplates.

@@ -6528,7 +6443,6 @@
"""
return self.querypage('Wantedtemplates', total)

- @need_version('1.18')
@deprecated_args(number='total', step=None, repeat=None)
def wantedcategories(self, total=None):
"""Yield Pages from Special:Wantedcategories.
@@ -6596,7 +6510,6 @@
"""
return self.querypage('Withoutinterwiki', total)

- @need_version('1.18')
@deprecated_args(step=None)
def broken_redirects(self, total=None):
"""Yield Pages with broken redirects from Special:BrokenRedirects.
@@ -6605,7 +6518,6 @@
"""
return self.querypage('BrokenRedirects', total)

- @need_version('1.18')
@deprecated_args(step=None)
def double_redirects(self, total=None):
"""Yield Pages with double redirects from Special:DoubleRedirects.
@@ -6614,7 +6526,6 @@
"""
return self.querypage('DoubleRedirects', total)

- @need_version('1.18')
@deprecated_args(step=None)
def redirectpages(self, total=None):
"""Yield redirect pages from Special:ListRedirects.
@@ -6658,10 +6569,6 @@
assert 'create' in self.protection_types(), \
"'create' should be a valid protection type."
if type == 'create':
- if self.mw_version < '1.15':
- raise NotImplementedError(
- 'protectedpages(type=create) requires MW 1.15+')
-
return self._generator(
api.PageGenerator, type_arg='protectedtitles',
namespaces=namespaces, gptlevel=level, total=total)
@@ -6703,7 +6610,6 @@
return self._generator(api.PageGenerator, type_arg='pageswithprop',
gpwppropname=propname, total=total)

- @need_version('1.18')
def compare(self, old, diff):
"""
Corresponding method to the 'action=compare' API action.
diff --git a/pywikibot/site_detect.py b/pywikibot/site_detect.py
index 9d1be29..d55342a 100644
--- a/pywikibot/site_detect.py
+++ b/pywikibot/site_detect.py
@@ -6,16 +6,13 @@
# Distributed under the terms of the MIT license.
#
import json
-import re

from contextlib import suppress
from html.parser import HTMLParser
from urllib.parse import urljoin, urlparse
-
from requests.exceptions import RequestException

import pywikibot
-
from pywikibot.comms.http import fetch
from pywikibot.exceptions import ServerError
from pywikibot.tools import MediaWikiVersion
@@ -29,13 +26,6 @@

"""Minimal wiki site class."""

- REwgEnableApi = re.compile(r'wgEnableAPI ?= ?true')
- REwgServer = re.compile(r'wgServer ?= ?"([^"]*)"')
- REwgScriptPath = re.compile(r'wgScriptPath ?= ?"([^"]*)"')
- REwgArticlePath = re.compile(r'wgArticlePath ?= ?"([^"]*)"')
- REwgContentLanguage = re.compile(r'wgContentLanguage ?= ?"([^"]*)"')
- REwgVersion = re.compile(r'wgVersion ?= ?"([^"]*)"')
-
def __init__(self, fromurl):
"""
Initializer.
@@ -43,7 +33,7 @@
@raises pywikibot.exceptions.ServerError: a server error occurred
while loading the site
@raises Timeout: a timeout occurred while loading the site
- @raises RuntimeError: Version not found or version less than 1.14
+ @raises RuntimeError: Version not found or version less than 1.19
"""
if fromurl.endswith('$1'):
fromurl = fromurl[:-2]
@@ -66,18 +56,13 @@
self.scriptpath = wp.scriptpath
self.articlepath = None

- try:
- self._parse_pre_117(data)
- except Exception as e:
- pywikibot.log('MW pre-1.17 detection failed: {0!r}'.format(e))
-
if self.api:
try:
- self._parse_post_117()
+ self._parse_site()
except (ServerError, RequestException):
raise
except Exception as e:
- pywikibot.log('MW 1.17+ detection failed: {0!r}'.format(e))
+ pywikibot.log('MW detection failed: {0!r}'.format(e))

if not self.version:
self._fetch_old_version()
@@ -98,7 +83,7 @@
'{0}'.format(self.fromurl))

if (not self.version
- or self.version < MediaWikiVersion('1.14')):
+ or self.version < MediaWikiVersion('1.19')):
raise RuntimeError('Unsupported version: {0}'.format(self.version))

def __repr__(self):
@@ -119,21 +104,6 @@
return [wiki for wiki in iw['query']['interwikimap']
if 'language' in wiki]

- def _parse_pre_117(self, data):
- """Parse HTML."""
- if not self.REwgEnableApi.search(data):
- pywikibot.log(
- 'wgEnableApi is not enabled in HTML of %s'
- % self.fromurl)
- with suppress(AttributeError):
- self.version = MediaWikiVersion(
- self.REwgVersion.search(data).group(1))
-
- self.server = self.REwgServer.search(data).groups()[0]
- self.scriptpath = self.REwgScriptPath.search(data).groups()[0]
- self.articlepath = self.REwgArticlePath.search(data).groups()[0]
- self.lang = self.REwgContentLanguage.search(data).groups()[0]
-
def _fetch_old_version(self):
"""Extract the version from API help with ?version enabled."""
if self.version is None:
@@ -154,8 +124,8 @@
else:
self.version = MediaWikiVersion(self.version)

- def _parse_post_117(self):
- """Parse 1.17+ siteinfo data."""
+ def _parse_site(self):
+ """Parse siteinfo data."""
response = fetch(self.api + '?action=query&meta=siteinfo&format=json')
check_response(response)
# remove preleading newlines and Byte Order Mark (BOM), see T128992
diff --git a/tests/dry_api_tests.py b/tests/dry_api_tests.py
index 9e61e60..7f7f4f2 100644
--- a/tests/dry_api_tests.py
+++ b/tests/dry_api_tests.py
@@ -5,8 +5,6 @@
#
# Distributed under the terms of the MIT license.
#
-from __future__ import absolute_import, division, unicode_literals
-
import datetime

import pywikibot
@@ -45,7 +43,7 @@

def setUp(self):
"""Initialize the fake requests."""
- super(DryCachedRequestTests, self).setUp()
+ super().setUp()
self.parms = {'action': 'query',
'meta': 'userinfo'}
self.req = CachedRequest(expiry=1, site=self.basesite,
@@ -157,7 +155,7 @@
self._siteinfo = DummySiteinfo({'case': 'first-letter'})

def version(self):
- return '1.14' # lowest supported release
+ return '1.19' # lowest supported release

def protocol(self):
return 'http'
@@ -185,7 +183,7 @@
raise Exception('Attribute {!r} not defined'.format(attr))

self.mocksite = MockSite()
- super(MockCachedRequestKeyTests, self).setUp()
+ super().setUp()

def test_cachefile_path_different_users(self):
"""Test and compare file paths when different usernames are used."""
@@ -385,7 +383,7 @@

def setUp(self):
"""Add a real ParamInfo to the DrySite."""
- super(ParamInfoDictTests, self).setUp()
+ super().setUp()
site = self.get_site()
site._paraminfo = ParamInfo(site)
# Pretend that paraminfo has been loaded
diff --git a/tests/dry_site_tests.py b/tests/dry_site_tests.py
index 3a9298b..dd3434a 100644
--- a/tests/dry_site_tests.py
+++ b/tests/dry_site_tests.py
@@ -5,8 +5,6 @@
#
# Distributed under the terms of the MIT license.
#
-from __future__ import absolute_import, division, unicode_literals
-
import pywikibot

from pywikibot.comms.http import user_agent
@@ -117,13 +115,13 @@

def setUp(self):
"""Backup the original configuration."""
- super(TestSetAction, self).setUp()
+ super().setUp()
self._old_config = pywikibot.config.default_edit_summary

def tearDown(self):
"""Restore the original configuration."""
pywikibot.config.default_edit_summary = self._old_config
- super(TestSetAction, self).tearDown()
+ super().tearDown()

def test_set_action(self):
"""Test deprecated setAction function."""
diff --git a/tests/page_tests.py b/tests/page_tests.py
index fff7879..7df0893 100644
--- a/tests/page_tests.py
+++ b/tests/page_tests.py
@@ -630,7 +630,7 @@
@classmethod
def setUpClass(cls):
"""Initialize page instance."""
- super(TestPageBaseUnicode, cls).setUpClass()
+ super().setUpClass()
cls.page = pywikibot.Page(cls.site, 'Ō')


@@ -677,14 +677,14 @@

def setUp(self):
"""Force the console encoding to UTF-8."""
- super(TestPageRepr, self).setUp()
+ super().setUp()
self._old_encoding = config.console_encoding
config.console_encoding = 'utf8'

def tearDown(self):
"""Restore the original console encoding."""
config.console_encoding = self._old_encoding
- super(TestPageRepr, self).tearDown()
+ super().tearDown()

def test_mainpage_type(self):
"""Test the return type of repr(Page(<main page>)) is str."""
@@ -709,14 +709,14 @@

def setUp(self):
"""Patch the current console encoding to ASCII."""
- super(TestPageReprASCII, self).setUp()
+ super().setUp()
self._old_encoding = config.console_encoding
config.console_encoding = 'ascii'

def tearDown(self):
"""Restore the original console encoding."""
config.console_encoding = self._old_encoding
- super(TestPageReprASCII, self).tearDown()
+ super().tearDown()


class TestPageBotMayEdit(TestCase):
@@ -731,7 +731,7 @@

def setUp(self):
"""Setup test."""
- super(TestPageBotMayEdit, self).setUp()
+ super().setUp()
self.page = pywikibot.Page(self.site,
'not_existent_page_for_pywikibot_tests')
if self.page.exists():
diff --git a/tests/site_decorators_tests.py b/tests/site_decorators_tests.py
index 5578164..f2d30c0 100644
--- a/tests/site_decorators_tests.py
+++ b/tests/site_decorators_tests.py
@@ -32,7 +32,7 @@
self._userinfo = []
self.obsolete = False
super(TestMustBe, self).setUp()
- self.version = lambda: '1.14' # lowest supported release
+ self.version = lambda: '1.19' # lowest supported release

def login(self, group):
"""Fake the log in as required user group."""
diff --git a/tests/site_detect_tests.py b/tests/site_detect_tests.py
index 5695266..03b9946 100644
--- a/tests/site_detect_tests.py
+++ b/tests/site_detect_tests.py
@@ -58,10 +58,6 @@

"""Test detection of MediaWiki sites."""

- def test_hrwiki(self):
- """Test detection of MediaWiki sites for www.hrwiki.org."""
- self.assertSite('http://www.hrwiki.org/index.php/$1') # v 1.15
-
def test_proofwiki(self):
"""Test detection of MediaWiki sites for www.proofwiki.org."""
self.assertSite('http://www.proofwiki.org/wiki/$1')
@@ -105,9 +101,13 @@
self.assertSite('http://tfwiki.net/wiki/$1')


-class Pre114SiteTestCase(SiteDetectionTestCase):
+class Pre119SiteTestCase(SiteDetectionTestCase):

- """Test pre 1.14 sites which should be detected as unsupported."""
+ """Test pre 1.19 sites which should be detected as unsupported."""
+
+ def test_hrwiki(self):
+ """Test detection of MediaWiki sites for www.hrwiki.org."""
+ self.assertNoSite('http://www.hrwiki.org/index.php/$1') # v 1.15

def test_wikifon(self):
"""Test detection of MediaWiki sites for www.wikifon.org."""
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 5ed3dd7..6286be3 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -88,9 +88,6 @@

def test_siteinfo_normal_call(self):
"""Test calling the Siteinfo without setting dump."""
- if self.site.mw_version < '1.16':
- self.skipTest('requires v1.16+')
-
old = self.site.siteinfo('general')
self.assertIn('time', old)
self.assertEqual(old, self.site.siteinfo['general'])
@@ -276,14 +273,13 @@
self.assertTrue(mysite.sametitle('Template: Test', 'Template: Test'))
self.assertTrue(mysite.sametitle('Test name', 'Test name'))
self.assertFalse(mysite.sametitle('Test name', 'Test Name'))
- # User, MediaWiki (both since 1.16) and Special are always
+ # User, MediaWiki and Special are always
# first-letter (== only first non-namespace letter is case insensitive)
# See also: https://www.mediawiki.org/wiki/Manual:$wgCapitalLinks
self.assertTrue(mysite.sametitle('Special:Always', 'Special:always'))
- if mysite.mw_version >= '1.16':
- self.assertTrue(mysite.sametitle('User:Always', 'User:always'))
- self.assertTrue(mysite.sametitle('MediaWiki:Always',
- 'MediaWiki:always'))
+ self.assertTrue(mysite.sametitle('User:Always', 'User:always'))
+ self.assertTrue(mysite.sametitle('MediaWiki:Always',
+ 'MediaWiki:always'))

def test_constructors(self):
"""Test cases for site constructors."""
@@ -964,9 +960,6 @@

def test_protectedpages_create(self):
"""Test that protectedpages returns protected page titles."""
- if self.site.mw_version < '1.15':
- self.skipTest('requires v1.15+')
-
pages = list(self.get_site().protectedpages(type='create', total=10))
# Do not check for the existence of pages as they might exist (T205883)
self.assertLessEqual(len(pages), 10)
@@ -1479,22 +1472,6 @@
self.assertIn(self.site.namespaces.lookup_name(prefix).id, [6, 7])
self.assertIn(change['ns'], [6, 7])

- def test_pagelist(self):
- """Test the site.recentchanges() with pagelist deprecated MW 1.14."""
- mysite = self.site
- mainpage = self.get_mainpage()
- imagepage = self.imagepage
- if mysite.mw_version <= '1.14':
- pagelist = [mainpage]
- if imagepage:
- pagelist += [imagepage]
- titlelist = {page.title() for page in pagelist}
- for change in mysite.recentchanges(pagelist=pagelist,
- total=5):
- self.assertIsInstance(change, dict)
- self.assertIn('title', change)
- self.assertIn(change['title'], titlelist)
-
def test_changetype(self):
"""Test the site.recentchanges() with changetype."""
mysite = self.site
@@ -2249,12 +2226,8 @@

def _test_tokens(self, version, test_version, additional_token):
"""Test tokens."""
- if version and self._version < version:
- raise unittest.SkipTest(
- 'Site {} version {} is too low for this tests.'
- .format(self.mysite, self._version))
-
- if version and self._version < test_version:
+ if version and (self._version < version
+ or self._version < test_version):
            raise unittest.SkipTest(
                'Site {} version {} is too low for this test.'
                .format(self.mysite, self._version))
@@ -2283,17 +2256,13 @@
# test __contains__
self.assertIn(tokentype[0], self.mysite.tokens)

- def test_patrol_tokens_in_mw_116(self):
- """Test ability to get patrol token on MW 1.16 wiki."""
- self._test_tokens('1.14', '1.16', 'patrol')
-
def test_tokens_in_mw_119(self):
"""Test ability to get page tokens."""
self._test_tokens(None, '1.19', 'delete')

def test_patrol_tokens_in_mw_119(self):
"""Test ability to get patrol token on MW 1.19 wiki."""
- self._test_tokens('1.14', '1.19', 'patrol')
+ self._test_tokens('1.19', '1.19', 'patrol')

def test_tokens_in_mw_120_124wmf18(self):
"""Test ability to get page tokens."""
@@ -2301,7 +2270,7 @@

def test_patrol_tokens_in_mw_120(self):
"""Test ability to get patrol token."""
- self._test_tokens('1.14', '1.20', 'patrol')
+ self._test_tokens('1.19', '1.20', 'patrol')

def test_tokens_in_mw_124wmf19(self):
"""Test ability to get page tokens."""
@@ -2429,6 +2398,11 @@
'([A-Z]{3,4}|[A-Z][a-z]+/[A-Z][a-z]+)')
self.assertIn(mysite.siteinfo['case'], ['first-letter',
'case-sensitive'])
+ self.assertIsInstance(
+ datetime.strptime(mysite.siteinfo['time'], '%Y-%m-%dT%H:%M:%SZ'),
+ datetime)
+ self.assertEqual(re.findall(r'\$1', mysite.siteinfo['articlepath']),
+ ['$1'])

def test_siteinfo_boolean(self):
"""Test conversion of boolean properties from empty strings."""
@@ -2438,18 +2412,6 @@
self.assertIsInstance(mysite.namespaces[0].subpages, bool)
self.assertIsInstance(mysite.namespaces[0].content, bool)

- def test_siteinfo_v1_16(self):
- """Test v.16+ siteinfo values."""
- if self.site.mw_version < '1.16':
- self.skipTest('requires v1.16+')
-
- mysite = self.get_site()
- self.assertIsInstance(
- datetime.strptime(mysite.siteinfo['time'], '%Y-%m-%dT%H:%M:%SZ'),
- datetime)
- self.assertEqual(re.findall(r'\$1', mysite.siteinfo['articlepath']),
- ['$1'])
-
def test_properties_with_defaults(self):
"""Test the siteinfo properties with defaults."""
# This does not test that the defaults work correct,

To view, visit change 577589. To unsubscribe, or for help writing mail filters, visit settings.

Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I307febd7fadb3dd33ab72231718cf2f46b621bbb
Gerrit-Change-Number: 577589
Gerrit-PatchSet: 24
Gerrit-Owner: Xqt <info@gno.de>
Gerrit-Reviewer: Matěj Suchánek <matejsuchanek97@gmail.com>
Gerrit-Reviewer: Mpaa <mpaa.wiki@gmail.com>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged