jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634785 )
Change subject: [IMPR] Replaced basestring by str
......................................................................
[IMPR] Replaced basestring by str
Bug: T265128
Change-Id: Ib43659d1ac070c37912559994ae4d9bf30798192
---
M pywikibot/config2.py
M pywikibot/cosmetic_changes.py
M pywikibot/date.py
M pywikibot/diff.py
4 files changed, 6 insertions(+), 16 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/config2.py b/pywikibot/config2.py
index cfc3d68..f65d25e 100644
--- a/pywikibot/config2.py
+++ b/pywikibot/config2.py
@@ -48,7 +48,7 @@
from locale import getdefaultlocale
from os import getenv, environ
from textwrap import fill
-from typing import Dict, List, Tuple
+from typing import Dict, List, Optional, Tuple
from warnings import warn
from pywikibot.__metadata__ import __version__ as pwb_version
@@ -271,7 +271,7 @@
return os.path.join(os.path.expanduser('~'), path)
-def get_base_dir(test_directory=None):
+def get_base_dir(test_directory: Optional[str] = None) -> str:
r"""Return the directory in which user-specific information is stored.
This is determined in the following order:
@@ -292,8 +292,6 @@
@param test_directory: Assume that a user config file exists in this
directory. Used to test whether placing a user config file in this
directory will cause it to be selected as the base directory.
- @type test_directory: str or None
- @rtype: str
"""
def exists(directory):
directory = os.path.abspath(directory)
@@ -883,7 +881,7 @@
# #############################################
-def makepath(path, create=True):
+def makepath(path: str, create: bool = True):
"""Return a normalized absolute version of the path argument.
If the given path already exists in the filesystem or create is False
@@ -895,10 +893,8 @@
from holger(a)trillke.net 2002/03/18
@param path: path in the filesystem
- @type path: str
@param create: create the directory if it is True. Otherwise do not change
the filesystem. Default is True.
- @type create: bool
"""
dpath = os.path.normpath(os.path.dirname(path))
if create and not os.path.exists(dpath):
diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py
index b5406d6..f84d6c8 100755
--- a/pywikibot/cosmetic_changes.py
+++ b/pywikibot/cosmetic_changes.py
@@ -480,7 +480,7 @@
return textlib.replaceExcept(
text, regex, replace_magicword, exceptions)
- def cleanUpLinks(self, text):
+ def cleanUpLinks(self, text: str) -> str:
"""Tidy up wikilinks found in a string.
This function will:
@@ -497,9 +497,7 @@
* Capitalize the article title of the link, if appropriate
@param text: string to perform the clean-up on
- @type text: str
@return: text with tidied wikilinks
- @rtype: str
"""
# helper function which works on one link and either returns it
# unmodified, or returns a replacement.
diff --git a/pywikibot/date.py b/pywikibot/date.py
index 848a08e..7b9d641 100644
--- a/pywikibot/date.py
+++ b/pywikibot/date.py
@@ -1814,7 +1814,7 @@
formats[monthOfYear] = {}
-def addFmt1(lang, isMnthOfYear, patterns):
+def addFmt1(lang: str, isMnthOfYear, patterns):
"""Add 12 month formats for a specific type ('January', 'Feb.').
The function must accept one parameter for the ->int or ->string
@@ -1822,7 +1822,6 @@
The patterns parameter is a list of 12 elements to be used for each month.
@param lang: language code
- @type lang: str
"""
assert len(patterns) == 12, 'pattern %s does not have 12 elements' % lang
@@ -2234,7 +2233,6 @@
"""Format a date localized to given lang.
@param month: month in range of 1..12
- @type month: int
@param day: day of month in range of 1..31
@type day: int
@param lang: a site object or language key. Defaults to current site.
diff --git a/pywikibot/diff.py b/pywikibot/diff.py
index 3b48a05..a77918d 100644
--- a/pywikibot/diff.py
+++ b/pywikibot/diff.py
@@ -579,7 +579,7 @@
return text
-def html_comparator(compare_string):
+def html_comparator(compare_string: str) -> dict:
"""List of added and deleted contexts from 'action=compare' html string.
This function is useful when combined with site.py's "compare" method.
@@ -587,9 +587,7 @@
Here we use BeautifulSoup to un-HTML-ify the context of changes.
Finally we present the added and deleted contexts.
@param compare_string: HTML string from mediawiki API
- @type compare_string: str
@return: deleted and added list of contexts
- @rtype: dict
"""
from bs4 import BeautifulSoup
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634785
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ib43659d1ac070c37912559994ae4d9bf30798192
Gerrit-Change-Number: 634785
Gerrit-PatchSet: 4
Gerrit-Owner: Udoka <UdokakuUgochukwu(a)gmail.com>
Gerrit-Reviewer: Reviewer-bot <gerritreviewerbot(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
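For readers following this series: the hunks above move type information out of epytext @type/@rtype fields and into PEP 484 annotations. A minimal sketch of the before/after, reusing the get_base_dir signature from the diff (the body here is a placeholder, not pywikibot's real lookup logic):

import os
from typing import Optional

# Before (epytext):    def get_base_dir(test_directory=None):
#                          @type test_directory: str or None
#                          @rtype: str
# After (annotations):
def get_base_dir(test_directory: Optional[str] = None) -> str:
    """Sketch only: the real implementation checks several config locations."""
    return test_directory or os.path.expanduser('~')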
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634735 )
Change subject: [IMPR] Replaced basestring by str
......................................................................
[IMPR] Replaced basestring by str
Bug: T265128
Change-Id: I6487433ca0cc3cde522f45fdabb557d734d43bea
---
M pywikibot/editor.py
M pywikibot/exceptions.py
M pywikibot/family.py
M pywikibot/flow.py
M pywikibot/interwiki_graph.py
M pywikibot/logentries.py
6 files changed, 36 insertions(+), 75 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/editor.py b/pywikibot/editor.py
index a95401d..97528fc 100644
--- a/pywikibot/editor.py
+++ b/pywikibot/editor.py
@@ -76,7 +76,8 @@
"""Return editor selected in user-config.py."""
return TextEditor._concat(self._command(tempFilename, text, jumpIndex))
- def edit(self, text: str, jumpIndex=None, highlight=None) -> Optional[str]:
+ def edit(self, text: str, jumpIndex: Optional[int] = None,
+ highlight: Optional[str] = None) -> Optional[str]:
"""
Call the editor and thus allow the user to change the text.
@@ -84,9 +85,7 @@
@param text: the text to be edited
@param jumpIndex: position at which to put the caret
- @type jumpIndex: int
@param highlight: each occurrence of this substring will be highlighted
- @type highlight: str
@return: the modified text, or None if the user didn't save the text
file in his text editor
"""
diff --git a/pywikibot/exceptions.py b/pywikibot/exceptions.py
index 9a9dd28..662a1dd 100644
--- a/pywikibot/exceptions.py
+++ b/pywikibot/exceptions.py
@@ -87,7 +87,7 @@
#
# Distributed under the terms of the MIT license.
#
-from typing import Optional
+from typing import Optional, Union
from pywikibot.tools import deprecated, _NotImplementedWarning
@@ -190,11 +190,10 @@
message = 'Edit to page %(title)s failed:\n%(reason)s'
- def __init__(self, page, reason):
+ def __init__(self, page, reason: Union[str, Exception]):
"""Initializer.
@param reason: Details of the problem
- @type reason: Exception or basestring
"""
self.reason = reason
super().__init__(page)
@@ -247,13 +246,12 @@
"""Page receives a title inconsistent with query."""
- def __init__(self, page, actual):
+ def __init__(self, page, actual: str):
"""Initializer.
@param page: Page that caused the exception
@type page: Page object
@param actual: title obtained by query
- @type actual: basestring
"""
self.message = "Query on %s returned data on '{0}'".format(actual)
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 7b1d78b..1aea0c0 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -681,11 +681,10 @@
@staticmethod
@deprecated_args(fatal=None)
- def load(fam=None):
+ def load(fam: Optional[str] = None):
"""Import the named family.
@param fam: family name (if omitted, uses the configured default)
- @type fam: str
@return: a Family instance configured for the named family.
@raises pywikibot.exceptions.UnknownFamily: family not known
"""
@@ -827,27 +826,23 @@
.format(code))
# Methods
- def protocol(self, code):
+ def protocol(self, code: str) -> str:
"""
The protocol to use to connect to the site.
May be overridden to return 'https'. Other protocols are not supported.
@param code: language code
- @type code: str
@return: protocol that this family uses
- @rtype: str
"""
return 'http'
- def ignore_certificate_error(self, code):
+ def ignore_certificate_error(self, code: str) -> bool:
"""
Return whether an HTTPS certificate error should be ignored.
@param code: language code
- @type code: str
@return: flag to allow access if certificate has an error.
- @rtype: bool
"""
return False
@@ -859,7 +854,7 @@
"""The hostname to use for SSL connections."""
return self.hostname(code)
- def scriptpath(self, code):
+ def scriptpath(self, code: str) -> str:
"""The prefix used to locate scripts on this wiki.
This is the value displayed when you enter {{SCRIPTPATH}} on a
@@ -871,10 +866,8 @@
uses a different value.
@param code: Site code
- @type code: str
@raises KeyError: code is not recognised
@return: URL path without ending '/'
- @rtype: str
"""
return '/w'
@@ -893,18 +886,15 @@
host = self.hostname(code)
return protocol, host
- def base_url(self, code, uri, protocol=None):
+ def base_url(self, code: str, uri: str, protocol=None) -> str:
"""
Prefix uri with port and hostname.
@param code: The site code
- @type code: str
@param uri: The absolute path after the hostname
- @type uri: str
@param protocol: The protocol which is used. If None it'll determine
the protocol from the code.
@return: The full URL ending with uri
- @rtype: str
"""
protocol, host = self._hostname(code, protocol)
if protocol == 'https':
@@ -964,7 +954,7 @@
return config.site_interface
- def from_url(self, url):
+ def from_url(self, url: str) -> Optional[str]:
"""
Return whether this family matches the given url.
@@ -980,10 +970,8 @@
@param url: the URL which may contain a C{$1}. If it's missing it is
assumed to be at the end and if it's present nothing is allowed
after it.
- @type url: str
@return: The language code of the url. None if that url is not from
this family.
- @rtype: str or None
@raises RuntimeError: When there are multiple languages in this family
which would work with the given URL.
@raises ValueError: When text is present after $1.
@@ -1153,7 +1141,7 @@
These domains may also exist in another family.
- @rtype: iterable of str
+ @rtype: set of str
"""
return set(cls.langs.values())
@@ -1162,7 +1150,7 @@
"""
Get list of codes used by this family.
- @rtype: iterable of str
+ @rtype: set of str
"""
return set(cls.langs.keys())
@@ -1401,14 +1389,12 @@
@deprecated_args(site=None)
-def AutoFamily(name, url):
+def AutoFamily(name: str, url: str):
"""
Family that automatically loads the site configuration.
@param name: Name for the family
- @type name: str
@param url: API endpoint URL of the wiki
- @type url: str
@return: Generated family class
@rtype: SingleSiteFamily
"""
diff --git a/pywikibot/flow.py b/pywikibot/flow.py
index 792ca54..fea2130 100644
--- a/pywikibot/flow.py
+++ b/pywikibot/flow.py
@@ -26,13 +26,12 @@
It cannot be instantiated directly.
"""
- def __init__(self, source, title=''):
+ def __init__(self, source, title: str = ''):
"""Initializer.
@param source: A Flow-enabled site or a Link or Page on such a site
@type source: Site, pywikibot.page.Link, or pywikibot.page.Page
@param title: normalized title of the page
- @type title: str
@raises TypeError: incorrect use of parameters
@raises ValueError: use of non-Flow-enabled Site
@@ -52,11 +51,10 @@
raise NotImplementedError
@property
- def uuid(self):
+ def uuid(self) -> str:
"""Return the UUID of the page.
@return: UUID of the page
- @rtype: str
"""
if not hasattr(self, '_uuid'):
self._uuid = self._load()['workflowId']
@@ -187,15 +185,13 @@
return cls(board.site, data['topic-page'])
@classmethod
- def from_topiclist_data(cls, board, root_uuid, topiclist_data):
+ def from_topiclist_data(cls, board, root_uuid: str, topiclist_data: dict):
"""Create a Topic object from API data.
@param board: The topic's parent Flow board
@type board: Board
@param root_uuid: The UUID of the topic and its root post
- @type root_uuid: str
@param topiclist_data: The data returned by view-topiclist
- @type topiclist_data: dict
@return: A Topic object derived from the supplied data
@rtype: Topic
@raises TypeError: any passed parameters have wrong types
@@ -253,56 +249,50 @@
return self.root.reply(content, content_format)
# Moderation
- def lock(self, reason):
+ def lock(self, reason: str):
"""Lock this topic.
@param reason: The reason for locking this topic
- @type reason: str
"""
self.site.lock_topic(self, True, reason)
self._reload()
- def unlock(self, reason):
+ def unlock(self, reason: str):
"""Unlock this topic.
@param reason: The reason for unlocking this topic
- @type reason: str
"""
self.site.lock_topic(self, False, reason)
self._reload()
- def delete_mod(self, reason):
+ def delete_mod(self, reason: str):
"""Delete this topic through the Flow moderation system.
@param reason: The reason for deleting this topic.
- @type reason: str
"""
self.site.delete_topic(self, reason)
self._reload()
- def hide(self, reason):
+ def hide(self, reason: str):
"""Hide this topic.
@param reason: The reason for hiding this topic.
- @type reason: str
"""
self.site.hide_topic(self, reason)
self._reload()
- def suppress(self, reason):
+ def suppress(self, reason: str):
"""Suppress this topic.
@param reason: The reason for suppressing this topic.
- @type reason: str
"""
self.site.suppress_topic(self, reason)
self._reload()
- def restore(self, reason):
+ def restore(self, reason: str):
"""Restore this topic.
@param reason: The reason for restoring this topic.
- @type reason: str
"""
self.site.restore_topic(self, reason)
self._reload()
@@ -313,14 +303,13 @@
"""A post to a Flow discussion topic."""
- def __init__(self, page, uuid):
+ def __init__(self, page, uuid: str):
"""
Initializer.
@param page: Flow topic
@type page: Topic
@param uuid: UUID of a Flow post
- @type uuid: str
@raises TypeError: incorrect types of parameters
"""
@@ -337,16 +326,14 @@
self._content = {}
@classmethod
- def fromJSON(cls, page, post_uuid, data):
+ def fromJSON(cls, page, post_uuid: str, data: dict):
"""
Create a Post object using the data returned from the API call.
@param page: A Flow topic
@type page: Topic
@param post_uuid: The UUID of the post
- @type post_uuid: str
@param data: The JSON data returned from the API
- @type data: dict
@return: A Post object
@raises TypeError: data is not a dict
@@ -357,11 +344,10 @@
return post
- def _set_data(self, data):
+ def _set_data(self, data: dict):
"""Set internal data and cache content.
@param data: The data to store internally
- @type data: dict
@raises TypeError: data is not a dict
@raises ValueError: missing data entries or post/revision not found
"""
@@ -399,11 +385,10 @@
return self._current_revision
@property
- def uuid(self):
+ def uuid(self) -> str:
"""Return the UUID of the post.
@return: UUID of the post
- @rtype: str
"""
return self._uuid
@@ -508,38 +493,34 @@
return post
# Moderation
- def delete(self, reason):
+ def delete(self, reason: str):
"""Delete this post through the Flow moderation system.
@param reason: The reason for deleting this post.
- @type reason: str
"""
self.site.delete_post(self, reason)
self._load()
- def hide(self, reason):
+ def hide(self, reason: str):
"""Hide this post.
@param reason: The reason for hiding this post.
- @type reason: str
"""
self.site.hide_post(self, reason)
self._load()
- def suppress(self, reason):
+ def suppress(self, reason: str):
"""Suppress this post.
@param reason: The reason for suppressing this post.
- @type reason: str
"""
self.site.suppress_post(self, reason)
self._load()
- def restore(self, reason):
+ def restore(self, reason: str):
"""Restore this post.
@param reason: The reason for restoring this post.
- @type reason: str
"""
self.site.restore_post(self, reason)
self._load()
diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py
index b7ba7a2..4078774 100644
--- a/pywikibot/interwiki_graph.py
+++ b/pywikibot/interwiki_graph.py
@@ -10,6 +10,7 @@
from collections import Counter
import itertools
import threading
+from typing import Optional
try:
import pydot
@@ -250,16 +251,14 @@
self.saveGraphFile()
-def getFilename(page, extension=None):
+def getFilename(page, extension: Optional[str] = None) -> str:
"""
Create a filename that is unique for the page.
@param page: page used to create the new filename
@type page: pywikibot.page.Page
@param extension: file extension
- @type extension: str
@return: filename of <family>-<lang>-<page>.<ext>
- @rtype: str
"""
filename = '%s-%s-%s' % (page.site.family.name,
page.site.code,
diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py
index 7ffa816..1015ea9 100644
--- a/pywikibot/logentries.py
+++ b/pywikibot/logentries.py
@@ -6,7 +6,7 @@
# Distributed under the terms of the MIT license.
#
from collections import UserDict
-from typing import Optional
+from typing import List, Optional
import pywikibot
from pywikibot.exceptions import Error, HiddenKeyError
@@ -189,14 +189,13 @@
else:
return super(BlockEntry, self).page()
- def flags(self):
+ def flags(self) -> List[str]:
"""
Return a list of (str) flags associated with the block entry.
It raises an Error if the entry is an unblocking log entry.
@return: list of flags strings
- @rtype: list
"""
if self.action() == 'unblock':
return []
@@ -449,12 +448,11 @@
classname, bases, {'_expected_type': logtype})
return cls._logtypes[logtype]
- def _createFromData(self, logdata):
+ def _createFromData(self, logdata: dict):
"""
Check for logtype from data and create the correct LogEntry.
@param logdata: log entry data
- @type logdata: dict
@rtype: LogEntry
"""
try:
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634735
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I6487433ca0cc3cde522f45fdabb557d734d43bea
Gerrit-Change-Number: 634735
Gerrit-PatchSet: 2
Gerrit-Owner: Udoka <UdokakuUgochukwu(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
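The exceptions.py hunk above replaces "@type reason: Exception or basestring" with a Union annotation. A hedged sketch of that pattern using a simplified stand-in class (not the actual pywikibot exception, whose initializer and message formatting differ):

from typing import Union

class PageSaveError(Exception):
    """Sketch: the reason may be a plain message or a wrapped exception."""

    def __init__(self, page, reason: Union[str, Exception]):
        self.page = page
        self.reason = reason
        super().__init__('Edit to page {} failed:\n{}'.format(page, reason))

# PageSaveError('Example', 'edit conflict')
# PageSaveError('Example', ValueError('bad token'))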
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634327 )
Change subject: [IMPR] Replaced basestring by str
......................................................................
[IMPR] Replaced basestring by str
Bug: T265128
Change-Id: If6a9cfdddfd1c75fa78bf4181c1eb59951b7c683
---
M pywikibot/logentries.py
M pywikibot/logging.py
M pywikibot/login.py
M pywikibot/pagegenerators.py
M pywikibot/proofreadpage.py
5 files changed, 105 insertions(+), 170 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py
index b9a5d41..7ffa816 100644
--- a/pywikibot/logentries.py
+++ b/pywikibot/logentries.py
@@ -339,24 +339,16 @@
_expected_type = 'patrol'
@property
- def current_id(self):
- """
- Return the current id.
-
- @rtype: int
- """
+ def current_id(self) -> int:
+ """Return the current id."""
# key has been changed in mw 1.19; try the new mw style first
# sometimes it returns strs sometimes ints
return int(self._params['curid']
if 'curid' in self._params else self._params['cur'])
@property
- def previous_id(self):
- """
- Return the previous id.
-
- @rtype: int
- """
+ def previous_id(self) -> int:
+ """Return the previous id."""
# key has been changed in mw 1.19; try the new mw style first
# sometimes it returns strs sometimes ints
return int(self._params['previd']
diff --git a/pywikibot/logging.py b/pywikibot/logging.py
index 22ba07f..ed4e842 100644
--- a/pywikibot/logging.py
+++ b/pywikibot/logging.py
@@ -11,6 +11,7 @@
# logging levels
from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL
+from typing import Optional
STDOUT = 16
VERBOSE = 18
@@ -140,16 +141,14 @@
logoutput(text, decoder, newline, STDOUT, **kwargs)
-def warning(text, decoder=None, newline=True, **kwargs):
+def warning(text: str, decoder: Optional[str] = None,
+ newline: bool = True, **kwargs):
"""Output a warning message to the user via the userinterface.
@param text: the message the user wants to display.
- @type text: str
@param decoder: If None, text should be a unicode string. Otherwise it
should be encoded in the given encoding.
- @type decoder: str
@param newline: If True, a line feed will be added after printing the text.
- @type newline: bool
@param kwargs: The keyword arguments can be found in the python doc:
https://docs.python.org/3/howto/logging-cookbook.html.
"""
diff --git a/pywikibot/login.py b/pywikibot/login.py
index 7b5a105..c923969 100644
--- a/pywikibot/login.py
+++ b/pywikibot/login.py
@@ -11,6 +11,7 @@
import webbrowser
from enum import IntEnum
+from typing import Optional
from warnings import warn
import pywikibot
@@ -89,7 +90,8 @@
"""Site login manager."""
@deprecated_args(username='user', verbose=None, sysop=None)
- def __init__(self, password=None, site=None, user=None):
+ def __init__(self, password: Optional[str] = None,
+ site=None, user: Optional[str] = None):
"""
Initializer.
@@ -99,9 +101,7 @@
@type site: BaseSite
@param user: username to use.
If user is None, the username is loaded from config.usernames.
- @type user: basestring
@param password: password to use
- @type password: basestring
@raises pywikibot.exceptions.NoUsername: No username is configured
for the requested site.
@@ -197,14 +197,11 @@
# THIS IS OVERRIDDEN IN data/api.py
return None
- def storecookiedata(self, data):
+ def storecookiedata(self, data: str) -> None:
"""
Store cookie data.
@param data: The raw data as returned by getCookie()
- @type data: str
-
- @return: None
"""
# THIS IS OVERRIDDEN IN data/api.py
filename = config.datafilepath('pywikibot.lwp')
@@ -387,7 +384,8 @@
# authentication process
@deprecated_args(sysop=None)
- def __init__(self, password=None, site=None, user=None):
+ def __init__(self, password: Optional[str] = None, site=None,
+ user: Optional[str] = None):
"""
Initializer.
@@ -396,9 +394,7 @@
@param site: Site object to log into
@type site: BaseSite
@param user: consumer key
- @type user: str
@param password: consumer secret
- @type password: str
@raises pywikibot.exceptions.NoUsername: No username is configured
for the requested site.
@@ -477,12 +473,8 @@
return self._access_token
@property
- def identity(self):
- """
- Get identifying information about a user via an authorized token.
-
- @rtype: None or dict
- """
+ def identity(self) -> Optional[dict]:
+ """Get identifying information about a user via an authorized token."""
if self.access_token is None:
pywikibot.error('Access token not set')
return None
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index b301109..561474d 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -31,6 +31,7 @@
from functools import partial
from itertools import zip_longest
from requests.exceptions import ReadTimeout
+from typing import List, Optional, Union
import pywikibot
@@ -420,7 +421,7 @@
that are used by many scripts and that determine which pages to work on.
"""
- def __init__(self, site=None, positional_arg_name=None):
+ def __init__(self, site=None, positional_arg_name: Optional[str] = None):
"""
Initializer.
@@ -428,7 +429,6 @@
@type site: L{pywikibot.site.BaseSite}
@param positional_arg_name: generator to use for positional args,
which do not begin with a hyphen
- @type positional_arg_name: basestring
"""
self.gens = []
self._namespaces = []
@@ -610,13 +610,12 @@
return cat, startfrom
@deprecated_args(arg='category')
- def getCategoryGen(self, category, recurse=False, content=False,
- gen_func=None):
+ def getCategoryGen(self, category: str, recurse: bool = False,
+ content: bool = False, gen_func=None):
"""
Return generator based on Category defined by category and gen_func.
@param category: category name with start parameter
- @type category: str
@rtype: generator
"""
cat, startfrom = self.getCategory(category)
@@ -627,15 +626,14 @@
content=content)
@staticmethod
- def _parse_log_events(logtype, user=None, start=None, end=None):
+ def _parse_log_events(logtype: str, user: Optional[str] = None,
+ start=None, end=None):
"""
Parse the -logevent argument information.
@param logtype: A valid logtype
- @type logtype: str
@param user: A username associated to the log events. Ignored if
empty string or None.
- @type user: str
@param start: Timestamp to start listing from. For backward
compatibility, this can also be the total amount of pages
that should be returned. It is taken as 'total' if the value does
@@ -1137,7 +1135,7 @@
'Invalid -logevents parameter "{0}"'.format(params[0]))
return self._parse_log_events(*params)
- def handleArg(self, arg):
+ def handleArg(self, arg: str) -> bool:
"""Parse one argument at a time.
If it is recognized as an argument that specifies a generator, a
@@ -1147,9 +1145,7 @@
arguments have been parsed to get the final output generator.
@param arg: Pywikibot argument consisting of -name:value
- @type arg: basestring
@return: True if the argument supplied was recognised by the factory
- @rtype: bool
"""
if not arg.startswith('-') and self._positional_arg_name:
value = arg
@@ -1178,8 +1174,9 @@
@deprecated('Site.allpages()', since='20180512')
@deprecated_args(step=None)
-def AllpagesPageGenerator(start='!', namespace=0, includeredirects=True,
- site=None, total=None, content=False
+def AllpagesPageGenerator(start: str = '!', namespace=0,
+ includeredirects=True, site=None,
+ total: Optional[int] = None, content: bool = False
): # pragma: no cover
"""
Iterate Page objects for all titles in a single namespace.
@@ -1188,7 +1185,6 @@
includeredirects equals the string 'only', only redirects are added.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param content: If True, load current version of each page (default False)
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
@@ -1208,26 +1204,24 @@
@deprecated_args(step=None)
-def PrefixingPageGenerator(prefix, namespace=None, includeredirects=True,
- site=None, total=None, content=False):
+def PrefixingPageGenerator(prefix: str, namespace=None,
+ includeredirects: Union[None, bool, str] = True,
+ site=None, total: int = None,
+ content: bool = False):
"""
Prefixed Page generator.
@param prefix: The prefix of the pages.
- @type prefix: str
@param namespace: Namespace to retrieve pages from
@type namespace: Namespace or int
@param includeredirects: If includeredirects is None, False or an empty
string, redirects will not be found. If includeredirects equals the
string 'only', only redirects will be found. Otherwise redirects will
be included.
- @type includeredirects: None, bool, str
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param content: If True, load current version of each page (default False)
- @type content: bool
@return: a generator that yields Page objects
@rtype: generator
"""
@@ -1249,27 +1243,25 @@
@deprecated_args(number='total', mode='logtype', repeat=None)
-def LogeventsPageGenerator(logtype=None, user=None, site=None, namespace=None,
- total=None, start=None, end=None, reverse=False):
+def LogeventsPageGenerator(logtype: Optional[str] = None,
+ user: Optional[str] = None, site=None,
+ namespace: Optional[int] = None,
+ total: Optional[int] = None, start=None,
+ end=None, reverse: bool = False):
"""
Generate Pages for specified modes of logevents.
@param logtype: Mode of logs to retrieve
- @type logtype: basestring
@param user: User of logs retrieved
- @type user: basestring
@param site: Site for generator results
@type site: L{pywikibot.site.BaseSite}
@param namespace: Namespace to retrieve logs from
- @type namespace: int
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param start: Timestamp to start listing from
@type start: pywikibot.Timestamp
@param end: Timestamp to end listing at
@type end: pywikibot.Timestamp
@param reverse: if True, start with oldest changes (default: newest)
- @type reverse: bool
"""
if site is None:
site = pywikibot.Site()
@@ -1287,12 +1279,12 @@
@deprecated_args(number='total', step=None, namespace='namespaces',
repeat=None, get_redirect=None)
-def NewpagesPageGenerator(site=None, namespaces=(0, ), total=None):
+def NewpagesPageGenerator(site=None, namespaces=(0, ),
+ total: Optional[int] = None):
"""
Iterate Page objects for all new titles in a single namespace.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -1329,12 +1321,11 @@
@deprecated('site.unconnected_pages()', since='20180512')
@deprecated_args(step=None)
-def UnconnectedPageGenerator(site=None, total=None):
+def UnconnectedPageGenerator(site=None, total: Optional[int] = None):
"""
Iterate Page objects for all unconnected pages to a Wikibase repository.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.APISite}
"""
@@ -1440,7 +1431,7 @@
@deprecated('Page.linkedPages()', since='20200515')
@deprecated_args(step=None)
-def LinkedPageGenerator(linkingPage, total=None, content=False):
+def LinkedPageGenerator(linkingPage, total: int = None, content: bool = False):
"""DEPRECATED. Yield all pages linked from a specific page.
See L{pywikibot.page.BasePage.linkedPages} for details.
@@ -1448,9 +1439,7 @@
@param linkingPage: the page that links to the pages we want
@type linkingPage: L{pywikibot.Page}
@param total: the total number of pages to iterate
- @type total: int
@param content: if True, retrieve the current content of each linked page
- @type content: bool
@return: a generator that yields Page objects of pages linked to
linkingPage
@rtype: generator
@@ -1459,7 +1448,7 @@
content=content) # pragma: no cover
-def TextfilePageGenerator(filename=None, site=None):
+def TextfilePageGenerator(filename: Optional[str] = None, site=None):
"""Iterate pages from a list in a text file.
The file must contain page links between double-square-brackets or, in
@@ -1468,7 +1457,6 @@
@param filename: the name of the file that should be read. If no name is
given, the generator prompts the user.
- @type filename: str
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
@@ -1533,15 +1521,13 @@
@deprecated_args(number='total', step=None)
-def UserContributionsGenerator(username, namespaces=None, site=None,
- total=None,
+def UserContributionsGenerator(username, namespaces: List[int] = None,
+ site=None, total: Optional[int] = None,
_filter_unique=_filter_unique_pages):
"""Yield unique pages edited by user:username.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param namespaces: list of namespace numbers to fetch contribs from
- @type namespaces: list of int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -1572,7 +1558,7 @@
namespace filtering more efficiently than this generator.
@param namespaces: list of namespace identifiers to limit results
- @type namespaces: iterable of basestring or Namespace key,
+ @type namespaces: iterable of str or Namespace key,
or a single instance of those types.
@param site: Site for generator results; mandatory if
namespaces contains namespace names. Defaults to the default site.
@@ -1598,14 +1584,13 @@
@deprecated_args(ignoreList='ignore_list')
-def PageTitleFilterPageGenerator(generator, ignore_list):
+def PageTitleFilterPageGenerator(generator, ignore_list: dict):
"""
Yield only those pages that are not listed in the ignore list.
@param ignore_list: family names are mapped to dictionaries in which
language codes are mapped to lists of page titles. Each title must
be a valid regex as they are compared using L{re.search}.
- @type ignore_list: dict
"""
def is_ignored(page):
@@ -1623,16 +1608,14 @@
yield page
-def RedirectFilterPageGenerator(generator, no_redirects=True,
- show_filtered=False):
+def RedirectFilterPageGenerator(generator, no_redirects: bool = True,
+ show_filtered: bool = False):
"""
Yield pages from another generator that are redirects or not.
@param no_redirects: Exclude redirects if True, else only include
redirects.
- @param no_redirects: bool
@param show_filtered: Output a message for each page not yielded
- @type show_filtered: bool
"""
for page in generator or []:
if no_redirects:
@@ -1692,20 +1675,19 @@
return False
@classmethod
- def filter(cls, generator, prop, claim, qualifiers=None, negate=False):
+ def filter(cls, generator, prop: str, claim,
+ qualifiers: Optional[dict] = None,
+ negate: bool = False):
"""
Yield all ItemPages which contain certain claim in a property.
@param prop: property id to check
- @type prop: str
@param claim: value of the property to check. Can be exact value (for
instance, ItemPage instance) or a string (e.g. 'Q37470').
@param qualifiers: dict of qualifiers that must be present, or None if
qualifiers are irrelevant
- @type qualifiers: dict or None
@param negate: true if pages that do *not* contain specified claim
should be yielded, false otherwise
- @type negate: bool
"""
for page in generator:
if cls.__filter_match(page, prop, claim, qualifiers) is not negate:
@@ -1716,7 +1698,8 @@
ItemClaimFilterPageGenerator = ItemClaimFilter.filter
-def SubpageFilterGenerator(generator, max_depth=0, show_filtered=False):
+def SubpageFilterGenerator(generator, max_depth: int = 0,
+ show_filtered: bool = False):
"""
Generator which filters out subpages based on depth.
@@ -1726,9 +1709,7 @@
@param generator: A generator object
@type generator: any generator or iterator
@param max_depth: Max depth of subpages to yield, at least zero
- @type max_depth: int
@param show_filtered: Output a message for each page not yielded
- @type show_filtered: bool
"""
assert max_depth >= 0, 'Max subpage depth must be at least 0'
@@ -1824,7 +1805,7 @@
if cls.__filter_match(reg, page.text, quantifier))
-def QualityFilterPageGenerator(generator, quality):
+def QualityFilterPageGenerator(generator, quality: List[int]):
"""
Wrap a generator to filter pages according to quality levels.
@@ -1833,7 +1814,6 @@
@param generator: A generator object
@param quality: proofread-page quality levels (valid range 0-4)
- @type quality: list of int
"""
for page in generator:
@@ -1934,8 +1914,10 @@
yield page
-def UserEditFilterGenerator(generator, username, timestamp=None, skip=False,
- max_revision_depth=None, show_filtered=False):
+def UserEditFilterGenerator(generator, username: str, timestamp=None,
+ skip: bool = False,
+ max_revision_depth: Optional[int] = None,
+ show_filtered: bool = False):
"""
Generator which will yield Pages modified by username.
@@ -1947,16 +1929,12 @@
@param generator: A generator object
@param username: user name which edited the page
- @type username: str
@param timestamp: ignore edits which are older than this timestamp
@type timestamp: datetime or str (MediaWiki format JJJJMMDDhhmmss) or None
@param skip: Ignore pages edited by the given user
- @type skip: bool
@param max_revision_depth: It only looks at the last editors given by
max_revision_depth
- @type max_revision_depth: int or None
@param show_filtered: Output a message for each page not yielded
- @type show_filtered: bool
"""
ts = None
if timestamp:
@@ -2012,7 +1990,7 @@
@deprecated('LiveRCPageGenerator or EventStreams', since='20180415')
def RepeatingGenerator(generator, key_func=lambda x: x, sleep_duration=60,
- total=None, **kwargs):
+ total: Optional[int] = None, **kwargs):
"""Yield items in live time.
The provided generator must support parameter 'start', 'end',
@@ -2037,7 +2015,6 @@
@param sleep_duration: duration between each query
@param total: if it is a positive number, iterate no more than this
number of items in total. Otherwise, iterate forever
- @type total: int or None
@return: a generator yielding items in ascending order by time
"""
kwargs.pop('reverse', None) # always get newest item first
@@ -2062,13 +2039,12 @@
@deprecated_args(pageNumber='groupsize', step='groupsize', lookahead=None)
-def PreloadingGenerator(generator, groupsize=50):
+def PreloadingGenerator(generator, groupsize: int = 50):
"""
Yield preloaded pages taken from another generator.
@param generator: pages to iterate over
@param groupsize: how many pages to preload at once
- @type groupsize: int
"""
# pages may be on more than one site, for example if an interwiki
# generator is used, so use a separate preloader for each site
@@ -2102,7 +2078,7 @@
@deprecated_args(step='groupsize')
-def PreloadingEntityGenerator(generator, groupsize=50):
+def PreloadingEntityGenerator(generator, groupsize: int = 50):
"""
Yield preloaded pages taken from another generator.
@@ -2111,7 +2087,6 @@
@param generator: pages to iterate over
@type generator: Iterable
@param groupsize: how many pages to preload at once
- @type groupsize: int
"""
sites = {}
for page in generator:
@@ -2130,12 +2105,11 @@
@deprecated_args(number='total', step=None, repeat=None)
-def NewimagesPageGenerator(total=None, site=None):
+def NewimagesPageGenerator(total: Optional[int] = None, site=None):
"""
New file generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2166,8 +2140,8 @@
yield pywikibot.ItemPage.fromPage(page)
-def WikibaseItemFilterPageGenerator(generator, has_item=True,
- show_filtered=False):
+def WikibaseItemFilterPageGenerator(generator, has_item: bool = True,
+ show_filtered: bool = False):
"""
A wrapper generator used to include or exclude pages by whether they have a wikibase item.
@@ -2175,9 +2149,7 @@
@type generator: generator
@param has_item: Exclude pages without an item if True, or only
include pages without an item if False
- @type has_item: bool
@param show_filtered: Output a message for each page not yielded
- @type show_filtered: bool
@return: Wrapped generator
@rtype: generator
"""
@@ -2205,12 +2177,12 @@
@deprecated('Site.unusedfiles()', since='20200515')
@deprecated_args(extension=None, number='total', repeat=None)
-def UnusedFilesGenerator(total=None, site=None): # pragma: no cover
+def UnusedFilesGenerator(total: Optional[int] = None,
+ site=None): # pragma: no cover
"""
DEPRECATED. Unused files generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2236,12 +2208,12 @@
@deprecated('Site.uncategorizedcategories()', since='20200515')
@deprecated_args(number='total', repeat=None)
-def UnCategorizedCategoryGenerator(total=100, site=None): # pragma: no cover
+def UnCategorizedCategoryGenerator(total: Optional[int] = 100,
+ site=None): # pragma: no cover
"""
DEPRECATED. Uncategorized category generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2252,12 +2224,12 @@
@deprecated('Site.uncategorizedimages()', since='20200515')
@deprecated_args(number='total', repeat=None)
-def UnCategorizedImageGenerator(total=100, site=None): # pragma: no cover
+def UnCategorizedImageGenerator(total: int = 100,
+ site=None): # pragma: no cover
"""
DEPRECATED. Uncategorized file generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2268,12 +2240,12 @@
@deprecated('Site.uncategorizedpages()', since='20200515')
@deprecated_args(number='total', repeat=None)
-def UnCategorizedPageGenerator(total=100, site=None): # pragma: no cover
+def UnCategorizedPageGenerator(total: int = 100,
+ site=None): # pragma: no cover
"""
DEPRECATED. Uncategorized page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2284,12 +2256,12 @@
@deprecated('Site.uncategorizedtemplates()', since='20200515')
@deprecated_args(number='total', repeat=None)
-def UnCategorizedTemplateGenerator(total=100, site=None): # pragma: no cover
+def UnCategorizedTemplateGenerator(total: int = 100,
+ site=None): # pragma: no cover
"""
DEPRECATED. Uncategorized template generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2300,12 +2272,12 @@
@deprecated('Site.lonelypages()', since='20200515')
@deprecated_args(number='total', repeat=None)
-def LonelyPagesPageGenerator(total=None, site=None): # pragma: no cover
+def LonelyPagesPageGenerator(total: Optional[int] = None,
+ site=None): # pragma: no cover
"""
DEPRECATED. Lonely page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2316,12 +2288,12 @@
@deprecated('Site.unwatchedpages()', since='20200515')
@deprecated_args(number='total', repeat=None)
-def UnwatchedPagesPageGenerator(total=None, site=None): # pragma: no cover
+def UnwatchedPagesPageGenerator(total: Optional[int] = None,
+ site=None): # pragma: no cover
"""
DEPRECATED. Unwatched page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2331,15 +2303,13 @@
@deprecated('Site.pages_with_property()', since='20200515')
-def page_with_property_generator(name, total=None,
+def page_with_property_generator(name: str, total: Optional[int] = None,
site=None): # pragma: no cover
"""
Special:PagesWithProperty page generator.
@param name: Property name of pages to be retrieved
- @type name: str
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2349,12 +2319,11 @@
@deprecated('Site.wantedpages', since='20180803')
-def WantedPagesPageGenerator(total=100, site=None): # pragma: no cover
+def WantedPagesPageGenerator(total: int = 100, site=None): # pragma: no cover
"""
Wanted page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2364,12 +2333,11 @@
@deprecated_args(number='total', repeat=None)
-def AncientPagesPageGenerator(total=100, site=None): # pragma: no cover
+def AncientPagesPageGenerator(total: int = 100, site=None): # pragma: no cover
"""
Ancient page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2380,12 +2348,11 @@
@deprecated('Site.deadendpages()', since='20200515')
@deprecated_args(number='total', repeat=None)
-def DeadendPagesPageGenerator(total=100, site=None): # pragma: no cover
+def DeadendPagesPageGenerator(total: int = 100, site=None): # pragma: no cover
"""
DEPRECATED. Dead-end page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2395,12 +2362,11 @@
@deprecated_args(number='total', repeat=None)
-def LongPagesPageGenerator(total=100, site=None):
+def LongPagesPageGenerator(total: int = 100, site=None):
"""
Long page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2410,12 +2376,11 @@
@deprecated_args(number='total', repeat=None)
-def ShortPagesPageGenerator(total=100, site=None):
+def ShortPagesPageGenerator(total: int = 100, site=None):
"""
Short page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2426,13 +2391,12 @@
@deprecated('Site.randompages()', since='20200515')
@deprecated_args(number='total')
-def RandomPageGenerator(total=None, site=None,
+def RandomPageGenerator(total: Optional[int] = None, site=None,
namespaces=None): # pragma: no cover
"""
DEPRECATED. Random page generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2443,13 +2407,12 @@
@deprecated('Site.randompages()', since='20200515')
@deprecated_args(number='total')
-def RandomRedirectPageGenerator(total=None, site=None,
+def RandomRedirectPageGenerator(total: Optional[int] = None, site=None,
namespaces=None): # pragma: no cover
"""
DEPRECATED. Random redirect generator.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2461,23 +2424,20 @@
@deprecated('Site.exturlusage()', since='20200515')
@deprecated_args(link='url', euprotocol='protocol', step=None)
-def LinksearchPageGenerator(url, namespaces=None, total=None,
- site=None, protocol=None):
+def LinksearchPageGenerator(url: str, namespaces: List[int] = None,
+ total: Optional[int] = None, site=None,
+ protocol: Optional[str] = None):
"""DEPRECATED. Yield all pages that link to a certain URL.
@param url: The URL to search for (with or without the protocol prefix);
this may include a '*' as a wildcard, only at the start of the
hostname
- @type url: str
@param namespaces: list of namespace numbers to fetch contribs from
- @type namespaces: list of int
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results
@type site: L{pywikibot.site.BaseSite}
@param protocol: Protocol to search for, likely http or https, http by
default. Full list shown on Special:LinkSearch wikipage
- @type protocol: str
"""
if site is None:
site = pywikibot.Site()
@@ -2487,13 +2447,12 @@
@deprecated('Site.search()', since='20200515')
@deprecated_args(number='total', step=None)
-def SearchPageGenerator(query, total=None, namespaces=None,
+def SearchPageGenerator(query, total: Optional[int] = None, namespaces=None,
site=None): # pragma: no cover
"""
DEPRECATED. Yield pages from the MediaWiki internal search engine.
@param total: Maximum number of pages to retrieve in total
- @type total: int
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
@@ -2502,7 +2461,7 @@
return site.search(query, total=total, namespaces=namespaces)
-def LiveRCPageGenerator(site=None, total=None):
+def LiveRCPageGenerator(site=None, total: Optional[int] = None):
"""
Yield pages from a socket.io RC stream.
@@ -2516,7 +2475,6 @@
@param site: site to return recent changes for
@type site: pywikibot.BaseSite
@param total: the maximum number of changes to return
- @type total: int
"""
if site is None:
site = pywikibot.Site()
@@ -2678,7 +2636,8 @@
"""
@deprecated_args(xmlFilename='filename', xmlStart='start')
- def __init__(self, filename, start=None, namespaces=None, site=None,
+ def __init__(self, filename: str, start: Optional[str] = None,
+ namespaces=None, site=None,
text_predicate=None):
"""Initializer."""
self.text_predicate = text_predicate
@@ -2756,14 +2715,14 @@
@deprecated_args(startMonth='start_month', endMonth='end_month')
-def DayPageGenerator(start_month=1, end_month=12, site=None, year=2000):
+def DayPageGenerator(start_month: int = 1, end_month: int = 12,
+ site=None, year: int = 2000):
"""
Day page generator.
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
@param year: year used for leap-year handling.
- @type year: int
"""
if site is None:
site = pywikibot.Site()
@@ -2801,20 +2760,19 @@
yield pywikibot.Page(site, sitelink)
-def WikidataSPARQLPageGenerator(query, site=None,
- item_name='item', endpoint=None,
- entity_url=None, result_type=set):
+def WikidataSPARQLPageGenerator(query,
+ site=None, item_name: str = 'item',
+ endpoint: Optional[str] = None,
+ entity_url: Optional[str] = None,
+ result_type=set):
"""Generate pages that result from the given SPARQL query.
@param query: the SPARQL query string.
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
@param item_name: name of the item in the SPARQL query
- @type item_name: str
@param endpoint: SPARQL endpoint URL
- @type endpoint: str
@param entity_url: URL prefix for any entities returned in a query.
- @type entity_url: str
@param result_type: type of the iterable in which
SPARQL results are stored (default set)
@type result_type: iterable
@@ -2839,20 +2797,17 @@
return WikidataPageFromItemGenerator(entities, site)
-def WikibaseSearchItemPageGenerator(
- text, language=None, total=None, site=None
-):
+def WikibaseSearchItemPageGenerator(text: str,
+ language: Optional[str] = None,
+ total: Optional[int] = None, site=None):
"""
Generate pages that contain the provided text.
@param text: Text to look for.
- @type text: str
@param language: Code of the language to search in. If not specified,
value from pywikibot.config.data_lang is used.
- @type language: str
@param total: Maximum number of pages to retrieve in total, or None in
case of no limit.
- @type total: int or None
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
"""
diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py
index 3f6ba26..e878859 100644
--- a/pywikibot/proofreadpage.py
+++ b/pywikibot/proofreadpage.py
@@ -32,6 +32,7 @@
from functools import partial
from requests.exceptions import ReadTimeout
+from typing import Optional
try:
from bs4 import BeautifulSoup
@@ -438,14 +439,13 @@
return self._text
@text.setter
- def text(self, value):
+ def text(self, value: str):
"""Update current text.
Mainly for use within the class, called by other methods.
Use self.header, self.body and self.footer to set page content,
@param value: New value or None
- @param value: basestring
@raise Error: the page is not formatted according to ProofreadPage
extension.
@@ -968,24 +968,21 @@
"""
return len(self._page_from_numbers)
- def page_gen(self, start=1, end=None, filter_ql=None,
- only_existing=False, content=True):
+ def page_gen(self, start: Optional[int] = 1,
+ end: Optional[int] = None, filter_ql=None,
+ only_existing: bool = False, content: bool = True):
"""Return a page generator which yields pages contained in Index page.
Range is [start ... end], extremes included.
@param start: first page, defaults to 1
- @type start: int
@param end: num_pages if end is None
- @type end: int
@param filter_ql: filters quality levels
if None: all but 'Without Text'.
@type filter_ql: list of ints (corresponding to ql constants
defined in ProofreadPage).
@param only_existing: yields only existing pages.
- @type only_existing: bool
@param content: preload content.
- @type content: bool
"""
if end is None:
end = self.num_pages
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634327
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: If6a9cfdddfd1c75fa78bf4181c1eb59951b7c683
Gerrit-Change-Number: 634327
Gerrit-PatchSet: 8
Gerrit-Owner: Udoka <UdokakuUgochukwu(a)gmail.com>
Gerrit-Reviewer: Reviewer-bot <gerritreviewerbot(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
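Most of the pagegenerators.py changes above annotate the total parameters as Optional[int], with a None default meaning "no limit". A toy sketch of that convention (an illustrative helper, not a pywikibot generator):

from typing import Iterable, Iterator, Optional

def limited(items: Iterable, total: Optional[int] = None) -> Iterator:
    """Yield items, stopping after ``total`` of them when a limit is given."""
    for count, item in enumerate(items, start=1):
        yield item
        if total is not None and count >= total:
            break

# list(limited('abcde', total=3)) -> ['a', 'b', 'c']
# list(limited('abcde'))          -> all five items (no limit)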
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634784 )
Change subject: [IMPR] remove IDE warning for unfilled params
......................................................................
[IMPR] remove IDE warning for unfilled params
Change-Id: I4626d8310281d74092e09684518641de6d7d2d61
---
M pywikibot/tools/__init__.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/tools/__init__.py b/pywikibot/tools/__init__.py
index 0a71018..2118f55 100644
--- a/pywikibot/tools/__init__.py
+++ b/pywikibot/tools/__init__.py
@@ -171,7 +171,7 @@
def __exit__(self, exc_type, exc_val, exc_tb):
"""Stop logging warnings and show those that do not match to params."""
- super().__exit__()
+ super().__exit__(exc_type, exc_val, exc_tb)
for warning in self.log:
if (
not issubclass(warning.category, self.category)
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634784
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I4626d8310281d74092e09684518641de6d7d2d61
Gerrit-Change-Number: 634784
Gerrit-PatchSet: 1
Gerrit-Owner: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
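The one-line fix above forwards the exception triple to the parent's __exit__ instead of calling it with no arguments. A hedged sketch of the general pattern on a catch_warnings subclass (illustrative only; pywikibot's class additionally filters the recorded warnings by category):

import warnings

class WarningRecorder(warnings.catch_warnings):
    """Sketch: always record warnings and forward the exit arguments."""

    def __init__(self):
        super().__init__(record=True)

    def __enter__(self):
        self.log = super().__enter__()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Pass the exception information through rather than calling
        # super().__exit__() without arguments.
        return super().__exit__(exc_type, exc_val, exc_tb)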
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634769 )
Change subject: [4.0] Remove remaining Python 2 code parts
......................................................................
[4.0] Remove remaining Python 2 code parts
Change-Id: Ide02a21d20a0d060e66ee9596aa8f8b2b010389b
---
M .appveyor.yml
M .coveragerc
M docs/requirements-py3.txt
M pwb.py
M pywikibot/bot.py
M pywikibot/family.py
M pywikibot/logging.py
M pywikibot/page/__init__.py
M pywikibot/textlib.py
M pywikibot/tools/__init__.py
M setup.py
M tests/textlib_tests.py
12 files changed, 32 insertions(+), 37 deletions(-)
Approvals:
Mpaa: Looks good to me, approved
jenkins-bot: Verified
diff --git a/.appveyor.yml b/.appveyor.yml
index c5f21b9..caf0905 100644
--- a/.appveyor.yml
+++ b/.appveyor.yml
@@ -92,7 +92,6 @@
test_script:
- chcp 65001
- set PYTHONIOENCODING=utf8
- - set PYTHONWARNINGS=ignore:::pkg_resources.py2_warn
- "mkdir %PYWIKIBOT_DIR%"
- "python -Werror::UserWarning -m generate_user_files -dir:%PYWIKIBOT_DIR% -family:wikipedia -lang:en -v -debug -user:%PYWIKIBOT_USERNAME%"
diff --git a/.coveragerc b/.coveragerc
index cf272bf..8d0e536 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,7 +1,5 @@
[report]
exclude_lines =
- if __debug__ and not PY2: # pyflakes workaround
-
# Have to re-enable the standard pragma
pragma: no cover
diff --git a/docs/requirements-py3.txt b/docs/requirements-py3.txt
index 986aee3..3799161 100644
--- a/docs/requirements-py3.txt
+++ b/docs/requirements-py3.txt
@@ -1,5 +1,4 @@
# This is a PIP requirements file for building Sphinx documentation of pywikibot
-# using sphinx on python3.4.
# requirements.txt is also needed
sphinx >= 1.8, != 3.1.0
diff --git a/pwb.py b/pwb.py
index 70842a6..80bbc7e 100755
--- a/pwb.py
+++ b/pwb.py
@@ -18,6 +18,7 @@
#
# Distributed under the terms of the MIT license.
#
+# ## KEEP PYTHON 2 SUPPORT FOR THIS SCRIPT ## #
from __future__ import print_function
import os
@@ -33,7 +34,7 @@
pwb = None
# The following snippet was developed by Ned Batchelder (and others)
-# for coverage [1], with python 3 support [2] added later,
+# for coverage [1], with Python 3 support [2] added later,
# and is available under the BSD license (see [3])
# [1]
# https://bitbucket.org/ned/coveragepy/src/b5abcee50dbe/coverage/execfile.py
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index dcd8f50..2abb868 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -1411,10 +1411,7 @@
# exc_info contains exception from self.run() while terminating
exc_info = sys.exc_info()
pywikibot.output('Script terminated ', newline=False)
- # Python 2 also needs QuitKeyboardInterrupt
- # to be compared with exc_info[0] (T195687)
- if exc_info[0] is None or exc_info[0] in (KeyboardInterrupt,
- QuitKeyboardInterrupt):
+ if exc_info[0] is None or exc_info[0] is KeyboardInterrupt:
pywikibot.output('successfully.')
else:
pywikibot.output('by exception:\n')
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 2b1412d..4005b6c 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -1427,8 +1427,7 @@
# AutoFamily refers to the variable set below, not the function
return super().scriptpath(code)
- # str() used because py2 can't accept a unicode as the name of a class
- AutoFamily = type(str('AutoFamily'), (SingleSiteFamily,), locals())
+ AutoFamily = type('AutoFamily', (SingleSiteFamily,), locals())
return AutoFamily()
diff --git a/pywikibot/logging.py b/pywikibot/logging.py
index 6951f62..22ba07f 100644
--- a/pywikibot/logging.py
+++ b/pywikibot/logging.py
@@ -10,7 +10,7 @@
import sys
# logging levels
-from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL, StreamHandler
+from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL
STDOUT = 16
VERBOSE = 18
@@ -70,9 +70,6 @@
else:
logger = logging.getLogger('pywiki')
- if not logger.handlers: # lastResort for Python 2 (T188417)
- logger.handlers.append(StreamHandler())
-
# invoke any init routines
if _init_routines:
_init()
diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index 55c9a23..82e0059 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -20,7 +20,6 @@
import logging
import os.path
import re
-import sys
import unicodedata
from collections import Counter, defaultdict, OrderedDict
@@ -3820,8 +3819,7 @@
if not hasattr(cls, 'title_pattern'):
return True
- # todo: use re.fullmatch when Python 3.4+ required
- return bool(re.match(cls.title_pattern + '$', entity_id))
+ return bool(re.fullmatch(cls.title_pattern, entity_id))
def __getattr__(self, name):
if name in self.DATA_ATTRIBUTES:
@@ -6458,6 +6456,7 @@
# match.string stores original text so we do not need
# to pass it to handle_entity, ♥ Python
return match.group(0)
+
if match.group('decimal'):
unicode_codepoint = int(match.group('decimal'))
elif match.group('hex'):
@@ -6470,14 +6469,11 @@
unicode_codepoint, unicode_codepoint)
if unicode_codepoint and unicode_codepoint not in ignore:
- if unicode_codepoint > sys.maxunicode:
- # solve narrow Python 2 build exception (UTF-16)
- return eval("'\\U{0:08x}'".format(unicode_codepoint))
- else:
- return chr(unicode_codepoint)
- else:
- # Leave the entity unchanged
- return match.group(0)
+ return chr(unicode_codepoint)
+
+ # Leave the entity unchanged
+ return match.group(0)
+
return _ENTITY_SUB(handle_entity, text)
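Two of the changes above can be shown in isolation: re.fullmatch() anchors the match to the whole string, replacing the old re.match(pattern + '$', ...) idiom (and unlike '$' it does not accept a trailing newline), and Python 3's chr() handles code points above 0xFFFF directly, which made the narrow-build eval() workaround unnecessary. A minimal sketch with an illustrative pattern and code point:

    import re

    # re.fullmatch() matches the entire string, replacing the older
    # re.match(pattern + '$', ...) idiom.
    pattern = r'Q[1-9]\d*'                    # illustrative item-id style pattern
    assert re.fullmatch(pattern, 'Q42')
    assert re.fullmatch(pattern, 'Q42 extra') is None

    # chr() accepts any valid code point on Python 3, including values
    # above 0xFFFF that needed a workaround on narrow Python 2 builds.
    assert chr(0x1F40D) == '\U0001F40D'       # U+1F40D SNAKE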
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 481608a..e495ca2 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -681,15 +681,18 @@
m = link_pattern.search(text, pos=curpos)
if not m:
break
+
# Ignore links to sections of the same page
if not m.group('title').strip():
curpos = m.end()
continue
+
# Ignore interwiki links
if (site.isInterwikiLink(m.group('title').strip())
and not m.group('title').strip().startswith(':')):
curpos = m.end()
continue
+
groups = m.groupdict()
if groups['label'] and '[[' in groups['label']:
# TODO: Work on the link within the label too
@@ -704,10 +707,12 @@
end = extended_match.end()
else:
end = m.end()
+
start = m.start()
# Since this point the m variable shouldn't be used as it may not
# contain all contents
del m
+
try:
link = pywikibot.Link.create_separated(
groups['title'], site, section=groups['section'],
@@ -735,15 +740,16 @@
# remove leading ":" from the link text
if new_label[0] == ':':
new_label = new_label[1:]
+
new_linktrail = groups['linktrail']
if new_linktrail:
new_label += new_linktrail
if new_link is False:
# unlink - we remove the section if there's any
- assert isinstance(new_label, str), \
- 'link text must be str.'
+ assert isinstance(new_label, str), 'link text must be str.'
new_link = new_label
+
if isinstance(new_link, str):
# Nothing good can come out of the fact that bytes is returned so
# force unicode
@@ -751,9 +757,9 @@
# Make sure that next time around we will not find this same hit.
curpos = start + len(new_link)
continue
- elif isinstance(new_link, bytes):
- raise ValueError('The result must be unicode (str in Python 3) '
- 'and not bytes (str in Python 2).')
+
+ if isinstance(new_link, bytes):
+ raise ValueError('The result must be str and not bytes.')
# Verify that it's either Link, Page or str
check_classes(new_link)
@@ -780,6 +786,7 @@
if new_section:
new_title += '#' + new_section
+
if new_label is None:
new_label = new_title
@@ -806,6 +813,7 @@
else:
new_text = '[[{0}]]{1}'.format(new_label[:len(new_title)],
new_label[len(new_title):])
+
text = text[:start] + new_text + text[end:]
# Make sure that next time around we will not find this same hit.
curpos = start + len(new_text)
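For context on the bytes check above, the callback passed to textlib.replace_links() may return a Link, Page or str (or False to unlink); returning bytes now raises ValueError with the shorter message. A minimal sketch of a conforming callback, assuming that returning None leaves a link unchanged and using an invented site and text:

    import pywikibot
    from pywikibot import textlib

    def callback(link, *args):
        # Returning a str relabels the link; returning bytes would now
        # raise ValueError('The result must be str and not bytes.').
        if link.title == 'World':
            return 'homeworlder'
        return None  # assumption: None keeps the link as-is

    site = pywikibot.Site('en', 'wikipedia')
    new_text = textlib.replace_links('Hello [[World]]!', callback, site)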
diff --git a/pywikibot/tools/__init__.py b/pywikibot/tools/__init__.py
index b44f246..0a71018 100644
--- a/pywikibot/tools/__init__.py
+++ b/pywikibot/tools/__init__.py
@@ -932,7 +932,7 @@
"""A generator that allows items to be added during generating."""
def __next__(self):
- """Python 3 iterator method."""
+ """Iterator method."""
if len(self):
return self.popleft()
else:
@@ -964,8 +964,7 @@
@raises ValueError: When 7za is not available or the opening mode is
unknown or it tries to write a 7z archive.
@raises FileNotFoundError: When the filename doesn't exist and it tries
- to read from it or it tries to determine the compression algorithm (or
- IOError on Python 2).
+ to read from it or it tries to determine the compression algorithm.
@raises OSError: When it's not a 7z archive but the file extension is 7z.
It is also raised by bz2 when its content is invalid. gzip does not
immediately raise that error but only on reading it.
diff --git a/setup.py b/setup.py
index 35af98d..e52b33c 100644
--- a/setup.py
+++ b/setup.py
@@ -28,6 +28,7 @@
#
# Distributed under the terms of the MIT license.
#
+# ## KEEP PYTHON 2 SUPPORT FOR THIS SCRIPT ## #
import os
import sys
diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py
index 10e7020..0df9bc9 100644
--- a/tests/textlib_tests.py
+++ b/tests/textlib_tests.py
@@ -1008,9 +1008,10 @@
if link.title == 'World':
# This must be a bytes instance not unicode
return b'homeworlder'
- self.assertRaisesRegex(
- ValueError, r'unicode \(str.*bytes \(str',
- textlib.replace_links, self.text, callback, self.wp_site)
+
+ with self.assertRaisesRegex(ValueError,
+ r'The result must be str and not bytes\.'):
+ textlib.replace_links(self.text, callback, self.wp_site)
def test_replace_interwiki_links(self):
"""Make sure interwiki links can not be replaced."""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634769
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ide02a21d20a0d060e66ee9596aa8f8b2b010389b
Gerrit-Change-Number: 634769
Gerrit-PatchSet: 3
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634768 )
Change subject: [doc] Additional hint to sitelink badges
......................................................................
[doc] Additional hint to sitelink badges
Bug: T265800
Change-Id: Ib0eb1b421edf0b87641142b03dc277aa3a61df8a
---
M HISTORY.rst
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Mpaa: Looks good to me, approved
jenkins-bot: Verified
diff --git a/HISTORY.rst b/HISTORY.rst
index 288065f..c781cc0 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -245,7 +245,7 @@
* TokenWallet: login automatically
* Add closed_wikis to Family.langs property (T225413)
* Redirect 'mo' site code to 'ro' and remove interwiki_replacement_overrides (T225417, T89451)
-* Add support for badges on Wikibase item sitelinks (T128202)
+* Add support for badges on Wikibase item sitelinks through a SiteLink object instead of plain str (T128202)
* Remove login.showCaptchaWindow() method
* New parameter supplied in suggest_help function for missing dependencies
* Remove NonMWAPISite class
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634768
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ib0eb1b421edf0b87641142b03dc277aa3a61df8a
Gerrit-Change-Number: 634768
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634747 )
Change subject: [IMPR] fix type in docstring
......................................................................
[IMPR] fix type in docstring
Change-Id: I047b37ca572f780901e1fe58a5710706e643a7b1
---
M pywikibot/flow.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Mpaa: Looks good to me, approved
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/flow.py b/pywikibot/flow.py
index d494666..792ca54 100644
--- a/pywikibot/flow.py
+++ b/pywikibot/flow.py
@@ -421,7 +421,7 @@
"""Return the page associated with the post.
@return: Page associated with the post
- @rtype: FlowPage
+ @rtype: Topic
"""
return self._page
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/634747
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I047b37ca572f780901e1fe58a5710706e643a7b1
Gerrit-Change-Number: 634747
Gerrit-PatchSet: 3
Gerrit-Owner: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged