jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/637598 )
Change subject: [cleanup] Remove Python 2 hints
......................................................................
[cleanup] Remove Python 2 hints
Change-Id: I4938f593685c9d9178ae6e5cd32c7f1bfcd7ef31
---
M pywikibot/textlib.py
M tests/ui_tests.py
2 files changed, 2 insertions(+), 3 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 471357f..617a7d9 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -601,8 +601,7 @@
If it's a string and the replacement was a sequence it converts it into a
Page instance. If the replacement is done via a callable it'll use it like
unlinking and directly replace the link with the text itself. It only
- supports unicode when used by the callable and bytes (str in Python 2) are
- not allowed.
+ supports unicode when used by the callable and bytes are not allowed.
If either the section or label should be used the replacement can be a
function which returns a Link instance and copies the value which should
diff --git a/tests/ui_tests.py b/tests/ui_tests.py
index 52cb8e4..ac68cb9 100644
--- a/tests/ui_tests.py
+++ b/tests/ui_tests.py
@@ -702,7 +702,7 @@
len(self._colors) if self.expect_color else 0)
def _getvalue(self):
- """Get the value of the stream and also decode it on Python 2."""
+ """Get the value of the stream."""
return self.stream.getvalue()
def _encounter_color(self, color, target_stream):
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/637598
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I4938f593685c9d9178ae6e5cd32c7f1bfcd7ef31
Gerrit-Change-Number: 637598
Gerrit-PatchSet: 2
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/637595 )
Change subject: [bugfix] Fix Subject.isDone()
......................................................................
[bugfix] Fix Subject.isDone()
Change-Id: I88078923a58aac75d5e632d2b99e317d2cceab6e
---
M scripts/interwiki.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index f91203b..6cf62dd 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1352,7 +1352,7 @@
def isDone(self):
"""Return True if all the work for this subject has completed."""
- return bool(self.todo)
+ return len(self.todo) == 0
def problem(self, txt, createneed=True):
"""Report a problem with the resolution of this subject."""
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/637595
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I88078923a58aac75d5e632d2b99e317d2cceab6e
Gerrit-Change-Number: 637595
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/606116 )
Change subject: [cleanup] Replace deprecated originPage by origin in Subjects
......................................................................
[cleanup] Replace deprecated originPage by origin in Subjects
interwiki_graph.py:
- deprecate originPage parameter in GraphSavingThread and use origin instead
- deprecate originPage attribute in GraphSavingThread and use origin instead
- introduce originPage properties and add the deprecation warning
- update doc in Subject.__init__
interwiki.py:
- replace all originPage occurrences by origin
- replace all foundIn occurrences by found_in
- decrease deeply nested statements
Change-Id: I361e2bb7636bf05a077763b4b4111d8e51017892
---
M pywikibot/interwiki_graph.py
M scripts/interwiki.py
2 files changed, 302 insertions(+), 262 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py
index 216e2f7..febbecf 100644
--- a/pywikibot/interwiki_graph.py
+++ b/pywikibot/interwiki_graph.py
@@ -14,7 +14,9 @@
import pywikibot
from pywikibot import config2 as config
-from pywikibot.tools import deprecated, ModuleDeprecationWrapper
+from pywikibot.tools import (
+ deprecated, deprecated_args, ModuleDeprecationWrapper)
+
try:
import pydot
except ImportError as e:
@@ -39,21 +41,36 @@
mechanism to kill a thread if it takes too long.
"""
- def __init__(self, graph, originPage):
+ @deprecated_args(originPage='origin') # since 20200617
+ def __init__(self, graph, origin):
"""Initializer."""
super().__init__()
self.graph = graph
- self.originPage = originPage
+ self.origin = origin
+
+ @property
+ @deprecated('GraphSavingThread.origin', since='20200617')
+ def originPage(self):
+ """Deprecated property for the origin page.
+
+ DEPRECATED. Use origin.
+ """
+ return self.origin
+
+ @originPage.setter
+ @deprecated('GraphSavingThread.origin', since='20200617')
+ def originPage(self, value):
+ self.origin = value
def run(self):
"""Write graphs to the data directory."""
- for format in config.interwiki_graph_formats:
+ for fmt in config.interwiki_graph_formats:
filename = config.datafilepath(
- 'interwiki-graphs/' + getFilename(self.originPage, format))
- if self.graph.write(filename, prog='dot', format=format):
- pywikibot.output('Graph saved as %s' % filename)
+ 'interwiki-graphs/' + getFilename(self.origin, fmt))
+ if self.graph.write(filename, prog='dot', format=fmt):
+ pywikibot.output('Graph saved as ' + filename)
else:
- pywikibot.output('Graph could not be saved as %s' % filename)
+ pywikibot.output('Graph could not be saved as ' + filename)
class Subject:
@@ -69,7 +86,7 @@
# Remember the "origin page"
self._origin = origin
- # foundIn is a dictionary where pages are keys and lists of
+ # found_in is a dictionary where pages are keys and lists of
# pages are values. It stores where we found each page.
# As we haven't yet found a page that links to the origin page, we
# start with an empty list for it.
@@ -217,8 +234,8 @@
For more info see U{https://meta.wikimedia.org/wiki/Interwiki_graphs}
"""
- pywikibot.output('Preparing graph for %s'
- % self.subject.origin.title())
+ pywikibot.output('Preparing graph for {}'
+ .format(self.subject.origin.title()))
# create empty graph
self.graph = pydot.Dot()
# self.graph.set('concentrate', 'true')
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index ff4baad..f91203b 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -561,8 +561,8 @@
While using dict values would be faster for the remove() operation,
keeping list values is important, because the order in which the pages
were found matters: the earlier a page is found, the closer it is to
- the Subject.originPage. Chances are that pages found within 2 interwiki
- distance from the originPage are more related to the original topic
+ the Subject.origin. Chances are that pages found within 2 interwiki
+ distance from the origin are more related to the original topic
than pages found later on, after 3, 4, 5 or more interwiki hops.
Keeping this order is hence important to display an ordered
@@ -632,7 +632,7 @@
pseudocode::
- todo <- [originPage]
+ todo <- [origin]
done <- []
while todo != []:
pending <- todo
@@ -657,7 +657,7 @@
Code becomes::
- todo <- {originPage.site:[originPage]}
+ todo <- {origin.site: [origin]}
done <- []
while todo != {}:
site <- electSite()
@@ -678,7 +678,7 @@
this Object.
"""
- def __init__(self, originPage=None, hints=None, conf=None):
+ def __init__(self, origin=None, hints=None, conf=None):
"""
Initializer.
@@ -687,14 +687,14 @@
"""
self.conf = conf
- super().__init__(originPage)
+ super().__init__(origin)
self.repoPage = None
# todo is a list of all pages that still need to be analyzed.
# Mark the origin page as todo.
self.todo = PageTree()
- if originPage:
- self.todo.add(originPage)
+ if origin:
+ self.todo.add(origin)
# done is a list of all pages that have been analyzed and that
# are known to belong to this subject.
@@ -726,6 +726,7 @@
for page in tree.filter(site):
if page.exists() and page.isDisambig():
return page
+ return None
def getFoundNonDisambig(self, site):
"""
@@ -738,9 +739,12 @@
"""
for tree in [self.done, self.pending]:
for page in tree.filter(site):
- if page.exists() and not page.isDisambig() and \
- not page.isRedirectPage() and not page.isCategoryRedirect():
+ if page.exists() \
+ and not page.isDisambig() \
+ and not page.isRedirectPage() \
+ and not page.isCategoryRedirect():
return page
+ return None
def getFoundInCorrectNamespace(self, site):
"""
@@ -755,26 +759,27 @@
for page in tree.filter(site):
# -hintsonly: before we have an origin page, any namespace will
# do.
- if self.originPage and \
- page.namespace() == self.originPage.namespace():
- if page.exists() and not \
- page.isRedirectPage() and not page.isCategoryRedirect():
+ if self.origin and page.namespace() == self.origin.namespace():
+ if page.exists() \
+ and not page.isRedirectPage() \
+ and not page.isCategoryRedirect():
return page
+ return None
def translate(self, hints=None, keephintedsites=False):
"""Add the given translation hints to the todo list."""
- if self.conf.same and self.originPage:
+ if self.conf.same and self.origin:
if hints:
hints += ['all:']
else:
hints = ['all:']
- site = self.originPage.site
+ site = self.origin.site
else:
site = pywikibot.Site()
links = titletranslate.translate(
- self.originPage,
+ self.origin,
hints=hints,
auto=self.conf.auto,
removebrackets=self.conf.hintnobracket,
@@ -783,7 +788,7 @@
for link in links:
page = pywikibot.Page(link)
self.todo.add(page)
- self.foundIn[page] = [None]
+ self.found_in[page] = [None]
if keephintedsites:
self.hintedsites.add(page.site)
@@ -808,7 +813,7 @@
"""
# Bug-check: Isn't there any work still in progress? We can't work on
# different sites at a time!
- if len(self.pending) > 0:
+ if self.pending:
raise "BUG: Can't start to work on {}; still working on {}".format(
site, self.pending)
# Prepare a list of suitable pages
@@ -842,32 +847,32 @@
"""
if self.forcedStop:
return False
- # cannot check backlink before we have an origin page
- if self.conf.nobackonly and self.originPage:
- if page == self.originPage:
- try:
- pywikibot.output('{} has a backlink from {}.'
- .format(page, linkingPage))
- except UnicodeDecodeError:
- pywikibot.output('Found a backlink for a page.')
- self.makeForcedStop(counter)
- return False
- if page in self.foundIn:
- # not new
- self.foundIn[page].append(linkingPage)
+ # cannot check backlink before we have an origin page
+ if self.conf.nobackonly and self.origin and page == self.origin:
+ try:
+ pywikibot.output('{} has a backlink from {}.'
+ .format(page, linkingPage))
+ except UnicodeDecodeError:
+ pywikibot.output('Found a backlink for a page.')
+ self.makeForcedStop(counter)
return False
- else:
- self.foundIn[page] = [linkingPage]
- self.todo.add(page)
- counter.plus(page.site)
- return True
+
+ if page in self.found_in:
+ # not new
+ self.found_in[page].append(linkingPage)
+ return False
+
+ self.found_in[page] = [linkingPage]
+ self.todo.add(page)
+ counter.plus(page.site)
+ return True
def skipPage(self, page, target, counter):
"""Return whether page has to be skipped."""
- return self.isIgnored(target) or \
- self.namespaceMismatch(page, target, counter) or \
- self.wiktionaryMismatch(target)
+ return self.isIgnored(target) \
+ or self.namespaceMismatch(page, target, counter) \
+ or self.wiktionaryMismatch(target)
def namespaceMismatch(self, linkingPage, linkedPage, counter):
"""
@@ -876,92 +881,94 @@
Returns True if the namespaces are different and the user
has selected not to follow the linked page.
"""
- if linkedPage in self.foundIn:
+ if linkedPage in self.found_in:
# We have seen this page before, don't ask again.
return False
- if (self.originPage
- and self.originPage.namespace() != linkedPage.namespace()):
+
+ if self.origin and self.origin.namespace() != linkedPage.namespace():
# Allow for a mapping between different namespaces
- crossFrom = self.originPage.site.family.crossnamespace.get(
- self.originPage.namespace(), {})
- crossTo = crossFrom.get(self.originPage.site.lang,
+ crossFrom = self.origin.site.family.crossnamespace.get(
+ self.origin.namespace(), {})
+ crossTo = crossFrom.get(self.origin.site.lang,
crossFrom.get('_default', {}))
nsmatch = crossTo.get(linkedPage.site.lang,
crossTo.get('_default', []))
if linkedPage.namespace() in nsmatch:
return False
+
if self.conf.autonomous:
pywikibot.output(
'NOTE: Ignoring link from page {} in namespace'
' {} to page {} in namespace {}.'
.format(linkingPage, linkingPage.namespace(), linkedPage,
linkedPage.namespace()))
- # Fill up foundIn, so that we will not write this notice
- self.foundIn[linkedPage] = [linkingPage]
+ # Fill up found_in, so that we will not write this notice
+ self.found_in[linkedPage] = [linkingPage]
return True
- else:
- preferredPage = self.getFoundInCorrectNamespace(
- linkedPage.site)
- if preferredPage:
- pywikibot.output(
- 'NOTE: Ignoring link from page {} in namespace {} to '
- 'page {} in namespace {} because page {} in the '
- 'correct namespace has already been found.'
- .format(linkingPage, linkingPage.namespace(),
- linkedPage, linkedPage.namespace(),
- preferredPage))
- return True
+
+ preferredPage = self.getFoundInCorrectNamespace(linkedPage.site)
+ if preferredPage:
+ pywikibot.output(
+ 'NOTE: Ignoring link from page {} in namespace {} to '
+ 'page {} in namespace {} because page {} in the '
+ 'correct namespace has already been found.'
+ .format(linkingPage, linkingPage.namespace(),
+ linkedPage, linkedPage.namespace(),
+ preferredPage))
+ return True
+
+ choice = pywikibot.input_choice(
+ 'WARNING: {} is in namespace "{}", but {} is in '
+ 'namespace "{}". Follow it anyway?'
+ .format(self.origin, self.origin.namespace(),
+ linkedPage, linkedPage.namespace()),
+ [('Yes', 'y'), ('No', 'n'),
+ ('Add an alternative', 'a'), ('give up', 'g')],
+ automatic_quit=False)
+
+ if choice != 'y':
+ # Fill up found_in, so that we will not ask again
+ self.found_in[linkedPage] = [linkingPage]
+ if choice == 'g':
+ self.makeForcedStop(counter)
+ elif choice == 'a':
+ newHint = pywikibot.input(
+ 'Give the alternative for language {}, not '
+ 'using a language code:'
+ .format(linkedPage.site.lang))
+ if newHint:
+ alternativePage = pywikibot.Page(
+ linkedPage.site, newHint)
+ if alternativePage:
+ # add the page that was entered by the user
+ self.addIfNew(alternativePage, counter, None)
else:
- choice = pywikibot.input_choice(
- 'WARNING: {} is in namespace "{}", but {} is in '
- 'namespace "{}". Follow it anyway?'
- .format(self.originPage, self.originPage.namespace(),
- linkedPage, linkedPage.namespace()),
- [('Yes', 'y'), ('No', 'n'),
- ('Add an alternative', 'a'), ('give up', 'g')],
- automatic_quit=False)
- if choice != 'y':
- # Fill up foundIn, so that we will not ask again
- self.foundIn[linkedPage] = [linkingPage]
- if choice == 'g':
- self.makeForcedStop(counter)
- elif choice == 'a':
- newHint = pywikibot.input(
- 'Give the alternative for language {}, not '
- 'using a language code:'
- .format(linkedPage.site.lang))
- if newHint:
- alternativePage = pywikibot.Page(
- linkedPage.site, newHint)
- if alternativePage:
- # add the page that was entered by the user
- self.addIfNew(alternativePage, counter,
- None)
- else:
- pywikibot.output(
- 'NOTE: ignoring {} and its interwiki links'
- .format(linkedPage))
- return True
- else:
- # same namespaces, no problem
- # or no origin page yet, also no problem
- return False
+ pywikibot.output(
+ 'NOTE: ignoring {} and its interwiki links'
+ .format(linkedPage))
+ return True
+
+ # same namespaces, no problem
+ # or no origin page yet, also no problem
+ return False
def wiktionaryMismatch(self, page):
"""Check for ignoring pages."""
- if self.originPage and self.conf.same == 'wiktionary':
- if page.title().lower() != self.originPage.title().lower():
+ if self.origin and self.conf.same == 'wiktionary':
+ if page.title().lower() != self.origin.title().lower():
pywikibot.output('NOTE: Ignoring {} for {} in wiktionary mode'
- .format(page, self.originPage))
+ .format(page, self.origin))
return True
- if (page.title() != self.originPage.title()
- and self.originPage.namespace().case == 'case-sensitive'
+
+ if (page.title() != self.origin.title()
+ and self.origin.namespace().case == 'case-sensitive'
and page.namespace().case == 'case-sensitive'):
pywikibot.output(
'NOTE: Ignoring {} for {} in wiktionary mode because both '
'languages are uncapitalized.'
- .format(page, self.originPage))
+ .format(page, self.origin))
return True
+
return False
def disambigMismatch(self, page, counter):
@@ -977,65 +984,73 @@
alternativePage is either None, or a page that the user has
chosen to use instead of the given page.
"""
- if not self.originPage:
+ if not self.origin:
return (False, None) # any page matches til we have an origin page
+
if self.conf.autonomous:
- if self.originPage.isDisambig() and not page.isDisambig():
+ if self.origin.isDisambig() and not page.isDisambig():
pywikibot.output(
'NOTE: Ignoring link from disambiguation page {} to '
- 'non-disambiguation {}'.format(self.originPage, page))
+ 'non-disambiguation {}'.format(self.origin, page))
return (True, None)
- elif not self.originPage.isDisambig() and page.isDisambig():
+
+ if not self.origin.isDisambig() and page.isDisambig():
pywikibot.output(
'NOTE: Ignoring link from non-disambiguation page {} to '
- 'disambiguation {}'.format(self.originPage, page))
+ 'disambiguation {}'.format(self.origin, page))
return (True, None)
+
else:
choice = 'y'
- if self.originPage.isDisambig() and not page.isDisambig():
+ if self.origin.isDisambig() and not page.isDisambig():
disambig = self.getFoundDisambig(page.site)
if disambig:
pywikibot.output(
'NOTE: Ignoring non-disambiguation page {} for {} '
'because disambiguation page {} has already been '
'found.'
- .format(page, self.originPage, disambig))
+ .format(page, self.origin, disambig))
return (True, None)
- else:
- choice = pywikibot.input_choice(
- "WARNING: {} is a disambiguation page, but {} doesn't "
- 'seem to be one. Follow it anyway?'
- .format(self.originPage, page),
- [('Yes', 'y'), ('No', 'n'),
- ('Add an alternative', 'a'), ('give up', 'g')],
- automatic_quit=False)
- elif not self.originPage.isDisambig() and page.isDisambig():
+
+ choice = pywikibot.input_choice(
+ "WARNING: {} is a disambiguation page, but {} doesn't "
+ 'seem to be one. Follow it anyway?'
+ .format(self.origin, page),
+ [('Yes', 'y'), ('No', 'n'),
+ ('Add an alternative', 'a'), ('give up', 'g')],
+ automatic_quit=False)
+
+ elif not self.origin.isDisambig() and page.isDisambig():
nondisambig = self.getFoundNonDisambig(page.site)
if nondisambig:
pywikibot.output(
'NOTE: Ignoring disambiguation page {} for {} because '
'non-disambiguation page {} has already been found.'
- .format(page, self.originPage, nondisambig))
+ .format(page, self.origin, nondisambig))
return (True, None)
- else:
- choice = pywikibot.input_choice(
- "WARNING: {} doesn't seem to be a disambiguation "
- 'page, but {} is one. Follow it anyway?'
- .format(self.originPage, page),
- [('Yes', 'y'), ('No', 'n'),
- ('Add an alternative', 'a'), ('give up', 'g')],
- automatic_quit=False)
+
+ choice = pywikibot.input_choice(
+ "WARNING: {} doesn't seem to be a disambiguation "
+ 'page, but {} is one. Follow it anyway?'
+ .format(self.origin, page),
+ [('Yes', 'y'), ('No', 'n'),
+ ('Add an alternative', 'a'), ('give up', 'g')],
+ automatic_quit=False)
+
if choice == 'n':
return (True, None)
- elif choice == 'a':
+
+ if choice == 'a':
newHint = pywikibot.input(
'Give the alternative for language {}, not using a '
'language code:'.format(page.site.lang))
alternativePage = pywikibot.Page(page.site, newHint)
return (True, alternativePage)
- elif choice == 'g':
+
+ if choice == 'g':
self.makeForcedStop(counter)
return (True, None)
+
# We can follow the page.
return (False, None)
@@ -1045,16 +1060,18 @@
pywikibot.output('Skipping link {} to an ignored language'
.format(page))
return True
+
if page in self.conf.ignore:
pywikibot.output('Skipping link {} to an ignored page'
.format(page))
return True
+
return False
def reportInterwikilessPage(self, page):
"""Report interwikiless page."""
self.conf.note('{} does not have any interwiki links'
- .format(self.originPage))
+ .format(self.origin))
if config.without_interwiki:
with codecs.open(
pywikibot.config.datafilepath('without_interwiki.txt'),
@@ -1066,10 +1083,10 @@
if (not self.workonme # we don't work on it anyway
or not self.untranslated and not self.conf.askhints
or self.hintsAsked
- or not self.originPage
- or not self.originPage.exists()
- or self.originPage.isRedirectPage()
- or self.originPage.isCategoryRedirect()):
+ or not self.origin
+ or not self.origin.exists()
+ or self.origin.isRedirectPage()
+ or self.origin.isCategoryRedirect()):
return
self.hintsAsked = True
@@ -1078,7 +1095,7 @@
t = self.conf.showtextlink
if t:
- pywikibot.output(self.originPage.get()[:t])
+ pywikibot.output(self.origin.get()[:t])
while True:
newhint = pywikibot.input('Give a hint (? to see pagetext):')
@@ -1088,14 +1105,14 @@
if newhint == '?':
t += self.conf.showtextlinkadd
- pywikibot.output(self.originPage.get()[:t])
+ pywikibot.output(self.origin.get()[:t])
elif ':' not in newhint:
pywikibot.output(fill(
'Please enter a hint in the format language:pagename '
'or type nothing if you do not have a hint.'))
else:
links = titletranslate.translate(
- self.originPage,
+ self.origin,
hints=[newhint],
auto=self.conf.auto,
removebrackets=self.conf.hintnobracket)
@@ -1127,11 +1144,11 @@
if self.conf.skipauto:
dictName, year = page.autoFormat()
if dictName is not None:
- if self.originPage:
+ if self.origin:
pywikibot.warning(
'{}:{} relates to {}:{}, which is an '
'auto entry {}({})'
- .format(self.originPage.site.lang, self.originPage,
+ .format(self.origin.site.lang, self.origin,
page.site.lang, page, dictName, year))
# Abort processing if the bot is running in autonomous mode
@@ -1147,7 +1164,7 @@
if not page.exists():
self.conf.remove.append(str(page))
self.conf.note('{} does not exist. Skipping.'.format(page))
- if page == self.originPage:
+ if page == self.origin:
# The page we are working on is the page that does not
# exist. No use in doing any work on it in that case.
for site, count in self.todo.siteCounts():
@@ -1158,7 +1175,7 @@
self.done = PageTree()
continue
- elif page.isRedirectPage() or page.isCategoryRedirect():
+ if page.isRedirectPage() or page.isCategoryRedirect():
if page.isRedirectPage():
redirectTargetPage = page.getRedirectTarget()
redir = ''
@@ -1167,7 +1184,7 @@
redir = 'category '
self.conf.note('{} is {}redirect to {}'
.format(page, redir, redirectTargetPage))
- if self.originPage is None or page == self.originPage:
+ if self.origin is None or page == self.origin:
# the 1st existig page becomes the origin page, if none was
# supplied
if self.conf.initialredirect:
@@ -1175,7 +1192,7 @@
# loop
if not redirectTargetPage.isRedirectPage() \
and not redirectTargetPage.isCategoryRedirect():
- self.originPage = redirectTargetPage
+ self.origin = redirectTargetPage
self.todo.add(redirectTargetPage)
counter.plus(redirectTargetPage.site)
else:
@@ -1196,33 +1213,34 @@
if self.addIfNew(redirectTargetPage, counter, page):
if config.interwiki_shownew:
pywikibot.output('{}: {} gives new {}redirect {}'
- .format(self.originPage,
+ .format(self.origin,
page, redir,
redirectTargetPage))
continue
# must be behind the page.isRedirectPage() part
# otherwise a redirect error would be raised
- elif page_empty_check(page):
+ if page_empty_check(page):
self.conf.remove.append(str(page))
self.conf.note('{} is empty. Skipping.'.format(page))
- if page == self.originPage:
+ if page == self.origin:
for site, count in self.todo.siteCounts():
counter.minus(site, count)
self.todo = PageTree()
self.done = PageTree()
- self.originPage = None
+ self.origin = None
continue
- elif page.section():
+ if page.section():
self.conf.note('{} is a page section. Skipping.'.format(page))
continue
# Page exists, isn't a redirect, and is a plain link (no section)
- if self.originPage is None:
+ if self.origin is None:
# the 1st existig page becomes the origin page, if none was
# supplied
- self.originPage = page
+ self.origin = page
+
try:
iw = page.langlinks()
except pywikibot.UnknownSite:
@@ -1246,7 +1264,7 @@
duplicate = p
break
- if self.originPage == page:
+ if self.origin == page:
self.untranslated = (len(iw) == 0)
if self.conf.untranslatedonly:
# Ignore the interwiki links.
@@ -1263,7 +1281,7 @@
elif self.conf.autonomous and duplicate and not skip:
pywikibot.output('Stopping work on {} because duplicate pages'
' {} and {} are found'
- .format(self.originPage, duplicate, page))
+                    .format(self.origin, duplicate, page))
self.makeForcedStop(counter)
try:
with codecs.open(
@@ -1271,11 +1289,11 @@
'autonomous_problems.dat'),
'a', 'utf-8') as f:
f.write('* %s {Found more than one link for %s}'
- % (self.originPage, page.site))
+ % (self.origin, page.site))
if config.interwiki_graph \
and config.interwiki_graph_url:
filename = interwiki_graph.getFilename(
- self.originPage,
+ self.origin,
extension=config.interwiki_graph_formats[0])
f.write(' [{}{} graph]'
.format(config.interwiki_graph_url,
@@ -1297,15 +1315,16 @@
pywikibot.output(
'NOTE: {}: {} extra interwiki on hinted site '
'ignored {}'
- .format(self.originPage, page, linkedPage))
+ .format(self.origin, page, linkedPage))
break
+
if not self.skipPage(page, linkedPage, counter):
- if self.conf.followinterwiki or page == self.originPage:
+ if self.conf.followinterwiki or page == self.origin:
if self.addIfNew(linkedPage, counter, page):
# It is new. Also verify whether it is the second
# on the same site
lpsite = linkedPage.site
- for prevPage in self.foundIn:
+ for prevPage in self.found_in:
if prevPage != linkedPage and \
prevPage.site == lpsite:
# Still, this could be "no problem" as
@@ -1314,17 +1333,16 @@
pywikibot.output(
'NOTE: {}: {} gives duplicate '
'interwiki on same site {}'
- .format(self.originPage, page,
- linkedPage))
+ .format(self.origin, page, linkedPage))
break
else:
if config.interwiki_shownew:
pywikibot.output(
'{0}: {1} gives new interwiki {2}'
- .format(self.originPage,
- page, linkedPage))
+ .format(self.origin, page, linkedPage))
if self.forcedStop:
break
+
# These pages are no longer 'in progress'
self.pending = PageTree()
# Check whether we need hints and the user offered to give them
@@ -1334,18 +1352,18 @@
def isDone(self):
"""Return True if all the work for this subject has completed."""
- return len(self.todo) == 0
+ return bool(self.todo)
def problem(self, txt, createneed=True):
"""Report a problem with the resolution of this subject."""
- pywikibot.output('ERROR: ' + txt)
+ pywikibot.error(txt)
self.confirm = True
if createneed:
self.problemfound = True
def whereReport(self, page, indent=4):
"""Report found interlanguage links with conflicts."""
- for page2 in sorted(self.foundIn[page]):
+ for page2 in sorted(self.found_in[page]):
if page2 is None:
pywikibot.output(' ' * indent + 'Given as a hint.')
else:
@@ -1359,16 +1377,16 @@
# Each value will be a list of pages.
new = {}
for page in self.done:
- if page.exists() and not page.isRedirectPage() and \
- not page.isCategoryRedirect():
+ if page.exists() and not page.isRedirectPage() \
+ and not page.isCategoryRedirect():
site = page.site
if site.family.interwiki_forward:
# TODO: allow these cases to be propagated!
# inhibit the forwarding families pages to be updated.
continue
- if site == self.originPage.site:
- if page != self.originPage:
+ if site == self.origin.site:
+ if page != self.origin:
self.problem('Found link to ' + page)
self.whereReport(page)
errorCount += 1
@@ -1377,6 +1395,7 @@
new[site].append(page)
else:
new[site] = [page]
+
# See if new{} contains any problematic values
result = {}
for site, pages in new.items():
@@ -1402,58 +1421,61 @@
# First loop over the ones that have more solutions
for site, pages in new.items():
- if len(pages) > 1:
- pywikibot.output('=' * 30)
- pywikibot.output('Links to ' + site)
- for i, page2 in enumerate(pages, 1):
- pywikibot.output(' ({}) Found link to {} in:'
- .format(i, page2))
- self.whereReport(page2, indent=8)
+ if len(pages) <= 1:
+ continue
- # TODO: allow answer to repeat previous or go back after a
- # mistake
- answer = pywikibot.input_choice(
- 'Which variant should be used?',
- (ListOption(pages),
- StandardOption('none', 'n'),
- StandardOption('give up', 'g')))
- if answer == 'g':
- return None
- elif answer != 'n':
- result[site] = answer[1]
+ pywikibot.output('=' * 30)
+ pywikibot.output('Links to ' + site)
+ for i, page2 in enumerate(pages, 1):
+ pywikibot.output(' ({}) Found link to {} in:'
+ .format(i, page2))
+ self.whereReport(page2, indent=8)
+
+ # TODO: allow answer to repeat previous or go back after a mistake
+ answer = pywikibot.input_choice(
+ 'Which variant should be used?',
+ (ListOption(pages),
+ StandardOption('none', 'n'),
+ StandardOption('give up', 'g')))
+ if answer == 'g':
+ return None
+ elif answer != 'n':
+ result[site] = answer[1]
# Loop over the ones that have one solution, so are in principle
# not a problem.
acceptall = False
for site, pages in new.items():
- if len(pages) == 1:
- if not acceptall:
- pywikibot.output('=' * 30)
- page2 = pages[0]
- pywikibot.output('Found link to {} in:'.format(page2))
- self.whereReport(page2, indent=4)
- while True:
- if acceptall:
- answer = 'a'
- else:
- # TODO: allow answer to repeat previous or go back
- # after a mistake
- answer = pywikibot.input_choice(
- 'What should be done?',
- [('accept', 'a'), ('reject', 'r'),
- ('give up', 'g'), ('accept all', 'l')], 'a',
- automatic_quit=False)
- if answer == 'l': # accept all
- acceptall = True
- answer = 'a'
- if answer == 'a': # accept this one
- result[site] = pages[0]
- break
- elif answer == 'g': # give up
- return None
- elif answer == 'r': # reject
- # None acceptable
- break
+ if len(pages) != 1:
+ continue
+
+ if not acceptall:
+ pywikibot.output('=' * 30)
+ page2 = pages[0]
+ pywikibot.output('Found link to {} in:'.format(page2))
+ self.whereReport(page2, indent=4)
+ while True:
+ if acceptall:
+ answer = 'a'
+ else:
+ # TODO: allow answer to repeat previous or go back
+ # after a mistake
+ answer = pywikibot.input_choice(
+ 'What should be done?',
+ [('accept', 'a'), ('reject', 'r'),
+ ('give up', 'g'), ('accept all', 'l')], 'a',
+ automatic_quit=False)
+ if answer == 'l': # accept all
+ acceptall = True
+ answer = 'a'
+ if answer == 'a': # accept this one
+ result[site] = pages[0]
+ break
+ elif answer == 'g': # give up
+ return None
+ elif answer == 'r': # reject
+ # None acceptable
+ break
return result
def finish(self):
@@ -1465,60 +1487,59 @@
"""
if not self.isDone():
raise Exception('Bugcheck: finish called before done')
- if not self.workonme:
+
+ if not self.workonme or not self.origin:
return
- if self.originPage:
- if self.originPage.isRedirectPage():
- return
- if self.originPage.isCategoryRedirect():
- return
- else:
+
+ if self.origin.isRedirectPage() or self.origin.isCategoryRedirect():
return
+
if not self.untranslated and self.conf.untranslatedonly:
return
+
if self.forcedStop: # autonomous with problem
pywikibot.output('======Aborted processing {}======'
- .format(self.originPage))
+ .format(self.origin))
return
+
# The following check is not always correct and thus disabled.
# self.done might contain no interwiki links because of the -neverlink
# argument or because of disambiguation conflicts.
# if len(self.done) == 1:
# # No interwiki at all
# return
+
pywikibot.output('======Post-processing {}======'
- .format(self.originPage))
+ .format(self.origin))
# Assemble list of accepted interwiki links
new = self.assemble()
if new is None: # User said give up
pywikibot.output('======Aborted processing {}======'
- .format(self.originPage))
+ .format(self.origin))
return
# Make sure new contains every page link, including the page we are
# processing
# TODO: should be move to assemble()
# replaceLinks will skip the site it's working on.
- if self.originPage.site not in new:
+ if self.origin.site not in new:
# TODO: make this possible as well.
- if not self.originPage.site.family.interwiki_forward:
- new[self.originPage.site] = self.originPage
-
- # self.replaceLinks(self.originPage, new, True)
+ if not self.origin.site.family.interwiki_forward:
+ new[self.origin.site] = self.origin
updatedSites = []
notUpdatedSites = []
# Process all languages here
self.conf.always = False
if self.conf.limittwo:
- lclSite = self.originPage.site
+ lclSite = self.origin.site
lclSiteDone = False
frgnSiteDone = False
for siteCode in lclSite.family.languages_by_size:
site = pywikibot.Site(siteCode, lclSite.family)
- if (not lclSiteDone and site == lclSite) or \
- (not frgnSiteDone and site != lclSite and site in new):
+ if (not lclSiteDone and site == lclSite) \
+ or (not frgnSiteDone and site != lclSite and site in new):
if site == lclSite:
lclSiteDone = True # even if we fail the update
if (site.family.name in config.usernames
@@ -1547,9 +1568,9 @@
mods, mcomment, adding, removing, modifying \
= compareLanguages(old, new, lclSite,
self.conf.summary)
- if (len(removing) > 0 and not self.conf.autonomous
- or len(modifying) > 0 and self.problemfound
- or len(old) == 0
+ if (removing and not self.conf.autonomous
+ or modifying and self.problemfound
+ or not old
or (self.conf.needlimit
and len(adding) + len(modifying)
>= self.conf.needlimit + 1)):
@@ -1565,9 +1586,9 @@
else:
for (site, page) in new.items():
# if we have an account for this site
- if site.family.name in config.usernames and \
- site.code in config.usernames[site.family.name] and \
- not site.has_data_repository:
+ if site.family.name in config.usernames \
+ and site.code in config.usernames[site.family.name] \
+ and not site.has_data_repository:
# Try to do the changes
try:
if self.replaceLinks(page, new):
@@ -1591,8 +1612,8 @@
"""Return True if saving was successful."""
if self.conf.localonly:
# In this case only continue on the Page we started with
- if page != self.originPage:
- raise SaveError('-localonly and page != originPage')
+ if page != self.origin:
+ raise SaveError('-localonly and page != origin')
if page.section():
# This is not a page, but a subpage. Do not edit it.
pywikibot.output('Not editing {}: not doing interwiki on subpages'
@@ -1668,7 +1689,7 @@
# When running in autonomous mode without -force switch, make sure we
# don't remove any items, but allow addition of the new ones
- if self.conf.autonomous and not self.conf.force and len(removing) > 0:
+ if self.conf.autonomous and not self.conf.force and removing:
for rmsite in removing:
# Sometimes sites have an erroneous link to itself as an
# interwiki
@@ -1676,10 +1697,8 @@
continue
rmPage = old[rmsite]
# put it to new means don't delete it
- if (
- not self.conf.cleanup
- or str(rmPage) not in self.conf.remove
- ):
+ if (not self.conf.cleanup
+ or str(rmPage) not in self.conf.remove):
new[rmsite] = rmPage
pywikibot.warning(
'{} is either deleted or has a mismatching '
@@ -1716,8 +1735,10 @@
'SKIPPING: {} is under construction or to be deleted.'
.format(page))
return False
+
if newtext == oldtext:
return False
+
pywikibot.showDiff(oldtext, newtext)
# Determine whether we need permission to submit
@@ -1755,6 +1776,7 @@
else:
# If we do not need to ask, allow
answer = 'y'
+
# If we got permission to submit, do so
if answer == 'y':
self.conf.note('Updating live wiki...')
@@ -1806,11 +1828,13 @@
else:
break
return True
- elif answer == 'g':
+
+ if answer == 'g':
raise GiveUpOnPage('User asked us to give up')
- else:
- raise LinkMustBeRemoved('Found incorrect link to {} in {}'.format(
- ', '.join(x.code for x in removing), page))
+
+ raise LinkMustBeRemoved('Found incorrect link to {} in {}'
+ .format(', '.join(x.code for x in removing),
+ page))
def reportBacklinks(self, new, updatedSites):
"""
@@ -1912,7 +1936,7 @@
@property
def dump_titles(self):
"""Return list of titles for dump file."""
- return [s.originPage.title() for s in self.subjects]
+ return [s.origin.title() for s in self.subjects]
def dump(self, append=True):
"""Write dump file."""
@@ -1942,7 +1966,7 @@
fs = self.firstSubject()
if fs:
self.conf.note('The first unfinished subject is {}'
- .format(fs.originPage))
+ .format(fs.origin))
pywikibot.output(
'NOTE: Number of pages queued is {0}, trying to add {1} more.'
.format(len(self.subjects), number))
@@ -2149,8 +2173,7 @@
modifying = sorted(site for site in oldiw & newiw
if old[site] != new[site])
- if not summary and \
- len(adding) + len(removing) + len(modifying) <= 3:
+ if not summary and len(adding) + len(removing) + len(modifying) <= 3:
# Use an extended format for the string linking to all added pages.
def fmt(d, site):
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/606116
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I361e2bb7636bf05a077763b4b4111d8e51017892
Gerrit-Change-Number: 606116
Gerrit-PatchSet: 12
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Dvorapa <dvorapa(a)seznam.cz>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: Zhuyifei1999 <zhuyifei1999(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/637496 )
Change subject: [IMPR] add type annotation in pywikibot.__init__.py
......................................................................
[IMPR] add type annotation in pywikibot.__init__.py
Also remove some IDE warnings.
Change-Id: I219dc63f9643e0247d4a4ce59d9fe3dcbb676002
---
M pywikibot/__init__.py
M pywikibot/bot.py
2 files changed, 47 insertions(+), 90 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index f51482a..e9f40c7 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -54,7 +54,7 @@
from pywikibot.logging import (
critical, debug, error, exception, log, output, stdout, warning
)
-from pywikibot.site import BaseSite
+from pywikibot.site import BaseSite, DataSite, APISite, ClosedSite
from pywikibot.tools import (
classproperty,
deprecate_arg as _deprecate_arg,
@@ -134,12 +134,11 @@
return cls._ISO8601Format()
@classmethod
- def _ISO8601Format(cls, sep: str = 'T'):
+ def _ISO8601Format(cls, sep: str = 'T') -> str:
"""ISO8601 format string.
@param sep: one-character separator, placed between the date and time
@return: ISO8601 format string
- @rtype: str
"""
assert(len(sep) == 1)
return '%Y-%m-%d{0}%H:%M:%SZ'.format(sep)
@@ -219,7 +218,8 @@
precision: Optional[float] = None,
globe: Optional[str] = None, typ: str = '',
name: str = '', dim: Optional[int] = None,
- site=None, globe_item=None, primary: bool = False):
+ site: Optional[DataSite] = None, globe_item=None,
+ primary: bool = False):
"""
Represent a geo coordinate.
@@ -227,13 +227,11 @@
@param lon: Longitude
@param alt: Altitude? TODO FIXME
@param precision: precision
- @type precision: float
@param globe: Which globe the point is on
@param typ: The type of coordinate point
@param name: The name
@param dim: Dimension (in meters)
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
@param globe_item: The Wikibase item for the globe, or the entity URI
of this Wikibase item. Takes precedence over 'globe'
if present.
@@ -272,14 +270,13 @@
return self._entity
- def toWikibase(self):
+ def toWikibase(self) -> dict:
"""
Export the data to a JSON object for the Wikibase API.
FIXME: Should this be in the DataSite object?
@return: Wikibase JSON
- @rtype: dict
"""
return {'latitude': self.lat,
'longitude': self.lon,
@@ -289,15 +286,13 @@
}
@classmethod
- def fromWikibase(cls, data, site):
+ def fromWikibase(cls, data: dict, site: DataSite):
"""
Constructor to create an object from Wikibase's JSON output.
@param data: Wikibase JSON
- @type data: dict
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
- @rtype: pywikibot.Coordinate
+ @rtype: Coordinate
"""
globe = None
@@ -388,7 +383,8 @@
)
return self._dim
- def get_globe_item(self, repo=None, lazy_load=False):
+ def get_globe_item(self, repo: Optional[DataSite] = None,
+ lazy_load: bool = False):
"""
Return the ItemPage corresponding to the globe.
@@ -400,9 +396,7 @@
@param repo: the Wikibase site for the globe, if different from that
provided with the Coordinate.
- @type repo: pywikibot.site.DataSite
@param lazy_load: Do not raise NoPage if ItemPage does not exist.
- @type lazy_load: bool
@return: pywikibot.ItemPage
"""
if isinstance(self._entity, ItemPage):
@@ -450,7 +444,7 @@
after: int = 0,
timezone: int = 0,
calendarmodel: Optional[str] = None,
- site=None):
+ site: Optional[DataSite] = None):
"""Create a new WbTime object.
The precision can be set by the Wikibase int value (0-14) or by a human
@@ -482,7 +476,6 @@
@param timezone: Timezone information in minutes.
@param calendarmodel: URI identifying the calendar model
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
"""
if year is None:
raise ValueError('no year given')
@@ -537,7 +530,7 @@
after: int = 0,
timezone: int = 0,
calendarmodel: Optional[str] = None,
- site=None):
+ site: Optional[DataSite] = None):
"""Create a new WbTime object from a UTC date/time string.
The timestamp differs from ISO 8601 in that:
@@ -557,7 +550,6 @@
@param timezone: Timezone information in minutes.
@param calendarmodel: URI identifying the calendar model
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
@rtype: pywikibot.WbTime
"""
match = re.match(r'([-+]?\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z',
@@ -573,7 +565,7 @@
def fromTimestamp(cls, timestamp, precision: Union[int, str] = 14,
before: int = 0, after: int = 0,
timezone: int = 0, calendarmodel: Optional[str] = None,
- site=None):
+ site: Optional[DataSite] = None):
"""
Create a new WbTime object from a pywikibot.Timestamp.
@@ -587,7 +579,6 @@
@param timezone: Timezone information in minutes.
@param calendarmodel: URI identifying the calendar model
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
@rtype: pywikibot.WbTime
"""
return cls.fromTimestr(timestamp.isoformat(), precision=precision,
@@ -595,7 +586,7 @@
timezone=timezone, calendarmodel=calendarmodel,
site=site)
- def toTimestr(self, force_iso=False):
+ def toTimestr(self, force_iso: bool = False) -> str:
"""
Convert the data to a UTC date/time string.
@@ -603,9 +594,7 @@
force_iso.
@param force_iso: whether the output should be forced to ISO 8601
- @type force_iso: bool
@return: Timestamp in a format resembling ISO 8601
- @rtype: str
"""
if force_iso:
return Timestamp._ISO8601Format_new.format(
@@ -614,13 +603,10 @@
return self.FORMATSTR.format(self.year, self.month, self.day,
self.hour, self.minute, self.second)
- def toTimestamp(self):
+ def toTimestamp(self) -> Timestamp:
"""
Convert the data to a pywikibot.Timestamp.
- @return: Timestamp
- @rtype: pywikibot.Timestamp
-
@raises ValueError: instance value can not be represented using
Timestamp
"""
@@ -629,12 +615,11 @@
return Timestamp.fromISOformat(
self.toTimestr(force_iso=True).lstrip('+'))
- def toWikibase(self):
+ def toWikibase(self) -> dict:
"""
Convert the data to a JSON object for the Wikibase API.
@return: Wikibase JSON
- @rtype: dict
"""
json = {'time': self.toTimestr(),
'precision': self.precision,
@@ -646,13 +631,12 @@
return json
@classmethod
- def fromWikibase(cls, wb: dict, site=None):
+ def fromWikibase(cls, wb: dict, site: Optional[DataSite] = None):
"""
Create a WbTime from the JSON data given by the Wikibase API.
@param wb: Wikibase JSON
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
@rtype: pywikibot.WbTime
"""
return cls.fromTimestr(wb['time'], wb['precision'],
@@ -667,15 +651,13 @@
_items = ('amount', 'upperBound', 'lowerBound', 'unit')
@staticmethod
- def _require_errors(site):
+ def _require_errors(site: DataSite) -> bool:
"""
Check if Wikibase site is so old it requires error bounds to be given.
If no site item is supplied it raises a warning and returns True.
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
- @rtype: bool
"""
if not site:
warning(
@@ -685,14 +667,13 @@
return site.mw_version < '1.29.0-wmf.2'
@staticmethod
- def _todecimal(value: str):
+ def _todecimal(value: str) -> Optional[Decimal]:
"""
Convert a string to a Decimal for use in WbQuantity.
None value is returned as is.
@param value: decimal number to convert
- @rtype: Decimal
"""
if isinstance(value, Decimal):
return value
@@ -701,21 +682,20 @@
return Decimal(str(value))
@staticmethod
- def _fromdecimal(value):
+ def _fromdecimal(value: Decimal) -> Optional[str]:
"""
Convert a Decimal to a string representation suitable for WikiBase.
None value is returned as is.
@param value: decimal number to convert
- @type value: Decimal
- @rtype: str
"""
if value is None:
return None
return format(value, '+g')
- def __init__(self, amount, unit=None, error=None, site=None):
+ def __init__(self, amount, unit=None, error=None,
+ site: Optional[DataSite] = None):
"""
Create a new WbQuantity object.
@@ -729,7 +709,6 @@
@type error: same as amount, or tuple of two values, where the first
value is the upper error and the second is the lower error value.
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
"""
if amount is None:
raise ValueError('no amount given')
@@ -764,7 +743,8 @@
return self._unit.concept_uri()
return self._unit or '1'
- def get_unit_item(self, repo=None, lazy_load=False):
+ def get_unit_item(self, repo: Optional[DataSite] = None,
+ lazy_load: bool = False):
"""
Return the ItemPage corresponding to the unit.
@@ -776,9 +756,7 @@
@param repo: the Wikibase site for the unit, if different from that
provided with the WbQuantity.
- @type repo: pywikibot.site.DataSite
@param lazy_load: Do not raise NoPage if ItemPage does not exist.
- @type lazy_load: bool
@return: pywikibot.ItemPage
"""
if not isinstance(self._unit, str):
@@ -788,12 +766,11 @@
self._unit = ItemPage.from_entity_uri(repo, self._unit, lazy_load)
return self._unit
- def toWikibase(self):
+ def toWikibase(self) -> dict:
"""
Convert the data to a JSON object for the Wikibase API.
@return: Wikibase JSON
- @rtype: dict
"""
json = {'amount': self._fromdecimal(self.amount),
'upperBound': self._fromdecimal(self.upperBound),
@@ -803,13 +780,12 @@
return json
@classmethod
- def fromWikibase(cls, wb: dict, site=None):
+ def fromWikibase(cls, wb: dict, site: Optional[DataSite] = None):
"""
Create a WbQuantity from the JSON data given by the Wikibase API.
@param wb: Wikibase JSON
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
@rtype: pywikibot.WbQuantity
"""
amount = cls._todecimal(wb['amount'])
@@ -843,12 +819,11 @@
self.text = text
self.language = language
- def toWikibase(self):
+ def toWikibase(self) -> dict:
"""
Convert the data to a JSON object for the Wikibase API.
@return: Wikibase JSON
- @rtype: dict
"""
json = {'text': self.text,
'language': self.language
@@ -878,20 +853,18 @@
_items = ('page', )
@classmethod
- def _get_data_site(cls, repo_site):
+ def _get_data_site(cls, repo_site: DataSite) -> APISite:
"""
Return the site serving as a repository for a given data type.
Must be implemented in the extended class.
- @param site: The Wikibase site
- @type site: pywikibot.site.APISite
- @rtype: pywikibot.site.APISite
+ @param repo_site: The Wikibase site
"""
raise NotImplementedError
@classmethod
- def _get_type_specifics(cls, site) -> dict:
+ def _get_type_specifics(cls, site: DataSite) -> dict:
"""
Return the specifics for a given data type.
@@ -901,28 +874,26 @@
* ending: str, required filetype-like ending in page titles.
* label: str, describing the data type for use in error messages.
- * data_site: pywikibot.site.APISite, site serving as a repository for
+ * data_site: APISite, site serving as a repository for
the given data type.
@param site: The Wikibase site
- @type site: pywikibot.site.APISite
"""
raise NotImplementedError
@staticmethod
- def _validate(page, data_site, ending: str, label):
+ def _validate(page, data_site, ending: str, label: str):
"""
Validate the provided page against general and type specific rules.
@param page: Page containing the data.
- @type text: pywikibot.Page
+ @type page: pywikibot.Page
@param data_site: The site serving as a repository for the given
data type.
- @type data_site: pywikibot.site.APISite
+ @type data_site: APISite
@param ending: Required filetype-like ending in page titles.
E.g. '.map'
@param label: Label describing the data type in error messages.
- @type site: str
"""
if not isinstance(page, Page):
raise ValueError(
@@ -952,14 +923,13 @@
"Page must be in 'Data:' namespace and end in '{0}' "
'for {1}.'.format(ending, label))
- def __init__(self, page, site=None):
+ def __init__(self, page, site: Optional[DataSite] = None):
"""
Create a new _WbDataPage object.
@param page: page containing the data
- @type text: pywikibot.Page
+ @type page: pywikibot.Page
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
"""
site = site or Site().data_repository()
specifics = type(self)._get_type_specifics(site)
@@ -971,23 +941,21 @@
"""Override super.hash() as toWikibase is a string for _WbDataPage."""
return hash(self.toWikibase())
- def toWikibase(self):
+ def toWikibase(self) -> str:
"""
Convert the data to the value required by the Wikibase API.
@return: title of the data page incl. namespace
- @rtype: str
"""
return self.page.title()
@classmethod
- def fromWikibase(cls, page_name: str, site):
+ def fromWikibase(cls, page_name: str, site: DataSite):
"""
Create a _WbDataPage from the JSON data given by the Wikibase API.
@param page_name: page name from Wikibase value
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
@rtype: pywikibot._WbDataPage
"""
data_site = cls._get_data_site(site)
@@ -999,24 +967,20 @@
"""A Wikibase geo-shape representation."""
@classmethod
- def _get_data_site(cls, site):
+ def _get_data_site(cls, site: DataSite) -> APISite:
"""
Return the site serving as a geo-shape repository.
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
- @rtype: pywikibot.site.APISite
"""
return site.geo_shape_repository()
@classmethod
- def _get_type_specifics(cls, site):
+ def _get_type_specifics(cls, site: DataSite) -> dict:
"""
Return the specifics for WbGeoShape.
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
- @rtype: dict
"""
specifics = {
'ending': '.map',
@@ -1030,24 +994,20 @@
"""A Wikibase tabular-data representation."""
@classmethod
- def _get_data_site(cls, site):
+ def _get_data_site(cls, site: DataSite) -> APISite:
"""
Return the site serving as a tabular-data repository.
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
- @rtype: pywikibot.site.APISite
"""
return site.tabular_data_repository()
@classmethod
- def _get_type_specifics(cls, site):
+ def _get_type_specifics(cls, site: DataSite) -> dict:
"""
Return the specifics for WbTabularData.
@param site: The Wikibase site
- @type site: pywikibot.site.DataSite
- @rtype: dict
"""
specifics = {
'ending': '.tab',
@@ -1069,31 +1029,28 @@
_items = ('json',)
- def __init__(self, json):
+ def __init__(self, json) -> dict:
"""
Create a new WbUnknown object.
@param json: Wikibase JSON
- @type: dict
"""
self.json = json
- def toWikibase(self):
+ def toWikibase(self) -> dict:
"""
Return the JSON object for the Wikibase API.
@return: Wikibase JSON
- @rtype: dict
"""
return self.json
@classmethod
- def fromWikibase(cls, json):
+ def fromWikibase(cls, json: dict):
"""
Create a WbUnknown from the JSON data given by the Wikibase API.
@param json: Wikibase JSON
- @type json: dict
@rtype: pywikibot.WbUnknown
"""
return cls(json)
@@ -1131,7 +1088,8 @@
@_deprecate_arg('sysop', None)
def Site(code: Optional[str] = None, fam=None, user: Optional[str] = None,
- sysop=None, interface=None, url: Optional[str] = None):
+ sysop=None, interface=None,
+ url: Optional[str] = None) -> Union[APISite, DataSite, ClosedSite]:
"""A factory method to obtain a Site object.
Site objects are cached and reused by this method.
@@ -1148,7 +1106,6 @@
@type interface: subclass of L{pywikibot.site.BaseSite} or string
@param url: Instead of code and fam, does try to get a Site based on the
URL. Still requires that the family supporting that URL exists.
- @rtype: pywikibot.site.APISite
@raises ValueError: URL and pair of code and family given
@raises ValueError: Invalid interface name
@raises pywikibot.exceptions.SiteDefinitionError: Unknown URL
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 21bd532..6a954c7 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -63,7 +63,7 @@
'VERBOSE', 'critical', 'debug', 'error', 'exception', 'log', 'warning',
'output', 'stdout', 'LoggingFormatter', 'RotatingFileHandler',
'init_handlers', 'writelogheader',
- 'input', 'input_choice', 'input_yn', 'input_list_choice',
+ 'input', 'input_choice', 'input_yn', 'input_list_choice', 'ui',
'Option', 'StandardOption', 'NestedOption', 'IntegerOption',
'ContextOption', 'ListOption', 'ShowingListOption', 'MultipleChoiceList',
'ShowingMultipleChoiceList', 'OutputProxyOption',
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/637496
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I219dc63f9643e0247d4a4ce59d9fe3dcbb676002
Gerrit-Change-Number: 637496
Gerrit-PatchSet: 3
Gerrit-Owner: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/637036 )
Change subject: [IMPR] cleanup IDE warnings in api.py
......................................................................
[IMPR] cleanup IDE warnings in api.py
Change-Id: I23810d6a0460f815a261029e3c01d19e30a95784
---
M pywikibot/data/api.py
M tests/dry_api_tests.py
2 files changed, 46 insertions(+), 38 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index e2ec2be..512d246 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -46,8 +46,9 @@
if PYTHON_VERSION >= (3, 9):
Set = set
Tuple = tuple
+ FrozenSet = frozenset
else:
- from typing import Set, Tuple
+ from typing import Set, Tuple, FrozenSet
_logger = 'data.api'
@@ -149,8 +150,8 @@
"""
Create a new UploadWarning instance.
- @param filekey: The filekey of the uploaded file to reuse it later. If
- no key is known or it is an incomplete file it may be None.
+ @param file_key: The file_key of the uploaded file to reuse it later.
+ If no key is known or it is an incomplete file it may be None.
@param offset: The starting offset for a chunked upload. Is False when
there is no offset.
"""
@@ -263,9 +264,9 @@
self._fetch(self.preloaded_modules)
main_modules_param = self.parameter('main', 'action')
- assert(main_modules_param)
- assert('type' in main_modules_param)
- assert(isinstance(main_modules_param['type'], list))
+ assert main_modules_param
+ assert 'type' in main_modules_param
+ assert isinstance(main_modules_param['type'], list)
assert self._action_modules == set(main_modules_param['type'])
# While deprecated with warning in 1.25, paraminfo param 'querymodules'
@@ -341,7 +342,7 @@
self._fetch(modules)
- def _fetch(self, modules: set) -> None:
+ def _fetch(self, modules: Union[set, frozenset]) -> None:
"""
Fetch paraminfo for multiple modules without initializing beforehand.
@@ -430,7 +431,7 @@
normalized_result = {missing_modules[0]: normalized_result}
elif len(module_batch) > 1 and missing_modules:
# Rerequest the missing ones separately
- pywikibot.log('Inconsitency in batch "{0}"; rerequest '
+ pywikibot.log('Inconsistency in batch "{0}"; rerequest '
'separately'.format(missing_modules))
failed_modules.extend(missing_modules)
@@ -498,6 +499,8 @@
for param in parameters:
if param['name'] == 'generator':
break
+ else:
+ param = {}
assert param['name'] == 'generator' and \
submodules >= set(param['type'])
@@ -506,7 +509,7 @@
# Users will supply the wrong type, and expect it to work.
modules = self._modules_to_set(modules)
- assert(self._action_modules)
+ assert self._action_modules
return {'query+' + mod
if '+' not in mod and mod in self.query_modules
@@ -655,7 +658,7 @@
@property
@deprecated('submodules() or module_paths', since='20150715')
- def modules(self) -> Set[str]:
+ def modules(self) -> Union[Set[str], FrozenSet[str]]:
"""
Set of all main and query modules without path prefixes.
@@ -852,11 +855,11 @@
self._valid_disable = set()
if site is None:
return
- for type in site._paraminfo.parameter(module, param)['type']:
- if type[0] == '!':
- self._valid_disable.add(type[1:])
+ for type_value in site._paraminfo.parameter(module, param)['type']:
+ if type_value[0] == '!':
+ self._valid_disable.add(type_value[1:])
else:
- self._valid_enable.add(type)
+ self._valid_enable.add(type_value)
if clear_invalid:
self._enabled &= self._valid_enable
self._disabled &= self._valid_disable
@@ -868,7 +871,7 @@
'"{0}"'.format('", "'.join(invalid_names)))
self._site_set = True
- def from_dict(self, dict):
+ def from_dict(self, dictionary):
"""
Load options from the dict.
@@ -876,16 +879,17 @@
previously, but only the dict values should be applied it needs to be
cleared first.
- @param dict: A dictionary containing for each entry either the value
+ @param dictionary:
+ a dictionary containing for each entry either the value
False, True or None. The names must be valid depending on whether
they enable or disable the option. All names with the value None
can be in either of the list.
- @type dict: dict (keys are strings, values are bool/None)
+ @type dictionary: dict (keys are strings, values are bool/None)
"""
enabled = set()
disabled = set()
removed = set()
- for name, value in dict.items():
+ for name, value in dictionary.items():
if value is True:
enabled.add(name)
elif value is False:
@@ -1473,7 +1477,8 @@
return {action: {'result': 'Success', 'nochange': ''}}
return None
- def _is_wikibase_error_retryable(self, error):
+ @staticmethod
+ def _is_wikibase_error_retryable(error):
ERR_MSG = (
'edit-already-exists',
'actionthrottledtext', # T192912
@@ -1495,7 +1500,7 @@
return message in ERR_MSG
@staticmethod
- def _generate_MIME_part(key, content, keytype=None, headers=None):
+ def _generate_mime_part(key, content, keytype=None, headers=None):
if not keytype:
try:
content.encode('ascii')
@@ -1555,10 +1560,10 @@
# construct a MIME message containing all API key/values
container = MIMEMultipart(_subtype='form-data')
for key, value in params.items():
- submsg = cls._generate_MIME_part(key, value)
+ submsg = cls._generate_mime_part(key, value)
container.attach(submsg)
for key, value in mime_params.items():
- submsg = cls._generate_MIME_part(key, *value)
+ submsg = cls._generate_mime_part(key, *value)
container.attach(submsg)
# strip the headers to get the HTTP message body
@@ -1628,7 +1633,7 @@
self.wait()
return None, use_get
- def _json_loads(self, data: str) -> dict:
+ def _json_loads(self, data: Union[str, bytes]) -> Optional[dict]:
"""Read source text and return a dict.
@param data: raw data string
@@ -2026,7 +2031,7 @@
@return: base directory path for cache entries
"""
- path = os.path.join(pywikibot.config2.base_dir,
+ path = os.path.join(config.base_dir,
'apicache-py{0:d}'.format(PYTHON_VERSION[0]))
cls._make_dir(path)
cls._get_cache_dir = classmethod(lambda c: path) # cache the result
@@ -2497,7 +2502,7 @@
@return: True if yes, False otherwise
"""
- assert(self.limited_module) # some modules do not have a prefix
+ assert self.limited_module # some modules do not have a prefix
return bool(
self.site._paraminfo.parameter('query+' + self.limited_module,
'namespace'))
@@ -2518,7 +2523,7 @@
# type such as NoneType or bool, or more than one namespace
# if the API module does not support multiple namespaces
"""
- assert(self.limited_module) # some modules do not have a prefix
+ assert self.limited_module # some modules do not have a prefix
param = self.site._paraminfo.parameter('query+' + self.limited_module,
'namespace')
if not param:
@@ -2760,7 +2765,7 @@
"""
# If possible, use self.request after __init__ instead of appendParams
- def appendParams(params, key, value):
+ def append_params(params, key, value):
if key in params:
params[key] += '|' + value
else:
@@ -2768,18 +2773,18 @@
kwargs = self._clean_kwargs(kwargs)
parameters = kwargs['parameters']
# get some basic information about every page generated
- appendParams(parameters, 'prop', 'info|imageinfo|categoryinfo')
+ append_params(parameters, 'prop', 'info|imageinfo|categoryinfo')
if g_content:
# retrieve the current revision
- appendParams(parameters, 'prop', 'revisions')
- appendParams(parameters, 'rvprop',
- 'ids|timestamp|flags|comment|user|content')
+ append_params(parameters, 'prop', 'revisions')
+ append_params(parameters, 'rvprop',
+ 'ids|timestamp|flags|comment|user|content')
if not ('inprop' in parameters
and 'protection' in parameters['inprop']):
- appendParams(parameters, 'inprop', 'protection')
- appendParams(parameters, 'iiprop',
- 'timestamp|user|comment|url|size|sha1|metadata')
- appendParams(parameters, 'iilimit', 'max') # T194233
+ append_params(parameters, 'inprop', 'protection')
+ append_params(parameters, 'iiprop',
+ 'timestamp|user|comment|url|size|sha1|metadata')
+ append_params(parameters, 'iilimit', 'max') # T194233
parameters['generator'] = generator
super().__init__(**kwargs)
self.resultkey = 'pages' # element to look for in result
@@ -3049,6 +3054,8 @@
if match:
delta = datetime.timedelta(
**{match.group(2): int(match.group(1))})
+ else:
+ delta = 0
wait = response.get('wait')
if wait:
delta = datetime.timedelta(seconds=int(wait))
@@ -3204,7 +3211,7 @@
page._coords = coords
-def update_page(page, pagedict: dict, props=[]):
+def update_page(page, pagedict: dict, props=None):
"""Update attributes of Page object page, based on query data in pagedict.
@param page: object to be updated
@@ -3222,6 +3229,7 @@
_update_pageid(page, pagedict)
_update_contentmodel(page, pagedict)
+ props = props or []
if 'info' in props:
page._isredir = 'redirect' in pagedict
diff --git a/tests/dry_api_tests.py b/tests/dry_api_tests.py
index 23cd552..45095a7 100644
--- a/tests/dry_api_tests.py
+++ b/tests/dry_api_tests.py
@@ -283,11 +283,11 @@
net = False
def test_mime_file_payload(self):
- """Test Request._generate_MIME_part loads binary as binary."""
+ """Test Request._generate_mime_part loads binary as binary."""
local_filename = join_images_path('MP_sounds.png')
with open(local_filename, 'rb') as f:
file_content = f.read()
- submsg = Request._generate_MIME_part(
+ submsg = Request._generate_mime_part(
'file', file_content, ('image', 'png'),
{'filename': local_filename})
self.assertEqual(file_content, submsg.get_payload(decode=True))
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/637036
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I23810d6a0460f815a261029e3c01d19e30a95784
Gerrit-Change-Number: 637036
Gerrit-PatchSet: 3
Gerrit-Owner: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged