http://www.mediawiki.org/wiki/Special:Code/pywikipedia/9703
Revision: 9703
Author: xqt
Date: 2011-10-30 20:34:20 +0000 (Sun, 30 Oct 2011)
Log Message:
-----------
commenting out unicode test due to bug #3426312
Modified Paths:
--------------
branches/rewrite/scripts/interwiki.py
Modified: branches/rewrite/scripts/interwiki.py
===================================================================
--- branches/rewrite/scripts/interwiki.py 2011-10-30 20:25:02 UTC (rev 9702)
+++ branches/rewrite/scripts/interwiki.py 2011-10-30 20:34:20 UTC (rev 9703)
@@ -2248,8 +2248,8 @@
if adding or removing or modifying:
#Version info marks bots without unicode error
#This also prevents abuse filter blocking on de-wiki
- if not pywikibot.unicode_error:
- mcomment += u'r%s) (' % sys.version.split()[0]
+## if not pywikibot.unicode_error:
+## mcomment += u'r%s) (' % sys.version.split()[0]
mcomment += globalvar.summary
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/9702
Revision: 9702
Author: xqt
Date: 2011-10-30 20:25:02 +0000 (Sun, 30 Oct 2011)
Log Message:
-----------
some updates from trunk release
Modified Paths:
--------------
branches/rewrite/scripts/interwiki.py
Modified: branches/rewrite/scripts/interwiki.py
===================================================================
--- branches/rewrite/scripts/interwiki.py 2011-10-30 20:23:27 UTC (rev 9701)
+++ branches/rewrite/scripts/interwiki.py 2011-10-30 20:25:02 UTC (rev 9702)
@@ -309,6 +309,7 @@
# (C) Rob W.W. Hooft, 2003
# (C) Daniel Herding, 2004
# (C) Yuri Astrakhan, 2005-2006
+# (C) xqt, 2009-2011
# (C) Pywikipedia bot team, 2007-2011
#
# Distributed under the terms of the MIT license.
@@ -798,7 +799,7 @@
this Object.
"""
- def __init__(self, originPage, hints = None):
+ def __init__(self, originPage, hints=None):
"""Constructor. Takes as arguments the Page on the home wiki
plus optionally a list of hints for translation"""
@@ -885,7 +886,7 @@
pages = titletranslate.translate(self.originPage, hints = ['all:'],
auto = globalvar.auto, removebrackets = globalvar.hintnobracket)
else:
- pages = titletranslate.translate(self.originPage, hints = hints,
+ pages = titletranslate.translate(self.originPage, hints=hints,
auto = globalvar.auto, removebrackets = globalvar.hintnobracket)
for page in pages:
if globalvar.contentsondisk:
@@ -1006,24 +1007,29 @@
linkedPage.namespace(), preferredPage))
return True
else:
- choice = pywikibot.inputChoice(u'WARNING: %s is in namespace %i, but %s is in namespace %i. Follow it anyway?'
- % (self.originPage, self.originPage.namespace(),
- linkedPage, linkedPage.namespace()),
- ['Yes', 'No', 'Add an alternative', 'give up'], ['y', 'n', 'a', 'g'])
+ choice = pywikibot.inputChoice(
+u'WARNING: %s is in namespace %i, but %s is in namespace %i. Follow it anyway?'
+ % (self.originPage, self.originPage.namespace(),
+ linkedPage, linkedPage.namespace()),
+ ['Yes', 'No', 'Add an alternative', 'give up'],
+ ['y', 'n', 'a', 'g'])
if choice != 'y':
# Fill up foundIn, so that we will not ask again
self.foundIn[linkedPage] = [linkingPage]
if choice == 'g':
self.makeForcedStop(counter)
elif choice == 'a':
- newHint = pywikibot.input(u'Give the alternative for language %s, not using a language code:' % linkedPage.site.language())
+ newHint = pywikibot.input(u'Give the alternative for language %s, not using a language code:'
+ % linkedPage.site.language())
if newHint:
alternativePage = pywikibot.Page(linkedPage.site, newHint)
if alternativePage:
# add the page that was entered by the user
self.addIfNew(alternativePage, counter, None)
else:
- pywikibot.output(u"NOTE: ignoring %s and its interwiki links" % linkedPage)
+ pywikibot.output(
+ u"NOTE: ignoring %s and its interwiki links"
+ % linkedPage)
return True
else:
# same namespaces, no problem
@@ -1035,7 +1041,8 @@
pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode" % (page, self.originPage))
return True
elif page.title() != self.originPage.title() and self.originPage.site.nocapitalize and page.site.nocapitalize:
- pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode because both languages are uncapitalized." % (page, self.originPage))
+ pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode because both languages are uncapitalized."
+ % (page, self.originPage))
return True
return False
@@ -1067,14 +1074,16 @@
if self.originPage.isDisambig() and not page.isDisambig():
disambig = self.getFoundDisambig(page.site)
if disambig:
- pywikibot.output(u"NOTE: Ignoring non-disambiguation page %s for %s because disambiguation page %s has already been found."
- % (page, self.originPage, disambig))
+ pywikibot.output(
+ u"NOTE: Ignoring non-disambiguation page %s for %s because disambiguation page %s has already been found."
+ % (page, self.originPage, disambig))
return (True, None)
else:
- choice = pywikibot.inputChoice(u'WARNING: %s is a disambiguation page, but %s doesn\'t seem to be one. Follow it anyway?'
- % (self.originPage, page),
- ['Yes', 'No', 'Add an alternative', 'Give up'],
- ['y', 'n', 'a', 'g'])
+ choice = pywikibot.inputChoice(
+ u'WARNING: %s is a disambiguation page, but %s doesn\'t seem to be one. Follow it anyway?'
+ % (self.originPage, page),
+ ['Yes', 'No', 'Add an alternative', 'Give up'],
+ ['y', 'n', 'a', 'g'])
elif not self.originPage.isDisambig() and page.isDisambig():
nondisambig = self.getFoundNonDisambig(page.site)
if nondisambig:
@@ -1082,10 +1091,11 @@
% (page, self.originPage, nondisambig))
return (True, None)
else:
- choice = pywikibot.inputChoice(u'WARNING: %s doesn\'t seem to be a disambiguation page, but %s is one. Follow it anyway?'
- % (self.originPage, page),
- ['Yes', 'No', 'Add an alternative', 'Give up'],
- ['y', 'n', 'a', 'g'])
+ choice = pywikibot.inputChoice(
+ u'WARNING: %s doesn\'t seem to be a disambiguation page, but %s is one. Follow it anyway?'
+ % (self.originPage, page),
+ ['Yes', 'No', 'Add an alternative', 'Give up'],
+ ['y', 'n', 'a', 'g'])
if choice == 'n':
return (True, None)
elif choice == 'a':
@@ -1110,10 +1120,12 @@
def reportInterwikilessPage(self, page):
if not globalvar.quiet:
- pywikibot.output(u"NOTE: %s does not have any interwiki links" % self.originPage)
+ pywikibot.output(u"NOTE: %s does not have any interwiki links"
+ % self.originPage)
if config.without_interwiki:
f = codecs.open(
- pywikibot.config.datafilepath('without_interwiki.txt'), 'a', 'utf-8')
+ pywikibot.config.datafilepath('without_interwiki.txt'),
+ 'a', 'utf-8')
f.write(u"# %s \n" % page)
f.close()
@@ -1170,7 +1182,7 @@
if dictName is not None:
pywikibot.output(u'WARNING: %s:%s relates to %s:%s, which is an auto entry %s(%s)'
% (self.originPage.site.language(), self.originPage,
- page.site.language(), page,dictName,year))
+ page.site.language(), page, dictName, year))
# Abort processing if the bot is running in autonomous mode.
if globalvar.autonomous:
@@ -1183,7 +1195,7 @@
# todo list.
if not page.exists():
- globalvar.remove.append(page.title(asLink=True, forceInterwiki=True))
+ globalvar.remove.append(unicode(page))
if not globalvar.quiet:
pywikibot.output(u"NOTE: %s does not exist" % page)
if page == self.originPage:
@@ -1227,22 +1239,25 @@
self.todo = PageTree()
elif not globalvar.followredirect:
if not globalvar.quiet:
- pywikibot.output(u"NOTE: not following %sredirects." % redir)
+ pywikibot.output(u"NOTE: not following %sredirects."
+ % redir)
elif page.isStaticRedirect():
if not globalvar.quiet:
- pywikibot.output(u"NOTE: not following static %sredirects." % redir)
+ pywikibot.output(
+ u"NOTE: not following static %sredirects." % redir)
elif page.site.family == redirectTargetPage.site.family \
and not self.skipPage(page, redirectTargetPage, counter):
if self.addIfNew(redirectTargetPage, counter, page):
if config.interwiki_shownew:
pywikibot.output(u"%s: %s gives new %sredirect %s"
- % (self.originPage, page, redir, redirectTargetPage))
+ % (self.originPage, page, redir,
+ redirectTargetPage))
continue
# must be behind the page.isRedirectPage() part
# otherwise a redirect error would be raised
if page.isEmpty() and not page.isCategory():
- globalvar.remove.append(page.title(asLink=True, forceInterwiki=True))
+ globalvar.remove.append(unicode(page))
if not globalvar.quiet:
pywikibot.output(u"NOTE: %s is empty. Skipping." % page)
if page == self.originPage:
@@ -1254,7 +1269,8 @@
elif page.section():
if not globalvar.quiet:
- pywikibot.output(u"NOTE: %s is a page section. Skipping." % page)
+ pywikibot.output(u"NOTE: %s is a page section. Skipping."
+ % page)
continue
# Page exists, isnt a redirect, and is a plain link (no section)
@@ -1267,7 +1283,8 @@
(skip, alternativePage) = self.disambigMismatch(page, counter)
if skip:
- pywikibot.output(u"NOTE: ignoring %s and its interwiki links" % page)
+ pywikibot.output(u"NOTE: ignoring %s and its interwiki links"
+ % page)
self.done.remove(page)
iw = ()
if alternativePage:
@@ -1295,15 +1312,14 @@
elif globalvar.autonomous and duplicate and not skip:
pywikibot.output(u"Stopping work on %s because duplicate pages"\
- " %s and %s are found" % (self.originPage,
- duplicate,
- page))
+ " %s and %s are found" % (self.originPage, duplicate, page))
self.makeForcedStop(counter)
try:
f = codecs.open(
pywikibot.config.datafilepath('autonomous_problems.dat'),
'a', 'utf-8')
- f.write(u"* %s {Found more than one link for %s}" % (self.originPage, page.site))
+ f.write(u"* %s {Found more than one link for %s}"
+ % (self.originPage, page.site))
if config.interwiki_graph and config.interwiki_graph_url:
filename = interwiki_graph.getFilename(self.originPage, extension = config.interwiki_graph_formats[0])
f.write(u" [%s%s graph]" % (config.interwiki_graph_url, filename))
@@ -1317,9 +1333,10 @@
sys.exit()
iw = ()
elif page.isEmpty() and not page.isCategory():
- globalvar.remove.append(page.title(asLink=True, forceInterwiki=True))
+ globalvar.remove.append(unicode(page))
if not globalvar.quiet:
- pywikibot.output(u"NOTE: %s is empty; ignoring it and its interwiki links" % page)
+ pywikibot.output(u"NOTE: %s is empty; ignoring it and its interwiki links"
+ % page)
# Ignore the interwiki links
self.done.remove(page)
iw = ()
@@ -1328,7 +1345,8 @@
linkedPage = pywikibot.Page(link)
if globalvar.hintsareright:
if linkedPage.site in self.hintedsites:
- pywikibot.output(u"NOTE: %s: %s extra interwiki on hinted site ignored %s" % (self.originPage, page, linkedPage))
+ pywikibot.output(u"NOTE: %s: %s extra interwiki on hinted site ignored %s"
+ % (self.originPage, page, linkedPage))
break
if not self.skipPage(page, linkedPage, counter):
if globalvar.followinterwiki or page == self.originPage:
@@ -1340,11 +1358,15 @@
if prevPage != linkedPage and prevPage.site == lpsite:
# Still, this could be "no problem" as either may be a
# redirect to the other. No way to find out quickly!
- pywikibot.output(u"NOTE: %s: %s gives duplicate interwiki on same site %s" % (self.originPage, page, linkedPage))
+ pywikibot.output(u"NOTE: %s: %s gives duplicate interwiki on same site %s"
+ % (self.originPage, page,
+ linkedPage))
break
else:
if config.interwiki_shownew:
- pywikibot.output(u"%s: %s gives new interwiki %s"% (self.originPage, page, linkedPage))
+ pywikibot.output(u"%s: %s gives new interwiki %s"
+ % (self.originPage,
+ page, linkedPage))
# These pages are no longer 'in progress'
self.pending = PageTree()
@@ -1369,7 +1391,7 @@
if page2 is None:
pywikibot.output(u" "*indent + "Given as a hint.")
else:
- pywikibot.output(u" "*indent + page2)
+ pywikibot.output(u" "*indent + unicode(page2))
def assemble(self):
@@ -1384,7 +1406,7 @@
site = page.site
if site == self.originPage.site:
if page != self.originPage:
- self.problem(u"Found link to %s" % page )
+ self.problem(u"Found link to %s" % page)
self.whereReport(page)
errorCount += 1
else:
@@ -1423,7 +1445,8 @@
i = 0
for page2 in pages:
i += 1
- pywikibot.output(u" (%d) Found link to %s in:" % (i, page2))
+ pywikibot.output(u" (%d) Found link to %s in:"
+ % (i, page2))
self.whereReport(page2, indent = 8)
while True:
#TODO: allow answer to repeat previous or go back after a mistake
@@ -1500,7 +1523,8 @@
if not self.workonme:
return
if self.forcedStop: # autonomous with problem
- pywikibot.output(u"======Aborted processing %s======" % self.originPage)
+ pywikibot.output(u"======Aborted processing %s======"
+ % self.originPage)
return
if self.originPage.isRedirectPage():
return
@@ -1518,7 +1542,8 @@
# Assemble list of accepted interwiki links
new = self.assemble()
if new is None: # User said give up
- pywikibot.output(u"======Aborted processing %s======" % self.originPage)
+ pywikibot.output(u"======Aborted processing %s======"
+ % self.originPage)
return
# Make sure new contains every page link, including the page we are processing
@@ -1595,7 +1620,7 @@
old[mypage.site] = mypage
except pywikibot.NoPage:
pywikibot.output(u"BUG>>> %s no longer exists?"
- % new[site].aslink(True))
+ % new[site])
continue
mods, mcomment, adding, removing, modifying \
= compareLanguages(old, new, insite=site)
@@ -1674,7 +1699,6 @@
# In this case only continue on the Page we started with
if page != self.originPage:
raise SaveError(u'-localonly and page != originPage')
-
if page.section():
# This is not a page, but a subpage. Do not edit it.
pywikibot.output(u"Not editing %s: not doing interwiki on subpages"
@@ -1683,12 +1707,10 @@
try:
pagetext = page.get()
except pywikibot.NoPage:
- pywikibot.output(u"Not editing %s: page does not exist"
- % page)
+ pywikibot.output(u"Not editing %s: page does not exist" % page)
raise SaveError(u'Page doesn\'t exist')
if page.isEmpty() and not page.isCategory():
- pywikibot.output(u"Not editing %s: page is empty"
- % page.aslink(True))
+ pywikibot.output(u"Not editing %s: page is empty" % page)
raise SaveError
# clone original newPages dictionary, so that we can modify it to the
@@ -1702,7 +1724,6 @@
ignorepage = pywikibot.Page(page.site, iw.groups()[0])
except (pywikibot.NoSuchSite, ):
continue
-
try:
if (new[ignorepage.site] == ignorepage) and \
(ignorepage.site != page.site):
@@ -1755,7 +1776,7 @@
rmPage = old[rmsite]
#put it to new means don't delete it
if not globalvar.cleanup or \
- rmPage.title(asLink=True, forceInterwiki=True) not in globalvar.remove or \
+ unicode(rmPage) not in globalvar.remove or \
rmPage.site.sitename() == 'wikipedia:hi' and \
page.site.sitename() != 'wikipedia:de': #work-arround for bug #3081100 (do not remove hi-pages)
new[rmsite] = rmPage
@@ -1768,15 +1789,12 @@
insite=page.site)
if not mods:
if not globalvar.quiet:
- pywikibot.output(u'No changes needed on page %s'
- % page.title(asLink=True,
- forceInterwiki=True))
+ pywikibot.output(u'No changes needed on page %s' % page)
return False
# Show a message in purple.
pywikibot.output(
- u"\03{lightpurple}Updating links on page %s.\03{default}"
- % page)
+ u"\03{lightpurple}Updating links on page %s.\03{default}" % page)
pywikibot.output(u"Changes to be made: %s" % mods)
oldtext = page.get()
template = (page.namespace() == 10)
@@ -1789,11 +1807,11 @@
if template:
pywikibot.output(
u'SKIPPING: %s should have interwiki links on subpage.'
- % page.aslink(True))
+ % page)
else:
pywikibot.output(
u'SKIPPING: %s is under construction or to be deleted.'
- % page.aslink(True))
+ % page)
return False
if newtext == oldtext:
return False
@@ -1806,8 +1824,8 @@
# Allow for special case of a self-pointing interwiki link
if removing and removing != [page.site]:
self.problem(u'Found incorrect link to %s in %s'
- % (",".join([x.lang for x in removing]),
- page), createneed=False)
+ % (", ".join([x.lang for x in removing]), page),
+ createneed=False)
ask = True
if globalvar.force or globalvar.cleanup:
ask = False
@@ -1861,9 +1879,7 @@
else:
status, reason, data = page.put(newtext, comment=mcomment)
except pywikibot.LockedPage:
- pywikibot.output(u'Page %s is locked. Skipping.'
- % page.title(asLink=True,
- forceInterwiki=True))
+ pywikibot.output(u'Page %s is locked. Skipping.' % page)
raise SaveError(u'Locked')
except pywikibot.EditConflict:
pywikibot.output(
@@ -1904,7 +1920,7 @@
raise GiveUpOnPage(u'User asked us to give up')
else:
raise LinkMustBeRemoved(u'Found incorrect link to %s in %s'
- % (",".join([x.lang for x in removing]),
+ % (", ".join([x.lang for x in removing]),
page))
def reportBacklinks(self, new, updatedSites):
@@ -1936,22 +1952,25 @@
if expectedPage != page:
try:
linkedPage = linkedPagesDict[expectedPage.site]
- pywikibot.output(u"WARNING: %s: %s does not link to %s but to %s"
- % (page.site.family.name, page,
- expectedPage, linkedPage))
+ pywikibot.output(
+ u"WARNING: %s: %s does not link to %s but to %s"
+ % (page.site.family.name,
+ page, expectedPage, linkedPage))
except KeyError:
- pywikibot.output(u"WARNING: %s: %s does not link to %s"
- % (page.site.family.name, page,
- expectedPage))
+ pywikibot.output(
+ u"WARNING: %s: %s does not link to %s"
+ % (page.site.family.name,
+ page, expectedPage))
# Check for superfluous links
for linkedPage in linkedPages:
if linkedPage not in expectedPages:
# Check whether there is an alternative page on that language.
# In this case, it was already reported above.
if linkedPage.site not in expectedSites:
- pywikibot.output(u"WARNING: %s: %s links to incorrect %s"
- % (page.site.family.name, page,
- linkedPage))
+ pywikibot.output(
+ u"WARNING: %s: %s links to incorrect %s"
+ % (page.site.family.name,
+ page, linkedPage))
except (socket.error, IOError):
pywikibot.output(u'ERROR: could not report backlinks')
@@ -2007,8 +2026,10 @@
PageGenerator"""
fs = self.firstSubject()
if fs and (not globalvar.quiet):
- pywikibot.output(u"NOTE: The first unfinished subject is %s" % fs.originPage)
- pywikibot.output(u"NOTE: Number of pages queued is %d, trying to add %d more."%(len(self.subjects), number))
+ pywikibot.output(u"NOTE: The first unfinished subject is %s"
+ % fs.originPage)
+ pywikibot.output(u"NOTE: Number of pages queued is %d, trying to add %d more."
+ % (len(self.subjects), number))
for i in range(number):
try:
while True:
@@ -2023,7 +2044,7 @@
if globalvar.skipauto:
dictName, year = page.autoFormat()
if dictName is not None:
- pywikibot.output(u'Skipping: %s is an auto entry %s(%s)' % (page,dictName,year))
+ pywikibot.output(u'Skipping: %s is an auto entry %s(%s)' % (page, dictName, year))
continue
if globalvar.parenthesesonly:
# Only yield pages that have ( ) in titles
@@ -2043,7 +2064,7 @@
del tmpl
except KeyError:
pass
- if loc != None and loc in page.title():
+ if loc is not None and loc in page.title():
pywikibot.output(u'Skipping: %s is a templates subpage' % page.title())
continue
break
@@ -2052,7 +2073,7 @@
until = self.generateUntil
if page.site.lang not in page.site.family.nocapitalize:
until = until[0].upper()+until[1:]
- if page.titleWithoutNamespace() > until:
+ if page.title(withNamespace=False) > until:
raise StopIteration
self.add(page, hints = globalvar.hints)
self.generated += 1
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/9700
Revision: 9700
Author: xqt
Date: 2011-10-30 20:22:27 +0000 (Sun, 30 Oct 2011)
Log Message:
-----------
shorten Page.title() with Page.__unicode__(); some updates from rewrite branch
Modified Paths:
--------------
trunk/pywikipedia/interwiki.py
Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py 2011-10-30 20:20:20 UTC (rev 9699)
+++ trunk/pywikipedia/interwiki.py 2011-10-30 20:22:27 UTC (rev 9700)
@@ -823,7 +823,7 @@
this Object.
"""
- def __init__(self, originPage = None, hints = None):
+ def __init__(self, originPage=None, hints=None):
"""Constructor. Takes as arguments the Page on the home wiki
plus optionally a list of hints for translation"""
@@ -916,9 +916,9 @@
pages = titletranslate.translate(self.originPage, hints = ['all:'],
auto = globalvar.auto, removebrackets = globalvar.hintnobracket)
else:
- pages = titletranslate.translate(self.originPage, hints = hints,
+ pages = titletranslate.translate(self.originPage, hints=hints,
auto = globalvar.auto, removebrackets = globalvar.hintnobracket,
- site = pywikibot.getSite() )
+ site = pywikibot.getSite())
for page in pages:
if globalvar.contentsondisk:
page = StoredPage(page)
@@ -982,7 +982,7 @@
if page == self.originPage:
try:
pywikibot.output(u"%s has a backlink from %s."
- % (page.title(asLink=True), linkingPage.title(asLink=True)))
+ % (page, linkingPage))
except UnicodeDecodeError:
pywikibot.output(u"Found a backlink for a page.")
self.makeForcedStop(counter)
@@ -1025,12 +1025,8 @@
return False
if globalvar.autonomous:
pywikibot.output(u"NOTE: Ignoring link from page %s in namespace %i to page %s in namespace %i."
- % (linkingPage.title(asLink=True,
- forceInterwiki=True),
- linkingPage.namespace(),
- linkedPage.title(asLink=True,
- forceInterwiki=True),
- linkedPage.namespace()))
+ % (linkingPage, linkingPage.namespace(),
+ linkedPage, linkedPage.namespace()))
# Fill up foundIn, so that we will not write this notice
self.foundIn[linkedPage] = [linkingPage]
return True
@@ -1038,23 +1034,14 @@
preferredPage = self.getFoundInCorrectNamespace(linkedPage.site())
if preferredPage:
pywikibot.output(u"NOTE: Ignoring link from page %s in namespace %i to page %s in namespace %i because page %s in the correct namespace has already been found."
- % (linkingPage.title(asLink=True,
- forceInterwiki=True),
- linkingPage.namespace(),
- linkedPage.title(asLink=True,
- forceInterwiki=True),
- linkedPage.namespace(),
- preferredPage.title(asLink=True,
- forceInterwiki=True)))
+ % (linkingPage, linkingPage.namespace(), linkedPage,
+ linkedPage.namespace(), preferredPage))
return True
else:
choice = pywikibot.inputChoice(
u'WARNING: %s is in namespace %i, but %s is in namespace %i. Follow it anyway?'
- % (self.originPage.title(asLink=True,
- forceInterwiki=True),
- self.originPage.namespace(),
- linkedPage.title(asLink=True, forceInterwiki=True),
- linkedPage.namespace()),
+ % (self.originPage, self.originPage.namespace(),
+ linkedPage, linkedPage.namespace()),
['Yes', 'No', 'Add an alternative', 'give up'],
['y', 'n', 'a', 'g'])
if choice != 'y':
@@ -1063,7 +1050,8 @@
if choice == 'g':
self.makeForcedStop(counter)
elif choice == 'a':
- newHint = pywikibot.input(u'Give the alternative for language %s, not using a language code:' % linkedPage.site().language())
+ newHint = pywikibot.input(u'Give the alternative for language %s, not using a language code:'
+ % linkedPage.site().language())
if newHint:
alternativePage = pywikibot.Page(linkedPage.site(), newHint)
if alternativePage:
@@ -1072,8 +1060,7 @@
else:
pywikibot.output(
u"NOTE: ignoring %s and its interwiki links"
- % linkedPage.title(asLink=True,
- forceInterwiki=True))
+ % linkedPage)
return True
else:
# same namespaces, no problem
@@ -1083,10 +1070,11 @@
def wiktionaryMismatch(self, page):
if self.originPage and globalvar.same=='wiktionary':
if page.title().lower() != self.originPage.title().lower():
- pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode" % (page.title(asLink=True), self.originPage.title(asLink=True)))
+ pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode" % (page, self.originPage))
return True
elif page.title() != self.originPage.title() and self.originPage.site().nocapitalize and page.site().nocapitalize:
- pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode because both languages are uncapitalized." % (page.title(asLink=True), self.originPage.title(asLink=True)))
+ pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode because both languages are uncapitalized."
+ % (page, self.originPage))
return True
return False
@@ -1109,17 +1097,11 @@
if globalvar.autonomous:
if self.originPage.isDisambig() and not page.isDisambig():
pywikibot.output(u"NOTE: Ignoring link from disambiguation page %s to non-disambiguation %s"
- % (self.originPage.title(asLink=True,
- forceInterwiki=True),
- page.title(asLink=True,
- forceInterwiki=True)))
+ % (self.originPage, page))
return (True, None)
elif not self.originPage.isDisambig() and page.isDisambig():
pywikibot.output(u"NOTE: Ignoring link from non-disambiguation page %s to disambiguation %s"
- % (self.originPage.title(asLink=True,
- forceInterwiki=True),
- page.title(asLink=True,
- forceInterwiki=True)))
+ % (self.originPage, page))
return (True, None)
else:
choice = 'y'
@@ -1128,36 +1110,24 @@
if disambig:
pywikibot.output(
u"NOTE: Ignoring non-disambiguation page %s for %s because disambiguation page %s has already been found."
- % (page.title(asLink=True, forceInterwiki=True),
- self.originPage.title(asLink=True,
- forceInterwiki=True),
- disambig.title(asLink=True, forceInterwiki=True)))
+ % (page, self.originPage, disambig))
return (True, None)
else:
choice = pywikibot.inputChoice(
u'WARNING: %s is a disambiguation page, but %s doesn\'t seem to be one. Follow it anyway?'
- % (self.originPage.title(asLink=True,
- forceInterwiki=True),
- page.title(asLink=True, forceInterwiki=True)),
+ % (self.originPage, page),
['Yes', 'No', 'Add an alternative', 'Give up'],
['y', 'n', 'a', 'g'])
elif not self.originPage.isDisambig() and page.isDisambig():
nondisambig = self.getFoundNonDisambig(page.site())
if nondisambig:
pywikibot.output(u"NOTE: Ignoring disambiguation page %s for %s because non-disambiguation page %s has already been found."
- % (page.title(asLink=True,
- forceInterwiki=True),
- self.originPage.title(asLink=True,
- forceInterwiki=True),
- nondisambig.title(asLink=True,
- forceInterwiki=True)))
+ % (page, self.originPage, nondisambig))
return (True, None)
else:
choice = pywikibot.inputChoice(
u'WARNING: %s doesn\'t seem to be a disambiguation page, but %s is one. Follow it anyway?'
- % (self.originPage.title(asLink=True,
- forceInterwiki=True),
- page.title(asLink=True, forceInterwiki=True)),
+ % (self.originPage, page),
['Yes', 'No', 'Add an alternative', 'Give up'],
['y', 'n', 'a', 'g'])
if choice == 'n':
@@ -1175,22 +1145,22 @@
def isIgnored(self, page):
if page.site().language() in globalvar.neverlink:
- pywikibot.output(u"Skipping link %s to an ignored language" % page.title(asLink=True))
+ pywikibot.output(u"Skipping link %s to an ignored language" % page)
return True
if page in globalvar.ignore:
- pywikibot.output(u"Skipping link %s to an ignored page" % page.title(asLink=True))
+ pywikibot.output(u"Skipping link %s to an ignored page" % page)
return True
return False
def reportInterwikilessPage(self, page):
if not globalvar.quiet or pywikibot.verbose:
pywikibot.output(u"NOTE: %s does not have any interwiki links"
- % self.originPage.title(asLink=True,
- forceInterwiki=True))
+ % self.originPage)
if config.without_interwiki:
f = codecs.open(
- pywikibot.config.datafilepath('without_interwiki.txt'), 'a', 'utf-8')
- f.write(u"# %s \n" % page.title(asLink=True, forceInterwiki=True))
+ pywikibot.config.datafilepath('without_interwiki.txt'),
+ 'a', 'utf-8')
+ f.write(u"# %s \n" % page)
f.close()
def askForHints(self, counter):
@@ -1248,7 +1218,7 @@
if self.originPage:
pywikibot.output(u'WARNING: %s:%s relates to %s:%s, which is an auto entry %s(%s)'
% (self.originPage.site().language(), self.originPage.title(),
- page.site().language(),page.title(),dictName,year))
+ page.site().language(), page.title(), dictName, year))
# Abort processing if the bot is running in autonomous mode.
if globalvar.autonomous:
@@ -1261,12 +1231,10 @@
# todo list.
if not page.exists():
- globalvar.remove.append(page.title(asLink=True,
- forceInterwiki=True))
+ globalvar.remove.append(unicode(page))
if not globalvar.quiet or pywikibot.verbose:
pywikibot.output(u"NOTE: %s does not exist. Skipping."
- % page.title(asLink=True,
- forceInterwiki=True))
+ % page)
if page == self.originPage:
# The page we are working on is the page that does not exist.
# No use in doing any work on it in that case.
@@ -1291,17 +1259,12 @@
# MW considers #redirect [[en:#foo]] as a redirect page,
# but we can't do anything useful with such pages
if not globalvar.quiet or pywikibot.verbose:
- pywikibot.output(u"NOTE: %s redirects to an invalid title"
- % page.title(asLink=True,
- forceInterwiki=True))
+ pywikibot.output(
+ u"NOTE: %s redirects to an invalid title" % page)
continue
if not globalvar.quiet or pywikibot.verbose:
pywikibot.output(u"NOTE: %s is %sredirect to %s"
- % (page.title(asLink=True,
- forceInterwiki=True),
- redir,
- redirectTargetPage.title(asLink=True,
- forceInterwiki=True)))
+ % (page, redir, redirectTargetPage))
if self.originPage is None or page == self.originPage:
# the 1st existig page becomes the origin page, if none was supplied
if globalvar.initialredirect:
@@ -1322,31 +1285,27 @@
self.todo = PageTree()
elif not globalvar.followredirect:
if not globalvar.quiet or pywikibot.verbose:
- pywikibot.output(u"NOTE: not following %sredirects." % redir)
+ pywikibot.output(u"NOTE: not following %sredirects."
+ % redir)
elif page.isStaticRedirect():
if not globalvar.quiet or pywikibot.verbose:
- pywikibot.output(u"NOTE: not following static %sredirects." % redir)
+ pywikibot.output(
+ u"NOTE: not following static %sredirects." % redir)
elif page.site().family == redirectTargetPage.site().family \
and not self.skipPage(page, redirectTargetPage, counter):
if self.addIfNew(redirectTargetPage, counter, page):
if config.interwiki_shownew or pywikibot.verbose:
pywikibot.output(u"%s: %s gives new %sredirect %s"
- % (self.originPage.title(asLink=True),
- page.title(asLink=True, forceInterwiki=True),
- redir,
- redirectTargetPage.title(asLink=True,
- forceInterwiki=True)))
+ % (self.originPage, page, redir,
+ redirectTargetPage))
continue
# must be behind the page.isRedirectPage() part
# otherwise a redirect error would be raised
elif page.isEmpty() and not page.isCategory():
- globalvar.remove.append(page.title(asLink=True,
- forceInterwiki=True))
+ globalvar.remove.append(unicode(page))
if not globalvar.quiet or pywikibot.verbose:
- pywikibot.output(u"NOTE: %s is empty. Skipping."
- % page.title(asLink=True,
- forceInterwiki=True))
+ pywikibot.output(u"NOTE: %s is empty. Skipping." % page)
if page == self.originPage:
for site, count in self.todo.siteCounts():
counter.minus(site, count)
@@ -1358,8 +1317,7 @@
elif page.section():
if not globalvar.quiet or pywikibot.verbose:
pywikibot.output(u"NOTE: %s is a page section. Skipping."
- % page.title(asLink=True,
- forceInterwiki=True))
+ % page)
continue
# Page exists, isnt a redirect, and is a plain link (no section)
@@ -1376,7 +1334,7 @@
(skip, alternativePage) = self.disambigMismatch(page, counter)
if skip:
pywikibot.output(u"NOTE: ignoring %s and its interwiki links"
- % page.title(asLink=True, forceInterwiki=True))
+ % page)
self.done.remove(page)
iw = ()
if alternativePage:
@@ -1404,21 +1362,14 @@
elif globalvar.autonomous and duplicate and not skip:
pywikibot.output(u"Stopping work on %s because duplicate pages"\
- " %s and %s are found" % (self.originPage.title(asLink=True,
- forceInterwiki=True),
- duplicate.title(asLink=True,
- forceInterwiki=True),
- page.title(asLink=True,
- forceInterwiki=True)))
+ " %s and %s are found" % (self.originPage, duplicate, page))
self.makeForcedStop(counter)
try:
f = codecs.open(
pywikibot.config.datafilepath('autonomous_problems.dat'),
'a', 'utf-8')
f.write(u"* %s {Found more than one link for %s}"
- % (self.originPage.title(asLink=True,
- forceInterwiki=True),
- page.site()))
+ % (self.originPage, page.site()))
if config.interwiki_graph and config.interwiki_graph_url:
filename = interwiki_graph.getFilename(self.originPage, extension = config.interwiki_graph_formats[0])
f.write(u" [%s%s graph]" % (config.interwiki_graph_url, filename))
@@ -1432,12 +1383,10 @@
sys.exit()
iw = ()
elif page.isEmpty() and not page.isCategory():
- globalvar.remove.append(page.title(asLink=True,
- forceInterwiki=True))
+ globalvar.remove.append(unicode(page))
if not globalvar.quiet or pywikibot.verbose:
pywikibot.output(u"NOTE: %s is empty; ignoring it and its interwiki links"
- % page.title(asLink=True,
- forceInterwiki=True))
+ % page)
# Ignore the interwiki links
self.done.remove(page)
iw = ()
@@ -1446,11 +1395,7 @@
if globalvar.hintsareright:
if linkedPage.site in self.hintedsites:
pywikibot.output(u"NOTE: %s: %s extra interwiki on hinted site ignored %s"
- % (self.originPage.title(asLink=True),
- page.title(asLink=True,
- forceInterwiki=True),
- linkedPage.title(asLink=True,
- forceInterwiki=True)))
+ % (self.originPage, page, linkedPage))
break
if not self.skipPage(page, linkedPage, counter):
if globalvar.followinterwiki or page == self.originPage:
@@ -1463,21 +1408,14 @@
# Still, this could be "no problem" as either may be a
# redirect to the other. No way to find out quickly!
pywikibot.output(u"NOTE: %s: %s gives duplicate interwiki on same site %s"
- % (self.originPage.title(asLink=True,
- forceInterwiki=True),
- page.title(asLink=True,
- forceInterwiki=True),
- linkedPage.title(asLink=True,
- forceInterwiki=True)))
+ % (self.originPage, page,
+ linkedPage))
break
else:
if config.interwiki_shownew or pywikibot.verbose:
pywikibot.output(u"%s: %s gives new interwiki %s"
- % (self.originPage.title(asLink=True),
- page.title(asLink=True,
- forceInterwiki=True),
- linkedPage.title(asLink=True,
- forceInterwiki=True)))
+ % (self.originPage,
+ page, linkedPage))
if self.forcedStop:
break
# These pages are no longer 'in progress'
@@ -1503,8 +1441,7 @@
if page2 is None:
pywikibot.output(u" "*indent + "Given as a hint.")
else:
- pywikibot.output(u" "*indent + page2.title(asLink=True,
- forceInterwiki=True))
+ pywikibot.output(u" "*indent + unicode(page2))
def assemble(self):
@@ -1522,9 +1459,7 @@
continue # inhibit the forwarding families pages to be updated.
if site == self.originPage.site():
if page != self.originPage:
- self.problem(u"Found link to %s"
- % page.title(asLink=True,
- forceInterwiki=True))
+ self.problem(u"Found link to %s" % page)
self.whereReport(page)
errorCount += 1
else:
@@ -1564,8 +1499,7 @@
for page2 in pages:
i += 1
pywikibot.output(u" (%d) Found link to %s in:"
- % (i, page2.title(asLink=True,
- forceInterwiki=True)))
+ % (i, page2))
self.whereReport(page2, indent = 8)
while True:
#TODO: allow answer to repeat previous or go back after a mistake
@@ -1593,9 +1527,7 @@
if not acceptall:
pywikibot.output(u"=" * 30)
page2 = pages[0]
- pywikibot.output(u"Found link to %s in:"
- % page2.title(asLink=True,
- forceInterwiki=True))
+ pywikibot.output(u"Found link to %s in:" % page2)
self.whereReport(page2, indent = 4)
while True:
if acceptall:
@@ -1654,8 +1586,7 @@
return
if self.forcedStop: # autonomous with problem
pywikibot.output(u"======Aborted processing %s======"
- % self.originPage.title(asLink=True,
- forceInterwiki=True))
+ % self.originPage)
return
# The following check is not always correct and thus disabled.
# self.done might contain no interwiki links because of the -neverlink
@@ -1663,19 +1594,16 @@
# if len(self.done) == 1:
# # No interwiki at all
# return
- pywikibot.output(u"======Post-processing %s======"
- % self.originPage.title(asLink=True,
- forceInterwiki=True))
+ pywikibot.output(u"======Post-processing %s======" % self.originPage)
# Assemble list of accepted interwiki links
new = self.assemble()
if new is None: # User said give up
pywikibot.output(u"======Aborted processing %s======"
- % self.originPage.title(asLink=True,
- forceInterwiki=True))
+ % self.originPage)
return
# Make sure new contains every page link, including the page we are processing
- # TODO: sould be move to assemble()
+ # TODO: should be move to assemble()
# replaceLinks will skip the site it's working on.
if self.originPage.site() not in new:
if not self.originPage.site().family.interwiki_forward: #TODO: make this possible as well.
@@ -1716,8 +1644,7 @@
old[page.site()] = page
except pywikibot.NoPage:
pywikibot.output(u"BUG>>> %s no longer exists?"
- % new[site].title(asLink=True,
- forceInterwiki=True))
+ % new[site])
continue
mods, mcomment, adding, removing, modifying \
= compareLanguages(old, new, insite = lclSite)
@@ -1750,8 +1677,7 @@
old[mypage.site()] = mypage
except pywikibot.NoPage:
pywikibot.output(u"BUG>>> %s no longer exists?"
- % new[site].title(asLink=True,
- forceInterwiki=True))
+ % new[site])
continue
mods, mcomment, adding, removing, modifying \
= compareLanguages(old, new, insite=site)
@@ -1774,7 +1700,8 @@
if diff > 30*24*60:
smallWikiAllowed = True
else:
- pywikibot.output( u'NOTE: number of edits are restricted at %s'
+ pywikibot.output(
+u'NOTE: number of edits are restricted at %s'
% page.site().sitename())
# if we have an account for this site
@@ -1828,24 +1755,21 @@
if globalvar.localonly:
# In this case only continue on the Page we started with
if page != self.originPage:
- raise SaveError
- if page.title() != page.sectionFreeTitle():
+ raise SaveError(u'-localonly and page != originPage')
+ if page.section():
# This is not a page, but a subpage. Do not edit it.
pywikibot.output(u"Not editing %s: not doing interwiki on subpages"
- % page.title(asLink=True, forceInterwiki=True))
- raise SaveError
+ % page)
+ raise SaveError(u'Link has a #section')
try:
pagetext = page.get()
except pywikibot.NoPage:
- pywikibot.output(u"Not editing %s: page does not exist"
- % page.title(asLink=True, forceInterwiki=True))
- raise SaveError
+ pywikibot.output(u"Not editing %s: page does not exist" % page)
+ raise SaveError(u'Page doesn\'t exist')
if page.isEmpty() and not page.isCategory():
- pywikibot.output(u"Not editing %s: page is empty"
- % page.title(asLink=True, forceInterwiki=True))
+ pywikibot.output(u"Not editing %s: page is empty" % page)
raise SaveError
-
# clone original newPages dictionary, so that we can modify it to the
# local page's needs
new = dict(newPages)
@@ -1863,14 +1787,14 @@
if (ignorepage not in interwikis):
pywikibot.output(
u"Ignoring link to %(to)s for %(from)s"
- % {'to': ignorepage.title(asLink=True),
- 'from': page.title(asLink=True)})
+ % {'to': ignorepage,
+ 'from': page})
new.pop(ignorepage.site())
else:
pywikibot.output(
u"NOTE: Not removing interwiki from %(from)s to %(to)s (exists both commented and non-commented)"
- % {'to': ignorepage.title(asLink=True),
- 'from': page.title(asLink=True)})
+ % {'to': ignorepage,
+ 'from': page})
except KeyError:
pass
@@ -1879,12 +1803,11 @@
pltmp = new[page.site()]
if pltmp != page:
s = u"None"
- if pltmp is not None: s = pltmp.title(asLink=True,
- forceInterwiki=True)
+ if pltmp is not None: s = pltmp
pywikibot.output(
u"BUG>>> %s is not in the list of new links! Found %s."
- % (page.title(asLink=True, forceInterwiki=True), s))
- raise SaveError
+ % (page, s))
+ raise SaveError(u'BUG: sanity check failed')
# Avoid adding an iw link back to itself
del new[page.site()]
@@ -1919,28 +1842,25 @@
#put it to new means don't delete it
if not globalvar.cleanup and not globalvar.force or \
globalvar.cleanup and \
- rmPage.title(asLink=True,
- forceInterwiki=True) not in globalvar.remove or \
+ unicode(rmPage) not in globalvar.remove or \
rmPage.site().lang in ['hak', 'hi', 'cdo'] and \
pywikibot.unicode_error: #work-arround for bug #3081100 (do not remove hi-pages)
new[rmsite] = rmPage
pywikibot.output(
u"WARNING: %s is either deleted or has a mismatching disambiguation state."
- % rmPage.title(asLink=True, forceInterwiki=True))
+ % rmPage)
# Re-Check what needs to get done
mods, mcomment, adding, removing, modifying = compareLanguages(old,
new,
insite=page.site())
if not mods:
if not globalvar.quiet or pywikibot.verbose:
- pywikibot.output(u'No changes needed on page %s'
- % page.title(asLink=True, forceInterwiki=True))
+ pywikibot.output(u'No changes needed on page %s' % page)
return False
# Show a message in purple.
pywikibot.output(
- u"\03{lightpurple}Updating links on page %s.\03{default}"
- % page.title(asLink=True, forceInterwiki=True))
+ u"\03{lightpurple}Updating links on page %s.\03{default}" % page)
pywikibot.output(u"Changes to be made: %s" % mods)
oldtext = page.get()
template = (page.namespace() == 10)
@@ -1953,25 +1873,24 @@
if template:
pywikibot.output(
u'SKIPPING: %s should have interwiki links on subpage.'
- % page.title(asLink=True, forceInterwiki=True))
+ % page)
else:
pywikibot.output(
u'SKIPPING: %s is under construction or to be deleted.'
- % page.title(asLink=True, forceInterwiki=True))
+ % page)
return False
if newtext == oldtext:
return False
pywikibot.showDiff(oldtext, newtext)
- # pywikibot.output(u"NOTE: Replace %s" % page.title(asLink=True))
+ # pywikibot.output(u"NOTE: Replace %s" % page)
# Determine whether we need permission to submit
ask = False
# Allow for special case of a self-pointing interwiki link
if removing and removing != [page.site()]:
self.problem(u'Found incorrect link to %s in %s'
- % (", ".join([x.lang for x in removing]),
- page.title(asLink=True, forceInterwiki=True)),
+ % (", ".join([x.lang for x in removing]), page),
createneed=False)
if pywikibot.unicode_error:
for x in removing:
@@ -2032,22 +1951,20 @@
else:
status, reason, data = page.put(newtext, comment=mcomment)
except pywikibot.LockedPage:
- pywikibot.output(u'Page %s is locked. Skipping.'
- % page.title(asLink=True,
- forceInterwiki=True))
- raise SaveError
+ pywikibot.output(u'Page %s is locked. Skipping.' % page)
+ raise SaveError(u'Locked')
except pywikibot.EditConflict:
pywikibot.output(
u'ERROR putting page: An edit conflict occurred. Giving up.')
- raise SaveError
+ raise SaveError(u'Edit conflict')
except (pywikibot.SpamfilterError), error:
pywikibot.output(
u'ERROR putting page: %s blacklisted by spamfilter. Giving up.'
% (error.url,))
- raise SaveError
+ raise SaveError(u'Spam filter')
except (pywikibot.PageNotSaved), error:
pywikibot.output(u'ERROR putting page: %s' % (error.args,))
- raise SaveError
+ raise SaveError(u'PageNotSaved')
except (socket.error, IOError), error:
if timeout>3600:
raise
@@ -2072,12 +1989,11 @@
pywikibot.output(u'%s %s' % (status, reason))
return False
elif answer == 'g':
- raise GiveUpOnPage
+ raise GiveUpOnPage(u'User asked us to give up')
else:
raise LinkMustBeRemoved(u'Found incorrect link to %s in %s'
% (", ".join([x.lang for x in removing]),
- page.title(asLink=True,
- forceInterwiki=True)))
+ page))
def reportBacklinks(self, new, updatedSites):
"""
@@ -2097,7 +2013,7 @@
try:
linkedPages = set(page.interwiki())
except pywikibot.NoPage:
- pywikibot.output(u"WARNING: Page %s does no longer exist?!" % page.title())
+ pywikibot.output(u"WARNING: Page %s does no longer exist?!" % page)
break
# To speed things up, create a dictionary which maps sites to pages.
# This assumes that there is only one interwiki link per language.
@@ -2108,33 +2024,25 @@
if expectedPage != page:
try:
linkedPage = linkedPagesDict[expectedPage.site()]
- pywikibot.output(u"WARNING: %s: %s does not link to %s but to %s"
- % (page.site().family.name,
- page.title(asLink=True,
- forceInterwiki=True),
- expectedPage.title(asLink=True,
- forceInterwiki=True),
- linkedPage.title(asLink=True,
- forceInterwiki=True)))
+ pywikibot.output(
+ u"WARNING: %s: %s does not link to %s but to %s"
+ % (page.site().family.name,
+ page, expectedPage, linkedPage))
except KeyError:
- pywikibot.output(u"WARNING: %s: %s does not link to %s"
- % (page.site().family.name,
- page.title(asLink=True,
- forceInterwiki=True),
- expectedPage.title(asLink=True,
- forceInterwiki=True)))
+ pywikibot.output(
+ u"WARNING: %s: %s does not link to %s"
+ % (page.site().family.name,
+ page, expectedPage))
# Check for superfluous links
for linkedPage in linkedPages:
if linkedPage not in expectedPages:
# Check whether there is an alternative page on that language.
# In this case, it was already reported above.
if linkedPage.site() not in expectedSites:
- pywikibot.output(u"WARNING: %s: %s links to incorrect %s"
- % (page.site().family.name,
- page.title(asLink=True,
- forceInterwiki=True),
- linkedPage.title(asLink=True,
- forceInterwiki=True)))
+ pywikibot.output(
+ u"WARNING: %s: %s links to incorrect %s"
+ % (page.site().family.name,
+ page, linkedPage))
except (socket.error, IOError):
pywikibot.output(u'ERROR: could not report backlinks')
@@ -2191,8 +2099,7 @@
fs = self.firstSubject()
if fs and (not globalvar.quiet or pywikibot.verbose):
pywikibot.output(u"NOTE: The first unfinished subject is %s"
- % fs.originPage.title(asLink=True,
- forceInterwiki=True))
+ % fs.originPage)
pywikibot.output(u"NOTE: Number of pages queued is %d, trying to add %d more."
% (len(self.subjects), number))
for i in range(number):
@@ -2204,19 +2111,19 @@
pywikibot.output(u'IOError occured; skipping')
continue
if page in globalvar.skip:
- pywikibot.output(u'Skipping: %s is in the skip list' % page.title())
+ pywikibot.output(u'Skipping: %s is in the skip list' % page)
continue
if globalvar.skipauto:
dictName, year = page.autoFormat()
if dictName is not None:
- pywikibot.output(u'Skipping: %s is an auto entry %s(%s)' % (page.title(),dictName,year))
+ pywikibot.output(u'Skipping: %s is an auto entry %s(%s)' % (page, dictName, year))
continue
if globalvar.parenthesesonly:
# Only yield pages that have ( ) in titles
if "(" not in page.title():
continue
if page.isTalkPage():
- pywikibot.output(u'Skipping: %s is a talk page' % page.title())
+ pywikibot.output(u'Skipping: %s is a talk page' % page)
continue
#doesn't work: page must be preloaded for this test
#if page.isEmpty():
@@ -2229,7 +2136,7 @@
del tmpl
except KeyError:
pass
- if loc != None and loc in page.title():
+ if loc is not None and loc in page.title():
pywikibot.output(u'Skipping: %s is a templates subpage' % page.title())
continue
break
@@ -2395,7 +2302,7 @@
if not globalvar.summary and \
len(adding) + len(removing) + len(modifying) <= 3:
# Use an extended format for the string linking to all added pages.
- fmt = lambda d, site: d[site].title(asLink=True, forceInterwiki=True)
+ fmt = lambda d, site: unicode(d[site])
else:
# Use short format, just the language code
fmt = lambda d, site: site.lang
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/9699
Revision: 9699
Author: xqt
Date: 2011-10-30 20:20:20 +0000 (Sun, 30 Oct 2011)
Log Message:
-----------
singleton comparison (PEP 8)
Modified Paths:
--------------
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2011-10-30 17:27:49 UTC (rev 9698)
+++ trunk/pywikipedia/wikipedia.py 2011-10-30 20:20:20 UTC (rev 9699)
@@ -2916,7 +2916,7 @@
@param total: iterate no more than this number of revisions in total
"""
- if total == None:
+ if total is None:
total = 500 #set to default of getVersionHistory
edits = self.getVersionHistory(revCount=total)
users = set([edit[2] for edit in edits])
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/9698
Revision: 9698
Author: xqt
Date: 2011-10-30 17:27:49 +0000 (Sun, 30 Oct 2011)
Log Message:
-----------
shorten Page.title() with Page.__unicode__()
Modified Paths:
--------------
trunk/pywikipedia/archivebot.py
trunk/pywikipedia/featured.py
trunk/pywikipedia/imagetransfer.py
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/archivebot.py
===================================================================
--- trunk/pywikipedia/archivebot.py 2011-10-30 17:26:49 UTC (rev 9697)
+++ trunk/pywikipedia/archivebot.py 2011-10-30 17:27:49 UTC (rev 9698)
@@ -591,8 +591,7 @@
Archiver.run()
time.sleep(10)
except:
- pywikibot.output(u'Error occured while processing page %s'
- % pg.title(asLink=True, forceInterwiki=True))
+ pywikibot.output(u'Error occured while processing page %s' % pg)
traceback.print_exc()
if __name__ == '__main__':
Modified: trunk/pywikipedia/featured.py
===================================================================
--- trunk/pywikipedia/featured.py 2011-10-30 17:26:49 UTC (rev 9697)
+++ trunk/pywikipedia/featured.py 2011-10-30 17:27:49 UTC (rev 9698)
@@ -474,10 +474,8 @@
continue
site = pywikibot.getSite()
comment = pywikibot.setAction(
- i18n.twtranslate(
- site, 'featured-' + pType,
- {'page': a.title(
- asLink=True, forceInterwiki=True)}))
+ i18n.twtranslate(site, 'featured-' + pType,
+ {'page': unicode(a)}))
### Moving {{Link FA|xx}} to top of interwikis ###
if template_on_top == True:
# Getting the interwiki
@@ -521,10 +519,8 @@
continue
site = pywikibot.getSite()
comment = pywikibot.setAction(
- i18n.twtranslate(
- site, 'featured-former',
- {'page': a.title(
- asLink=True, forceInterwiki=True)}))
+ i18n.twtranslate(site, 'featured-former',
+ {'page': unicode(a)}))
text = re.sub(re_Link_FA,'',text)
if not dry:
try:
Modified: trunk/pywikipedia/imagetransfer.py
===================================================================
--- trunk/pywikipedia/imagetransfer.py 2011-10-30 17:26:49 UTC (rev 9697)
+++ trunk/pywikipedia/imagetransfer.py 2011-10-30 17:27:49 UTC (rev 9698)
@@ -200,9 +200,7 @@
description += '\n\n' + sourceImagePage.getFileVersionHistoryTable()
# add interwiki link
if sourceSite.family == self.targetSite.family:
- description += "\r\n\r\n" + \
- sourceImagePage.title(asLink=True,
- forceInterwiki=True)
+ description += "\r\n\r\n" + unicode(sourceImagePage)
except pywikibot.NoPage:
description=''
print "Image does not exist or description page is empty."
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2011-10-30 17:26:49 UTC (rev 9697)
+++ trunk/pywikipedia/wikipedia.py 2011-10-30 17:27:49 UTC (rev 9698)
@@ -715,9 +715,7 @@
raise
except UserBlocked:
if self.site().loggedInAs(sysop=sysop):
- raise UserBlocked(self.site(),
- self.title(asLink=True,
- forceInterwiki=True))
+ raise UserBlocked(self.site(), unicode(self))
else:
if verbose:
output("The IP address is blocked, retry by login.")
@@ -772,8 +770,7 @@
pageInfo = data['query']['pages'].values()[0]
if data['query']['pages'].keys()[0] == "-1":
if 'missing' in pageInfo:
- raise NoPage(self.site(),
- self.title(asLink=True, forceInterwiki=True),
+ raise NoPage(self.site(), unicode(self),
"Page does not exist. In rare cases, if you are certain the page does exist, look into overriding family.RversionTab")
elif 'invalid' in pageInfo:
raise BadTitle('BadTitle: %s' % self)
@@ -869,31 +866,26 @@
if self.site().mediawiki_message('whitelistedittitle') in text:
raise NoPage(u'Page editing is forbidden for anonymous users.')
elif self.site().has_mediawiki_message('nocreatetitle') and self.site().mediawiki_message('nocreatetitle') in text:
- raise NoPage(self.site(), self.title(asLink=True,
- forceInterwiki=True))
+ raise NoPage(self.site(), unicode(self))
# Bad title
elif 'var wgPageName = "Special:Badtitle";' in text \
or self.site().mediawiki_message('badtitle') in text:
raise BadTitle('BadTitle: %s' % self)
# find out if the username or IP has been blocked
elif self.site().isBlocked():
- raise UserBlocked(self.site(),
- self.title(asLink=True,
- forceInterwiki=True))
+ raise UserBlocked(self.site(), unicode(self))
# If there is no text area and the heading is 'View Source'
# but user is not blocked, the page does not exist, and is
# locked
elif self.site().mediawiki_message('viewsource') in text:
- raise NoPage(self.site(), self.title(asLink=True,
- forceInterwiki=True))
+ raise NoPage(self.site(), unicode(self))
# Some of the newest versions don't have a "view source" tag for
# non-existant pages
# Check also the div class because if the language is not english
# the bot can not seeing that the page is blocked.
elif self.site().mediawiki_message('badaccess') in text or \
"<div class=\"permissions-errors\">" in text:
- raise NoPage(self.site(), self.title(asLink=True,
- forceInterwiki=True))
+ raise NoPage(self.site(), unicode(self))
elif config.retry_on_fail:
if "<title>Wikimedia Error</title>" in text:
output( u"Wikimedia has technical problems; will retry in %i minutes." % retry_idle_time)
@@ -947,8 +939,7 @@
RversionTab = re.compile(r'<li id="ca-history"><a href=".*?title=.*?&action=history".*?>.*?</a></li>', re.DOTALL)
matchVersionTab = RversionTab.search(text)
if not matchVersionTab and not self.site().family.name == 'wikitravel':
- raise NoPage(self.site(),
- self.title(asLink=True, forceInterwiki=True),
+ raise NoPage(self.site(), unicode(self),
"Page does not exist. In rare cases, if you are certain the page does exist, look into overriding family.RversionTab" )
# Look if the page is on our watchlist
matchWatching = Rwatchlist.search(text)
@@ -2715,8 +2706,7 @@
pageInfo = result['query']['pages'].values()[0]
if result['query']['pages'].keys()[0] == "-1":
if 'missing' in pageInfo:
- raise NoPage(self.site(), self.title(asLink=True,
- forceInterwiki=True),
+ raise NoPage(self.site(), unicode(self),
"Page does not exist.")
elif 'invalid' in pageInfo:
raise BadTitle('BadTitle: %s' % self)
@@ -2885,8 +2875,7 @@
pageInfo = result['query']['pages'].values()[0]
if result['query']['pages'].keys()[0] == "-1":
if 'missing' in pageInfo:
- raise NoPage(self.site(), self.title(asLink=True,
- forceInterwiki=True),
+ raise NoPage(self.site(), unicode(self),
"Page does not exist.")
elif 'invalid' in pageInfo:
raise BadTitle('BadTitle: %s' % self)
@@ -3803,8 +3792,7 @@
self._local = pageInfo["imagerepository"] != "shared"
if data['query']['pages'].keys()[0] == "-1":
if 'missing' in pageInfo and self._local:
- raise NoPage(self.site(),
- self.title(asLink=True, forceInterwiki=True),
+ raise NoPage(self.site(), unicode(self),
"Page does not exist.")
elif 'invalid' in pageInfo:
raise BadTitle('BadTitle: %s' % self)