http://www.mediawiki.org/wiki/Special:Code/pywikipedia/9683
Revision: 9683 Author: xqt Date: 2011-10-30 10:50:42 +0000 (Sun, 30 Oct 2011) Log Message: ----------- update from rewrite branch: replace deprecated Page.aslink() with Page.title(asLink=True)
Modified Paths: -------------- trunk/pywikipedia/archivebot.py trunk/pywikipedia/copyright.py trunk/pywikipedia/copyright_clean.py trunk/pywikipedia/copyright_put.py trunk/pywikipedia/djvutext.py trunk/pywikipedia/imagetransfer.py trunk/pywikipedia/inline_images.py trunk/pywikipedia/interwiki.py trunk/pywikipedia/patrol.py trunk/pywikipedia/pywikibot/textlib.py trunk/pywikipedia/revertbot.py trunk/pywikipedia/unusedfiles.py trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/archivebot.py =================================================================== --- trunk/pywikipedia/archivebot.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/archivebot.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -592,7 +592,7 @@ time.sleep(10) except: pywikibot.output(u'Error occured while processing page %s' - % pg.aslink(True)) + % pg.title(asLink=True, forceInterwiki=True)) traceback.print_exc()
if __name__ == '__main__':
Modified: trunk/pywikipedia/copyright.py =================================================================== --- trunk/pywikipedia/copyright.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/copyright.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -324,14 +324,16 @@ download = force_update try: if not os.path.exists(path): - print 'Creating file '%s' (%s)' % (pywikibot.config.shortpath(path), - page.aslink()) + print 'Creating file '%s' (%s)' \ + % (pywikibot.config.shortpath(path), + page.title(asLink=True)) download = True else: file_age = time.time() - os.path.getmtime(path) if download or file_age > 24 * 60 * 60: - print 'Updating file '%s' (%s)' % ( - pywikibot.config.shortpath(path), page.aslink()) + print 'Updating file '%s' (%s)' \ + % (pywikibot.config.shortpath(path), + page.title(asLink=True)) download = True except OSError: raise @@ -1040,17 +1042,20 @@ continue except pywikibot.IsRedirectPage: newpage = page.getRedirectTarget() - pywikibot.output(u'Page %s redirects to '%s'' % (page.aslink(), newpage.title())) + pywikibot.output(u'Page %s redirects to '%s'' + % (page.title(asLink=True), newpage.title())) bot = CheckRobot(iter([newpage,])) bot.run() continue except pywikibot.SectionError: - error("Page %s has no section %s" % (page.title(), page.section())) + error("Page %s has no section %s" + % (page.title(), page.section())) continue
if skip_disambig: if page.isDisambig(): - pywikibot.output(u'Page %s is a disambiguation page' % page.aslink()) + pywikibot.output(u'Page %s is a disambiguation page' + % page.title(asLink=True)) continue
pywikibot.output(page.title())
Modified: trunk/pywikipedia/copyright_clean.py =================================================================== --- trunk/pywikipedia/copyright_clean.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/copyright_clean.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -98,7 +98,7 @@
for page in gen: data = page.get() - pywikibot.output(page.aslink()) + pywikibot.output(page.title(asLink=True)) output = ''
#
Modified: trunk/pywikipedia/copyright_put.py =================================================================== --- trunk/pywikipedia/copyright_put.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/copyright_put.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -146,7 +146,8 @@ sn = len(data) gnt += gn ; ynt += yn ; mnt += mn ; ent += en ; snt += sn if en > 0: - output += u"|%s||%s||%s KB||%s||%s||%s\n|-\n" % (page.aslink(), en, sn / 1024, gn, yn, mn) + output += u"|%s||%s||%s KB||%s||%s||%s\n|-\n" \ + % (page.title(asLink=True), en, sn / 1024, gn, yn, mn) output += u"""| |||||||| |- |'''%s'''||%s||%s KB||%s||%s||%s
Modified: trunk/pywikipedia/djvutext.py =================================================================== --- trunk/pywikipedia/djvutext.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/djvutext.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -142,7 +142,8 @@ ask = True old_text = page.get() if old_text == text: - pywikibot.output(u"No changes were needed on %s" % page.aslink()) + pywikibot.output(u"No changes were needed on %s" + % page.title(asLink=True)) return else: old_text = '' @@ -162,7 +163,8 @@ # Save the page page.put_async(text) except pywikibot.LockedPage: - pywikibot.output(u"Page %s is locked; skipping." % page.aslink()) + pywikibot.output(u"Page %s is locked; skipping." + % page.title(asLink=True)) except pywikibot.EditConflict: pywikibot.output(u'Skipping %s because of edit conflict' % (page.title())) except pywikibot.SpamfilterError, error: @@ -215,7 +217,8 @@ u"%s:%s" % (index_namespace, index)) if not index_page.exists(): raise pywikibot.NoPage(u"Page '%s' does not exist" % index) - pywikibot.output(u"uploading text from %s to %s" % (djvu, index_page.aslink()) ) + pywikibot.output(u"uploading text from %s to %s" + % (djvu, index_page.title(asLink=True)) ) bot = DjVuTextBot(djvu, index, pages, ask, dry) if not bot.has_text(): raise ValueError("No text layer in djvu file")
Modified: trunk/pywikipedia/imagetransfer.py =================================================================== --- trunk/pywikipedia/imagetransfer.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/imagetransfer.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -200,7 +200,9 @@ description += '\n\n' + sourceImagePage.getFileVersionHistoryTable() # add interwiki link if sourceSite.family == self.targetSite.family: - description += "\r\n\r\n" + sourceImagePage.aslink(forceInterwiki = True) + description += "\r\n\r\n" + \ + sourceImagePage.title(asLink=True, + forceInterwiki=True) except pywikibot.NoPage: description='' print "Image does not exist or description page is empty." @@ -215,24 +217,33 @@ verifyDescription = not self.keep_name) # try to upload targetFilename = bot.run() - if targetFilename and self.targetSite.family.name == 'commons' and self.targetSite.lang == 'commons': + if targetFilename and self.targetSite.family.name == 'commons' and \ + self.targetSite.lang == 'commons': # upload to Commons was successful reason = pywikibot.translate(sourceSite, nowCommonsMessage) # try to delete the original image if we have a sysop account - if sourceSite.family.name in config.sysopnames and sourceSite.lang in config.sysopnames[sourceSite.family.name]: + if sourceSite.family.name in config.sysopnames and \ + sourceSite.lang in config.sysopnames[sourceSite.family.name]: if sourceImagePage.delete(reason): return - if sourceSite.lang in nowCommonsTemplate and sourceSite.family.name in config.usernames and sourceSite.lang in config.usernames[sourceSite.family.name]: + if sourceSite.lang in nowCommonsTemplate and \ + sourceSite.family.name in config.usernames and \ + sourceSite.lang in config.usernames[sourceSite.family.name]: # add the nowCommons template. 
- pywikibot.output(u'Adding nowCommons template to %s' % sourceImagePage.title()) - sourceImagePage.put(sourceImagePage.get() + '\n\n' + nowCommonsTemplate[sourceSite.lang] % targetFilename, comment = nowCommonsMessage[sourceSite.lang]) + pywikibot.output(u'Adding nowCommons template to %s' + % sourceImagePage.title()) + sourceImagePage.put(sourceImagePage.get() + '\n\n' + + nowCommonsTemplate[sourceSite.lang] + % targetFilename, + comment=nowCommonsMessage[sourceSite.lang])
def showImageList(self, imagelist): for i in range(len(imagelist)): image = imagelist[i] #sourceSite = sourceImagePage.site() print "-"*60 - pywikibot.output(u"%s. Found image: %s"% (i, image.aslink())) + pywikibot.output(u"%s. Found image: %s" + % (i, image.title(asLink=True))) try: # Show the image description page's contents pywikibot.output(image.get(throttle=False)) @@ -242,10 +253,12 @@ # to upload anyway, using another name. try: # Maybe the image is on the target site already - targetTitle = '%s:%s' % (self.targetSite.image_namespace(), image.title().split(':', 1)[1]) + targetTitle = '%s:%s' % (self.targetSite.image_namespace(), + image.title().split(':', 1)[1]) targetImage = pywikibot.Page(self.targetSite, targetTitle) targetImage.get(throttle=False) - pywikibot.output(u"Image with this name is already on %s." % self.targetSite) + pywikibot.output(u"Image with this name is already on %s." + % self.targetSite) print "-"*60 pywikibot.output(targetImage.get(throttle=False)) sys.exit() @@ -253,7 +266,8 @@ # That's the normal case pass except pywikibot.IsRedirectPage: - pywikibot.output(u"Description page on target wiki is redirect?!") + pywikibot.output( + u"Description page on target wiki is redirect?!")
except pywikibot.NoPage: break @@ -264,7 +278,7 @@ if self.interwiki: imagelist = [] for linkedPage in page.interwiki(): - imagelist += linkedPage.imagelinks(followRedirects = True) + imagelist += linkedPage.imagelinks(followRedirects=True) elif page.isImage(): imagePage = pywikibot.ImagePage(page.site(), page.title()) imagelist = [imagePage] @@ -277,14 +291,16 @@ # no need to query the user, only one possibility todo = 0 else: - pywikibot.output(u"Give the number of the image to transfer.") + pywikibot.output( + u"Give the number of the image to transfer.") todo = pywikibot.input(u"To end uploading, press enter:") if not todo: break todo = int(todo) if todo in range(len(imagelist)): if imagelist[todo].fileIsOnCommons(): - pywikibot.output(u'The image is already on Wikimedia Commons.') + pywikibot.output( + u'The image is already on Wikimedia Commons.') else: self.transferImage(imagelist[todo], debug = False) # remove the selected image from the list @@ -314,7 +330,8 @@ targetFamily = arg[10:] elif arg.startswith('-file'): if len(arg) == 5: - filename = pywikibot.input(u'Please enter the list's filename: ') + filename = pywikibot.input( + u'Please enter the list's filename: ') else: filename = arg[6:] gen = pagegenerators.TextfilePageGenerator(filename)
Modified: trunk/pywikipedia/inline_images.py =================================================================== --- trunk/pywikipedia/inline_images.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/inline_images.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -74,11 +74,12 @@ #if text != originalText: # page.put(text) except pywikibot.NoPage: - print "Page %s does not exist?!" % page.aslink() + print "Page %s does not exist?!" % page.title(asLink=True) except pywikibot.IsRedirectPage: - print "Page %s is a redirect; skipping." % page.aslink() + print "Page %s is a redirect; skipping." \ + % page.title(asLink=True) except pywikibot.LockedPage: - print "Page %s is locked?!" % page.aslink() + print "Page %s is locked?!" % page.title(asLink=True)
def main(): #page generator
Modified: trunk/pywikipedia/interwiki.py =================================================================== --- trunk/pywikipedia/interwiki.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/interwiki.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -1024,8 +1024,12 @@ return False if globalvar.autonomous: pywikibot.output(u"NOTE: Ignoring link from page %s in namespace %i to page %s in namespace %i." - % (linkingPage.aslink(True), linkingPage.namespace(), - linkedPage.aslink(True), linkedPage.namespace())) + % (linkingPage.title(asLink=True, + forceInterwiki=True), + linkingPage.namespace(), + linkedPage.title(asLink=True, + forceInterwiki=True), + linkedPage.namespace())) # Fill up foundIn, so that we will not write this notice self.foundIn[linkedPage] = [linkingPage] return True @@ -1033,14 +1037,25 @@ preferredPage = self.getFoundInCorrectNamespace(linkedPage.site()) if preferredPage: pywikibot.output(u"NOTE: Ignoring link from page %s in namespace %i to page %s in namespace %i because page %s in the correct namespace has already been found." - % (linkingPage.aslink(True), linkingPage.namespace(), linkedPage.aslink(True), - linkedPage.namespace(), preferredPage.aslink(True))) + % (linkingPage.title(asLink=True, + forceInterwiki=True), + linkingPage.namespace(), + linkedPage.title(asLink=True, + forceInterwiki=True), + linkedPage.namespace(), + preferredPage.title(asLink=True, + forceInterwiki=True))) return True else: - choice = pywikibot.inputChoice(u'WARNING: %s is in namespace %i, but %s is in namespace %i. Follow it anyway?' - % (self.originPage.aslink(True), self.originPage.namespace(), - linkedPage.aslink(True), linkedPage.namespace()), - ['Yes', 'No', 'Add an alternative', 'give up'], ['y', 'n', 'a', 'g']) + choice = pywikibot.inputChoice( +u'WARNING: %s is in namespace %i, but %s is in namespace %i. Follow it anyway?' 
+ % (self.originPage.title(asLink=True, + forceInterwiki=True), + self.originPage.namespace(), + linkedPage.title(asLink=True, forceInterwiki=True), + linkedPage.namespace()), + ['Yes', 'No', 'Add an alternative', 'give up'], + ['y', 'n', 'a', 'g']) if choice != 'y': # Fill up foundIn, so that we will not ask again self.foundIn[linkedPage] = [linkingPage] @@ -1054,7 +1069,10 @@ # add the page that was entered by the user self.addIfNew(alternativePage, counter, None) else: - pywikibot.output(u"NOTE: ignoring %s and its interwiki links" % linkedPage.aslink(True)) + pywikibot.output( + u"NOTE: ignoring %s and its interwiki links" + % linkedPage.title(asLink=True, + forceInterwiki=True)) return True else: # same namespaces, no problem @@ -1090,36 +1108,57 @@ if globalvar.autonomous: if self.originPage.isDisambig() and not page.isDisambig(): pywikibot.output(u"NOTE: Ignoring link from disambiguation page %s to non-disambiguation %s" - % (self.originPage.aslink(True), page.aslink(True))) + % (self.originPage.title(asLink=True, + forceInterwiki=True), + page.title(asLink=True, + forceInterwiki=True))) return (True, None) elif not self.originPage.isDisambig() and page.isDisambig(): pywikibot.output(u"NOTE: Ignoring link from non-disambiguation page %s to disambiguation %s" - % (self.originPage.aslink(True), page.aslink(True))) + % (self.originPage.title(asLink=True, + forceInterwiki=True), + page.title(asLink=True, + forceInterwiki=True))) return (True, None) else: choice = 'y' if self.originPage.isDisambig() and not page.isDisambig(): disambig = self.getFoundDisambig(page.site()) if disambig: - pywikibot.output(u"NOTE: Ignoring non-disambiguation page %s for %s because disambiguation page %s has already been found." - % (page.aslink(True), self.originPage.aslink(True), disambig.aslink(True))) + pywikibot.output( + u"NOTE: Ignoring non-disambiguation page %s for %s because disambiguation page %s has already been found." 
+ % (page.title(asLink=True, forceInterwiki=True), + self.originPage.title(asLink=True, + forceInterwiki=True), + disambig.title(asLink=True, forceInterwiki=True))) return (True, None) else: - choice = pywikibot.inputChoice(u'WARNING: %s is a disambiguation page, but %s doesn't seem to be one. Follow it anyway?' - % (self.originPage.aslink(True), page.aslink(True)), - ['Yes', 'No', 'Add an alternative', 'Give up'], - ['y', 'n', 'a', 'g']) + choice = pywikibot.inputChoice( + u'WARNING: %s is a disambiguation page, but %s doesn't seem to be one. Follow it anyway?' + % (self.originPage.title(asLink=True, + forceInterwiki=True), + page.title(asLink=True, forceInterwiki=True)), + ['Yes', 'No', 'Add an alternative', 'Give up'], + ['y', 'n', 'a', 'g']) elif not self.originPage.isDisambig() and page.isDisambig(): nondisambig = self.getFoundNonDisambig(page.site()) if nondisambig: pywikibot.output(u"NOTE: Ignoring disambiguation page %s for %s because non-disambiguation page %s has already been found." - % (page.aslink(True), self.originPage.aslink(True), nondisambig.aslink(True))) + % (page.title(asLink=True, + forceInterwiki=True), + self.originPage.title(asLink=True, + forceInterwiki=True), + nondisambig.title(asLink=True, + forceInterwiki=True))) return (True, None) else: - choice = pywikibot.inputChoice(u'WARNING: %s doesn't seem to be a disambiguation page, but %s is one. Follow it anyway?' - % (self.originPage.aslink(True), page.aslink(True)), - ['Yes', 'No', 'Add an alternative', 'Give up'], - ['y', 'n', 'a', 'g']) + choice = pywikibot.inputChoice( + u'WARNING: %s doesn't seem to be a disambiguation page, but %s is one. Follow it anyway?' + % (self.originPage.title(asLink=True, + forceInterwiki=True), + page.title(asLink=True, forceInterwiki=True)), + ['Yes', 'No', 'Add an alternative', 'Give up'], + ['y', 'n', 'a', 'g']) if choice == 'n': return (True, None) elif choice == 'a': @@ -1144,11 +1183,13 @@
def reportInterwikilessPage(self, page): if not globalvar.quiet or pywikibot.verbose: - pywikibot.output(u"NOTE: %s does not have any interwiki links" % self.originPage.aslink(True)) + pywikibot.output(u"NOTE: %s does not have any interwiki links" + % self.originPage.title(asLink=True, + forceInterwiki=True)) if config.without_interwiki: f = codecs.open( pywikibot.config.datafilepath('without_interwiki.txt'), 'a', 'utf-8') - f.write(u"# %s \n" % page.aslink(forceInterwiki=True)) + f.write(u"# %s \n" % page.title(asLink=True, forceInterwiki=True)) f.close()
def askForHints(self, counter): @@ -1219,9 +1260,12 @@ # todo list.
if not page.exists(): - globalvar.remove.append(page.aslink(forceInterwiki=True)) + globalvar.remove.append(page.title(asLink=True, + forceInterwiki=True)) if not globalvar.quiet or pywikibot.verbose: - pywikibot.output(u"NOTE: %s does not exist. Skipping." % page.aslink(True)) + pywikibot.output(u"NOTE: %s does not exist. Skipping." + % page.title(asLink=True, + forceInterwiki=True)) if page == self.originPage: # The page we are working on is the page that does not exist. # No use in doing any work on it in that case. @@ -1247,11 +1291,16 @@ # but we can't do anything useful with such pages if not globalvar.quiet or pywikibot.verbose: pywikibot.output(u"NOTE: %s redirects to an invalid title" - % page.aslink(True)) + % page.title(asLink=True, + forceInterwiki=True)) continue if not globalvar.quiet or pywikibot.verbose: pywikibot.output(u"NOTE: %s is %sredirect to %s" - % (page.aslink(True), redir, redirectTargetPage.aslink(True))) + % (page.title(asLink=True, + forceInterwiki=True), + redir, + redirectTargetPage.title(asLink=True, + forceInterwiki=True))) if self.originPage is None or page == self.originPage: # the 1st existig page becomes the origin page, if none was supplied if globalvar.initialredirect: @@ -1281,16 +1330,22 @@ if self.addIfNew(redirectTargetPage, counter, page): if config.interwiki_shownew or pywikibot.verbose: pywikibot.output(u"%s: %s gives new %sredirect %s" - % (self.originPage.title(asLink=True), page.aslink(True), - redir, redirectTargetPage.aslink(True))) + % (self.originPage.title(asLink=True), + page.title(asLink=True, forceInterwiki=True), + redir, + redirectTargetPage.title(asLink=True, + forceInterwiki=True))) continue
# must be behind the page.isRedirectPage() part # otherwise a redirect error would be raised elif page.isEmpty() and not page.isCategory(): - globalvar.remove.append(page.aslink(forceInterwiki=True)) + globalvar.remove.append(page.title(asLink=True, + forceInterwiki=True)) if not globalvar.quiet or pywikibot.verbose: - pywikibot.output(u"NOTE: %s is empty. Skipping." % page.aslink(True)) + pywikibot.output(u"NOTE: %s is empty. Skipping." + % page.title(asLink=True, + forceInterwiki=True)) if page == self.originPage: for site, count in self.todo.siteCounts(): counter.minus(site, count) @@ -1301,7 +1356,9 @@
elif page.section(): if not globalvar.quiet or pywikibot.verbose: - pywikibot.output(u"NOTE: %s is a page section. Skipping." % page.aslink(True)) + pywikibot.output(u"NOTE: %s is a page section. Skipping." + % page.title(asLink=True, + forceInterwiki=True)) continue
# Page exists, isnt a redirect, and is a plain link (no section) @@ -1317,7 +1374,8 @@
(skip, alternativePage) = self.disambigMismatch(page, counter) if skip: - pywikibot.output(u"NOTE: ignoring %s and its interwiki links" % page.aslink(True)) + pywikibot.output(u"NOTE: ignoring %s and its interwiki links" + % page.title(asLink=True, forceInterwiki=True)) self.done.remove(page) iw = () if alternativePage: @@ -1345,15 +1403,21 @@
elif globalvar.autonomous and duplicate and not skip: pywikibot.output(u"Stopping work on %s because duplicate pages"\ - " %s and %s are found" % (self.originPage.aslink(True), - duplicate.aslink(True), - page.aslink(True))) + " %s and %s are found" % (self.originPage.title(asLink=True, + forceInterwiki=True), + duplicate.title(asLink=True, + forceInterwiki=True), + page.title(asLink=True, + forceInterwiki=True))) self.makeForcedStop(counter) try: f = codecs.open( pywikibot.config.datafilepath('autonomous_problems.dat'), 'a', 'utf-8') - f.write(u"* %s {Found more than one link for %s}" % (self.originPage.aslink(True), page.site())) + f.write(u"* %s {Found more than one link for %s}" + % (self.originPage.title(asLink=True, + forceInterwiki=True), + page.site())) if config.interwiki_graph and config.interwiki_graph_url: filename = interwiki_graph.getFilename(self.originPage, extension = config.interwiki_graph_formats[0]) f.write(u" [%s%s graph]" % (config.interwiki_graph_url, filename)) @@ -1367,9 +1431,12 @@ sys.exit() iw = () elif page.isEmpty() and not page.isCategory(): - globalvar.remove.append(page.aslink(forceInterwiki=True)) + globalvar.remove.append(page.title(asLink=True, + forceInterwiki=True)) if not globalvar.quiet or pywikibot.verbose: - pywikibot.output(u"NOTE: %s is empty; ignoring it and its interwiki links" % page.aslink(True)) + pywikibot.output(u"NOTE: %s is empty; ignoring it and its interwiki links" + % page.title(asLink=True, + forceInterwiki=True)) # Ignore the interwiki links self.done.remove(page) iw = () @@ -1377,7 +1444,12 @@ for linkedPage in iw: if globalvar.hintsareright: if linkedPage.site in self.hintedsites: - pywikibot.output(u"NOTE: %s: %s extra interwiki on hinted site ignored %s" % (self.originPage.title(asLink=True), page.aslink(True), linkedPage.aslink(True))) + pywikibot.output(u"NOTE: %s: %s extra interwiki on hinted site ignored %s" + % (self.originPage.title(asLink=True), + page.title(asLink=True, + forceInterwiki=True), + 
linkedPage.title(asLink=True, + forceInterwiki=True))) break if not self.skipPage(page, linkedPage, counter): if globalvar.followinterwiki or page == self.originPage: @@ -1389,11 +1461,22 @@ if prevPage != linkedPage and prevPage.site() == lpsite: # Still, this could be "no problem" as either may be a # redirect to the other. No way to find out quickly! - pywikibot.output(u"NOTE: %s: %s gives duplicate interwiki on same site %s" % (self.originPage.aslink(True), page.aslink(True), linkedPage.aslink(True))) + pywikibot.output(u"NOTE: %s: %s gives duplicate interwiki on same site %s" + % (self.originPage.title(asLink=True, + forceInterwiki=True), + page.title(asLink=True, + forceInterwiki=True), + linkedPage.title(asLink=True, + forceInterwiki=True))) break else: if config.interwiki_shownew or pywikibot.verbose: - pywikibot.output(u"%s: %s gives new interwiki %s"% (self.originPage.title(asLink=True), page.aslink(True), linkedPage.aslink(True))) + pywikibot.output(u"%s: %s gives new interwiki %s" + % (self.originPage.title(asLink=True), + page.title(asLink=True, + forceInterwiki=True), + linkedPage.title(asLink=True, + forceInterwiki=True))) if self.forcedStop: break # These pages are no longer 'in progress' @@ -1419,7 +1502,8 @@ if page2 is None: pywikibot.output(u" "*indent + "Given as a hint.") else: - pywikibot.output(u" "*indent + page2.aslink(True)) + pywikibot.output(u" "*indent + page2.title(asLink=True, + forceInterwiki=True))
def assemble(self): @@ -1437,7 +1521,9 @@ continue # inhibit the forwarding families pages to be updated. if site == self.originPage.site(): if page != self.originPage: - self.problem(u"Found link to %s" % page.aslink(True) ) + self.problem(u"Found link to %s" + % page.title(asLink=True, + forceInterwiki=True)) self.whereReport(page) errorCount += 1 else: @@ -1476,7 +1562,9 @@ i = 0 for page2 in pages: i += 1 - pywikibot.output(u" (%d) Found link to %s in:" % (i, page2.aslink(True))) + pywikibot.output(u" (%d) Found link to %s in:" + % (i, page2.title(asLink=True, + forceInterwiki=True))) self.whereReport(page2, indent = 8) while True: #TODO: allow answer to repeat previous or go back after a mistake @@ -1504,7 +1592,9 @@ if not acceptall: pywikibot.output(u"=" * 30) page2 = pages[0] - pywikibot.output(u"Found link to %s in:" % page2.aslink(True)) + pywikibot.output(u"Found link to %s in:" + % page2.title(asLink=True, + forceInterwiki=True)) self.whereReport(page2, indent = 4) while True: if acceptall: @@ -1562,7 +1652,9 @@ if not self.untranslated and globalvar.untranslatedonly: return if self.forcedStop: # autonomous with problem - pywikibot.output(u"======Aborted processing %s======" % self.originPage.aslink(True)) + pywikibot.output(u"======Aborted processing %s======" + % self.originPage.title(asLink=True, + forceInterwiki=True)) return # The following check is not always correct and thus disabled. 
# self.done might contain no interwiki links because of the -neverlink @@ -1570,11 +1662,15 @@ # if len(self.done) == 1: # # No interwiki at all # return - pywikibot.output(u"======Post-processing %s======" % self.originPage.aslink(True)) + pywikibot.output(u"======Post-processing %s======" + % self.originPage.title(asLink=True, + forceInterwiki=True)) # Assemble list of accepted interwiki links new = self.assemble() if new is None: # User said give up - pywikibot.output(u"======Aborted processing %s======" % self.originPage.aslink(True)) + pywikibot.output(u"======Aborted processing %s======" + % self.originPage.title(asLink=True, + forceInterwiki=True)) return
# Make sure new contains every page link, including the page we are processing @@ -1619,7 +1715,8 @@ old[page.site()] = page except pywikibot.NoPage: pywikibot.output(u"BUG>>> %s no longer exists?" - % new[site].aslink(True)) + % new[site].title(asLink=True, + forceInterwiki=True)) continue mods, mcomment, adding, removing, modifying \ = compareLanguages(old, new, insite = lclSite) @@ -1652,7 +1749,8 @@ old[mypage.site()] = mypage except pywikibot.NoPage: pywikibot.output(u"BUG>>> %s no longer exists?" - % new[site].aslink(True)) + % new[site].title(asLink=True, + forceInterwiki=True)) continue mods, mcomment, adding, removing, modifying \ = compareLanguages(old, new, insite=site) @@ -1733,20 +1831,20 @@ if page.title() != page.sectionFreeTitle(): # This is not a page, but a subpage. Do not edit it. pywikibot.output(u"Not editing %s: not doing interwiki on subpages" - % page.aslink(True)) + % page.title(asLink=True, forceInterwiki=True)) raise SaveError try: pagetext = page.get() except pywikibot.NoPage: pywikibot.output(u"Not editing %s: page does not exist" - % page.aslink(True)) + % page.title(asLink=True, forceInterwiki=True)) raise SaveError if page.isEmpty() and not page.isCategory(): pywikibot.output(u"Not editing %s: page is empty" - % page.aslink(True)) + % page.title(asLink=True, forceInterwiki=True)) raise SaveError -
+ # clone original newPages dictionary, so that we can modify it to the # local page's needs new = dict(newPages) @@ -1780,10 +1878,11 @@ pltmp = new[page.site()] if pltmp != page: s = u"None" - if pltmp is not None: s = pltmp.aslink(True) + if pltmp is not None: s = pltmp.title(asLink=True, + forceInterwiki=True) pywikibot.output( u"BUG>>> %s is not in the list of new links! Found %s." - % (page.aslink(True), s)) + % (page.title(asLink=True, forceInterwiki=True), s)) raise SaveError
# Avoid adding an iw link back to itself @@ -1819,13 +1918,14 @@ #put it to new means don't delete it if not globalvar.cleanup and not globalvar.force or \ globalvar.cleanup and \ - rmPage.aslink(forceInterwiki=True) not in globalvar.remove or \ + rmPage.title(asLink=True, + forceInterwiki=True) not in globalvar.remove or \ rmPage.site().lang in ['hak', 'hi', 'cdo'] and \ pywikibot.unicode_error: #work-arround for bug #3081100 (do not remove hi-pages) new[rmsite] = rmPage pywikibot.output( u"WARNING: %s is either deleted or has a mismatching disambiguation state." - % rmPage.aslink(True)) + % rmPage.title(asLink=True, forceInterwiki=True)) # Re-Check what needs to get done mods, mcomment, adding, removing, modifying = compareLanguages(old, new, @@ -1833,13 +1933,13 @@ if not mods: if not globalvar.quiet or pywikibot.verbose: pywikibot.output(u'No changes needed on page %s' - % page.aslink(True)) + % page.title(asLink=True, forceInterwiki=True)) return False
# Show a message in purple. pywikibot.output( u"\03{lightpurple}Updating links on page %s.\03{default}" - % page.aslink(True)) + % page.title(asLink=True, forceInterwiki=True)) pywikibot.output(u"Changes to be made: %s" % mods) oldtext = page.get() template = (page.namespace() == 10) @@ -1852,11 +1952,11 @@ if template: pywikibot.output( u'SKIPPING: %s should have interwiki links on subpage.' - % page.aslink(True)) + % page.title(asLink=True, forceInterwiki=True)) else: pywikibot.output( u'SKIPPING: %s is under construction or to be deleted.' - % page.aslink(True)) + % page.title(asLink=True, forceInterwiki=True)) return False if newtext == oldtext: return False @@ -1870,7 +1970,8 @@ if removing and removing != [page.site()]: self.problem(u'Found incorrect link to %s in %s' % (", ".join([x.lang for x in removing]), - page.aslink(True)), createneed=False) + page.title(asLink=True, forceInterwiki=True)), + createneed=False) if pywikibot.unicode_error: for x in removing: if x.lang in ['hi', 'cdo']: @@ -1931,7 +2032,8 @@ status, reason, data = page.put(newtext, comment=mcomment) except pywikibot.LockedPage: pywikibot.output(u'Page %s is locked. Skipping.' - % page.aslink(True)) + % page.title(asLink=True, + forceInterwiki=True)) raise SaveError except pywikibot.EditConflict: pywikibot.output( @@ -1973,7 +2075,8 @@ else: raise LinkMustBeRemoved(u'Found incorrect link to %s in %s' % (", ".join([x.lang for x in removing]), - page.aslink(True))) + page.title(asLink=True, + forceInterwiki=True)))
def reportBacklinks(self, new, updatedSites): """ @@ -2005,12 +2108,20 @@ try: linkedPage = linkedPagesDict[expectedPage.site()] pywikibot.output(u"WARNING: %s: %s does not link to %s but to %s" - % (page.site().family.name, page.aslink(True), - expectedPage.aslink(True), linkedPage.aslink(True))) + % (page.site().family.name, + page.title(asLink=True, + forceInterwiki=True), + expectedPage.title(asLink=True, + forceInterwiki=True), + linkedPage.title(asLink=True, + forceInterwiki=True))) except KeyError: pywikibot.output(u"WARNING: %s: %s does not link to %s" - % (page.site().family.name, page.aslink(True), - expectedPage.aslink(True))) + % (page.site().family.name, + page.title(asLink=True, + forceInterwiki=True), + expectedPage.title(asLink=True, + forceInterwiki=True))) # Check for superfluous links for linkedPage in linkedPages: if linkedPage not in expectedPages: @@ -2018,8 +2129,11 @@ # In this case, it was already reported above. if linkedPage.site() not in expectedSites: pywikibot.output(u"WARNING: %s: %s links to incorrect %s" - % (page.site().family.name, page.aslink(True), - linkedPage.aslink(True))) + % (page.site().family.name, + page.title(asLink=True, + forceInterwiki=True), + linkedPage.title(asLink=True, + forceInterwiki=True))) except (socket.error, IOError): pywikibot.output(u'ERROR: could not report backlinks')
@@ -2064,7 +2178,7 @@ f = codecs.open(dumpfn, mode[0], 'utf-8') for subj in self.subjects: if subj.originPage: - f.write(subj.originPage.aslink(None)+'\n') + f.write(subj.originPage.title(asLink=True)+'\n') f.close() pywikibot.output(u'Dump %s (%s) %s.' % (site.lang, site.family.name, mode)) return dumpfn @@ -2075,8 +2189,11 @@ PageGenerator""" fs = self.firstSubject() if fs and (not globalvar.quiet or pywikibot.verbose): - pywikibot.output(u"NOTE: The first unfinished subject is " + fs.originPage.aslink(True)) - pywikibot.output(u"NOTE: Number of pages queued is %d, trying to add %d more."%(len(self.subjects), number)) + pywikibot.output(u"NOTE: The first unfinished subject is %s" + % fs.originPage.title(asLink=True, + forceInterwiki=True)) + pywikibot.output(u"NOTE: Number of pages queued is %d, trying to add %d more." + % (len(self.subjects), number)) for i in range(number): try: while True: @@ -2277,7 +2394,7 @@ if not globalvar.summary and \ len(adding) + len(removing) + len(modifying) <= 3: # Use an extended format for the string linking to all added pages. - fmt = lambda d, site: d[site].aslink(forceInterwiki=True) + fmt = lambda d, site: d[site].title(asLink=True, forceInterwiki=True) else: # Use short format, just the language code fmt = lambda d, site: site.lang
Modified: trunk/pywikipedia/patrol.py =================================================================== --- trunk/pywikipedia/patrol.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/patrol.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -314,10 +314,12 @@ self.rc_item_counter = self.rc_item_counter + 1
except pywikibot.NoPage: - pywikibot.output(u"Page %s does not exist; skipping." % page.aslink()) + pywikibot.output(u"Page %s does not exist; skipping." + % page.title(asLink=True)) return except pywikibot.IsRedirectPage: - pywikibot.output(u"Page %s is a redirect; skipping." % page.aslink()) + pywikibot.output(u"Page %s is a redirect; skipping." + % page.title(asLink=True)) return
def title_match(prefix, title):
Modified: trunk/pywikipedia/pywikibot/textlib.py =================================================================== --- trunk/pywikipedia/pywikibot/textlib.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/pywikibot/textlib.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -484,7 +484,8 @@ s = [] for site in ar: try: - link = links[site].aslink(forceInterwiki=True).replace('[[:', '[[') + link = links[site].title(asLink=True, + forceInterwiki=True).replace('[[:', '[[') s.append(link) except AttributeError: s.append(getSite(site).linkto(links[site], othersite=insite))
Modified: trunk/pywikipedia/revertbot.py =================================================================== --- trunk/pywikipedia/revertbot.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/revertbot.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -104,7 +104,8 @@
page = pywikibot.Page(self.site, item['title']) pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" - % page.aslink(True, True)) + % page.title(asLink=True, forceInterwiki=True, + textlink=True)) old = page.get() new = rev['*'] pywikibot.showDiff(old, new)
Modified: trunk/pywikipedia/unusedfiles.py =================================================================== --- trunk/pywikipedia/unusedfiles.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/unusedfiles.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -111,7 +111,8 @@ 'http://' not in page.get(): pywikibot.output(u'\n' + page.title()) if template_image in page.get(): - pywikibot.output(u"%s done already" % page.aslink()) + pywikibot.output(u"%s done already" + % page.title(asLink=True)) continue appendtext(page, u"\n\n"+template_image) uploader = page.getFileVersionHistory().pop()[1]
Modified: trunk/pywikipedia/wikipedia.py =================================================================== --- trunk/pywikipedia/wikipedia.py 2011-10-29 15:45:02 UTC (rev 9682) +++ trunk/pywikipedia/wikipedia.py 2011-10-30 10:50:42 UTC (rev 9683) @@ -521,8 +521,9 @@ def section(self, underscore = False): """Return the name of the section this Page refers to.
- The section is the part of the title following a '#' character, if any. - If no section is present, return None. + The section is the part of the title following a '#' character, if + any. If no section is present, return None. + """ section = self._section if section and underscore: @@ -549,14 +550,17 @@
def __str__(self): """Return a console representation of the pagelink.""" - return self.aslink().encode(config.console_encoding, 'replace') + return self.title(asLink=True + ).encode(config.console_encoding, + 'replace')
def __unicode__(self): return self.title(asLink=True, forceInterwiki=True)
def __repr__(self): """Return a more complete string representation.""" - return "%s{%s}" % (self.__class__.__name__, str(self)) + return "%s{%s}" % (self.__class__.__name__, + str(self))
def __cmp__(self, other): """Test for equality and inequality of Page objects. @@ -581,7 +585,7 @@ # representation of an instance can not change after the construction. return hash(unicode(self))
- #@deprecated("Page.title(asLink=True)") + @deprecated("Page.title(asLink=True)") def aslink(self, forceInterwiki=False, textlink=False, noInterwiki=False): """Return a string representation in the form of a wikilink.
@@ -655,13 +659,16 @@ for illegalChar in u'#<>[]|{}\n\ufffd': if illegalChar in self.sectionFreeTitle(): if verbose: - output(u'Illegal character in %s!' % self.aslink()) - raise NoPage('Illegal character in %s!' % self.aslink()) + output(u'Illegal character in %s!' + % self.title(asLink=True)) + raise NoPage('Illegal character in %s!' + % self.title(asLink=True)) if self.namespace() == -1: - raise NoPage('%s is in the Special namespace!' % self.aslink()) + raise NoPage('%s is in the Special namespace!' + % self.title(asLink=True)) if self.site().isInterwikiLink(self.title()): raise NoPage('%s is not a local page on %s!' - % (self.aslink(), self.site())) + % (self.title(asLink=True), self.site())) if force: # When forcing, we retry the page no matter what: # * Old exceptions and contents do not apply any more @@ -696,7 +703,8 @@ if hn: m = re.search("=+[ ']*%s[ ']*=+" % hn, self._contents) if verbose and not m: - output(u"WARNING: Section does not exist: %s" % self.aslink(forceInterwiki = True)) + output(u"WARNING: Section does not exist: %s" + % self.title(asLink=True, forceInterwiki=True)) # Store any exceptions for later reference except NoPage: self._getexception = NoPage @@ -711,7 +719,9 @@ raise except UserBlocked: if self.site().loggedInAs(sysop=sysop): - raise UserBlocked(self.site(), self.aslink(forceInterwiki = True)) + raise UserBlocked(self.site(), + self.title(asLink=True, + forceInterwiki=True)) else: if verbose: output("The IP address is blocked, retry by login.") @@ -766,7 +776,10 @@ pageInfo = data['query']['pages'].values()[0] if data['query']['pages'].keys()[0] == "-1": if 'missing' in pageInfo: - raise NoPage(self.site(), self.aslink(forceInterwiki = True),"Page does not exist. In rare cases, if you are certain the page does exist, look into overriding family.RversionTab" ) + raise NoPage(self.site(), + self.title(asLink=True, + forceInterwiki = True), +"Page does not exist. 
In rare cases, if you are certain the page does exist, look into overriding family.RversionTab") elif 'invalid' in pageInfo: raise BadTitle('BadTitle: %s' % self) elif 'revisions' in pageInfo: #valid Title @@ -833,7 +846,7 @@ """Get the contents of the Page via the edit page."""
if verbose: - output(u'Getting page %s' % self.aslink()) + output(u'Getting page %s' % self.title(asLink=True)) path = self.site().edit_address(self.urlname()) if oldid: path += "&oldid="+oldid @@ -861,26 +874,31 @@ if self.site().mediawiki_message('whitelistedittitle') in text: raise NoPage(u'Page editing is forbidden for anonymous users.') elif self.site().has_mediawiki_message('nocreatetitle') and self.site().mediawiki_message('nocreatetitle') in text: - raise NoPage(self.site(), self.aslink(forceInterwiki = True)) + raise NoPage(self.site(), self.title(asLink=True, + forceInterwiki=True)) # Bad title elif 'var wgPageName = "Special:Badtitle";' in text \ or self.site().mediawiki_message('badtitle') in text: raise BadTitle('BadTitle: %s' % self) # find out if the username or IP has been blocked elif self.site().isBlocked(): - raise UserBlocked(self.site(), self.aslink(forceInterwiki = True)) + raise UserBlocked(self.site(), + self.title(asLink=True, + forceInterwiki=True)) # If there is no text area and the heading is 'View Source' # but user is not blocked, the page does not exist, and is # locked elif self.site().mediawiki_message('viewsource') in text: - raise NoPage(self.site(), self.aslink(forceInterwiki = True)) + raise NoPage(self.site(), self.title(asLink=True, + forceInterwiki = True)) # Some of the newest versions don't have a "view source" tag for # non-existant pages # Check also the div class because if the language is not english # the bot can not seeing that the page is blocked. elif self.site().mediawiki_message('badaccess') in text or \ "<div class="permissions-errors">" in text: - raise NoPage(self.site(), self.aslink(forceInterwiki = True)) + raise NoPage(self.site(), self.title(asLink=True, + forceInterwiki=True)) elif config.retry_on_fail: if "<title>Wikimedia Error</title>" in text: output( u"Wikimedia has technical problems; will retry in %i minutes." 
% retry_idle_time) @@ -934,7 +952,9 @@ RversionTab = re.compile(r'<li id="ca-history"><a href=".*?title=.*?&action=history".*?>.*?</a></li>', re.DOTALL) matchVersionTab = RversionTab.search(text) if not matchVersionTab and not self.site().family.name == 'wikitravel': - raise NoPage(self.site(), self.aslink(forceInterwiki = True),"Page does not exist. In rare cases, if you are certain the page does exist, look into overriding family.RversionTab" ) + raise NoPage(self.site(), + self.title(asLink=True, forceInterwiki=True), +"Page does not exist. In rare cases, if you are certain the page does exist, look into overriding family.RversionTab" ) # Look if the page is on our watchlist matchWatching = Rwatchlist.search(text) if matchWatching: @@ -1415,7 +1435,8 @@
while not allDone: if not internal: - output(u'Getting references to %s via API...' % self.aslink()) + output(u'Getting references to %s via API...' + % self.title(asLink=True))
datas = query.GetData(params, self.site()) data = datas['query'].values() @@ -1491,7 +1512,7 @@ # to avoid duplicates: refPages = set() while path: - output(u'Getting references to %s' % self.aslink()) + output(u'Getting references to %s' % self.title(asLink=True)) get_throttle() txt = self.site().getUrl(path) body = BeautifulSoup(txt, @@ -1634,7 +1655,7 @@ for pageid in text: if 'missing' in text[pageid]: self._getexception = NoPage - raise NoPage('Page %s does not exist' % self.aslink()) + raise NoPage('Page %s does not exist' % self.title(asLink=True)) elif not 'pageid' in text[pageid]: # Don't know what may happen here. # We may want to have better error handling @@ -1707,7 +1728,7 @@ if not self.botMayEdit(username): raise LockedPage( u'Not allowed to edit %s because of a restricting template' - % self.aslink()) + % self.title(asLink=True)) elif self.site().has_api() and self.namespace() in [2,3] \ and (self.title().endswith('.css') or \ self.title().endswith('.js')): @@ -1728,7 +1749,7 @@ if self._editrestriction: output( u'Page %s is semi-protected. Getting edit page to find out if we are allowed to edit.' - % self.aslink()) + % self.title(asLink=True)) oldtime = self.editTime() # Note: change_edit_time=True is always True since # self.get() calls self._getEditPage without this parameter @@ -1860,10 +1881,10 @@ put_throttle() # Which web-site host are we submitting to? if newPage: - output(u'Creating page %s via API' % self.aslink()) + output(u'Creating page %s via API' % self.title(asLink=True)) params['createonly'] = 1 else: - output(u'Updating page %s via API' % self.aslink()) + output(u'Updating page %s via API' % self.title(asLink=True)) params['nocreate'] = 1 # Submit the prepared information try: @@ -1877,7 +1898,7 @@ retry_attempt += 1 if retry_attempt > config.maxretries: raise - output(u'Got a server error when putting %s; will retry in %i minute%s.' 
% (self.aslink(), retry_delay, retry_delay != 1 and "s" or "")) + output(u'Got a server error when putting %s; will retry in %i minute%s.' % (self.title(asLink=True), retry_delay, retry_delay != 1 and "s" or "")) time.sleep(60 * retry_delay) retry_delay *= 2 if retry_delay > 30: @@ -2093,9 +2114,9 @@ put_throttle() # Which web-site host are we submitting to? if newPage: - output(u'Creating page %s' % self.aslink()) + output(u'Creating page %s' % self.title(asLink=True)) else: - output(u'Changing page %s' % self.aslink()) + output(u'Changing page %s' % self.title(asLink=True)) # Submit the prepared information try: response, data = self.site().postForm(address, predata, sysop) @@ -2121,7 +2142,7 @@ raise output( u'Got a server error when putting %s; will retry in %i minute%s.' - % (self.aslink(), retry_delay, retry_delay != 1 and "s" or "")) + % (self.title(asLink=True), retry_delay, retry_delay != 1 and "s" or "")) time.sleep(60 * retry_delay) retry_delay *= 2 if retry_delay > 30: @@ -2315,7 +2336,7 @@ self.site().language()): text = text.replace(u"{{%s}}" % pagenametext, self.title())
- ll = getLanguageLinks(text, insite=self.site(), pageLink=self.aslink()) + ll = getLanguageLinks(text, insite=self.site(), pageLink=self.title(asLink=True))
result = ll.values()
@@ -2348,7 +2369,7 @@ if not self.site().isAllowed('apihighlimits') and config.special_page_limit > 500: params['cllimit'] = 500
- output(u'Getting categories in %s via API...' % self.aslink()) + output(u'Getting categories in %s via API...' % self.title(asLink=True)) allDone = False cats=[] while not allDone: @@ -2698,7 +2719,8 @@ pageInfo = result['query']['pages'].values()[0] if result['query']['pages'].keys()[0] == "-1": if 'missing' in pageInfo: - raise NoPage(self.site(), self.aslink(forceInterwiki=True), + raise NoPage(self.site(), self.title(asLink=True, + forceInterwiki=True), "Page does not exist.") elif 'invalid' in pageInfo: raise BadTitle('BadTitle: %s' % self) @@ -2770,9 +2792,11 @@
if verbose: if startFromPage: - output(u'Continuing to get version history of %s' % self.aslink(forceInterwiki = True)) + output(u'Continuing to get version history of %s' + % self.title(asLink=True, forceInterwiki=True)) else: - output(u'Getting version history of %s' % self.aslink(forceInterwiki = True)) + output(u'Getting version history of %s' + % self.title(asLink=True, forceInterwiki=True))
txt = self.site().getUrl(path)
@@ -2867,7 +2891,8 @@ pageInfo = result['query']['pages'].values()[0] if result['query']['pages'].keys()[0] == "-1": if 'missing' in pageInfo: - raise NoPage(self.site(), self.aslink(forceInterwiki=True), + raise NoPage(self.site(), self.title(asLink=True, + forceInterwiki=True), "Page does not exist.") elif 'invalid' in pageInfo: raise BadTitle('BadTitle: %s' % self) @@ -3154,7 +3179,7 @@ except NoUsername: if mark and self.exists(): text = self.get(get_redirect = True) - output(u'Cannot delete page %s - marking the page for deletion instead:' % self.aslink()) + output(u'Cannot delete page %s - marking the page for deletion instead:' % self.title(asLink=True)) # Note: Parameters to {{delete}}, and their meanings, vary from one Wikipedia to another. # If you want or need to use them, you must be careful not to break others. Else don't. self.put(u'{{delete|bot=yes}}\n%s --~~~~\n----\n\n%s' % (reason, text), comment = reason) @@ -3172,7 +3197,9 @@ reason = input(u'Please enter a reason for the deletion:') answer = u'y' if prompt and not hasattr(self.site(), '_noDeletePrompt'): - answer = inputChoice(u'Do you want to delete %s?' % self.aslink(forceInterwiki = True), ['yes', 'no', 'all'], ['y', 'N', 'a'], 'N') + answer = inputChoice(u'Do you want to delete %s?' 
+                                  % self.title(asLink=True, forceInterwiki=True),
+                                  ['yes', 'no', 'all'], ['y', 'N', 'a'], 'N')
         if answer == 'a':
             answer = 'y'
             self.site()._noDeletePrompt = True
@@ -3191,13 +3218,16 @@
             }
             datas = query.GetData(params, self.site(), sysop = True)
             if 'delete' in datas:
-                output(u'Page %s deleted' % self.aslink(forceInterwiki = True))
+                output(u'Page %s deleted'
+                       % self.title(asLink=True, forceInterwiki=True))
                 return True
             else:
                 if datas['error']['code'] == 'missingtitle':
-                    output(u'Page %s could not be deleted - it doesn\'t exist' % self.aslink(forceInterwiki = True))
+                    output(u'Page %s could not be deleted - it doesn\'t exist'
+                           % self.title(asLink=True, forceInterwiki=True))
                 else:
-                    output(u'Deletion of %s failed for an unknown reason. The response text is:' % self.aslink(forceInterwiki = True))
+                    output(u'Deletion of %s failed for an unknown reason. The response text is:'
+                           % self.title(asLink=True, forceInterwiki=True))
                 output('%s' % datas)
                return False
@@ -3218,13 +3248,16 @@
         if data:
             self.site().checkBlocks(sysop = True)
             if self.site().mediawiki_message('actioncomplete') in data:
-                output(u'Page %s deleted' % self.aslink(forceInterwiki = True))
+                output(u'Page %s deleted'
+                       % self.title(asLink=True, forceInterwiki=True))
                 return True
             elif self.site().mediawiki_message('cannotdelete') in data:
-                output(u'Page %s could not be deleted - it doesn\'t exist' % self.aslink(forceInterwiki = True))
+                output(u'Page %s could not be deleted - it doesn\'t exist'
+                       % self.title(asLink=True, forceInterwiki=True))
                 return False
             else:
-                output(u'Deletion of %s failed for an unknown reason. The response text is:' % self.aslink(forceInterwiki = True))
+                output(u'Deletion of %s failed for an unknown reason. The response text is:'
+                       % self.title(asLink=True, forceInterwiki=True))
             try:
                 ibegin = data.index('<!-- start content -->') + 22
                 iend = data.index('<!-- end content -->')
@@ -3398,7 +3431,7 @@
             if 'error' in result:
                 raise RuntimeError("%s" % result['error'])
             elif 'undelete' in result:
-                output(u'Page %s undeleted' % self.aslink())
+                output(u'Page %s undeleted' % self.title(asLink=True))
return result
@@ -3420,7 +3453,7 @@ self._deletedRevs = None #TODO: Check for errors below (have we succeeded? etc): result = self.site().postForm(address,formdata,sysop=True) - output(u'Page %s undeleted' % self.aslink()) + output(u'Page %s undeleted' % self.title(asLink=True))
return result
@@ -3462,7 +3495,7 @@ if prompt and not hasattr(self.site(), '_noProtectPrompt'): answer = inputChoice( u'Do you want to change the protection level of %s?' - % self.aslink(forceInterwiki = True), + % self.title(asLink=True, forceInterwiki=True), ['Yes', 'No', 'All'], ['Y', 'N', 'A'], 'N') if answer == 'a': answer = 'y' @@ -3528,7 +3561,7 @@ # else: if result['protect']: - output(u'Changed protection level of page %s.' % self.aslink()) + output(u'Changed protection level of page %s.' % self.title(asLink=True)) return True
return False @@ -3591,13 +3624,13 @@ response, data = self.site().postForm(address, predata, sysop=True)
if response.code == 302 and not data: - output(u'Changed protection level of page %s.' % self.aslink()) + output(u'Changed protection level of page %s.' % self.title(asLink=True)) return True else: #Normally, we expect a 302 with no data, so this means an error self.site().checkBlocks(sysop = True) output(u'Failed to change protection level of page %s:' - % self.aslink()) + % self.title(asLink=True)) output(u"HTTP response code %s" % response.code) output(data) return False @@ -3777,7 +3810,8 @@ self._local = pageInfo["imagerepository"] != "shared" if data['query']['pages'].keys()[0] == "-1": if 'missing' in pageInfo and self._local: - raise NoPage(self.site(), self.aslink(forceInterwiki=True), + raise NoPage(self.site(), + self.title(asLink=True, forceInterwiki=True), "Page does not exist.") elif 'invalid' in pageInfo: raise BadTitle('BadTitle: %s' % self) @@ -4032,7 +4066,7 @@ if (not hasattr(page, '_contents') and not hasattr(page, '_getexception')) or force: self.pages.append(page) elif verbose: - output(u"BUGWARNING: %s already done!" % page.aslink()) + output(u"BUGWARNING: %s already done!" % page.title(asLink=True))
def sleep(self): time.sleep(self.sleeptime) @@ -4153,7 +4187,7 @@ page2._contents = text m = self.site.redirectRegex().match(text) if m: - ## output(u"%s is a redirect" % page2.aslink()) + ## output(u"%s is a redirect" % page2.title(asLink=True)) redirectto = m.group(1) if section and not "#" in redirectto: redirectto += "#" + section @@ -4170,7 +4204,8 @@ try: page2._getexception output(u"WARNING: Section not found: %s" - % page2.aslink(forceInterwiki = True)) + % page2.title(asLink=True, + forceInterwiki=True)) except AttributeError: # There is no exception yet page2._getexception = SectionError @@ -4179,9 +4214,10 @@ # might be duplicates in the pages list. if not successful: output(u"BUG>> title %s (%s) not found in list" - % (title, page.aslink(forceInterwiki=True))) + % (title, page.title(asLink=True, forceInterwiki=True))) output(u'Expected one of: %s' - % u','.join([page2.aslink(forceInterwiki=True) for page2 in self.pages])) + % u','.join([page2.title(asLink=True, forceInterwiki=True) + for page2 in self.pages])) raise PageNotFound
def headerDone(self, header): @@ -4317,7 +4353,7 @@ page2._revisionId = revisionId section = page2.section() if 'redirect' in data: - ## output(u"%s is a redirect" % page2.aslink()) + ## output(u"%s is a redirect" % page2.title(asLink=True)) m = self.site.redirectRegex().match(text) redirectto = m.group(1) if section and not "#" in redirectto: @@ -4333,7 +4369,8 @@ if not m: try: page2._getexception - output(u"WARNING: Section not found: %s" % page2.aslink(forceInterwiki = True)) + output(u"WARNING: Section not found: %s" + % page2.title(asLink=True, forceInterwiki=True)) except AttributeError: # There is no exception yet page2._getexception = SectionError @@ -4341,8 +4378,11 @@ # Note that there is no break here. The reason is that there # might be duplicates in the pages list. if not successful: - output(u"BUG>> title %s (%s) not found in list" % (title, page.aslink(forceInterwiki=True))) - output(u'Expected one of: %s' % u','.join([page2.aslink(forceInterwiki=True) for page2 in self.pages])) + output(u"BUG>> title %s (%s) not found in list" + % (title, page.title(asLink=True, forceInterwiki=True))) + output(u'Expected one of: %s' + % u','.join([page2.title(asLink=True, forceInterwiki=True) + for page2 in self.pages])) raise PageNotFound
def headerDoneApi(self, header): @@ -8039,18 +8079,20 @@ continue if isinstance(error, SpamfilterError): output(u"Saving page %s prevented by spam filter: %s" - % (page.aslink(True), error.url)) + % (page.title(asLink=True, forceInterwiki=True), error.url)) elif isinstance(error, PageNotSaved): - output(u"Saving page %s failed: %s" % (page.aslink(True), error)) + output(u"Saving page %s failed: %s" + % (page.title(asLink=True, forceInterwiki=True), error)) elif isinstance(error, LockedPage): - output(u"Page %s is locked; not saved." % page.aslink(True)) + output(u"Page %s is locked; not saved." + % page.title(asLink=True, forceInterwiki=True)) elif isinstance(error, NoUsername): output(u"Page %s not saved; sysop privileges required." - % page.aslink(True)) + % page.title(asLink=True, forceInterwiki=True)) elif error is not None: tb = traceback.format_exception(*sys.exc_info()) output(u"Saving page %s failed:\n%s" - % (page.aslink(True), "".join(tb))) + % (page.title(asLink=True, forceInterwiki=True), "".join(tb)))
_putthread = threading.Thread(target=async_put) # identification for debugging purposes
pywikipedia-svn@lists.wikimedia.org