Revision: 7565 Author: xqt Date: 2009-10-29 15:33:46 +0000 (Thu, 29 Oct 2009)
Log Message: ----------- import wikipedia as pywikibot for easier comparison with rewrite branch; some fixes
Modified Paths: -------------- trunk/pywikipedia/imageuncat.py trunk/pywikipedia/isbn.py trunk/pywikipedia/replace.py trunk/pywikipedia/solve_disambiguation.py trunk/pywikipedia/titletranslate.py
Modified: trunk/pywikipedia/imageuncat.py =================================================================== --- trunk/pywikipedia/imageuncat.py 2009-10-29 15:29:31 UTC (rev 7564) +++ trunk/pywikipedia/imageuncat.py 2009-10-29 15:33:46 UTC (rev 7565) @@ -13,7 +13,8 @@ #
import os, sys, re, codecs -import wikipedia, config, pagegenerators, query +import wikipedia as pywikibot +import config, pagegenerators, query from datetime import datetime from datetime import timedelta
@@ -1263,7 +1264,7 @@ ''' Return a pagegenerator containing all the images edited in a certain timespan. The delay is the amount of minutes to wait and the block is the timespan to return images in. - Should probably copied to somewhere else + Should probably be copied to somewhere else '''
result = [] @@ -1299,14 +1300,15 @@ ''' Do we want to skip this page?
- If we found a category which is not in the ignore list it means that the page is categorized so skip the page. + If we found a category which is not in the ignore list it means + that the page is categorized so skip the page. If we found a template which is in the ignore list, skip the page. ''' - wikipedia.output(u'Working on '+ page.title()) + pywikibot.output(u'Working on '+ page.title())
for category in page.categories(): if category not in ignoreCategories: - wikipedia.output(u'Got category ' + category.title()) + pywikibot.output(u'Got category ' + category.title()) return False
for templateWithTrail in page.templates(): @@ -1314,13 +1316,13 @@ template = templateWithTrail.rstrip('\n').rstrip() if template in skipTemplates: # Already tagged with a template, skip it - wikipedia.output(u'Already tagged, skip it') + pywikibot.output(u'Already tagged, skip it') return False elif template in ignoreTemplates: # template not relevant for categorization - wikipedia.output(u'Ignore ' + template) + pywikibot.output(u'Ignore ' + template) else: - wikipedia.output(u'Not ignoring ' + template) + pywikibot.output(u'Not ignoring ' + template) return False return True
@@ -1329,13 +1331,13 @@ Add the uncat template to the page ''' newtext = page.get() + puttext - wikipedia.showDiff(page.get(), newtext) + pywikibot.showDiff(page.get(), newtext) try: page.put(newtext, putcomment) - except wikipedia.EditConflict: + except pywikibot.EditConflict: # Skip this page pass - except wikipedia.LockedPage: + except pywikibot.LockedPage: # Skip this page pass return @@ -1344,27 +1346,28 @@ ''' Grab a bunch of images and tag them if they are not categorized. ''' - generator = None; + generator = None genFactory = pagegenerators.GeneratorFactory()
- site = wikipedia.getSite(u'commons', u'commons') - wikipedia.setSite(site) - for arg in wikipedia.handleArgs(): + site = pywikibot.getSite(u'commons', u'commons') + pywikibot.setSite(site) + for arg in pywikibot.handleArgs(): if arg.startswith('-yesterday'): generator = uploadedYesterday(site) elif arg.startswith('-recentchanges'): generator = recentChanges(site=site, delay=120) else: genFactory.handleArg(arg) - if not generator: generator = genFactory.getCombinedGenerator() if not generator: - wikipedia.output('You have to specify the generator you want to use for the program!') + pywikibot.output( + u'You have to specify the generator you want to use for the program!') else: pregenerator = pagegenerators.PreloadingGenerator(generator) for page in pregenerator: - if page.exists() and (page.namespace() == 6) and (not page.isRedirectPage()) : + if page.exists() and (page.namespace() == 6) \ + and (not page.isRedirectPage()) : if isUncat(page): addUncat(page)
@@ -1372,4 +1375,4 @@ try: main(sys.argv[1:]) finally: - wikipedia.stopme() + pywikibot.stopme()
Modified: trunk/pywikipedia/isbn.py =================================================================== --- trunk/pywikipedia/isbn.py 2009-10-29 15:29:31 UTC (rev 7564) +++ trunk/pywikipedia/isbn.py 2009-10-29 15:33:46 UTC (rev 7565) @@ -38,7 +38,8 @@
__version__='$Id$'
-import wikipedia, pagegenerators +import wikipedia as pywikibot +import pagegenerators import sys, re
docuReplacements = { @@ -1174,15 +1175,15 @@ text = page.get(get_redirect = self.touch_redirects) # convert ISBN numbers page.put(text) - except wikipedia.NoPage: + except pywikibot.NoPage: print "Page %s does not exist?!" % page.aslink() - except wikipedia.IsRedirectPage: + except pywikibot.IsRedirectPage: print "Page %s is a redirect; skipping." % page.aslink() - except wikipedia.LockedPage: + except pywikibot.LockedPage: print "Page %s is locked?!" % page.aslink()
-class InvalidIsbnException(wikipedia.Error): +class InvalidIsbnException(pywikibot.Error): """Invalid ISBN"""
class ISBN: @@ -1204,7 +1205,7 @@ result += prefix + '-' rest = rest[len(prefix):] break - + # Determine the group for groupNumber in ranges.iterkeys(): if rest.startswith(groupNumber): @@ -1393,7 +1394,7 @@ try: getIsbn(code) except InvalidIsbnException, e: - wikipedia.output(e) + pywikibot.output(e)
newText = oldText if self.to13: @@ -1402,21 +1403,21 @@ if self.format: newText = self.isbnR.sub(_hyphenateIsbnNumber, newText) self.save(page, newText) - except wikipedia.NoPage: - wikipedia.output(u"Page %s does not exist?!" % page.aslink()) - except wikipedia.IsRedirectPage: - wikipedia.output(u"Page %s is a redirect; skipping." % page.aslink()) - except wikipedia.LockedPage: - wikipedia.output(u"Page %s is locked?!" % page.aslink()) + except pywikibot.NoPage: + pywikibot.output(u"Page %s does not exist?!" % page.aslink()) + except pywikibot.IsRedirectPage: + pywikibot.output(u"Page %s is a redirect; skipping." % page.aslink()) + except pywikibot.LockedPage: + pywikibot.output(u"Page %s is locked?!" % page.aslink())
def save(self, page, text): if text != page.get(): # Show the title of the page we're working on. # Highlight the title in purple. - wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title()) - wikipedia.showDiff(page.get(), text) + pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title()) + pywikibot.showDiff(page.get(), text) if not self.always: - choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'Always yes'], ['y', 'N', 'a'], 'N') + choice = pywikibot.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'Always yes'], ['y', 'N', 'a'], 'N') if choice == 'n': return elif choice == 'a': @@ -1425,20 +1426,20 @@ if self.always: try: page.put(text) - except wikipedia.EditConflict: - wikipedia.output(u'Skipping %s because of edit conflict' % (page.title(),)) - except wikipedia.SpamfilterError, e: - wikipedia.output(u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url)) - except wikipedia.LockedPage: - wikipedia.output(u'Skipping %s (locked page)' % (page.title(),)) + except pywikibot.EditConflict: + pywikibot.output(u'Skipping %s because of edit conflict' % (page.title(),)) + except pywikibot.SpamfilterError, e: + pywikibot.output(u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url)) + except pywikibot.LockedPage: + pywikibot.output(u'Skipping %s (locked page)' % (page.title(),)) else: # Save the page in the background. No need to catch exceptions. page.put_async(text)
def run(self): - comment = wikipedia.translate(wikipedia.getSite(), msg) - wikipedia.setAction(comment) + comment = pywikibot.translate(pywikibot.getSite(), msg) + pywikibot.setAction(comment)
for page in self.generator: self.treat(page) @@ -1462,7 +1463,7 @@ to13 = False format = False
- for arg in wikipedia.handleArgs(): + for arg in pywikibot.handleArgs(): if arg.startswith('-namespace:'): try: namespaces.append(int(arg[11:])) @@ -1479,12 +1480,12 @@ pageTitle.append(arg)
if pageTitle: - page = wikipedia.Page(wikipedia.getSite(), ' '.join(pageTitle)) + page = pywikibot.Page(pywikibot.getSite(), ' '.join(pageTitle)) gen = iter([page]) if not gen: gen = genFactory.getCombinedGenerator() if not gen: - wikipedia.showHelp('isbn') + pywikibot.showHelp('isbn') else: if namespaces != []: gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces) @@ -1496,4 +1497,4 @@ try: main() finally: - wikipedia.stopme() + pywikibot.stopme()
Modified: trunk/pywikipedia/replace.py =================================================================== --- trunk/pywikipedia/replace.py 2009-10-29 15:29:31 UTC (rev 7564) +++ trunk/pywikipedia/replace.py 2009-10-29 15:33:46 UTC (rev 7565) @@ -114,15 +114,18 @@
python replace.py referer referrer -file:typos.txt -excepttext:HTTP """ +from __future__ import generators # -# (C) Daniel Herding & the Pywikipediabot Team, 2004-2008 +# (C) Daniel Herding & the Pywikipedia team, 2004-2009 # +__version__='$Id$' +# # Distributed under the terms of the MIT license. #
-from __future__ import generators import sys, re, time -import wikipedia, pagegenerators +import wikipedia as pywikibot +import pagegenerators import editarticle import webbrowser
@@ -136,11 +139,10 @@ '&fixes-help;': fixes.help, }
-__version__='$Id$'
# Summary messages in different languages # NOTE: Predefined replacement tasks might use their own dictionary, see 'fixes' -# below.`v +# below. msg = { 'ar': u'%s روبوت : استبدال تلقائي للنص', 'ca': u'Robot: Reemplaçament automàtic de text %s', @@ -202,12 +204,12 @@ self.skipping = bool(xmlStart)
self.excsInside = [] - if 'inside-tags' in self.exceptions: + if "inside-tags" in self.exceptions: self.excsInside += self.exceptions['inside-tags'] - if 'inside' in self.exceptions: + if "inside" in self.exceptions: self.excsInside += self.exceptions['inside'] import xmlreader - self.site = wikipedia.getSite() + self.site = pywikibot.getSite() dump = xmlreader.XmlDump(self.xmlFilename) self.parser = dump.parse()
@@ -222,24 +224,24 @@ and not self.isTextExcepted(entry.text): new_text = entry.text for old, new in self.replacements: - new_text = wikipedia.replaceExcept(new_text, old, new, self.excsInside, self.site) + new_text = pywikibot.replaceExcept(new_text, old, new, self.excsInside, self.site) if new_text != entry.text: - yield wikipedia.Page(self.site, entry.title) + yield pywikibot.Page(self.site, entry.title) except KeyboardInterrupt: try: if not self.skipping: - wikipedia.output( + pywikibot.output( u'To resume, use "-xmlstart:%s" on the command line.' % entry.title) except NameError: pass
def isTitleExcepted(self, title): - if 'title' in self.exceptions: + if "title" in self.exceptions: for exc in self.exceptions['title']: if exc.search(title): return True - if 'require-title' in self.exceptions: + if "require-title" in self.exceptions: for req in self.exceptions['require-title']: if not req.search(title): # if not all requirements are met: return True @@ -247,7 +249,7 @@ return False
def isTextExcepted(self, text): - if 'text-contains' in self.exceptions: + if "text-contains" in self.exceptions: for exc in self.exceptions['text-contains']: if exc.search(text): return True @@ -292,7 +294,7 @@ regular expressions. inside-tags A list of strings. These strings must be keys from the - exceptionRegexes dictionary in wikipedia.replaceExcept(). + exceptionRegexes dictionary in pywikibot.replaceExcept().
""" self.generator = generator @@ -301,22 +303,22 @@ self.acceptall = acceptall self.allowoverlap = allowoverlap self.recursive = recursive + if addedCat: + site = pywikibot.getSite() + self.addedCat = pywikibot.Page(site, addCat, defaultNamespace=14) + self.sleep = sleep # Some function to set default editSummary should probably be added self.editSummary = editSummary - if addedCat: - site = wikipedia.getSite() - self.addedCat = wikipedia.Page(site, addCat, defaultNamespace=14) - self.sleep = sleep
def isTitleExcepted(self, title): """ Iff one of the exceptions applies for the given title, returns True. """ - if 'title' in self.exceptions: + if "title" in self.exceptions: for exc in self.exceptions['title']: if exc.search(title): return True - if 'require-title' in self.exceptions: + if "require-title" in self.exceptions: for req in self.exceptions['require-title']: if not req.search(title): return True @@ -327,7 +329,7 @@ Iff one of the exceptions applies for the given page contents, returns True. """ - if 'text-contains' in self.exceptions: + if "text-contains" in self.exceptions: for exc in self.exceptions['text-contains']: if exc.search(original_text): return True @@ -340,14 +342,14 @@ """ new_text = original_text exceptions = [] - if 'inside-tags' in self.exceptions: + if "inside-tags" in self.exceptions: exceptions += self.exceptions['inside-tags'] - if 'inside' in self.exceptions: + if "inside" in self.exceptions: exceptions += self.exceptions['inside'] for old, new in self.replacements: if self.sleep is not None: time.sleep(self.sleep) - new_text = wikipedia.replaceExcept(new_text, old, new, exceptions, + new_text = pywikibot.replaceExcept(new_text, old, new, exceptions, allowoverlap=self.allowoverlap) return new_text
@@ -359,7 +361,7 @@ # changed. for page in self.generator: if self.isTitleExcepted(page.title()): - wikipedia.output( + pywikibot.output( u'Skipping %s because the title is on the exceptions list.' % page.aslink()) continue @@ -367,23 +369,23 @@ # Load the page's text from the wiki original_text = page.get(get_redirect=True) if not page.canBeEdited(): - wikipedia.output(u"You can't edit page %s" + pywikibot.output(u"You can't edit page %s" % page.aslink()) continue - except wikipedia.NoPage: - wikipedia.output(u'Page %s not found' % page.aslink()) + except pywikibot.NoPage: + pywikibot.output(u'Page %s not found' % page.aslink()) continue new_text = original_text while True: if self.isTextExcepted(new_text): - wikipedia.output( + pywikibot.output( u'Skipping %s because it contains text that is on the exceptions list.' % page.aslink()) break new_text = self.doReplacements(new_text) if new_text == original_text: - wikipedia.output('No changes were necessary in %s' - % page.aslink()) + pywikibot.output(u'No changes were necessary in %s' + % page.aslink()) break if self.recursive: newest_text = self.doReplacements(new_text) @@ -394,16 +396,16 @@ cats = page.categories() if self.addedCat not in cats: cats.append(self.addedCat) - new_text = wikipedia.replaceCategoryLinks(new_text, + new_text = pywikibot.replaceCategoryLinks(new_text, cats) # Show the title of the page we're working on. # Highlight the title in purple.
- wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" + pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title()) - wikipedia.showDiff(original_text, new_text) + pywikibot.showDiff(original_text, new_text) if self.acceptall: break - choice = wikipedia.inputChoice( + choice = pywikibot.inputChoice( u'Do you want to accept these changes?', ['Yes', 'No', 'Edit', 'open in Browser', 'All', "Quit"], ['y', 'N', 'e', 'b', 'a', 'q'], 'N') @@ -419,7 +421,7 @@ page.site().hostname(), page.site().nice_get_address(page.title()) )) - wikipedia.input("Press Enter when finished in browser.") + pywikibot.input("Press Enter when finished in browser.") original_text = page.get(get_redirect=True, force=True) new_text = original_text continue @@ -434,18 +436,18 @@ if self.acceptall and new_text != original_text: try: page.put(new_text, self.editSummary) - except wikipedia.EditConflict: - wikipedia.output(u'Skipping %s because of edit conflict' + except pywikibot.EditConflict: + pywikibot.output(u'Skipping %s because of edit conflict' % (page.title(),)) - except wikipedia.SpamfilterError, e: - wikipedia.output( + except pywikibot.SpamfilterError, e: + pywikibot.output( u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url)) - except wikipedia.PageNotSaved, error: - wikipedia.output(u'Error putting page: %s' + except pywikibot.PageNotSaved, error: + pywikibot.output(u'Error putting page: %s' % (error.args,)) - except wikipedia.LockedPage: - wikipedia.output(u'Skipping %s (locked page)' + except pywikibot.LockedPage: + pywikibot.output(u'Skipping %s (locked page)' % (page.title(),))
def prepareRegexForMySQL(pattern): @@ -510,24 +512,24 @@ genFactory = pagegenerators.GeneratorFactory() # Load default summary message. # BUG WARNING: This is probably incompatible with the -lang parameter. - editSummary = wikipedia.translate(wikipedia.getSite(), msg) + editSummary = pywikibot.translate(pywikibot.getSite(), msg) # Between a regex and another (using -fix) sleep some time (not to waste # too much CPU sleep = None
# Read commandline parameters. - for arg in wikipedia.handleArgs(*args): + for arg in pywikibot.handleArgs(*args): if arg == '-regex': regex = True elif arg.startswith('-xmlstart'): if len(arg) == 9: - xmlStart = wikipedia.input( + xmlStart = pywikibot.input( u'Please enter the dumped article to start with:') else: xmlStart = arg[10:] elif arg.startswith('-xml'): if len(arg) == 4: - xmlFilename = wikipedia.input( + xmlFilename = pywikibot.input( u'Please enter the XML dump\'s filename:') else: xmlFilename = arg[5:] @@ -535,7 +537,7 @@ useSql = True elif arg.startswith('-page'): if len(arg) == 5: - PageTitles.append(wikipedia.input( + PageTitles.append(pywikibot.input( u'Which page do you want to change?')) else: PageTitles.append(arg[6:]) @@ -564,9 +566,9 @@ elif arg == '-multiline': multiline = True elif arg.startswith('-addcat:'): - add_cat = arg[len('addcat:'):] + add_cat = arg[8:] elif arg.startswith('-summary:'): - editSummary = arg[len('-summary:'):] + editSummary = arg[9:] summary_commandline = True elif arg.startswith('-allowoverlap'): allowoverlap = True @@ -577,47 +579,47 @@ commandline_replacements.append(arg)
if (len(commandline_replacements) % 2): - raise wikipedia.Error, 'require even number of replacements.' + raise pywikibot.Error, 'require even number of replacements.' elif (len(commandline_replacements) == 2 and fix is None): replacements.append((commandline_replacements[0], commandline_replacements[1])) - if summary_commandline == False: - editSummary = wikipedia.translate(wikipedia.getSite(), msg ) % (' (-' + commandline_replacements[0] + ' +' + if not summary_commandline: + editSummary = pywikibot.translate(pywikibot.getSite(), msg ) % (' (-' + commandline_replacements[0] + ' +' + commandline_replacements[1] + ')') elif (len(commandline_replacements) > 1): if (fix is None): for i in xrange (0, len(commandline_replacements), 2): replacements.append((commandline_replacements[i], commandline_replacements[i + 1])) - if summary_commandline == False: + if not summary_commandline: pairs = [( commandline_replacements[i], commandline_replacements[i + 1] ) for i in range(0, len(commandline_replacements), 2)] replacementsDescription = '(%s)' % ', '.join( [('-' + pair[0] + ' +' + pair[1]) for pair in pairs]) - editSummary = wikipedia.translate(wikipedia.getSite(), msg ) % replacementsDescription + editSummary = pywikibot.translate(pywikibot.getSite(), msg ) % replacementsDescription else: - raise wikipedia.Error( + raise pywikibot.Error( 'Specifying -fix with replacements is undefined') elif fix is None: - old = wikipedia.input(u'Please enter the text that should be replaced:') - new = wikipedia.input(u'Please enter the new text:') + old = pywikibot.input(u'Please enter the text that should be replaced:') + new = pywikibot.input(u'Please enter the new text:') change = '(-' + old + ' +' + new replacements.append((old, new)) while True: - old = wikipedia.input( + old = pywikibot.input( u'Please enter another text that should be replaced, or press Enter to start:') if old == '': change = change + ')' break - new = wikipedia.input(u'Please enter the new text:') + new = 
pywikibot.input(u'Please enter the new text:') change = change + ' & -' + old + ' +' + new replacements.append((old, new)) - if not summary_commandline == True: - default_summary_message = wikipedia.translate(wikipedia.getSite(), msg) % change - wikipedia.output(u'The summary message will default to: %s' + if not summary_commandline: + default_summary_message = pywikibot.translate(pywikibot.getSite(), msg) % change + pywikibot.output(u'The summary message will default to: %s' % default_summary_message) - summary_message = wikipedia.input( + summary_message = pywikibot.input( u'Press Enter to use this default message, or enter a description of the\nchanges your bot will make:') if summary_message == '': summary_message = default_summary_message @@ -628,16 +630,16 @@ try: fix = fixes.fixes[fix] except KeyError: - wikipedia.output(u'Available predefined fixes are: %s' + pywikibot.output(u'Available predefined fixes are: %s' % fixes.fixes.keys()) return - if 'regex' in fix: + if "regex" in fix: regex = fix['regex'] - if 'msg' in fix: - editSummary = wikipedia.translate(wikipedia.getSite(), fix['msg']) - if 'exceptions' in fix: + if "msg" in fix: + editSummary = pywikibot.translate(pywikibot.getSite(), fix['msg']) + if "exceptions" in fix: exceptions = fix['exceptions'] - if 'nocase' in fix: + if "nocase" in fix: caseInsensitive = fix['nocase'] replacements = fix['replacements']
@@ -692,14 +694,14 @@ LIMIT 200""" % (whereClause, exceptClause) gen = pagegenerators.MySQLPageGenerator(query) elif PageTitles: - pages = [wikipedia.Page(wikipedia.getSite(), PageTitle) + pages = [pywikibot.Page(pywikibot.getSite(), PageTitle) for PageTitle in PageTitles] gen = iter(pages)
gen = genFactory.getCombinedGenerator(gen) if not gen: # syntax error, show help text from the top of this file - wikipedia.showHelp('replace') + pywikibot.showHelp('replace') return if xmlFilename: # XML parsing can be quite slow, so use smaller batches and @@ -711,8 +713,9 @@ bot = ReplaceRobot(preloadingGen, replacements, exceptions, acceptall, allowoverlap, recursive, add_cat, sleep, editSummary) bot.run()
+ if __name__ == "__main__": try: main() finally: - wikipedia.stopme() + pywikibot.stopme()
Modified: trunk/pywikipedia/solve_disambiguation.py =================================================================== --- trunk/pywikipedia/solve_disambiguation.py 2009-10-29 15:29:31 UTC (rev 7564) +++ trunk/pywikipedia/solve_disambiguation.py 2009-10-29 15:33:46 UTC (rev 7565) @@ -70,6 +70,7 @@ # (C) Daniel Herding, 2004 # (C) Andre Engels, 2003-2004 # (C) WikiWichtel, 2004 +# (C) Pywikipedia team, 2003-2009 # __version__='$Id$' # @@ -81,7 +82,8 @@ import re, sys, codecs
# Application specific imports -import wikipedia, pagegenerators, editarticle +import wikipedia as pywikibot +import pagegenerators, editarticle
# Summary message when working on disambiguation pages msg = { @@ -418,23 +420,23 @@ def __iter__(self): # TODO: start yielding before all referring pages have been found refs = [page for page in self.disambPage.getReferences(follow_redirects = False, withTemplateInclusion = False)] - wikipedia.output(u"Found %d references." % len(refs)) + pywikibot.output(u"Found %d references." % len(refs)) # Remove ignorables if self.disambPage.site().family.name in ignore_title and self.disambPage.site().lang in ignore_title[self.disambPage.site().family.name]: for ig in ignore_title[self.disambPage.site().family.name][self.disambPage.site().lang]: for i in range(len(refs)-1, -1, -1): if re.match(ig, refs[i].title()): - if wikipedia.verbose: - wikipedia.output('Ignoring page %s' + if pywikibot.verbose: + pywikibot.output('Ignoring page %s' % refs[i].title()) del refs[i] elif self.primaryIgnoreManager.isIgnored(refs[i]): - #wikipedia.output('Ignoring page %s because it was skipped before' % refs[i].title()) + #pywikibot.output('Ignoring page %s because it was skipped before' % refs[i].title()) del refs[i] if len(refs) < self.minimum: - wikipedia.output(u"Found only %d pages to work on; skipping." % len(refs)) + pywikibot.output(u"Found only %d pages to work on; skipping." % len(refs)) return - wikipedia.output(u"Will work on %d pages." % len(refs)) + pywikibot.output(u"Will work on %d pages." % len(refs)) for ref in refs: yield ref
@@ -449,7 +451,7 @@ self.enabled = enabled
self.ignorelist = [] - filename = wikipedia.config.datafilepath('disambiguations', + filename = pywikibot.config.datafilepath('disambiguations', self.disambPage.titleForFilename() + '.txt') try: # The file is stored in the disambiguation/ subdir. @@ -472,7 +474,7 @@ def ignore(self, refPage): if self.enabled: # Skip this occurence next time. - filename = wikipedia.config.datafilepath('disambiguations', + filename = pywikibot.config.datafilepath('disambiguations', self.disambPage.urlname() + '.txt') try: # Open file for appending. If none exists yet, create a new one. @@ -516,7 +518,7 @@ self.main_only = main_only self.minimum = minimum
- self.mysite = wikipedia.getSite() + self.mysite = pywikibot.getSite() self.mylang = self.mysite.language() self.comment = None
@@ -547,7 +549,7 @@ list = u'\n' for i in range(len(self.alternatives)): list += (u"%3i - %s\n" % (i, self.alternatives[i])) - wikipedia.output(list) + pywikibot.output(list)
def setupRegexes(self): # compile regular expressions @@ -569,8 +571,8 @@ def treat(self, refPage, disambPage): """ Parameters: - disambPage - The disambiguation page or redirect we don't want anything - to link on + disambPage - The disambiguation page or redirect we don't want + anything to link to refPage - A page linking to disambPage Returns False if the user pressed q to completely quit the program. Otherwise, returns True. @@ -585,22 +587,22 @@ text=refPage.get(throttle=False) ignoreReason = self.checkContents(text) if ignoreReason: - wikipedia.output('\n\nSkipping %s because it contains %s.\n\n' % (refPage.title(), ignoreReason)) + pywikibot.output('\n\nSkipping %s because it contains %s.\n\n' % (refPage.title(), ignoreReason)) else: include = True - except wikipedia.IsRedirectPage: - wikipedia.output(u'%s is a redirect to %s' % (refPage.title(), disambPage.title())) + except pywikibot.IsRedirectPage: + pywikibot.output(u'%s is a redirect to %s' % (refPage.title(), disambPage.title())) if disambPage.isRedirectPage(): target = self.alternatives[0] - choice = wikipedia.inputChoice(u'Do you want to make redirect %s point to %s?' % (refPage.title(), target), ['yes', 'no'], ['y', 'N'], 'N') + choice = pywikibot.inputChoice(u'Do you want to make redirect %s point to %s?' % (refPage.title(), target), ['yes', 'no'], ['y', 'N'], 'N') if choice == 'y': redir_text = '#%s [[%s]]' % (self.mysite.redirect(default=True), target) try: refPage.put_async(redir_text,comment=self.comment) - except wikipedia.PageNotSaved, error: - wikipedia.output(u'Page not saved: %s' % error.args) + except pywikibot.PageNotSaved, error: + pywikibot.output(u'Page not saved: %s' % error.args) else: - choice = wikipedia.inputChoice(u'Do you want to work on pages linking to %s?' % refPage.title(), ['yes', 'no', 'change redirect'], ['y', 'N', 'c'], 'N') + choice = pywikibot.inputChoice(u'Do you want to work on pages linking to %s?' 
% refPage.title(), ['yes', 'no', 'change redirect'], ['y', 'N', 'c'], 'N') if choice == 'y': gen = ReferringPageGeneratorWithIgnore(refPage, self.primary) preloadingGen = pagegenerators.PreloadingGenerator(gen) @@ -611,8 +613,8 @@ elif choice == 'c': text=refPage.get(throttle=False,get_redirect=True) include = "redirect" - except wikipedia.NoPage: - wikipedia.output(u'Page [[%s]] does not seem to exist?! Skipping.' % refPage.title()) + except pywikibot.NoPage: + pywikibot.output(u'Page [[%s]] does not seem to exist?! Skipping.' % refPage.title()) include = False if include in (True, "redirect"): # make a backup of the original text so we can show the changes later @@ -625,7 +627,7 @@ m = self.linkR.search(text, pos = curpos) if not m: if n == 0: - wikipedia.output(u"No changes necessary in %s" % refPage.title()) + pywikibot.output(u"No changes necessary in %s" % refPage.title()) return True else: # stop loop and save page @@ -637,9 +639,9 @@ continue else: try: - linkPage = wikipedia.Page(disambPage.site(), m.group('title')) + linkPage = pywikibot.Page(disambPage.site(), m.group('title')) # Check whether the link found is to disambPage. - except wikipedia.InvalidTitle: + except pywikibot.InvalidTitle: continue if linkPage != disambPage: continue @@ -652,23 +654,23 @@ while True: # Show the title of the page where the link was found. # Highlight the title in purple. - wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % refPage.title()) + pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % refPage.title())
# at the beginning of the link, start red color. # at the end of the link, reset the color to default - wikipedia.output(text[max(0, m.start() - context) : m.start()] + '\03{lightred}' + text[m.start() : m.end()] + '\03{default}' + text[m.end() : m.end() + context]) + pywikibot.output(text[max(0, m.start() - context) : m.start()] + '\03{lightred}' + text[m.start() : m.end()] + '\03{default}' + text[m.end() : m.end() + context])
if not self.always: if edited: - choice = wikipedia.input(u"Option (#, r#, s=skip link, e=edit page, n=next page, u=unlink, q=quit\n" + choice = pywikibot.input(u"Option (#, r#, s=skip link, e=edit page, n=next page, u=unlink, q=quit\n" " m=more context, l=list, a=add new, x=save in this form):") else: - choice = wikipedia.input(u"Option (#, r#, s=skip link, e=edit page, n=next page, u=unlink, q=quit\n" + choice = pywikibot.input(u"Option (#, r#, s=skip link, e=edit page, n=next page, u=unlink, q=quit\n" " m=more context, d=show disambiguation page, l=list, a=add new):") else: choice = self.always if choice in ['a', 'A']: - newAlternative = wikipedia.input(u'New alternative:') + newAlternative = pywikibot.input(u'New alternative:') self.alternatives.append(newAlternative) self.listAlternatives() elif choice in ['e', 'E']: @@ -749,19 +751,21 @@ try: choice=int(choice) except ValueError: - wikipedia.output(u"Unknown option") - # step back to ask the user again what to do with the current link + pywikibot.output(u"Unknown option") + # step back to ask the user again what to do with the + # current link curpos -= 1 continue if choice >= len(self.alternatives) or choice < 0: - wikipedia.output(u"Choice out of range. Please select a number between 0 and %i." % (len(self.alternatives) - 1)) + pywikibot.output(u"Choice out of range. Please select a number between 0 and %i." % (len(self.alternatives) - 1)) # show list of possible choices self.listAlternatives() - # step back to ask the user again what to do with the current link + # step back to ask the user again what to do with the + # current link curpos -= 1 continue new_page_title = self.alternatives[choice] - repPl = wikipedia.Page(disambPage.site(), new_page_title) + repPl = pywikibot.Page(disambPage.site(), new_page_title) if (new_page_title[0].isupper()) or (link_text[0].isupper()): new_page_title = repPl.title() else: @@ -781,21 +785,21 @@ text = text[:m.start()] + newlink + text[m.end():] continue
- wikipedia.output(text[max(0,m.start()-30):m.end()+30]) + pywikibot.output(text[max(0,m.start()-30):m.end()+30]) if text == original_text: - wikipedia.output(u'\nNo changes have been made:\n') + pywikibot.output(u'\nNo changes have been made:\n') else: - wikipedia.output(u'\nThe following changes have been made:\n') - wikipedia.showDiff(original_text, text) - wikipedia.output(u'') + pywikibot.output(u'\nThe following changes have been made:\n') + pywikibot.showDiff(original_text, text) + pywikibot.output(u'') # save the page self.setSummaryMessage(disambPage, new_targets, unlink) try: refPage.put_async(text,comment=self.comment) - except wikipedia.LockedPage: - wikipedia.output(u'Page not saved: page is locked') - except wikipedia.PageNotSaved, error: - wikipedia.output(u'Page not saved: %s' % error.args) + except pywikibot.LockedPage: + pywikibot.output(u'Page not saved: page is locked') + except pywikibot.PageNotSaved, error: + pywikibot.output(u'Page not saved: %s' % error.args) return True
def findAlternatives(self, disambPage): @@ -807,11 +811,11 @@ baseTerm = template[1][1] disambTitle = primary_topic_format[self.mylang] % baseTerm try: - disambPage2 = wikipedia.Page(self.mysite, disambTitle) + disambPage2 = pywikibot.Page(self.mysite, disambTitle) links = disambPage2.linkedPages() links = [correctcap(l,disambPage2.get()) for l in links] - except wikipedia.NoPage: - wikipedia.output(u"No page at %s, using redirect target." % disambTitle) + except pywikibot.NoPage: + pywikibot.output(u"No page at %s, using redirect target." % disambTitle) links = disambPage.linkedPages()[:1] links = [correctcap(l,disambPage.get(get_redirect = True)) for l in links] self.alternatives += links @@ -819,42 +823,42 @@ try: target = disambPage.getRedirectTarget().title() self.alternatives.append(target) - except wikipedia.NoPage: - wikipedia.output(u"The specified page was not found.") - user_input = wikipedia.input(u"""\ + except pywikibot.NoPage: + pywikibot.output(u"The specified page was not found.") + user_input = pywikibot.input(u"""\ Please enter the name of the page where the redirect should have pointed at, or press enter to quit:""") if user_input == "": sys.exit(1) else: self.alternatives.append(user_input) - except wikipedia.IsNotRedirectPage: - wikipedia.output( + except pywikibot.IsNotRedirectPage: + pywikibot.output( u"The specified page is not a redirect. Skipping.") return False elif self.getAlternatives: try: if self.primary: try: - disambPage2 = wikipedia.Page(self.mysite, + disambPage2 = pywikibot.Page(self.mysite, primary_topic_format[self.mylang] % disambPage.title() ) links = disambPage2.linkedPages() links = [correctcap(l,disambPage2.get()) for l in links] - except wikipedia.NoPage: - wikipedia.output(u"Page does not exist, using the first link in page %s." % disambPage.title()) + except pywikibot.NoPage: + pywikibot.output(u"Page does not exist, using the first link in page %s." 
% disambPage.title()) links = disambPage.linkedPages()[:1] links = [correctcap(l,disambPage.get()) for l in links] else: try: links = disambPage.linkedPages() links = [correctcap(l,disambPage.get()) for l in links] - except wikipedia.NoPage: - wikipedia.output(u"Page does not exist, skipping.") + except pywikibot.NoPage: + pywikibot.output(u"Page does not exist, skipping.") return False - except wikipedia.IsRedirectPage: - wikipedia.output(u"Page is a redirect, skipping.") + except pywikibot.IsRedirectPage: + pywikibot.output(u"Page is a redirect, skipping.") return False self.alternatives += links return True @@ -868,32 +872,32 @@ targets = targets[:-2]
if not targets: - targets = wikipedia.translate(self.mysite, unknown_msg) + targets = pywikibot.translate(self.mysite, unknown_msg)
# first check whether user has customized the edit comment - if self.mysite.family.name in wikipedia.config.disambiguation_comment and self.mylang in wikipedia.config.disambiguation_comment[self.mysite.family.name]: + if self.mysite.family.name in pywikibot.config.disambiguation_comment and self.mylang in pywikibot.config.disambiguation_comment[self.mysite.family.name]: try: - self.comment = wikipedia.translate(self.mysite, - wikipedia.config.disambiguation_comment[ + self.comment = pywikibot.translate(self.mysite, + pywikibot.config.disambiguation_comment[ self.mysite.family.name] ) % (disambPage.title(), targets) #Backwards compatibility, type error probably caused by too many arguments for format string except TypeError: - self.comment = wikipedia.translate(self.mysite, - wikipedia.config.disambiguation_comment[ + self.comment = pywikibot.translate(self.mysite, + pywikibot.config.disambiguation_comment[ self.mysite.family.name] ) % disambPage.title() elif disambPage.isRedirectPage(): # when working on redirects, there's another summary message if unlink and not new_targets: - self.comment = wikipedia.translate(self.mysite, msg_redir_unlink) % disambPage.title() + self.comment = pywikibot.translate(self.mysite, msg_redir_unlink) % disambPage.title() else: - self.comment = wikipedia.translate(self.mysite, msg_redir) % (disambPage.title(), targets) + self.comment = pywikibot.translate(self.mysite, msg_redir) % (disambPage.title(), targets) else: if unlink and not new_targets: - self.comment = wikipedia.translate(self.mysite, msg_unlink) % disambPage.title() + self.comment = pywikibot.translate(self.mysite, msg_unlink) % disambPage.title() else: - self.comment = wikipedia.translate(self.mysite, msg) % (disambPage.title(), targets) + self.comment = pywikibot.translate(self.mysite, msg) % (disambPage.title(), targets)
def run(self): if self.main_only: @@ -912,7 +916,7 @@
self.makeAlternativesUnique() # sort possible choices - if wikipedia.config.sort_ignore_case: + if pywikibot.config.sort_ignore_case: self.alternatives.sort(lambda x,y: cmp(x.lower(), y.lower())) else: self.alternatives.sort() @@ -948,7 +952,7 @@ ignoreCase = False minimum = 0
- for arg in wikipedia.handleArgs(): + for arg in pywikibot.handleArgs(): if arg.startswith('-primary:'): primary = True getAlternatives = False @@ -964,12 +968,12 @@ generator = pagegenerators.TextfilePageGenerator(filename = arg[6:]) elif arg.startswith('-pos:'): if arg[5]!=':': - mysite = wikipedia.getSite() - page = wikipedia.Page(mysite, arg[5:]) + mysite = pywikibot.getSite() + page = pywikibot.Page(mysite, arg[5:]) if page.exists(): alternatives.append(page.title()) else: - answer = wikipedia.inputChoice(u'Possibility %s does not actually exist. Use it anyway?' + answer = pywikibot.inputChoice(u'Possibility %s does not actually exist. Use it anyway?' % page.title(), ['yes', 'no'], ['y', 'N'], 'N') if answer == 'y': alternatives.append(page.title()) @@ -984,17 +988,17 @@ elif arg.startswith('-start'): try: if len(arg) <= len('-start:'): - generator = pagegenerators.CategorizedPageGenerator(wikipedia.getSite().disambcategory()) + generator = pagegenerators.CategorizedPageGenerator(pywikibot.getSite().disambcategory()) else: - generator = pagegenerators.CategorizedPageGenerator(wikipedia.getSite().disambcategory(), start = arg[7:]) + generator = pagegenerators.CategorizedPageGenerator(pywikibot.getSite().disambcategory(), start = arg[7:]) generator = pagegenerators.NamespaceFilterPageGenerator(generator, [0]) - except wikipedia.NoPage: + except pywikibot.NoPage: print "Disambiguation category for your wiki is not known." raise elif arg.startswith("-"): print "Unrecognized command line argument: %s" % arg # show help text and exit - wikipedia.showHelp() + pywikibot.showHelp() else: pageTitle.append(arg)
@@ -1002,14 +1006,14 @@ # connect the title's parts with spaces if pageTitle != []: pageTitle = ' '.join(pageTitle) - page = wikipedia.Page(wikipedia.getSite(), pageTitle) + page = pywikibot.Page(pywikibot.getSite(), pageTitle) generator = iter([page])
# if no disambiguation pages was given as an argument, and none was # read from a file, query the user if not generator: - pageTitle = wikipedia.input(u'On which disambiguation page do you want to work?') - page = wikipedia.Page(wikipedia.getSite(), pageTitle) + pageTitle = pywikibot.input(u'On which disambiguation page do you want to work?') + page = pywikibot.Page(pywikibot.getSite(), pageTitle) generator = iter([page])
bot = DisambiguationRobot(always, alternatives, getAlternatives, generator, primary, main_only, minimum = minimum) @@ -1021,4 +1025,4 @@ try: main() finally: - wikipedia.stopme() + pywikibot.stopme()
Modified: trunk/pywikipedia/titletranslate.py =================================================================== --- trunk/pywikipedia/titletranslate.py 2009-10-29 15:29:31 UTC (rev 7564) +++ trunk/pywikipedia/titletranslate.py 2009-10-29 15:33:46 UTC (rev 7565) @@ -9,7 +9,8 @@ # import re
-import wikipedia, date +import wikipedia as pywikibot +import date
def translate(page, hints = None, auto = True, removebrackets = False): """ @@ -53,12 +54,12 @@ for newcode in codes: if newcode in site.languages(): if newcode != site.language(): - x = wikipedia.Page(site.getSite(code=newcode), newname) + x = pywikibot.Page(site.getSite(code=newcode), newname) if x not in result: result.append(x) else: - if wikipedia.verbose: - wikipedia.output(u"Ignoring unknown language code %s"%newcode) + if pywikibot.verbose: + pywikibot.output(u"Ignoring unknown language code %s"%newcode)
# Autotranslate dates into all other languages, the rest will come from existing interwiki links. if auto: @@ -66,7 +67,7 @@ dictName, value = date.getAutoFormat( page.site().language(), page.title() ) if dictName: if not (dictName == 'yearsBC' and page.site().language() in date.maxyearBC and value > date.maxyearBC[page.site().language()]) or (dictName == 'yearsAD' and page.site().language() in date.maxyearAD and value > date.maxyearAD[page.site().language()]): - wikipedia.output(u'TitleTranslate: %s was recognized as %s with value %d' % (page.title(),dictName,value)) + pywikibot.output(u'TitleTranslate: %s was recognized as %s with value %d' % (page.title(),dictName,value)) for entryLang, entry in date.formats[dictName].iteritems(): if entryLang != page.site().language(): if dictName == 'yearsBC' and entryLang in date.maxyearBC and value > date.maxyearBC[entryLang]: @@ -75,7 +76,7 @@ pass else: newname = entry(value) - x = wikipedia.Page( wikipedia.getSite(code=entryLang, fam=site.family), newname ) + x = pywikibot.Page( pywikibot.getSite(code=entryLang, fam=site.family), newname ) if x not in result: result.append(x) # add new page return result @@ -91,11 +92,11 @@ """ result = []
- wikipedia.output( u'getting poisoned links for %s' % pl.title() ) + pywikibot.output( u'getting poisoned links for %s' % pl.title() )
dictName, value = date.getAutoFormat( pl.site().language(), pl.title() ) if dictName is not None: - wikipedia.output( u'date found in %s' % dictName ) + pywikibot.output( u'date found in %s' % dictName )
# errors in year BC if dictName in date.bcFormats: