Revision: 7719
Author:   xqt
Date:     2009-11-30 14:51:16 +0000 (Mon, 30 Nov 2009)
Log Message:
-----------
basic.py: updated from the rewrite branch; comment lines wrapped to a
maximum of 80 characters.
Modified Paths:
--------------
    trunk/pywikipedia/basic.py
    trunk/pywikipedia/interwiki.py
    trunk/pywikipedia/pagegenerators.py
    trunk/pywikipedia/redirect.py
Modified: trunk/pywikipedia/basic.py
===================================================================
--- trunk/pywikipedia/basic.py	2009-11-30 11:56:24 UTC (rev 7718)
+++ trunk/pywikipedia/basic.py	2009-11-30 14:51:16 UTC (rev 7719)
@@ -9,14 +9,14 @@
 &params;

--debug           If given, doesn't do any real changes, but only shows
-                  what would have been changed.
+-dry              If given, doesn't do any real changes, but only shows
+                  what would have been changed.
 All other parameters will be regarded as part of the title of a single
 page, and the bot will only work on that single page.
 """
 __version__ = '$Id$'
-import wikipedia
+import wikipedia as pywikibot
 import pagegenerators
 # This is required for the text that is shown when you run this script
@@ -43,20 +43,20 @@
         'zh': u'機器人:編輯.....',
     }

-    def __init__(self, generator, debug):
+    def __init__(self, generator, dry):
         """
         Constructor. Parameters:
             * generator - The page generator that determines on which pages
                           to work on.
-            * debug - If True, doesn't do any real changes, but only shows
+            * dry   - If True, doesn't do any real changes, but only shows
                       what would have been changed.
         """
         self.generator = generator
-        self.debug = debug
+        self.dry = dry
+        # Set the edit summary message
+        self.summary = pywikibot.translate(pywikibot.getSite(), self.msg)

     def run(self):
-        # Set the edit summary message
-        wikipedia.setAction(wikipedia.translate(wikipedia.getSite(), self.msg))
         for page in self.generator:
             self.treat(page)
@@ -67,11 +67,11 @@
         try:
             # Load the page
             text = page.get()
-        except wikipedia.NoPage:
-            wikipedia.output(u"Page %s does not exist; skipping." % page.aslink())
+        except pywikibot.NoPage:
+            pywikibot.output(u"Page %s does not exist; skipping." % page.aslink())
             return
-        except wikipedia.IsRedirectPage:
-            wikipedia.output(u"Page %s is a redirect; skipping." % page.aslink())
+        except pywikibot.IsRedirectPage:
+            pywikibot.output(u"Page %s is a redirect; skipping." % page.aslink())
             return
         ################################################################
@@ -86,21 +86,21 @@
         if text != page.get():
             # Show the title of the page we're working on.
             # Highlight the title in purple.
-            wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
+            pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
             # show what was changed
-            wikipedia.showDiff(page.get(), text)
-            if not self.debug:
-                choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No'], ['y', 'N'], 'N')
+            pywikibot.showDiff(page.get(), text)
+            if not self.dry:
+                choice = pywikibot.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No'], ['y', 'N'], 'N')
                 if choice == 'y':
                     try:
                         # Save the page
-                        page.put(text)
-                    except wikipedia.LockedPage:
-                        wikipedia.output(u"Page %s is locked; skipping." % page.aslink())
-                    except wikipedia.EditConflict:
-                        wikipedia.output(u'Skipping %s because of edit conflict' % (page.title()))
-                    except wikipedia.SpamfilterError, error:
-                        wikipedia.output(u'Cannot change %s because of spam blacklist entry %s' % (page.title(), error.url))
+                        page.put(text, comment=self.summary)
+                    except pywikibot.LockedPage:
+                        pywikibot.output(u"Page %s is locked; skipping." % page.aslink())
+                    except pywikibot.EditConflict:
+                        pywikibot.output(u'Skipping %s because of edit conflict' % (page.title()))
+                    except pywikibot.SpamfilterError, error:
+                        pywikibot.output(u'Cannot change %s because of spam blacklist entry %s' % (page.title(), error.url))

 def main():
@@ -113,14 +113,14 @@
     # This temporary array is used to read the page title if one single
     # page to work on is specified by the arguments.
     pageTitleParts = []
-    # If debug is True, doesn't do any real changes, but only show
+    # If dry is True, doesn't do any real changes, but only show
     # what would have been changed.
-    debug = False
+    dry = False

     # Parse command line arguments
-    for arg in wikipedia.handleArgs():
-        if arg.startswith("-debug"):
-            debug = True
+    for arg in pywikibot.handleArgs():
+        if arg.startswith("-dry"):
+            dry = True
         else:
             # check if a standard argument like
             # -start:XYZ or -ref:Asdf was given.
@@ -130,7 +130,7 @@
     if pageTitleParts != []:
         # We will only work on a single page.
         pageTitle = ' '.join(pageTitleParts)
-        page = wikipedia.Page(wikipedia.getSite(), pageTitle)
+        page = pywikibot.Page(pywikibot.getSite(), pageTitle)
         gen = iter([page])
     if not gen:
@@ -139,13 +139,13 @@
         # The preloading generator is responsible for downloading multiple
         # pages from the wiki simultaneously.
         gen = pagegenerators.PreloadingGenerator(gen)
-        bot = BasicBot(gen, debug)
+        bot = BasicBot(gen, dry)
         bot.run()
     else:
-        wikipedia.showHelp()
+        pywikibot.showHelp()

 if __name__ == "__main__":
     try:
         main()
     finally:
-        wikipedia.stopme()
+        pywikibot.stopme()
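
For reference, the refactored class can be driven directly. Below is a
minimal sketch (not part of the committed diff) that runs the bot in dry
mode; it uses only names that appear in the changes above, and the page
title u'Sandbox' is a hypothetical placeholder:

    # Sketch: preview BasicBot's edits on one page without saving.
    import wikipedia as pywikibot
    import pagegenerators
    from basic import BasicBot

    def demo_dry_run():
        # Build a one-page generator and preload it, as main() does.
        page = pywikibot.Page(pywikibot.getSite(), u'Sandbox')
        gen = pagegenerators.PreloadingGenerator(iter([page]))
        # The second argument is the renamed 'dry' flag: show the
        # diff, but never call page.put().
        bot = BasicBot(gen, True)
        bot.run()

    if __name__ == "__main__":
        try:
            demo_dry_run()
        finally:
            pywikibot.stopme()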
Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py	2009-11-30 11:56:24 UTC (rev 7718)
+++ trunk/pywikipedia/interwiki.py	2009-11-30 14:51:16 UTC (rev 7719)
@@ -74,21 +74,22 @@
                    that amount of pages and then stop. This is only useful in
                    combination with -start. The default is not to stop.

-    -until:        used as -until:title, specifies that the robot should process
-                   pages in wiki default sort order up to, and including, "title"
-                   and then stop. This is only useful in combination with -start.
-                   The default is not to stop.
+    -until:        used as -until:title, specifies that the robot should
+                   process pages in wiki default sort order up to, and
+                   including, "title" and then stop. This is only useful in
+                   combination with -start. The default is not to stop.
                    Note: do not specify a namespace, even if -start has one.

-    -bracket       only work on pages that have (in the home language) parenthesis
-                   in their title. All other pages are skipped.
+    -bracket       only work on pages that have (in the home language)
+                   parenthesis in their title. All other pages are skipped.
                    (note: without ending colon)

     -skipfile:     used as -skipfile:filename, skip all links mentioned in
                    the given file. This does not work with -number!

     -skipauto      use to skip all pages that can be translated automatically,
-                   like dates, centuries, months, etc. (note: without ending colon)
+                   like dates, centuries, months, etc.
+                   (note: without ending colon)

     -lack:         used as -lack:xx with xx a language code: only work on pages
                    without links to language xx. You can also add a number nn
@@ -102,7 +103,8 @@
                    useful if you specify a single page to work on. If no
                    text is given after the second ':', the name of the page
                    itself is used as the title for the hint, unless the
-                   -hintnobracket command line option (see there) is also selected.
+                   -hintnobracket command line option (see there) is also
+                   selected.

 There are some special hints, trying a number of languages at once:
@@ -222,15 +224,15 @@
 The following arguments are only important for users who have accounts for
 multiple languages, and specify on which sites the bot should modify pages:

-    -localonly     only work on the local wiki, not on other wikis in the family
-                   I have a login at. (note: without ending colon)
+    -localonly     only work on the local wiki, not on other wikis in the
+                   family I have a login at. (note: without ending colon)

     -limittwo      only update two pages - one in the local wiki (if logged-in)
                    and one in the top available one.
                    For example, if the local page has links to de and fr,
                    this option will make sure that only local and de: (larger)
-                   site is updated. This option is useful to quickly set two way
-                   links without updating all of wiki's sites.
+                   site is updated. This option is useful to quickly set two
+                   way links without updating all of wiki's sites.
                    (note: without ending colon)

     -whenneeded    works like limittwo, but other languages are changed in the
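
To make the reflowed option texts concrete, here is a hypothetical
invocation combining several of the flags documented above (the start
and stop titles are placeholders):

    python interwiki.py -start:A -until:Az -skipauto -localonly

This processes pages in default sort order from "A" up to and including
"Az", skips pages that can be translated automatically, and edits only
the local wiki.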
Modified: trunk/pywikipedia/pagegenerators.py
===================================================================
--- trunk/pywikipedia/pagegenerators.py	2009-11-30 11:56:24 UTC (rev 7718)
+++ trunk/pywikipedia/pagegenerators.py	2009-11-30 14:51:16 UTC (rev 7719)
@@ -135,9 +135,10 @@
 -gorandom         Specifies that the robot should starting at the random pages
                   returned by [[Special:Random]].

--recentchanges   Work on new and edited pages returned by [[Special:Recentchanges]].
-                  Can also be given as "-recentchanges:n" where n is the number
-                  of pages to be returned, else 100 pages are returned.
+-recentchanges    Work on new and edited pages returned by
+                  [[Special:Recentchanges]]. Can also be given as
+                  "-recentchanges:n" where n is the number of pages to be
+                  returned, else 100 pages are returned.

 -redirectonly     Work on redirect pages only, not their target pages.
                   The robot goes alphabetically through all redirect pages
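
As a usage illustration (a hypothetical command line; the page count is
arbitrary), the rewrapped -recentchanges option can be combined with the
-dry flag introduced in basic.py above, since basic.py accepts the
standard page generator arguments:

    python basic.py -recentchanges:60 -dry

Here -recentchanges:60 overrides the default of 100 returned pages, and
-dry previews the changes without saving them.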
Modified: trunk/pywikipedia/redirect.py
===================================================================
--- trunk/pywikipedia/redirect.py	2009-11-30 11:56:24 UTC (rev 7718)
+++ trunk/pywikipedia/redirect.py	2009-11-30 14:51:16 UTC (rev 7719)
@@ -19,18 +19,18 @@
 -xml              Retrieve information from a local XML dump
                   (http://download.wikimedia.org). Argument can also be given as
                   "-xml:filename.xml". Cannot be used with -api or -moves.
-                  If neither of -xml -api -moves is given, info will be loaded from
-                  a special page of the live wiki.
+                  If neither of -xml -api -moves is given, info will be loaded
+                  from a special page of the live wiki.

 -api              Retrieve information from the wiki via MediaWikis application
                   program interface (API). Cannot be used with -xml or -moves.
-                  If neither of -xml -api -moves is given, info will be loaded from
-                  a special page of the live wiki.
+                  If neither of -xml -api -moves is given, info will be loaded
+                  from a special page of the live wiki.

 -moves            Use the page move log to find double-redirect candidates. Only
-                  works with action "double", does not work with either -xml, or -api.
-                  If neither of -xml -api -moves is given, info will be loaded from
-                  a special page of the live wiki.
+                  works with action "double", does not work with either -xml, or
+                  -api. If neither of -xml -api -moves is given, info will be
+                  loaded from a special page of the live wiki.

 -namespace:n      Namespace to process. Works only with an XML dump, or the
                   API interface. Can be given multiple times, for several
                   namespaces.
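
For illustration, two hypothetical invocations of the options documented
above (the dump filename is a placeholder):

    python redirect.py double -moves
    python redirect.py double -xml:dump.xml -namespace:0

The first collects double-redirect candidates from the page move log;
the second reads them from a local XML dump, restricted to namespace 0.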