Revision: 8629
Author: xqt
Date: 2010-10-09 16:11:46 +0000 (Sat, 09 Oct 2010)
Log Message:
-----------
import wikipedia as pywikibot for merging to rewrite
Modified Paths:
--------------
trunk/pywikipedia/copyright.py
trunk/pywikipedia/lonelypages.py
trunk/pywikipedia/makecat.py
trunk/pywikipedia/misspelling.py
trunk/pywikipedia/movepages.py
trunk/pywikipedia/pagefromfile.py
trunk/pywikipedia/pageimport.py
trunk/pywikipedia/piper.py
trunk/pywikipedia/protect.py
trunk/pywikipedia/rciw.py
trunk/pywikipedia/rcsort.py
Modified: trunk/pywikipedia/copyright.py
===================================================================
--- trunk/pywikipedia/copyright.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/copyright.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -638,7 +638,7 @@
consecutive = False
continue
n_query += 1
- #wikipedia.output(search_words)
+ #pywikibot.output(search_words)
if config.copyright_max_query_for_page and n_query > config.copyright_max_query_for_page:
warn(u"Max query limit for page reached")
return output
@@ -845,7 +845,7 @@
def get_results(self, query, numresults = 10):
result_list = list()
query = re.sub("[()\"<>]", "", query)
- # wikipedia.output(query)
+ # pywikibot.output(query)
if config.copyright_google:
self.soap('google', query, result_list)
if config.copyright_yahoo:
Modified: trunk/pywikipedia/lonelypages.py
===================================================================
--- trunk/pywikipedia/lonelypages.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/lonelypages.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -46,7 +46,8 @@
__version__ = '$Id: lonelypages.py,v 1.0 2007/12/28 19.16.00 filnik Exp$'
#
-import wikipedia, pagegenerators
+import wikipedia as pywikibot
+import pagegenerators
import re
# This is required for the text that is shown when you run this script
@@ -115,20 +116,20 @@
always = False # Check variable for always
disambigPage = None # If no disambigPage given, not use it.
# Arguments!
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg.startswith('-enable'):
if len(arg) == 7:
- enablePage = wikipedia.input(u'Would you like to check if the bot should run or not?')
+ enablePage = pywikibot.input(u'Would you like to check if the bot should run or not?')
else:
enablePage = arg[8:]
if arg.startswith('-disambig'):
if len(arg) == 9:
- disambigPage = wikipedia.input(u'In which page should the bot save the disambig pages?')
+ disambigPage = pywikibot.input(u'In which page should the bot save the disambig pages?')
else:
disambigPage = arg[10:]
elif arg.startswith('-limit'):
if len(arg) == 6:
- limit = int(wikipedia.input(u'How many pages do you want to check?'))
+ limit = int(pywikibot.input(u'How many pages do you want to check?'))
else:
limit = int(arg[7:])
elif arg.startswith('-newpages'):
@@ -136,14 +137,14 @@
nwlimit = 50 # Default: 50 pages
else:
nwlimit = int(arg[10:])
- generator = wikipedia.getSite().newpages(number = nwlimit)
+ generator = pywikibot.getSite().newpages(number = nwlimit)
nwpages = True
elif arg == '-always':
always = True
else:
genFactory.handleArg(arg)
# Retrive the site
- wikiSite = wikipedia.getSite()
+ wikiSite = pywikibot.getSite()
if not generator:
generator = genFactory.getCombinedGenerator()
@@ -152,49 +153,49 @@
if not generator:
generator = wikiSite.lonelypages(repeat = True, number = limit)
# Take the configurations according to our project
- comment = wikipedia.translate(wikiSite, commento)
- commentdisambig = wikipedia.translate(wikiSite, commenttodisambig)
- template = wikipedia.translate(wikiSite, Template)
- exception = wikipedia.translate(wikiSite, exception)
+ comment = pywikibot.translate(wikiSite, commento)
+ commentdisambig = pywikibot.translate(wikiSite, commenttodisambig)
+ template = pywikibot.translate(wikiSite, Template)
+ exception = pywikibot.translate(wikiSite, exception)
# EnablePage part
if enablePage != None:
# Define the Page Object
- enable = wikipedia.Page(wikiSite, enablePage)
+ enable = pywikibot.Page(wikiSite, enablePage)
# Loading the page's data
try:
getenable = enable.get()
- except wikipedia.NoPage:
- wikipedia.output(u"%s doesn't esist, I use the page as if it was blank!" % enable.title())
+ except pywikibot.NoPage:
+ pywikibot.output(u"%s doesn't esist, I use the page as if it was blank!" % enable.title())
getenable = ''
except wikiepedia.IsRedirect:
- wikipedia.output(u"%s is a redirect, skip!" % enable.title())
+ pywikibot.output(u"%s is a redirect, skip!" % enable.title())
getenable = ''
# If the enable page is set to disable, turn off the bot
# (useful when the bot is run on a server)
if getenable != 'enable':
- wikipedia.output('The bot is disabled')
+ pywikibot.output('The bot is disabled')
return
# DisambigPage part
if disambigPage != None:
- disambigpage = wikipedia.Page(wikiSite, disambigPage)
+ disambigpage = pywikibot.Page(wikiSite, disambigPage)
try:
disambigtext = disambigpage.get()
- except wikipedia.NoPage:
- wikipedia.output(u"%s doesn't esist, skip!" % disambigpage.title())
+ except pywikibot.NoPage:
+ pywikibot.output(u"%s doesn't esist, skip!" % disambigpage.title())
disambigtext = ''
except wikiepedia.IsRedirect:
- wikipedia.output(u"%s is a redirect, don't use it!" % disambigpage.title())
+ pywikibot.output(u"%s is a redirect, don't use it!" % disambigpage.title())
disambigPage = None
# Main Loop
for page in generator:
if nwpages == True:
page = page[0] # The newpages generator returns a tuple, not a Page object.
- wikipedia.output(u"Checking %s..." % page.title())
+ pywikibot.output(u"Checking %s..." % page.title())
# Used to skip the first pages in test phase...
#if page.title()[0] in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q']:
#continue
if page.isRedirectPage(): # If redirect, skip!
- wikipedia.output(u'%s is a redirect! Skip...' % page.title())
+ pywikibot.output(u'%s is a redirect! Skip...' % page.title())
continue
# refs is not a list, it's a generator while resList... is a list, yes.
refs = page.getReferences()
@@ -202,27 +203,27 @@
for j in refs:
if j == None:
# We have to find out why the function returns that value
- wikipedia.output(u'Error: 1 --> Skip page')
+ pywikibot.output(u'Error: 1 --> Skip page')
continue
refsList.append(j)
# This isn't possible with a generator
if refsList != []:
- wikipedia.output(u"%s isn't orphan! Skip..." % page.title())
+ pywikibot.output(u"%s isn't orphan! Skip..." % page.title())
continue
# Never understood how a list can turn in "None", but it happened :-S
elif refsList == None:
# We have to find out why the function returns that value
- wikipedia.output(u'Error: 2 --> Skip page')
+ pywikibot.output(u'Error: 2 --> Skip page')
continue
else:
# Ok, no refs, no redirect... let's check if there's already the template
try:
oldtxt = page.get()
- except wikipedia.NoPage:
- wikipedia.output(u"%s doesn't exist! Skip..." % page.title())
+ except pywikibot.NoPage:
+ pywikibot.output(u"%s doesn't exist! Skip..." % page.title())
continue
- except wikipedia.IsRedirectPage:
- wikipedia.output(u"%s is a redirect! Skip..." % page.title())
+ except pywikibot.IsRedirectPage:
+ pywikibot.output(u"%s is a redirect! Skip..." % page.title())
continue
# I've used a loop in a loop. If I use continue in the second loop, it won't do anything
# in the first. So let's create a variable to avoid this problem.
@@ -231,7 +232,7 @@
res = re.findall(regexp, oldtxt.lower())
# Found a template! Let's skip the page!
if res != []:
- wikipedia.output(u'Your regex has found something in %s, skipping...' % page.title())
+ pywikibot.output(u'Your regex has found something in %s, skipping...' % page.title())
Find = True
break
# Skip the page..
@@ -239,34 +240,34 @@
continue
# Is the page a disambig?
if page.isDisambig() and disambigPage != None:
- wikipedia.output(u'%s is a disambig page, report..' % page.title())
+ pywikibot.output(u'%s is a disambig page, report..' % page.title())
if not page.title().lower() in disambigtext.lower():
disambigtext = u"%s\n*[[%s]]" % (disambigtext, page.title())
disambigpage.put(disambigtext, commentdisambig)
continue
# Is the page a disambig but there's not disambigPage? Skip!
elif page.isDisambig():
- wikipedia.output(u'%s is a disambig page, skip...' % page.title())
+ pywikibot.output(u'%s is a disambig page, skip...' % page.title())
continue
else:
# Ok, the page need the template. Let's put it there!
newtxt = u"%s\n%s" % (template, oldtxt) # Adding the template in the text
- wikipedia.output(u"\t\t>>> %s <<<" % page.title()) # Showing the title
- wikipedia.showDiff(oldtxt, newtxt) # Showing the changes
+ pywikibot.output(u"\t\t>>> %s <<<" % page.title()) # Showing the title
+ pywikibot.showDiff(oldtxt, newtxt) # Showing the changes
choice = 'y' # Default answer
if not always:
- choice = wikipedia.inputChoice(u'Orphan page found, shall I add the template?', ['Yes', 'No', 'All'], ['y', 'n', 'a'])
+ choice = pywikibot.inputChoice(u'Orphan page found, shall I add the template?', ['Yes', 'No', 'All'], ['y', 'n', 'a'])
if choice == 'a':
always = True
choice = 'y'
if choice == 'y':
try:
page.put(newtxt, comment)
- except wikipedia.EditConflict:
- wikipedia.output(u'Edit Conflict! Skip...')
+ except pywikibot.EditConflict:
+ pywikibot.output(u'Edit Conflict! Skip...')
continue
if __name__ == '__main__':
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
Modified: trunk/pywikipedia/makecat.py
===================================================================
--- trunk/pywikipedia/makecat.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/makecat.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -32,14 +32,17 @@
"""
# (C) Andre Engels, 2004
+# (C) Pywikipedia bot team 2005-2010
#
# Distributed under the terms of the MIT license.
#
-
__version__='$Id$'
+#
import sys, codecs, re
-import wikipedia, date, catlib
+import date
+import wikipedia as pywikibot
+import catlib
msg={
'ar':u'إنشاء أو تحديث التصنيف:',
@@ -60,12 +63,12 @@
def rawtoclean(c):
#Given the 'raw' category, provides the 'clean' category
c2 = c.title().split('|')[0]
- return wikipedia.Page(mysite,c2)
+ return pywikibot.Page(mysite,c2)
def isdate(s):
"""returns true iff s is a date or year
"""
- dict,val = date.getAutoFormat( wikipedia.getSite().language(), s )
+ dict,val = date.getAutoFormat( pywikibot.getSite().language(), s )
return dict is not None
def needcheck(pl):
@@ -82,15 +85,16 @@
def include(pl,checklinks=True,realinclude=True,linkterm=None):
cl = checklinks
if linkterm:
- actualworkingcat = catlib.Category(mysite,workingcat.title(),sortKey=linkterm)
+ actualworkingcat = catlib.Category(mysite,workingcat.title(),
+ sortKey=linkterm)
else:
actualworkingcat = workingcat
if realinclude:
try:
text = pl.get()
- except wikipedia.NoPage:
+ except pywikibot.NoPage:
pass
- except wikipedia.IsRedirectPage:
+ except pywikibot.IsRedirectPage:
cl = True
pass
else:
@@ -103,7 +107,8 @@
catlib.change_category(pl,c,actualworkingcat)
break
else:
- pl.put(wikipedia.replaceCategoryLinks(text, cats + [actualworkingcat]))
+ pl.put(pywikibot.replaceCategoryLinks(
+ text, cats + [actualworkingcat]))
if cl:
if checkforward:
for page2 in pl.linkedPages():
@@ -130,8 +135,8 @@
checked[pl2]=pl2
return
ctoshow = 500
- wikipedia.output(u'')
- wikipedia.output(u"==%s=="%pl.title())
+ pywikibot.output(u'')
+ pywikibot.output(u"==%s=="%pl.title())
while 1:
answer = raw_input("y(es)/n(o)/i(gnore)/(o)ther options? ")
if answer=='y':
@@ -143,7 +148,8 @@
if answer=='z':
if pl.exists():
if not pl.isRedirectPage():
- linkterm = wikipedia.input(u"In what manner should it be alphabetized?")
+ linkterm = pywikibot.input(
+ u"In what manner should it be alphabetized?")
include(pl,linkterm=linkterm)
break
include(pl)
@@ -155,42 +161,46 @@
exclude(pl,real_exclude=False)
break
elif answer=='o':
- wikipedia.output(u"t: Give the beginning of the text of the page")
- wikipedia.output(u"z: Add under another title (as [[Category|Title]])")
- wikipedia.output(u"x: Add the page, but do not check links to and from it")
- wikipedia.output(u"c: Do not add the page, but do check links")
- wikipedia.output(u"a: Add another page")
- wikipedia.output(u"l: Give a list of the pages to check")
+ pywikibot.output(u"t: Give the beginning of the text of the page")
+ pywikibot.output(
+ u"z: Add under another title (as [[Category|Title]])")
+ pywikibot.output(
+ u"x: Add the page, but do not check links to and from it")
+ pywikibot.output(u"c: Do not add the page, but do check links")
+ pywikibot.output(u"a: Add another page")
+ pywikibot.output(u"l: Give a list of the pages to check")
elif answer=='a':
pagetitle = raw_input("Specify page to add:")
- page=wikipedia.Page(wikipedia.getSite(),pagetitle)
+ page=pywikibot.Page(pywikibot.getSite(),pagetitle)
if not page in checked.keys():
include(page)
elif answer=='x':
if pl.exists():
if pl.isRedirectPage():
- wikipedia.output(u"Redirect page. Will be included normally.")
+ pywikibot.output(
+ u"Redirect page. Will be included normally.")
include(pl,realinclude=False)
else:
include(pl,checklinks=False)
else:
- wikipedia.output(u"Page does not exist; not added.")
+ pywikibot.output(u"Page does not exist; not added.")
exclude(pl,real_exclude=False)
break
elif answer=='l':
- wikipedia.output(u"Number of pages still to check: %s"%len(tocheck))
- wikipedia.output(u"Pages to be checked:")
- wikipedia.output(u" - ".join(page.title() for page in tocheck))
- wikipedia.output(u"==%s=="%pl.title())
+ pywikibot.output(u"Number of pages still to check: %s"
+ % len(tocheck))
+ pywikibot.output(u"Pages to be checked:")
+ pywikibot.output(u" - ".join(page.title() for page in tocheck))
+ pywikibot.output(u"==%s=="%pl.title())
elif answer=='t':
- wikipedia.output(u"==%s=="%pl.title())
+ pywikibot.output(u"==%s=="%pl.title())
try:
- wikipedia.output(u''+pl.get(get_redirect=True)[0:ctoshow])
- except wikipedia.NoPage:
- wikipedia.output(u"Page does not exist.")
+ pywikibot.output(u''+pl.get(get_redirect=True)[0:ctoshow])
+ except pywikibot.NoPage:
+ pywikibot.output(u"Page does not exist.")
ctoshow += 500
else:
- wikipedia.output(u"Not understood.")
+ pywikibot.output(u"Not understood.")
try:
checked = {}
@@ -202,7 +212,7 @@
main = True
workingcatname = []
tocheck = []
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg.startswith('-nodate'):
skipdates = True
elif arg.startswith('-forward'):
@@ -221,11 +231,14 @@
workingcatname = raw_input("Which page to start with? ")
else:
workingcatname = ' '.join(workingcatname)
- mysite = wikipedia.getSite()
- wikipedia.setAction(wikipedia.translate(mysite,msg) + ' ' + workingcatname)
- workingcat = catlib.Category(mysite,mysite.category_namespace()+':'+workingcatname)
- filename = wikipedia.config.datafilepath('category',
- wikipedia.UnicodeToAsciiHtml(workingcatname) + '_exclude.txt')
+ mysite = pywikibot.getSite()
+ pywikibot.setAction(pywikibot.translate(mysite,msg) + ' ' + workingcatname)
+ workingcat = catlib.Category(mysite,
+ u'%s:%s'
+ % (mysite.category_namespace(),
+ workingcatname))
+ filename = pywikibot.config.datafilepath('category',
+ pywikibot.UnicodeToAsciiHtml(workingcatname) +'_exclude.txt')
try:
f = codecs.open(filename, 'r', encoding = mysite.encoding())
for line in f.readlines():
@@ -236,7 +249,7 @@
except IndexError:
pass
exclude(line,real_exclude=False)
- pl = wikipedia.Page(mysite,line)
+ pl = pywikibot.Page(mysite,line)
checked[pl] = pl
f.close()
excludefile = codecs.open(filename, 'a', encoding = mysite.encoding())
@@ -245,12 +258,12 @@
excludefile = codecs.open(filename, 'w', encoding = mysite.encoding())
try:
parentcats = workingcat.categories()
- except wikipedia.Error:
+ except pywikibot.Error:
parentcats = []
# Do not include articles already in subcats; only checking direct subcats
subcatlist = workingcat.subcategoriesList()
if subcatlist:
- wikipedia.getall(mysite,subcatlist)
+ pywikibot.getall(mysite,subcatlist)
for cat in subcatlist:
list = cat.articlesList()
for page in list:
@@ -260,16 +273,18 @@
if list:
for pl in list:
checked[pl]=pl
- wikipedia.getall(mysite,list)
+ pywikibot.getall(mysite,list)
for pl in list:
include(pl)
else:
- wikipedia.output(u"Category %s does not exist or is empty. Which page to start with?"%workingcatname)
- answer = wikipedia.input(u"(Default is [[%s]]):"%workingcatname)
+ pywikibot.output(
+ u"Category %s does not exist or is empty. Which page to start with?"
+ % workingcatname)
+ answer = pywikibot.input(u"(Default is [[%s]]):" % workingcatname)
if not answer:
answer = workingcatname
- wikipedia.output(u''+answer)
- pl = wikipedia.Page(mysite,answer)
+ pywikibot.output(u''+answer)
+ pl = pywikibot.Page(mysite,answer)
tocheck = []
checked[pl] = pl
include(pl)
@@ -280,7 +295,7 @@
loaded = len(tocheck)
else:
loaded = 50
- wikipedia.getall(mysite,tocheck[:loaded])
+ pywikibot.getall(mysite,tocheck[:loaded])
if not checkbroken:
if not tocheck[0].exists():
pass
@@ -290,8 +305,9 @@
asktoadd(tocheck[0])
tocheck = tocheck[1:]
loaded -= 1
+
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
try:
excludefile.close()
except:
Modified: trunk/pywikipedia/misspelling.py
===================================================================
--- trunk/pywikipedia/misspelling.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/misspelling.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -13,9 +13,9 @@
e.g. "l" or "m".
-start:XY goes through all misspellings in the category on your wiki
- that is defined (to the bot) as the category containing misspelling
- pages, starting at XY. If the -start argument is not given, it starts
- at the beginning.
+ that is defined (to the bot) as the category containing
+ misspelling pages, starting at XY. If the -start argument is not
+ given, it starts at the beginning.
-main only check pages in the main namespace, not in the talk,
wikipedia, user, etc. namespaces.
@@ -26,7 +26,9 @@
#
# Distributed under the terms of the MIT license.
-import wikipedia, solve_disambiguation, catlib, pagegenerators
+import wikipedia as pywikibot
+import catlib, pagegenerators
+import solve_disambiguation
class MisspellingRobot(solve_disambiguation.DisambiguationRobot):
@@ -64,19 +66,27 @@
}
def __init__(self, always, firstPageTitle, main_only):
- solve_disambiguation.DisambiguationRobot.__init__(self, always, [], True, self.createPageGenerator(firstPageTitle), False, main_only)
+ solve_disambiguation.DisambiguationRobot.__init__(
+ self, always, [], True, self.createPageGenerator(firstPageTitle),
+ False, main_only)
def createPageGenerator(self, firstPageTitle):
- if wikipedia.getSite().lang in self.misspellingCategory:
- misspellingCategoryTitle = self.misspellingCategory[wikipedia.getSite().lang]
- misspellingCategory = catlib.Category(wikipedia.getSite(), misspellingCategoryTitle)
- generator = pagegenerators.CategorizedPageGenerator(misspellingCategory, recurse = True, start = firstPageTitle)
+ if pywikibot.getSite().lang in self.misspellingCategory:
+ misspellingCategoryTitle = self.misspellingCategory[pywikibot.getSite().lang]
+ misspellingCategory = catlib.Category(pywikibot.getSite(),
+ misspellingCategoryTitle)
+ generator = pagegenerators.CategorizedPageGenerator(
+ misspellingCategory, recurse = True, start=firstPageTitle)
else:
- misspellingTemplateName = 'Template:%s' % self.misspellingTemplate[wikipedia.getSite().lang]
- misspellingTemplate = wikipedia.Page(wikipedia.getSite(), misspellingTemplateName)
- generator = pagegenerators.ReferringPageGenerator(misspellingTemplate, onlyTemplateInclusion = True)
+ misspellingTemplateName = 'Template:%s' \
+ % self.misspellingTemplate[pywikibot.getSite().lang]
+ misspellingTemplate = pywikibot.Page(pywikibot.getSite(),
+ misspellingTemplateName)
+ generator = pagegenerators.ReferringPageGenerator(
+ misspellingTemplate, onlyTemplateInclusion=True)
if firstPageTitle:
- wikipedia.output(u'-start parameter unsupported on this wiki because there is no category for misspellings.')
+ pywikibot.output(
+ u'-start parameter unsupported on this wiki because there is no category for misspellings.')
preloadingGen = pagegenerators.PreloadingGenerator(generator)
return preloadingGen
@@ -87,7 +97,7 @@
return True
elif self.misspellingTemplate[disambPage.site().lang] is not None:
for templateName, params in disambPage.templatesWithParams():
- if templateName in self.misspellingTemplate[wikipedia.getSite().lang]:
+ if templateName in self.misspellingTemplate[pywikibot.getSite().lang]:
# The correct spelling is in the last paramter.
correctSpelling = params[-1]
# On de.wikipedia, there are some cases where the
@@ -106,8 +116,9 @@
def setSummaryMessage(self, disambPage, new_targets, unlink):
# TODO: setSummaryMessage() in solve_disambiguation now has parameters
# new_targets and unlink. Make use of these here.
- comment = wikipedia.translate(self.mysite, self.msg) % disambPage.title()
- wikipedia.setAction(comment)
+ comment = pywikibot.translate(self.mysite, self.msg) \
+ % disambPage.title()
+ pywikibot.setAction(comment)
def main():
# the option that's always selected when the bot wonders what to do with
@@ -116,12 +127,13 @@
main_only = False
firstPageTitle = None
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg.startswith('-always:'):
always = arg[8:]
elif arg.startswith('-start'):
if len(arg) == 6:
- firstPageTitle = wikipedia.input(u'At which page do you want to start?')
+ firstPageTitle = pywikibot.input(
+ u'At which page do you want to start?')
else:
firstPageTitle = arg[7:]
elif arg == '-main':
@@ -135,4 +147,4 @@
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
Modified: trunk/pywikipedia/movepages.py
===================================================================
--- trunk/pywikipedia/movepages.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/movepages.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -13,8 +13,8 @@
-noredirect Leave no redirect behind.
--prefix Move pages by adding a namespace prefix to the names of the pages.
- (Will remove the old namespace prefix if any)
+-prefix Move pages by adding a namespace prefix to the names of the
+ pages. (Will remove the old namespace prefix if any)
Argument can also be given as "-prefix:namespace:".
-always Don't prompt to make changes, just do them.
@@ -22,11 +22,10 @@
-skipredirects Skip redirect pages (Warning: increases server load)
-summary Prompt for a custom summary, bypassing the predefined message
- texts.
- Argument can also be given as "-summary:XYZ".
+ texts. Argument can also be given as "-summary:XYZ".
--pairs Read pairs of file names from a file. The file must be in a format
- [[frompage]] [[topage]] [[frompage]] [[topage]] ...
+-pairs Read pairs of file names from a file. The file must be in a
+ format [[frompage]] [[topage]] [[frompage]] [[topage]] ...
Argument can also be given as "-pairs:filename"
"""
@@ -40,8 +39,9 @@
__version__='$Id$'
-import wikipedia, pagegenerators
import sys, re
+import wikipedia as pywikibot
+import pagegenerators
# This is required for the text that is shown when you run this script
# with the parameter -help.
@@ -72,7 +72,8 @@
class MovePagesBot:
- def __init__(self, generator, addprefix, noredirect, always, skipredirects, summary):
+ def __init__(self, generator, addprefix, noredirect, always, skipredirects,
+ summary):
self.generator = generator
self.addprefix = addprefix
self.noredirect = noredirect
@@ -84,30 +85,34 @@
try:
msg = self.summary
if not msg:
- msg = wikipedia.translate(wikipedia.getSite(), summary)
- wikipedia.output(u'Moving page %s to [[%s]]' % (page.aslink(), newPageTitle))
- page.move(newPageTitle, msg, throttle=True, leaveRedirect=self.noredirect)
- except wikipedia.NoPage:
- wikipedia.output(u'Page %s does not exist!' % page.title())
- except wikipedia.IsRedirectPage:
- wikipedia.output(u'Page %s is a redirect; skipping.' % page.title())
- except wikipedia.LockedPage:
- wikipedia.output(u'Page %s is locked!' % page.title())
- except wikipedia.PageNotSaved, e:
+ msg = pywikibot.translate(pywikibot.getSite(), summary)
+ pywikibot.output(u'Moving page %s to [[%s]]'
+ % (page.title(asLink=True), newPageTitle))
+ page.move(newPageTitle, msg, throttle=True,
+ leaveRedirect=self.noredirect)
+ except pywikibot.NoPage:
+ pywikibot.output(u'Page %s does not exist!' % page.title())
+ except pywikibot.IsRedirectPage:
+ pywikibot.output(u'Page %s is a redirect; skipping.' % page.title())
+ except pywikibot.LockedPage:
+ pywikibot.output(u'Page %s is locked!' % page.title())
+ except pywikibot.PageNotSaved, e:
#target newPageTitle already exists
- wikipedia.output(e.message)
+ pywikibot.output(e.message)
def treat(self, page):
# Show the title of the page we're working on.
# Highlight the title in purple.
- wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"% page.title())
+ pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<"
+ % page.title())
if self.skipredirects and page.isRedirectPage():
- wikipedia.output(u'Page %s is a redirect; skipping.' % page.title())
+ pywikibot.output(u'Page %s is a redirect; skipping.' % page.title())
return
pagetitle = page.titleWithoutNamespace()
namesp = page.site().namespace(page.namespace())
if self.appendAll:
- newPageTitle = (u'%s%s%s' % (self.pagestart, pagetitle, self.pageend))
+ newPageTitle = (u'%s%s%s'
+ % (self.pagestart, pagetitle, self.pageend))
if not self.noNamespace and namesp:
newPageTitle = (u'%s:%s' % (namesp, newPageTitle))
elif self.regexAll:
@@ -118,7 +123,9 @@
newPageTitle = (u'%s%s' % (self.addprefix, pagetitle))
if self.addprefix or self.appendAll or self.regexAll:
if not self.always:
- choice2 = wikipedia.inputChoice(u'Change the page title to "%s"?' % newPageTitle, ['yes', 'no', 'all', 'quit'], ['y', 'n', 'a', 'q'])
+ choice2 = pywikibot.inputChoice(
+ u'Change the page title to "%s"?' % newPageTitle,
+ ['yes', 'no', 'all', 'quit'], ['y', 'n', 'a', 'q'])
if choice2 == 'y':
self.moveOne(page, newPageTitle)
elif choice2 == 'a':
@@ -133,21 +140,32 @@
else:
self.moveOne(page, newPageTitle)
else:
- choice = wikipedia.inputChoice(u'What do you want to do?', ['change page name', 'append to page name', 'use a regular expression', 'next page', 'quit'], ['c', 'a', 'r', 'n', 'q'])
+ choice = pywikibot.inputChoice(u'What do you want to do?',
+ ['change page name',
+ 'append to page name',
+ 'use a regular expression',
+ 'next page', 'quit'],
+ ['c', 'a', 'r', 'n', 'q'])
if choice == 'c':
- newPageTitle = wikipedia.input(u'New page name:')
+ newPageTitle = pywikibot.input(u'New page name:')
self.moveOne(page, newPageTitle)
elif choice == 'a':
- self.pagestart = wikipedia.input(u'Append this to the start:')
- self.pageend = wikipedia.input(u'Append this to the end:')
- newPageTitle = (u'%s%s%s' % (self.pagestart, pagetitle, self.pageend))
+ self.pagestart = pywikibot.input(u'Append this to the start:')
+ self.pageend = pywikibot.input(u'Append this to the end:')
+ newPageTitle = (u'%s%s%s'
+ % (self.pagestart, pagetitle, self.pageend))
if namesp:
- choice2 = wikipedia.inputChoice(u'Do you want to remove the namespace prefix "%s:"?' % namesp, ['yes', 'no'], ['y', 'n'])
+ choice2 = pywikibot.inputChoice(
+ u'Do you want to remove the namespace prefix "%s:"?'
+ % namesp, ['yes', 'no'], ['y', 'n'])
if choice2 == 'y':
noNamespace = True
else:
newPageTitle = (u'%s:%s' % (namesp, newPageTitle))
- choice2 = wikipedia.inputChoice(u'Change the page title to "%s"?' % newPageTitle, ['yes', 'no', 'all', 'quit'], ['y', 'n', 'a', 'q'])
+ choice2 = pywikibot.inputChoice(
+ u'Change the page title to "%s"?'
+ % newPageTitle, ['yes', 'no', 'all', 'quit'],
+ ['y', 'n', 'a', 'q'])
if choice2 == 'y':
self.moveOne(page, newPageTitle)
elif choice2 == 'a':
@@ -160,19 +178,28 @@
else:
self.treat(page)
elif choice == 'r':
- searchPattern = wikipedia.input(u'Enter the search pattern:')
- self.replacePattern = wikipedia.input(u'Enter the replace pattern:')
+ searchPattern = pywikibot.input(u'Enter the search pattern:')
+ self.replacePattern = pywikibot.input(
+ u'Enter the replace pattern:')
self.regex=re.compile(searchPattern)
if page.title() == page.titleWithoutNamespace():
- newPageTitle = self.regex.sub(self.replacePattern, page.title())
+ newPageTitle = self.regex.sub(self.replacePattern,
+ page.title())
else:
- choice2 = wikipedia.inputChoice(u'Do you want to remove the namespace prefix "%s:"?' % namesp, ['yes', 'no'], ['y', 'n'])
+ choice2 = pywikibot.inputChoice(
+ u'Do you want to remove the namespace prefix "%s:"?'
+ % namesp, ['yes', 'no'], ['y', 'n'])
if choice2 == 'y':
- newPageTitle = self.regex.sub(self.replacePattern, page.titleWithoutNamespace())
+ newPageTitle = self.regex.sub(
+ self.replacePattern, page.titleWithoutNamespace())
noNamespace = True
else:
- newPageTitle = self.regex.sub(self.replacePattern, page.title())
- choice2 = wikipedia.inputChoice(u'Change the page title to "%s"?' % newPageTitle, ['yes', 'no', 'all', 'quit'], ['y', 'n', 'a', 'q'])
+ newPageTitle = self.regex.sub(self.replacePattern,
+ page.title())
+ choice2 = pywikibot.inputChoice(
+ u'Change the page title to "%s"?'
+ % newPageTitle, ['yes', 'no', 'all', 'quit'],
+ ['y', 'n', 'a', 'q'])
if choice2 == 'y':
self.moveOne(page, newPageTitle)
elif choice2 == 'a':
@@ -215,10 +242,11 @@
# to work on.
genFactory = pagegenerators.GeneratorFactory()
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg.startswith('-pairs'):
if len(arg) == len('-pairs'):
- filename = wikipedia.input(u'Enter the name of the file containing pairs:')
+ filename = pywikibot.input(
+ u'Enter the name of the file containing pairs:')
else:
filename = arg[len('-pairs:'):]
oldName1 = None
@@ -229,7 +257,8 @@
else:
oldName1 = page.title()
if oldName1:
- wikipedia.output(u'WARNING: file %s contains odd number of links' % filename)
+ pywikibot.output(
+ u'WARNING: file %s contains odd number of links' % filename)
elif arg == '-noredirect':
noredirect = False
elif arg == '-always':
@@ -238,45 +267,47 @@
skipredirects = True
elif arg.startswith('-from:'):
if oldName:
- wikipedia.output(u'WARNING: -from:%s without -to:' % oldName)
+ pywikibot.output(u'WARNING: -from:%s without -to:' % oldName)
oldName = arg[len('-from:'):]
elif arg.startswith('-to:'):
if oldName:
fromToPairs.append([oldName, arg[len('-to:'):]])
oldName = None
else:
- wikipedia.output(u'WARNING: %s without -from' % arg)
+ pywikibot.output(u'WARNING: %s without -from' % arg)
elif arg.startswith('-prefix'):
if len(arg) == len('-prefix'):
- prefix = wikipedia.input(u'Enter the prefix:')
+ prefix = pywikibot.input(u'Enter the prefix:')
else:
prefix = arg[8:]
elif arg.startswith('-summary'):
if len(arg) == len('-summary'):
- summary = wikipedia.input(u'Enter the summary:')
+ summary = pywikibot.input(u'Enter the summary:')
else:
summary = arg[9:]
else:
genFactory.handleArg(arg)
if oldName:
- wikipedia.output(u'WARNING: -from:%s without -to:' % oldName)
+ pywikibot.output(u'WARNING: -from:%s without -to:' % oldName)
for pair in fromToPairs:
- page = wikipedia.Page(wikipedia.getSite(), pair[0])
- bot = MovePagesBot(None, prefix, noredirect, always, skipredirects, summary)
+ page = pywikibot.Page(pywikibot.getSite(), pair[0])
+ bot = MovePagesBot(None, prefix, noredirect, always, skipredirects,
+ summary)
bot.moveOne(page, pair[1])
if not gen:
gen = genFactory.getCombinedGenerator()
if gen:
preloadingGen = pagegenerators.PreloadingGenerator(gen)
- bot = MovePagesBot(preloadingGen, prefix, noredirect, always, skipredirects, summary)
+ bot = MovePagesBot(preloadingGen, prefix, noredirect, always,
+ skipredirects, summary)
bot.run()
elif not fromToPairs:
- wikipedia.showHelp('movepages')
+ pywikibot.showHelp('movepages')
if __name__ == '__main__':
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
Modified: trunk/pywikipedia/pagefromfile.py
===================================================================
--- trunk/pywikipedia/pagefromfile.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/pagefromfile.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -46,11 +46,12 @@
#
# Distributed under the terms of the MIT license.
#
-
__version__='$Id$'
+#
import re, codecs
-import wikipedia, config
+import wikipedia as pywikibot
+import config
class NoTitle(Exception):
"""No title found"""
@@ -138,7 +139,8 @@
'zh': u'機器人: 覆寫已存在的文字',
}
- def __init__(self, reader, force, append, summary, minor, autosummary, debug):
+ def __init__(self, reader, force, append, summary, minor, autosummary,
+ debug):
self.reader = reader
self.force = force
self.append = append
@@ -152,47 +154,54 @@
self.put(title, contents)
def put(self, title, contents):
- mysite = wikipedia.getSite()
+ mysite = pywikibot.getSite()
- page = wikipedia.Page(mysite, title)
+ page = pywikibot.Page(mysite, title)
# Show the title of the page we're working on.
# Highlight the title in purple.
- wikipedia.output(u">>> \03{lightpurple}%s\03{default} <<<" % page.title())
+ pywikibot.output(u">>> \03{lightpurple}%s\03{default} <<<"
+ % page.title())
if self.summary:
comment = self.summary
else:
- comment = wikipedia.translate(mysite, self.msg)
+ comment = pywikibot.translate(mysite, self.msg)
- comment_top = comment + " - " + wikipedia.translate(mysite, self.msg_top)
- comment_bottom = comment + " - " + wikipedia.translate(mysite, self.msg_bottom)
- comment_force = comment + " *** " + wikipedia.translate(mysite, self.msg_force) + " ***"
+ comment_top = comment + " - " + pywikibot.translate(mysite,
+ self.msg_top)
+ comment_bottom = comment + " - " + pywikibot.translate(mysite,
+ self.msg_bottom)
+ comment_force = comment + " *** " + pywikibot.translate(mysite,
+ self.msg_force) + " ***"
# Remove trailing newlines (cause troubles when creating redirects)
contents = re.sub('^[\r\n]*','', contents)
if page.exists():
if self.append == "Top":
- wikipedia.output(u"Page %s already exists, appending on top!" % title)
+ pywikibot.output(u"Page %s already exists, appending on top!"
+ % title)
contents = contents + page.get()
comment = comment_top
elif self.append == "Bottom":
- wikipedia.output(u"Page %s already exists, appending on bottom!" % title)
+ pywikibot.output(u"Page %s already exists, appending on bottom!"
+ % title)
contents = page.get() + contents
comment = comment_bottom
elif self.force:
- wikipedia.output(u"Page %s already exists, ***overwriting!" % title)
+ pywikibot.output(u"Page %s already exists, ***overwriting!"
+ % title)
comment = comment_force
else:
- wikipedia.output(u"Page %s already exists, not adding!" % title)
+ pywikibot.output(u"Page %s already exists, not adding!" % title)
return
else:
if self.autosummary:
comment = ''
- wikipedia.setAction('')
+ pywikibot.setAction('')
if self.dry:
- wikipedia.output("*** Dry mode ***\n" + \
+ pywikibot.output("*** Dry mode ***\n" + \
"\03{lightpurple}title\03{default}: " + title + "\n" + \
"\03{lightpurple}contents\03{default}:\n" + contents + "\n" \
"\03{lightpurple}comment\03{default}: " + comment + "\n")
@@ -200,12 +209,14 @@
try:
page.put(contents, comment = comment, minorEdit = self.minor)
- except wikipedia.LockedPage:
- wikipedia.output(u"Page %s is locked; skipping." % title)
- except wikipedia.EditConflict:
- wikipedia.output(u'Skipping %s because of edit conflict' % title)
- except wikipedia.SpamfilterError, error:
- wikipedia.output(u'Cannot change %s because of spam blacklist entry %s' % (title, error.url))
+ except pywikibot.LockedPage:
+ pywikibot.output(u"Page %s is locked; skipping." % title)
+ except pywikibot.EditConflict:
+ pywikibot.output(u'Skipping %s because of edit conflict' % title)
+ except pywikibot.SpamfilterError, error:
+ pywikibot.output(
+ u'Cannot change %s because of spam blacklist entry %s'
+ % (title, error.url))
class PageFromFileReader:
"""
@@ -213,7 +224,8 @@
The run() method yields a (title, contents) tuple for each found page.
"""
- def __init__(self, filename, pageStartMarker, pageEndMarker, titleStartMarker, titleEndMarker, include, notitle):
+ def __init__(self, filename, pageStartMarker, pageEndMarker,
+ titleStartMarker, titleEndMarker, include, notitle):
self.filename = filename
self.pageStartMarker = pageStartMarker
self.pageEndMarker = pageEndMarker
@@ -223,9 +235,10 @@
self.notitle = notitle
def run(self):
- wikipedia.output('Reading \'%s\'...' % self.filename)
+ pywikibot.output('Reading \'%s\'...' % self.filename)
try:
- f = codecs.open(self.filename, 'r', encoding = config.textfile_encoding)
+ f = codecs.open(self.filename, 'r',
+ encoding=config.textfile_encoding)
except IOError, err:
print err
return
@@ -238,12 +251,12 @@
length, title, contents = self.findpage(text[position:])
except AttributeError:
if not length:
- wikipedia.output(u'\nStart or end marker not found.')
+ pywikibot.output(u'\nStart or end marker not found.')
else:
- wikipedia.output(u'End of file.')
+ pywikibot.output(u'End of file.')
break
except NoTitle, err:
- wikipedia.output(u'\nNo title found - skipping a page.')
+ pywikibot.output(u'\nNo title found - skipping a page.')
position += err.offset
continue
@@ -270,9 +283,9 @@
return location.end(), title, contents
def main():
- # Adapt these to the file you are using. 'pageStartMarker' and 'pageEndMarker' are
- # the beginning and end of each entry. Take text that should be included
- # and does not occur elsewhere in the text.
+ # Adapt these to the file you are using. 'pageStartMarker' and
+ # 'pageEndMarker' are the beginning and end of each entry. Take text that
+ # should be included and does not occur elsewhere in the text.
# TODO: make config variables for these.
filename = "dict.txt"
@@ -290,7 +303,7 @@
autosummary = False
dry = False
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg.startswith("-start:"):
pageStartMarker = arg[7:]
elif arg.startswith("-end:"):
@@ -323,10 +336,10 @@
elif arg == '-autosummary':
autosummary = True
else:
- wikipedia.output(u"Disregarding unknown argument %s." % arg)
+ pywikibot.output(u"Disregarding unknown argument %s." % arg)
- reader = PageFromFileReader(filename, pageStartMarker, pageEndMarker, titleStartMarker, titleEndMarker, include, notitle)
-
+ reader = PageFromFileReader(filename, pageStartMarker, pageEndMarker,
+ titleStartMarker, titleEndMarker, include, notitle)
bot = PageFromFileRobot(reader, force, append, summary, minor, autosummary, dry)
bot.run()
@@ -334,4 +347,4 @@
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
Modified: trunk/pywikipedia/pageimport.py
===================================================================
--- trunk/pywikipedia/pageimport.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/pageimport.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -21,39 +21,41 @@
"""
#
# (C) Filnik, 2007
+# (C) Pywikipedia bot team, 2008-2010
#
# Greetings:
# Lorenzo Paulatto and Misza13
#
# Distributed under the terms of the MIT license.
#
-
__version__ = '$Id$'
+#
import urllib
-import wikipedia, login, config
+import wikipedia as pywikibot
+import login, config
-class Importer(wikipedia.Page):
+class Importer(pywikibot.Page):
def __init__(self, site):
self.importsite = site
- wikipedia.Page.__init__(self, site, 'Special:Import', None, 0)
+ pywikibot.Page.__init__(self, site, 'Special:Import', None, 0)
def Import(self, target, project = 'w', crono = '1', namespace = '', prompt = True):
"""Import the page from the wiki. Requires administrator status.
If prompt is True, asks the user if he wants to delete the page.
"""
if project == 'w':
- site = wikipedia.getSite(fam = 'wikipedia')
+ site = pywikibot.getSite(fam = 'wikipedia')
elif project == 'b':
- site = wikipedia.getSite(fam = 'wikibooks')
+ site = pywikibot.getSite(fam = 'wikibooks')
elif project == 'wikt':
- site = wikipedia.getSite(fam = 'wiktionary')
+ site = pywikibot.getSite(fam = 'wiktionary')
elif project == 's':
- site = wikipedia.getSite(fam = 'wikisource')
+ site = pywikibot.getSite(fam = 'wikisource')
elif project == 'q':
- site = wikipedia.getSite(fam = 'wikiquote')
+ site = pywikibot.getSite(fam = 'wikiquote')
else:
- site = wikipedia.getSite()
+ site = pywikibot.getSite()
# Fixing the crono value...
if crono == True:
crono = '1'
@@ -64,7 +66,7 @@
namespace == ''
answer = 'y'
if prompt:
- answer = wikipedia.inputChoice(u'Do you want to import %s?' % target, ['Yes', 'No'], ['y', 'N'], 'N')
+ answer = pywikibot.inputChoice(u'Do you want to import %s?' % target, ['Yes', 'No'], ['y', 'N'], 'N')
if answer == 'y':
host = self.site().hostname()
address = self.site().path() + '?title=%s&action=submit' % self.urlname()
@@ -87,14 +89,14 @@
}
response, data = self.site().postForm(address, predata, sysop = True)
if data:
- wikipedia.output(u'Page imported, checking...')
- if wikipedia.Page(self.importsite, target).exists():
- wikipedia.output(u'Import success!')
+ pywikibot.output(u'Page imported, checking...')
+ if pywikibot.Page(self.importsite, target).exists():
+ pywikibot.output(u'Import success!')
return True
else:
- wikipedia.output(u'Import failed!')
+ pywikibot.output(u'Import failed!')
return False
if __name__=='__main__':
- wikipedia.output(u'This is just a module! Read the documentation and write your own script!')
- wikipedia.stopme()
+ pywikibot.output(u'This is just a module! Read the documentation and write your own script!')
+ pywikibot.stopme()
Modified: trunk/pywikipedia/piper.py
===================================================================
--- trunk/pywikipedia/piper.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/piper.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -40,13 +40,13 @@
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
+#
-import wikipedia
-import pagegenerators
-
import os
import pipes
import tempfile
+import wikipedia as pywikibot
+import pagegenerators
# This is required for the text that is shown when you run this script
# with the parameter -help.
@@ -82,7 +82,8 @@
def run(self):
# Set the edit summary message
pipes = ', '.join(self.filters)
- wikipedia.setAction(wikipedia.translate(wikipedia.getSite(), self.msg) % pipes)
+ pywikibot.setAction(pywikibot.translate(pywikibot.getSite(), self.msg)
+ % pipes)
for page in self.generator:
self.treat(page)
@@ -118,11 +119,13 @@
try:
# Load the page
text = page.get()
- except wikipedia.NoPage:
- wikipedia.output(u"Page %s does not exist; skipping." % page.aslink())
+ except pywikibot.NoPage:
+ pywikibot.output(u"Page %s does not exist; skipping."
+ % page.title(asLink=True))
return
- except wikipedia.IsRedirectPage:
- wikipedia.output(u"Page %s is a redirect; skipping." % page.aslink())
+ except pywikibot.IsRedirectPage:
+ pywikibot.output(u"Page %s is a redirect; skipping."
+ % page.title(asLink=True))
return
# Munge!
@@ -133,24 +136,31 @@
if text != page.get():
# Show the title of the page we're working on.
# Highlight the title in purple.
- wikipedia.output(u"\n\n>>> %s <<<" % page.title())
+ pywikibot.output(u"\n\n>>> %s <<<" % page.title())
# show what was changed
- wikipedia.showDiff(page.get(), text)
+ pywikibot.showDiff(page.get(), text)
if not self.dry:
if not self.always:
- choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No'], ['y', 'N'], 'N')
+ choice = pywikibot.inputChoice(
+ u'Do you want to accept these changes?',
+ ['Yes', 'No'], ['y', 'N'], 'N')
else:
choice = 'y'
if choice == 'y':
try:
# Save the page
page.put(text)
- except wikipedia.LockedPage:
- wikipedia.output(u"Page %s is locked; skipping." % page.aslink())
- except wikipedia.EditConflict:
- wikipedia.output(u'Skipping %s because of edit conflict' % (page.title()))
- except wikipedia.SpamfilterError, error:
- wikipedia.output(u'Cannot change %s because of spam blacklist entry %s' % (page.title(), error.url))
+ except pywikibot.LockedPage:
+ pywikibot.output(u"Page %s is locked; skipping."
+ % page.title(asLink=True))
+ except pywikibot.EditConflict:
+ pywikibot.output(
+ u'Skipping %s because of edit conflict'
+ % (page.title()))
+ except pywikibot.SpamfilterError, error:
+ pywikibot.output(
+ u'Cannot change %s because of spam blacklist entry %s'
+ % (page.title(), error.url))
def main():
@@ -172,7 +182,7 @@
filters = []
# Parse command line arguments
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg.startswith("-dry"):
dry = True
elif arg.startswith("-filter:"):
@@ -189,7 +199,7 @@
if pageTitleParts != []:
# We will only work on a single page.
pageTitle = ' '.join(pageTitleParts)
- page = wikipedia.Page(wikipedia.getSite(), pageTitle)
+ page = pywikibot.Page(pywikibot.getSite(), pageTitle)
gen = iter([page])
if not gen:
@@ -201,10 +211,10 @@
bot = PiperBot(gen, dry, filters, always)
bot.run()
else:
- wikipedia.showHelp()
+ pywikibot.showHelp()
if __name__ == "__main__":
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
Modified: trunk/pywikipedia/protect.py
===================================================================
--- trunk/pywikipedia/protect.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/protect.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -36,15 +36,19 @@
python protect.py -file:unprotect.txt -unprotect
"""
+#
# Written by http://it.wikisource.org/wiki/Utente:Qualc1
# Created by modifying delete.py
-__version__ = '$Id$'
-
#
+# (C) Pywikipedia bot team, 2008-2010
+#
# Distributed under the terms of the MIT license.
#
+__version__ = '$Id$'
+#
-import wikipedia, catlib
+import wikipedia as pywikibot
+import catlib
import pagegenerators
# Summary messages for protecting from a category.
@@ -121,7 +125,7 @@
"""
#Loop through everything in the page generator and (un)protect it.
for page in self.generator:
- wikipedia.output(u'Processing page %s' % page.title())
+ pywikibot.output(u'Processing page %s' % page.title())
print self.edit, self.move#, self.create
page.protect(unprotect=self.unprotect, reason=self.summary, prompt=self.always,
editcreate=self.edit, move=self.move)
@@ -131,7 +135,7 @@
def choiceProtectionLevel(operation, default):
default = default[0]
firstChar = map(lambda level: level[0], protectionLevels)
- choiceChar = wikipedia.inputChoice('Choice a protection level to %s:' % operation,
+ choiceChar = pywikibot.inputChoice('Choice a protection level to %s:' % operation,
protectionLevels, firstChar, default = default)
for level in protectionLevels:
if level.startswith(choiceChar):
@@ -157,23 +161,23 @@
defaultProtection = 'sysop'
# read command line parameters
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg == '-always':
always = True
elif arg.startswith('-file'):
if len(arg) == len('-file'):
- fileName = wikipedia.input(u'Enter name of file to protect pages from:')
+ fileName = pywikibot.input(u'Enter name of file to protect pages from:')
else:
fileName = arg[len('-file:'):]
elif arg.startswith('-summary'):
if len(arg) == len('-summary'):
- summary = wikipedia.input(u'Enter a reason for the protection:')
+ summary = pywikibot.input(u'Enter a reason for the protection:')
else:
summary = arg[len('-summary:'):]
elif arg.startswith('-cat'):
doCategory = True
if len(arg) == len('-cat'):
- pageName = wikipedia.input(u'Enter the category to protect from:')
+ pageName = pywikibot.input(u'Enter the category to protect from:')
else:
pageName = arg[len('-cat:'):]
elif arg.startswith('-nosubcats'):
@@ -181,25 +185,25 @@
elif arg.startswith('-links'):
doLinks = True
if len(arg) == len('-links'):
- pageName = wikipedia.input(u'Enter the page to protect from:')
+ pageName = pywikibot.input(u'Enter the page to protect from:')
else:
pageName = arg[len('-links:'):]
elif arg.startswith('-ref'):
doRef = True
if len(arg) == len('-ref'):
- pageName = wikipedia.input(u'Enter the page to protect from:')
+ pageName = pywikibot.input(u'Enter the page to protect from:')
else:
pageName = arg[len('-ref:'):]
elif arg.startswith('-page'):
doSinglePage = True
if len(arg) == len('-page'):
- pageName = wikipedia.input(u'Enter the page to protect:')
+ pageName = pywikibot.input(u'Enter the page to protect:')
else:
pageName = arg[len('-page:'):]
elif arg.startswith('-images'):
doImages = True
if len(arg) == len('-images'):
- pageName = wikipedia.input(u'Enter the page with the images to protect:')
+ pageName = pywikibot.input(u'Enter the page with the images to protect:')
else:
pageName = arg[len('-images:'):]
elif arg.startswith('-unprotect'):
@@ -217,40 +221,40 @@
if create not in protectionLevels:
create = choiceProtectionLevel('create', defaultProtection)
- mysite = wikipedia.getSite()
+ mysite = pywikibot.getSite()
if doSinglePage:
if not summary:
- summary = wikipedia.input(u'Enter a reason for the protection:')
- page = wikipedia.Page(mysite, pageName)
+ summary = pywikibot.input(u'Enter a reason for the protection:')
+ page = pywikibot.Page(mysite, pageName)
gen = iter([page])
elif doCategory:
if not summary:
- summary = wikipedia.translate(mysite, msg_protect_category) % pageName
+ summary = pywikibot.translate(mysite, msg_protect_category) % pageName
ns = mysite.category_namespace()
categoryPage = catlib.Category(mysite, ns + ':' + pageName)
gen = pagegenerators.CategorizedPageGenerator(categoryPage, recurse = protectSubcategories)
elif doLinks:
if not summary:
- summary = wikipedia.translate(mysite, msg_protect_links) % pageName
- linksPage = wikipedia.Page(mysite, pageName)
+ summary = pywikibot.translate(mysite, msg_protect_links) % pageName
+ linksPage = pywikibot.Page(mysite, pageName)
gen = pagegenerators.LinkedPageGenerator(linksPage)
elif doRef:
if not summary:
- summary = wikipedia.translate(mysite, msg_protect_ref) % pageName
- refPage = wikipedia.Page(mysite, pageName)
+ summary = pywikibot.translate(mysite, msg_protect_ref) % pageName
+ refPage = pywikibot.Page(mysite, pageName)
gen = pagegenerators.ReferringPageGenerator(refPage)
elif fileName:
if not summary:
- summary = wikipedia.translate(mysite, msg_simple_protect)
+ summary = pywikibot.translate(mysite, msg_simple_protect)
gen = pagegenerators.TextfilePageGenerator(fileName)
elif doImages:
if not summary:
- summary = wikipedia.translate(mysite, msg_protect_images) % pageName
- gen = pagegenerators.ImagesPageGenerator(wikipedia.Page(mysite, pageName))
+ summary = pywikibot.translate(mysite, msg_protect_images) % pageName
+ gen = pagegenerators.ImagesPageGenerator(pywikibot.Page(mysite, pageName))
if gen:
- wikipedia.setAction(summary)
+ pywikibot.setAction(summary)
# We are just protecting pages, so we have no need of using a preloading page generator
# to actually get the text of those pages.
if not edit: edit = defaultProtection
@@ -258,10 +262,10 @@
bot = ProtectionRobot(gen, summary, always, edit=edit, move=move)
bot.run()
else:
- wikipedia.showHelp(u'protect')
+ pywikibot.showHelp(u'protect')
if __name__ == "__main__":
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
Modified: trunk/pywikipedia/rciw.py
===================================================================
--- trunk/pywikipedia/rciw.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/rciw.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -16,18 +16,24 @@
Warning: experimental software, use at your own risk
"""
-__version__ = '$Id$'
# Authors: Kisbes
# http://hu.wikipedia.org/wiki/User:Kisbes
# License : GFDL
+#
+# (C) Pywikipedia bot team, 2008, 2010
+#
+# Distributed under the terms of the MIT license.
+#
+__version__ = '$Id$'
+#
-import interwiki
import threading
import re
-import wikipedia
import time
from Queue import Queue
+import wikipedia as pywikibot
+import interwiki
class IWRCBot():
def __init__(self, site, safe = True):
@@ -58,13 +64,13 @@
if name in self.processed:
return
self.processed.append(name)
- page = wikipedia.Page(self.site, name)
+ page = pywikibot.Page(self.site, name)
# the Queue has for now an unlimited size,
# it is a simple atomic append(), no need to acquire a semaphore
self.queue.put_nowait(page)
def main():
- wikipedia.output('Warning: this script can not be run manually/directly, but automatically by maintainer.py')
+ pywikibot.output('Warning: this script can not be run manually/directly, but automatically by maintainer.py')
if __name__ == "__main__":
main()
Modified: trunk/pywikipedia/rcsort.py
===================================================================
--- trunk/pywikipedia/rcsort.py 2010-10-09 05:02:29 UTC (rev 8628)
+++ trunk/pywikipedia/rcsort.py 2010-10-09 16:11:46 UTC (rev 8629)
@@ -1,17 +1,26 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-# A tool to see the recentchanges ordered by user instead of by date. This
-# is meant to be run as a CGI script.
-# Apart from the normal options of the recent changes page, you can add an option
-# ?newbies=true which will make the bot go over recently registered users only.
-# Currently only works on Dutch Wikipedia, I do intend to make it more generally
-# usable.
-# Permission has been asked to run this on the toolserver.
+"""
+A tool to see the recentchanges ordered by user instead of by date. This
+is meant to be run as a CGI script.
+Apart from the normal options of the recent changes page, you can add an option
+?newbies=true which will make the bot go over recently registered users only.
+Currently only works on Dutch Wikipedia, I do intend to make it more generally
+usable.
+Permission has been asked to run this on the toolserver.
+"""
+# (C) Pywikipedia bot team, 2007-2010
+#
+# Distributed under the terms of the MIT license.
+#
__version__ = '$Id$'
+#
import cgi
import cgitb
import re
+import wikipedia as pywikibot
+
cgitb.enable()
form = cgi.FieldStorage()
@@ -21,13 +30,12 @@
print "<html>"
print "<head>"
print '<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />'
-print '<style type="text/css" media="screen,projection">/*<![CDATA[*/ @import "http://nl.wikipedia.org/skins-1.5/monobook/main.css?59"; /*]]>*/</style>'
+print '<style type="text/css" media="screen,projection">/*<![CDATA[*/ @import "http://nl.wikipedia.org/skins-1.5/monobook/main.css?59"; /*]]>*/</style>'
print "</head>"
print "<body>"
print "<!--"
-import wikipedia
print "-->"
-mysite = wikipedia.getSite()
+mysite = pywikibot.getSite()
newbies = 'newbies' in form
@@ -63,10 +71,13 @@
count += 1
lines.append((user,count,line))
elif 'rcoptions' in line:
- print line.replace(mysite.path() + "?title=Speciaal:RecenteWijzigingen&","rcsort.py?")
+ print line.replace(mysite.path() + "?title=Speciaal:RecenteWijzigingen&",
+ "rcsort.py?")
rcoptions = True
elif newbies and 'Nieuwste' in line:
- line = line.replace(mysite.path() + "?title=Speciaal:Bijdragen&","rcsort.py?").replace("target=newbies","newbies=true")
+ line = line.replace(mysite.path() + "?title=Speciaal:Bijdragen&",
+ "rcsort.py?").replace("target=newbies",
+ "newbies=true")
if '</fieldset>' in line:
line = line[line.find('</fieldset>')+11:]
print line
@@ -80,10 +91,10 @@
if line[0] == None:
print "<h2>Gebruiker onbekend</h2>"
else:
- wikipedia.output(u"<h2>%s</h2>"%line[0],toStdout=True)
+ pywikibot.output(u"<h2>%s</h2>"%line[0],toStdout=True)
print "<ul>"
last = line[0]
- wikipedia.output(line[2].replace('href="/w','href="http://nl.wikipedia.org/w'), toStdout = True)
+ pywikibot.output(line[2].replace('href="/w','href="http://nl.wikipedia.org/w'), toStdout = True)
print
print "</ul>"
Revision: 8626
Author: xqt
Date: 2010-10-09 02:34:35 +0000 (Sat, 09 Oct 2010)
Log Message:
-----------
update from trunk r8625
Modified Paths:
--------------
branches/rewrite/scripts/interwiki.py
Modified: branches/rewrite/scripts/interwiki.py
===================================================================
--- branches/rewrite/scripts/interwiki.py 2010-10-09 01:46:16 UTC (rev 8625)
+++ branches/rewrite/scripts/interwiki.py 2010-10-09 02:34:35 UTC (rev 8626)
@@ -68,6 +68,10 @@
NOTE: For post-processing it always assumes that saving the
the pages was sucessful.
+ -summary: Set an additional action summary message for the edit. This
+ could be used for further explanations of the bot action.
+ This will only be used in non-autonomous mode.
+
Additionaly, these arguments can be used to restrict the bot to certain pages:
-namespace:n Number or name of namespace to process. The parameter can be
@@ -634,6 +638,7 @@
quiet = False
restoreAll = False
async = False
+ summary = u''
def readOptions(self, arg):
""" Read all commandline parameters for the global container """
@@ -732,6 +737,11 @@
self.quiet = True
elif arg == '-async':
self.async = True
+ elif arg.startswith('-summary'):
+ if len(arg) == 8:
+ self.summary = pywikibot.input(u'What summary do you want to use?')
+ else:
+ self.summary = arg[9:]
elif arg.startswith('-lack:'):
remainder = arg[6:].split(':')
self.lacklanguage = remainder[0]
@@ -1612,6 +1622,21 @@
bot, just before submitting a page change to the live wiki it is
checked whether we will have to wait. If that is the case, the bot will
be told to make another get request first."""
+
+ #from clean_sandbox
+ def minutesDiff(time1, time2):
+ if type(time1) is long:
+ time1 = str(time1)
+ if type(time2) is long:
+ time2 = str(time2)
+ t1 = (((int(time1[0:4]) * 12 + int(time1[4:6])) * 30 +
+ int(time1[6:8])) * 24 + int(time1[8:10])) * 60 + \
+ int(time1[10:12])
+ t2 = (((int(time2[0:4]) * 12 + int(time2[4:6])) * 30 +
+ int(time2[6:8])) * 24 + int(time2[8:10])) * 60 + \
+ int(time2[10:12])
+ return abs(t2-t1)
+
if not self.isDone():
raise "Bugcheck: finish called before done"
if not self.workonme:
@@ -1699,9 +1724,50 @@
break
else:
for (site, page) in new.iteritems():
+ # edit restriction on is-wiki
+ # http://is.wikipedia.org/wiki/Wikipediaspjall:V%C3%A9lmenni
+ # allow edits for the same conditions as -whenneeded
+ # or the last edit wasn't a bot
+ # or the last edit as 1 month ago
+ smallWikiAllowed = True
+ if globalvar.autonomous and page.site.sitename() == 'wikipedia:is':
+ old={}
+ try:
+ for mypage in new[page.site].interwiki():
+ old[mypage.site] = mypage
+ except pywikibot.NoPage:
+ pywikibot.output(u"BUG>>> %s no longer exists?"
+ % new[site].aslink(True))
+ continue
+ mods, mcomment, adding, removing, modifying \
+ = compareLanguages(old, new, insite=site)
+ #cannot create userlib.User with IP
+ smallWikiAllowed = page.isIpEdit() or \
+ len(removing) > 0 or len(old) == 0 or \
+ len(adding) + len(modifying) > 2 or \
+ len(removing) + len(modifying) == 0 and \
+ adding == [page.site]
+ if not smallWikiAllowed:
+ import userlib
+ user = userlib.User(page.site, page.userName())
+ if not 'bot' in user.groups() \
+ and not 'bot' in page.userName().lower(): #erstmal auch keine namen mit bot
+ smallWikiAllowed = True
+ else:
+ diff = minutesDiff(page.editTime(),
+ time.strftime("%Y%m%d%H%M%S",
+ time.gmtime()))
+ if diff > 30*24*60:
+ smallWikiAllowed = True
+ else:
+ pywikibot.output(
+u'NOTE: number of edits are restricted at %s'
+ % page.site.sitename())
+
# if we have an account for this site
if site.family.name in config.usernames \
- and site.lang in config.usernames[site.family.name]:
+ and site.lang in config.usernames[site.family.name] \
+ and smallWikiAllowed:
# Try to do the changes
try:
if self.replaceLinks(page, new, bot):
@@ -2238,7 +2304,8 @@
mcomment = mods = u''
- if len(adding) + len(removing) + len(modifying) <= 3:
+ if not globalvar.summary and \
+ len(adding) + len(removing) + len(modifying) <= 3:
# Use an extended format for the string linking to all added pages.
fmt = lambda d, site: unicode(d[site])
else:
@@ -2260,7 +2327,7 @@
if modifying:
mods += (sep + mod + colon + comma.join([fmt(new, x) for x in modifying]))
if mods:
- mcomment = head + mods
+ mcomment = head + globalvar.summary + mods
return mods, mcomment, adding, removing, modifying
def botMayEdit (page):
@@ -2371,6 +2438,12 @@
if not genFactory.handleArg(arg):
singlePageTitle.append(arg)
+ # Do not use additional summary with autonomous mode
+ if globalvar.autonomous:
+ globalvar.summary = u''
+ elif globalvar.summary:
+ globalvar.summary += u'; '
+
# ensure that we don't try to change main page
try:
site = pywikibot.getSite()
Revision: 8625
Author: xqt
Date: 2010-10-09 01:46:16 +0000 (Sat, 09 Oct 2010)
Log Message:
-----------
additional summary option for non-autonomous mode to explain manually assisted bot actions. Sets the default summary behind it.
Modified Paths:
--------------
trunk/pywikipedia/interwiki.py
Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py 2010-10-08 15:39:29 UTC (rev 8624)
+++ trunk/pywikipedia/interwiki.py 2010-10-09 01:46:16 UTC (rev 8625)
@@ -68,6 +68,10 @@
NOTE: For post-processing it always assumes that saving the
the pages was sucessful.
+ -summary: Set an additional action summary message for the edit. This
+ could be used for further explanations of the bot action.
+ This will only be used in non-autonomous mode.
+
Additionaly, these arguments can be used to restrict the bot to certain pages:
-namespace:n Number or name of namespace to process. The parameter can be
@@ -607,6 +611,7 @@
quiet = False
restoreAll = False
async = False
+ summary = u''
def readOptions(self, arg):
""" Read all commandline parameters for the global container """
@@ -705,6 +710,11 @@
self.quiet = True
elif arg == '-async':
self.async = True
+ elif arg.startswith('-summary'):
+ if len(arg) == 8:
+ self.summary = pywikibot.input(u'What summary do you want to use?')
+ else:
+ self.summary = arg[9:]
elif arg.startswith('-lack:'):
remainder = arg[6:].split(':')
self.lacklanguage = remainder[0]
@@ -2263,7 +2273,8 @@
mcomment = mods = u''
- if len(adding) + len(removing) + len(modifying) <= 3:
+ if not globalvar.summary and \
+ len(adding) + len(removing) + len(modifying) <= 3:
# Use an extended format for the string linking to all added pages.
fmt = lambda d, site: d[site].aslink(forceInterwiki=True)
else:
@@ -2285,7 +2296,7 @@
if modifying:
mods += (sep + mod + colon + comma.join([fmt(new, x) for x in modifying]))
if mods:
- mcomment = head + mods
+ mcomment = head + globalvar.summary + mods
return mods, mcomment, adding, removing, modifying
def botMayEdit (page):
@@ -2390,6 +2401,12 @@
if not genFactory.handleArg(arg):
singlePageTitle.append(arg)
+ # Do not use additional summary with autonomous mode
+ if globalvar.autonomous:
+ globalvar.summary = u''
+ elif globalvar.summary:
+ globalvar.summary += u'; '
+
# ensure that we don't try to change main page
try:
site = pywikibot.getSite()