Revision: 7557
Author: xqt
Date: 2009-10-28 18:12:26 +0000 (Wed, 28 Oct 2009)
Log Message:
-----------
import wikipedia as pywikibot for easier comparison with the rewrite branch
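The rename is purely mechanical: each script keeps importing the old wikipedia module but binds it to the name pywikibot, so call sites read the same as in the rewrite branch while still running against the trunk library. A minimal sketch of the pattern, assuming only functions that already appear in the diffs below (output, getSite, handleArgs, stopme); the run() wrapper is a hypothetical example, not part of this commit:

    # Compatibility alias: the old trunk module, used under the rewrite-branch name.
    import wikipedia as pywikibot

    def run():
        # Hypothetical example of how call sites read after the rename.
        site = pywikibot.getSite()          # same object as wikipedia.getSite()
        pywikibot.output(u'Working on %s' % site)

    if __name__ == "__main__":
        try:
            pywikibot.handleArgs()
            run()
        finally:
            pywikibot.stopme()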
Modified Paths:
--------------
trunk/pywikipedia/blockpageschecker.py
trunk/pywikipedia/category.py
trunk/pywikipedia/category_redirect.py
trunk/pywikipedia/cosmetic_changes.py
trunk/pywikipedia/editarticle.py
trunk/pywikipedia/interwiki.py
Modified: trunk/pywikipedia/blockpageschecker.py
===================================================================
--- trunk/pywikipedia/blockpageschecker.py 2009-10-28 18:10:56 UTC (rev 7556)
+++ trunk/pywikipedia/blockpageschecker.py 2009-10-28 18:12:26 UTC (rev 7557)
@@ -63,7 +63,8 @@
#
import re, webbrowser
-import wikipedia, catlib, pagegenerators, config
+import wikipedia as pywikibot
+import catlib, pagegenerators, config
# This is required for the text that is shown when you run this script
# with the parameter -help.
@@ -115,7 +116,7 @@
'en': None,
'it': [r'\{\{(?:[Tt]emplate:|)[Pp]rotetta\}\}'],
}
-
+
# Array: 0 => Semi-block, 1 => Total Block, 2 => Semi-Move, 3 => Total-Move, 4 => template-unique
templateNoRegex = {
'it':['{{Avvisobloccoparziale}}', '{{Avvisoblocco}}', None, None, '{{Protetta}}'],
@@ -171,7 +172,7 @@
for catchRegex in TU:
resultCatch = re.findall(catchRegex, text)
if resultCatch:
- return ('unique', catchRegex)
+ return ('unique', catchRegex)
if TSMP != None and TTMP != None and TTP != TTMP and TSP != TSMP:
for catchRegex in TTMP:
resultCatch = re.findall(catchRegex, text)
@@ -184,9 +185,9 @@
return ('editable', r'\A\n') # If editable means that we have no regex, won't change anything with this regex
def debugQuest(site, page):
- quest = wikipedia.input(u'Do you want to open the page on your [b]rowser, [g]ui or [n]othing?')
+ quest = pywikibot.input(u'Do you want to open the page on your [b]rowser, [g]ui or [n]othing?')
pathWiki = site.family.nicepath(site.lang)
- url = 'http://%s%s%s?&redirect=no' % (wikipedia.getSite().hostname(), pathWiki, page.urlname())
+ url = 'http://%s%s%s?&redirect=no' % (pywikibot.getSite().hostname(), pathWiki, page.urlname())
while 1:
if quest.lower() in ['b', 'B']:
webbrowser.open(url)
@@ -199,7 +200,7 @@
elif quest.lower() in ['n', 'N']:
break
else:
- wikipedia.output(u'wrong entry, type "b", "g" or "n"')
+ pywikibot.output(u'wrong entry, type "b", "g" or "n"')
continue
def main():
@@ -207,7 +208,7 @@
# Loading the comments
global categoryToCheck; global comment; global project_inserted
if config.mylang not in project_inserted:
- wikipedia.output(u"Your project is not supported by this script. You have to
edit the script and add it!")
+ pywikibot.output(u"Your project is not supported by this script. You have to
edit the script and add it!")
return
# always, define a generator to understand if the user sets one, defining what's genFactory
always = False; generator = False; debug = False
@@ -215,9 +216,9 @@
# To prevent Infinite loops
errorCount = 0
# Load the right site
- site = wikipedia.getSite()
+ site = pywikibot.getSite()
# Loading the default options.
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg == '-always':
always = True
elif arg == '-move':
@@ -237,27 +238,27 @@
type = 'move')
elif arg.startswith('-page'):
if len(arg) == 5:
- generator = [wikipedia.Page(wikipedia.getSite(), wikipedia.input(u'What page do you want to use?'))]
+ generator = [pywikibot.Page(pywikibot.getSite(), pywikibot.input(u'What page do you want to use?'))]
else:
- generator = [wikipedia.Page(wikipedia.getSite(), arg[6:])]
+ generator = [pywikibot.Page(pywikibot.getSite(), arg[6:])]
else:
genFactory.handleArg(arg)
# Take the right templates to use, the category and the comment
- TSP = wikipedia.translate(site, templateSemiProtection)
- TTP = wikipedia.translate(site, templateTotalProtection)
- TSMP = wikipedia.translate(site, templateSemiMoveProtection)
- TTMP = wikipedia.translate(site, templateTotalMoveProtection)
- TNR = wikipedia.translate(site, templateNoRegex)
- TU = wikipedia.translate(site, templateUnique)
+ TSP = pywikibot.translate(site, templateSemiProtection)
+ TTP = pywikibot.translate(site, templateTotalProtection)
+ TSMP = pywikibot.translate(site, templateSemiMoveProtection)
+ TTMP = pywikibot.translate(site, templateTotalMoveProtection)
+ TNR = pywikibot.translate(site, templateNoRegex)
+ TU = pywikibot.translate(site, templateUnique)
- category = wikipedia.translate(site, categoryToCheck)
- commentUsed = wikipedia.translate(site, comment)
+ category = pywikibot.translate(site, categoryToCheck)
+ commentUsed = pywikibot.translate(site, comment)
if not generator:
gen = genFactory.getCombinedGenerator()
if not generator:
generator = list()
- wikipedia.output(u'Loading categories...')
+ pywikibot.output(u'Loading categories...')
# Define the category if no other generator has been setted
for CAT in category:
cat = catlib.Category(site, CAT)
@@ -265,20 +266,20 @@
gen = pagegenerators.CategorizedPageGenerator(cat)
for pageCat in gen:
generator.append(pageCat)
- wikipedia.output(u'Categories loaded, start!')
+ pywikibot.output(u'Categories loaded, start!')
# Main Loop
preloadingGen = pagegenerators.PreloadingGenerator(generator, pageNumber = 60)
for page in preloadingGen:
pagename = page.aslink()
- wikipedia.output('Loading %s...' % pagename)
+ pywikibot.output('Loading %s...' % pagename)
try:
text = page.get()
restrictions = page.getRestrictions()
- except wikipedia.NoPage:
- wikipedia.output("%s doesn't exist! Skipping..." % pagename)
+ except pywikibot.NoPage:
+ pywikibot.output("%s doesn't exist! Skipping..." % pagename)
continue
- except wikipedia.IsRedirectPage:
- wikipedia.output("%s is a redirect! Skipping..." % pagename)
+ except pywikibot.IsRedirectPage:
+ pywikibot.output("%s is a redirect! Skipping..." % pagename)
if debug:
debugQuest(site, page)
continue
@@ -287,7 +288,7 @@
# PreloadingGenerator cannot set correctly page.editRestriction
# (see bug #1949476 )
if not page.canBeEdited():
- wikipedia.output("%s is sysop-protected : this account can't edit
it! Skipping..." % pagename)
+ pywikibot.output("%s is sysop-protected : this account can't edit
it! Skipping..." % pagename)
continue
"""
editRestr = restrictions['edit']
@@ -295,7 +296,7 @@
try:
config.sysopnames[site.family.name][site.lang]
except:
- wikipedia.output("%s is sysop-protected : this account can't
edit it! Skipping..." % pagename)
+ pywikibot.output("%s is sysop-protected : this account can't
edit it! Skipping..." % pagename)
continue
# Understand, according to the template in the page, what should be the protection
@@ -312,11 +313,11 @@
if TU != None:
replaceToPerform = u'|'.join(TTP + TSP + TU)
else:
- replaceToPerform = u'|'.join(TTP + TSP)
+ replaceToPerform = u'|'.join(TTP + TSP)
text, changes = re.subn('<noinclude>(%s)</noinclude>' % replaceToPerform, '', text)
if changes == 0:
- text, changes = re.subn('(%s)' % replaceToPerform, '', text)
- wikipedia.output(u'The page is editable for all, deleting the template...')
+ text, changes = re.subn('(%s)' % replaceToPerform, '', text)
+ pywikibot.output(u'The page is editable for all, deleting the template...')
elif editRestr[0] == 'sysop':
# total edit protection
@@ -324,9 +325,9 @@
msg = 'The page is protected to the sysop'
if not moveBlockCheck:
msg += ', skipping...'
- wikipedia.output(msg)
+ pywikibot.output(msg)
else:
- wikipedia.output(u'The page is protected to the sysop, but the template seems not correct. Fixing...')
+ pywikibot.output(u'The page is protected to the sysop, but the template seems not correct. Fixing...')
if TU != None:
text, changes = re.subn(TemplateInThePage[1], TNR[4], text)
else:
@@ -338,9 +339,9 @@
msg = 'The page is editable only for the autoconfirmed users'
if not moveBlockCheck:
msg += ', skipping...'
- wikipedia.output(msg)
+ pywikibot.output(msg)
else:
- wikipedia.output(u'The page is editable only for the autoconfirmed users, but the template seems not correct. Fixing...')
+ pywikibot.output(u'The page is editable only for the autoconfirmed users, but the template seems not correct. Fixing...')
if TU != None:
text, changes = re.subn(TemplateInThePage[1], TNR[4], text)
else:
@@ -348,7 +349,7 @@
if changes == 0:
# We tried to fix edit-protection templates, but it did not work.
- wikipedia.output('Warning : No edit-protection template could be found')
+ pywikibot.output('Warning : No edit-protection template could be found')
if moveBlockCheck:
# checking move protection now
@@ -356,7 +357,7 @@
changes = -1
if not moveRestr:
- wikipedia.output(u'The page is movable for all, deleting the template...')
+ pywikibot.output(u'The page is movable for all, deleting the template...')
# Deleting the template because the page doesn't need it.
if TU != None:
replaceToPerform = u'|'.join(TSMP + TTMP + TU)
@@ -368,9 +369,9 @@
elif moveRestr[0] == 'sysop':
# move-total-protection
if (TemplateInThePage[0] == 'sysop-move' and TTMP != None) or (TemplateInThePage[0] == 'unique' and TU != None):
- wikipedia.output(u'The page is protected from moving to the sysop, skipping...')
+ pywikibot.output(u'The page is protected from moving to the sysop, skipping...')
else:
- wikipedia.output(u'The page is protected from moving to the sysop, but the template seems not correct. Fixing...')
+ pywikibot.output(u'The page is protected from moving to the sysop, but the template seems not correct. Fixing...')
if TU != None:
text, changes = re.subn(TemplateInThePage[1], TNR[4], text)
else:
@@ -379,9 +380,9 @@
elif TSMP != None or TU != None:
# implicitely moveRestr[0] = 'autoconfirmed', move-semi-protection
if TemplateInThePage[0] == 'autoconfirmed-move' or TemplateInThePage[0] == 'unique':
- wikipedia.output(u'The page is movable only for the autoconfirmed users, skipping...')
+ pywikibot.output(u'The page is movable only for the autoconfirmed users, skipping...')
else:
- wikipedia.output(u'The page is movable only for the autoconfirmed users, but the template seems not correct. Fixing...')
+ pywikibot.output(u'The page is movable only for the autoconfirmed users, but the template seems not correct. Fixing...')
if TU != None:
text, changes = re.subn(TemplateInThePage[1], TNR[4], text)
else:
@@ -389,43 +390,43 @@
if changes == 0:
# We tried to fix move-protection templates, but it did not work.
- wikipedia.output('Warning : No move-protection template could be found')
+ pywikibot.output('Warning : No move-protection template could be found')
if oldtext != text:
# Ok, asking if the change has to be performed and do it if yes.
- wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default}
<<<" % page.title())
- wikipedia.showDiff(oldtext, text)
+ pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default}
<<<" % page.title())
+ pywikibot.showDiff(oldtext, text)
if not always:
- choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
+ choice = pywikibot.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
if choice == 'a':
always = True
if always or choice == 'y':
while 1:
try:
page.put(text, commentUsed, force=True)
- except wikipedia.EditConflict:
- wikipedia.output(u'Edit conflict! skip!')
+ except pywikibot.EditConflict:
+ pywikibot.output(u'Edit conflict! skip!')
break
- except wikipedia.ServerError:
+ except pywikibot.ServerError:
# Sometimes there is this error that's quite annoying because
# can block the whole process for nothing.
errorCount += 1
if errorCount < 5:
- wikipedia.output(u'Server Error! Wait..')
+ pywikibot.output(u'Server Error! Wait..')
time.sleep(3)
continue
else:
# Prevent Infinite Loops
- raise wikipedia.ServerError(u'Fifth Server Error!')
- except wikipedia.SpamfilterError, e:
- wikipedia.output(u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url))
+ raise pywikibot.ServerError(u'Fifth Server Error!')
+ except pywikibot.SpamfilterError, e:
+ pywikibot.output(u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url))
break
- except wikipedia.PageNotSaved, error:
- wikipedia.output(u'Error putting page: %s' % (error.args,))
+ except pywikibot.PageNotSaved, error:
+ pywikibot.output(u'Error putting page: %s' % (error.args,))
break
- except wikipedia.LockedPage:
- wikipedia.output(u'The page is still protected. Skipping...')
+ except pywikibot.LockedPage:
+ pywikibot.output(u'The page is still protected. Skipping...')
break
else:
# Break only if the errors are one after the other
@@ -436,4 +437,4 @@
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
Modified: trunk/pywikipedia/category.py
===================================================================
--- trunk/pywikipedia/category.py 2009-10-28 18:10:56 UTC (rev 7556)
+++ trunk/pywikipedia/category.py 2009-10-28 18:12:26 UTC (rev 7557)
@@ -75,13 +75,15 @@
# (C) Rob W.W. Hooft, 2004
# (C) Daniel Herding, 2004
# (C) Anreas J Schwab, 2007
+# (C) Pywikipedia team, 2008-2009
#
__version__ = '$Id$'
#
# Distributed under the terms of the MIT license.
#
import os, re, pickle, bz2
-import wikipedia, catlib, config, pagegenerators
+import wikipedia as pywikibot
+import catlib, config, pagegenerators
# This is required for the text that is shown when you run this script
# with the parameter -help.
@@ -184,7 +186,7 @@
'lt':u'robotas: Kategorija pervadinta į [[:Category:%s|%s]]',
'nds':u'Kat-Bot: Kategorie na [[:Category:%s|%s]] schaven',
'nds-nl':u'Bot: kattegerie is herneumd naor [[:Kattegerie:%s|%s]]',
- 'nl':u'Bot: categorie is hernoemd naar [[:Category:%s|%s]]',
+ 'nl':u'Bot: Categorie is hernoemd naar [[:Category:%s|%s]]',
'no':u'Robot: Kategorien ble flyttet til [[:Category:%s|%s]]',
'nn':u'robot: kategorien blei flytta til [[:Kategori:%s|%s]]',
'pt':u'Bot: Categoria [[:Category:%s|%s]] foi movida',
@@ -219,10 +221,10 @@
else:
try:
if not os.path.isabs(filename):
- filename = wikipedia.config.datafilepath(filename)
+ filename = pywikibot.config.datafilepath(filename)
f = bz2.BZ2File(filename, 'r')
- wikipedia.output(u'Reading dump from %s'
- % wikipedia.config.shortpath(filename))
+ pywikibot.output(u'Reading dump from %s'
+ % pywikibot.config.shortpath(filename))
databases = pickle.load(f)
f.close()
# keys are categories, values are 2-tuples with lists as entries.
@@ -286,9 +288,9 @@
Saves the contents of the dictionaries superclassDB and catContentDB to disk.
'''
if not os.path.isabs(filename):
- filename = wikipedia.config.datafilepath(filename)
- wikipedia.output(u'Dumping to %s, please wait...'
- % wikipedia.config.shortpath(filename))
+ filename = pywikibot.config.datafilepath(filename)
+ pywikibot.output(u'Dumping to %s, please wait...'
+ % pywikibot.config.shortpath(filename))
f = bz2.BZ2File(filename, 'w')
databases = {
'catContentDB': self.catContentDB,
@@ -328,21 +330,21 @@
# "Neumann, John von"
sorted_key = split_string[-1] + ', ' + ' '.join(split_string[:-1])
# give explicit sort key
- return wikipedia.Page(site, catlink.title() + '|' + sorted_key)
+ return pywikibot.Page(site, catlink.title() + '|' + sorted_key)
else:
- return wikipedia.Page(site, catlink.title())
+ return pywikibot.Page(site, catlink.title())
def add_category(sort_by_last_name = False, create_pages = False):
'''A robot to mass-add a category to a list of pages.'''
- site = wikipedia.getSite()
+ site = pywikibot.getSite()
if gen:
- newcatTitle = wikipedia.input(
+ newcatTitle = pywikibot.input(
u'Category to add (do not give namespace):')
if not site.nocapitalize:
newcatTitle = newcatTitle[:1].capitalize() + newcatTitle[1:]
# set edit summary message
- editSummary = wikipedia.translate(site, msg_add) % newcatTitle
+ editSummary = pywikibot.translate(site, msg_add) % newcatTitle
cat_namespace = site.category_namespaces()[0]
@@ -352,11 +354,11 @@
answer = ''
while answer not in ('y','n','a'):
- answer = wikipedia.input(u'%s [y/n/a(ll)]:' % (page.aslink()))
+ answer = pywikibot.input(u'%s [y/n/a(ll)]:' % (page.aslink()))
if answer == 'a':
confirm = ''
while confirm not in ('y','n'):
- confirm = wikipedia.input(u"""\
+ confirm = pywikibot.input(u"""\
This should be used if and only if you are sure that your links are correct!
Are you sure? [y/n]:""")
if confirm == 'n':
@@ -365,45 +367,45 @@
if answer == 'y' or answer == 'a':
try:
text = page.get()
- except wikipedia.NoPage:
+ except pywikibot.NoPage:
if create_pages:
- wikipedia.output(u"%s doesn't exist yet.
Creating."
+ pywikibot.output(u"%s doesn't exist yet.
Creating."
% (page.title()))
text = ''
else:
- wikipedia.output(u"%s doesn't exist yet.
Ignoring."
+ pywikibot.output(u"%s doesn't exist yet.
Ignoring."
% (page.title()))
continue
- except wikipedia.IsRedirectPage, arg:
- redirTarget = wikipedia.Page(site, arg.args[0])
- wikipedia.output(
+ except pywikibot.IsRedirectPage, arg:
+ redirTarget = pywikibot.Page(site, arg.args[0])
+ pywikibot.output(
u"WARNING: %s is redirect to %s. Ignoring."
% (page.title(), redirTarget.title()))
continue
cats = page.categories()
# Show the title of the page we're working on.
# Highlight the title in purple.
- wikipedia.output(
+ pywikibot.output(
u"\n\n>>> \03{lightpurple}%s\03{default}
<<<"
% page.title())
- wikipedia.output(u"Current categories:")
+ pywikibot.output(u"Current categories:")
for cat in cats:
- wikipedia.output(u"* %s" % cat.title())
- catpl = wikipedia.Page(site,
+ pywikibot.output(u"* %s" % cat.title())
+ catpl = pywikibot.Page(site,
cat_namespace + ':' + newcatTitle)
if sort_by_last_name:
catpl = sorted_by_last_name(catpl, page)
if catpl in cats:
- wikipedia.output(u"%s is already in %s."
+ pywikibot.output(u"%s is already in %s."
% (page.title(), catpl.title()))
else:
- wikipedia.output(u'Adding %s' % catpl.aslink())
+ pywikibot.output(u'Adding %s' % catpl.aslink())
cats.append(catpl)
- text = wikipedia.replaceCategoryLinks(text, cats)
+ text = pywikibot.replaceCategoryLinks(text, cats)
try:
page.put(text, comment = editSummary)
- except wikipedia.EditConflict:
- wikipedia.output(
+ except pywikibot.EditConflict:
+ pywikibot.output(
u'Skipping %s because of edit conflict'
% (page.title()))
@@ -412,7 +414,7 @@
def __init__(self, oldCatTitle, newCatTitle, batchMode=False,
editSummary='', inPlace=False, moveCatPage=True,
deleteEmptySourceCat=True, titleRegex=None):
- site = wikipedia.getSite()
+ site = pywikibot.getSite()
self.editSummary = editSummary
self.oldCat = catlib.Category(site, 'Category:' + oldCatTitle)
self.newCatTitle = newCatTitle
@@ -423,10 +425,10 @@
self.titleRegex = titleRegex
# set edit summary message
if not self.editSummary:
- self.editSummary = wikipedia.translate(site, msg_change)% self.oldCat.title()
+ self.editSummary = pywikibot.translate(site, msg_change)% self.oldCat.title()
def run(self):
- site = wikipedia.getSite()
+ site = pywikibot.getSite()
newCat = catlib.Category(site, 'Category:' + self.newCatTitle)
# Copy the category contents to the new category page
@@ -435,21 +437,21 @@
if self.oldCat.exists() and self.moveCatPage:
copied = self.oldCat.copyAndKeep(
self.newCatTitle,
- wikipedia.translate(site, cfd_templates))
+ pywikibot.translate(site, cfd_templates))
# Also move the talk page
if copied:
- reason = wikipedia.translate(site, deletion_reason_move) \
+ reason = pywikibot.translate(site, deletion_reason_move) \
% (self.newCatTitle, self.newCatTitle)
oldTalk = self.oldCat.toggleTalkPage()
if oldTalk.exists():
newTalkTitle = newCat.toggleTalkPage().title()
try:
talkMoved = oldTalk.move(newTalkTitle, reason)
- except (wikipedia.NoPage, wikipedia.PageNotSaved), e:
+ except (pywikibot.NoPage, pywikibot.PageNotSaved), e:
#in order :
#Source talk does not exist, or
#Target talk already exists
- wikipedia.output(e.message)
+ pywikibot.output(e.message)
else:
if talkMoved:
oldMovedTalk = oldTalk
@@ -462,7 +464,8 @@
if not self.titleRegex or re.search(self.titleRegex, article.title()):
catlib.change_category(article, self.oldCat, newCat,
- comment=self.editSummary, inPlace=self.inPlace)
+ comment=self.editSummary,
+ inPlace=self.inPlace)
# Move subcategories
gen = pagegenerators.SubCategoriesPageGenerator(self.oldCat,
@@ -472,19 +475,20 @@
if not self.titleRegex or re.search(self.titleRegex, subcategory.title()):
catlib.change_category(subcategory, self.oldCat, newCat,
- comment=self.editSummary, inPlace=self.inPlace)
+ comment=self.editSummary,
+ inPlace=self.inPlace)
# Delete the old category and its moved talk page
if copied and self.deleteEmptySourceCat == True:
if self.oldCat.isEmpty():
- reason = wikipedia.translate(site, deletion_reason_move) \
+ reason = pywikibot.translate(site, deletion_reason_move) \
% (self.newCatTitle, self.newCatTitle)
confirm = not self.batchMode
self.oldCat.delete(reason, confirm, mark = True)
if oldMovedTalk is not None:
oldMovedTalk.delete(reason, confirm, mark = True)
else:
- wikipedia.output('Couldn\'t delete %s - not empty.'
+ pywikibot.output('Couldn\'t delete %s - not empty.'
% self.oldCat.title())
@@ -500,7 +504,7 @@
'he':u'בוט: יוצר רשימה מהקטגוריה %s (%d דפים)',
'kk':u'Бот: %s дегеннен (%d буын) тізімдеді',
'nds-nl':u'Bot: lieste van %s (%d pagina\'s)',
- 'nl':u'Bot: lijst van %s (%d pagina\'s)',
+ 'nl':u'Bot: Lijst van %s (%d pagina\'s)',
'sv':u'Robot: Skapar en lista från %s (%d)',
'pt':u'Bot: Listando de %s (%d entradas)',
'zh':u'機器人: 從%s提取列表(%d個項目)',
@@ -510,8 +514,8 @@
self.editSummary = editSummary
self.overwrite = overwrite
self.showImages = showImages
- self.cat = catlib.Category(wikipedia.getSite(), 'Category:' + catTitle)
- self.list = wikipedia.Page(wikipedia.getSite(), listTitle)
+ self.cat = catlib.Category(pywikibot.getSite(), 'Category:' + catTitle)
+ self.list = pywikibot.Page(pywikibot.getSite(), listTitle)
self.subCats = subCats
self.talkPages = talkPages
self.recurse = recurse
@@ -521,7 +525,7 @@
if self.subCats:
listOfArticles += self.cat.subcategoriesList()
if not self.editSummary:
- self.editSummary = wikipedia.translate(wikipedia.getSite(), self.listify_msg) % (self.cat.title(), len(listOfArticles))
+ self.editSummary = pywikibot.translate(pywikibot.getSite(), self.listify_msg) % (self.cat.title(), len(listOfArticles))
listString = ""
for article in listOfArticles:
@@ -536,7 +540,7 @@
else:
listString = listString + "*[[:%s]]\n" % article.title()
if self.list.exists() and not self.overwrite:
- wikipedia.output(u'Page %s already exists, aborting.' % self.list.title())
+ pywikibot.output(u'Page %s already exists, aborting.' % self.list.title())
else:
self.list.put(listString, comment=self.editSummary)
@@ -561,7 +565,7 @@
'ksh':u'Bot: de Saachjropp is nu opjelööß',
'nds':u'Kat-Bot: Kategorie is nu oplööst',
'nds-nl':u'Bot: kattegerie besteet neet meer',
- 'nl':u'Bot: categorie is opgeheven',
+ 'nl':u'Bot: Categorie is opgeheven',
'no':u'Robot: Kategorien ble oppløst',
'nn':u'robot: kategorien blei løyst opp',
'pt':u'Bot: Categoria foi unida',
@@ -589,7 +593,7 @@
'lb': u'Bot: Ewech huele vun %s',
'nds':u'Kat-Bot: rut ut %s',
'nds-nl':u'Bot: vort-ehaold uut %s',
- 'nl':u'Bot: verwijderd uit %s',
+ 'nl':u'Bot: Verwijderd uit %s',
'no':u'Robot: Fjerner ifra %s',
'nn':u'robot: fjerna ifrå %s',
'pt':u'Bot: Removendo [[Categoria:%s]]',
@@ -601,19 +605,19 @@
def __init__(self, catTitle, batchMode = False, editSummary = '',
useSummaryForDeletion = True, titleRegex = None, inPlace = False):
self.editSummary = editSummary
- self.cat = catlib.Category(wikipedia.getSite(), 'Category:' + catTitle)
+ self.cat = catlib.Category(pywikibot.getSite(), 'Category:' + catTitle)
# get edit summary message
self.useSummaryForDeletion = useSummaryForDeletion
self.batchMode = batchMode
self.titleRegex = titleRegex
self.inPlace = inPlace
if not self.editSummary:
- self.editSummary = wikipedia.translate(wikipedia.getSite(), self.msg_remove) % self.cat.title()
+ self.editSummary = pywikibot.translate(pywikibot.getSite(), self.msg_remove) % self.cat.title()
def run(self):
articles = self.cat.articlesList(recurse = 0)
if len(articles) == 0:
- wikipedia.output(u'There are no articles in category %s' % self.cat.title())
+ pywikibot.output(u'There are no articles in category %s' % self.cat.title())
else:
for article in articles:
if not self.titleRegex or re.search(self.titleRegex,article.title()):
@@ -621,7 +625,7 @@
# Also removes the category tag from subcategories' pages
subcategories = self.cat.subcategoriesList(recurse = 0)
if len(subcategories) == 0:
- wikipedia.output(u'There are no subcategories in category %s' % self.cat.title())
+ pywikibot.output(u'There are no subcategories in category %s' % self.cat.title())
else:
for subcategory in subcategories:
catlib.change_category(subcategory, self.cat, None, comment = self.editSummary, inPlace = self.inPlace)
@@ -630,7 +634,7 @@
if self.useSummaryForDeletion and self.editSummary:
reason = self.editSummary
else:
- reason = wikipedia.translate(wikipedia.getSite(), self.deletion_reason_remove)
+ reason = pywikibot.translate(pywikibot.getSite(), self.deletion_reason_remove)
talkPage = self.cat.toggleTalkPage()
self.cat.delete(reason, not self.batchMode)
if (talkPage.exists()):
@@ -663,7 +667,7 @@
def __init__(self, catTitle, catDB):
self.catTitle = catTitle
self.catDB = catDB
- self.editSummary = wikipedia.translate(wikipedia.getSite(), msg_change) % catTitle
+ self.editSummary = pywikibot.translate(pywikibot.getSite(), msg_change) % catTitle
def move_to_category(self, article, original_cat, current_cat):
'''
@@ -673,16 +677,16 @@
NOTE: current_cat is only used for internal recursion. You should
always use current_cat = original_cat.
'''
- wikipedia.output(u'')
+ pywikibot.output(u'')
# Show the title of the page where the link was found.
# Highlight the title in purple.
- wikipedia.output(u'Treating page \03{lightpurple}%s\03{default}, currently in \03{lightpurple}%s\03{default}' % (article.title(), current_cat.title()))
+ pywikibot.output(u'Treating page \03{lightpurple}%s\03{default}, currently in \03{lightpurple}%s\03{default}' % (article.title(), current_cat.title()))
# Determine a reasonable amount of context to print
try:
full_text = article.get(get_redirect = True)
- except wikipedia.NoPage:
- wikipedia.output(u'Page %s not found.' % article.title())
+ except pywikibot.NoPage:
+ pywikibot.output(u'Page %s not found.' % article.title())
return
try:
contextLength = full_text.index('\n\n')
@@ -694,7 +698,7 @@
if contextLength > 1000 or contextLength < 0:
contextLength = 500
print
- wikipedia.output(full_text[:contextLength])
+ pywikibot.output(full_text[:contextLength])
print
subcatlist = self.catDB.getSubcats(current_cat)
@@ -709,32 +713,32 @@
# show subcategories as possible choices (with numbers)
for i in range(len(supercatlist)):
# layout: we don't expect a cat to have more than 10 supercats
- wikipedia.output(u'u%d - Move up to %s' % (i, supercatlist[i].title()))
+ pywikibot.output(u'u%d - Move up to %s' % (i, supercatlist[i].title()))
for i in range(len(subcatlist)):
# layout: we don't expect a cat to have more than 100 subcats
- wikipedia.output(u'%2d - Move down to %s' % (i, subcatlist[i].title()))
+ pywikibot.output(u'%2d - Move down to %s' % (i, subcatlist[i].title()))
print ' j - Jump to another category'
print ' s - Skip this article'
print ' r - Remove this category tag'
print ' ? - Print first part of the page (longer and longer)'
- wikipedia.output(u'Enter - Save category as %s' % current_cat.title())
+ pywikibot.output(u'Enter - Save category as %s' % current_cat.title())
flag = False
while not flag:
print ''
- choice=wikipedia.input(u'Choice:')
+ choice = pywikibot.input(u'Choice:')
if choice in ['s', 'S']:
flag = True
elif choice == '':
- wikipedia.output(u'Saving category as %s' % current_cat.title())
+ pywikibot.output(u'Saving category as %s' % current_cat.title())
if current_cat == original_cat:
print 'No changes necessary.'
else:
catlib.change_category(article, original_cat, current_cat, comment = self.editSummary)
flag = True
elif choice in ['j', 'J']:
- newCatTitle = wikipedia.input(u'Please enter the category the article should be moved to:')
- newCat = catlib.Category(wikipedia.getSite(), 'Category:' + newCatTitle)
+ newCatTitle = pywikibot.input(u'Please enter the category the article should be moved to:')
+ newCat = catlib.Category(pywikibot.getSite(), 'Category:' + newCatTitle)
# recurse into chosen category
self.move_to_category(article, original_cat, newCat)
flag = True
@@ -745,7 +749,7 @@
elif choice == '?':
contextLength += 500
print
- wikipedia.output(full_text[:contextLength])
+ pywikibot.output(full_text[:contextLength])
print
# if categories possibly weren't visible, show them additionally
@@ -754,7 +758,7 @@
print ''
print 'Original categories: '
for cat in article.categories():
- wikipedia.output(u'* %s' % cat.title())
+ pywikibot.output(u'* %s' % cat.title())
elif choice[0] == 'u':
try:
choice=int(choice[1:])
@@ -774,15 +778,15 @@
flag = True
def run(self):
- cat = catlib.Category(wikipedia.getSite(), 'Category:' + self.catTitle)
+ cat = catlib.Category(pywikibot.getSite(), 'Category:' + self.catTitle)
articles = cat.articlesList(recurse = False)
if len(articles) == 0:
- wikipedia.output(u'There are no articles in category ' + catTitle)
+ pywikibot.output(u'There are no articles in category ' + catTitle)
else:
preloadingGen = pagegenerators.PreloadingGenerator(iter(articles))
for article in preloadingGen:
- wikipedia.output(u'\n===================================================================')
+ pywikibot.output(u'\n===================================================================')
self.move_to_category(article, cat, cat)
class CategoryTreeRobot:
@@ -803,7 +807,7 @@
self.catTitle = catTitle
self.catDB = catDB
if filename and not os.path.isabs(filename):
- filename = wikipedia.config.datafilepath(filename)
+ filename = pywikibot.config.datafilepath(filename)
self.filename = filename
# TODO: make maxDepth changeable with a parameter or config file entry
self.maxDepth = maxDepth
@@ -863,7 +867,7 @@
# create a list of wiki links to the supercategories
supercat_names.append('[[:%s|%s]]' % (supercats[i].title(), supercats[i].title().split(':', 1)[1]))
# print this list, separated with commas, using translations given in also_in_cats
- result += ' ' + wikipedia.translate(wikipedia.getSite(), also_in_cats) % ', '.join(supercat_names)
+ result += ' ' + pywikibot.translate(pywikibot.getSite(), also_in_cats) % ', '.join(supercat_names)
result += '\n'
if currentDepth < self.maxDepth:
for subcat in self.catDB.getSubcats(cat):
@@ -883,16 +887,16 @@
* catTitle - the title of the category which will be the tree's root
* maxDepth - the limit beyond which no subcategories will be listed
"""
- cat = catlib.Category(wikipedia.getSite(), 'Category:' + self.catTitle)
+ cat = catlib.Category(pywikibot.getSite(), 'Category:' + self.catTitle)
tree = self.treeview(cat)
if self.filename:
- wikipedia.output(u'Saving results in %s' % self.filename)
+ pywikibot.output(u'Saving results in %s' % self.filename)
import codecs
f = codecs.open(self.filename, 'a', 'utf-8')
f.write(tree)
f.close()
else:
- wikipedia.output(tree, toStdout = True)
+ pywikibot.output(tree, toStdout = True)
if __name__ == "__main__":
fromGiven = False
@@ -913,8 +917,8 @@
# The generator gives the pages that should be worked upon.
gen = None
- #If this is set to true then the custom edit summary given for removing
- #categories from articles will also be used as the deletion reason.
+ # If this is set to true then the custom edit summary given for removing
+ # categories from articles will also be used as the deletion reason.
useSummaryForDeletion = True
try:
catDB = CategoryDatabase()
@@ -922,7 +926,7 @@
sort_by_last_name = False
restore = False
create_pages = False
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg == 'add':
action = 'add'
elif arg == 'remove':
@@ -962,7 +966,7 @@
editSummary = arg[len('-summary:'):]
elif arg.startswith('-match'):
if len(arg) == len('-match'):
- titleRegex = wikipedia.input(u'Which regular expression should affected objects match?')
+ titleRegex = pywikibot.input(u'Which regular expression should affected objects match?')
else:
titleRegex = arg[len('-match:'):]
elif arg == '-talkpages':
@@ -988,34 +992,34 @@
add_category(sort_by_last_name, create_pages)
elif action == 'remove':
if (fromGiven == False):
- oldCatTitle = wikipedia.input(u'Please enter the name of the category that should be removed:')
+ oldCatTitle = pywikibot.input(u'Please enter the name of the category that should be removed:')
bot = CategoryRemoveRobot(oldCatTitle, batchMode, editSummary,
useSummaryForDeletion, inPlace = inPlace)
bot.run()
elif action == 'move':
if (fromGiven == False):
- oldCatTitle = wikipedia.input(u'Please enter the old name of the category:')
+ oldCatTitle = pywikibot.input(u'Please enter the old name of the category:')
if (toGiven == False):
- newCatTitle = wikipedia.input(u'Please enter the new name of the category:')
+ newCatTitle = pywikibot.input(u'Please enter the new name of the category:')
bot = CategoryMoveRobot(oldCatTitle, newCatTitle, batchMode, editSummary, inPlace, titleRegex = titleRegex)
bot.run()
elif action == 'tidy':
- catTitle = wikipedia.input(u'Which category do you want to tidy up?')
+ catTitle = pywikibot.input(u'Which category do you want to tidy up?')
bot = CategoryTidyRobot(catTitle, catDB)
bot.run()
elif action == 'tree':
- catTitle = wikipedia.input(u'For which category do you want to create a tree view?')
- filename = wikipedia.input(u'Please enter the name of the file where the tree should be saved, or press enter to simply show the tree:')
+ catTitle = pywikibot.input(u'For which category do you want to create a tree view?')
+ filename = pywikibot.input(u'Please enter the name of the file where the tree should be saved, or press enter to simply show the tree:')
bot = CategoryTreeRobot(catTitle, catDB, filename)
bot.run()
elif action == 'listify':
if (fromGiven == False):
- oldCatTitle = wikipedia.input(u'Please enter the name of the category to listify:')
+ oldCatTitle = pywikibot.input(u'Please enter the name of the category to listify:')
if (toGiven == False):
- newCatTitle = wikipedia.input(u'Please enter the name of the list to create:')
+ newCatTitle = pywikibot.input(u'Please enter the name of the list to create:')
bot = CategoryListifyRobot(oldCatTitle, newCatTitle, editSummary, overwrite, showImages, subCats = True, talkPages = talkPages, recurse = recurse)
bot.run()
else:
- wikipedia.showHelp('category')
+ pywikibot.showHelp('category')
finally:
catDB.dump()
- wikipedia.stopme()
+ pywikibot.stopme()
Modified: trunk/pywikipedia/category_redirect.py
===================================================================
--- trunk/pywikipedia/category_redirect.py 2009-10-28 18:10:56 UTC (rev 7556)
+++ trunk/pywikipedia/category_redirect.py 2009-10-28 18:12:26 UTC (rev 7557)
@@ -12,9 +12,17 @@
are taken into account.
"""
+
+#
+# (C) Pywikipedia team, 2008-2009
+#
__version__ = '$Id$'
+#
+# Distributed under the terms of the MIT license.
+#
-import wikipedia, catlib, query, pagegenerators
+import wikipedia as pywikibot
+import catlib, query, pagegenerators
import cPickle
import math
import re
@@ -37,11 +45,11 @@
class CategoryRedirectBot(object):
def __init__(self):
self.cooldown = 7 # days
- self.site = wikipedia.getSite()
+ self.site = pywikibot.getSite()
self.catprefix = self.site.namespace(14)+":"
self.log_text = []
self.edit_requests = []
- self.log_page = wikipedia.Page(self.site,
+ self.log_page = pywikibot.Page(self.site,
u"User:%(user)s/category redirect log" %
{'user': self.site.loggedInAs()})
@@ -165,7 +173,7 @@
'zh': u"分类重定向维护机器人",
}
- self.edit_request_text = wikipedia.translate(self.site.lang,
+ self.edit_request_text = pywikibot.translate(self.site.lang,
{'en': u"""\
The following protected pages have been detected as requiring updates to \
category links:
@@ -186,10 +194,10 @@
""",
})
- self.edit_request_item = wikipedia.translate(self.site.lang,
+ self.edit_request_item = pywikibot.translate(self.site.lang,
{
- 'en': u"* %s is in %s, which is a redirect to %s",
- 'fr': u"* %s est dans %s, qui est une redirection vers %s",
+ 'en': u"* %s is in %s, which is a redirect to %s",
+ 'fr': u"* %s est dans %s, qui est une redirection vers
%s",
'ksh': u"* %s es en %s, un dat es en Ömleidung op %s",
})
@@ -199,42 +207,42 @@
Moves subcategories of oldCat as well. oldCat and newCat should be
Category objects. If newCat is None, the category will be removed.
- This is a copy of portions of catlib.change_category(), with some
- changes.
+ This is a copy of portions of [old] catlib.change_category(), with
+ some changes.
"""
oldtext = article.get(get_redirect=True, force=True)
- newtext = wikipedia.replaceCategoryInPlace(oldtext, oldCat, newCat)
+ newtext = pywikibot.replaceCategoryInPlace(oldtext, oldCat, newCat)
try:
# even if no changes, still save the page, in case it needs
# an update due to changes in a transcluded template
article.put(newtext, comment)
if newtext == oldtext:
- wikipedia.output(
+ pywikibot.output(
u'No changes in made in page %s.' % article.aslink())
return False
return True
- except wikipedia.EditConflict:
- wikipedia.output(
+ except pywikibot.EditConflict:
+ pywikibot.output(
u'Skipping %s because of edit conflict' % article.aslink())
- except wikipedia.LockedPage:
- wikipedia.output(u'Skipping locked page %s' % article.aslink())
+ except pywikibot.LockedPage:
+ pywikibot.output(u'Skipping locked page %s' % article.aslink())
self.edit_requests.append((article.aslink(),
oldCat.aslink(textlink=True),
newCat.aslink(textlink=True)))
- except wikipedia.SpamfilterError, error:
- wikipedia.output(
+ except pywikibot.SpamfilterError, error:
+ pywikibot.output(
u'Changing page %s blocked by spam filter (URL=%s)'
% (article.aslink(), error.url))
- except wikipedia.NoUsername:
- wikipedia.output(
+ except pywikibot.NoUsername:
+ pywikibot.output(
u"Page %s not saved; sysop privileges required."
% article.aslink())
self.edit_requests.append((article.aslink(textlink=True),
oldCat.aslink(textlink=True),
newCat.aslink(textlink=True)))
- except wikipedia.PageNotSaved, error:
- wikipedia.output(u"Saving page %s failed: %s"
+ except pywikibot.PageNotSaved, error:
+ pywikibot.output(u"Saving page %s failed: %s"
% (article.aslink(), error.message))
return False
@@ -255,7 +263,7 @@
cmlimit="max"):
found += len(result['categorymembers'])
for item in result['categorymembers']:
- article = wikipedia.Page(self.site, item['title'])
+ article = pywikibot.Page(self.site, item['title'])
changed = self.change_category(article, oldCat, newCat,
comment=editSummary)
if changed: moved += 1
@@ -267,21 +275,21 @@
cmnamespace="10",
cmlimit="max"):
for item in result['categorymembers']:
- doc = wikipedia.Page(self.site, item['title']+"/doc")
+ doc = pywikibot.Page(self.site, item['title']+"/doc")
try:
old_text = doc.get()
- except wikipedia.Error:
+ except pywikibot.Error:
continue
changed = self.change_category(doc, oldCat, newCat,
comment=editSummary)
if changed: moved += 1
if found:
- wikipedia.output(u"%s: %s found, %s moved"
+ pywikibot.output(u"%s: %s found, %s moved"
% (oldCat.title(), found, moved))
return (found, moved)
- except wikipedia.ServerError:
- wikipedia.output(u"Server error: retrying in 5 seconds...")
+ except pywikibot.ServerError:
+ pywikibot.output(u"Server error: retrying in 5 seconds...")
time.sleep(5)
continue
except KeyboardInterrupt:
@@ -301,7 +309,7 @@
def query_results(self, **data):
"""Iterate results from API action=query, using data as
parameters."""
querydata = {'action': 'query',
- 'maxlag': str(wikipedia.config.maxlag)}
+ 'maxlag': str(pywikibot.config.maxlag)}
querydata = query.CombineParams(querydata, data)
if not "action" in querydata or not querydata['action'] ==
'query':
raise ValueError(
@@ -314,8 +322,8 @@
#if data.startswith(u"unknown_action"):
# e = {'code': data[:14], 'info': data[16:]}
# raise APIError(e)
- except wikipedia.ServerError:
- wikipedia.output(u"Wikimedia Server Error; retrying...")
+ except pywikibot.ServerError:
+ pywikibot.output(u"Wikimedia Server Error; retrying...")
time.sleep(5)
continue
except ValueError:
@@ -323,7 +331,7 @@
# problem. Wait a few seconds and try again
# WARNING: if the server is down, this could
# cause an infinite loop
- wikipedia.output(u"Invalid API response received;
retrying...")
+ pywikibot.output(u"Invalid API response received;
retrying...")
time.sleep(5)
continue
if type(result) is dict and "error" in result:
@@ -332,7 +340,7 @@
time.sleep(5)
waited += 5
if waited % 30 == 0:
- wikipedia.output(
+ pywikibot.output(
u"(Waited %i seconds due to server lag.)"
% waited)
continue
@@ -367,7 +375,7 @@
LOG_SIZE = 7 # Number of items to keep in active log
try:
log_text = self.log_page.get()
- except wikipedia.NoPage:
+ except pywikibot.NoPage:
log_text = u""
log_items = {}
header = None
@@ -408,9 +416,9 @@
l = time.localtime()
today = "%04d-%02d-%02d" % l[:3]
- edit_request_page = wikipedia.Page(self.site,
+ edit_request_page = pywikibot.Page(self.site,
u"User:%(user)s/category edit requests" %
locals())
- datafile = wikipedia.config.datafilepath(
+ datafile = pywikibot.config.datafilepath(
"%s-catmovebot-data" % self.site.dbName())
try:
inp = open(datafile, "rb")
@@ -425,7 +433,7 @@
template_list = self.redir_templates[self.site.family.name
][self.site.lang]
except KeyError:
- wikipedia.output(u"No redirect templates defined for %s"
+ pywikibot.output(u"No redirect templates defined for %s"
% self.site.sitename())
return
# regex to match soft category redirects
@@ -445,13 +453,13 @@
# check for hard-redirected categories that are not already marked
# with an appropriate template
- comment = wikipedia.translate(self.site.lang, self.redir_comment)
+ comment = pywikibot.translate(self.site.lang, self.redir_comment)
for result in self.query_results(list='allpages',
apnamespace='14', # Category:
apfrom='!',
apfilterredir='redirects',
aplimit='max'):
- gen = (wikipedia.Page(self.site, page_item['title'])
+ gen = (pywikibot.Page(self.site, page_item['title'])
for page_item in result['allpages'])
# gen yields all hard redirect pages in namespace 14
for page in pagegenerators.PreloadingGenerator(gen, 120):
@@ -469,7 +477,7 @@
self.log_text.append(u"* Added {{tl|%s}} to %s"
% (template_list[0],
page.aslink(textlink=True)))
- except wikipedia.Error, e:
+ except pywikibot.Error, e:
self.log_text.append(
u"* Failed to add {{tl|%s}} to %s (%s)"
% (template_list[0],
@@ -481,9 +489,9 @@
% (page.aslink(textlink=True),
target.aslink(textlink=True)))
- wikipedia.output("Done checking hard-redirect category pages.")
+ pywikibot.output("Done checking hard-redirect category pages.")
- comment = wikipedia.translate(self.site.lang, self.move_comment)
+ comment = pywikibot.translate(self.site.lang, self.move_comment)
scan_data = {
u'action': 'query',
u'list': 'embeddedin',
@@ -502,7 +510,7 @@
gcmlimit=u'max',
prop='info|categoryinfo'):
for catdata in result['pages'].values():
- thispage = wikipedia.Page(self.site, catdata['title'])
+ thispage = pywikibot.Page(self.site, catdata['title'])
catpages.append(thispage)
if 'categoryinfo' in catdata \
and catdata['categoryinfo']['size'] !=
"0":
@@ -510,8 +518,8 @@
nonemptypages.append(thispage)
# preload the category pages for redirected categories
- wikipedia.output(u"")
- wikipedia.output(u"Preloading %s category redirect pages"
+ pywikibot.output(u"")
+ pywikibot.output(u"Preloading %s category redirect pages"
% len(catpages))
for cat in pagegenerators.PreloadingGenerator(catpages, 120):
cat_title = cat.titleWithoutNamespace()
@@ -521,7 +529,7 @@
continue
try:
text = cat.get(get_redirect=True)
- except wikipedia.Error:
+ except pywikibot.Error:
self.log_text.append(u"* Could not load %s; ignoring"
% cat.aslink(textlink=True))
continue
@@ -546,7 +554,7 @@
## self.log_text.append(
## u"* Removed category prefix from parameter in %s"
## % cat.aslink(textlink=True))
-## except wikipedia.Error:
+## except pywikibot.Error:
## self.log_text.append(
## u"* Unable to save changes to %s"
## % cat.aslink(textlink=True))
@@ -557,8 +565,8 @@
self.catprefix+cat_name) not in catmap:
del record[cat_name]
- wikipedia.output(u"")
- wikipedia.output(u"Checking %s destination categories" % len(destmap))
+ pywikibot.output(u"")
+ pywikibot.output(u"Checking %s destination categories" % len(destmap))
for dest in pagegenerators.PreloadingGenerator(destmap.keys(), 120):
if not dest.exists():
for d in destmap[dest]:
@@ -600,17 +608,17 @@
newtext = newtext + oldtext.strip()
try:
d.put(newtext,
- wikipedia.translate(self.site.lang,
+ pywikibot.translate(self.site.lang,
self.dbl_redir_comment),
minorEdit=True)
- except wikipedia.Error, e:
+ except pywikibot.Error, e:
self.log_text.append("** Failed: %s" % str(e))
# only scan those pages that have contents (nonemptypages)
# and that haven't been removed from catlist as broken redirects
cats_to_empty = set(catlist) & set(nonemptypages)
- wikipedia.output(u"")
- wikipedia.output(u"Moving pages out of %s redirected categories."
+ pywikibot.output(u"")
+ pywikibot.output(u"Moving pages out of %s redirected categories."
% len(cats_to_empty))
# thread_limit = int(math.log(len(cats_to_empty), 8) + 1)
# threadpool = ThreadList(limit=1) # disabling multi-threads
@@ -638,7 +646,7 @@
cPickle.dump(record, open(datafile, "wb"))
- wikipedia.setAction(wikipedia.translate(self.site.lang,
+ pywikibot.setAction(pywikibot.translate(self.site.lang,
self.maint_comment))
self.log_text.sort()
self.log_page.put(u"\n==%i-%02i-%02iT%02i:%02i:%02iZ==\n"
@@ -655,7 +663,7 @@
def main(*args):
global bot
try:
- a = wikipedia.handleArgs(*args)
+ a = pywikibot.handleArgs(*args)
if len(a) == 1:
raise RuntimeError('Unrecognized argument "%s"' % a[0])
elif a:
@@ -664,7 +672,7 @@
bot = CategoryRedirectBot()
bot.run()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
if __name__ == "__main__":
Modified: trunk/pywikipedia/cosmetic_changes.py
===================================================================
--- trunk/pywikipedia/cosmetic_changes.py 2009-10-28 18:10:56 UTC (rev 7556)
+++ trunk/pywikipedia/cosmetic_changes.py 2009-10-28 18:12:26 UTC (rev 7557)
@@ -32,7 +32,8 @@
all of them, but be careful if you do.
"""
__version__ = '$Id$'
-import wikipedia, pagegenerators, isbn
+import wikipedia as pywikibot
+import pagegenerators, isbn
import sys
import re
@@ -188,7 +189,7 @@
except isbn.InvalidIsbnException, error:
pass
if self.debug:
- wikipedia.showDiff(oldText, text)
+ pywikibot.showDiff(oldText, text)
return text
def fixSelfInterwiki(self, text):
@@ -205,9 +206,9 @@
Makes sure that interwiki links are put to the correct position and
into the right order.
"""
- if wikipedia.calledModuleName() <> 'interwiki':
- interwikiLinks = wikipedia.getLanguageLinks(text, insite = self.site)
- text = wikipedia.replaceLanguageLinks(text, interwikiLinks, site = self.site, template = self.template)
+ if pywikibot.calledModuleName() <> 'interwiki':
+ interwikiLinks = pywikibot.getLanguageLinks(text, insite = self.site)
+ text = pywikibot.replaceLanguageLinks(text, interwikiLinks, site = self.site, template = self.template)
return text
def standardizeCategories(self, text):
@@ -216,9 +217,9 @@
does not sort them.
"""
# The PyWikipediaBot is no longer allowed to touch categories on the German Wikipedia. See http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/bis_2006…
- if self.site != wikipedia.getSite('de', 'wikipedia') and not self.template:
- categories = wikipedia.getCategoryLinks(text, site = self.site)
- text = wikipedia.replaceCategoryLinks(text, categories, site = self.site)
+ if self.site != pywikibot.getSite('de', 'wikipedia') and not self.template:
+ categories = pywikibot.getCategoryLinks(text, site = self.site)
+ text = pywikibot.replaceCategoryLinks(text, categories, site = self.site)
return text
def translateAndCapitalizeNamespaces(self, text):
@@ -242,7 +243,7 @@
# skip main (article) namespace
if thisNs and namespaces:
- text = wikipedia.replaceExcept(text, r'\[\[\s*(' + '|'.join(namespaces) + ') *:(?P<nameAndLabel>.*?)\]\]', r'[[' + thisNs + ':\g<nameAndLabel>]]', exceptions)
+ text = pywikibot.replaceExcept(text, r'\[\[\s*(' + '|'.join(namespaces) + ') *:(?P<nameAndLabel>.*?)\]\]', r'[[' + thisNs + ':\g<nameAndLabel>]]', exceptions)
return text
def cleanUpLinks(self, text):
@@ -259,8 +260,8 @@
# We only work on namespace 0 because pipes and linktrails work
# differently for images and categories.
try:
- page = wikipedia.Page(self.site, titleWithSection)
- except wikipedia.InvalidTitle:
+ page = pywikibot.Page(self.site, titleWithSection)
+ except pywikibot.InvalidTitle:
return match.group()
if page.namespace() == 0:
# Replace underlines by spaces, also multiple underlines
@@ -284,7 +285,7 @@
hadTrailingSpaces = (len(titleWithSection) != titleLength)
# Convert URL-encoded characters to unicode
- titleWithSection = wikipedia.url2unicode(titleWithSection, site = self.site)
+ titleWithSection = pywikibot.url2unicode(titleWithSection, site = self.site)
if titleWithSection == '':
# just skip empty links.
@@ -347,7 +348,7 @@
# note that the definition of 'letter' varies from language to language.
linkR = re.compile(r'\[\[(?P<titleWithSection>[^\]\|]+)(\|(?P<label>[^\]\|]*))?\]\](?P<linktrail>' + self.site.linktrail() + ')')
- text = wikipedia.replaceExcept(text, linkR, handleOneLink, ['comment', 'math', 'nowiki', 'pre', 'startspace'])
+ text = pywikibot.replaceExcept(text, linkR, handleOneLink, ['comment', 'math', 'nowiki', 'pre', 'startspace'])
return text
def resolveHtmlEntities(self, text):
@@ -363,11 +364,11 @@
# ignore ' see http://eo.wikipedia.org/w/index.php?title=Liberec&diff=next&oldid=2…
if self.site.lang == 'eo':
ignore += [39]
- text = wikipedia.html2unicode(text, ignore = ignore)
+ text = pywikibot.html2unicode(text, ignore = ignore)
return text
def validXhtml(self, text):
- text = wikipedia.replaceExcept(text, r'<br>', r'<br />', ['comment', 'math', 'nowiki', 'pre'])
+ text = pywikibot.replaceExcept(text, r'<br>', r'<br />', ['comment', 'math', 'nowiki', 'pre'])
return text
def removeUselessSpaces(self, text):
@@ -376,8 +377,8 @@
spaceAtLineEndR = re.compile(' $')
exceptions = ['comment', 'math', 'nowiki', 'pre',
'startspace', 'table', 'template']
- text = wikipedia.replaceExcept(text, multipleSpacesR, ' ', exceptions)
- text = wikipedia.replaceExcept(text, spaceAtLineEndR, '', exceptions)
+ text = pywikibot.replaceExcept(text, multipleSpacesR, ' ', exceptions)
+ text = pywikibot.replaceExcept(text, spaceAtLineEndR, '', exceptions)
return text
@@ -387,7 +388,7 @@
front of a percent sign, so it is no longer required to place it
manually.
'''
- text = wikipedia.replaceExcept(text, r'(\d) %', r'\1 %', ['timeline'])
+ text = pywikibot.replaceExcept(text, r'(\d) %', r'\1 %', ['timeline'])
return text
def cleanUpSectionHeaders(self, text):
@@ -402,7 +403,7 @@
"""
for level in range(1, 7):
equals = '=' * level
- text = wikipedia.replaceExcept(text, r'\n' + equals + ' *(?P<title>[^=]+?) *' + equals + ' *\r\n', '\n' + equals + ' \g<title> ' + equals + '\r\n', ['comment', 'math', 'nowiki', 'pre'])
+ text = pywikibot.replaceExcept(text, r'\n' + equals + ' *(?P<title>[^=]+?) *' + equals + ' *\r\n', '\n' + equals + ' \g<title> ' + equals + '\r\n', ['comment', 'math', 'nowiki', 'pre'])
return text
def putSpacesInLists(self, text):
@@ -415,76 +416,77 @@
If there are any complaints, please file a bug report.
"""
if not self.redirect:
- text = wikipedia.replaceExcept(text, r'(?m)^(?P<bullet>(\*+|#+):*)(?P<char>[^\s\*#:].+?)', '\g<bullet> \g<char>', ['comment', 'math', 'nowiki', 'pre'])
+ text = pywikibot.replaceExcept(text, r'(?m)^(?P<bullet>(\*+|#+):*)(?P<char>[^\s\*#:].+?)', '\g<bullet> \g<char>', ['comment', 'math', 'nowiki', 'pre'])
return text
#from fixes.py
def fixSyntaxSave(self, text):
exceptions = ['nowiki', 'comment', 'math', 'pre', 'source', 'startspace']
# external link in double brackets
- text = wikipedia.replaceExcept(text, r'\[\[(?P<url>https?://[^\]]+?)\]\]', r'[\g<url>]', exceptions)
+ text = pywikibot.replaceExcept(text, r'\[\[(?P<url>https?://[^\]]+?)\]\]', r'[\g<url>]', exceptions)
# external link starting with double bracket
- text = wikipedia.replaceExcept(text, r'\[\[(?P<url>https?://.+?)\]', r'[\g<url>]', exceptions)
+ text = pywikibot.replaceExcept(text, r'\[\[(?P<url>https?://.+?)\]', r'[\g<url>]', exceptions)
# external link and description separated by a dash, with
# whitespace in front of the dash, so that it is clear that
# the dash is not a legitimate part of the URL.
- text = wikipedia.replaceExcept(text, r'\[(?P<url>https?://[^\|\] \r\n]+?) +\| *(?P<label>[^\|\]]+?)\]', r'[\g<url> \g<label>]', exceptions)
+ text = pywikibot.replaceExcept(text, r'\[(?P<url>https?://[^\|\] \r\n]+?) +\| *(?P<label>[^\|\]]+?)\]', r'[\g<url> \g<label>]', exceptions)
# dash in external link, where the correct end of the URL can
# be detected from the file extension. It is very unlikely that
# this will cause mistakes.
- text = wikipedia.replaceExcept(text, r'\[(?P<url>https?://[^\|\] ]+?(\.pdf|\.html|\.htm|\.php|\.asp|\.aspx|\.jsp)) *\| *(?P<label>[^\|\]]+?)\]', r'[\g<url> \g<label>]', exceptions)
+ text = pywikibot.replaceExcept(text, r'\[(?P<url>https?://[^\|\] ]+?(\.pdf|\.html|\.htm|\.php|\.asp|\.aspx|\.jsp)) *\| *(?P<label>[^\|\]]+?)\]', r'[\g<url> \g<label>]', exceptions)
return text
def fixHtml(self, text):
# Everything case-insensitive (?i)
# Keep in mind that MediaWiki automatically converts <br> to <br />
exceptions = ['nowiki', 'comment', 'math', 'pre', 'source', 'startspace']
- text = wikipedia.replaceExcept(text, r'(?i)<b>(.*?)</b>', r"'''\1'''" , exceptions)
- text = wikipedia.replaceExcept(text, r'(?i)<strong>(.*?)</strong>', r"'''\1'''" , exceptions)
- text = wikipedia.replaceExcept(text, r'(?i)<i>(.*?)</i>', r"''\1''" , exceptions)
- text = wikipedia.replaceExcept(text, r'(?i)<em>(.*?)</em>', r"''\1''" , exceptions)
+ text = pywikibot.replaceExcept(text, r'(?i)<b>(.*?)</b>', r"'''\1'''" , exceptions)
+ text = pywikibot.replaceExcept(text, r'(?i)<strong>(.*?)</strong>', r"'''\1'''" , exceptions)
+ text = pywikibot.replaceExcept(text, r'(?i)<i>(.*?)</i>', r"''\1''" , exceptions)
+ text = pywikibot.replaceExcept(text, r'(?i)<em>(.*?)</em>', r"''\1''" , exceptions)
# horizontal line without attributes in a single line
- text = wikipedia.replaceExcept(text, r'(?i)([\r\n])<hr[ /]*>([\r\n])', r'\1----\2', exceptions)
+ text = pywikibot.replaceExcept(text, r'(?i)([\r\n])<hr[ /]*>([\r\n])', r'\1----\2', exceptions)
# horizontal line with attributes; can't be done with wiki syntax
# so we only make it XHTML compliant
- text = wikipedia.replaceExcept(text, r'(?i)<hr ([^>/]+?)>', r'<hr \1 />', exceptions)
+ text = pywikibot.replaceExcept(text, r'(?i)<hr ([^>/]+?)>', r'<hr \1 />', exceptions)
# TODO: maybe we can make the bot replace <p> tags with \r\n's.
return text
class CosmeticChangesBot:
- def __init__(self, generator, acceptall = False):
+ def __init__(self, generator, acceptall = False, comment=u'Robot: Cosmetic changes'):
self.generator = generator
self.acceptall = acceptall
+ self.comment = comment
def treat(self, page):
try:
# Show the title of the page we're working on.
# Highlight the title in purple.
- wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default}
<<<" % page.title())
+ pywikibot.output(u"\n\n>>> \03{lightpurple}%s\03{default}
<<<" % page.title())
ccToolkit = CosmeticChangesToolkit(page.site(), debug = True, namespace = page.namespace())
changedText = ccToolkit.change(page.get())
if changedText != page.get():
if not self.acceptall:
- choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
+ choice = pywikibot.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
if choice == 'a':
self.acceptall = True
if self.acceptall or choice == 'y':
- page.put(changedText)
+ page.put(changedText, comment=self.comment)
else:
- wikipedia.output('No changes were necessary in %s' % page.title())
- except wikipedia.NoPage:
- wikipedia.output("Page %s does not exist?!" % page.aslink())
- except wikipedia.IsRedirectPage:
- wikipedia.output("Page %s is a redirect; skipping." %
page.aslink())
- except wikipedia.LockedPage:
- wikipedia.output("Page %s is locked?!" % page.aslink())
+ pywikibot.output('No changes were necessary in %s' % page.title())
+ except pywikibot.NoPage:
+ pywikibot.output("Page %s does not exist?!" % page.aslink())
+ except pywikibot.IsRedirectPage:
+ pywikibot.output("Page %s is a redirect; skipping." %
page.aslink())
+ except pywikibot.LockedPage:
+ pywikibot.output("Page %s is locked?!" % page.aslink())
def run(self):
try:
for page in self.generator:
self.treat(page)
except KeyboardInterrupt:
- wikipedia.output('\nQuitting program...')
+ pywikibot.output('\nQuitting program...')
def main():
#page generator
@@ -498,7 +500,7 @@
# to work on.
genFactory = pagegenerators.GeneratorFactory()
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg.startswith('-summary:'):
editSummary = arg[len('-summary:'):]
elif arg == '-always':
@@ -508,12 +510,11 @@
if editSummary == '':
# Load default summary message.
- editSummary = wikipedia.translate(wikipedia.getSite(), msg_standalone)
- wikipedia.setAction(editSummary)
+ editSummary = pywikibot.translate(pywikibot.getSite(), msg_standalone)
# Disabled this check. Although the point is still valid, there
# is now a warning and a prompt (see below).
- #if wikipedia.getSite() == wikipedia.getSite('nl','wikipedia'):
+ #if pywikibot.getSite() == pywikibot.getSite('nl','wikipedia'):
#print "Deze bot is op WikipediaNL niet gewenst."
#print "Het toevoegen van cosmetic changes bij andere wijzigingen is
toegestaan,"
#print "maar cosmetic_changes als stand-alone bot niet."
@@ -521,22 +522,22 @@
#sys.exit()
if pageTitle:
- page = wikipedia.Page(wikipedia.getSite(), ' '.join(pageTitle))
+ page = pywikibot.Page(pywikibot.getSite(), ' '.join(pageTitle))
gen = iter([page])
if not gen:
gen = genFactory.getCombinedGenerator()
if not gen:
- wikipedia.showHelp()
+ pywikibot.showHelp()
elif not always:
- answer = wikipedia.inputChoice(warning + '\nDo you really want to
continue?', ['yes', 'no'], ['y', 'N'], 'N')
-
+ answer = pywikibot.inputChoice(warning + '\nDo you really want to
continue?', ['yes', 'no'], ['y', 'N'], 'N')
+
if answer == 'y':
preloadingGen = pagegenerators.PreloadingGenerator(gen)
- bot = CosmeticChangesBot(preloadingGen, acceptall=always)
+ bot = CosmeticChangesBot(preloadingGen, acceptall=always, comment=editSummary)
bot.run()
if __name__ == "__main__":
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
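
Besides the module alias, the cosmetic_changes.py hunks above give the bot an explicit edit summary: CosmeticChangesBot.__init__() gains an optional comment argument (defaulting to u'Robot: Cosmetic changes') that is passed through to page.put(), and the old wikipedia.setAction(editSummary) call is dropped. The following is only a minimal usage sketch of the updated class, not part of the patch; the single-page generator and the page title are illustrative assumptions.

    import wikipedia as pywikibot
    import pagegenerators
    from cosmetic_changes import CosmeticChangesBot  # class defined in this file

    site = pywikibot.getSite()
    # Illustrative generator: any pagegenerators generator could be used instead.
    gen = iter([pywikibot.Page(site, u'Sandbox')])
    preloadingGen = pagegenerators.PreloadingGenerator(gen)
    # The new keyword argument carries the edit summary down to page.put().
    bot = CosmeticChangesBot(preloadingGen, acceptall=False,
                             comment=u'Robot: Cosmetic changes')
    bot.run()
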
Modified: trunk/pywikipedia/editarticle.py
===================================================================
--- trunk/pywikipedia/editarticle.py 2009-10-28 18:10:56 UTC (rev 7556)
+++ trunk/pywikipedia/editarticle.py 2009-10-28 18:12:26 UTC (rev 7557)
@@ -28,7 +28,7 @@
import optparse
import tempfile
-import wikipedia
+import wikipedia as pywikibot
import config
msg = {
@@ -126,7 +126,7 @@
os.unlink(tempFilename)
return self.restoreLinebreaks(newcontent)
else:
- return self.restoreLinebreaks(wikipedia.ui.editText(text, jumpIndex =
jumpIndex, highlight = highlight))
+ return self.restoreLinebreaks(pywikibot.ui.editText(text, jumpIndex =
jumpIndex, highlight = highlight))
class ArticleEditor:
# join lines if line starts with this ones
@@ -135,12 +135,12 @@
def __init__(self):
self.set_options()
self.setpage()
- self.site = wikipedia.getSite()
+ self.site = pywikibot.getSite()
def set_options(self):
"""Parse commandline and set options attribute"""
my_args = []
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
my_args.append(arg)
parser = optparse.OptionParser()
parser.add_option("-r", "--edit_redirect",
action="store_true", default=False, help="Ignore/edit redirects")
@@ -156,9 +156,9 @@
def setpage(self):
"""Sets page and page title"""
- site = wikipedia.getSite()
- pageTitle = self.options.page or wikipedia.input(u"Page to edit:")
- self.page = wikipedia.Page(site, pageTitle)
+ site = pywikibot.getSite()
+ pageTitle = self.options.page or pywikibot.input(u"Page to edit:")
+ self.page = pywikibot.Page(site, pageTitle)
if not self.options.edit_redirect and self.page.isRedirectPage():
self.page = self.page.getRedirectTarget()
@@ -167,25 +167,25 @@
fp = open(fn, 'w')
fp.write(new)
fp.close()
- wikipedia.output(u"An edit conflict has arisen. Your edit has been saved to
%s. Please try again." % fn)
+ pywikibot.output(u"An edit conflict has arisen. Your edit has been saved to
%s. Please try again." % fn)
def run(self):
try:
old = self.page.get(get_redirect = self.options.edit_redirect)
- except wikipedia.NoPage:
+ except pywikibot.NoPage:
old = ""
textEditor = TextEditor()
new = textEditor.edit(old)
if new and old != new:
- wikipedia.showDiff(old, new)
- changes = wikipedia.input(u"What did you change?")
- comment = wikipedia.translate(wikipedia.getSite(), msg) % changes
+ pywikibot.showDiff(old, new)
+ changes = pywikibot.input(u"What did you change?")
+ comment = pywikibot.translate(pywikibot.getSite(), msg) % changes
try:
self.page.put(new, comment = comment, minorEdit = False,
watchArticle=self.options.watch)
- except wikipedia.EditConflict:
+ except pywikibot.EditConflict:
self.handle_edit_conflict(new)
else:
- wikipedia.output(u"Nothing changed")
+ pywikibot.output(u"Nothing changed")
def main():
app = ArticleEditor()
@@ -195,5 +195,5 @@
try:
main()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()
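
The editarticle.py hunks above are the same mechanical substitution: every former wikipedia.* call now goes through the pywikibot alias while the underlying module is unchanged. A minimal sketch of that pattern, using only calls that appear in the hunks (the page title and the appended text are illustrative, not from the patch):

    import wikipedia as pywikibot

    site = pywikibot.getSite()
    page = pywikibot.Page(site, u'Sandbox')  # illustrative title
    try:
        old = page.get()
    except pywikibot.NoPage:
        old = u""
    new = old + u"\n<!-- example -->"
    if new and old != new:
        pywikibot.showDiff(old, new)
    pywikibot.stopme()
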
Modified: trunk/pywikipedia/interwiki.py
===================================================================
--- trunk/pywikipedia/interwiki.py 2009-10-28 18:10:56 UTC (rev 7556)
+++ trunk/pywikipedia/interwiki.py 2009-10-28 18:12:26 UTC (rev 7557)
@@ -152,7 +152,7 @@
These arguments define how much user confirmation is required:
-autonomous run automatically, do not ask any questions. If a question
- to an operator is needed, write the name of the page
+ -auto to an operator is needed, write the name of the page
to autonomous_problems.dat and continue on the next page.
(note: without ending colon)
@@ -313,7 +313,8 @@
seq2.reverse()
return seq2
-import wikipedia, config, pagegenerators, catlib
+import wikipedia as pywikibot
+import config, pagegenerators, catlib
import titletranslate, interwiki_graph
import webbrowser
@@ -337,17 +338,17 @@
def __iter__(self):
import xmlreader
- mysite = wikipedia.getSite()
+ mysite = pywikibot.getSite()
dump = xmlreader.XmlDump(self.xmlFilename)
r = re.compile(r'\d')
for entry in dump.parse():
if not r.search(entry.title):
selflinkR = re.compile(r'\[\[lmo:')
if selflinkR.search(entry.text):
- yield wikipedia.Page(mysite, entry.title)
+ yield pywikibot.Page(mysite, entry.title)
-class SaveError(wikipedia.Error):
+class SaveError(pywikibot.Error):
"""
An attempt to save a page with changed interwiki has failed.
"""
@@ -358,7 +359,7 @@
preferences or because the user chose not to change the page.
"""
-class GiveUpOnPage(wikipedia.Error):
+class GiveUpOnPage(pywikibot.Error):
"""
The user chose not to work on this page and its linked pages any more.
"""
@@ -516,7 +517,7 @@
lacklanguage = None
minlinks = 0
-class StoredPage(wikipedia.Page):
+class StoredPage(pywikibot.Page):
"""
Store the Page contents on disk to avoid sucking too much
memory when a big number of Page objects will be loaded
@@ -524,14 +525,14 @@
"""
# Please prefix the class members names by SP
- # to avoid possible name clashes with wikipedia.Page
+ # to avoid possible name clashes with pywikibot.Page
# path to the shelve
SPpath = None
# shelve
SPstore = None
- # attributes created by wikipedia.Page.__init__
+ # attributes created by pywikibot.Page.__init__
SPcopy = [ '_editrestriction',
'_site',
'_namespace',
@@ -869,7 +870,7 @@
return False
if globalvar.nobackonly:
if page == self.originPage:
- wikipedia.output("%s has a backlink from
%s."%(page,linkingPage))
+ pywikibot.output(u"%s has a backlink from
%s."%(page,linkingPage))
self.makeForcedStop(counter)
return False
@@ -910,31 +911,31 @@
if linkedPage.namespace() in nsmatch:
return False
if globalvar.autonomous:
- wikipedia.output(u"NOTE: Ignoring link from page %s in namespace %i
to page %s in namespace %i." % (self.originPage.aslink(True),
self.originPage.namespace(), linkedPage.aslink(True), linkedPage.namespace()))
+ pywikibot.output(u"NOTE: Ignoring link from page %s in namespace %i
to page %s in namespace %i." % (self.originPage.aslink(True),
self.originPage.namespace(), linkedPage.aslink(True), linkedPage.namespace()))
# Fill up foundIn, so that we will not write this notice
self.foundIn[linkedPage] = [linkingPage]
return True
else:
preferredPage = self.getFoundInCorrectNamespace(linkedPage.site())
if preferredPage:
- wikipedia.output(u"NOTE: Ignoring link from page %s in namespace
%i to page %s in namespace %i because page %s in the correct namespace has already been
found." % (self.originPage.aslink(True), self.originPage.namespace(),
linkedPage.aslink(True), linkedPage.namespace(), preferredPage.aslink(True)))
+ pywikibot.output(u"NOTE: Ignoring link from page %s in namespace
%i to page %s in namespace %i because page %s in the correct namespace has already been
found." % (self.originPage.aslink(True), self.originPage.namespace(),
linkedPage.aslink(True), linkedPage.namespace(), preferredPage.aslink(True)))
return True
else:
- choice = wikipedia.inputChoice('WARNING: %s is in namespace %i,
but %s is in namespace %i. Follow it anyway?' % (self.originPage.aslink(True),
self.originPage.namespace(), linkedPage.aslink(True), linkedPage.namespace()),
['Yes', 'No', 'Add an alternative', 'give up'],
['y', 'n', 'a', 'g'])
+ choice = pywikibot.inputChoice('WARNING: %s is in namespace %i,
but %s is in namespace %i. Follow it anyway?' % (self.originPage.aslink(True),
self.originPage.namespace(), linkedPage.aslink(True), linkedPage.namespace()),
['Yes', 'No', 'Add an alternative', 'give up'],
['y', 'n', 'a', 'g'])
if choice != 'y':
# Fill up foundIn, so that we will not ask again
self.foundIn[linkedPage] = [linkingPage]
if choice == 'g':
self.makeForcedStop(counter)
elif choice == 'a':
- newHint = wikipedia.input(u'Give the alternative for
language %s, not using a language code:' % linkedPage.site().language())
+ newHint = pywikibot.input(u'Give the alternative for
language %s, not using a language code:' % linkedPage.site().language())
if newHint:
- alternativePage = wikipedia.Page(linkedPage.site(),
newHint)
+ alternativePage = pywikibot.Page(linkedPage.site(),
newHint)
if alternativePage:
# add the page that was entered by the user
self.addIfNew(alternativePage, counter, None)
else:
- wikipedia.output(u"NOTE: ignoring %s and its interwiki
links" % linkedPage.aslink(True))
+ pywikibot.output(u"NOTE: ignoring %s and its interwiki
links" % linkedPage.aslink(True))
return True
else:
# same namespaces, no problem
@@ -943,10 +944,10 @@
def wiktionaryMismatch(self, page):
if globalvar.same=='wiktionary':
if page.title().lower() != self.originPage.title().lower():
- wikipedia.output(u"NOTE: Ignoring %s for %s in wiktionary mode"
% (page.aslink(), self.originPage.aslink()))
+ pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode"
% (page.aslink(), self.originPage.aslink()))
return True
elif page.title() != self.originPage.title() and
self.originPage.site().nocapitalize and page.site().nocapitalize:
- wikipedia.output(u"NOTE: Ignoring %s for %s in wiktionary mode
because both languages are uncapitalized." % (page.aslink(),
self.originPage.aslink()))
+ pywikibot.output(u"NOTE: Ignoring %s for %s in wiktionary mode
because both languages are uncapitalized." % (page.aslink(),
self.originPage.aslink()))
return True
return False
@@ -966,32 +967,32 @@
"""
if globalvar.autonomous:
if self.originPage.isDisambig() and not page.isDisambig():
- wikipedia.output(u"NOTE: Ignoring link from disambiguation page %s
to non-disambiguation %s" % (self.originPage.aslink(True), page.aslink(True)))
+ pywikibot.output(u"NOTE: Ignoring link from disambiguation page %s
to non-disambiguation %s" % (self.originPage.aslink(True), page.aslink(True)))
return (True, None)
elif not self.originPage.isDisambig() and page.isDisambig():
- wikipedia.output(u"NOTE: Ignoring link from non-disambiguation page
%s to disambiguation %s" % (self.originPage.aslink(True), page.aslink(True)))
+ pywikibot.output(u"NOTE: Ignoring link from non-disambiguation page
%s to disambiguation %s" % (self.originPage.aslink(True), page.aslink(True)))
return (True, None)
else:
choice = 'y'
if self.originPage.isDisambig() and not page.isDisambig():
disambig = self.getFoundDisambig(page.site())
if disambig:
- wikipedia.output(u"NOTE: Ignoring non-disambiguation page %s for
%s because disambiguation page %s has already been found." % (page.aslink(True),
self.originPage.aslink(True), disambig.aslink(True)))
+ pywikibot.output(u"NOTE: Ignoring non-disambiguation page %s for
%s because disambiguation page %s has already been found." % (page.aslink(True),
self.originPage.aslink(True), disambig.aslink(True)))
return (True, None)
else:
- choice = wikipedia.inputChoice('WARNING: %s is a disambiguation
page, but %s doesn\'t seem to be one. Follow it anyway?' %
(self.originPage.aslink(True), page.aslink(True)), ['Yes', 'No', 'Add
an alternative', 'Give up'], ['y', 'n', 'a',
'g'])
+ choice = pywikibot.inputChoice('WARNING: %s is a disambiguation
page, but %s doesn\'t seem to be one. Follow it anyway?' %
(self.originPage.aslink(True), page.aslink(True)), ['Yes', 'No', 'Add
an alternative', 'Give up'], ['y', 'n', 'a',
'g'])
elif not self.originPage.isDisambig() and page.isDisambig():
nondisambig = self.getFoundNonDisambig(page.site())
if nondisambig:
- wikipedia.output(u"NOTE: Ignoring disambiguation page %s for %s
because non-disambiguation page %s has already been found." % (page.aslink(True),
self.originPage.aslink(True), nondisambig.aslink(True)))
+ pywikibot.output(u"NOTE: Ignoring disambiguation page %s for %s
because non-disambiguation page %s has already been found." % (page.aslink(True),
self.originPage.aslink(True), nondisambig.aslink(True)))
return (True, None)
else:
- choice = wikipedia.inputChoice('WARNING: %s doesn\'t seem to
be a disambiguation page, but %s is one. Follow it anyway?' %
(self.originPage.aslink(True), page.aslink(True)), ['Yes', 'No', 'Add
an alternative', 'Give up'], ['y', 'n', 'a',
'g'])
+ choice = pywikibot.inputChoice('WARNING: %s doesn\'t seem to
be a disambiguation page, but %s is one. Follow it anyway?' %
(self.originPage.aslink(True), page.aslink(True)), ['Yes', 'No', 'Add
an alternative', 'Give up'], ['y', 'n', 'a',
'g'])
if choice == 'n':
return (True, None)
elif choice == 'a':
- newHint = wikipedia.input(u'Give the alternative for language %s, not
using a language code:' % page.site().language())
- alternativePage = wikipedia.Page(page.site(), newHint)
+ newHint = pywikibot.input(u'Give the alternative for language %s, not
using a language code:' % page.site().language())
+ alternativePage = pywikibot.Page(page.site(), newHint)
return (True, alternativePage)
elif choice == 'g':
self.makeForcedStop(counter)
@@ -1001,18 +1002,18 @@
def isIgnored(self, page):
if page.site().language() in globalvar.neverlink:
- wikipedia.output(u"Skipping link %s to an ignored language" %
page.aslink())
+ pywikibot.output(u"Skipping link %s to an ignored language" %
page.aslink())
return True
if page in globalvar.ignore:
- wikipedia.output(u"Skipping link %s to an ignored page" %
page.aslink())
+ pywikibot.output(u"Skipping link %s to an ignored page" %
page.aslink())
return True
return False
def reportInterwikilessPage(self, page):
- wikipedia.output(u"NOTE: %s does not have any interwiki links" %
self.originPage.aslink(True))
+ pywikibot.output(u"NOTE: %s does not have any interwiki links" %
self.originPage.aslink(True))
if config.without_interwiki:
f = codecs.open(
- wikipedia.config.datafilepath('without_interwiki.txt'),
'a', 'utf-8')
+ pywikibot.config.datafilepath('without_interwiki.txt'),
'a', 'utf-8')
f.write("# %s \n" % page.aslink(forceInterwiki=True))
f.close()
@@ -1027,15 +1028,15 @@
newhint = None
t = globalvar.showtextlink
if t:
- wikipedia.output(self.originPage.get()[:t])
+ pywikibot.output(self.originPage.get()[:t])
# loop
while True:
- newhint = wikipedia.input(u'Give a hint (? to see
pagetext):')
+ newhint = pywikibot.input(u'Give a hint (? to see
pagetext):')
if newhint == '?':
t += globalvar.showtextlinkadd
- wikipedia.output(self.originPage.get()[:t])
+ pywikibot.output(self.originPage.get()[:t])
elif newhint and not ':' in newhint:
- wikipedia.output(u'Please enter a hint in the format
language:pagename or type nothing if you do not have a hint.')
+ pywikibot.output(u'Please enter a hint in the format
language:pagename or type nothing if you do not have a hint.')
elif not newhint:
break
else:
@@ -1066,7 +1067,7 @@
if globalvar.skipauto:
dictName, year = page.autoFormat()
if dictName is not None:
- wikipedia.output(u'WARNING: %s:%s relates to %s:%s, which is an
auto entry %s(%s)' % (self.originPage.site().language(), self.originPage.title(),
page.site().language(),page.title(),dictName,year))
+ pywikibot.output(u'WARNING: %s:%s relates to %s:%s, which is an
auto entry %s(%s)' % (self.originPage.site().language(), self.originPage.title(),
page.site().language(),page.title(),dictName,year))
# Register this fact at the todo-counter.
counter.minus(page.site())
@@ -1075,7 +1076,7 @@
if not page.exists():
- wikipedia.output(u"NOTE: %s does not exist" %
page.aslink(True))
+ pywikibot.output(u"NOTE: %s does not exist" %
page.aslink(True))
if page == self.originPage:
# The page we are working on is the page that does not exist.
# No use in doing any work on it in that case.
@@ -1089,12 +1090,12 @@
elif page.isRedirectPage():
try:
redirectTargetPage = page.getRedirectTarget()
- except wikipedia.InvalidTitle:
+ except pywikibot.InvalidTitle:
# MW considers #redirect [[en:#foo]] as a redirect page,
# but we can't do anything useful with such pages
- wikipedia.output(u"NOTE: %s redirects to an invalid title"
% page.aslink(True))
+ pywikibot.output(u"NOTE: %s redirects to an invalid title"
% page.aslink(True))
continue
- wikipedia.output(u"NOTE: %s is redirect to %s" %
(page.aslink(True), redirectTargetPage.aslink(True)))
+ pywikibot.output(u"NOTE: %s is redirect to %s" %
(page.aslink(True), redirectTargetPage.aslink(True)))
if page == self.originPage:
if globalvar.initialredirect:
if globalvar.contentsondisk:
@@ -1110,12 +1111,12 @@
counter.minus(site, count)
self.todo = PageTree()
elif not globalvar.followredirect:
- wikipedia.output(u"NOTE: not following redirects.")
+ pywikibot.output(u"NOTE: not following redirects.")
elif page.site().family == redirectTargetPage.site().family \
and not self.skipPage(page, redirectTargetPage, counter):
if self.addIfNew(redirectTargetPage, counter, page):
if config.interwiki_shownew:
- wikipedia.output(u"%s: %s gives new redirect %s" %
(self.originPage.aslink(), page.aslink(True), redirectTargetPage.aslink(True)))
+ pywikibot.output(u"%s: %s gives new redirect %s" %
(self.originPage.aslink(), page.aslink(True), redirectTargetPage.aslink(True)))
continue
@@ -1127,13 +1128,13 @@
try:
iw = page.interwiki()
- except wikipedia.NoSuchSite:
- wikipedia.output(u"NOTE: site %s does not exist" %
page.site())
+ except pywikibot.NoSuchSite:
+ pywikibot.output(u"NOTE: site %s does not exist" %
page.site())
continue
(skip, alternativePage) = self.disambigMismatch(page, counter)
if skip:
- wikipedia.output(u"NOTE: ignoring %s and its interwiki links" %
page.aslink(True))
+ pywikibot.output(u"NOTE: ignoring %s and its interwiki links" %
page.aslink(True))
self.done.remove(page)
iw = ()
if alternativePage:
@@ -1161,14 +1162,14 @@
elif globalvar.autonomous and duplicate:
- wikipedia.output(u"Stopping work on %s because duplicate
pages"\
+ pywikibot.output(u"Stopping work on %s because duplicate
pages"\
" %s and %s are found" % (self.originPage.aslink(True),
duplicate.aslink(True),
page.aslink(True)))
self.makeForcedStop(counter)
try:
f = codecs.open(
- wikipedia.config.datafilepath('autonomous_problems.dat'),
+ pywikibot.config.datafilepath('autonomous_problems.dat'),
'a', 'utf-8')
f.write("* %s {Found more than one link for %s}" %
(self.originPage.aslink(True), page.site()))
if config.interwiki_graph and config.interwiki_graph_url:
@@ -1180,11 +1181,11 @@
# except: should be avoided!!
except:
#raise
- wikipedia.output(u'File autonomous_problems.dat open or corrupted!
Try again with -restore.')
+ pywikibot.output(u'File autonomous_problems.dat open or corrupted!
Try again with -restore.')
sys.exit()
iw = ()
elif page.isEmpty() and not page.isCategory():
- wikipedia.output(u"NOTE: %s is empty; ignoring it and its interwiki
links" % page.aslink(True))
+ pywikibot.output(u"NOTE: %s is empty; ignoring it and its interwiki
links" % page.aslink(True))
# Ignore the interwiki links
self.done.remove(page)
iw = ()
@@ -1192,7 +1193,7 @@
for linkedPage in iw:
if globalvar.hintsareright:
if linkedPage.site in self.hintedsites:
- wikipedia.output(u"NOTE: %s: %s extra interwiki on hinted
site ignored %s" % (self.originPage.aslink(), page.aslink(True),
linkedPage.aslink(True)))
+ pywikibot.output(u"NOTE: %s: %s extra interwiki on hinted
site ignored %s" % (self.originPage.aslink(), page.aslink(True),
linkedPage.aslink(True)))
break
if not self.skipPage(page, linkedPage, counter):
if globalvar.followinterwiki or page == self.originPage:
@@ -1204,11 +1205,11 @@
if prevPage != linkedPage and prevPage.site() == lpsite:
# Still, this could be "no problem" as
either may be a
# redirect to the other. No way to find out quickly!
- wikipedia.output(u"NOTE: %s: %s gives duplicate
interwiki on same site %s" % (self.originPage.aslink(True), page.aslink(True),
linkedPage.aslink(True)))
+ pywikibot.output(u"NOTE: %s: %s gives duplicate
interwiki on same site %s" % (self.originPage.aslink(True), page.aslink(True),
linkedPage.aslink(True)))
break
else:
if config.interwiki_shownew:
- wikipedia.output(u"%s: %s gives new interwiki
%s"% (self.originPage.aslink(), page.aslink(True), linkedPage.aslink(True)))
+ pywikibot.output(u"%s: %s gives new interwiki
%s"% (self.originPage.aslink(), page.aslink(True), linkedPage.aslink(True)))
# These pages are no longer 'in progress'
self.pending = PageTree()
@@ -1223,7 +1224,7 @@
def problem(self, txt, createneed = True):
"""Report a problem with the resolution of this
subject."""
- wikipedia.output(u"ERROR: %s" % txt)
+ pywikibot.output(u"ERROR: %s" % txt)
self.confirm = True
if createneed:
self.problemfound = True
@@ -1231,15 +1232,15 @@
def whereReport(self, page, indent=4):
for page2 in sorted(self.foundIn[page]):
if page2 is None:
- wikipedia.output(u" "*indent + "Given as a hint.")
+ pywikibot.output(u" "*indent + "Given as a hint.")
else:
- wikipedia.output(u" "*indent + page2.aslink(True))
+ pywikibot.output(u" "*indent + page2.aslink(True))
def assemble(self):
# No errors have been seen so far, except....
errorCount = self.problemfound
- mysite = wikipedia.getSite()
+ mysite = pywikibot.getSite()
# Build up a dictionary of all pages found, with the site as key.
# Each value will be a list of pages.
new = {}
@@ -1282,15 +1283,15 @@
# First loop over the ones that have more solutions
for site, pages in new.iteritems():
if len(pages) > 1:
- wikipedia.output(u"=" * 30)
- wikipedia.output(u"Links to %s" % site)
+ pywikibot.output(u"=" * 30)
+ pywikibot.output(u"Links to %s" % site)
i = 0
for page2 in pages:
i += 1
- wikipedia.output(u" (%d) Found link to %s in:" % (i,
page2.aslink(True)))
+ pywikibot.output(u" (%d) Found link to %s in:" % (i,
page2.aslink(True)))
self.whereReport(page2, indent = 8)
while True:
- answer = wikipedia.input(u"Which variant should be used [number,
(n)one, (g)ive up] :")
+ answer = pywikibot.input(u"Which variant should be used [number,
(n)one, (g)ive up] :")
if answer:
if answer == 'g':
return None
@@ -1312,15 +1313,15 @@
for site, pages in new.iteritems():
if len(pages) == 1:
if not acceptall:
- wikipedia.output(u"=" * 30)
+ pywikibot.output(u"=" * 30)
page2 = pages[0]
- wikipedia.output(u"Found link to %s in:" %
page2.aslink(True))
+ pywikibot.output(u"Found link to %s in:" %
page2.aslink(True))
self.whereReport(page2, indent = 4)
while True:
if acceptall:
answer = 'a'
else:
- answer = wikipedia.inputChoice(u'What should be done?',
['accept', 'reject', 'give up', 'accept all'],
['a', 'r', 'g', 'l'], 'a')
+ answer = pywikibot.inputChoice(u'What should be done?',
['accept', 'reject', 'give up', 'accept all'],
['a', 'r', 'g', 'l'], 'a')
if answer == 'l': # accept all
acceptall = True
answer = 'a'
@@ -1347,7 +1348,7 @@
if not self.workonme:
return
if self.forcedStop:
- wikipedia.output(u"======Aborted processing %s======" %
self.originPage.aslink(True))
+ pywikibot.output(u"======Aborted processing %s======" %
self.originPage.aslink(True))
return
if self.originPage.isRedirectPage():
return
@@ -1359,11 +1360,11 @@
# if len(self.done) == 1:
# # No interwiki at all
# return
- wikipedia.output(u"======Post-processing %s======" %
self.originPage.aslink(True))
+ pywikibot.output(u"======Post-processing %s======" %
self.originPage.aslink(True))
# Assemble list of accepted interwiki links
new = self.assemble()
if new is None: # User said give up or autonomous with problem
- wikipedia.output(u"======Aborted processing %s======" %
self.originPage.aslink(True))
+ pywikibot.output(u"======Aborted processing %s======" %
self.originPage.aslink(True))
return
# Make sure new contains every page link, including the page we are processing
@@ -1382,7 +1383,7 @@
frgnSiteDone = False
for siteCode in lclSite.family.languages_by_size:
- site = wikipedia.getSite(code = siteCode)
+ site = pywikibot.getSite(code = siteCode)
if (not lclSiteDone and site == lclSite) or (not frgnSiteDone and site !=
lclSite and site in new):
if site == lclSite:
lclSiteDone = True # even if we fail the update
@@ -1401,8 +1402,8 @@
try:
for page in new[site].interwiki():
old[page.site()] = page
- except wikipedia.NoPage:
- wikipedia.output(u"BUG>>> %s no longer
exists?" % new[site].aslink(True))
+ except pywikibot.NoPage:
+ pywikibot.output(u"BUG>>> %s no longer
exists?" % new[site].aslink(True))
continue
mods, mcomment, adding, removing, modifying = compareLanguages(old,
new, insite = lclSite)
if (len(removing) > 0 and not globalvar.autonomous) or
(len(modifying) > 0 and self.problemfound) or len(old) == 0 or (globalvar.needlimit and
len(adding) + len(modifying) >= globalvar.needlimit +1):
@@ -1411,7 +1412,7 @@
updatedSites.append(site)
except SaveError:
notUpdatedSites.append(site)
- except wikipedia.NoUsername:
+ except pywikibot.NoUsername:
pass
except GiveUpOnPage:
break
@@ -1470,16 +1471,16 @@
if page.title() != page.sectionFreeTitle():
# This is not a page, but a subpage. Do not edit it.
- wikipedia.output(u"Not editing %s: not doing interwiki on subpages"
% page.aslink(True))
+ pywikibot.output(u"Not editing %s: not doing interwiki on subpages"
% page.aslink(True))
raise SaveError
try:
pagetext = page.get()
- except wikipedia.NoPage:
- wikipedia.output(u"Not editing %s: page does not exist" %
page.aslink(True))
+ except pywikibot.NoPage:
+ pywikibot.output(u"Not editing %s: page does not exist" %
page.aslink(True))
raise SaveError
# Show a message in purple.
- wikipedia.output("\03{lightpurple}Updating links on page
%s.\03{default}" % page.aslink(True))
+ pywikibot.output(u"\03{lightpurple}Updating links on page
%s.\03{default}" % page.aslink(True))
# clone original newPages dictionary, so that we can modify it to the local
page's needs
new = dict(newPages)
@@ -1489,18 +1490,18 @@
# remove interwiki links to ignore
for iw in re.finditer('<!-- *\[\[(.*?:.*?)\]\] *-->', pagetext):
try:
- ignorepage = wikipedia.Page(page.site(), iw.groups()[0])
- except (wikipedia.NoSuchSite, wikipedia.InvalidTitle):
+ ignorepage = pywikibot.Page(page.site(), iw.groups()[0])
+ except (pywikibot.NoSuchSite, pywikibot.InvalidTitle):
continue
try:
if (new[ignorepage.site()] == ignorepage) and (ignorepage.site() !=
page.site()):
if (ignorepage not in interwikis):
- wikipedia.output(u"Ignoring link to %(to)s for
%(from)s" % {'to': ignorepage.aslink(), 'from': page.aslink()})
+ pywikibot.output(u"Ignoring link to %(to)s for
%(from)s" % {'to': ignorepage.aslink(), 'from': page.aslink()})
new.pop(ignorepage.site())
else:
- wikipedia.output(u"NOTE: Not removing interwiki from
%(from)s to %(to)s (exists both commented and non-commented)" % {'to':
ignorepage.aslink(), 'from': page.aslink()})
+ pywikibot.output(u"NOTE: Not removing interwiki from
%(from)s to %(to)s (exists both commented and non-commented)" % {'to':
ignorepage.aslink(), 'from': page.aslink()})
except KeyError:
pass
@@ -1509,7 +1510,7 @@
if pltmp != page:
s = "None"
if pltmp is not None: s = pltmp.aslink(True)
- wikipedia.output(u"BUG>>> %s is not in the list of new links!
Found %s." % (page.aslink(True), s))
+ pywikibot.output(u"BUG>>> %s is not in the list of new links!
Found %s." % (page.aslink(True), s))
raise SaveError
# Avoid adding an iw link back to itself
@@ -1532,28 +1533,28 @@
# temporary hard-coded special case to get rid of thousands of broken
links to the Lombard Wikipedia,
# where useless bot-created articles were mass-deleted. See for
example:
#
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Lo…
- if rmsite == wikipedia.getSite('lmo', 'wikipedia'):
- wikipedia.output('Found bad link to %s. As many lmo pages
were deleted, it is assumed that it can be safely removed.' % rmPage.aslink())
+ if rmsite == pywikibot.getSite('lmo', 'wikipedia'):
+ pywikibot.output(u'Found bad link to %s. As many lmo pages
were deleted, it is assumed that it can be safely removed.' % rmPage.aslink())
else:
##########
new[rmsite] = old[rmsite]
- wikipedia.output(u"WARNING: %s is either deleted or has a
mismatching disambiguation state." % rmPage.aslink(True))
+ pywikibot.output(u"WARNING: %s is either deleted or has a
mismatching disambiguation state." % rmPage.aslink(True))
# Re-Check what needs to get done
mods, mcomment, adding, removing, modifying = compareLanguages(old, new,
insite = page.site())
if not mods:
- wikipedia.output(u'No changes needed' )
+ pywikibot.output(u'No changes needed' )
return False
- wikipedia.output(u"Changes to be made: %s" % mods)
+ pywikibot.output(u"Changes to be made: %s" % mods)
oldtext = page.get()
- newtext = wikipedia.replaceLanguageLinks(oldtext, new, site = page.site(),
template = (page.namespace() == 10) )
+ newtext = pywikibot.replaceLanguageLinks(oldtext, new, site = page.site(),
template = (page.namespace() == 10) )
if newtext == oldtext:
return False
if globalvar.debug:
- wikipedia.showDiff(oldtext, newtext)
+ pywikibot.showDiff(oldtext, newtext)
- # wikipedia.output(u"NOTE: Replace %s" % page.aslink())
+ # pywikibot.output(u"NOTE: Replace %s" % page.aslink())
# Determine whether we need permission to submit
ask = False
if removing and removing != [page.site()]: # Allow for special case of a
self-pointing interwiki link
@@ -1569,7 +1570,7 @@
# If we cannot ask, deny permission
answer = 'n'
else:
- answer = wikipedia.inputChoice(u'Submit?',
+ answer = pywikibot.inputChoice(u'Submit?',
['Yes', 'No', 'open in Browser',
'Give up'],
['y', 'n', 'b', 'g'])
if answer == 'b':
@@ -1577,51 +1578,51 @@
page.site().hostname(),
page.site().nice_get_address(page.title())
))
- wikipedia.input("Press Enter when finished in browser.")
+ pywikibot.input("Press Enter when finished in browser.")
return True
else:
# If we do not need to ask, allow
answer = 'y'
# If we got permission to submit, do so
if answer == 'y':
- # Check whether we will have to wait for wikipedia. If so, make
+ # Check whether we will have to wait for pywikibot. If so, make
# another get-query first.
if bot:
- while wikipedia.get_throttle.waittime() + 2.0 <
wikipedia.put_throttle.waittime():
- wikipedia.output(u"NOTE: Performing a recursive query first to
save time....")
+ while pywikibot.get_throttle.waittime() + 2.0 <
pywikibot.put_throttle.waittime():
+ pywikibot.output(u"NOTE: Performing a recursive query first to
save time....")
qdone = bot.oneQuery()
if not qdone:
# Nothing more to do
break
- wikipedia.output(u"NOTE: Updating live wiki...")
+ pywikibot.output(u"NOTE: Updating live wiki...")
timeout=60
while 1:
try:
status, reason, data = page.put(newtext, comment = mcomment)
- except wikipedia.LockedPage:
- wikipedia.output(u'Page %s is locked. Skipping.' %
(page.title(),))
+ except pywikibot.LockedPage:
+ pywikibot.output(u'Page %s is locked. Skipping.' %
(page.title(),))
raise SaveError
- except wikipedia.EditConflict:
- wikipedia.output(u'ERROR putting page: An edit conflict occurred.
Giving up.')
+ except pywikibot.EditConflict:
+ pywikibot.output(u'ERROR putting page: An edit conflict occurred.
Giving up.')
raise SaveError
- except (wikipedia.SpamfilterError), error:
- wikipedia.output(u'ERROR putting page: %s blacklisted by
spamfilter. Giving up.' % (error.url,))
+ except (pywikibot.SpamfilterError), error:
+ pywikibot.output(u'ERROR putting page: %s blacklisted by
spamfilter. Giving up.' % (error.url,))
raise SaveError
- except (wikipedia.PageNotSaved), error:
- wikipedia.output(u'ERROR putting page: %s' % (error.args,))
+ except (pywikibot.PageNotSaved), error:
+ pywikibot.output(u'ERROR putting page: %s' % (error.args,))
raise SaveError
except (socket.error, IOError), error:
if timeout>3600:
raise
- wikipedia.output(u'ERROR putting page: %s' % (error.args,))
- wikipedia.output(u'Sleeping %i seconds before trying again.'
% (timeout,))
+ pywikibot.output(u'ERROR putting page: %s' % (error.args,))
+ pywikibot.output(u'Sleeping %i seconds before trying again.'
% (timeout,))
timeout *= 2
time.sleep(timeout)
- except wikipedia.ServerError:
+ except pywikibot.ServerError:
if timeout>3600:
raise
- wikipedia.output(u'ERROR putting page: ServerError.')
- wikipedia.output(u'Sleeping %i seconds before trying again.'
% (timeout,))
+ pywikibot.output(u'ERROR putting page: ServerError.')
+ pywikibot.output(u'Sleeping %i seconds before trying again.'
% (timeout,))
timeout *= 2
time.sleep(timeout)
else:
@@ -1629,7 +1630,7 @@
if str(status) == '302':
return True
else:
- wikipedia.output(u'%s %s' % (status, reason))
+ pywikibot.output(u'%s %s' % (status, reason))
return False
elif answer == 'g':
raise GiveUpOnPage
@@ -1653,8 +1654,8 @@
if not page.section():
try:
linkedPages = set(page.interwiki())
- except wikipedia.NoPage:
- wikipedia.output(u"WARNING: Page %s does no longer
exist?!" % page.title())
+ except pywikibot.NoPage:
+ pywikibot.output(u"WARNING: Page %s does no longer
exist?!" % page.title())
break
# To speed things up, create a dictionary which maps sites to pages.
# This assumes that there is only one interwiki link per language.
@@ -1665,18 +1666,18 @@
if expectedPage != page:
try:
linkedPage = linkedPagesDict[expectedPage.site()]
- wikipedia.output(u"WARNING: %s: %s does not link to
%s but to %s" % (page.site().family.name, page.aslink(True),
expectedPage.aslink(True), linkedPage.aslink(True)))
+ pywikibot.output(u"WARNING: %s: %s does not link to
%s but to %s" % (page.site().family.name, page.aslink(True),
expectedPage.aslink(True), linkedPage.aslink(True)))
except KeyError:
- wikipedia.output(u"WARNING: %s: %s does not link to
%s" % (page.site().family.name, page.aslink(True), expectedPage.aslink(True)))
+ pywikibot.output(u"WARNING: %s: %s does not link to
%s" % (page.site().family.name, page.aslink(True), expectedPage.aslink(True)))
# Check for superfluous links
for linkedPage in linkedPages:
if linkedPage not in expectedPages:
# Check whether there is an alternative page on that
language.
# In this case, it was already reported above.
if linkedPage.site() not in expectedSites:
- wikipedia.output(u"WARNING: %s: %s links to
incorrect %s" % (page.site().family.name, page.aslink(True),
linkedPage.aslink(True)))
+ pywikibot.output(u"WARNING: %s: %s links to
incorrect %s" % (page.site().family.name, page.aslink(True),
linkedPage.aslink(True)))
except (socket.error, IOError):
- wikipedia.output(u'ERROR: could not report backlinks')
+ pywikibot.output(u'ERROR: could not report backlinks')
class InterwikiBot(object):
"""A class keeping track of a list of subjects, controlling which
pages
@@ -1710,15 +1711,15 @@
self.generateUntil = until
def dump(self):
- site = wikipedia.getSite()
- dumpfn = wikipedia.config.datafilepath(
+ site = pywikibot.getSite()
+ dumpfn = pywikibot.config.datafilepath(
'interwiki-dumps',
'interwikidump-%s-%s.txt' % (site.family.name, site.lang))
f = codecs.open(dumpfn, 'w', 'utf-8')
for subj in self.subjects:
f.write(subj.originPage.aslink(None)+'\n')
f.close()
- wikipedia.output(u'Dump %s (%s) saved' % (site.lang, site.family.name))
+ pywikibot.output(u'Dump %s (%s) saved' % (site.lang, site.family.name))
def generateMore(self, number):
"""Generate more subjects. This is called internally when the
@@ -1726,26 +1727,26 @@
PageGenerator"""
fs = self.firstSubject()
if fs:
- wikipedia.output(u"NOTE: The first unfinished subject is " +
fs.originPage.aslink(True))
- wikipedia.output(u"NOTE: Number of pages queued is %d, trying to add %d
more."%(len(self.subjects), number))
+ pywikibot.output(u"NOTE: The first unfinished subject is " +
fs.originPage.aslink(True))
+ pywikibot.output(u"NOTE: Number of pages queued is %d, trying to add %d
more."%(len(self.subjects), number))
for i in range(number):
try:
while True:
page = self.pageGenerator.next()
if page in globalvar.skip:
- wikipedia.output(u'Skipping: %s is in the skip list' %
page.title())
+ pywikibot.output(u'Skipping: %s is in the skip list' %
page.title())
continue
if globalvar.skipauto:
dictName, year = page.autoFormat()
if dictName is not None:
- wikipedia.output(u'Skipping: %s is an auto entry
%s(%s)' % (page.title(),dictName,year))
+ pywikibot.output(u'Skipping: %s is an auto entry
%s(%s)' % (page.title(),dictName,year))
continue
if globalvar.parenthesesonly:
# Only yield pages that have ( ) in titles
if "(" not in page.title():
continue
if page.isTalkPage():
- wikipedia.output(u'Skipping: %s is a talk page' %
page.title())
+ pywikibot.output(u'Skipping: %s is a talk page' %
page.title())
continue
break
@@ -1781,8 +1782,8 @@
# have to wait before submitting another modification to go live. Select
# any language from counts.
oc = self.counts
- if wikipedia.getSite() in oc:
- return wikipedia.getSite()
+ if pywikibot.getSite() in oc:
+ return pywikibot.getSite()
for lang in oc:
count = self.counts[lang]
if count > max:
@@ -1793,8 +1794,8 @@
def selectQuerySite(self):
"""Select the site the next query should go out
for."""
# How many home-language queries we still have?
- ###xqt: its seems this counts a negative value
- mycount = max(0, self.counts.get(wikipedia.getSite(), 0))
+ ### it seems this counts a negative value
+ mycount = max(0, self.counts.get(pywikibot.getSite(), 0))
# Do we still have enough subjects to work on for which the
# home language has been retrieved? This is rough, because
# some subjects may need to retrieve a second home-language page!
@@ -1805,17 +1806,17 @@
while timeout<3600:
try:
self.generateMore(globalvar.maxquerysize - mycount)
- except wikipedia.ServerError:
+ except pywikibot.ServerError:
# Could not extract allpages special page?
- wikipedia.output(u'ERROR: could not retrieve more pages. Will
try again in %d seconds'%timeout)
+ pywikibot.output(u'ERROR: could not retrieve more pages. Will
try again in %d seconds'%timeout)
time.sleep(timeout)
timeout *= 2
else:
break
# If we have a few, getting the home language is a good thing.
try:
- if self.counts[wikipedia.getSite()] > 4:
- return wikipedia.getSite()
+ if self.counts[pywikibot.getSite()] > 4:
+ return pywikibot.getSite()
except KeyError:
pass
# If getting the home language doesn't make sense, see how many
@@ -1832,7 +1833,7 @@
# First find the best language to work on
site = self.selectQuerySite()
if site is None:
- wikipedia.output(u"NOTE: Nothing left to do")
+ pywikibot.output(u"NOTE: Nothing left to do")
return False
# Now assemble a reasonable list of pages to get
subjectGroup = []
@@ -1848,7 +1849,7 @@
# We have found enough pages to fill the bandwidth.
break
if len(pageGroup) == 0:
- wikipedia.output(u"NOTE: Nothing left to do 2")
+ pywikibot.output(u"NOTE: Nothing left to do 2")
return False
# Get the content of the assembled list in one blow
gen = pagegenerators.PreloadingGenerator(iter(pageGroup))
@@ -1905,7 +1906,7 @@
modifying = sorted(site for site in oldiw & newiw if old[site] != new[site])
mcomment = u''
- mods = u''
+ mods = u""
if len(adding) + len(removing) + len(modifying) <= 3:
# Use an extended format for the string linking to all added pages.
@@ -1917,7 +1918,7 @@
colon = u': '
comma = u', '
- head, add, rem, mod = wikipedia.translate(insite.lang, msg)
+ head, add, rem, mod = pywikibot.translate(insite.lang, msg)
sep = u''
@@ -1970,10 +1971,10 @@
# to work on.
genFactory = pagegenerators.GeneratorFactory()
- for arg in wikipedia.handleArgs():
+ for arg in pywikibot.handleArgs():
if arg.startswith('-xml'):
if len(arg) == 4:
- xmlFilename = wikipedia.input(u'Please enter the XML dump\'s
filename:')
+ xmlFilename = pywikibot.input(u'Please enter the XML dump\'s
filename:')
else:
xmlFilename = arg[5:]
hintlessPageGen = XmlDumpLmoLinkPageGenerator(xmlFilename)
@@ -1984,7 +1985,7 @@
elif arg.startswith('-hintfile'):
hintfilename = arg[10:]
if (hintfilename is None) or (hintfilename == ''):
- hintfilename = wikipedia.input(u'Please enter the hint
filename:')
+ hintfilename = pywikibot.input(u'Please enter the hint
filename:')
f = codecs.open(hintfilename, 'r', config.textfile_encoding)
R = re.compile(ur'\[\[(.+?)(?:\]\]|\|)') # hint or title ends
either before | or before ]]
for pageTitle in R.findall(f.read()):
@@ -2084,7 +2085,7 @@
elif arg.startswith('-neverlink:'):
globalvar.neverlink += arg[11:].split(",")
elif arg.startswith('-ignore:'):
- globalvar.ignore += [wikipedia.Page(None,p) for p in
arg[8:].split(",")]
+ globalvar.ignore += [pywikibot.Page(None,p) for p in
arg[8:].split(",")]
elif arg.startswith('-ignorefile:'):
ignorefile = arg[12:]
ignorePageGen = pagegenerators.TextfilePageGenerator(ignorefile)
@@ -2122,11 +2123,11 @@
# ensure that we don't try to change main page
try:
- site = wikipedia.getSite()
+ site = pywikibot.getSite()
mainpagename = site.mediawiki_message('mainpage')
- globalvar.skip.add(wikipedia.Page(site, mainpagename))
- except wikipedia.Error:
- wikipedia.output(u'Missing main page name')
+ globalvar.skip.add(pywikibot.Page(site, mainpagename))
+ except pywikibot.Error:
+ pywikibot.output(u'Missing main page name')
if newPages is not None:
if len(namespaces) == 0:
@@ -2145,8 +2146,8 @@
hintlessPageGen = pagegenerators.NewpagesPageGenerator(newPages,
namespace=ns)
if optRestore or optContinue:
- site = wikipedia.getSite()
- dumpFileName = wikipedia.config.datafilepath(
+ site = pywikibot.getSite()
+ dumpFileName = pywikibot.config.datafilepath(
'interwiki-dumps',
u'interwikidump-%s-%s.txt'
% (site.family.name, site.lang))
@@ -2160,7 +2161,7 @@
nextPage = page.titleWithoutNamespace() + '!'
namespace = page.namespace()
except NameError:
- wikipedia.output(u"Dump file is empty?! Starting at the
beginning.")
+ pywikibot.output(u"Dump file is empty?! Starting at the
beginning.")
nextPage = "!"
namespace = 0
# old generator is used up, create a new one
@@ -2181,8 +2182,8 @@
else:
singlePageTitle = ' '.join(singlePageTitle)
if not singlePageTitle:
- singlePageTitle = wikipedia.input(u'Which page to check:')
- singlePage = wikipedia.Page(wikipedia.getSite(), singlePageTitle)
+ singlePageTitle = pywikibot.input(u'Which page to check:')
+ singlePage = pywikibot.Page(pywikibot.getSite(), singlePageTitle)
bot.add(singlePage, hints = hints)
try:
@@ -2198,4 +2199,4 @@
StoredPage.SPdeleteStore()
finally:
- wikipedia.stopme()
+ pywikibot.stopme()