Revision: 7912
Author: xqt
Date: 2010-02-05 17:40:58 +0000 (Fri, 05 Feb 2010)
Log Message:
-----------
update language dict for translate() from trunk
Modified Paths:
--------------
branches/rewrite/pywikibot/textlib.py
Modified: branches/rewrite/pywikibot/textlib.py
===================================================================
--- branches/rewrite/pywikibot/textlib.py 2010-02-05 17:36:47 UTC (rev 7911)
+++ branches/rewrite/pywikibot/textlib.py 2010-02-05 17:40:58 UTC (rev 7912)
@@ -100,7 +100,7 @@
}
# if we got a string, compile it as a regular expression
- if type(old) is str or type(old) is unicode:
+ if type(old) in [str, unicode]:
if caseInsensitive:
old = re.compile(old, re.IGNORECASE | re.UNICODE)
else:
@@ -114,6 +114,9 @@
if exc not in exceptionRegexes:
raise ValueError("Unknown tag type: " + exc)
dontTouchRegexes.append(exceptionRegexes[exc])
+ # handle alias
+ if exc == 'source':
+ dontTouchRegexes.append(re.compile(r'(?is)<syntaxhighlight .*?</syntaxhighlight>'))
else:
# assume it's a regular expression
dontTouchRegexes.append(exc)
@@ -275,7 +278,7 @@
instead.
"""
- if insite == None:
+ if insite is None:
insite = pywikibot.getSite()
result = {}
# Ignore interwiki links within nowiki tags, includeonly tags, pre tags,
@@ -312,7 +315,7 @@
interwiki links).
"""
- if site == None:
+ if site is None:
site = pywikibot.getSite()
if not site.validLanguageLinks():
return text
@@ -358,7 +361,7 @@
"""
# Find a marker that is not already in the text.
marker = findmarker( oldtext, u'@@')
- if site == None:
+ if site is None:
site = pywikibot.getSite()
separator = site.family.interwiki_text_separator
cseparator = site.family.category_text_separator
@@ -574,12 +577,13 @@
def replaceCategoryLinks(oldtext, new, site = None, addOnly = False):
- """Replace the category links given in the wikitext given
+ """
+ Replace the category links given in the wikitext given
in oldtext by the new links given in new.
'new' should be a list of Category objects.
- If addOnly is True, the old category won't be deleted andthe
+ If addOnly is True, the old category won't be deleted and the
category(s) given will be added (and so they won't replace anything).
"""
@@ -614,7 +618,7 @@
firstafter = len(s2)
else:
firstafter += len(marker)
- # Is there text in the 'after' part that means we should keep it
+ # Is there text in the 'after' part that means we should keep it
# after?
if "</noinclude>" in s2[firstafter:]:
if separatorstripped:
@@ -813,104 +817,139 @@
one with a translation, or '_default' as a last resort.
"""
- if code=='aa':
+ #Amharic
+ if code in ['aa', 'om']:
return ['am']
- if code in ['fa','so']:
+ #Arab
+ if code in ['arc', 'arz']:
return ['ar']
- if code=='ku':
- return ['ar','tr']
- if code=='sk':
- return ['cs']
- if code in ['bar','ksh','stq']:
+ if code == 'kab':
+ return ['ar', 'fr']
+ #Bulgarian
+ if code in ['cu', 'mk']:
+ return ['bg', 'sr', 'sh']
+ #Czech
+ if code in ['cs', 'sk']:
+ return ['cs', 'sk']
+ #German
+ if code in ['bar', 'ksh', 'pdc']:
return ['de']
- if code in ['als','lb']:
- return ['de','fr']
- if code=='dsb':
- return ['hsb','de']
- if code=='hsb':
- return ['dsb','de']
- if code=='io':
+ if code in ['als', 'lb']:
+ return ['de', 'fr']
+ if code == 'nds':
+ return ['nds-nl', 'de']
+ if code in ['dsb', 'hsb']:
+ return ['hsb', 'dsb', 'de']
+ if code == 'rm':
+ return ['de', 'it']
+ if code == 'stq':
+ return ['fy', 'de']
+ #Greek
+ if code == 'pnt':
+ return ['el']
+ #Esperanto
+ if code in ['io', 'nov']:
return ['eo']
- if code in ['an','ast','ay','ca','gn','nah','qu']:
+ #Spanish
+ if code in ['an', 'ast', 'ay', 'ca', 'ext', 'lad', 'nah', 'nv', 'qu']:
return ['es']
- if code == ['cbk-zam']:
- return ['es','tl']
- if code=='eu':
- return ['es','fr']
- if code in ['glk','mzn']:
- return ['fa','ar']
- if code=='gl':
- return ['es','pt']
- if code=='lad':
- return ['es','he']
- if code in ['br','ht','kab','ln','lo','nrm','wa']:
+ if code in ['gl', 'gn']:
+ return ['es', 'pt']
+ if code == ['eu']:
+ return ['es', 'fr']
+ if code in ['bcl', 'cbk-zam', 'ceb', 'ilo', 'pag', 'pam', 'tl', 'war']:
+ return ['es', 'tl']
+ #Estonian
+ if code == 'fiu-vro':
+ return ['et']
+ #Persian (Farsi)
+ if code in ['glk', 'mzn']:
+ return ['ar']
+ #French
+ if code in ['bm', 'br', 'ht', 'kab', 'kg', 'ln', 'mg', 'nrm', 'oc',
+ 'pcd', 'rw', 'sg', 'ty', 'wa']:
return ['fr']
- if code in ['ie','oc']:
- return ['ie','oc','fr']
- if code in ['co','frp']:
- return ['fr','it']
- if code=='yi':
- return ['he','de']
- if code=='sa':
+ if code == 'co':
+ return ['fr', 'it']
+ #Hindi
+ if code in ['bh', 'pi', 'sa']:
return ['hi']
- if code in ['eml','lij','lmo','nap','pms','roa-tara','sc','scn','vec']:
+ if code in ['ne', 'new']:
+ return ['ne', 'new', 'hi']
+ #Indonesian and Malay
+ if code in ['ace', 'bug', 'id', 'jv', 'ms', 'su']:
+ return ['id', 'ms', 'jv']
+ if code == 'map-bms':
+ return ['jv', 'id', 'ms']
+ #Inuit languages
+ if code in ['ik', 'iu']:
+ return ['iu', 'kl']
+ if code == 'kl':
+ return ['iu', 'da', 'no']
+ #Italian
+ if code in ['eml', 'fur', 'lij', 'lmo', 'nap', 'pms', 'roa-tara', 'sc',
+ 'scn', 'vec']:
return ['it']
- if code=='rm':
- return ['it','de','fr']
- if code in ['bat-smg','ltg']:
+ if code == 'frp':
+ return ['it', 'fr']
+ #Lithuanian
+ if code in ['bat-smg', 'ltg']:
return ['lt']
- if code=='ia':
- return ['la','es','fr','it']
- if code=='nds':
- return ['nds-nl','de']
- if code=='nds-nl':
- return ['nds','nl']
- if code in ['fy','pap','vls','zea']:
+ #Dutch
+ if code in ['fy', 'li', 'pap', 'srn', 'vls', 'zea']:
return ['nl']
- if code=='li':
- return ['nl','de']
- if code=='csb':
+ if code == ['nds-nl']:
+ return ['nds', 'nl']
+ #Polish
+ if code in ['csb', 'szl']:
return ['pl']
- if code in ['fab','tet']:
+ #Portuguese
+ if code in ['fab', 'mwl', 'tet']:
return ['pt']
- if code in ['mo','roa-rup']:
+ #Romanian
+ if code in ['mo', 'roa-rup']:
return ['ro']
- if code in ['av','bxr','cv','hy','lbe','tg','udm','uk','xal']:
+ #Russian and Belarusian
+ if code in ['ab', 'av', 'ba', 'bxr', 'ce', 'cv', 'kk', 'ky', 'lbe', 'mdf',
+ 'mhr', 'myv', 'os', 'sah', 'tg', 'tt', 'udm', 'uk', 'xal']:
return ['ru']
- if code in ['be','be-x-old']:
- return ['be','be-x-old','ru']
- if code in ['ky','tt','uz']:
- return ['kk','tr','ru']
- if code in ['az','diq','tk','ug']:
- return ['tr']
- if code in ['ja','minnan','zh','zh-cn']:
- return ['zh','zh-tw','zh-classical','zh-cn']
- if code in ['bo','cdo','hak','wuu','za','zh-cdo','zh-classical','zh-tw','zh-yue']:
- return ['zh','zh-cn','zh-classical','zh-tw']
- if code=='da':
- return ['nb','no']
- if code in ['is','no','nb','nn']:
- return ['no','nb','nn','da','sv']
- if code=='sv':
- return ['da','no','nb']
- if code=='se':
- return ['no','nb','sv','nn','fi','da']
- if code in ['bug','id','jv','map-bms','ms','su']:
- return ['id','ms','jv']
- if code in ['bs','hr','sh']:
- return ['sh','hr','bs','sr']
- if code in ['mk','sr']:
- return ['sh','sr','hr','bs']
- if code in ['ceb','pag','tl','war']:
- return ['tl','es']
- if code=='bi':
- return ['tpi']
- if code=='tpi':
- return ['bi']
- if code == 'new':
- return ['ne']
- if code == 'nov':
- return ['io','eo']
+ if code in ['be', 'be-x-old']:
+ return ['be', 'be-x-old', 'ru']
+ if code == 'kaa':
+ return ['uz', 'ru']
+ #Serbocroatian
+ if code in ['bs', 'hr', 'sh', 'sr']:
+ return ['sh', 'hr', 'bs', 'sr']
+ #Turkish and Kurdish
+ if code in ['diq', 'ku']:
+ return ['ku', 'tr']
+ if code == 'ckb':
+ return ['ku', 'ar']
+ #Chinese
+ if code in ['minnan', 'zh', 'zh-classical', 'zh-min-nan', 'zh-tw', 'zh-hans', 'zh-hant']:
+ return ['zh', 'zh-tw', 'zh-cn', 'zh-classical']
+ if code in ['cdo', 'gan', 'hak', 'ii', 'wuu', 'za', 'zh-cdo', 'zh-classical',
+ 'zh-cn', 'zh-yue']:
+ return ['zh', 'zh-cn', 'zh-tw', 'zh-classical']
+ #Scandinavian languages
+ if code in ['da', 'sv']:
+ return ['da', 'no', 'nb', 'sv', 'nn']
+ if code in ['fo', 'is']:
+ return ['da', 'no', 'nb', 'nn', 'sv']
+ if code == 'nn':
+ return ['no', 'nb', 'sv', 'da']
+ if code in ['nb', 'no']:
+ return ['no', 'nb', 'da', 'nn', 'sv']
+ if code == 'se':
+ return ['sv', 'no', 'nb', 'nn', 'fi']
+ #Other languages
+ if code in ['bi', 'tpi']:
+ return ['bi', 'tpi']
+ if code == 'yi':
+ return ['he', 'de']
+ if code in ['ia', 'ie']:
+ return ['ia', 'la', 'it', 'fr', 'es']
+ #Default value
return []
def translate(code, xdict):
@@ -936,7 +975,9 @@
for alt in _altlang(code):
if alt in xdict:
return xdict[alt]
- if "en" in xdict:
+ if '_default' in xdict:
+ return xdict['_default']
+ elif 'en' in xdict:
return xdict['en']
return xdict.values()[0]
Revision: 7910
Author: xqt
Date: 2010-02-05 09:03:56 +0000 (Fri, 05 Feb 2010)
Log Message:
-----------
* removeDeprecatedTemplates: (restored from older version) but it also replaces old templates with a new one, keeping its parameters if necessary
* fixHtml: replace header tags where only spaces are in the same line with mw-syntax
* exception handling for EditConflict
* additional choice for quitting the script
Modified Paths:
--------------
trunk/pywikipedia/cosmetic_changes.py
Modified: trunk/pywikipedia/cosmetic_changes.py
===================================================================
--- trunk/pywikipedia/cosmetic_changes.py 2010-02-05 06:42:52 UTC (rev 7909)
+++ trunk/pywikipedia/cosmetic_changes.py 2010-02-05 09:03:56 UTC (rev 7910)
@@ -214,6 +214,37 @@
'zh' : ([u'documentation', u'doc'], u'/doc'),
}
+# Template which should be replaced or removed.
+# Use a list with two entries. The first entry will be replaced by the second.
+# Examples:
+# For removing {{Foo}}, the list must be:
+# (u'Foo', None),
+#
+# The following also works:
+# (u'Foo', ''),
+#
+# For replacing {{Foo}} with {{Bar}} the list must be:
+# (u'Foo', u'Bar'),
+#
+# This also removes all template parameters of {{Foo}}
+# For replacing {{Foo}} with {{Bar}} but keep the template
+# parameters in its original order, please use:
+# (u'Foo', u'Bar\g<parameters>'),
+
+deprecatedTemplates = {
+ 'wikipedia': {
+ 'de': [
+ (u'Stub', None),
+ (u'Belege', u'Belege fehlen\g<parameters>'),
+ (u'Quelle', u'Belege fehlen\g<parameters>'),
+ (u'Quellen', u'Belege fehlen\g<parameters>'),
+ ],
+ 'pdc':[
+ (u'Schkiss', None),
+ ],
+ }
+}
+
class CosmeticChangesToolkit:
def __init__(self, site, debug=False, redirect=False, namespace=None, pageTitle=None):
self.site = site
@@ -235,6 +266,7 @@
text = self.cleanUpSectionHeaders(text)
text = self.putSpacesInLists(text)
text = self.translateAndCapitalizeNamespaces(text)
+ text = self.replaceDeprecatedTemplates(text)
text = self.resolveHtmlEntities(text)
text = self.validXhtml(text)
text = self.removeUselessSpaces(text)
@@ -570,6 +602,21 @@
text = pywikibot.replaceExcept(text, r'(?m)^(?P<bullet>[:;]*(\*+|#+)[:;\*#]*)(?P<char>[^\s\*#:;].+?)', '\g<bullet> \g<char>', exceptions)
return text
+ def replaceDeprecatedTemplates(self, text):
+ exceptions = ['comment', 'math', 'nowiki', 'pre']
+ if self.site.family.name in deprecatedTemplates and self.site.lang in deprecatedTemplates[self.site.family.name]:
+ for template in deprecatedTemplates[self.site.family.name][self.site.lang]:
+ old = template[0]
+ new = template[1]
+ if new == None:
+ new = ''
+ else:
+ new = '{{'+new+'}}'
+ if not self.site.nocapitalize:
+ old = '[' + old[0].upper() + old[0].lower() + ']' + old[1:]
+ text = pywikibot.replaceExcept(text, r'\{\{([mM][sS][gG]:)?' + old + '(?P<parameters>\|[^}]+|)}}', new, exceptions)
+ return text
+
#from fixes.py
def fixSyntaxSave(self, text):
exceptions = ['nowiki', 'comment', 'math', 'pre', 'source', 'startspace']
@@ -600,6 +647,12 @@
# horizontal line with attributes; can't be done with wiki syntax
# so we only make it XHTML compliant
text = pywikibot.replaceExcept(text, r'(?i)<hr ([^>/]+?)>', r'<hr \1 />', exceptions)
+ # a header where only spaces are in the same line
+ for level in range(1, 7):
+ equals = '\\1%s \\2 %s\\3' % ("="*level, "="*level)
+ text = pywikibot.replaceExcept(text,
+ r'(?i)([\r\n]) *<h%d> *([^<]+?) *</h%d> *([\r\n])'%(level, level),
+ r'%s'%equals, exceptions)
#remove empty <ref/>-tag
text = pywikibot.replaceExcept(text, r'(?i)<ref\s*/>', r'', exceptions)
# TODO: maybe we can make the bot replace <p> tags with \r\n's.
@@ -631,6 +684,7 @@
self.generator = generator
self.acceptall = acceptall
self.comment = comment
+ self.done = False
def treat(self, page):
try:
@@ -641,9 +695,13 @@
changedText = ccToolkit.change(page.get())
if changedText.strip() != page.get().strip():
if not self.acceptall:
- choice = pywikibot.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
+ choice = pywikibot.inputChoice(u'Do you want to accept these changes?',
+ ['Yes', 'No', 'All', 'Quit'], ['y', 'N', 'a', 'q'], 'N')
if choice == 'a':
self.acceptall = True
+ elif choice == 'q':
+ self.done = True
+ return
if self.acceptall or choice == 'y':
page.put(changedText, comment=self.comment)
else:
@@ -654,10 +712,13 @@
pywikibot.output("Page %s is a redirect; skipping." % page.aslink())
except pywikibot.LockedPage:
pywikibot.output("Page %s is locked?!" % page.aslink())
+ except pywikibot.EditConflict:
+ pywikibot.output("An edit conflict has occured at %s." % page.aslink())
def run(self):
try:
for page in self.generator:
+ if self.done: break
self.treat(page)
except KeyboardInterrupt:
pywikibot.output('\nQuitting program...')
Revision: 7908
Author: xqt
Date: 2010-02-04 07:12:46 +0000 (Thu, 04 Feb 2010)
Log Message:
-----------
* Use pagegenerators for getting pages to be processed
* check whether source page exists
* use localized translation for #REDIRECT
Modified Paths:
--------------
trunk/pywikipedia/capitalize_redirects.py
Modified: trunk/pywikipedia/capitalize_redirects.py
===================================================================
--- trunk/pywikipedia/capitalize_redirects.py 2010-02-03 13:13:43 UTC (rev 7907)
+++ trunk/pywikipedia/capitalize_redirects.py 2010-02-04 07:12:46 UTC (rev 7908)
@@ -7,48 +7,33 @@
Command-line arguments:
- -file Work on all pages listed in a text file.
- Argument can also be given as "-file:filename".
+&amp;params;
- -cat Work on all pages which are in a specific category.
- Argument can also be given as "-cat:categoryname".
+-always Don't prompt to make changes, just do them.
- -ref Work on all pages that link to a certain page.
- Argument can also be given as "-ref:referredpagetitle".
+-titlecase creates a titlecased redirect version of a given page
+ where all words of the title start with an uppercase
+ character and the remaining characters are lowercase.
- -links Work on all pages that are linked from a certain page.
- Argument can also be given as "-link:linkingpagetitle".
-
- -start Work on all pages on the home wiki, starting at the named
- page.
-
- -page Work on a single page.
-
- -namespace Run over especific namespace.
- Argument can also be given as "-namespace:100" or
- "-namespace:Image".
-
- -always Don't prompt to make changes, just do them.
-
- -titlecase creates a titlecased redirect version of a given page
- where all words of the title start with an uppercase
- character and the remaining characters are lowercase
-
Example: "python capitalize_redirects.py -start:B -always"
'''
#
# (C) Yrithinnd
-# (C) Pywikipedia bot team, 2007-2009
+# (C) Pywikipedia bot team, 2007-2010
#
# Class licensed under terms of the MIT license
#
__version__ = '$Id$'
#
-import time, sys
+import time, sys, re
import wikipedia as pywikibot
import pagegenerators, catlib
+docuReplacements = {
+ '&amp;params;': pagegenerators.parameterHelp
+}
+
msg = {
'ar': u'روبوت: إنشاء تحويلة إلى [[%s]]',
'cs': u'Robot vytvořil přesměrování na [[%s]]',
@@ -70,124 +55,65 @@
self.acceptall = acceptall
self.titlecase = titlecase
self.site = pywikibot.getSite()
+ self.done = False
def run(self):
for page in self.generator:
- if page.isRedirectPage():
- page = page.getRedirectTarget()
- page_t = page.title()
- # Show the title of the page we're working on.
- # Highlight the title in purple.
- pywikibot.output(u"\n>>> \03{lightpurple}%s\03{default} <<<"
- % page_t)
- if self.titlecase:
- page_cap = pywikibot.Page(self.site, page_t.title())
- else:
- page_cap = pywikibot.Page(self.site, page_t.capitalize())
- if not page_cap.exists():
- pywikibot.output(u'[[%s]] doesn\'t exist' % page_cap.title())
- if not self.acceptall:
- choice = pywikibot.inputChoice(
- u'Do you want to create a redirect?',
- ['Yes', 'No', 'All', 'Quit'], ['y', 'N', 'a', 'q'], 'N')
- if choice == 'a':
- self.acceptall = True
- elif choice == 'q':
- break
- if self.acceptall or choice == 'y':
- comment = pywikibot.translate(self.site, msg) % page_t
- try:
- page_cap.put(u"#REDIRECT [[%s]]" % page_t, comment)
- print
- except:
- pywikibot.output(u"An error occurred, skipping...")
- continue
- else:
- pywikibot.output(u'%s already exists, skipping...\n'
- % page_t.title())
+ if self.done: break
+ if page.exists():
+ self.treat(page)
+ def treat(self, page):
+ if page.isRedirectPage():
+ page = page.getRedirectTarget()
+ page_t = page.title()
+ # Show the title of the page we're working on.
+ # Highlight the title in purple.
+ pywikibot.output(u"\n>>> \03{lightpurple}%s\03{default} <<<"
+ % page_t)
+ if self.titlecase:
+ page_cap = pywikibot.Page(self.site, page_t.title())
+ else:
+ page_cap = pywikibot.Page(self.site, page_t.capitalize())
+ if page_cap.exists():
+ pywikibot.output(u'%s already exists, skipping...\n'
+ % page_cap.aslink())
+ else:
+ pywikibot.output(u'[[%s]] doesn\'t exist' % page_cap.title())
+ if not self.acceptall:
+ choice = pywikibot.inputChoice(
+ u'Do you want to create a redirect?',
+ ['Yes', 'No', 'All', 'Quit'], ['y', 'N', 'a', 'q'], 'N')
+ if choice == 'a':
+ self.acceptall = True
+ elif choice == 'q':
+ self.done = True
+ if self.acceptall or choice == 'y':
+ comment = pywikibot.translate(self.site, msg) % page_t
+ try:
+ page_cap.put(u"#%s [[%s]]" % (self.site.redirect(True), page_t), comment)
+ except:
+ pywikibot.output(u"An error occurred, skipping...")
+
def main():
- gen = None
- source = None
- textfilename = None
- categoryname = None
- pageNames = []
- referredPageName = None
+ genFactory = pagegenerators.GeneratorFactory()
acceptall = False
- namespaces = []
- startpage = None
titlecase = False
for arg in pywikibot.handleArgs():
- if arg.startswith('-file'):
- if len(arg) == 5:
- textfilename = pywikibot.input(u'Please enter the filename:')
- else:
- textfilename = arg[6:]
- source = 'textfile'
- elif arg.startswith('-cat'):
- if len(arg) == 4:
- categoryname = pywikibot.input(
- u'Please enter the category name:')
- else:
- categoryname = arg[5:]
- source = 'category'
- elif arg.startswith('-page'):
- if len(arg) == 5:
- pageNames.append(pywikibot.input(
- u'Which page do you want to change?'))
- else:
- pageNames.append(arg[6:])
- source = 'singlepage'
- elif arg.startswith('-ref'):
- if len(arg) == 4:
- referredPageName = pywikibot.input(
- u'Links to which page should be processed?')
- else:
- referredPageName = arg[5:]
- source = 'ref'
- elif arg.startswith('-start'):
- if len(arg) == 6:
- firstPageTitle = pywikibot.input(
- u'Which page do you want to change?')
- else:
- firstPageTitle = arg[7:]
- source = 'allpages'
- elif arg == '-always':
+ if arg == '-always':
acceptall = True
elif arg == '-titlecase':
titlecase = True
- elif arg.startswith('-namespace:'):
- try:
- namespaces.append(int(arg[11:]))
- except ValueError:
- namespaces.append(arg[11:])
+ elif genFactory.handleArg(arg):
+ pass
else:
- commandline_replacements.append(arg)
+ pywikibot.showHelp(u'capitalize_redirects')
+ return
- if source == 'textfile':
- gen = pagegenerators.TextfilePageGenerator(textfilename)
- elif source == 'category':
- cat = catlib.Category(pywikibot.getSite(), categoryname)
- gen = pagegenerators.CategorizedPageGenerator(cat)
- elif source == 'singlepage':
- pages = [pywikibot.Page(pywikibot.getSite(), pageName)
- for pageName in pageNames]
- gen = iter(pages)
- elif source == 'allpages':
- namespace = pywikibot.Page(pywikibot.getSite(),
- firstPageTitle).namespace()
- gen = pagegenerators.AllpagesPageGenerator(firstPageTitle, namespace)
- elif source == 'ref':
- referredPage = pywikibot.Page(pywikibot.getSite(), referredPageName)
- gen = pagegenerators.ReferringPageGenerator(referredPage)
- elif source == None or len(commandline_replacements) not in [0, 2]:
- pywikibot.showHelp(u'capitalize_redirects')
- return
- if namespaces != []:
- gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
- preloadingGen = pagegenerators.PreloadingGenerator(gen, pageNumber = 20)
- bot = CapitalizeBot(preloadingGen, acceptall, titlecase)
+ gen = genFactory.getCombinedGenerator()
+ preloadingGen = pagegenerators.PreloadingGenerator(gen)
+ bot = CapitalizeBot(preloadingGen, acceptall, titlecase, standard)
bot.run()
if __name__ == "__main__":