[Pywikipedia-l] SVN: [5293] trunk/pywikipedia

siebrand at svn.wikimedia.org siebrand at svn.wikimedia.org
Fri May 2 00:13:27 UTC 2008


Revision: 5293
Author:   siebrand
Date:     2008-05-02 00:13:27 +0000 (Fri, 02 May 2008)

Log Message:
-----------
More eol-style:native props

Modified Paths:
--------------
    trunk/pywikipedia/blockpageschecker.py
    trunk/pywikipedia/commonsdelinker/plugins/__init__.py
    trunk/pywikipedia/commonsdelinker/plugins/books.py
    trunk/pywikipedia/commonsdelinker/plugins/debug.py
    trunk/pywikipedia/commonsdelinker/plugins/flags.py
    trunk/pywikipedia/families/wikia_family.py
    trunk/pywikipedia/fixing_redirects.py
    trunk/pywikipedia/maintenance/readtalk.py

Property Changed:
----------------
    trunk/pywikipedia/blockpageschecker.py
    trunk/pywikipedia/commonsdelinker/plugins/__init__.py
    trunk/pywikipedia/commonsdelinker/plugins/books.py
    trunk/pywikipedia/commonsdelinker/plugins/debug.py
    trunk/pywikipedia/commonsdelinker/plugins/flags.py
    trunk/pywikipedia/families/wikia_family.py
    trunk/pywikipedia/fixing_redirects.py
    trunk/pywikipedia/maintenance/readtalk.py

Modified: trunk/pywikipedia/blockpageschecker.py
===================================================================
--- trunk/pywikipedia/blockpageschecker.py	2008-05-02 00:05:37 UTC (rev 5292)
+++ trunk/pywikipedia/blockpageschecker.py	2008-05-02 00:13:27 UTC (rev 5293)
@@ -1,402 +1,402 @@
-# -*- coding: utf-8  -*-
-"""
-This script, originally written by Wikihermit and later rewritten by Filnik,
-deletes the templates that warn that a page is blocked when the page is not
-actually blocked. Sysops very often protect pages for a set time but then
-forget to remove the warning afterwards. This script is useful if you want
-to delete those stale warnings from such pages.
-
-Parameters:
-
-These command line parameters can be used to specify which pages to work on:
-
-&params;
-
--xml              Retrieve information from a local XML dump (pages-articles
-                  or pages-meta-current, see http://download.wikimedia.org).
-                  Argument can also be given as "-xml:filename".
-
--page             Only edit a specific page.
-                  Argument can also be given as "-page:pagetitle". You can
-                  give this parameter multiple times to edit multiple pages.
-
--protectedpages:  Check all protected pages (useful when you have no categories
-                  or when you have problems with them). Add the namespace to
-                  check after ":"; by default all protected pages are checked.
-
--moveprotected:   Same as -protectedpages, for moveprotected pages
-
-Furthermore, the following command line parameters are supported:
-
--always         Don't ask for confirmation of each change; always apply it.
-
--debug          When the bot can't delete the template from the page (e.g. because
-                the regex doesn't match), it will ask whether it should open the
-                page in your browser. (Attention: included pages may give false positives.)
-
--move           Also check whether the page is move-protected, not only edit-protected.
-
---- Warning! ---
-You have to edit this script in order to add your preferences
-otherwise the script won't work!
-
-If you have problems, ask on botwiki ( http://botwiki.sno.cc )
-or on IRC (#pywikipediabot)
-
---- Example of how to use the script ---
-
-python blockpageschecker.py -always
-
-python blockpageschecker.py -cat:Geography -always
-
-python blockpageschecker.py -debug -protectedpages:4
-
-"""
-#
-# (C) Monobi a.k.a. Wikihermit, 2007
-# (C) Filnik, 2007-2008
-# (C) NicDumZ, 2008
-#
-# Distributed under the terms of the MIT license.
-#
-__version__ = '$Id: blockpageschecker.py,v 1.5 2008/04/24 19.40.00 filnik Exp$'
-#
-
-import re, time, webbrowser
-import wikipedia, catlib, pagegenerators, config
-
-# This is required for the text that is shown when you run this script
-# with the parameter -help.
-docuReplacements = {
-    '&params;':     pagegenerators.parameterHelp,
-}
-
-#######################################################
-#--------------------- PREFERENCES -------------------#
-################### -- Edit below! -- #################
-
-# Added a new feature! Please update and add the settings in order
-# to improve the intelligence of this script ;-)
-# Regex to get the semi-protection template
-templateSemiProtection = {
-            'en': None,
-            'it':[r'\{\{(?:[Tt]emplate:|)[Aa]vvisobloccoparziale(?:|[ _]scad\|.*?|\|.*?)\}\}',
-                  r'\{\{(?:[Tt]emplate:|)[Aa]bp(?:|[ _]scad\|(?:.*?))\}\}'],
-            'fr': [ur'\{\{(?:[Tt]emplate:|[Mm]odèle:|)[Ss]emi[- ]?protection(|[^\}]*)\}\}'],
-            'ja':[ur'\{\{(?:[Tt]emplate:|)半保護(?:[Ss]|)(?:\|.+|)\}\}(\n+?|)'],
-            'zh':[ur'\{\{(?:[Tt]emplate:|)Protected|(?:[Ss]|[Ss]emi|半)(?:\|.+|)\}\}(\n+?|)',ur'\{\{(?:[Tt]emplate:|)Mini-protected|(?:[Ss]|[Ss]emi|半)(?:\|.+|)\}\}(\n+?|)',ur'\{\{(?:[Tt]emplate:|)Protected-logo|(?:[Ss]|[Ss]emi|半)(?:\|.+|)\}\}(\n+?|)'],
-            }
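-# e.g. the first 'it' pattern above matches {{Avvisobloccoparziale}} as well as
-# the {{Avvisobloccoparziale scad|...}} and {{Avvisobloccoparziale|...}} variants.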
-# Regex to get the total-protection template
-templateTotalProtection = {
-            'en': None, 
-            'it':[r'\{\{(?:[Tt]emplate:|)[Aa]vvisoblocco(?:|[ _]scad\|(?:.*?)|minaccia|cancellata)\}\}',
-                  r'\{\{(?:[Tt]emplate:|)(?:[Cc][Tt]|[Cc]anc fatte|[Cc][Ee].*?)\}\}', r'<div class="toccolours[ _]itwiki[ _]template[ _]avviso">(?:\s|\n)*?[Qq]uesta pagina'],
-            'fr':[ur'\{\{(?:[Tt]emplate:|[Mm]odèle:|)[Pp]rotection(|[^\}]*)\}\}',
-                 ur'\{\{(?:[Tt]emplate:|[Mm]odèle:|)(?:[Pp]age|[Aa]rchive|[Mm]odèle) protégée?(|[^\}]*)\}\}'],
-            'ja':[ur'\{\{(?:[Tt]emplate:|)保護(?:[Ss]|)(?:\|.+|)\}\}(\n+?|)'],
-            'zh':[r'\{\{(?:[Tt]emplate:|)Protected|(?:[Nn]|[Nn]ormal)(?:\|.+|)\}\}(\n+?|)',r'\{\{(?:[Tt]emplate:|)Mini-protected|(?:[Nn]|[Nn]ormal)(?:\|.+|)\}\}(\n+?|)',r'\{\{(?:[Tt]emplate:|)Protected-logo|(?:[Nn]|[Nn]ormal)(?:\|.+|)\}\}(\n+?|)'],
-            }
-# Regex to get the semi-protection move template
-templateSemiMoveProtection = {
-            'en': None, 
-            'it':[r'\{\{(?:[Tt]emplate:|)[Aa]vvisobloccospostamento(?:|[ _]scad\|.*?|\|.*?)\}\}'],
-            'ja':[ur'\{\{(?:[Tt]emplate:|)移動半保護(?:[Ss]|)(?:\|.+|)\}\}(\n+?|)'],
-            'zh':[r'\{\{(?:[Tt]emplate:|)Protected|(?:MS|ms)(?:\|.+|)\}\}(\n+?|)',r'\{\{(?:[Tt]emplate:|)Mini-protected|(?:MS|ms)(?:\|.+|)\}\}(\n+?|)',r'\{\{(?:[Tt]emplate:|)Protected-logo|(?:MS|ms)(?:\|.+|)\}\}(\n+?|)'],
-            }
-# Regex to get the total-protection move template 
-templateTotalMoveProtection = {
-            'en': None, 
-            'it':[r'\{\{(?:[Tt]emplate:|)[Aa]vvisobloccospostamento(?:|[ _]scad\|.*?|\|.*?)\}\}'],
-            'ja':[ur'\{\{(?:[Tt]emplate:|)移動保護(?:[Ss]|)(?:\|.+|)\}\}(\n+?|)'],
-            'zh':[ur'\{\{(?:[Tt]emplate:|)Protected|(?:[Mm]|[Mm]ove|移[動动])(?:\|.+|)\}\}(\n+?|)',ur'\{\{(?:[Tt]emplate:|)Mini-protected|(?:[Mm]|[Mm]ove|移[動动])(?:\|.+|)\}\}(\n+?|)',ur'\{\{(?:[Tt]emplate:|)Protected-logo|(?:[Mm]|[Mm]ove|移[動动])(?:\|.+|)\}\}(\n+?|)'],
-            }
-# Array: 0 => Semi-block, 1 => Total Block, 2 => Semi-Move, 3 => Total-Move
-templateNoRegex = {
-            'it':['{{Avvisobloccoparziale}}', '{{Avvisoblocco}}', None, None],
-            'fr':['{{Semi-protection}}', '{{Protection}}', None, None],
-            'ja':[u'{{半保護}}', u'{{保護}}', u'{{移動半保護}}',u'{{移動保護}}'],
-            'zh':[u'{{Protected/semi}}',u'{{Protected}}',u'{{Protected/ms}}',u'{{Protected/move}}'],
-            }
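-# main() below substitutes TNR[1] when a fully protected page carries the wrong
-# template, TNR[0] for semi-protection, TNR[3] for move protection and TNR[2]
-# for move semi-protection.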
-
-# Category where the bot will check
-categoryToCheck = {
-            'en':[u'Category:Protected'],
-            'fr':[u'Category:Page semi-protégée', u'Category:Page protégée', u'Catégorie:Article protégé'],
-            'he':[u'קטגוריה:ויקיפדיה: דפים מוגנים', u'קטגוריה:ויקיפדיה: דפים מוגנים חלקית'],
-            'it':[u'Categoria:Pagine semiprotette', u'Categoria:Voci_protette'],
-            'ja':[u'Category:編集保護中の記事',u'Category:編集半保護中の記事',
-                u'Category:移動保護中の記事',],
-            'pt':[u'Category:!Páginas protegidas', u'Category:!Páginas semiprotegidas'],
-            'zh':[u'Category:被保护的页面',u'Category:被保護的模板',u'Category:暂时不能移动的页面',
-                u'Category:被半保护的页面',],
-            }
-# Comment used when the Bot edits
-comment = {
-            'en':u'Bot: Deleting out-dated template',
-            'fr':u'Robot: Mise à jour des bandeaux de protection',
-            'he':u'בוט: מסיר תבנית שעבר זמנה',
-            'it':u'Bot: Tolgo o sistemo template di avviso blocco',
-            'ja':u'ロボットによる: 保護テンプレート除去',
-            'pt':u'Bot: Retirando predefinição de proteção',
-            'zh':u'機器人: 移除過期的保護模板',
-            }
-# Check list to block the users that haven't set their preferences
-project_inserted = ['en', 'fr', 'it', 'ja', 'pt', 'zh']
-
-#######################################################
-#------------------ END PREFERENCES ------------------#
-################## -- Edit above! -- ##################
-
-def understandBlock(text, TTP, TSP, TSMP, TTMP):
-    """ Understand if the page is blocked and if it has the right template """
-    for catchRegex in TTP: # TTP = templateTotalProtection
-        resultCatch = re.findall(catchRegex, text)
-        if resultCatch:
-            return ('sysop-total', catchRegex)
-    for catchRegex in TSP:
-        resultCatch = re.findall(catchRegex, text)
-        if resultCatch:
-            return ('autoconfirmed-total', catchRegex)
-    if TSMP != None and TTMP != None and TTP != TTMP and TSP != TSMP:
-        for catchRegex in TSMP:
-            resultCatch = re.findall(catchRegex, text)
-            if resultCatch:
-                return ('sysop-move', catchRegex)
-        for catchRegex in TTMP:
-            resultCatch = re.findall(catchRegex, text)
-            if resultCatch:
-                return ('autoconfirmed-move', catchRegex)
-    return ('editable', r'\A\n') # 'editable' means no protection template was found; this regex won't change anything
-
-def debugQuest(site, page):
-    quest = wikipedia.input(u'Do you want to open the page on your [b]rowser, [g]ui or [n]othing?')
-    pathWiki = site.family.nicepath(site.lang)
-    url = 'http://%s%s%s?&redirect=no' % (wikipedia.getSite().hostname(), pathWiki, page.urlname())
-    while 1:
-        if quest.lower() in ['b', 'B']:                    
-            webbrowser.open(url)
-            break
-        elif quest.lower() in ['g', 'G']:
-            import editarticle
-            editor = editarticle.TextEditor()
-            text = editor.edit(page.get())
-            break
-        elif quest.lower() in ['n', 'N']:
-            break
-        else:
-            wikipedia.output(u'wrong entry, type "b", "g" or "n"')
-            continue
-
-def main():
-    """ Main Function """
-    # Loading the comments
-    global categoryToCheck; global comment; global project_inserted
-    if config.mylang not in project_inserted:
-        wikipedia.output(u"Your project is not supported by this script. You have to edit the script and add it!")
-        wikipedia.stopme()
-    # Set the defaults; generator stays False until the user chooses one, genFactory handles the standard page generator arguments
-    always = False; generator = False; debug = False
-    moveBlockCheck = False; genFactory = pagegenerators.GeneratorFactory()
-    # To prevent Infinite loops
-    errorCount = 0
-    # Load the right site
-    site = wikipedia.getSite()
-    # Loading the default options.
-    for arg in wikipedia.handleArgs():
-        if arg == '-always':
-            always = True
-        elif arg == '-move':
-            moveBlockCheck = True
-        elif arg == '-debug':
-            debug = True
-        elif arg.startswith('-protectedpages'):
-            if len(arg) == 15:
-                generator = site.protectedpages(namespace = 0)
-            else:
-                generator = site.protectedpages(namespace = int(arg[16:]))
-        elif arg.startswith('-moveprotected'):
-            if len(arg) == 14:
-                generator = site.protectedpages(namespace = 0, type = 'move')
-            else:
-                generator = site.protectedpages(namespace = int(arg[16:]),
-                                                type = 'move')
-        elif arg.startswith('-page'):
-            if len(arg) == 5:
-                generator = [wikipedia.Page(wikipedia.getSite(), wikipedia.input(u'What page do you want to use?'))]
-            else:
-                generator = [wikipedia.Page(wikipedia.getSite(), arg[6:])]
-        else:
-            generator = genFactory.handleArg(arg)
-    # Take the right templates to use, the category and the comment
-    TSP = wikipedia.translate(site, templateSemiProtection)
-    TTP = wikipedia.translate(site, templateTotalProtection)
-    TSMP = wikipedia.translate(site, templateSemiMoveProtection)
-    TTMP = wikipedia.translate(site, templateTotalMoveProtection)
-    TNR = wikipedia.translate(site, templateNoRegex)
-    
-    category = wikipedia.translate(site, categoryToCheck)
-    commentUsed = wikipedia.translate(site, comment)
-    if not generator:
-        generator = list()
-        wikipedia.output(u'Loading categories...')
-        # Fall back to the categories if no other generator has been set
-        for CAT in category:
-            cat = catlib.Category(site, CAT)
-            # Define the generator
-            gen = pagegenerators.CategorizedPageGenerator(cat)
-            for pageCat in gen:
-                generator.append(pageCat)
-        wikipedia.output(u'Categories loaded, start!')
-    # Main Loop
-    preloadingGen = pagegenerators.PreloadingGenerator(generator, pageNumber = 60)
-    for page in preloadingGen:
-        pagename = page.aslink()
-        wikipedia.output('Loading %s...' % pagename)
-        try:
-            text = page.get()
-            restrictions = page.getRestrictions()
-        except wikipedia.NoPage:
-            wikipedia.output("%s doesn't exist! Skipping..." % pagename)
-            continue
-        except wikipedia.IsRedirectPage:
-            wikipedia.output("%s is a redirect! Skipping..." % pagename)
-            if debug:
-                debugQuest(site, page)
-            continue
-        """
-        # This check does not work :
-        # PreloadingGenerator cannot set correctly page.editRestriction
-        # (see bug #1949476 ) 
-        if not page.canBeEdited():
-            wikipedia.output("%s is sysop-protected : this account can't edit it! Skipping..." % pagename)
-            continue
-        """        
-        editRestr = restrictions['edit']
-        if editRestr and editRestr[0] == 'sysop':
-            try:
-                config.sysopnames[site.family.name][site.lang]
-            except:
-                wikipedia.output("%s is sysop-protected : this account can't edit it! Skipping..." % pagename)
-                continue
-            
-        # Understand, according to the template in the page, what should be the protection
-        # and compare it with what there really is.
-        TemplateInThePage = understandBlock(text, TTP, TSP, TSMP, TTMP)
-        # Only to see if the text is the same or not...
-        oldtext = text
-        # keep track of the changes for each step (edit then move)
-        changes = -1
-
-        if not editRestr:
-            # page is not edit-protected
-            # Deleting the template because the page doesn't need it.
-            replaceToPerform = u'|'.join(TTP + TSP)
-            text, changes = re.subn('(?:<noinclude>|)(%s)(?:</noinclude>|)' % replaceToPerform, '', text)
-            wikipedia.output(u'The page is editable by everyone, deleting the template...')
-
-        elif editRestr[0] == 'sysop':
-            # total edit protection         
-            if TemplateInThePage[0] == 'sysop-total' and TTP != None:
-                msg = 'The page is protected at the sysop level'
-                if not moveBlockCheck:
-                    msg += ', skipping...'
-                wikipedia.output(msg)
-            else:
-                wikipedia.output(u'The page is protected at the sysop level, but the template does not seem correct. Fixing...')
-                text, changes = re.subn(TemplateInThePage[1], TNR[1], text)
-
-        elif TSP != None:
-            # implicitely editRestr[0] = 'autoconfirmed', edit-Semi-protection
-            if TemplateInThePage[0] == 'autoconfirmed-total':                    
-                msg = 'The page is editable only by autoconfirmed users'
-                if not moveBlockCheck:
-                    msg += ', skipping...'
-                wikipedia.output(msg)
-            else:
-                wikipedia.output(u'The page is editable only by autoconfirmed users, but the template does not seem correct. Fixing...')
-                text, changes = re.subn(TemplateInThePage[1], TNR[0], text)
-
-        if changes == 0:
-            # We tried to fix edit-protection templates, but it did not work.
-            wikipedia.output('Warning : No edit-protection template could be found')
-        
-        if moveBlockCheck:
-            # checking move protection now
-            moveRestr = restrictions['move']
-            changes = -1
-
-            if not moveRestr:
-                wikipedia.output(u'The page is movable by everyone, deleting the template...')
-                # Deleting the template because the page doesn't need it.
-                replaceToPerform = u'|'.join(TSMP + TTMP)
-                text, changes = re.subn('(?:<noinclude>|)(%s)(?:</noinclude>|)' % replaceToPerform, '', text)
-
-            elif moveRestr[0] == 'sysop':
-                # move-total-protection
-                if TemplateInThePage[0] == 'sysop-move' and TTMP != None:
-                    wikipedia.output(u'The page is move-protected at the sysop level, skipping...')
-                else:
-                    wikipedia.output(u'The page is move-protected at the sysop level, but the template does not seem correct. Fixing...')
-                    text, changes = re.subn(TemplateInThePage[1], TNR[3], text)
-
-            elif TSMP != None:
-                # implicitely moveRestr[0] = 'autoconfirmed', move-semi-protection
-                if TemplateInThePage[0] == 'autoconfirmed-move':
-                    wikipedia.output(u'The page is movable only by autoconfirmed users, skipping...')
-                else:
-                    wikipedia.output(u'The page is movable only by autoconfirmed users, but the template does not seem correct. Fixing...')
-                    text, changes = re.subn(TemplateInThePage[1], TNR[2], text)
-
-            if changes == 0:
-                # We tried to fix move-protection templates, but it did not work.
-                wikipedia.output('Warning : No move-protection template could be found')
-
-
-        if oldtext != text:
-            # Ok, asking if the change has to be performed and do it if yes.
-            wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
-            wikipedia.showDiff(oldtext, text)
-            choice = ''
-            while 1:
-                if not always:
-                    choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
-                if choice.lower() in ['a', 'all']:
-                    always = True
-                if choice.lower() in ['n', 'no']:
-                    break
-                if choice.lower() in ['y', 'yes'] or always:
-                    try:
-                        page.put(text, commentUsed, force=True)
-                    except wikipedia.EditConflict:
-                        wikipedia.output(u'Edit conflict! skip!')
-                        break
-                    except wikipedia.ServerError:
-                        # Sometimes there is this error that's quite annoying because
-                        # can block the whole process for nothing. 
-                        errorCount += 1
-                        if errorCount < 5:
-                            wikipedia.output(u'Server Error! Wait..')
-                            time.sleep(3)
-                            continue
-                        else:
-                            # Prevent Infinite Loops
-                            raise wikipedia.ServerError(u'Fifth Server Error!')
-                    except wikipedia.SpamfilterError, e:
-                        wikipedia.output(u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url))
-                        break
-                    except wikipedia.PageNotSaved, error:
-                        wikipedia.output(u'Error putting page: %s' % (error.args,))
-                        break
-                    except wikipedia.LockedPage:
-                        wikipedia.output(u'The page is still protected. Skipping...')
-                        break
-                    else:
-                        # Break only if the errors are one after the other
-                        errorCount = 0
-                        break
-                    
-if __name__ == "__main__":
-    try:
-        main()
-    finally:
-        wikipedia.stopme()
+# -*- coding: utf-8  -*-
+"""
+This script, originally written by Wikihermit and later rewritten by Filnik,
+deletes the templates that warn that a page is blocked when the page is not
+actually blocked. Sysops very often protect pages for a set time but then
+forget to remove the warning afterwards. This script is useful if you want
+to delete those stale warnings from such pages.
+
+Parameters:
+
+These command line parameters can be used to specify which pages to work on:
+
+&params;
+
+-xml              Retrieve information from a local XML dump (pages-articles
+                  or pages-meta-current, see http://download.wikimedia.org).
+                  Argument can also be given as "-xml:filename".
+
+-page             Only edit a specific page.
+                  Argument can also be given as "-page:pagetitle". You can
+                  give this parameter multiple times to edit multiple pages.
+
+-protectedpages:  Check all protected pages (useful when you have no categories
+                  or when you have problems with them). Add the namespace to
+                  check after ":"; by default all protected pages are checked.
+
+-moveprotected:   Same as -protectedpages, for moveprotected pages
+
+Furthermore, the following command line parameters are supported:
+
+-always         Don't ask for confirmation of each change; always apply it.
+
+-debug          When the bot can't delete the template from the page (e.g. because
+                the regex doesn't match), it will ask whether it should open the
+                page in your browser. (Attention: included pages may give false positives.)
+
+-move           Also check whether the page is move-protected, not only edit-protected.
+
+--- Warning! ---
+You have to edit this script in order to add your preferences
+otherwise the script won't work!
+
+If you have problems, ask on botwiki ( http://botwiki.sno.cc )
+or on IRC (#pywikipediabot)
+
+--- Example of how to use the script ---
+
+python blockpageschecker.py -always
+
+python blockpageschecker.py -cat:Geography -always
+
+python blockpageschecker.py -debug -protectedpages:4
+
+"""
+#
+# (C) Monobi a.k.a. Wikihermit, 2007
+# (C) Filnik, 2007-2008
+# (C) NicDumZ, 2008
+#
+# Distributed under the terms of the MIT license.
+#
+__version__ = '$Id: blockpageschecker.py,v 1.5 2008/04/24 19.40.00 filnik Exp$'
+#
+
+import re, time, webbrowser
+import wikipedia, catlib, pagegenerators, config
+
+# This is required for the text that is shown when you run this script
+# with the parameter -help.
+docuReplacements = {
+    '&params;':     pagegenerators.parameterHelp,
+}
+
+#######################################################
+#--------------------- PREFERENCES -------------------#
+################### -- Edit below! -- #################
+
+# Added a new feature! Please update and add the settings in order
+# to improve the intelligence of this script ;-)
+# Regex to get the semi-protection template
+templateSemiProtection = {
+            'en': None,
+            'it':[r'\{\{(?:[Tt]emplate:|)[Aa]vvisobloccoparziale(?:|[ _]scad\|.*?|\|.*?)\}\}',
+                  r'\{\{(?:[Tt]emplate:|)[Aa]bp(?:|[ _]scad\|(?:.*?))\}\}'],
+            'fr': [ur'\{\{(?:[Tt]emplate:|[Mm]odèle:|)[Ss]emi[- ]?protection(|[^\}]*)\}\}'],
+            'ja':[ur'\{\{(?:[Tt]emplate:|)半保護(?:[Ss]|)(?:\|.+|)\}\}(\n+?|)'],
+            'zh':[ur'\{\{(?:[Tt]emplate:|)Protected|(?:[Ss]|[Ss]emi|半)(?:\|.+|)\}\}(\n+?|)',ur'\{\{(?:[Tt]emplate:|)Mini-protected|(?:[Ss]|[Ss]emi|半)(?:\|.+|)\}\}(\n+?|)',ur'\{\{(?:[Tt]emplate:|)Protected-logo|(?:[Ss]|[Ss]emi|半)(?:\|.+|)\}\}(\n+?|)'],
+            }
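+# e.g. the first 'it' pattern above matches {{Avvisobloccoparziale}} as well as
+# the {{Avvisobloccoparziale scad|...}} and {{Avvisobloccoparziale|...}} variants.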
+# Regex to get the total-protection template
+templateTotalProtection = {
+            'en': None, 
+            'it':[r'\{\{(?:[Tt]emplate:|)[Aa]vvisoblocco(?:|[ _]scad\|(?:.*?)|minaccia|cancellata)\}\}',
+                  r'\{\{(?:[Tt]emplate:|)(?:[Cc][Tt]|[Cc]anc fatte|[Cc][Ee].*?)\}\}', r'<div class="toccolours[ _]itwiki[ _]template[ _]avviso">(?:\s|\n)*?[Qq]uesta pagina'],
+            'fr':[ur'\{\{(?:[Tt]emplate:|[Mm]odèle:|)[Pp]rotection(|[^\}]*)\}\}',
+                 ur'\{\{(?:[Tt]emplate:|[Mm]odèle:|)(?:[Pp]age|[Aa]rchive|[Mm]odèle) protégée?(|[^\}]*)\}\}'],
+            'ja':[ur'\{\{(?:[Tt]emplate:|)保護(?:[Ss]|)(?:\|.+|)\}\}(\n+?|)'],
+            'zh':[r'\{\{(?:[Tt]emplate:|)Protected|(?:[Nn]|[Nn]ormal)(?:\|.+|)\}\}(\n+?|)',r'\{\{(?:[Tt]emplate:|)Mini-protected|(?:[Nn]|[Nn]ormal)(?:\|.+|)\}\}(\n+?|)',r'\{\{(?:[Tt]emplate:|)Protected-logo|(?:[Nn]|[Nn]ormal)(?:\|.+|)\}\}(\n+?|)'],
+            }
+# Regex to get the semi-protection move template
+templateSemiMoveProtection = {
+            'en': None, 
+            'it':[r'\{\{(?:[Tt]emplate:|)[Aa]vvisobloccospostamento(?:|[ _]scad\|.*?|\|.*?)\}\}'],
+            'ja':[ur'\{\{(?:[Tt]emplate:|)移動半保護(?:[Ss]|)(?:\|.+|)\}\}(\n+?|)'],
+            'zh':[r'\{\{(?:[Tt]emplate:|)Protected|(?:MS|ms)(?:\|.+|)\}\}(\n+?|)',r'\{\{(?:[Tt]emplate:|)Mini-protected|(?:MS|ms)(?:\|.+|)\}\}(\n+?|)',r'\{\{(?:[Tt]emplate:|)Protected-logo|(?:MS|ms)(?:\|.+|)\}\}(\n+?|)'],
+            }
+# Regex to get the total-protection move template 
+templateTotalMoveProtection = {
+            'en': None, 
+            'it':[r'\{\{(?:[Tt]emplate:|)[Aa]vvisobloccospostamento(?:|[ _]scad\|.*?|\|.*?)\}\}'],
+            'ja':[ur'\{\{(?:[Tt]emplate:|)移動保護(?:[Ss]|)(?:\|.+|)\}\}(\n+?|)'],
+            'zh':[ur'\{\{(?:[Tt]emplate:|)Protected|(?:[Mm]|[Mm]ove|移[動动])(?:\|.+|)\}\}(\n+?|)',ur'\{\{(?:[Tt]emplate:|)Mini-protected|(?:[Mm]|[Mm]ove|移[動动])(?:\|.+|)\}\}(\n+?|)',ur'\{\{(?:[Tt]emplate:|)Protected-logo|(?:[Mm]|[Mm]ove|移[動动])(?:\|.+|)\}\}(\n+?|)'],
+            }
+# Array: 0 => Semi-block, 1 => Total Block, 2 => Semi-Move, 3 => Total-Move
+templateNoRegex = {
+            'it':['{{Avvisobloccoparziale}}', '{{Avvisoblocco}}', None, None],
+            'fr':['{{Semi-protection}}', '{{Protection}}', None, None],
+            'ja':[u'{{半保護}}', u'{{保護}}', u'{{移動半保護}}',u'{{移動保護}}'],
+            'zh':[u'{{Protected/semi}}',u'{{Protected}}',u'{{Protected/ms}}',u'{{Protected/move}}'],
+            }
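+# main() below substitutes TNR[1] when a fully protected page carries the wrong
+# template, TNR[0] for semi-protection, TNR[3] for move protection and TNR[2]
+# for move semi-protection.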
+
+# Category where the bot will check
+categoryToCheck = {
+            'en':[u'Category:Protected'],
+            'fr':[u'Category:Page semi-protégée', u'Category:Page protégée', u'Catégorie:Article protégé'],
+            'he':[u'קטגוריה:ויקיפדיה: דפים מוגנים', u'קטגוריה:ויקיפדיה: דפים מוגנים חלקית'],
+            'it':[u'Categoria:Pagine semiprotette', u'Categoria:Voci_protette'],
+            'ja':[u'Category:編集保護中の記事',u'Category:編集半保護中の記事',
+                u'Category:移動保護中の記事',],
+            'pt':[u'Category:!Páginas protegidas', u'Category:!Páginas semiprotegidas'],
+            'zh':[u'Category:被保护的页面',u'Category:被保護的模板',u'Category:暂时不能移动的页面',
+                u'Category:被半保护的页面',],
+            }
+# Comment used when the Bot edits
+comment = {
+            'en':u'Bot: Deleting out-dated template',
+            'fr':u'Robot: Mise à jour des bandeaux de protection',
+            'he':u'בוט: מסיר תבנית שעבר זמנה',
+            'it':u'Bot: Tolgo o sistemo template di avviso blocco',
+            'ja':u'ロボットによる: 保護テンプレート除去',
+            'pt':u'Bot: Retirando predefinição de proteção',
+            'zh':u'機器人: 移除過期的保護模板',
+            }
+# Check list to block the users that haven't set their preferences
+project_inserted = ['en', 'fr', 'it', 'ja', 'pt', 'zh']
+
+#######################################################
+#------------------ END PREFERENCES ------------------#
+################## -- Edit above! -- ##################
+
+def understandBlock(text, TTP, TSP, TSMP, TTMP):
+    """ Understand if the page is blocked and if it has the right template """
+    for catchRegex in TTP: # TTP = templateTotalProtection
+        resultCatch = re.findall(catchRegex, text)
+        if resultCatch:
+            return ('sysop-total', catchRegex)
+    for catchRegex in TSP:
+        resultCatch = re.findall(catchRegex, text)
+        if resultCatch:
+            return ('autoconfirmed-total', catchRegex)
+    if TSMP != None and TTMP != None and TTP != TTMP and TSP != TSMP:
+        for catchRegex in TSMP:
+            resultCatch = re.findall(catchRegex, text)
+            if resultCatch:
+                return ('sysop-move', catchRegex)
+        for catchRegex in TTMP:
+            resultCatch = re.findall(catchRegex, text)
+            if resultCatch:
+                return ('autoconfirmed-move', catchRegex)
+    return ('editable', r'\A\n') # 'editable' means no protection template was found; this regex won't change anything
+
+def debugQuest(site, page):
+    quest = wikipedia.input(u'Do you want to open the page on your [b]rowser, [g]ui or [n]othing?')
+    pathWiki = site.family.nicepath(site.lang)
+    url = 'http://%s%s%s?&redirect=no' % (wikipedia.getSite().hostname(), pathWiki, page.urlname())
+    while 1:
+        if quest.lower() in ['b', 'B']:                    
+            webbrowser.open(url)
+            break
+        elif quest.lower() in ['g', 'G']:
+            import editarticle
+            editor = editarticle.TextEditor()
+            text = editor.edit(page.get())
+            break
+        elif quest.lower() in ['n', 'N']:
+            break
+        else:
+            wikipedia.output(u'wrong entry, type "b", "g" or "n"')
+            continue
+
+def main():
+    """ Main Function """
+    # Loading the comments
+    global categoryToCheck; global comment; global project_inserted
+    if config.mylang not in project_inserted:
+        wikipedia.output(u"Your project is not supported by this script. You have to edit the script and add it!")
+        wikipedia.stopme()
+    # Set the defaults; generator stays False until the user chooses one, genFactory handles the standard page generator arguments
+    always = False; generator = False; debug = False
+    moveBlockCheck = False; genFactory = pagegenerators.GeneratorFactory()
+    # To prevent Infinite loops
+    errorCount = 0
+    # Load the right site
+    site = wikipedia.getSite()
+    # Loading the default options.
+    for arg in wikipedia.handleArgs():
+        if arg == '-always':
+            always = True
+        elif arg == '-move':
+            moveBlockCheck = True
+        elif arg == '-debug':
+            debug = True
+        elif arg.startswith('-protectedpages'):
+            if len(arg) == 15:
+                generator = site.protectedpages(namespace = 0)
+            else:
+                generator = site.protectedpages(namespace = int(arg[16:]))
+        elif arg.startswith('-moveprotected'):
+            if len(arg) == 14:
+                generator = site.protectedpages(namespace = 0, type = 'move')
+            else:
+                generator = site.protectedpages(namespace = int(arg[16:]),
+                                                type = 'move')
+        elif arg.startswith('-page'):
+            if len(arg) == 5:
+                generator = [wikipedia.Page(wikipedia.getSite(), wikipedia.input(u'What page do you want to use?'))]
+            else:
+                generator = [wikipedia.Page(wikipedia.getSite(), arg[6:])]
+        else:
+            generator = genFactory.handleArg(arg)
+    # Take the right templates to use, the category and the comment
+    TSP = wikipedia.translate(site, templateSemiProtection)
+    TTP = wikipedia.translate(site, templateTotalProtection)
+    TSMP = wikipedia.translate(site, templateSemiMoveProtection)
+    TTMP = wikipedia.translate(site, templateTotalMoveProtection)
+    TNR = wikipedia.translate(site, templateNoRegex)
+    
+    category = wikipedia.translate(site, categoryToCheck)
+    commentUsed = wikipedia.translate(site, comment)
+    if not generator:
+        generator = list()
+        wikipedia.output(u'Loading categories...')
+        # Fall back to the categories if no other generator has been set
+        for CAT in category:
+            cat = catlib.Category(site, CAT)
+            # Define the generator
+            gen = pagegenerators.CategorizedPageGenerator(cat)
+            for pageCat in gen:
+                generator.append(pageCat)
+        wikipedia.output(u'Categories loaded, start!')
+    # Main Loop
+    preloadingGen = pagegenerators.PreloadingGenerator(generator, pageNumber = 60)
+    for page in preloadingGen:
+        pagename = page.aslink()
+        wikipedia.output('Loading %s...' % pagename)
+        try:
+            text = page.get()
+            restrictions = page.getRestrictions()
+        except wikipedia.NoPage:
+            wikipedia.output("%s doesn't exist! Skipping..." % pagename)
+            continue
+        except wikipedia.IsRedirectPage:
+            wikipedia.output("%s is a redirect! Skipping..." % pagename)
+            if debug:
+                debugQuest(site, page)
+            continue
+        """
+        # This check does not work :
+        # PreloadingGenerator cannot set correctly page.editRestriction
+        # (see bug #1949476 ) 
+        if not page.canBeEdited():
+            wikipedia.output("%s is sysop-protected : this account can't edit it! Skipping..." % pagename)
+            continue
+        """        
+        editRestr = restrictions['edit']
+        if editRestr and editRestr[0] == 'sysop':
+            try:
+                config.sysopnames[site.family.name][site.lang]
+            except:
+                wikipedia.output("%s is sysop-protected : this account can't edit it! Skipping..." % pagename)
+                continue
+            
+        # Understand, according to the template in the page, what should be the protection
+        # and compare it with what there really is.
+        TemplateInThePage = understandBlock(text, TTP, TSP, TSMP, TTMP)
+        # Only to see if the text is the same or not...
+        oldtext = text
+        # keep track of the changes for each step (edit then move)
+        changes = -1
+
+        if not editRestr:
+            # page is not edit-protected
+            # Deleting the template because the page doesn't need it.
+            replaceToPerform = u'|'.join(TTP + TSP)
+            text, changes = re.subn('(?:<noinclude>|)(%s)(?:</noinclude>|)' % replaceToPerform, '', text)
+            wikipedia.output(u'The page is editable by everyone, deleting the template...')
+
+        elif editRestr[0] == 'sysop':
+            # total edit protection         
+            if TemplateInThePage[0] == 'sysop-total' and TTP != None:
+                msg = 'The page is protected at the sysop level'
+                if not moveBlockCheck:
+                    msg += ', skipping...'
+                wikipedia.output(msg)
+            else:
+                wikipedia.output(u'The page is protected at the sysop level, but the template does not seem correct. Fixing...')
+                text, changes = re.subn(TemplateInThePage[1], TNR[1], text)
+
+        elif TSP != None:
+            # implicitely editRestr[0] = 'autoconfirmed', edit-Semi-protection
+            if TemplateInThePage[0] == 'autoconfirmed-total':                    
+                msg = 'The page is editable only by autoconfirmed users'
+                if not moveBlockCheck:
+                    msg += ', skipping...'
+                wikipedia.output(msg)
+            else:
+                wikipedia.output(u'The page is editable only by autoconfirmed users, but the template does not seem correct. Fixing...')
+                text, changes = re.subn(TemplateInThePage[1], TNR[0], text)
+
+        if changes == 0:
+            # We tried to fix edit-protection templates, but it did not work.
+            wikipedia.output('Warning : No edit-protection template could be found')
+        
+        if moveBlockCheck:
+            # checking move protection now
+            moveRestr = restrictions['move']
+            changes = -1
+
+            if not moveRestr:
+                wikipedia.output(u'The page is movable by everyone, deleting the template...')
+                # Deleting the template because the page doesn't need it.
+                replaceToPerform = u'|'.join(TSMP + TTMP)
+                text, changes = re.subn('(?:<noinclude>|)(%s)(?:</noinclude>|)' % replaceToPerform, '', text)
+
+            elif moveRestr[0] == 'sysop':
+                # move-total-protection
+                if TemplateInThePage[0] == 'sysop-move' and TTMP != None:
+                    wikipedia.output(u'The page is move-protected at the sysop level, skipping...')
+                else:
+                    wikipedia.output(u'The page is move-protected at the sysop level, but the template does not seem correct. Fixing...')
+                    text, changes = re.subn(TemplateInThePage[1], TNR[3], text)
+
+            elif TSMP != None:
+                # implicitely moveRestr[0] = 'autoconfirmed', move-semi-protection
+                if TemplateInThePage[0] == 'autoconfirmed-move':
+                    wikipedia.output(u'The page is movable only by autoconfirmed users, skipping...')
+                else:
+                    wikipedia.output(u'The page is movable only by autoconfirmed users, but the template does not seem correct. Fixing...')
+                    text, changes = re.subn(TemplateInThePage[1], TNR[2], text)
+
+            if changes == 0:
+                # We tried to fix move-protection templates, but it did not work.
+                wikipedia.output('Warning : No move-protection template could be found')
+
+
+        if oldtext != text:
+            # Ok, asking if the change has to be performed and do it if yes.
+            wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
+            wikipedia.showDiff(oldtext, text)
+            choice = ''
+            while 1:
+                if not always:
+                    choice = wikipedia.inputChoice(u'Do you want to accept these changes?', ['Yes', 'No', 'All'], ['y', 'N', 'a'], 'N')
+                if choice.lower() in ['a', 'all']:
+                    always = True
+                if choice.lower() in ['n', 'no']:
+                    break
+                if choice.lower() in ['y', 'yes'] or always:
+                    try:
+                        page.put(text, commentUsed, force=True)
+                    except wikipedia.EditConflict:
+                        wikipedia.output(u'Edit conflict! skip!')
+                        break
+                    except wikipedia.ServerError:
+                        # Sometimes there is this error that's quite annoying because
+                        # can block the whole process for nothing. 
+                        errorCount += 1
+                        if errorCount < 5:
+                            wikipedia.output(u'Server Error! Wait..')
+                            time.sleep(3)
+                            continue
+                        else:
+                            # Prevent Infinite Loops
+                            raise wikipedia.ServerError(u'Fifth Server Error!')
+                    except wikipedia.SpamfilterError, e:
+                        wikipedia.output(u'Cannot change %s because of blacklist entry %s' % (page.title(), e.url))
+                        break
+                    except wikipedia.PageNotSaved, error:
+                        wikipedia.output(u'Error putting page: %s' % (error.args,))
+                        break
+                    except wikipedia.LockedPage:
+                        wikipedia.output(u'The page is still protected. Skipping...')
+                        break
+                    else:
+                        # Break only if the errors are one after the other
+                        errorCount = 0
+                        break
+                    
+if __name__ == "__main__":
+    try:
+        main()
+    finally:
+        wikipedia.stopme()


Property changes on: trunk/pywikipedia/blockpageschecker.py
___________________________________________________________________
Name: svn:eol-style
   + native

Modified: trunk/pywikipedia/commonsdelinker/plugins/__init__.py
===================================================================
--- trunk/pywikipedia/commonsdelinker/plugins/__init__.py	2008-05-02 00:05:37 UTC (rev 5292)
+++ trunk/pywikipedia/commonsdelinker/plugins/__init__.py	2008-05-02 00:13:27 UTC (rev 5293)
@@ -1 +1 @@
-__version__ = '$Id: $'
+__version__ = '$Id: $'


Property changes on: trunk/pywikipedia/commonsdelinker/plugins/__init__.py
___________________________________________________________________
Name: svn:eol-style
   + native

Modified: trunk/pywikipedia/commonsdelinker/plugins/books.py
===================================================================
--- trunk/pywikipedia/commonsdelinker/plugins/books.py	2008-05-02 00:05:37 UTC (rev 5292)
+++ trunk/pywikipedia/commonsdelinker/plugins/books.py	2008-05-02 00:13:27 UTC (rev 5293)
@@ -1,15 +1,15 @@
-__version__ = '$Id: $'
-
-import re
-
-class FrPhotographie(object):
-	hook = 'before_replace'
-	def __init__(self, CommonsDelinker):
-		self.CommonsDelinker = CommonsDelinker
-	def __call__(self, page, summary, image, replacement):
-		site = page.site()
-		if (site.lang, site.family.name) == ('fr', 'wikibooks') and replacement.get() is None:
-			if page.title().startswith('Photographie/') or page.title().startswith('Tribologie/'):
-				replacement.set('IMG.svg')
-				self.CommonsDelinker.output(u'%s Replaced %s by IMG.svg on %s.' % \
-					(self, image.get(), replacement.get()))
+__version__ = '$Id: $'
+
+import re
+
+class FrPhotographie(object):
+	hook = 'before_replace'
+	def __init__(self, CommonsDelinker):
+		self.CommonsDelinker = CommonsDelinker
+	def __call__(self, page, summary, image, replacement):
+		site = page.site()
+		if (site.lang, site.family.name) == ('fr', 'wikibooks') and replacement.get() is None:
+			if page.title().startswith('Photographie/') or page.title().startswith('Tribologie/'):
+				replacement.set('IMG.svg')
+				self.CommonsDelinker.output(u'%s Replaced %s by IMG.svg on %s.' % \
+					(self, image.get(), replacement.get()))


Property changes on: trunk/pywikipedia/commonsdelinker/plugins/books.py
___________________________________________________________________
Name: svn:eol-style
   + native

Modified: trunk/pywikipedia/commonsdelinker/plugins/debug.py
===================================================================
--- trunk/pywikipedia/commonsdelinker/plugins/debug.py	2008-05-02 00:05:37 UTC (rev 5292)
+++ trunk/pywikipedia/commonsdelinker/plugins/debug.py	2008-05-02 00:13:27 UTC (rev 5293)
@@ -1,17 +1,17 @@
-import difflib
-__version__ = '$Id: $'
-
-class Diff(object):
-	hook = 'before_save'
-	def __init__(self, CommonsDelinker):
-		self.CommonsDelinker = CommonsDelinker
-	def __call__(self, page, text, new_text, summary):
-		diff = difflib.context_diff(
-			text.encode('utf-8').splitlines(True),
-			new_text.get().encode('utf-8').splitlines(True))
-			
-		f = open((u'diff/%s-%s-%s.txt' % (page.urlname().replace('/', '-'),
-			page.site().dbName(), page.editTime())).encode('utf-8', 'ignore'), 'w')
-						
-		f.writelines(diff)
+import difflib
+__version__ = '$Id: $'
+
+class Diff(object):
+	hook = 'before_save'
+	def __init__(self, CommonsDelinker):
+		self.CommonsDelinker = CommonsDelinker
+	def __call__(self, page, text, new_text, summary):
+		diff = difflib.context_diff(
+			text.encode('utf-8').splitlines(True),
+			new_text.get().encode('utf-8').splitlines(True))
+			
+		f = open((u'diff/%s-%s-%s.txt' % (page.urlname().replace('/', '-'),
+			page.site().dbName(), page.editTime())).encode('utf-8', 'ignore'), 'w')
+						
+		f.writelines(diff)
 		f.close()
\ No newline at end of file


Property changes on: trunk/pywikipedia/commonsdelinker/plugins/debug.py
___________________________________________________________________
Name: svn:eol-style
   + native

Modified: trunk/pywikipedia/commonsdelinker/plugins/flags.py
===================================================================
--- trunk/pywikipedia/commonsdelinker/plugins/flags.py	2008-05-02 00:05:37 UTC (rev 5292)
+++ trunk/pywikipedia/commonsdelinker/plugins/flags.py	2008-05-02 00:13:27 UTC (rev 5293)
@@ -1,32 +1,32 @@
-__version__ = '$Id: $'
-
-import re
-
-class NlWiki(object):
-	hook = 'gallery_replace'
-	def __init__(self, CommonsDelinker):
-		self.CommonsDelinker = CommonsDelinker
-	def __call__(self, page, summary, image, replacement, match, groups):
-		site = page.site()
-		if (site.lang, site.family.name) == ('nl', 'wikipedia') and replacement.get() is None:
-			commands = self.CommonsDelinker.SummaryCache.get(site, 'Vlaggen', default = '')
-			
-			flags = re.findall(r'(?s)\<\!\-\-begin\-flags (.*?)\-\-\>(.*?)\<\!\-\-end\-flags\-\-\>', commands)
-			text = page.get()
-			
-			namespace = site.namespace(14)
-			r_namespace = r'(?:[Cc]ategory)|(?:[%s%s]%s)' % \
-				(namespace[0], namespace[0].lower(), namespace[1:])
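-			# e.g. if site.namespace(14) is u'Categorie', r_namespace matches
-			# both the "Category:" and "Categorie:"/"categorie:" prefixes.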
-					
-			for new_image, categories in flags:
-				for category in categories.split('\n'):
-					if category.strip() == '': continue
-					
-					r_cat = r'\[\[\s*%s\s*\:\s*%s\s*(?:\|.*?)?\s*\]\]' % (r_namespace, 
-						re.sub(r'\\[ _]', '[ _]', re.escape(category.strip())))
-					if re.search(r_cat, text):
-						self.CommonsDelinker.output(
-							u'%s %s replaced by %s in category %s' % \
-							(self, image, new_image, category))
-						replacement.set(new_image.replace(' ', '_'))
+__version__ = '$Id: $'
+
+import re
+
+class NlWiki(object):
+	hook = 'gallery_replace'
+	def __init__(self, CommonsDelinker):
+		self.CommonsDelinker = CommonsDelinker
+	def __call__(self, page, summary, image, replacement, match, groups):
+		site = page.site()
+		if (site.lang, site.family.name) == ('nl', 'wikipedia') and replacement.get() is None:
+			commands = self.CommonsDelinker.SummaryCache.get(site, 'Vlaggen', default = '')
+			
+			flags = re.findall(r'(?s)\<\!\-\-begin\-flags (.*?)\-\-\>(.*?)\<\!\-\-end\-flags\-\-\>', commands)
+			text = page.get()
+			
+			namespace = site.namespace(14)
+			r_namespace = r'(?:[Cc]ategory)|(?:[%s%s]%s)' % \
+				(namespace[0], namespace[0].lower(), namespace[1:])
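+			# e.g. if site.namespace(14) is u'Categorie', r_namespace matches
+			# both the "Category:" and "Categorie:"/"categorie:" prefixes.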
+					
+			for new_image, categories in flags:
+				for category in categories.split('\n'):
+					if category.strip() == '': continue
+					
+					r_cat = r'\[\[\s*%s\s*\:\s*%s\s*(?:\|.*?)?\s*\]\]' % (r_namespace, 
+						re.sub(r'\\[ _]', '[ _]', re.escape(category.strip())))
+					if re.search(r_cat, text):
+						self.CommonsDelinker.output(
+							u'%s %s replaced by %s in category %s' % \
+							(self, image, new_image, category))
+						replacement.set(new_image.replace(' ', '_'))
 			
\ No newline at end of file


Property changes on: trunk/pywikipedia/commonsdelinker/plugins/flags.py
___________________________________________________________________
Name: svn:eol-style
   + native

Modified: trunk/pywikipedia/families/wikia_family.py
===================================================================
--- trunk/pywikipedia/families/wikia_family.py	2008-05-02 00:05:37 UTC (rev 5292)
+++ trunk/pywikipedia/families/wikia_family.py	2008-05-02 00:13:27 UTC (rev 5293)
@@ -1,43 +1,43 @@
-# -*- coding: utf-8  -*-
-
-__version__ = '$Id:  $'
-
-import family
-
-# The Wikia Search family
-# user-config.py: usernames['wikia']['wikia'] = 'User name'
-
-class Family(family.Family):
-    def __init__(self):
-        family.Family.__init__(self)
-        self.name = u'wikia'
-
-        self.langs = {
-            u'wikia': u'search.wikia.com',
-        }
-
-        self.namespaces[4] = {
-            '_default': [u'search', self.namespaces[4]['_default']],
-        }
-        self.namespaces[5] = {
-            '_default': [u'search talk', self.namespaces[5]['_default']],
-        }
-        self.namespaces[100] = {
-            '_default': u'Forum',
-        }
-        self.namespaces[101] = {
-            '_default': u'Forum talk',
-        }
-        self.namespaces[112] = {
-            '_default': u'Mini',
-        }
-        self.namespaces[113] = {
-            '_default': u'Mini talk',
-        }
-        
-    def code2encoding(self, code):
-        return 'iso-8859-1'
-    
-    def version(self, code):
-        return "1.12alpha"
-    
+# -*- coding: utf-8  -*-
+
+__version__ = '$Id:  $'
+
+import family
+
+# The Wikia Search family
+# user-config.py: usernames['wikia']['wikia'] = 'User name'
+
+class Family(family.Family):
+    def __init__(self):
+        family.Family.__init__(self)
+        self.name = u'wikia'
+
+        self.langs = {
+            u'wikia': u'search.wikia.com',
+        }
+
+        self.namespaces[4] = {
+            '_default': [u'search', self.namespaces[4]['_default']],
+        }
+        self.namespaces[5] = {
+            '_default': [u'search talk', self.namespaces[5]['_default']],
+        }
+        self.namespaces[100] = {
+            '_default': u'Forum',
+        }
+        self.namespaces[101] = {
+            '_default': u'Forum talk',
+        }
+        self.namespaces[112] = {
+            '_default': u'Mini',
+        }
+        self.namespaces[113] = {
+            '_default': u'Mini talk',
+        }
+        
+    def code2encoding(self, code):
+        return 'iso-8859-1'
+    
+    def version(self, code):
+        return "1.12alpha"
+    


Property changes on: trunk/pywikipedia/families/wikia_family.py
___________________________________________________________________
Name: svn:eol-style
   + native

Modified: trunk/pywikipedia/fixing_redirects.py
===================================================================
--- trunk/pywikipedia/fixing_redirects.py	2008-05-02 00:05:37 UTC (rev 5292)
+++ trunk/pywikipedia/fixing_redirects.py	2008-05-02 00:13:27 UTC (rev 5293)
@@ -1,191 +1,191 @@
-#!/usr/bin/python
-# -*- coding: utf-8  -*-
-"""
-This script corrects links that point to redirects, either in all
-featured pages or in only one page of each wiki.
-
-Can be used with:
--featured      Run over featured pages
--page:XXX      Run over only one page
-
-"""
-#
-# This script is based on disambredir.py and solve_disambiguation.py
-#
-# Distributed under the terms of the MIT license.
-#
-__version__='$Id: disambredir.py 4407 2007-10-03 17:27:14Z leogregianin $'
-#
-import wikipedia
-import pagegenerators
-import re, sys
-
-msg = {
-    'ar': u'بوت: إصلاح التحويلات',
-    'en': u'Bot: Fixing redirects',
-    'he': u'בוט: מתקן הפניות',
-    'ja': u'ロボットによる:リダイレクト回避',
-    'nn': u'robot: retta omdirigeringar',
-    'no': u'Robot: Retter omdirigeringer',
-    'pt': u'Bot: Arrumando redirects',
-    'sv': u'Bot: Rättar omdirigeringar',
-    'zh': u'機器人: 修復重定向',
-}
-
-featured_articles = {
-    'ar': u'ويكيبيديا:مقالات مختارة',
-    'de': u'Wikipedia:Exzellente_Artikel',
-    'en': u'Wikipedia:Featured_articles',
-    'es': u'Wikipedia:Artículos_destacados',
-    'fr': u'Wikipédia:Articles_de_qualité',
-    'he': u'פורטל:ערכים_מומלצים',
-    'it': u'Wikipedia:Articoli_in_vetrina',
-    'ja': u'Wikipedia:秀逸な記事',
-    'nl': u'Wikipedia:Etalage',
-    'nn': u'Wikipedia:Gode artiklar',
-    'no': u'Wikipedia:Anbefalte artikler',
-    'pt': u'Wikipedia:Os_melhores_artigos',
-    'sv': u'Wikipedia:Utvalda_artiklar',
-    'zh': u'Wikipedia:特色条目',
-}
-
-def firstcap(string):
-    return string[0].upper()+string[1:]
-
-def treat(text, linkedPage, targetPage):
-    """
-    Based on the method of the same name in solve_disambiguation.py
-    """
-    # regex matching wikilinks: [[title#section|label]] plus an optional link trail
-    linkR = re.compile(r'\[\[(?P<title>[^\]\|#]*)(?P<section>#[^\]\|]*)?(\|(?P<label>[^\]]*))?\]\](?P<linktrail>' + linktrail + ')')
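-    # e.g. "[[Foo#Bar|baz]]qux" is captured as title='Foo', section='#Bar',
-    # label='baz', with 'linktrail' matching as much of 'qux' as the site's
-    # link trail pattern allows.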
-    curpos = 0
-    # This loop will run until we have finished the current page
-    while True:
-        m = linkR.search(text, pos = curpos)
-        if not m:
-            break
-        # Make sure that next time around we will not find this same hit.
-        curpos = m.start() + 1
-        # ignore interwiki links and links to sections of the same page
-        if m.group('title') == '' or mysite.isInterwikiLink(m.group('title')):
-            continue
-        else:
-            actualLinkPage = wikipedia.Page(page.site(), m.group('title'))
-            # Check whether the link found is to page.
-            if actualLinkPage != linkedPage:
-                continue
-
-        # how many bytes should be displayed around the current link
-        context = 15
-        # at the beginning of the link, start red color.
-        # at the end of the link, reset the color to default
-        wikipedia.output(text[max(0, m.start() - context) : m.start()] + '\03{lightred}' + text[m.start() : m.end()] + '\03{default}' + text[m.end() : m.end() + context])
-        choice = 'y'
-
-        # The link looks like this:
-        # [[page_title|link_text]]trailing_chars
-        page_title = m.group('title')
-        link_text = m.group('label')
-
-        if not link_text:
-            # or like this: [[page_title]]trailing_chars
-            link_text = page_title
-        if m.group('section') == None:
-            section = ''
-        else:
-            section = m.group('section')
-        trailing_chars = m.group('linktrail')
-        if trailing_chars:
-            link_text += trailing_chars
-
-        if choice in "uU":
-            # unlink - we remove the section if there's any
-            text = text[:m.start()] + link_text + text[m.end():]
-            continue
-        replaceit = choice in "rR"
-
-        if link_text[0].isupper():
-            new_page_title = targetPage.title()
-        else:
-            new_page_title = targetPage.title()[0].lower() + targetPage.title()[1:]
-        if replaceit and trailing_chars:
-            newlink = "[[%s%s]]%s" % (new_page_title, section, trailing_chars)
-        elif replaceit or (new_page_title == link_text and not section):
-            newlink = "[[%s]]" % new_page_title
-        # check if we can create a link with trailing characters instead of a pipelink
-        elif len(new_page_title) <= len(link_text) and firstcap(link_text[:len(new_page_title)]) == firstcap(new_page_title) and re.sub(re.compile(linktrail), '', link_text[len(new_page_title):]) == '' and not section:
-            newlink = "[[%s]]%s" % (link_text[:len(new_page_title)], link_text[len(new_page_title):])
-        else:
-            newlink = "[[%s%s|%s]]" % (new_page_title, section, link_text)
-        text = text[:m.start()] + newlink + text[m.end():]
-        continue
-    return text
-
-def workon(page):
-    try:
-        text = page.get()
-    except wikipedia.IsRedirectPage:
-        return
-    wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
-    links = page.linkedPages()
-    wikipedia.getall(mysite,links)
-    for page2 in links:
-        try:
-            target = page2.getRedirectTarget()
-        except (wikipedia.Error,wikipedia.SectionError):
-            continue
-        text = treat(text, page2, target)
-    if text != page.get():
-        comment = wikipedia.translate(mysite, msg)
-        page.put(text, comment)
-
-try:
-    start = '!'
-    featured = False
-    title = None
-    namespace = None
-
-    for arg in wikipedia.handleArgs():
-        if arg.startswith('-start'):
-            if len(arg) == 6:
-                start = wikipedia.input(u'Which start where?')
-            else:
-                start = arg[7:]
-        elif arg == '-featured':
-            featured = True
-        elif arg.startswith('-page'):
-            if len(arg) == 5:
-                title = wikipedia.input(u'Which page should be processed?')
-            else:
-                title = arg[6:]
-        elif arg.startswith('-namespace'):
-            if len(arg) == 10:
-                namespace = int(wikipedia.input(u'Which namespace should be processed?'))
-            else:
-                namespace = int(arg[11:])
-
+#!/usr/bin/python
+# -*- coding: utf-8  -*-
+"""
+This script corrects links that point to redirects, either on all
+featured pages of a wiki or on a single given page.
+
+Can be used with:
+-featured      Run over all featured pages
+-page:XXX      Run over only the page XXX
+
+"""
+#
+# This script is based on disambredir.py and solve_disambiguation.py.
+#
+# Distributed under the terms of the MIT license.
+#
+__version__='$Id: disambredir.py 4407 2007-10-03 17:27:14Z leogregianin $'
+#
+import wikipedia
+import pagegenerators
+import re, sys
+
+msg = {
+    'ar': u'بوت: إصلاح التحويلات',
+    'en': u'Bot: Fixing redirects',
+    'he': u'בוט: מתקן הפניות',
+    'ja': u'ロボットによる:リダイレクト回避',
+    'nn': u'robot: retta omdirigeringar',
+    'no': u'Robot: Retter omdirigeringer',
+    'pt': u'Bot: Arrumando redirects',
+    'sv': u'Bot: Rättar omdirigeringar',
+    'zh': u'機器人: 修復重定向',
+}
+
+featured_articles = {
+    'ar': u'ويكيبيديا:مقالات مختارة',
+    'de': u'Wikipedia:Exzellente_Artikel',
+    'en': u'Wikipedia:Featured_articles',
+    'es': u'Wikipedia:Artículos_destacados',
+    'fr': u'Wikipédia:Articles_de_qualité',
+    'he': u'פורטל:ערכים_מומלצים',
+    'it': u'Wikipedia:Articoli_in_vetrina',
+    'ja': u'Wikipedia:秀逸な記事',
+    'nl': u'Wikipedia:Etalage',
+    'nn': u'Wikipedia:Gode artiklar',
+    'no': u'Wikipedia:Anbefalte artikler',
+    'pt': u'Wikipedia:Os_melhores_artigos',
+    'sv': u'Wikipedia:Utvalda_artiklar',
+    'zh': u'Wikipedia:特色条目',
+}
+
+def firstcap(string):
+    return string[0].upper()+string[1:]
+
+def treat(text, linkedPage, targetPage):
+    """
+    Based on the method of the same name in solve_disambiguation.py
+    """
+    # build a regex matching wiki links, with optional section, label and trailing characters
+    linkR = re.compile(r'\[\[(?P<title>[^\]\|#]*)(?P<section>#[^\]\|]*)?(\|(?P<label>[^\]]*))?\]\](?P<linktrail>' + linktrail + ')')
+    curpos = 0
+    # This loop will run until we have finished the current page
+    while True:
+        m = linkR.search(text, pos = curpos)
+        if not m:
+            break
+        # Make sure that next time around we will not find this same hit.
+        curpos = m.start() + 1
+        # ignore interwiki links and links to sections of the same page
+        if m.group('title') == '' or mysite.isInterwikiLink(m.group('title')):
+            continue
+        else:
+            actualLinkPage = wikipedia.Page(page.site(), m.group('title'))
+            # Check whether the link found is to page.
+            if actualLinkPage != linkedPage:
+                continue
+
+        # how many bytes should be displayed around the current link
+        context = 15
+        # at the beginning of the link, start red color.
+        # at the end of the link, reset the color to default
+        wikipedia.output(text[max(0, m.start() - context) : m.start()] + '\03{lightred}' + text[m.start() : m.end()] + '\03{default}' + text[m.end() : m.end() + context])
+        choice = 'y'
+
+        # The link looks like this:
+        # [[page_title|link_text]]trailing_chars
+        page_title = m.group('title')
+        link_text = m.group('label')
+
+        if not link_text:
+            # or like this: [[page_title]]trailing_chars
+            link_text = page_title
+        if m.group('section') == None:
+            section = ''
+        else:
+            section = m.group('section')
+        trailing_chars = m.group('linktrail')
+        if trailing_chars:
+            link_text += trailing_chars
+
+        if choice in "uU":
+            # unlink - we remove the section if there's any
+            text = text[:m.start()] + link_text + text[m.end():]
+            continue
+        replaceit = choice in "rR"
+
+        if link_text[0].isupper():
+            new_page_title = targetPage.title()
+        else:
+            new_page_title = targetPage.title()[0].lower() + targetPage.title()[1:]
+        if replaceit and trailing_chars:
+            newlink = "[[%s%s]]%s" % (new_page_title, section, trailing_chars)
+        elif replaceit or (new_page_title == link_text and not section):
+            newlink = "[[%s]]" % new_page_title
+        # check if we can create a link with trailing characters instead of a pipelink
+        elif len(new_page_title) <= len(link_text) and firstcap(link_text[:len(new_page_title)]) == firstcap(new_page_title) and re.sub(re.compile(linktrail), '', link_text[len(new_page_title):]) == '' and not section:
+            newlink = "[[%s]]%s" % (link_text[:len(new_page_title)], link_text[len(new_page_title):])
+        else:
+            newlink = "[[%s%s|%s]]" % (new_page_title, section, link_text)
+        text = text[:m.start()] + newlink + text[m.end():]
+        continue
+    return text
+
+def workon(page):
+    try:
+        text = page.get()
+    except wikipedia.IsRedirectPage:
+        return
+    wikipedia.output(u"\n\n>>> \03{lightpurple}%s\03{default} <<<" % page.title())
+    links = page.linkedPages()
+    wikipedia.getall(mysite,links)
+    for page2 in links:
+        try:
+            target = page2.getRedirectTarget()
+        except (wikipedia.Error,wikipedia.SectionError):
+            continue
+        text = treat(text, page2, target)
+    if text != page.get():
+        comment = wikipedia.translate(mysite, msg)
+        page.put(text, comment)
+
+try:
+    start = '!'
+    featured = False
+    title = None
+    namespace = None
+
+    for arg in wikipedia.handleArgs():
+        if arg.startswith('-start'):
+            if len(arg) == 6:
+                start = wikipedia.input(u'From which page should the bot start?')
+            else:
+                start = arg[7:]
+        elif arg == '-featured':
+            featured = True
+        elif arg.startswith('-page'):
+            if len(arg) == 5:
+                title = wikipedia.input(u'Which page should be processed?')
+            else:
+                title = arg[6:]
+        elif arg.startswith('-namespace'):
+            if len(arg) == 10:
+                namespace = int(wikipedia.input(u'Which namespace should be processed?'))
+            else:
+                namespace = int(arg[11:])
+
     mysite = wikipedia.getSite()
     if mysite.sitename() == 'wikipedia:nl':
         wikipedia.output(u'\03{lightred}There is consensus on the Dutch Wikipedia that bots should not be used to fix redirects.\03{default}')
         sys.exit()
-
-    linktrail = mysite.linktrail()
-    if featured:
-        featuredList = wikipedia.translate(mysite, featured_articles)
-        ref = wikipedia.Page(wikipedia.getSite(), featuredList)
-        gen = pagegenerators.ReferringPageGenerator(ref)
-        generator = pagegenerators.NamespaceFilterPageGenerator(gen, [0])
-        for page in generator:
-            workon(page)
-    elif title is not None:
-        page = wikipedia.Page(wikipedia.getSite(), title)
-        workon(page)
-    elif namespace is not None:
-        for page in pagegenerators.AllpagesPageGenerator(start=start, namespace=namespace, includeredirects=False):
-            workon(page)
-    else:
-        wikipedia.showHelp('fixing_redirects')
-        sys.exit()
-
-finally:
-    wikipedia.stopme()
+
+    linktrail = mysite.linktrail()
+    if featured:
+        featuredList = wikipedia.translate(mysite, featured_articles)
+        ref = wikipedia.Page(wikipedia.getSite(), featuredList)
+        gen = pagegenerators.ReferringPageGenerator(ref)
+        generator = pagegenerators.NamespaceFilterPageGenerator(gen, [0])
+        for page in generator:
+            workon(page)
+    elif title is not None:
+        page = wikipedia.Page(wikipedia.getSite(), title)
+        workon(page)
+    elif namespace is not None:
+        for page in pagegenerators.AllpagesPageGenerator(start=start, namespace=namespace, includeredirects=False):
+            workon(page)
+    else:
+        wikipedia.showHelp('fixing_redirects')
+        sys.exit()
+
+finally:
+    wikipedia.stopme()


Property changes on: trunk/pywikipedia/fixing_redirects.py
___________________________________________________________________
Name: svn:eol-style
   + native
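
As a quick illustration of the fixing_redirects.py script above: a typical invocation would be "python fixing_redirects.py -featured" or "python fixing_redirects.py -page:Foo", and the core of treat() then rewrites each link whose target is a resolved redirect. The sketch below is a minimal, standalone rendering of that rewrite step, not part of the commit; the linktrail pattern, the page titles and the redirect target are invented examples, and the full function has further branches for unpiped links, sections and trailing characters.

    import re

    # Assumed linktrail pattern for an English-language wiki; the real
    # script obtains it from mysite.linktrail().
    linktrail = r'([a-z]*)'
    linkR = re.compile(r'\[\[(?P<title>[^\]\|#]*)(?P<section>#[^\]\|]*)?'
                       r'(\|(?P<label>[^\]]*))?\]\](?P<linktrail>' + linktrail + ')')

    # Hypothetical wikitext in which [[Colour]] is a redirect to [[Color]].
    text = u"The [[Colour|colours]] of the flag."
    target_title = u"Color"

    m = linkR.search(text)
    link_text = m.group('label') or m.group('title')
    if m.group('linktrail'):
        link_text += m.group('linktrail')

    # Keep the capitalisation of the visible link text, as treat() does.
    if link_text[0].isupper():
        new_title = target_title
    else:
        new_title = target_title[0].lower() + target_title[1:]

    newlink = u"[[%s|%s]]" % (new_title, link_text)
    print(text[:m.start()] + newlink + text[m.end():])
    # prints: The [[color|colours]] of the flag.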

Modified: trunk/pywikipedia/maintenance/readtalk.py
===================================================================
--- trunk/pywikipedia/maintenance/readtalk.py	2008-05-02 00:05:37 UTC (rev 5292)
+++ trunk/pywikipedia/maintenance/readtalk.py	2008-05-02 00:13:27 UTC (rev 5293)
@@ -1,30 +1,30 @@
-#!/usr/bin/python
-# -*- coding: utf-8  -*-
-"""
-Tool to read all your talk pages.
-
-This tool goes through all the normal (non-sysop) accounts configured in user-config.py and outputs the contents of each account's talk page.
-
-TODO:
-*Error checking
-"""
-import sys, re
-sys.path.append(re.sub('/[^/]*$', '', sys.path[0])) #sys.path.append('..')
-import wikipedia, config
-
-def main():
-    # Get a dictionary of all the usernames
-    namedict = config.usernames
-    for familyName in namedict.iterkeys():
-        for lang in namedict[familyName].iterkeys():
-            site = wikipedia.getSite(code=lang, fam=familyName)
-            username = config.usernames[familyName][lang]
-            page = wikipedia.Page(site, u'User_Talk:' + username)
-            wikipedia.output(u'Reading talk page from ' + lang + u' ' + familyName)
-            wikipedia.output(page.get(nofollow_redirects=True))
-
-if __name__ == "__main__":
-    try:
-        main()
-    finally:
-        wikipedia.stopme()
+#!/usr/bin/python
+# -*- coding: utf-8  -*-
+"""
+Tool to read all your talk pages.
+
+This tool goes through all the normal (non-sysop) accounts configured in user-config.py and outputs the contents of each account's talk page.
+
+TODO:
+*Error checking
+"""
+import sys, re
+sys.path.append(re.sub('/[^/]*$', '', sys.path[0])) #sys.path.append('..')
+import wikipedia, config
+
+def main():
+    # Get a dictionary of all the usernames
+    namedict = config.usernames
+    for familyName in namedict.iterkeys():
+        for lang in namedict[familyName].iterkeys():
+            site = wikipedia.getSite(code=lang, fam=familyName)
+            username = config.usernames[familyName][lang]
+            page = wikipedia.Page(site, u'User_Talk:' + username)
+            wikipedia.output(u'Reading talk page from ' + lang + u' ' + familyName)
+            wikipedia.output(page.get(nofollow_redirects=True))
+
+if __name__ == "__main__":
+    try:
+        main()
+    finally:
+        wikipedia.stopme()


Property changes on: trunk/pywikipedia/maintenance/readtalk.py
___________________________________________________________________
Name: svn:eol-style
   + native
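
A note on readtalk.py above: it walks config.usernames, a nested dictionary keyed first by family name and then by language code. The entries below are a minimal, invented sketch of the shape the two iterkeys() loops expect; the family names, language codes and account name are assumptions, not real configuration.

    # Hypothetical user-config.py entries; the family names, language
    # codes and the account name are made up for illustration only.
    usernames = {
        'wikipedia': {
            'en': u'ExampleBot',
            'de': u'ExampleBot',
        },
        'wiktionary': {
            'en': u'ExampleBot',
        },
    }
    # With this configuration, readtalk.py would read User_Talk:ExampleBot
    # on en.wikipedia, de.wikipedia and en.wiktionary.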





More information about the Pywikipedia-l mailing list