jenkins-bot has submitted this change and it was merged.
Change subject: [PEP8] changes
......................................................................
[PEP8] changes
Change-Id: Ib9d0f0448c75c828cc5c843b77676c5b18597cfa
---
M selflink.py
1 file changed, 43 insertions(+), 34 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/selflink.py b/selflink.py
index 0bb684e..b5a5910 100644
--- a/selflink.py
+++ b/selflink.py
@@ -26,16 +26,18 @@
and the bot will only work on that single page.
"""
#
-# (C) Pywikipedia bot team, 2006-2010
+# (C) Pywiki bot team, 2006-2013
#
# Distributed under the terms of the MIT license.
#
-__version__='$Id$'
+__version__ = '$Id$'
#
-import re, sys
-import wikipedia as pywikibot
-import pagegenerators, catlib
+import re
+import sys
+import pywikibot
+import pagegenerators
+import catlib
import editarticle
# This is required for the text that is shown when you run this script
@@ -48,33 +50,35 @@
# NOTE: Predefined replacement tasks might use their own dictionary, see 'fixes'
# in fixes.py.
msg = {
- 'ar':u'روبوت: إزالة وصلات ذاتية',
- 'be-x-old':u'Робат: выдаленьне аўтаспасылкі',
- 'cs':u'Robot odstranil odkaz na název článku',
- 'da':u'Bot: fjerner selvreference',
- 'de':u'Bot: Entferne Selbstlinks',
- 'en':u'Robot: Removing selflinks',
- 'es':u'Bot: Eliminando enlaces al mismo artículo',
- 'fa':u'ربات:برداشتن پیوند به خود',
- 'fr':u'Robot: Enlève autoliens',
- 'he':u'בוט: מסיר קישורים של הדף לעצמו',
- 'hu':u'Bot: Önmagukra mutató hivatkozások eltávolítása',
- 'ja':u'ロボットによる 自己リンクの解除',
- 'ksh':u'Bot: Ene Lengk vun de Sigg op sesch sellver, erus jenumme.',
- 'nl':u'Bot: verwijzingen naar pagina zelf verwijderd',
- 'nn':u'robot: fjerna sjølvlenkjer',
- 'no':u'robot: fjerner selvlenker',
- 'pl':u'Robot automatycznie usuwa linki zwrotne',
- 'pt':u'Bot: Retirando link para o próprio artigo',
- 'ru':u'Бот: удалил заголовок-ссылку в тексте. см. ',
- 'zh':u'機器人:移除自我連結',
+ 'ar': u'روبوت: إزالة وصلات ذاتية',
+ 'be-x-old': u'Робат: выдаленьне аўтаспасылкі',
+ 'cs': u'Robot odstranil odkaz na název článku',
+ 'da': u'Bot: fjerner selvreference',
+ 'de': u'Bot: Entferne Selbstlinks',
+ 'en': u'Robot: Removing selflinks',
+ 'es': u'Bot: Eliminando enlaces al mismo artículo',
+ 'fa': u'ربات:برداشتن پیوند به خود',
+ 'fr': u'Robot: Enlève autoliens',
+ 'he': u'בוט: מסיר קישורים של הדף לעצמו',
+ 'hu': u'Bot: Önmagukra mutató hivatkozások eltávolítása',
+ 'ja': u'ロボットによる 自己リンクの解除',
+ 'ksh': u'Bot: Ene Lengk vun de Sigg op sesch sellver, erus jenumme.',
+ 'nl': u'Bot: verwijzingen naar pagina zelf verwijderd',
+ 'nn': u'robot: fjerna sjølvlenkjer',
+ 'no': u'robot: fjerner selvlenker',
+ 'pl': u'Robot automatycznie usuwa linki zwrotne',
+ 'pt': u'Bot: Retirando link para o próprio artigo',
+ 'ru': u'Бот: удалил заголовок-ссылку в тексте. см. ',
+ 'zh': u'機器人:移除自我連結',
}
+
class XmlDumpSelflinkPageGenerator:
"""
Generator which will yield Pages that might contain selflinks.
These pages will be retrieved from a local XML dump file
(cur table).
+
"""
def __init__(self, xmlFilename):
"""
@@ -100,6 +104,7 @@
yield pywikibot.Page(mysite, entry.title)
continue
+
class SelflinkBot:
def __init__(self, generator, always=False):
@@ -111,11 +116,12 @@
# group label is the alternative link title, that's everything between | and ].
# group linktrail is the link trail, that's letters after ]] which are part of the word.
# note that the definition of 'letter' varies from language to language.
- self.linkR = re.compile(r'\[\[(?P<title>[^\]\|#]*)(?P<section>#[^\]\|]*)?(\|(?P<label>[^\]]*))?\]\](?P<linktrail>' + linktrail + ')')
+ self.linkR = re.compile(
+ r'\[\[(?P<title>[^\]\|#]*)(?P<section>#[^\]\|]*)?(\|(?P<label>[^\]]*))?\]\](?P<linktrail>' + linktrail + ')')
self.always = always
self.done = False
- def handleNextLink(self, page, text, match, context = 100):
+ def handleNextLink(self, page, text, match, context=100):
"""
Returns a tuple (text, jumpToBeginning).
text is the unicode string after the current link has been processed.
@@ -146,9 +152,9 @@
choice = 'a'
else:
pywikibot.output(
- text[max(0, match.start() - context) : match.start()] \
- + '\03{lightred}' + text[match.start() : match.end()] \
- + '\03{default}' + text[match.end() : match.end() + context])
+ text[max(0, match.start() - context):match.start()] \
+ + '\03{lightred}' + text[match.start():match.end()] \
+ + '\03{default}' + text[match.end():match.end() + context])
choice = pywikibot.inputChoice(
u'\nWhat shall be done with this selflink?\n',
['unlink', 'make bold', 'skip', 'edit', 'more context',
@@ -161,7 +167,7 @@
return text, False
elif choice == 'e':
editor = editarticle.TextEditor()
- newText = editor.edit(text, jumpIndex = match.start())
+ newText = editor.edit(text, jumpIndex=match.start())
# if user didn't press Cancel
if newText:
return newText, True
@@ -202,7 +208,7 @@
text = oldText
curpos = 0
while curpos < len(text):
- match = self.linkR.search(text, pos = curpos)
+ match = self.linkR.search(text, pos=curpos)
if not match:
break
# Make sure that next time around we will not find this same
@@ -231,8 +237,10 @@
pywikibot.setAction(comment)
for page in self.generator:
- if self.done: break
+ if self.done:
+ break
self.treat(page)
+
def main():
#page generator
@@ -289,11 +297,12 @@
pywikibot.showHelp('selflink')
else:
if namespaces != []:
- gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
+ gen = pagegenerators.NamespaceFilterPageGenerator(gen, namespaces)
preloadingGen = pagegenerators.PreloadingGenerator(gen)
bot = SelflinkBot(preloadingGen, always)
bot.run()
+
if __name__ == "__main__":
try:
main()
--
To view, visit https://gerrit.wikimedia.org/r/103557
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ib9d0f0448c75c828cc5c843b77676c5b18597cfa
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: [PEP8] changes
......................................................................
[PEP8] changes
Change-Id: I06c5caef1dbde999c10c59605b5f5af4cf06fe83
---
M delete.py
M delinker.py
M disambredir.py
M diskcache.py
4 files changed, 68 insertions(+), 47 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/delete.py b/delete.py
index de37b43..7c14955 100644
--- a/delete.py
+++ b/delete.py
@@ -32,12 +32,13 @@
__version__ = '$Id$'
#
-import wikipedia as pywikibot
+import pywikibot
from pywikibot import i18n
import config
import catlib
import pagegenerators
+
class DeletionRobot:
""" This robot allows deletion of pages en masse. """
diff --git a/delinker.py b/delinker.py
index 955e0f6..4c4b6fe 100644
--- a/delinker.py
+++ b/delinker.py
@@ -2,9 +2,11 @@
__version__ = '$Id$'
-import wikipedia, config
+import sys
+import os
+import wikipedia
+import config
-import sys, os
sys.path.insert(0, 'commonsdelinker')
module = 'delinker'
diff --git a/disambredir.py b/disambredir.py
index d2ea6bc..ffc644e 100644
--- a/disambredir.py
+++ b/disambredir.py
@@ -5,15 +5,17 @@
each link that goes to a redirect page whether it should be replaced.
"""
#
-# (C) André Engels and others, 2006-2009
+# (c) André Engels and others, 2006-2009
+# (c) pywikibot team, 2006-2013
#
# Distributed under the terms of the MIT license.
#
-__version__='$Id$'
+__version__ = '$Id$'
#
-import wikipedia as pywikibot
+import sys
+import re
+import pywikibot
import pagegenerators
-import sys, re
import catlib
msg = {
@@ -31,19 +33,20 @@
'zh': u'機器人: 修改消歧義頁中的重定向連結',
}
+
def firstcap(string):
- return string[0].upper()+string[1:]
+ return string[0].upper() + string[1:]
+
def treat(text, linkedPage, targetPage):
- """
- Based on the method of the same name in solve_disambiguation.py.
- """
+ """ Based on the method of the same name in solve_disambiguation.py. """
# make a backup of the original text so we can show the changes later
- linkR = re.compile(r'\[\[(?P<title>[^\]\|#]*)(?P<section>#[^\]\|]*)?(\|(?P<label>[^\]]*))?\]\](?P<linktrail>' + linktrail + ')')
+ linkR = re.compile(
+ r'\[\[(?P<title>[^\]\|#]*)(?P<section>#[^\]\|]*)?(\|(?P<label>[^\]]*))?\]\](?P<linktrail>' + linktrail + ')')
curpos = 0
# This loop will run until we have finished the current page
while True:
- m = linkR.search(text, pos = curpos)
+ m = linkR.search(text, pos=curpos)
if not m:
break
# Make sure that next time around we will not find this same hit.
@@ -61,12 +64,13 @@
context = 30
# at the beginning of the link, start red color.
# at the end of the link, reset the color to default
- pywikibot.output(text[max(0, m.start() - context) : m.start()] +
- '\03{lightred}' + text[m.start() : m.end()] +
- '\03{default}' + text[m.end() : m.end() + context])
+ pywikibot.output(text[max(0, m.start() - context):m.start()] +
+ '\03{lightred}' + text[m.start():m.end()] +
+ '\03{default}' + text[m.end():m.end() + context])
while True:
choice = pywikibot.input(
- u"Option (N=do not change, y=change link to \03{lightpurple}%s\03{default}, r=change and replace text, u=unlink)"%targetPage.title())
+ u"Option (N=do not change, y=change link to \03{lightpurple}%s\03{default}, r=change and replace text, u=unlink)"
+ % targetPage.title())
try:
choice = choice[0]
except:
@@ -83,7 +87,7 @@
if not link_text:
# or like this: [[page_title]]trailing_chars
link_text = page_title
- if m.group('section') == None:
+ if m.group('section') is None:
section = ''
else:
section = m.group('section')
@@ -100,8 +104,8 @@
if link_text[0].isupper():
new_page_title = targetPage.title()
else:
- new_page_title = targetPage.title()[0].lower() + \
- targetPage.title()[1:]
+ new_page_title = (targetPage.title()[0].lower() +
+ targetPage.title()[1:])
if replaceit and trailing_chars:
newlink = "[[%s%s]]%s" % (new_page_title, section, trailing_chars)
elif replaceit or (new_page_title == link_text and not section):
@@ -120,6 +124,7 @@
continue
return text
+
def workon(page, links):
text = page.get()
# Show the title of the page we're working on.
@@ -129,12 +134,13 @@
for page2 in links:
try:
target = page2.getRedirectTarget()
- except (pywikibot.Error,pywikibot.SectionError):
+ except (pywikibot.Error, pywikibot.SectionError):
continue
text = treat(text, page2, target)
if text != page.get():
comment = pywikibot.translate(mysite, msg)
page.put(text, comment)
+
def main():
global mysite, linktrail, page
@@ -149,7 +155,7 @@
linktrail = mysite.linktrail()
try:
generator = pagegenerators.CategorizedPageGenerator(
- mysite.disambcategory(), start = start)
+ mysite.disambcategory(), start=start)
except pywikibot.NoPage:
pywikibot.output(
"The bot does not know the disambiguation category for your wiki.")
@@ -163,18 +169,18 @@
if page.isRedirectPage():
continue
linked = page.linkedPages()
- pagestodo.append((page,linked))
+ pagestodo.append((page, linked))
pagestoload += linked
if len(pagestoload) > 49:
- pywikibot.getall(mysite,pagestoload)
+ pywikibot.getall(mysite, pagestoload)
for page, links in pagestodo:
- workon(page,links)
+ workon(page, links)
pagestoload = []
pagestodo = []
+
if __name__ == "__main__":
try:
main()
finally:
pywikibot.stopme()
-
diff --git a/diskcache.py b/diskcache.py
index 8dcfb75..23f3dcc 100644
--- a/diskcache.py
+++ b/diskcache.py
@@ -1,6 +1,14 @@
+#!/usr/bin/python
# -*- coding: utf-8 -*-
-
+""" Dictionary like disk caching module """
+#
+# (c) Bryan Tong Minh, 2008
+# (c) pywikibot team, 2008-2013
+#
+# Distributed under the terms of the MIT license
+#
__version__ = '$Id$'
+#
import random
import config
@@ -8,27 +16,27 @@
# http://mail.python.org/pipermail/python-list/2006-March/375280.html
try:
- os.SEEK_SET
+ os.SEEK_SET
except AttributeError:
- os.SEEK_SET, os.SEEK_CUR, os.SEEK_END = range(3)
+ os.SEEK_SET, os.SEEK_CUR, os.SEEK_END = range(3)
-## Dictionary like disk caching module
-## (c) Copyright 2008 - Bryan Tong Minh / The Pywikipediabot team
-## Licensed under the terms of the MIT license
class CachedReadOnlyDictI(object):
"""A cached readonly dict with case insensitive keys."""
- def __init__(self, data, prefix = "", max_size = 10, cache_base = 'cache'):
+
+ def __init__(self, data, prefix="", max_size=10, cache_base='cache'):
self.max_size = max_size
while True:
- self.cache_path = config.datafilepath(cache_base, prefix + ''.join(
- [random.choice('abcdefghijklmnopqrstuvwxyz')
- for i in xrange(16)]))
- if not os.path.exists(self.cache_path): break
+ self.cache_path = config.datafilepath(
+ cache_base, prefix + ''.join(
+ [random.choice('abcdefghijklmnopqrstuvwxyz')
+ for i in xrange(16)]))
+ if not os.path.exists(self.cache_path):
+ break
self.cache_file = open(self.cache_path, 'wb+')
lookup = [-1] * 36
- data.sort(key = lambda i: i[0])
+ data.sort(key=lambda i: i[0])
for key, value in data:
if type(key) is unicode:
key = key.encode('utf-8')
@@ -36,11 +44,12 @@
key = str(key)
key = key.lower()
index = key[0]
- if not ((index >= 'a' and index <= 'z') or (index >= '0' and index <= '9')) or '\t' in key:
+ if not ((index >= 'a' and index <= 'z') or
+ (index >= '0' and index <= '9')) or '\t' in key:
raise RuntimeError('Only alphabetic keys are supported', key)
if index < 'a':
- index = ord(index) - 48 + 26 # Numeric
+ index = ord(index) - 48 + 26 # Numeric
else:
index = ord(index) - 97
if lookup[index] == -1:
@@ -54,9 +63,11 @@
if len(key) > 0xFF:
raise RuntimeError('Key length must be smaller than %i' % 0xFF)
if len(value) > 0xFFFFFF:
- raise RuntimeError('Value length must be smaller than %i' % 0xFFFFFF)
+ raise RuntimeError('Value length must be smaller than %i'
+ % 0xFFFFFF)
- self.cache_file.write('%02x%s%06x%s' % (len(key), key, len(value), value))
+ self.cache_file.write('%02x%s%06x%s'
+ % (len(key), key, len(value), value))
self.lookup = lookup
@@ -96,11 +107,12 @@
index = key[0]
except IndexError:
raise KeyError(key)
- if not ((index >= 'a' and index <= 'z') or (index >= '0' and index <= '9')):
+ if not ((index >= 'a' and index <= 'z') or
+ (index >= '0' and index <= '9')):
raise KeyError(key)
if index < 'a':
- i = ord(index) - 48 + 26 # Numeric
+ i = ord(index) - 48 + 26 # Numeric
else:
i = ord(index) - 97
@@ -134,8 +146,8 @@
length = int(self.read(6, key), 16)
self.cache_file.seek(length, os.SEEK_CUR)
-
- def read(self, length, key = ''):
+ def read(self, length, key=''):
s = self.cache_file.read(length)
- if not s: raise KeyError(key)
+ if not s:
+ raise KeyError(key)
return s
--
To view, visit https://gerrit.wikimedia.org/r/103539
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I06c5caef1dbde999c10c59605b5f5af4cf06fe83
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: lonelypages.py: ported to core
......................................................................
lonelypages.py: ported to core
Ported lonelypages.py to pywikibot core. Removed some unnecessary inline
comments from the code. Also shifted messages to i18n TranslateWiki (see
I9b3506d1e801909a2b1d25f6d82063cdca195d26).
Change-Id: Ic832222ec22c90842388ab78c2220af15a567802
---
A scripts/lonelypages.py
1 file changed, 263 insertions(+), 0 deletions(-)
Approvals:
Merlijn van Deen: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/lonelypages.py b/scripts/lonelypages.py
new file mode 100644
index 0000000..b5ce898
--- /dev/null
+++ b/scripts/lonelypages.py
@@ -0,0 +1,263 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+"""
+This is a script written to add the template "orphan" to the pages that aren't
+linked by other pages. It can give some strange Errors sometime, I hope that
+all of them are fixed in this version.
+
+These command line parameters can be used to specify which pages to work on:
+
+&params;
+
+-xml Retrieve information from a local XML dump (pages-articles
+ or pages-meta-current, see http://download.wikimedia.org).
+ Argument can also be given as "-xml:filename".
+
+-page Only edit a specific page.
+ Argument can also be given as "-page:pagetitle". You can
+ give this parameter multiple times to edit multiple pages.
+
+Furthermore, the following command line parameters are supported:
+
+-enable: - Enable or disable the bot via a Wiki Page.
+
+-disambig: - Set a page where the bot save the name of the disambig
+ pages found (default: skip the pages)
+
+-limit: - Set how many pages check.
+
+-always - Always say yes, won't ask
+
+--- FixMes ---
+* Check that all the code hasn't bugs
+
+--- Credit and Help ---
+This Script has been developed by Pietrodn and Filnik on botwiki. If you want
+to help us improving our script archive and pywikipediabot's archive or you
+simply need help you can find us here: http://botwiki.sno.cc
+
+--- Examples ---
+python lonelypages.py -enable:User:Bot/CheckBot -always
+"""
+#
+# (C) Pietrodn, it.wiki 2006-2007
+# (C) Filnik, it.wiki 2007
+# (C) Pywikipedia bot team, 2008-2012
+#
+# Distributed under the terms of the MIT license.
+#
+__version__ = '$Id$'
+#
+
+import pywikibot
+from pywikibot import i18n
+from pywikibot import pagegenerators
+from pywikibot import re
+
+# This is required for the text that is shown when you run this script
+# with the parameter -help.
+docuReplacements = {
+    '&params;': pagegenerators.parameterHelp,
+}
+
+Template = {
+ 'ar': u'{{يتيمة|تاريخ={{نسخ:اسم_شهر}} {{نسخ:عام}}}}',
+ 'ca': u'{{Orfe|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}',
+ 'en': u'{{Orphan|date={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}',
+ 'it': u'{{O||mese={{subst:CURRENTMONTHNAME}} {{subst:CURRENTYEAR}}}}',
+ 'jh': u'{{孤立|{{subst:DATE}}}}',
+ 'za': u'{{subst:Orphan/auto}}',
+}
+
+exception_regex = {
+ 'ar': [ur'\{\{(?:قالب:|)(يتيمة)[\|\}]'],
+ 'ca': [r'\{\{(?:template:|)(orfe)[\|\}]'],
+ 'en': [r'\{\{(?:template:|)(orphan)[\|\}]', r'\{\{(?:template:|)(wi)[\|\}]'],
+ 'it': [r'\{\{(?:template:|)(o|a)[\|\}]'],
+ 'jh': [ur'\{\{(?:template:|)(孤立)[\|\}]'],
+ 'za': [r'\{\{(?:template:|)(orphan)[\|\}]'],
+}
+#####################################################
+# Here you have to put the config for your Project. #
+#####################################################
+
+
+def main():
+ enablePage = None # Check if someone set an enablePage or not
+ limit = 50000 # Hope that there aren't so many lonely pages in a project
+ generator = None # Check if bot should use default generator or not
+ # Load all default generators!
+ genFactory = pagegenerators.GeneratorFactory()
+ nwpages = False # Check variable for newpages
+ always = False # Check variable for always
+ disambigPage = None # If no disambigPage given, not use it.
+ # Arguments!
+ for arg in pywikibot.handleArgs():
+ if arg.startswith('-enable'):
+ if len(arg) == 7:
+ enablePage = pywikibot.input(u'Would you like to check if the \
+ bot should run or not?')
+ else:
+ enablePage = arg[8:]
+ if arg.startswith('-disambig'):
+ if len(arg) == 9:
+ disambigPage = pywikibot.input(u'In which page should the bot \
+ save the disambig pages?')
+ else:
+ disambigPage = arg[10:]
+ elif arg.startswith('-limit'):
+ if len(arg) == 6:
+ limit = int(pywikibot.input(u'How many pages do you want to \
+ check?'))
+ else:
+ limit = int(arg[7:])
+ elif arg.startswith('-newpages'):
+ if len(arg) == 9:
+ nwlimit = 50 # Default: 50 pages
+ else:
+ nwlimit = int(arg[10:])
+ generator = pywikibot.getSite().newpages(number=nwlimit)
+ nwpages = True
+ elif arg == '-always':
+ always = True
+ else:
+ genFactory.handleArg(arg)
+    # Retrieve the site
+ wikiSite = pywikibot.getSite()
+
+ if not generator:
+ generator = genFactory.getCombinedGenerator()
+
+ # If the generator is not given, use the default one
+ if not generator:
+ generator = wikiSite.lonelypages(repeat=True, number=limit)
+ # Take the configurations according to our project
+ comment = i18n.twtranslate(wikiSite, 'lonelypages-comment-add-template')
+ commentdisambig = i18n.twtranslate(wikiSite, 'lonelypages-comment-add-disambig-template')
+ template = i18n.translate(wikiSite, Template, fallback=False)
+ exception = i18n.translate(wikiSite, exception_regex, fallback=False)
+ if template is None or exception is None:
+ raise Exception("Missing configuration for site %r" % wikiSite)
+ # EnablePage part
+ if enablePage is not None:
+ # Define the Page Object
+ enable = pywikibot.Page(wikiSite, enablePage)
+ # Loading the page's data
+ try:
+ getenable = enable.get()
+ except pywikibot.NoPage:
+ pywikibot.output(u"%s doesn't esist, I use the page as if it was \
+ blank!" % enable.title())
+ getenable = ''
+ except wikiepedia.IsRedirect:
+ pywikibot.output(u"%s is a redirect, skip!" % enable.title())
+ getenable = ''
+ # If the enable page is set to disable, turn off the bot
+ # (useful when the bot is run on a server)
+ if getenable != 'enable':
+ pywikibot.output('The bot is disabled')
+ return
+ # DisambigPage part
+ if disambigPage is not None:
+ disambigpage = pywikibot.Page(wikiSite, disambigPage)
+ try:
+ disambigtext = disambigpage.get()
+ except pywikibot.NoPage:
+ pywikibot.output(u"%s doesn't esist, skip!" % disambigpage.title())
+ disambigtext = ''
+ except wikiepedia.IsRedirect:
+ pywikibot.output(
+ u"%s is a redirect, don't use it!" % disambigpage.title())
+ disambigPage = None
+ # Main Loop
+ for page in generator:
+ if nwpages is True:
+ # The newpages generator returns a tuple, not a Page object.
+ page = page[0]
+ pywikibot.output(u"Checking %s..." % page.title())
+ if page.isRedirectPage(): # If redirect, skip!
+ pywikibot.output(u'%s is a redirect! Skip...' % page.title())
+ continue
+        # refs is not a list, it's a generator while refsList... is a list, yes.
+ refs = page.getReferences()
+ refsList = list()
+ for j in refs:
+ if j is None:
+ # We have to find out why the function returns that value
+ pywikibot.error(u'1 --> Skip page')
+ continue
+ refsList.append(j)
+ # This isn't possible with a generator
+ if refsList != []:
+ pywikibot.output(u"%s isn't orphan! Skip..." % page.title())
+ continue
+ # Never understood how a list can turn in "None", but it happened :-S
+ elif refsList is None:
+ # We have to find out why the function returns that value
+ pywikibot.error(u'2 --> Skip page')
+ continue
+ else:
+ # no refs, no redirect; check if there's already the template
+ try:
+ oldtxt = page.get()
+ except pywikibot.NoPage:
+ pywikibot.output(u"%s doesn't exist! Skip..." % page.title())
+ continue
+ except pywikibot.IsRedirectPage:
+ pywikibot.output(u"%s is a redirect! Skip..." % page.title())
+ continue
+ # I've used a loop in a loop. If I use continue in the second loop,
+ # it won't do anything in the first. So let's create a variable to
+ # avoid this problem.
+ Find = False
+ for regexp in exception:
+ res = re.findall(regexp, oldtxt.lower())
+ # Found a template! Let's skip the page!
+ if res != []:
+ pywikibot.output(u'Your regex has found something in %s, \
+ skipping...' % page.title())
+ Find = True
+ break
+ # Skip the page..
+ if Find:
+ continue
+ # Is the page a disambig?
+ if page.isDisambig() and disambigPage is not None:
+ pywikibot.output(
+ u'%s is a disambig page, report..' % page.title())
+ if not page.title().lower() in disambigtext.lower():
+ disambigtext = u"%s\n*[[%s]]" % (disambigtext, page.title())
+ disambigpage.put(disambigtext, commentdisambig)
+ continue
+ # Is the page a disambig but there's not disambigPage? Skip!
+ elif page.isDisambig():
+ pywikibot.output(
+ u'%s is a disambig page, skip...' % page.title())
+ continue
+ else:
+ # Ok, the page need the template. Let's put it there!
+ # Adding the template in the text
+ newtxt = u"%s\n%s" % (template, oldtxt)
+ # Showing the title
+ pywikibot.output(u"\t\t>>> %s <<<" % page.title())
+ # Showing the changes
+ pywikibot.showDiff(oldtxt, newtxt)
+ choice = 'y' # Default answer
+ if not always:
+ choice = pywikibot.inputChoice(u'Orphan page found, add \
+ template?', ['Yes', 'No', 'All'], ['y', 'n', 'a'])
+ if choice == 'a':
+ always = True
+ choice = 'y'
+ if choice == 'y':
+ try:
+ page.put(newtxt, comment)
+ except pywikibot.EditConflict:
+ pywikibot.output(u'Edit Conflict! Skip...')
+ continue
+
+if __name__ == '__main__':
+ try:
+ main()
+ finally:
+ pywikibot.stopme()
--
To view, visit https://gerrit.wikimedia.org/r/103106
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ic832222ec22c90842388ab78c2220af15a567802
Gerrit-PatchSet: 16
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Gerrit Patch Uploader <gerritpatchuploader(a)gmail.com>
Gerrit-Reviewer: Gerrit Patch Uploader <gerritpatchuploader(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Strainu <wiki(a)strainu.ro>
Gerrit-Reviewer: jenkins-bot