jenkins-bot has submitted this change and it was merged.
Change subject: [PEP8] changes, remove obsolete imports
......................................................................
[PEP8] changes, remove obsolete imports
Change-Id: Iea4c84c9eebe99ba5d0d722d9e12601bcfb7ff56
---
M imageharvest.py
1 file changed, 35 insertions(+), 26 deletions(-)
Approvals:
Ladsgroup: Looks good to me, approved
jenkins-bot: Verified
diff --git a/imageharvest.py b/imageharvest.py
index 49c8792..7b3203e 100644
--- a/imageharvest.py
+++ b/imageharvest.py
@@ -15,15 +15,19 @@
-shown Choose images shown on the page as well as linked from it
-justshown Choose _only_ images shown on the page, not those linked
"""
+# (C) Pywikibot team, 2004-2013
+#
+# Distributed under the terms of the MIT license.
+#
+__version__ = '$Id$'
+#
-__version__='$Id$'
-
-import re, sys, os
-import wikipedia as pywikibot
-import externals # check for and install needed
+import os
import urllib
+import pywikibot
import BeautifulSoup
import upload
+
def get_imagelinks(url):
"""Given a URL, get all images linked to by the page at that URL."""
@@ -45,35 +49,36 @@
if link:
ext = os.path.splitext(link)[1].lower().strip('.')
if ext in fileformats:
- links.append(urllib.basejoin(url, link))
+ links.append(urllib.basejoin(url, link))
return links
+
def main(give_url, image_url, desc):
url = give_url
if url == '':
if image_url:
- url = pywikibot.input(
- u"What URL range should I check (use $ for the part that is changeable)")
+ url = pywikibot.input(u"What URL range should I check "
+ u"(use $ for the part that is changeable)")
else:
- url = pywikibot.input(
- u"From what URL should I get the images?")
+ url = pywikibot.input(u"From what URL should I get the images?")
if image_url:
- minimum=1
- maximum=99
- answer= pywikibot.input(
+ minimum = 1
+ maximum = 99
+ answer = pywikibot.input(
u"What is the first number to check (default: 1)")
if answer:
- minimum=int(answer)
- answer= pywikibot.input(
+ minimum = int(answer)
+ answer = pywikibot.input(
u"What is the last number to check (default: 99)")
if answer:
- maximum=int(answer)
+ maximum = int(answer)
if not desc:
basicdesc = pywikibot.input(
- u"What text should be added at the end of the description of each image from this url?")
+ u"What text should be added at the end of "
+ u"the description of each image from this url?")
else:
basicdesc = desc
@@ -81,7 +86,7 @@
ilinks = []
i = minimum
while i <= maximum:
- ilinks += [url.replace("$",str(i))]
+ ilinks += [url.replace("$", str(i))]
i += 1
else:
ilinks = get_imagelinks(url)
@@ -94,19 +99,23 @@
desc = pywikibot.input(u"Give the description of this image:")
categories = []
while True:
- cat = pywikibot.input(
- u"Specify a category (or press enter to end adding categories)")
- if not cat.strip(): break
+ cat = pywikibot.input(u"Specify a category (or press enter to "
+ u"end adding categories)")
+ if not cat.strip():
+ break
if ":" in cat:
- categories.append("[["+cat+"]]")
+ categories.append(u"[[%s]]" % cat)
else:
- categories.append("[["+mysite.namespace(14)+":"+cat+"]]")
- desc = desc + "\r\n\r\n" + basicdesc + "\r\n\r\n" + \
- "\r\n".join(categories)
- uploadBot = upload.UploadRobot(image, description = desc)
+ categories.append(u"[[%s:%s]]"
+ % (mysite.namespace(14), cat))
+ desc += "\r\n\r\n" + basicdesc + "\r\n\r\n" + \
+ "\r\n".join(categories)
+ uploadBot = upload.UploadRobot(image, description=desc)
uploadBot.run()
elif answer == 's':
break
+
+
try:
url = u''
image_url = False
--
To view, visit https://gerrit.wikimedia.org/r/103242
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Iea4c84c9eebe99ba5d0d722d9e12601bcfb7ff56
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Andre Engels <andreengels(a)gmail.com>
Gerrit-Reviewer: DrTrigon <dr.trigon(a)surfeu.ch>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: [PEP8] changes
......................................................................
[PEP8] changes
Change-Id: I8d9d934b242c96796d56ecfd61169133dc76d328
---
M imagetransfer.py
M lonelypages.py
2 files changed, 90 insertions(+), 86 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/imagetransfer.py b/imagetransfer.py
index a950d48..f5979d1 100644
--- a/imagetransfer.py
+++ b/imagetransfer.py
@@ -26,34 +26,37 @@
"""
#
# (C) Andre Engels, 2004
-# (C) Pywikipedia bot team, 2004-2012
+# (C) Pywikibot team, 2004-2013
#
# Distributed under the terms of the MIT license.
#
-__version__='$Id$'
+__version__ = '$Id$'
-import re, sys
-import wikipedia as pywikibot
-import upload, config, pagegenerators
+import re
+import sys
+import pywikibot
+import upload
+import config
+import pagegenerators
copy_message = {
- 'ar':u"هذه الصورة تم نقلها من %s. الوصف الأصلي كان:\r\n\r\n%s",
- 'en':u"This image was copied from %s. The original description was:\r\n\r\n%s",
- 'fa':u"تصویر از %s کپی شدهاست.توضیحات اصلی ان این بود::\r\n\r\n%s",
- 'de':u"Dieses Bild wurde von %s kopiert. Die dortige Beschreibung lautete:\r\n\r\n%s",
- 'fr':u"Cette image est copiée de %s. La description originale était:\r\n\r\n%s",
- 'he':u"תמונה זו הועתקה מהאתר %s. תיאור הקובץ המקורי היה:\r\n\r\n%s",
- 'hu':u"Kép másolása innen: %s. Az eredeti leírás:\r\n\r\n%s",
- 'ia':u"Iste imagine esseva copiate de %s. Le description original esseva:\r\n\r\n%s",
- 'it':u"Questa immagine è stata copiata da %s. La descrizione originale era:\r\n\r\n%s",
- 'kk':u"Бұл сурет %s дегеннен көшірілді. Түпнұсқа сипатттамасы былай болды:\r\n\r\n%s",
- 'lt':u"Šis paveikslėlis buvo įkeltas iš %s. Originalus aprašymas buvo:\r\n\r\n%s",
- 'nl':u"Afbeelding gekopieerd vanaf %s. De beschrijving daar was:\r\n\r\n%s",
- 'pl':u"Ten obraz został skopiowany z %s. Oryginalny opis to:\r\n\r\n%s",
- 'pt':u"Esta imagem foi copiada de %s. A descrição original foi:\r\n\r\n%s",
- 'ru':u"Изображение было скопировано с %s. Оригинальное описание содержало:\r\n\r\n%s",
- 'sr':u"Ова слика је копирана са %s. Оригинални опис је:\r\n\r\n%s",
- 'zh':u"本圖像從 %s 複製,原始說明資料:\r\n\r\n%s",
+ 'ar': u"هذه الصورة تم نقلها من %s. الوصف الأصلي كان:\r\n\r\n%s",
+ 'en': u"This image was copied from %s. The original description was:\r\n\r\n%s",
+ 'fa': u"تصویر از %s کپی شدهاست.توضیحات اصلی ان این بود::\r\n\r\n%s",
+ 'de': u"Dieses Bild wurde von %s kopiert. Die dortige Beschreibung lautete:\r\n\r\n%s",
+ 'fr': u"Cette image est copiée de %s. La description originale était:\r\n\r\n%s",
+ 'he': u"תמונה זו הועתקה מהאתר %s. תיאור הקובץ המקורי היה:\r\n\r\n%s",
+ 'hu': u"Kép másolása innen: %s. Az eredeti leírás:\r\n\r\n%s",
+ 'ia': u"Iste imagine esseva copiate de %s. Le description original esseva:\r\n\r\n%s",
+ 'it': u"Questa immagine è stata copiata da %s. La descrizione originale era:\r\n\r\n%s",
+ 'kk': u"Бұл сурет %s дегеннен көшірілді. Түпнұсқа сипатттамасы былай болды:\r\n\r\n%s",
+ 'lt': u"Šis paveikslėlis buvo įkeltas iš %s. Originalus aprašymas buvo:\r\n\r\n%s",
+ 'nl': u"Afbeelding gekopieerd vanaf %s. De beschrijving daar was:\r\n\r\n%s",
+ 'pl': u"Ten obraz został skopiowany z %s. Oryginalny opis to:\r\n\r\n%s",
+ 'pt': u"Esta imagem foi copiada de %s. A descrição original foi:\r\n\r\n%s",
+ 'ru': u"Изображение было скопировано с %s. Оригинальное описание содержало:\r\n\r\n%s",
+ 'sr': u"Ова слика је копирана са %s. Оригинални опис је:\r\n\r\n%s",
+ 'zh': u"本圖像從 %s 複製,原始說明資料:\r\n\r\n%s",
}
nowCommonsTemplate = {
@@ -70,20 +73,13 @@
'kk': u'{{NowCommons|Image:%s}}',
'li': u'{{NowCommons|%s}}',
'lt': u'{{NowCommons|Image:%s}}',
- 'nds-nl' : u'{{NoenCommons|File:%s}}',
+ 'nds-nl': u'{{NoenCommons|File:%s}}',
'nl': u'{{NuCommons|Image:%s}}',
'pl': u'{{NowCommons|%s}}',
'pt': u'{{NowCommons|%s}}',
'sr': u'{{NowCommons|%s}}',
'zh': u'{{NowCommons|Image:%s}}',
}
-
-#nowCommonsThis = {
- #'en': u'{{NowCommonsThis|%s}}',
- #'it': u'{{NowCommons omonima|%s}}',
- #'kk': u'{{NowCommonsThis|%s}}',
- #'pt': u'{{NowCommonsThis|%s}}',
-#}
nowCommonsMessage = {
'ar': u'الملف الآن متوفر في ويكيميديا كومنز.',
@@ -105,15 +101,6 @@
'sr': u'Слика је сада доступна и на Викимедија Остави.',
'zh': u'檔案已存在於維基共享資源。',
}
-
-#nowCommonsThisMessage = {
- #'ar': u'الملف الآن متوفر في كومنز بنفس الاسم.',
- #'en': u'File is now available on Commons with the same name.',
- #'he': u'הקובץ זמין כעת בוויקישיתוף בשם זהה.',
- #'it': u'L\'immagine è adesso disponibile su Wikimedia Commons con lo stesso nome.',
- #'kk': u'Файлды дәл сол атауымен енді Ортаққордан қатынауға болады.',
- #'pt': u'Esta imagem está agora no Commons com o mesmo nome.',
-#}
# Translations for license templates.
# Must only be given when they are in fact different.
@@ -165,6 +152,7 @@
class ImageTransferBot:
+
def __init__(self, generator, targetSite=None, interwiki=False,
keep_name=False):
self.generator = generator
@@ -182,7 +170,7 @@
sourceSite = sourceImagePage.site()
if debug:
print "-" * 50
- print "Found image: %s"% imageTitle
+ print "Found image: %s" % imageTitle
url = sourceImagePage.fileUrl().encode('utf-8')
pywikibot.output(u"URL should be: %s" % url)
# localize the text that should be printed on the image description page
@@ -215,7 +203,7 @@
targetSite=self.targetSite,
urlEncoding=sourceSite.encoding(),
keepFilename=self.keep_name,
- verifyDescription = not self.keep_name)
+ verifyDescription=not self.keep_name)
# try to upload
targetFilename = bot.run()
if targetFilename and self.targetSite.family.name == 'commons' and \
@@ -272,7 +260,7 @@
except pywikibot.NoPage:
break
- print "="*60
+ print "=" * 60
def run(self):
for page in self.generator:
@@ -284,9 +272,9 @@
imagePage = pywikibot.ImagePage(page.site(), page.title())
imagelist = [imagePage]
else:
- imagelist = page.imagelinks(followRedirects = True)
+ imagelist = page.imagelinks(followRedirects=True)
- while len(imagelist)>0:
+ while imagelist:
self.showImageList(imagelist)
if len(imagelist) == 1:
# no need to query the user, only one possibility
@@ -303,7 +291,7 @@
pywikibot.output(
u'The image is already on Wikimedia Commons.')
else:
- self.transferImage(imagelist[todo], debug = False)
+ self.transferImage(imagelist[todo], debug=False)
# remove the selected image from the list
imagelist = imagelist[:todo] + imagelist[todo + 1:]
else:
diff --git a/lonelypages.py b/lonelypages.py
index 3f5675b..859bab8 100644
--- a/lonelypages.py
+++ b/lonelypages.py
@@ -40,14 +40,14 @@
#
# (C) Pietrodn, it.wiki 2006-2007
# (C) Filnik, it.wiki 2007
-# (C) Pywikipedia bot team, 2008-2012
+# (C) Pywikibot team, 2008-2013
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
#
-import wikipedia as pywikibot
+import pywikibot
import pagegenerators
import re
@@ -108,41 +108,52 @@
# ************* Modify only above! ************* #
+
def main():
# Load the configurations in the function namespace
- global commento; global Template; global disambigPage; global commenttodisambig
+ global commento
+ global Template
+ global disambigPage
+ global commenttodisambig
global exception
- enablePage = None # Check if someone set an enablePage or not
- limit = 50000 # All the pages! (I hope that there aren't so many lonely pages in a project..)
- generator = None # Check if the bot should use the default generator or not
- genFactory = pagegenerators.GeneratorFactory() # Load all the default generators!
- nwpages = False # Check variable for newpages
- always = False # Check variable for always
- disambigPage = None # If no disambigPage given, not use it.
+ enablePage = None # Check if someone set an enablePage or not
+ # All the pages! (I hope that there aren't so many lonely pages in a
+ # project..)
+ limit = 50000
+ # Check if the bot should use the default generator or not
+ generator = None
+ # Load all the default generators!
+ genFactory = pagegenerators.GeneratorFactory()
+ nwpages = False # Check variable for newpages
+ always = False # Check variable for always
+ disambigPage = None # If no disambigPage given, not use it.
# Arguments!
for arg in pywikibot.handleArgs():
if arg.startswith('-enable'):
if len(arg) == 7:
- enablePage = pywikibot.input(u'Would you like to check if the bot should run or not?')
+ enablePage = pywikibot.input(
+ u'Would you like to check if the bot should run or not?')
else:
enablePage = arg[8:]
if arg.startswith('-disambig'):
if len(arg) == 9:
- disambigPage = pywikibot.input(u'In which page should the bot save the disambig pages?')
+ disambigPage = pywikibot.input(
+ u'In which page should the bot save the disambig pages?')
else:
disambigPage = arg[10:]
elif arg.startswith('-limit'):
if len(arg) == 6:
- limit = int(pywikibot.input(u'How many pages do you want to check?'))
+ limit = int(pywikibot.input(
+ u'How many pages do you want to check?'))
else:
limit = int(arg[7:])
elif arg.startswith('-newpages'):
if len(arg) == 9:
- nwlimit = 50 # Default: 50 pages
+ nwlimit = 50 # Default: 50 pages
else:
nwlimit = int(arg[10:])
- generator = pywikibot.getSite().newpages(number = nwlimit)
+ generator = pywikibot.getSite().newpages(number=nwlimit)
nwpages = True
elif arg == '-always':
always = True
@@ -156,21 +167,23 @@
# If the generator is not given, use the default one
if not generator:
- generator = wikiSite.lonelypages(repeat = True, number = limit)
+ generator = wikiSite.lonelypages(repeat=True, number=limit)
# Take the configurations according to our project
comment = pywikibot.translate(wikiSite, commento)
commentdisambig = pywikibot.translate(wikiSite, commenttodisambig)
template = pywikibot.translate(wikiSite, Template)
exception = pywikibot.translate(wikiSite, exception)
# EnablePage part
- if enablePage != None:
+ if enablePage:
# Define the Page Object
enable = pywikibot.Page(wikiSite, enablePage)
# Loading the page's data
try:
getenable = enable.get()
except pywikibot.NoPage:
- pywikibot.output(u"%s doesn't esist, I use the page as if it was blank!" % enable.title())
+ pywikibot.output(
+ u"%s doesn't esist, I use the page as if it was blank!"
+ % enable.title())
getenable = ''
except wikiepedia.IsRedirect:
pywikibot.output(u"%s is a redirect, skip!" % enable.title())
@@ -181,7 +194,7 @@
pywikibot.output('The bot is disabled')
return
# DisambigPage part
- if disambigPage != None:
+ if disambigPage is not None:
disambigpage = pywikibot.Page(wikiSite, disambigPage)
try:
disambigtext = disambigpage.get()
@@ -189,24 +202,23 @@
pywikibot.output(u"%s doesn't esist, skip!" % disambigpage.title())
disambigtext = ''
except wikiepedia.IsRedirect:
- pywikibot.output(u"%s is a redirect, don't use it!" % disambigpage.title())
+ pywikibot.output(u"%s is a redirect, don't use it!"
+ % disambigpage.title())
disambigPage = None
# Main Loop
for page in generator:
- if nwpages == True:
- page = page[0] # The newpages generator returns a tuple, not a Page object.
+ if nwpages:
+ # newpages generator returns a tuple, not a Page object.
+ page = page[0]
pywikibot.output(u"Checking %s..." % page.title())
- # Used to skip the first pages in test phase...
- #if page.title()[0] in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q']:
- #continue
- if page.isRedirectPage(): # If redirect, skip!
+ if page.isRedirectPage(): # If redirect, skip!
pywikibot.output(u'%s is a redirect! Skip...' % page.title())
continue
# refs is not a list, it's a generator while resList... is a list, yes.
refs = page.getReferences()
refsList = list()
for j in refs:
- if j == None:
+ if j is None:
# We have to find out why the function returns that value
pywikibot.error(u'1 --> Skip page')
continue
@@ -216,12 +228,13 @@
pywikibot.output(u"%s isn't orphan! Skip..." % page.title())
continue
# Never understood how a list can turn in "None", but it happened :-S
- elif refsList == None:
+ elif refsList is None:
# We have to find out why the function returns that value
pywikibot.error(u'2 --> Skip page')
continue
else:
- # Ok, no refs, no redirect... let's check if there's already the template
+ # Ok, no refs, no redirect...
+ # let's check if there's already the template
try:
oldtxt = page.get()
except pywikibot.NoPage:
@@ -237,29 +250,32 @@
res = re.findall(regexp, oldtxt.lower())
# Found a template! Let's skip the page!
if res != []:
- pywikibot.output(u'Your regex has found something in %s, skipping...' % page.title())
+ pywikibot.output(
+ u'Your regex has found something in %s, skipping...'
+ % page.title())
Find = True
break
- # Skip the page..
if Find:
- continue
- # Is the page a disambig?
- if page.isDisambig() and disambigPage != None:
- pywikibot.output(u'%s is a disambig page, report..' % page.title())
+ continue # Skip the page..
+ if page.isDisambig() and disambigPage:
+ pywikibot.output(u'%s is a disambig page, report..'
+ % page.title())
if not page.title().lower() in disambigtext.lower():
disambigtext = u"%s\n*[[%s]]" % (disambigtext, page.title())
disambigpage.put(disambigtext, commentdisambig)
continue
# Is the page a disambig but there's not disambigPage? Skip!
elif page.isDisambig():
- pywikibot.output(u'%s is a disambig page, skip...' % page.title())
- continue
+ pywikibot.output(u'%s is a disambig page, skip...'
+ % page.title())
+ continue
else:
# Ok, the page need the template. Let's put it there!
- newtxt = u"%s\n%s" % (template, oldtxt) # Adding the template in the text
- pywikibot.output(u"\t\t>>> %s <<<" % page.title()) # Showing the title
- pywikibot.showDiff(oldtxt, newtxt) # Showing the changes
- choice = 'y' # Default answer
+ # Adding the template in the text
+ newtxt = u"%s\n%s" % (template, oldtxt)
+ pywikibot.output(u"\t\t>>> %s <<<" % page.title())
+ pywikibot.showDiff(oldtxt, newtxt)
+ choice = 'y'
if not always:
choice = pywikibot.inputChoice(u'Orphan page found, shall I add the template?', ['Yes', 'No', 'All'], ['y', 'n', 'a'])
if choice == 'a':
--
To view, visit https://gerrit.wikimedia.org/r/103243
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I8d9d934b242c96796d56ecfd61169133dc76d328
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: [BUGFIX] for https://gerrit.wikimedia.org/r/#/c/103220/
......................................................................
[BUGFIX] for https://gerrit.wikimedia.org/r/#/c/103220/
Change-Id: If04790db215e11179585df1e68252a9804c5d891
---
M login.py
1 file changed, 2 insertions(+), 2 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/login.py b/login.py
index f040c8b..ec4c114 100644
--- a/login.py
+++ b/login.py
@@ -107,7 +107,7 @@
u"If you have a sysop account for that site, please "
u"add such a line to user-config.py:\n\n"
u"sysopnames['%(fam)s']['%(code)s'] = 'myUsername'"
- % {'fam': self.site.family.name
+ % {'fam': self.site.family.name,
'code': self.site.lang})
else:
try:
@@ -119,7 +119,7 @@
u"If you have an account for that site, please "
u"add such a line to user-config.py:\n\n"
u"usernames['%(fam)s']['%(code)s'] = 'myUsername'"
- % {'fam': self.site.family.name
+ % {'fam': self.site.family.name,
'code': self.site.lang})
self.password = password
self.verbose = verbose
--
To view, visit https://gerrit.wikimedia.org/r/103232
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: If04790db215e11179585df1e68252a9804c5d891
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: [PEP8] changes
......................................................................
[PEP8] changes
Change-Id: Id72f0267538d42e23b4db7ae1e3f9aea5bcf6611
---
M interwiki.py
1 file changed, 41 insertions(+), 31 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/interwiki.py b/interwiki.py
index a100d5c..0f0e212 100644
--- a/interwiki.py
+++ b/interwiki.py
@@ -666,7 +666,8 @@
index = 1
while True:
path = config.datafilepath('cache', 'pagestore' + str(index))
- if not os.path.exists(path): break
+ if not os.path.exists(path):
+ break
index += 1
StoredPage.SPpath = path
StoredPage.SPstore = shelve.open(path)
@@ -1257,8 +1258,8 @@
"""
# Loop over all the pages that should have been taken care of
for page in self.pending:
- if page.title == None: ### seems a DataPage
- page.get() ### get it's title (and content)
+ if page.title is None: # seems a DataPage
+ page.get() # get it's title (and content)
# Mark the page as done
self.done.add(page)
@@ -1347,7 +1348,7 @@
pywikibot.output(
u"NOTE: not following static %sredirects." % redir)
elif page.site.family == redirectTargetPage.site.family \
- and not self.skipPage(page, redirectTargetPage, counter):
+ and not self.skipPage(page, redirectTargetPage, counter):
if self.addIfNew(redirectTargetPage, counter, page):
if config.interwiki_shownew or pywikibot.verbose:
pywikibot.output(u"%s: %s gives new %sredirect %s"
@@ -1426,21 +1427,26 @@
self.makeForcedStop(counter)
try:
f = codecs.open(
- pywikibot.config.datafilepath('autonomous_problems.dat'),
- 'a', 'utf-8')
+ pywikibot.config.datafilepath(
+ 'autonomous_problems.dat'),
+ 'a', 'utf-8')
f.write(u"* %s {Found more than one link for %s}"
% (self.originPage, page.site))
if config.interwiki_graph and config.interwiki_graph_url:
- filename = interwiki_graph.getFilename(self.originPage, extension = config.interwiki_graph_formats[0])
- f.write(u" [%s%s graph]" % (config.interwiki_graph_url, filename))
+ filename = interwiki_graph.getFilename(
+ self.originPage,
+ extension=config.interwiki_graph_formats[0])
+ f.write(u" [%s%s graph]"
+ % (config.interwiki_graph_url, filename))
f.write("\n")
f.close()
# FIXME: What errors are we catching here?
# except: should be avoided!!
except:
#raise
- pywikibot.output(u'File autonomous_problems.dat open or corrupted! Try again with -restore.')
- sys.exit()
+ pywikibot.output(u'File autonomous_problems.dat open or '
+ u'corrupted! Try again with -restore.')
+ sys.exit()
iw = ()
elif page.isEmpty() and not page.isCategory():
globalvar.remove.append(unicode(page))
@@ -1462,9 +1468,10 @@
if self.addIfNew(linkedPage, counter, page):
# It is new. Also verify whether it is the second on the
# same site
- lpsite=linkedPage.site
+ lpsite = linkedPage.site
for prevPage in self.foundIn:
- if prevPage != linkedPage and prevPage.site == lpsite:
+ if prevPage != linkedPage and \
+ prevPage.site == lpsite:
# Still, this could be "no problem" as either may be a
# redirect to the other. No way to find out quickly!
pywikibot.output(u"NOTE: %s: %s gives duplicate interwiki on same site %s"
@@ -1489,7 +1496,7 @@
"""Return True if all the work for this subject has completed."""
return len(self.todo) == 0
- def problem(self, txt, createneed = True):
+ def problem(self, txt, createneed=True):
"""Report a problem with the resolution of this subject."""
pywikibot.output(u"ERROR: %s" % txt)
self.confirm = True
@@ -1503,7 +1510,6 @@
else:
pywikibot.output(u" "*indent + unicode(page2))
-
def assemble(self):
# No errors have been seen so far, except....
errorCount = self.problemfound
@@ -1512,11 +1518,13 @@
# Each value will be a list of pages.
new = {}
for page in self.done:
- if page.exists() and not page.isRedirectPage() and not page.isCategoryRedirect():
+ if page.exists() and not page.isRedirectPage() and \
+ not page.isCategoryRedirect():
site = page.site
if site.family.interwiki_forward:
- #TODO: allow these cases to be propagated!
- continue # inhibit the forwarding families pages to be updated.
+ # TODO: allow these cases to be propagated!
+ # inhibit the forwarding families pages to be updated.
+ continue
if site == self.originPage.site:
if page != self.originPage:
self.problem(u"Found link to %s" % page)
@@ -1560,7 +1568,7 @@
i += 1
pywikibot.output(u" (%d) Found link to %s in:"
% (i, page2))
- self.whereReport(page2, indent = 8)
+ self.whereReport(page2, indent=8)
while True:
#TODO: allow answer to repeat previous or go back after a mistake
answer = pywikibot.input(u"Which variant should be used? (<number>, [n]one, [g]ive up) ").lower()
@@ -1588,7 +1596,7 @@
pywikibot.output(u"=" * 30)
page2 = pages[0]
pywikibot.output(u"Found link to %s in:" % page2)
- self.whereReport(page2, indent = 4)
+ self.whereReport(page2, indent=4)
while True:
if acceptall:
answer = 'a'
@@ -1627,7 +1635,7 @@
return
if not self.untranslated and globalvar.untranslatedonly:
return
- if self.forcedStop: # autonomous with problem
+ if self.forcedStop: # autonomous with problem
pywikibot.output(u"======Aborted processing %s======"
% self.originPage)
return
@@ -1696,7 +1704,7 @@
(len(modifying) > 0 and self.problemfound) or \
len(old) == 0 or \
(globalvar.needlimit and
- len(adding) + len(modifying) >= globalvar.needlimit + 1):
+ len(adding) + len(modifying) >= globalvar.needlimit + 1):
try:
if self.replaceLinks(new[site], new):
updatedSites.append(site)
@@ -1722,7 +1730,7 @@
if smallWikiAllowed and globalvar.autonomous and \
(page.site.sitename() == 'wikipedia:is' or
page.site.sitename() == 'wikipedia:zh' and
- page.namespace() == 10):
+ page.namespace() == 10):
old = {}
try:
for mypage in new[page.site].interwiki():
@@ -1742,19 +1750,20 @@
if not smallWikiAllowed:
import userlib
user = userlib.User(page.site, page.userName())
- if not 'bot' in user.groups() \
- and not 'bot' in page.userName().lower(): #erstmal auch keine namen mit bot
+ if not ('bot' in user.groups() or
+ # ignore account names containing 'bot' (yet)
+ 'bot' in page.userName().lower()):
smallWikiAllowed = True
else:
_now = datetime.datetime.utcnow()
- _editTime = datetime.datetime.strptime(str(page.editTime()),
- "%Y%m%d%H%M%S")
+ _editTime = datetime.datetime.strptime(
+ str(page.editTime()), "%Y%m%d%H%M%S")
if abs((_now - _editTime).days) > 30:
smallWikiAllowed = True
else:
pywikibot.output(
- u'NOTE: number of edits are restricted at %s'
- % page.site.sitename())
+ u'NOTE: number of edits are restricted at '
+ u'%s' % page.site.sitename())
# if we have an account for this site
if site.family.name in config.usernames and \
@@ -1862,7 +1871,8 @@
pltmp = new[page.site]
if pltmp != page:
s = u"None"
- if pltmp is not None: s = pltmp
+ if pltmp is not None:
+ s = pltmp
pywikibot.output(
u"BUG>>> %s is not in the list of new links! Found %s."
% (page, s))
@@ -1993,7 +2003,7 @@
if answer == 'y':
if not globalvar.quiet or pywikibot.verbose:
pywikibot.output(u"NOTE: Updating live wiki...")
- timeout=60
+ timeout = 60
while True:
try:
if dp:
@@ -2021,7 +2031,7 @@
pywikibot.error(u'putting page: %s' % (error.args,))
raise SaveError(u'PageNotSaved')
except (socket.error, IOError), error:
- if timeout>3600:
+ if timeout > 3600:
raise
pywikibot.error(u'putting page: %s' % (error.args,))
pywikibot.output(u'Sleeping %i seconds before trying again.'
--
To view, visit https://gerrit.wikimedia.org/r/103225
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Id72f0267538d42e23b4db7ae1e3f9aea5bcf6611
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: [PEP8] changes, code improvements
......................................................................
[PEP8] changes, code improvements
- use format dictionary for raising exception message
instead of duplicate the format tuple in LoginManager.__init__()
Change-Id: I82a42f5d9d744c20bee0decdf056e640c4560f67
---
M login.py
1 file changed, 98 insertions(+), 52 deletions(-)
Approvals:
Alex S.H. Lin: Verified; Looks good to me, approved
jenkins-bot: Verified
diff --git a/login.py b/login.py
index 7a812e5..f040c8b 100644
--- a/login.py
+++ b/login.py
@@ -49,13 +49,15 @@
"""
#
# (C) Rob W.W. Hooft, 2003
-# (C) Pywikipedia bot team, 2003-2011
+# (C) Pywikipedia bot team, 2003-2013
#
# Distributed under the terms of the MIT license.
#
-__version__='$Id$'
+__version__ = '$Id$'
-import re, os, query
+import re
+import os
+import query
import urllib2
import wikipedia as pywikibot
import config
@@ -74,12 +76,14 @@
}
}
-def show (mysite, sysop = False):
- if mysite.loggedInAs(sysop = sysop):
+
+def show(mysite, sysop=False):
+ if mysite.loggedInAs(sysop=sysop):
pywikibot.output(u"You are logged in on %s as %s."
% (repr(mysite), mysite.loggedInAs(sysop=sysop)))
else:
pywikibot.output(u"You are not logged in on %s." % repr(mysite))
+
class LoginManager:
def __init__(self, password=None, sysop=False, site=None, username=None,
@@ -90,20 +94,33 @@
self.username = username
# perform writeback.
if site.family.name not in config.usernames:
- config.usernames[site.family.name]={}
- config.usernames[site.family.name][self.site.lang]=username
+ config.usernames[site.family.name] = {}
+ config.usernames[site.family.name][self.site.lang] = username
else:
if sysop:
try:
- self.username = config.sysopnames\
- [self.site.family.name][self.site.lang]
+ self.username = config.sysopnames[
+ self.site.family.name][self.site.lang]
except:
- raise pywikibot.NoUsername(u'ERROR: Sysop username for %s:%s is undefined.\nIf you have a sysop account for that site, please add such a line to user-config.py:\n\nsysopnames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
+ raise pywikibot.NoUsername(
+ u"ERROR: Sysop username for %(fam)s:%(code)s is undefined.\n"
+ u"If you have a sysop account for that site, please "
+ u"add such a line to user-config.py:\n\n"
+ u"sysopnames['%(fam)s']['%(code)s'] = 'myUsername'"
+ % {'fam': self.site.family.name
+ 'code': self.site.lang})
else:
try:
- self.username = config.usernames[self.site.family.name][self.site.lang]
+ self.username = config.usernames[
+ self.site.family.name][self.site.lang]
except:
- raise pywikibot.NoUsername(u'ERROR: Username for %s:%s is undefined.\nIf you have an account for that site, please add such a line to user-config.py:\n\nusernames[\'%s\'][\'%s\'] = \'myUsername\'' % (self.site.family.name, self.site.lang, self.site.family.name, self.site.lang))
+ raise pywikibot.NoUsername(
+ u"ERROR: Username for %(fam)s:%(code)s is undefined.\n"
+ u"If you have an account for that site, please "
+ u"add such a line to user-config.py:\n\n"
+ u"usernames['%(fam)s']['%(code)s'] = 'myUsername'"
+ % {'fam': self.site.family.name
+ 'code': self.site.lang})
self.password = password
self.verbose = verbose
if getattr(config, 'password_file', ''):
@@ -113,10 +130,12 @@
"""
Checks whether the bot is listed on a specific page to comply with
the policy on the respective wiki.
+
"""
if self.site.family.name in botList \
and self.site.language() in botList[self.site.family.name]:
- botListPageTitle, botTemplate = botList[self.site.family.name][self.site.language()]
+ botListPageTitle, botTemplate = botList[
+ self.site.family.name][self.site.language()]
botListPage = pywikibot.Page(self.site, botListPageTitle)
if botTemplate:
for template in botListPage.templatesWithParams():
@@ -132,7 +151,7 @@
# No bot policies on other sites
return True
- def getCookie(self, api=config.use_api_login, remember=True, captcha = None):
+ def getCookie(self, api=config.use_api_login, remember=True, captcha=None):
"""
Login to the site.
@@ -140,6 +159,7 @@
captchaId A dictionary containing the captcha id and answer, if any
Returns cookie data if succesful, None otherwise.
+
"""
if api:
predata = {
@@ -154,7 +174,8 @@
predata = {
"wpName": self.username.encode(self.site.encoding()),
"wpPassword": self.password,
- "wpLoginattempt": "Aanmelden & Inschrijven", # dutch button label seems to work for all wikis
+ # dutch button label seems to work for all wikis
+ "wpLoginattempt": "Aanmelden & Inschrijven",
"wpRemember": str(int(bool(remember))),
"wpSkipCookieCheck": '1'
}
@@ -178,13 +199,16 @@
else:
# clean type login, setup the new cookies files.
self.site._setupCookies(L, self.sysop)
- response, data = query.GetData(predata, self.site, sysop=self.sysop, back_response = True)
+ response, data = query.GetData(predata, self.site,
+ sysop=self.sysop,
+ back_response=True)
result = data['login']['result']
if result == "NeedToken":
predata["lgtoken"] = data["login"]["token"]
                if 'lgtoken' in data['login'].keys():
self.site._userName[index] = data['login']['lgusername']
- self.site._token[index] = data['login']['lgtoken'] + "+\\"
+ self.site._token[index] = data[
+ 'login']['lgtoken'] + "+\\"
continue
break
if result != "Success":
@@ -202,14 +226,16 @@
fakepredata['wpPassword'] = u'XXXXX'
pywikibot.output(u"self.site.postData(%s, %s)" % (address, self.site.urlEncode(fakepredata)))
trans = config.transliterate
- config.transliterate = False #transliteration breaks for some reason
+ # transliteration breaks for some reason
+ config.transliterate = False
#pywikibot.output(fakedata.decode(self.site.encoding()))
config.transliterate = trans
- fakeresponsemsg = re.sub(r"(session|Token)=..........", r"session=XXXXXXXXXX", data)
- pywikibot.output(u"%s/%s\n%s" % (response.code, response.msg, fakeresponsemsg))
- #pywikibot.cj.save(pywikibot.COOKIEFILE)
+ fakeresponsemsg = re.sub(r"(session|Token)=..........",
+ r"session=XXXXXXXXXX", data)
+ pywikibot.output(u"%s/%s\n%s" % (response.code, response.msg,
+ fakeresponsemsg))
- Reat=re.compile(': (.*?)=(.*?);')
+ Reat = re.compile(': (.*?)=(.*?);')
L = {}
if hasattr(response, 'sheaders'):
@@ -249,7 +275,7 @@
elif not captcha:
solve = self.site.solveCaptcha(data)
if solve:
- return self.getCookie(api = api, remember = remember, captcha = solve)
+ return self.getCookie(api=api, remember=remember, captcha=solve)
return None
def storecookiedata(self, filename, data):
@@ -263,7 +289,7 @@
s = u''
for v, k in data.iteritems():
s += "%s=%s\n" % (v, k)
- f = open(pywikibot.config.datafilepath('login-data',filename), 'w')
+ f = open(pywikibot.config.datafilepath('login-data', filename), 'w')
f.write(s)
f.close()
@@ -284,28 +310,31 @@
("my_sysop_user", "my_sysop_password")
("en", "wikipedia", "my_en_user", "my_en_pass")
"""
- password_f = open(pywikibot.config.datafilepath(config.password_file), 'r')
+ password_f = open(pywikibot.config.datafilepath(config.password_file),
+ 'r')
for line in password_f:
- if not line.strip(): continue
+ if not line.strip():
+ continue
entry = eval(line)
- if len(entry) == 2: #for default userinfo
- if entry[0] == self.username: self.password = entry[1]
- elif len(entry) == 4: #for userinfo included code and family
+ if len(entry) == 2: # for default userinfo
+ if entry[0] == self.username:
+ self.password = entry[1]
+ elif len(entry) == 4: # for userinfo included code and family
if entry[0] == self.site.lang and \
- entry[1] == self.site.family.name and \
- entry[2] == self.username:
+ entry[1] == self.site.family.name and \
+ entry[2] == self.username:
self.password = entry[3]
password_f.close()
- def login(self, api=config.use_api_login, retry = False):
+ def login(self, api=config.use_api_login, retry=False):
if not self.password:
# As we don't want the password to appear on the screen, we set
# password = True
self.password = pywikibot.input(
- u'Password for user %(name)s on %(site)s\n' \
- u'No characters will be shown:'
- % {'name': self.username, 'site': self.site},
- password = True)
+ u'Password for user %(name)s on %(site)s\n'
+ u'No characters will be shown:'
+ % {'name': self.username, 'site': self.site},
+ password=True)
self.password = self.password.encode(self.site.encoding())
@@ -319,21 +348,29 @@
try:
cookiedata = self.getCookie(api)
except NotImplementedError:
- pywikibot.output('API disabled because this site does not support.\nRetrying by ordinary way...')
+            pywikibot.output(
+                'API disabled because this site does not support it.\n'
+                'Retrying the ordinary way...')
api = False
return self.login(False, retry)
if cookiedata:
- fn = '%s-%s-%s-login.data' % (self.site.family.name, self.site.lang, self.username)
+ fn = '%s-%s-%s-login.data' % (self.site.family.name,
+ self.site.lang, self.username)
#self.storecookiedata(fn,cookiedata)
pywikibot.output(u"Should be logged in now")
# Show a warning according to the local bot policy
if not self.botAllowed():
- pywikibot.output(u'*** Your username is not listed on [[%s]].\n*** Please make sure you are allowed to use the robot before actually using it!' % botList[self.site.family.name][self.site.lang])
+ pywikibot.output(
+ u'*** Your username is not listed on [[%s]].\n'
+                u'*** Please make sure you are allowed to use '
+                u'the robot before actually using it!'
+ % botList[self.site.family.name][self.site.lang])
return True
else:
pywikibot.output(u"Login failed. Wrong password or CAPTCHA answer?")
if api:
- pywikibot.output(u"API login failed, retrying using standard webpage.")
+ pywikibot.output(
+ u"API login failed, retrying using standard webpage.")
return self.login(False, retry)
if retry:
@@ -342,14 +379,15 @@
else:
return False
- def logout(self, api = config.use_api):
+ def logout(self, api=config.use_api):
flushCk = False
if api and self.site.versionnumber() >= 12:
- if query.GetData({'action':'logout'}, self.site) == []:
+ if query.GetData({'action': 'logout'}, self.site) == []:
flushCk = True
else:
text = self.site.getUrl(self.site.get_address("Special:UserLogout"))
- if self.site.mediawiki_message('logouttext') in text: #confirm loggedout
+ # confirm loggedout
+ if self.site.mediawiki_message('logouttext') in text:
flushCk = True
if flushCk:
@@ -360,6 +398,7 @@
def showCaptchaWindow(self, url):
pass
+
def main():
username = password = None
@@ -373,8 +412,9 @@
for arg in pywikibot.handleArgs():
if arg.startswith("-pass"):
if len(arg) == 5:
- password = pywikibot.input(u'Password for all accounts ' \
- u'(no characters will be shown):', password = True)
+ password = pywikibot.input(u'Password for all accounts '
+ u'(no characters will be shown):',
+ password=True)
else:
password = arg[6:]
elif arg == "-clean":
@@ -392,8 +432,10 @@
return
if pywikibot.verbose > 1:
- pywikibot.warning(u"Using -v -v on login.py might leak private data. When sharing, please double check your password is not readable and log out your bots session.")
- verbose = True # only use this verbose when running from login.py
+ pywikibot.warning(u"""
+Using -v -v on login.py might leak private data. When sharing, please double
+check your password is not readable and log out your bots session.""")
+ verbose = True # only use this verbose when running from login.py
if logall:
if sysop:
namedict = config.sysopnames
@@ -412,26 +454,30 @@
if clean:
loginMan.logout()
else:
- if not forceLogin and site.loggedInAs(sysop = sysop):
- pywikibot.output(u'Already logged in on %s' % site)
+ if not forceLogin and site.loggedInAs(sysop=sysop):
+ pywikibot.output(u'Already logged in on %s'
+ % site)
else:
loginMan.login()
except pywikibot.NoSuchSite:
- pywikibot.output(lang+ u'.' + familyName + u' is not a valid site, please remove it from your config')
+ pywikibot.output(lang + u'.' + familyName +
+ u' is not a valid site, please remove '
+ u'it from your config')
elif testonly:
show(pywikibot.getSite(), sysop)
elif clean:
try:
site = pywikibot.getSite()
- lgm = LoginManager(site = site)
+ lgm = LoginManager(site=site)
lgm.logout()
except pywikibot.NoSuchSite:
pass
else:
- loginMan = LoginManager(password, sysop = sysop, verbose=verbose)
+ loginMan = LoginManager(password, sysop=sysop, verbose=verbose)
loginMan.login()
+
if __name__ == "__main__":
try:
main()
--
To view, visit https://gerrit.wikimedia.org/r/103220
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I82a42f5d9d744c20bee0decdf056e640c4560f67
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Alex S.H. Lin <alexsh(a)mail2000.com.tw>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Alex S.H. Lin has submitted this change and it was merged.
Change subject: [PEP8] changes, code improvements
......................................................................
[PEP8] changes, code improvements
Change-Id: I6aef8bebee7b76cb3684a1ee95c0ea151ff9d852
---
M statistics_in_wikitable.py
1 file changed, 74 insertions(+), 41 deletions(-)
Approvals:
Alex S.H. Lin: Verified; Looks good to me, approved
diff --git a/statistics_in_wikitable.py b/statistics_in_wikitable.py
index 2855b5c..e2604c8 100644
--- a/statistics_in_wikitable.py
+++ b/statistics_in_wikitable.py
@@ -12,25 +12,34 @@
If not existing yet, it is created.
If existing, it is updated.
"""
+#
+#
+# (C) Pywikibot team, 2009-2013
+#
+# Distributed under the terms of the MIT license.
__version__ = '$Id$'
#
+
import time
-import wikipedia as pywikibot
-import pagegenerators, query
+import pywikibot
+import pagegenerators
+import query
# This is the title of the wikipage where to render stats.
your_page = "Logstats"
summary_update = {
- 'en':u'Updating some statistics.',
- }
+ 'en': u'Bot: Updating some statistics.',
+}
+
summary_creation = {
- 'en':u'Creating statistics log page.',
- }
+ 'en': u'Bot: Creating statistics log page.',
+}
class StatisticsBot:
- def __init__ (self, screen, your_page):
+
+ def __init__(self, screen, your_page):
"""
Constructor. Parameter:
* screen - If True, doesn't do any real changes, but only shows
@@ -39,80 +48,101 @@
self.screen = screen
self.your_page = your_page
self.site = pywikibot.getSite()
- self.dict = self.getdata() # Try to get data.
+ self.dict = self.getdata() # Try to get data.
def run(self):
if self.screen:
pywikibot.output("Bot is running to output stats.")
- self.idle(1) # Run a function to idle
+ self.idle(1) # Run a function to idle
self.outputall()
if not self.screen:
- self.outputall() # Output all datas on screen.
- pywikibot.output("\nBot is running. Going to treat \03{lightpurple}%s\03{default}..." % self.your_page )
+            self.outputall()  # Output all data on screen.
+ pywikibot.output("\nBot is running. Going to treat "
+ "\03{lightpurple}%s\03{default}..."
+ % self.your_page)
self.idle(2)
self.treat()
- def getdata(self): # getdata() returns a dictionnary of the query to api.php?action=query&meta=siteinfo&siprop=statistics
+ def getdata(self):
+        # getdata() returns a dictionary of the query to
+        # api.php?action=query&meta=siteinfo&siprop=statistics
         # This method returns data in a dictionary format.
- # View data with: api.php?action=query&meta=siteinfo&siprop=statistics&format=jsonfm
+ # View data with:
+ # api.php?action=query&meta=siteinfo&siprop=statistics&format=jsonfm
params = {
- 'action' :'query',
- 'meta' :'siteinfo',
- 'siprop' :'statistics',
+ 'action': 'query',
+ 'meta': 'siteinfo',
+ 'siprop': 'statistics',
}
pywikibot.output("\nQuerying api for json-formatted data...")
+ data = None
try:
- data = query.GetData(params,self.site)
+ data = query.GetData(params, self.site)
except:
- url = self.site.protocol() + '://' + self.site.hostname() + self.site.api_address()
- pywikibot.output("The query has failed. Have you check the API? Cookies are working?")
+ url = (self.site.protocol() + '://' + self.site.hostname() +
+ self.site.api_address())
+            pywikibot.output("The query has failed. Have you checked the "
+                             "API? Are cookies working?")
pywikibot.output(u"\n>> \03{lightpurple}%s\03{default} <<" % url)
- if data != None:
+ if data:
pywikibot.output("Extracting statistics...")
- data = data['query'] # "query" entry of data.
- dict = data['statistics'] # "statistics" entry of "query" dict.
- return dict
+ return data['query']['statistics']
def treat(self):
page = pywikibot.Page(self.site, self.your_page)
if page.exists():
- pywikibot.output(u'\nWikitable on \03{lightpurple}%s\03{default} will be completed with:\n' % self.your_page )
+ pywikibot.output(
+ u'\nWikitable on \03{lightpurple}%s\03{default} will be '
+ u'completed with:\n' % self.your_page)
text = page.get()
newtext = self.newraw()
pywikibot.output(newtext)
- choice = pywikibot.inputChoice(u'Do you want to add these on wikitable?', ['Yes', 'No'], ['y', 'N'], 'N')
+ choice = pywikibot.inputChoice(
+ u'Do you want to add these on wikitable?',
+ ['Yes', 'No'], ['y', 'N'], 'N')
text = text[:-3] + newtext
summ = pywikibot.translate(self.site, summary_update)
if choice == 'y':
try:
page.put(u''.join(text), summ)
except:
- pywikibot.output(u'Impossible to edit. It may be an edit conflict... Skipping...')
+ pywikibot.output(u'Impossible to edit. It may be an '
+ u'edit conflict. Skipping...')
else:
- pywikibot.output(u'\nWikitable on \03{lightpurple}%s\03{default} will be created with:\n' % self.your_page )
+ pywikibot.output(
+ u'\nWikitable on \03{lightpurple}%s\03{default} will be '
+ u'created with:\n' % self.your_page)
newtext = self.newtable()+self.newraw()
pywikibot.output(newtext)
summ = pywikibot.translate(self.site, summary_creation)
- choice = pywikibot.inputChoice(u'Do you want to accept this page creation?', ['Yes', 'No'], ['y', 'N'], 'N')
+ choice = pywikibot.inputChoice(
+ u'Do you want to accept this page creation?',
+ ['Yes', 'No'], ['y', 'N'], 'N')
if choice == 'y':
try:
page.put(newtext, summ)
except pywikibot.LockedPage:
- pywikibot.output(u"Page %s is locked; skipping." % title)
+ pywikibot.output(u"Page %s is locked; skipping."
+                                     % self.your_page)
except pywikibot.EditConflict:
- pywikibot.output(u'Skipping %s because of edit conflict' % title)
+ pywikibot.output(u'Skipping %s because of edit conflict'
+                                     % self.your_page)
except pywikibot.SpamfilterError, error:
- pywikibot.output(u'Cannot change %s because of spam blacklist entry %s' % (title, error.url))
+ pywikibot.output(u'Cannot change %s because of spam '
+ u'blacklist entry %s'
+                                     % (self.your_page, error.url))
def newraw(self):
- newtext = ('\n|----\n!\'\''+ self.date() +'\'\'') # new raw for date and stats
+ # new raw for date and stats
+ newtext = ("\n|----\n!''" + self.date() + "''")
for name in self.dict:
- newtext += '\n|'+str(abs(self.dict[name]))
+ newtext += '\n|' + str(abs(self.dict[name]))
newtext += '\n|----\n|}'
return newtext
def newtable(self):
- newtext = ('\n{| class=wikitable style=text-align:center\n!'+ "date") # create table
+ # create table
+ newtext = ('\n{| class=wikitable style=text-align:center\n!' + "date")
for name in self.dict:
newtext += '\n|'+name
return newtext
@@ -124,22 +154,24 @@
list = self.dict.keys()
list.sort()
for name in self.dict:
- pywikibot.output("There are "+str(self.dict[name])+" "+name)
+ pywikibot.output("There are " + str(self.dict[name]) + " " + name)
def idle(self, retry_idle_time):
time.sleep(retry_idle_time)
pywikibot.output(u"Starting in %i second..." % retry_idle_time)
time.sleep(retry_idle_time)
+
def main(your_page):
- screen = False # If True it would not edit the wiki, only output statistics
+ screen = False # If True do not edit the wiki, only output statistics
_page = None
pywikibot.output("\nBuilding the bot...")
- for arg in pywikibot.handleArgs(): # Parse command line arguments
+ for arg in pywikibot.handleArgs(): # Parse command line arguments
if arg.startswith('-page'):
if len(arg) == 5:
- _page = pywikibot.input(u'On what page do you want to add statistics?')
+ _page = pywikibot.input(
+ u'On what page do you want to add statistics?')
else:
_page = arg[6:]
if arg.startswith("-screen"):
@@ -147,9 +179,10 @@
if not _page:
_page = your_page
if not screen:
- pywikibot.output("The bot will add statistics on %s.\n" % _page )
- bot = StatisticsBot(screen, _page) # Launch the instance of a StatisticsBot
- bot.run() # Execute the 'run' method
+ pywikibot.output("The bot will add statistics on %s.\n" % _page)
+ bot = StatisticsBot(screen, _page) # Launch the instance of a StatisticsBot
+ bot.run() # Execute the 'run' method
+
if __name__ == "__main__":
try:
--
To view, visit https://gerrit.wikimedia.org/r/103213
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I6aef8bebee7b76cb3684a1ee95c0ea151ff9d852
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Alex S.H. Lin <alexsh(a)mail2000.com.tw>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Siebrand <siebrand(a)wikimedia.org>
Gerrit-Reviewer: jenkins-bot