jenkins-bot has submitted this change and it was merged.
Change subject: [PEP8] changes
......................................................................
[PEP8] changes
Change-Id: Icf144501fa5939ccb12ae7278929175551573248
---
M extract_wikilinks.py
M fixing_redirects.py
M followlive.py
M get.py
4 files changed, 23 insertions(+), 19 deletions(-)
Approvals:
  Xqt: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/extract_wikilinks.py b/extract_wikilinks.py index 6f5e7d1..601cea2 100644 --- a/extract_wikilinks.py +++ b/extract_wikilinks.py @@ -19,9 +19,10 @@ # # Distributed under the terms of the MIT license. # -__version__='$Id$' +__version__ = '$Id$' # -import sys,re +import sys +import re import codecs import wikipedia as pywikibot # This bot does not contact the Wiki, so no need to get it on the list @@ -38,7 +39,7 @@ elif arg.startswith("-bare"): complete = False elif fn: - print "Ignoring argument %s"%arg + print "Ignoring argument %s" % arg else: fn = arg
@@ -47,14 +48,14 @@ sys.exit(1)
mysite = pywikibot.getSite() -f=open(fn,'r') -text=f.read() +f = open(fn, 'r') +text = f.read() f.close() for hit in R.findall(text): if complete: list.append(mysite.linkto(hit)) else: - list.append("[[%s]]"%hit) + list.append("[[%s]]" % hit) if sorted: list.sort() for page in list: diff --git a/fixing_redirects.py b/fixing_redirects.py index 49627dd..d35badf 100644 --- a/fixing_redirects.py +++ b/fixing_redirects.py @@ -20,9 +20,10 @@ # # Distributed under the terms of the MIT license. # -__version__='$Id$' +__version__ = '$Id$' # -import re, sys +import re +import sys import wikipedia as pywikibot import pagegenerators from pywikibot import i18n @@ -72,7 +73,7 @@ curpos = 0 # This loop will run until we have finished the current page while True: - m = linkR.search(text, pos = curpos) + m = linkR.search(text, pos=curpos) if not m: break # Make sure that next time around we will not find this same hit. @@ -102,7 +103,7 @@ if not link_text: # or like this: [[page_title]]trailing_chars link_text = page_title - if m.group('section') == None: + if m.group('section') is None: section = '' else: section = m.group('section') @@ -117,16 +118,16 @@ replaceit = choice in "rR"
# remove preleading ":" - if link_text[0]==':': + if link_text[0] == ':': link_text = link_text[1:] if link_text[0].isupper(): new_page_title = targetPage.title() else: new_page_title = targetPage.title()[0].lower() + \ - targetPage.title()[1:] + targetPage.title()[1:]
# remove preleading ":" - if new_page_title[0]==':': + if new_page_title[0] == ':': new_page_title = new_page_title[1:]
if replaceit and trailing_chars: @@ -149,6 +150,7 @@
pageCache = []
+ def workon(page): mysite = pywikibot.getSite() try: @@ -163,7 +165,7 @@ % page.title()) links = page.linkedPages() if len(links): - pywikibot.getall(mysite,links) + pywikibot.getall(mysite, links) else: pywikibot.output('Nothing left to do.') return diff --git a/followlive.py b/followlive.py index a8987a2..ba7a822 100644 --- a/followlive.py +++ b/followlive.py @@ -13,10 +13,11 @@ # # Distributed under the terms of the MIT license. # -__version__='$Id$' +__version__ = '$Id$'
import sys -import datetime, time +import datetime +import time import traceback import wikipedia as pywikibot import editarticle diff --git a/get.py b/get.py index 82343c6..4b97cdf 100644 --- a/get.py +++ b/get.py @@ -13,9 +13,10 @@ # # Distributed under the terms of the MIT license.
-__version__='$Id$' +__version__ = '$Id$'
import wikipedia as pywikibot +
def main(): singlePageTitleParts = [] @@ -26,11 +27,10 @@ page = pywikibot.Page(pywikibot.getSite(), pageTitle)
# TODO: catch exceptions - pywikibot.output(page.get(), toStdout = True) + pywikibot.output(page.get(), toStdout=True)
if __name__ == "__main__": try: main() finally: pywikibot.stopme() -