Xqt has submitted this change and it was merged.
Change subject: [PEP8] changes ......................................................................
[PEP8] changes
Change-Id: Ie759c4d8253e270e8ad7071a23a4eff4dd56f6ed --- M misspelling.py M ndashredir.py M nowcommons.py M statistics_in_wikitable.py 4 files changed, 75 insertions(+), 56 deletions(-)
Approvals: Xqt: Looks good to me, approved
diff --git a/misspelling.py b/misspelling.py index 8c891c3..287b4b4 100644 --- a/misspelling.py +++ b/misspelling.py @@ -20,14 +20,16 @@ -main only check pages in the main namespace, not in the talk, wikipedia, user, etc. namespaces. """ -__version__ = '$Id$'
# (C) Daniel Herding, 2007 -# (C) Pywikipedia bot team 2007-2013 +# (C) Pywikibot team, 2007-2013 # # Distributed under the terms of the MIT license. +# +__version__ = '$Id$' +#
-import wikipedia as pywikibot +import pywikibot import catlib import pagegenerators as pg import solve_disambiguation @@ -73,7 +75,8 @@
def createPageGenerator(self, firstPageTitle): if pywikibot.getSite().lang in self.misspellingCategory: - misspellingCategoryTitle = self.misspellingCategory[pywikibot.getSite().lang] + misspellingCategoryTitle = self.misspellingCategory[ + pywikibot.getSite().lang] misspellingCategory = catlib.Category(pywikibot.getSite(), misspellingCategoryTitle) generator = pg.CategorizedPageGenerator(misspellingCategory, @@ -81,14 +84,16 @@ start=firstPageTitle) else: misspellingTemplateName = 'Template:%s' \ - % self.misspellingTemplate[pywikibot.getSite().lang] + % self.misspellingTemplate[ + pywikibot.getSite().lang] misspellingTemplate = pywikibot.Page(pywikibot.getSite(), misspellingTemplateName) generator = pg.ReferringPageGenerator(misspellingTemplate, onlyTemplateInclusion=True) if firstPageTitle: pywikibot.output( - u'-start parameter unsupported on this wiki because there is no category for misspellings.') + u'-start parameter unsupported on this wiki because there ' + u'is no category for misspellings.') preloadingGen = pg.PreloadingGenerator(generator) return preloadingGen
@@ -119,8 +124,8 @@ dn=False): # TODO: setSummaryMessage() in solve_disambiguation now has parameters # new_targets and unlink. Make use of these here. - comment = pywikibot.translate(self.mysite, self.msg) \ - % disambPage.title() + comment = pywikibot.translate( + self.mysite, self.msg) % disambPage.title() pywikibot.setAction(comment)
@@ -146,6 +151,7 @@ bot = MisspellingRobot(always, firstPageTitle, main_only) bot.run()
+ if __name__ == "__main__": try: main() diff --git a/ndashredir.py b/ndashredir.py index 61759ea..4ffed75 100644 --- a/ndashredir.py +++ b/ndashredir.py @@ -40,31 +40,35 @@ """
# -# (C) Bináris, 2012 +# (c) Bináris, 2012 +# (c) pywikibot team, 2012-2013 # # Distributed under the terms of the MIT license. # -__version__='$Id$' +__version__ = '$Id$' +#
-import codecs, re -import wikipedia as pywikibot +import codecs +import re +import pywikibot from pagegenerators import RegexFilterPageGenerator as RPG from pywikibot import i18n
+ def main(*args): - regex = ur'.*[–—]' # Alt 0150 (n dash), alt 0151 (m dash), respectively. + regex = ur'.*[–—]' # Alt 0150 (n dash), alt 0151 (m dash), respectively. ns = 0 start = '!' - filename = None # The name of the file to save titles - titlefile = None # The file object itself - ignorefilename = None # The name of the ignore file - ignorelist = [] # A list to ignore titles that redirect to somewhere else + filename = None # The name of the file to save titles + titlefile = None # The file object itself + ignorefilename = None # The name of the ignore file + ignorelist = [] # A list to ignore titles that redirect to somewhere else
# Handling parameters: for arg in pywikibot.handleArgs(*args): if arg == '-start': start = pywikibot.input( - u'From which title do you want to continue?') + u'From which title do you want to continue?') elif arg.startswith('-start:'): start = arg[7:] elif arg in ['-ns', '-namespace']: @@ -98,42 +102,42 @@ ignorelist = re.findall(ur'\[\[:?(.*?)\]\]', igfile.read()) igfile.close() except IOError: - pywikibot.output("%s cannot be opened for reading." % ignorefilename) + pywikibot.output("%s cannot be opened for reading." + % ignorefilename) return
# Ready to initialize site = pywikibot.getSite() redirword = site.redirect() gen = RPG(site.allpages( - start=start, namespace=ns, includeredirects=False), [regex]) + start=start, namespace=ns, includeredirects=False), [regex])
# Processing: for page in gen: title = page.title() editSummary = i18n.twtranslate(site, 'ndashredir-create', {'title': title}) - newtitle = title.replace(u'–','-').replace(u'—','-') + newtitle = title.replace(u'–', '-').replace(u'—', '-') # n dash -> hyphen, m dash -> hyphen, respectively redirpage = pywikibot.Page(site, newtitle) if redirpage.exists(): if redirpage.isRedirectPage() and \ - redirpage.getRedirectTarget() == page: - pywikibot.output( - u'[[%s]] already redirects to [[%s]], nothing to do with it.' - % (newtitle, title)) + redirpage.getRedirectTarget() == page: + pywikibot.output(u'[[%s]] already redirects to [[%s]], nothing ' + u'to do with it.' % (newtitle, title)) elif newtitle in ignorelist: pywikibot.output( u'Skipping [[%s]] because it is on your ignore list.' % newtitle) else: pywikibot.output( - (u'\03{lightyellow}Skipping [[%s]] because it exists ' - u'already with a different content.\03{default}') + u'\03{lightyellow}Skipping [[%s]] because it exists ' + u'already with a different content.\03{default}' % newtitle) if titlefile: - s = u'\n#%s does not redirect to %s.' %\ - (redirpage.title(asLink=True, textlink=True), - page.title(asLink=True, textlink=True)) + s = u'\n#%s does not redirect to %s.' % ( + redirpage.title(asLink=True, textlink=True), + page.title(asLink=True, textlink=True)) # For the unlikely case if someone wants to run it in # file namespace. titlefile.write(s) @@ -154,7 +158,8 @@ # RegexFilterPageGenerator or throttle.py or anything else and cannot # be catched in this loop.) if titlefile: - titlefile.close() # For the spirit of programming (it was flushed) + titlefile.close() # For the spirit of programming (it was flushed) +
if __name__ == "__main__": try: diff --git a/nowcommons.py b/nowcommons.py index fe54bad..e06a0c2 100644 --- a/nowcommons.py +++ b/nowcommons.py @@ -48,16 +48,19 @@ # # (C) Wikipedian, 2006-2007 # (C) Siebrand Mazeland, 2007-2008 -# (C) xqt, 2010-2012 -# (C) Pywikipedia bot team, 2006-2013 +# (C) xqt, 2010-2013 +# (C) Pywikibot team, 2006-2013 # # Distributed under the terms of the MIT license. # __version__ = '$Id$' #
-import sys, re, webbrowser, urllib -import wikipedia as pywikibot +import sys +import re +import webbrowser +import urllib +import pywikibot import pagegenerators as pg import image # only for nowCommonsMessage @@ -119,7 +122,7 @@ 'it': [ u'NowCommons', ], - 'ja':[ + 'ja': [ u'NowCommons', ], 'ko': [ @@ -128,7 +131,7 @@ u'공용 중복', u'Nowcommons', ], - 'nds-nl' : [ + 'nds-nl': [ u'NoenCommons', u'NowCommons', ], @@ -152,7 +155,7 @@ u'Перенесено на Викисклад', u'На Викискладе', ], - 'zh':[ + 'zh': [ u'NowCommons', u'Nowcommons', u'NCT', @@ -175,7 +178,7 @@ word_to_skip = { 'en': [], 'it': ['stemma', 'stub', 'hill40 '], - } +}
#nowCommonsMessage = imagetransfer.nowCommonsMessage
@@ -200,7 +203,7 @@ images_processed = list() while 1: url = 'http://toolserver.org/~multichill/nowcommons.php?language=%s&page=%s&...' % (lang, num_page) - HTML_text = self.site.getUrl(url, no_hostname = True) + HTML_text = self.site.getUrl(url, no_hostname=True) reg = r'<[Aa] href="(?P<urllocal>.*?)">(?P<imagelocal>.*?)</[Aa]> +?</td><td>\n\s*?' reg += r'<[Aa] href="(?P<urlcommons>http://commons.wikimedia.org/.*?)%22%3EImage:(?P<imagecommons>.*?)</[Aa]> +?</td><td>' regex = re.compile(reg, re.UNICODE) @@ -309,7 +312,7 @@ for page in self.getPageGenerator(): if use_hash: # Page -> Has the namespace | commons image -> Not - images_list = page # 0 -> local image, 1 -> commons image + images_list = page # 0 -> local image, 1 -> commons image page = pywikibot.Page(self.site, images_list[0]) else: # If use_hash is true, we have already print this before, no need @@ -345,7 +348,7 @@ u'"\03{lightred}%s\03{default}" is still used in %i pages.' % (localImagePage.title(withNamespace=False), len(usingPages))) - if replace == True: + if replace: pywikibot.output( u'Replacing "\03{lightred}%s\03{default}" by "\03{lightgreen}%s\03{default}".' % (localImagePage.title(withNamespace=False), @@ -358,9 +361,10 @@ oImageRobot.run() # If the image is used with the urlname the # previous function won't work - if len(list(pywikibot.ImagePage(self.site, - page.title()).usingPages())) > 0 and \ - replaceloose: + if len(list(pywikibot.ImagePage( + self.site, + page.title()).usingPages())) > 0 and \ + replaceloose: oImageRobot = image.ImageRobot( pg.FileLinksGenerator( localImagePage), @@ -377,7 +381,9 @@ if usingPages > 0 and use_hash: # just an enter pywikibot.input( - u'There are still %s pages with this image, confirm the manual removal from them please.' + u'There are still %s pages with this ' + u'image, confirm the manual removal ' + u'from them please.' % usingPages)
else: @@ -388,15 +394,17 @@ u'No page is using "\03{lightgreen}%s\03{default}" anymore.' % localImagePage.title(withNamespace=False)) commonsText = commonsImagePage.get() - if replaceonly == False: + if not replaceonly: if md5 == commonsImagePage.getFileMd5Sum(): pywikibot.output( u'The image is identical to the one on Commons.') - if len(localImagePage.getFileVersionHistory()) > 1 and not use_hash: - pywikibot.output( - u"This image has a version history. Please delete it manually after making sure that the old versions are not worth keeping.""") + if len(localImagePage.getFileVersionHistory()) > 1 and \ + not use_hash: + pywikibot.output(u""" +This image has a version history. Please delete it manually after +making sure that the old versions are not worth keeping.""") continue - if autonomous == False: + if not autonomous: pywikibot.output( u'\n\n>>>> Description on \03{lightpurple}%s\03{default} <<<<\n' % page.title()) @@ -406,17 +414,17 @@ % commonsImagePage.title()) pywikibot.output(commonsText) choice = pywikibot.inputChoice( -u'Does the description on Commons contain all required source and license\n' - u'information?', + u'Does the description on Commons contain all ' + u'required source and license\ninformation?', ['yes', 'no'], ['y', 'N'], 'N') if choice.lower() in ['y', 'yes']: localImagePage.delete( comment + ' [[:commons:Image:%s]]' - % filenameOnCommons, prompt = False) + % filenameOnCommons, prompt=False) else: localImagePage.delete( comment + ' [[:commons:Image:%s]]' - % filenameOnCommons, prompt = False) + % filenameOnCommons, prompt=False) else: pywikibot.output( u'The image is not identical to the one on Commons.') diff --git a/statistics_in_wikitable.py b/statistics_in_wikitable.py index e2604c8..73e2204 100644 --- a/statistics_in_wikitable.py +++ b/statistics_in_wikitable.py @@ -38,7 +38,7 @@
class StatisticsBot: - + def __init__(self, screen, your_page): """ Constructor. Parameter: