jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/367032 )
Change subject: [pep8] Keep lines beneath 80 chars
......................................................................
[pep8] Keep lines beneath 80 chars
Change-Id: I64f4145ce52104d95f5448c5a07030dcce51cc8c
---
M scripts/script_wui.py
M scripts/selflink.py
M scripts/solve_disambiguation.py
M scripts/standardize_interwiki.py
M scripts/surnames_redirects.py
M scripts/table2wiki.py
M scripts/template.py
M scripts/transferbot.py
M scripts/unusedfiles.py
M scripts/version.py
M scripts/weblinkchecker.py
M scripts/welcome.py
M scripts/wikisourcetext.py
13 files changed, 150 insertions(+), 105 deletions(-)
Approvals:
  Dalba: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/scripts/script_wui.py b/scripts/script_wui.py
index 97cd2c8..002a046 100755
--- a/scripts/script_wui.py
+++ b/scripts/script_wui.py
@@ -49,7 +49,7 @@
 # Id (rev-id) auf Ausgabeseite geschrieben, damit kann der Befehl
 # (durch Angabe der Sim-Id) ausgeführt werden -> crontab (!)
 # [ shell (rev-id) -> output mit shell rev-id ]
-# [ shell rev-id (als eindeutige job/task-config bzw. script) -> crontab ]
+# [ shell rev-id (eindeutige job/task-config bzw. script) -> crontab ]
 # @todo Bei jeder Botbearbeitung wird der Name des Auftraggebers vermerkt
 # --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---
 # Writing code in Wikipedia:
@@ -59,7 +59,8 @@ #
 # # overwrite 'raw_input' to run bot non-blocking and simulation mode
 # __builtin__.raw_input = lambda: 'n'
 #
-# # backup sys.argv; depreciated: if possible manipulate pywikibot.config instead
+# # backup sys.argv; depreciated: if possible manipulate pywikibot.config
+# instead
 # sys_argv = copy.deepcopy( sys.argv )
 #
 # ...
@@ -118,7 +119,8 @@
     'CRONMaxDelay': 5 * 60.0,       # check all ~5 minutes

     # forbidden parameters
-    # (at the moment none, but consider e.g. '-always' or allow it with '-simulate' only!)
+    # (at the moment none, but consider e.g. '-always' or allow it with
+    # '-simulate' only!)
 }

 __simulate = True
@@ -159,9 +161,9 @@
         # First check if page is protected, reject any data if not
         parts = self.refs[item].title().lower().rsplit('.')
         if len(parts) == 1 or parts[1] not in ['.css', '.js']:
-            raise ValueError(u'%s config %s = %s is not a secure page; '
-                             u'it should be a css or js userpage which are '
-                             u'automatically semi-protected.'
+            raise ValueError('%s config %s = %s is not a secure page; '
+                             'it should be a css or js userpage which are '
+                             'automatically semi-protected.'
                              % (self.__class__.__name__, item,
                                 self.refs[item]))
         try:
@@ -197,41 +199,52 @@
         self.do_check_CronJobs()
     def do_check_CronJobs(self):
-        # check cron/date (changes of self.refs are tracked (and reload) in on_pubmsg)
+        # check cron/date (changes of self.refs are tracked (and reload) in
+        # on_pubmsg)
         page = self.refs[self.templ]
         ctab = self.refs[self.cron].get()
         # extract 'rev' and 'timestmp' from 'crontab' page text ...
-        for line in ctab.splitlines():  # hacky/ugly/cheap; already better done in trunk dtbext
+
+        # hacky/ugly/cheap; already better done in trunk dtbext
+        for line in ctab.splitlines():
             (rev, timestmp) = [item.strip() for item in line[1:].split(',')]

             # [min] [hour] [day of month] [month] [day of week]
             # (date supported only, thus [min] and [hour] dropped)
             entry = crontab.CronTab(timestmp)
-            # find the delay from current minute (does not return 0.0 - but next)
+            # find the delay from current minute
+            # (does not return 0.0 - but next)
             now = datetime.datetime.now().replace(second=0, microsecond=0)
             delay = entry.next(
                 now - datetime.timedelta(microseconds=1))

             if (delay <= bot_config['CRONMaxDelay']):
-                pywikibot.output(u"CRONTAB: %s / %s / %s" % (page, rev, timestmp))
+                pywikibot.output('CRONTAB: %s / %s / %s' %
+                                 (page, rev, timestmp))
                 self.do_check(page.title(), int(rev))

     def do_check(self, page_title, rev=None, params=None):
         # Create two threads as follows
         # (simple 'thread' for more sophisticated code use 'threading')
         try:
-            thread.start_new_thread(main_script, (self.refs[page_title], rev, params))
+            thread.start_new_thread(main_script, (self.refs[page_title], rev,
+                                                  params))
         except:
-            # (done according to subster in trunk and submit in rewrite/.../data/api.py)
+            # (done according to subster in trunk and submit in
+            # rewrite/.../data/api.py)
             # TODO: is this error handling here needed at all??!?
-            pywikibot.exception(tb=True)  # secure traceback print (from api.py submit)
+
+            # secure traceback print (from api.py submit)
+            pywikibot.exception(tb=True)
             pywikibot.warning(u"Unable to start thread")

             wiki_logger(traceback.format_exc(), self.refs[page_title], rev)


 # Define a function for the thread
-def main_script(page, rev=None, params=NotImplemented):  # pylint: disable=unused-argument
+
+# pylint: disable=unused-argument
+def main_script(page, rev=None, params=NotImplemented):
     """Main thread."""
     # http://opensourcehacker.com/2011/02/23/temporarily-capturing-python-logging-...
@@ -244,7 +257,8 @@
     rootLogger = logging.getLogger()

     logHandler = logging.StreamHandler(buffer)
-    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
+    formatter = logging.Formatter(
+        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
     logHandler.setFormatter(formatter)
     rootLogger.addHandler(logHandler)

@@ -259,8 +273,11 @@
     try:
         exec(code)
     except:
-        # (done according to subster in trunk and submit in rewrite/.../data/api.py)
-        pywikibot.exception(tb=True)  # secure traceback print (from api.py submit)
+        # (done according to subster in trunk and submit in
+        # rewrite/.../data/api.py)
+
+        # secure traceback print (from api.py submit)
+        pywikibot.exception(tb=True)

     sys.stdout = sys.__stdout__
     sys.stderr = sys.__stderr__
@@ -280,7 +297,8 @@
         pywikibot.output(
             u'environment: garbage; %s / memory; %s / members; %s' % (
                 gc.collect(),
-                resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * resource.getpagesize(),
+                resource.getrusage(resource.RUSAGE_SELF).ru_maxrss *
+                resource.getpagesize(),
                 len(dir())))
     else:
         pywikibot.output(
@@ -296,7 +314,8 @@
 def wiki_logger(buffer, page, rev=None):
     """Log to wiki."""
     # FIXME: what is this??
-    # (might be a problem here for TS and SGE, output string has another encoding)
+    # (might be a problem here for TS and SGE, output string has another
+    # encoding)
     if False:
         buffer = buffer.decode(pywikibot.config.console_encoding)
     buffer = re.sub(r'\03{(.*?)}(.*?)\03{default}', r'\g<2>', buffer)
@@ -307,9 +326,10 @@
     outpage = pywikibot.Page(pywikibot.Site(), bot_config['ConfCSSoutput'])
     text = outpage.get()
     outpage.put(
-        text + u"\n== Simulation vom %s mit [%s code:%s] ==\n<pre>\n%s</pre>\n\n"
+        text + ('\n== Simulation vom %s mit [%s code:%s] =='
+                '\n<pre>\n%s</pre>\n\n')
         % (pywikibot.Timestamp.now().isoformat(' '), link, rev, buffer))
-# comment = pywikibot.translate(self.site.lang, bot_config['msg']))
+# summary=pywikibot.translate(self.site.lang, bot_config['msg']))

 def main(*args):
@@ -335,7 +355,8 @@
     site.login()
     chan = '#' + site.code + '.' + site.family.name

-    bot_user_name = pywikibot.config.usernames[pywikibot.config.family][pywikibot.config.mylang]
+    bot_user_name = pywikibot.config.usernames[pywikibot.config.family][
+        pywikibot.config.mylang]
     for key, value in bot_config.items():
         if hasattr(value, 'format'):
             bot_config[key] = value.format(username=bot_user_name)
diff --git a/scripts/selflink.py b/scripts/selflink.py
index f148fe6..f2121ff 100755
--- a/scripts/selflink.py
+++ b/scripts/selflink.py
@@ -57,7 +57,8 @@
     def _create_callback(self):
         """Create callback and add a choice to make the link bold."""
         callback = super(SelflinkBot, self)._create_callback()
-        callback.additional_choices += [_BoldChoice(self.current_page, callback)]
+        callback.additional_choices += [_BoldChoice(self.current_page,
+                                                    callback)]
         return callback
     def treat_page(self):
diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py
index 7716173..2bd59a0 100755
--- a/scripts/solve_disambiguation.py
+++ b/scripts/solve_disambiguation.py
@@ -246,7 +246,8 @@
             u'Wikipédia:Liens aux pages d’homonymie',
             u'Wikipédia:Homonymie',
             u'Wikipédia:Homonymie/Homonymes dynastiques',
-            u'Wikipédia:Prise de décision, noms des membres de dynasties/liste des dynastiens',
+            'Wikipédia:Prise de décision, noms des membres '
+            'de dynasties/liste des dynastiens',
             u'Liste de toutes les combinaisons de deux lettres',
             u'Wikipédia:Log d’upload/.*',
             u'Sigles de trois lettres de [A-Z]AA à [A-Z]ZZ',
@@ -405,7 +406,8 @@
         pywikibot.output(u"Found %d references." % len(refs))
         # Remove ignorables
         if self.disambPage.site.family.name in ignore_title and \
-           self.disambPage.site.lang in ignore_title[self.disambPage.site.family.name]:
+           self.disambPage.site.lang in ignore_title[
+               self.disambPage.site.family.name]:
             for ig in ignore_title[self.disambPage.site.family.name
                                    ][self.disambPage.site.lang]:
                 for i in range(len(refs) - 1, -1, -1):
@@ -497,7 +499,7 @@
                 'disambiguations',
                 self.disambPage.title(asUrl=True) + '.txt')
             try:
-                # Open file for appending. If none exists yet, create a new one.
+                # Open file for appending. If none exists, create a new one.
                 f = codecs.open(filename, 'a', 'utf-8')
                 f.write(refPage.title(asUrl=True) + '\n')
                 f.close()
@@ -584,7 +586,8 @@
     def test(self, value):
         """Test aliases and combine it with the original test."""
-        return value.lower() in self._aliases or super(AliasOption, self).test(value)
+        return value.lower() in self._aliases or super(AliasOption,
+                                                       self).test(value)


 class DisambiguationRobot(Bot):
@@ -613,8 +616,8 @@
         'hu': u'Egyért-redir',
     }

-    def __init__(self, always, alternatives, getAlternatives, dnSkip, generator,
-                 primary, main_only, minimum=0):
+    def __init__(self, always, alternatives, getAlternatives, dnSkip,
+                 generator, primary, main_only, minimum=0):
         """Constructor."""
         super(DisambiguationRobot, self).__init__()
         self.always = always
@@ -688,7 +691,7 @@
         # between | and ].
         # group linktrail is the link trail, that's letters after ]] which
         # are part of the word.
-        # note that the definition of 'letter' varies from language to language.
+        # note: the definition of 'letter' varies from language to language.
         self.linkR = re.compile(r'''
             \[\[  (?P<title>     [^\[\]\|#]*)
                   (?P<section> \#[^\]\|]*)?
@@ -761,7 +764,7 @@
                             % refPage.title())
                 include = False
         if include in (True, "redirect"):
-            # make a backup of the original text so we can show the changes later
+            # save the original text so we can show the changes later
            original_text = text
             n = 0
             curpos = 0
@@ -778,7 +781,7 @@
                 else:
                     # stop loop and save page
                     break
-                # Make sure that next time around we will not find this same hit.
+                # Ensure that next time around we will not find this same hit.
                 curpos = m.start() + 1
                 try:
                     foundlink = pywikibot.Link(m.group('title'),
@@ -803,13 +806,15 @@
                 context = 60
                 # check if there's a dn-template here already
                 if (self.dnSkip and self.dn_template_str and
-                        self.dn_template_str[:-2] in text[m.end():m.end() +
-                                                          len(self.dn_template_str) + 8]):
+                        self.dn_template_str[:-2] in text[
+                            m.end():m.end() + len(self.dn_template_str) + 8]):
                     continue
-                edit = EditOption('edit page', 'e', text, m.start(), disambPage.title())
+                edit = EditOption('edit page', 'e', text, m.start(),
+                                  disambPage.title())
                 context_option = HighlightContextOption(
-                    'more context', 'm', text, 60, start=m.start(), end=m.end())
+                    'more context', 'm', text, 60, start=m.start(),
+                    end=m.end())
                 context_option.before_question = True

                 options = [ListOption(self.alternatives, ''),
@@ -820,7 +825,8 @@
                            StandardOption('unlink', 'u')]
                 if self.dn_template_str:
                     # '?', '/' for old choice
-                    options += [AliasOption('tag template %s' % self.dn_template_str,
+                    options += [AliasOption('tag template %s' %
+                                            self.dn_template_str,
                                             ['t', '?', '/'])]
                 options += [context_option]
                 if not edited:
@@ -928,8 +934,10 @@
                 # instead of a pipelink
                 elif (
                     (len(new_page_title) <= len(link_text)) and
-                    (firstcap(link_text[:len(new_page_title)]) == firstcap(new_page_title)) and
-                    (re.sub(self.trailR, '', link_text[len(new_page_title):]) == '') and
+                    (firstcap(link_text[:len(new_page_title)]) ==
+                     firstcap(new_page_title)) and
+                    (re.sub(self.trailR, '',
+                            link_text[len(new_page_title):]) == '') and
                     (not section)
                 ):
                     newlink = "[[%s]]%s" \
@@ -1057,7 +1065,8 @@
         # first check whether user has customized the edit comment
         if (self.mysite.family.name in config.disambiguation_comment and
-                self.mylang in config.disambiguation_comment[self.mysite.family.name]):
+                self.mylang in config.disambiguation_comment[
+                    self.mysite.family.name]):
             try:
                 self.comment = i18n.translate(
                     self.mysite,
@@ -1208,7 +1217,8 @@
                 pywikibot.Site().disambcategory(),
                 start=arg[7:], namespaces=[0])
         except pywikibot.NoPage:
-            pywikibot.output("Disambiguation category for your wiki is not known.")
+            pywikibot.output(
+                'Disambiguation category for your wiki is not known.')
             raise
     else:
         generator_factory.handleArg(arg)
diff --git a/scripts/standardize_interwiki.py b/scripts/standardize_interwiki.py
index 7ef1a58..b283a7d 100644
--- a/scripts/standardize_interwiki.py
+++ b/scripts/standardize_interwiki.py
@@ -51,7 +51,7 @@
     for arg in pywikibot.handle_args():
         if arg.startswith('-start'):
             if len(arg) == 6:
-                start = pywikibot.input(u'From what page do you want to start?')
+                start = pywikibot.input('From what page do you want to start?')
             else:
                 start = arg[7:]
     site = pywikibot.Site()
diff --git a/scripts/surnames_redirects.py b/scripts/surnames_redirects.py
index 2cc8de4..d817fda 100755
--- a/scripts/surnames_redirects.py
+++ b/scripts/surnames_redirects.py
@@ -90,14 +90,15 @@
             else:
                 pywikibot.output('%s doesn\'t exist'
                                  % new_page.title(asLink=True))
-                choice = pywikibot.input_yn('Do you want to create a redirect?')
+                choice = pywikibot.input_yn(
+                    'Do you want to create a redirect?')
                 if choice:
                     comment = i18n.twtranslate(
                         site,
                         'capitalize_redirects-create-redirect',
                         {'to': page_t})
-                    new_page.set_redirect_target(self.current_page, create=True,
-                                                 summary=comment)
+                    new_page.set_redirect_target(self.current_page,
+                                                 create=True, summary=comment)

 def main(*args):
diff --git a/scripts/table2wiki.py b/scripts/table2wiki.py
index 3d41b20..4b4ec12 100644
--- a/scripts/table2wiki.py
+++ b/scripts/table2wiki.py
@@ -70,7 +70,7 @@
 class TableXmlDumpPageGenerator(object):
-    """A page generator that will yield all pages that seem to contain an HTML table."""
+    """Generator to yield all pages that seem to contain an HTML table."""

     def __init__(self, xmlfilename):
         """Constructor."""
@@ -114,8 +114,9 @@
         beautify all wiki tables already contained in the text.
         """
         warnings = 0
-        # this array will contain strings that will be shown in case of possible
-        # errors, before the user is asked if he wants to accept the changes.
+        # this array will contain strings that will be shown in case of
+        # possible errors, before the user is asked if he wants to accept the
+        # changes.
         warning_messages = []
         newTable = table
         ##################
@@ -138,8 +139,9 @@
         newTable = re.sub(r'(?i)[\r\n]*?<##table##>(?P<more>[\w\W]*?)[\r\n ]*',
                           r'\r\n{|\n\g<more>\r\n', newTable)
         # <table> tag with attributes, without more text on the same line
-        newTable = re.sub(r'(?i)[\r\n]*?<##table## (?P<attr>[\w\W]*?)>[\r\n ]*',
-                          r'\r\n{| \g<attr>\r\n', newTable)
+        newTable = re.sub(
+            r'(?i)[\r\n]*?<##table## (?P<attr>[\w\W]*?)>[\r\n ]*',
+            r'\r\n{| \g<attr>\r\n', newTable)
         # <table> tag without attributes, without more text on the same line
         newTable = re.sub(r'(?i)[\r\n]*?<##table##>[\r\n ]*',
                           '\r\n{|\r\n', newTable)
@@ -150,7 +152,8 @@
         ##################
         # caption with attributes
         newTable = re.sub(
-            r'(?i)<caption (?P<attr>[\w\W]*?)>(?P<caption>[\w\W]*?)</caption>',
+            r'(?i)<caption (?P<attr>[\w\W]*?)>'
+            r'(?P<caption>[\w\W]*?)</caption>',
             r'\r\n|+\g<attr> | \g<caption>', newTable)
         # caption without attributes
         newTable = re.sub(r'(?i)<caption>(?P<caption>[\w\W]*?)</caption>',
@@ -182,7 +185,7 @@
                           r'\n!\g<attr> | \g<header>\r\n', newTable)
         if n > 0:
             warning_messages.append(
-                u'WARNING: found <th ...> without </th>. (%d occurences\n)' % n)
+                'WARNING: found <th ...> without </th>. (%d occurences\n)' % n)
             warnings += n

         ##################
@@ -214,16 +217,6 @@
             warning_messages.append(
                 u'<td> used where </td> was expected. (%d occurences)\n' % n)
             warnings += n
-
-        # fail save, sometimes it's a <td><td></tr>
-        # newTable, n = re.subn("[\r\n]+<(td|TD)>([^<]*?)<(td|TD)></(tr|TR)>",
-        #                       "\r\n| \2\r\n", newTable)
-        # newTable, n = re.subn("[\r\n]+<(td|TD)([^>]*?)>([^<]*?)<(td|TD)></(tr|TR)>",
-        #                       "\r\n|\2| \3\r\n", newTable)
-        # if n > 0:
-        #     warning_messages.append("WARNING: found <td><td></tr>, but no </td>."
-        #                             " (%d occurences)\n" % n)
-        #     warnings += n

         # what is this for?
         newTable, n = re.subn(
@@ -280,8 +273,9 @@
         if config.deIndentTables:
             num = 1
             while num != 0:
-                newTable, num = re.subn(r'(\{\|[\w\W]*?)\n[ \t]+([\w\W]*?\|\})',
-                                        r'\1\r\n\2', newTable)
+                newTable, num = re.subn(
+                    r'(\{\|[\w\W]*?)\n[ \t]+([\w\W]*?\|\})',
+                    r'\1\r\n\2', newTable)

         ##################
         # kills additional spaces after | or ! or {|
@@ -375,15 +369,17 @@
         # TODO: how does this work? docu please.
         # why are only äöüß used, but not other special characters?
         newTable, num = re.subn(
-            r'(\r\n[A-Z]{1}[^\n\r]{200,}?[a-zäöüß].)\ ([A-ZÄÖÜ]{1}[^\n\r]{200,})',
+            r'(\r\n[A-Z]{1}[^\n\r]{200,}?[a-zäöüß].)'
+            r'\ ([A-ZÄÖÜ]{1}[^\n\r]{200,})',
             r'\1\r\n\2', newTable)
         return newTable, warnings, warning_messages

     def markActiveTables(self, text):
         """
-        Mark all table start and end tags that are not disabled by nowiki tags, comments etc.
+        Mark all hidden table start and end tags.

-        We will then later only work on these marked tags.
+        Mark all table start and end tags that are not disabled by nowiki tags,
+        comments etc. We will then later only work on these marked tags.
         """
         tableStartTagR = re.compile("<table", re.IGNORECASE)
         tableEndTagR = re.compile("</table>", re.IGNORECASE)
@@ -398,7 +394,7 @@

     def findTable(self, text):
         """
-        Find the first HTML table (which can contain nested tables) inside a text.
+        Find the first HTML table (which can contain nested tables).

         Returns the table and the start and end position inside the text.
         """
@@ -420,10 +416,12 @@
                 nextStarting = markedTableStartTagR.search(text)
                 nextEnding = markedTableEndTagR.search(text)
                 if not nextEnding:
-                    pywikibot.output("More opening than closing table tags. Skipping.")
+                    pywikibot.output(
+                        'More opening than closing table tags. Skipping.')
                     return None, 0, 0
                 # if another table tag is opened before one is closed
-                elif nextStarting and nextStarting.start() < nextEnding.start():
+                elif (nextStarting and
+                      nextStarting.start() < nextEnding.start()):
                     offset += nextStarting.end()
                     text = text[nextStarting.end():]
                     depth += 1
diff --git a/scripts/template.py b/scripts/template.py
index f44f541..2ea67f5 100755
--- a/scripts/template.py
+++ b/scripts/template.py
@@ -239,7 +239,8 @@
         if not template.exists():
             pywikibot.warning(u'Template "%s" does not exist.' % new)
             if not pywikibot.input_yn('Do you want to proceed anyway?',
-                                      default=False, automatic_quit=False):
+                                      default=False,
+                                      automatic_quit=False):
                 continue
         replacements.append((templateRegex,
                              r'{{%s\g<parameters>}}' % new))
@@ -343,13 +344,15 @@
     if not gen:
         gens = [
-            pagegenerators.ReferringPageGenerator(t, onlyTemplateInclusion=True)
+            pagegenerators.ReferringPageGenerator(t,
+                                                  onlyTemplateInclusion=True)
             for t in oldTemplates
         ]
         gen = pagegenerators.CombinedPageGenerator(gens)
         gen = pagegenerators.DuplicateFilterPageGenerator(gen)
     if user:
-        gen = pagegenerators.UserEditFilterGenerator(gen, user, timestamp, skip,
+        gen = pagegenerators.UserEditFilterGenerator(gen, user, timestamp,
+                                                     skip,
                                                      max_revision_depth=100,
                                                      show_filtered=True)
diff --git a/scripts/transferbot.py b/scripts/transferbot.py
index e6ece36..3df1753 100755
--- a/scripts/transferbot.py
+++ b/scripts/transferbot.py
@@ -57,8 +57,8 @@
"""Base class for exceptions from this script.
- Makes it easier for clients to catch all expected exceptions that the script might - throw + Makes it easier for clients to catch all expected exceptions that the + script might throw """
pass @@ -68,8 +68,8 @@
"""Thrown when the target site is the same as the source site.
- Based on the way each are initialized, this is likely to happen when the target site - simply hasn't been specified. + Based on the way each are initialized, this is likely to happen when the + target site simply hasn't been specified. """
pass @@ -77,7 +77,7 @@
 class TargetPagesMissing(WikiTransferException):

-    """Thrown if no page range has been specified for the script to operate on."""
+    """Thrown if no page range has been specified to operate on."""

     pass
@@ -139,7 +139,8 @@
     for page in gen:
         summary = "Moved page from %s" % page.title(asLink=True)
         targetpage = pywikibot.Page(tosite, prefix + page.title())
-        edithistpage = pywikibot.Page(tosite, prefix + page.title() + '/edithistory')
+        edithistpage = pywikibot.Page(tosite, prefix + page.title() +
+                                      '/edithistory')

         if targetpage.exists() and not overwrite:
             pywikibot.output(
diff --git a/scripts/unusedfiles.py b/scripts/unusedfiles.py
index 2e65460..9ba8d16 100755
--- a/scripts/unusedfiles.py
+++ b/scripts/unusedfiles.py
@@ -35,7 +35,7 @@
 # This template message should use subst:
 template_to_the_user = {
-    'fa': u'\n\n{{جا:اخطار به کاربر برای تصاویر بدون استفاده|%(title)s}}--~~~~',
+    'fa': '\n\n{{جا:اخطار به کاربر برای تصاویر بدون استفاده|%(title)s}}--~~~~',
 }
diff --git a/scripts/version.py b/scripts/version.py
index 59f3416..c447dfa 100755
--- a/scripts/version.py
+++ b/scripts/version.py
@@ -37,7 +37,9 @@
 def check_environ(environ_name):
     """Print environment variable."""
-    pywikibot.output('{0}: {1}'.format(environ_name, os.environ.get(environ_name, 'Not set')))
+    pywikibot.output('{0}: {1}'.format(environ_name,
+                                       os.environ.get(environ_name,
+                                                      'Not set')))


 def main(*args):
@@ -76,12 +78,14 @@
     if usernames:
         pywikibot.output('Usernames for family "{0}":'.format(family))
         for lang, username in usernames.items():
-            sysop_name = pywikibot.config2.sysopnames.get(family, {}).get(lang)
+            sysop_name = pywikibot.config2.sysopnames.get(family,
+                                                          {}).get(lang)
             if not sysop_name:
                 sysop_name = 'no sysop configured'
             elif sysop_name == username:
                 sysop_name = 'also sysop'
-            pywikibot.output('\t{0}: {1} ({2})'.format(lang, username, sysop_name))
+            pywikibot.output('\t{0}: {1} ({2})'.format(lang, username,
+                                                       sysop_name))

 if __name__ == '__main__':
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py
index 755c5fa..0aa0a4a 100755
--- a/scripts/weblinkchecker.py
+++ b/scripts/weblinkchecker.py
@@ -171,7 +171,8 @@
     # Other special cases
     re.compile(r'.*[./@]berlinonline.de(/.*)?'),
-    # above entry to be manually fixed per request at [[de:Benutzer:BLueFiSH.as/BZ]]
+    # above entry to be manually fixed per request at
+    # [[de:Benutzer:BLueFiSH.as/BZ]]
     # bot can't handle their redirects:

     # bot rejected on the site, already archived
@@ -299,8 +300,8 @@
     Given a HTTP URL, tries to load the page from the Internet and checks if
     it is still online.

-    Returns a (boolean, string) tuple saying if the page is online and including
-    a status reason.
+    Returns a (boolean, string) tuple saying if the page is online and
+    including a status reason.

     Per-domain user-agent faking is not supported in this deprecated class.
@@ -366,8 +367,8 @@
         if not self.serverEncoding:
             # TODO: We might also load a page, then check for an encoding
             # definition in a HTML meta tag.
-            pywikibot.output(u'Error retrieving server\'s default charset. '
-                             u'Using ISO 8859-1.')
+            pywikibot.output('Error retrieving server\'s default charset. '
+                             'Using ISO 8859-1.')
             # most browsers use ISO 8859-1 (Latin-1) as the default.
             self.serverEncoding = 'iso8859-1'
         return self.serverEncoding
@@ -402,7 +403,8 @@
         except UnicodeEncodeError:
             encoding = self.getEncodingUsedByServer()
             self.path = unicode(urllib.quote(self.path.encode(encoding)))
-            self.query = unicode(urllib.quote(self.query.encode(encoding), '=&'))
+            self.query = unicode(urllib.quote(self.query.encode(encoding),
+                                              '=&'))

     def resolveRedirect(self, useHEAD=False):
         """
@@ -560,7 +562,8 @@
         alive = self.response.status not in range(400, 500)
         if self.response.status in self.HTTPignore:
             alive = False
-        return alive, '%s %s' % (self.response.status, self.response.reason)
+        return alive, '%s %s' % (self.response.status,
+                                 self.response.reason)


 class LinkCheckThread(threading.Thread):
@@ -598,7 +601,8 @@
         try:
             header = self.header
             r = comms.http.fetch(
-                self.url, headers=header, use_fake_user_agent=self._use_fake_user_agent)
+                self.url, headers=header,
+                use_fake_user_agent=self._use_fake_user_agent)
         except requests.exceptions.InvalidURL:
             message = i18n.twtranslate(self.page.site,
                                        'weblinkchecker-badurl_msg',
@@ -631,8 +635,8 @@
     The URLs are dictionary keys, and values are lists of tuples where each
     tuple represents one time the URL was found dead. Tuples have the form
     (title, date, error) where title is the
-    wiki page where the URL was found, date is an instance of time, and error is
-    a string with error code and message.
+    wiki page where the URL was found, date is an instance of time, and error
+    is a string with error code and message.

     We assume that the first element in the list represents the first time
     we found this dead link, and the last element represents the last time.
@@ -656,7 +660,8 @@
         self.site = site
         self.semaphore = threading.Semaphore()
         self.datfilename = pywikibot.config.datafilepath(
-            'deadlinks', 'deadlinks-%s-%s.dat' % (self.site.family.name, self.site.code))
+            'deadlinks', 'deadlinks-%s-%s.dat' % (self.site.family.name,
+                                                  self.site.code))
         # Count the number of logged links, so that we can insert captions
         # from time to time
         self.logCount = 0
@@ -994,7 +999,8 @@
             xmlStart
         except NameError:
             xmlStart = None
-        gen = XmlDumpPageGenerator(xmlFilename, xmlStart, genFactory.namespaces)
+        gen = XmlDumpPageGenerator(xmlFilename, xmlStart,
+                                   genFactory.namespaces)

     if not gen:
         gen = genFactory.getCombinedGenerator()
diff --git a/scripts/welcome.py b/scripts/welcome.py
index 7297944..f31981b 100755
--- a/scripts/welcome.py
+++ b/scripts/welcome.py
@@ -409,7 +409,7 @@
     offset = None                    # skip users newer than that timestamp
     timeoffset = 0                   # skip users newer than # minutes
     recursive = True                 # define if the Bot is recursive or not
-    timeRecur = 3600                 # how much time (sec.) the bot sleeps before restart
+    timeRecur = 3600  # how much time (sec.) the bot waits before restart
     makeWelcomeLog = True            # create the welcome log or not
     confirm = False                  # should bot ask to add user to bad-username list
     welcomeAuto = False              # should bot welcome auto-created users
@@ -419,7 +419,7 @@
     signFileName = None              # File name, default: None
     defaultSign = '--~~~~'           # default signature
     queryLimit = 50                  # number of users that the bot load to check
-    quiet = False                    # Prevents users without contributions are displayed
+    quiet = False  # Users without contributions aren't displayed

 class WelcomeBot(object):
@@ -567,7 +567,7 @@
         if answer.lower() in ['yes', 'y'] or not globalvar.confirm:
             showStatus()
             pywikibot.output(
-                u'%s is possibly an unwanted username. It will be reported.'
+                '%s is possibly an unwanted username. It will be reported.'
                 % name)
             if hasattr(self, '_BAQueue'):
                 self._BAQueue.append(name)
@@ -696,8 +696,8 @@
             pywikibot.output('Loading signature list...')
             signText = signPage.get()
         else:
-            pywikibot.output('The Signature list page is not exist, random '
-                             'signature will disable.')
+            pywikibot.output('The signature list page does not exist, '
+                             'random signature will be disabled.')
             globalvar.randomSign = False
     else:
         try:
@@ -913,7 +913,7 @@
     try:
         globalvar.offset = pywikibot.Timestamp.fromtimestampformat(val)
     except ValueError:
-        # upon request, we might want to check for software version here
+        # upon request, we could check for software version here
         raise ValueError(
             "Mediawiki has changed, -offset:# is not supported "
             "anymore, but -offset:TIMESTAMP is, assuming TIMESTAMP "
diff --git a/scripts/wikisourcetext.py b/scripts/wikisourcetext.py
index 6ae1f92..dbe99c1 100644
--- a/scripts/wikisourcetext.py
+++ b/scripts/wikisourcetext.py
@@ -9,8 +9,8 @@
 to create the Index page, making the upload feature independent from the
 format of the file, as long as it is supported by the MW ProofreadPage
 extension.

-As alternative, if '-ocr' option is selected, https://tools.wmflabs.org/phetools
-OCR tool will be used to get text.
+As alternative, if '-ocr' option is selected,
+https://tools.wmflabs.org/phetools OCR tool will be used to get text.
 In this case, also already existing pages with quality value 'Not Proofread'
 can be treated. '-force' will override existing page in this case.
@@ -34,8 +34,8 @@
                     saving the page
 -ocr:           use https://tools.wmflabs.org/phetools OCR tool to get text;
-                default is False, i.e. only not-(yet)-existing pages
-                in Page ns will be treated and text will be fetched via preload.
+                default is False, i.e. only not-(yet)-existing pages in Page
+                ns will be treated and text will be fetched via preload.

 -force:         overwrite existing pages;
                 default is False; valid only if '-ocr' is selected.
pywikibot-commits@lists.wikimedia.org