jenkins-bot has submitted this change and it was merged.
Change subject: Update doc strings and minor code parts from core
......................................................................
Update doc strings and minor code parts from core
Change-Id: I52283b4533d5556d6f7c55ebce8a970a66845fe6
---
M solve_disambiguation.py
1 file changed, 69 insertions(+), 36 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/solve_disambiguation.py b/solve_disambiguation.py
index 678302e..1fb5a37 100644
--- a/solve_disambiguation.py
+++ b/solve_disambiguation.py
@@ -6,9 +6,11 @@
Specify the disambiguation page on the command line, or enter it at the
prompt after starting the program. (If the disambiguation page title starts
with a '-', you cannot name it on the command line, but you can enter it at
-the prompt.) The program will pick up the page, and look for all
-alternative links, and show them with a number adjacent to them. It will
-then automatically loop over all pages referring to the disambiguation page,
+the prompt.)
+
+The program will pick up the page, and look for all alternative links,
+and show them with a number adjacent to them. It will then automatically
+loop over all pages referring to the disambiguation page,
and show 30 characters of context on each side of the reference to help you
make the decision between the alternatives. It will ask you to type the
number of the appropriate replacement, and perform the change.
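As a rough illustration of the behaviour described above (not part of this change), the sketch below shows how roughly 30 characters of context on each side of a link match could be extracted. The names show_context and sample, and the simplified regex, are invented for the example.

    # Illustrative sketch only: display a match with ~30 characters of
    # context on each side, as the docstring above describes.
    import re

    def show_context(text, match, context=30):
        """Return the matched link with `context` characters around it."""
        start = max(0, match.start() - context)
        end = min(len(text), match.end() + context)
        return text[start:end]

    sample = u'Some article text with a [[Disambiguation page|link]] in the middle.'
    m = re.search(r'\[\[[^\]]+\]\]', sample)
    if m:
        print(show_context(sample, m))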
@@ -279,8 +281,8 @@
u'Wikipedy:Fangnet',
],
'hu': [
- #hu:Wikipédia:Kocsmafal (egyéb)#Hol nem kell egyértelműsíteni?
- #2012-02-08
+ # hu:Wikipédia:Kocsmafal (egyéb)#Hol nem kell egyértelműsíteni?
+ # 2012-02-08
u'Wikipédia:(?!Sportműhely/Eddigi cikkeink).*',
u'.*\(egyértelműsítő lap\)$',
u'.*[Vv]ita:.*',
@@ -373,11 +375,15 @@
return string[0].upper() + string[1:]
+def first_lower(string):
+ return string[:1].lower() + string[1:]
+
+
def correctcap(link, text):
# If text links to a page with title link uncapitalized, uncapitalize link,
# otherwise capitalize it
linkupper = link.title()
- linklower = linkupper[0].lower() + linkupper[1:]
+ linklower = first_lower(linkupper)
if "[[%s]]" % linklower in text or "[[%s|" % linklower in text:
return linklower
else:
@@ -404,6 +410,9 @@
class ReferringPageGeneratorWithIgnore:
+
+ """Referring Page generator, with an ignore manager."""
+
def __init__(self, disambPage, primary=False, minimum=0):
self.disambPage = disambPage
self.minimum = minimum
@@ -427,12 +436,16 @@
class PrimaryIgnoreManager(object):
+
"""
+ Primary ignore manager.
+
If run with the -primary argument, reads from a file which pages should
not be worked on; these are the ones where the user pressed n last time.
If run without the -primary argument, doesn't ignore any pages.
"""
+
def __init__(self, disambPage, enabled=False):
self.disambPage = disambPage
self.enabled = enabled
@@ -449,7 +462,7 @@
# remove trailing newlines and carriage returns
while line[-1] in ['\n', '\r']:
line = line[:-1]
- #skip empty lines
+ # skip empty lines
if line != '':
self.ignorelist.append(line)
f.close()
@@ -461,7 +474,7 @@
def ignore(self, refPage):
if self.enabled:
- # Skip this occurence next time.
+ # Skip this occurrence next time.
filename = config.datafilepath(
'disambiguations',
self.disambPage.urlname() + '.txt')
@@ -475,6 +488,8 @@
class DisambiguationRobot(object):
+
+ """Disambiguation bot."""
ignore_contents = {
'de': (u'{{[Ii]nuse}}',
@@ -517,13 +532,15 @@
self.setupRegexes()
def checkContents(self, text):
- '''
+ """
+ Check if the text matches any of the ignore regexes.
+
For a given text, returns False if none of the regular
expressions given in the dictionary at the top of this class
matches a substring of the text.
Otherwise returns the substring which is matched by one of
the regular expressions.
- '''
+ """
for ig in self.ignore_contents_regexes:
match = ig.search(text)
if match:
@@ -531,11 +548,9 @@
return None
def makeAlternativesUnique(self):
- # remove duplicate entries
- result = {}
- for i in self.alternatives:
- result[i] = None
- self.alternatives = result.keys()
+ # remove duplicate entries stable
+ unique = set(self.alternatives)
+ self.alternatives = [alt for alt in self.alternatives if alt in unique]
def listAlternatives(self):
list = u'\n'
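The "remove duplicate entries stable" comment above refers to order-preserving deduplication. A common stand-alone pattern for that (shown here only as an illustration, not as the merged code) keeps a seen set while preserving the first occurrence of each entry:

    # Illustrative sketch only: order-preserving deduplication.
    def unique_stable(items):
        """Return items without duplicates, keeping first-occurrence order."""
        seen = set()
        result = []
        for item in items:
            if item not in seen:
                seen.add(item)
                result.append(item)
        return result

    print(unique_stable(['Foo', 'Bar', 'Foo', 'Baz']))  # ['Foo', 'Bar', 'Baz']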
@@ -577,6 +592,8 @@
def treat(self, refPage, disambPage):
"""
+ Treat a page.
+
Parameters:
disambPage - The disambiguation page or redirect we don't want
anything to link to
@@ -605,8 +622,9 @@
if disambPage.isRedirectPage():
target = self.alternatives[0]
choice = pywikibot.inputChoice(
- u'Do you want to make redirect %s point to %s?'
- % (refPage.title(), target), ['yes', 'no'], ['y', 'N'], 'N')
+ u'Do you want to make redirect %s point '
+ 'to %s?' % (refPage.title(), target),
+ ['yes', 'no'], ['y', 'N'], 'N')
if choice == 'y':
redir_text = '#%s [[%s]]' \
% (self.mysite.redirect(default=True), target)
@@ -617,8 +635,9 @@
else:
choice = pywikibot.inputChoice(
u'Do you want to work on pages linking to %s?'
- % refPage.title(), ['yes', 'no', 'change redirect'],
- ['y', 'N', 'c'], 'N')
+ % refPage.title(),
+ ['yes', 'no', 'change redirect'],
+ ['y', 'N', 'c'], 'N')
if choice == 'y':
gen = ReferringPageGeneratorWithIgnore(refPage,
self.primary)
@@ -747,7 +766,7 @@
# skip this page
if self.primary:
# If run with the -primary argument, skip this
- # occurence next time.
+ # occurrence next time.
self.primaryIgnoreManager.ignore(refPage)
return True
elif choice in ['q', 'Q']:
@@ -787,7 +806,7 @@
position_split = end_of_word_match.start(0)
else:
position_split = 0
- #insert dab needed template
+ # insert dab needed template
text = (text[:m.end() + position_split] +
dn_template_str +
text[m.end() + position_split:])
@@ -828,21 +847,20 @@
continue
new_page_title = self.alternatives[choice]
repPl = pywikibot.Page(disambPage.site(), new_page_title)
- if (new_page_title[0].isupper()
- or link_text[0].isupper()):
+ if (new_page_title[0].isupper() or
+ link_text[0].isupper()):
new_page_title = repPl.title()
else:
new_page_title = repPl.title()
- new_page_title = (new_page_title[0].lower() +
- new_page_title[1:])
+ new_page_title = first_lower(new_page_title)
if new_page_title not in new_targets:
new_targets.append(new_page_title)
if replaceit and trailing_chars:
newlink = "[[%s%s]]%s" % (new_page_title,
section,
trailing_chars)
- elif replaceit or (new_page_title == link_text
- and not section):
+ elif replaceit or (new_page_title == link_text and
+ not section):
newlink = "[[%s]]" % new_page_title
# check if we can create a link with trailing characters
# instead of a pipelink
@@ -869,7 +887,8 @@
pywikibot.showDiff(original_text, text)
pywikibot.output(u'')
# save the page
- self.setSummaryMessage(disambPage, new_targets, unlink, dn)
+ self.setSummaryMessage(disambPage, new_targets, unlink,
+ dn)
try:
refPage.put_async(text, comment=self.comment)
except pywikibot.LockedPage:
@@ -880,14 +899,14 @@
def findAlternatives(self, disambPage):
if disambPage.isRedirectPage() and not self.primary:
- if (disambPage.site().lang in self.primary_redir_template
- and self.primary_redir_template[disambPage.site().lang]
+ if (disambPage.site().lang in self.primary_redir_template and
+ self.primary_redir_template[disambPage.site.lang]
in disambPage.templates(get_redirect=True)):
baseTerm = disambPage.title()
for template in disambPage.templatesWithParams(
get_redirect=True):
if template[0] == self.primary_redir_template[
- disambPage.site().lang] \
+ disambPage.site.lang] \
and len(template[1]) > 0:
baseTerm = template[1][1]
disambTitle = primary_topic_format[self.mylang] % baseTerm
@@ -966,7 +985,8 @@
targets = targets[:-2]
if not targets:
- targets = i18n.twtranslate(self.mysite, unknown_msg)
+ targets = i18n.twtranslate(self.mysite,
+ unknown_msg)
# first check whether user has customized the edit comment
if (self.mysite.family.name in pywikibot.config.disambiguation_comment
@@ -993,9 +1013,10 @@
msg_redir_unlink,
{'from': disambPage.title()})
elif dn and not new_targets:
- self.comment = i18n.twtranslate(self.mysite,
- msg_redir_dn,
- {'from': disambPage.title()})
+ self.comment = i18n.twtranslate(
+ self.mysite,
+ msg_redir_dn,
+ {'from': disambPage.title()})
else:
self.comment = i18n.twtranslate(self.mysite,
msg_redir,
@@ -1019,8 +1040,11 @@
ignore_title[self.mysite.family.name] = {}
if self.mylang not in ignore_title[self.mysite.family.name]:
ignore_title[self.mysite.family.name][self.mylang] = []
+
ignore_title[self.mysite.family.name][self.mylang] += [
- u'%s:' % namespace for namespace in self.mysite.namespaces()]
+ u'%s:' % namespace
+ for namespace in self.mysite.namespaces()
+ ]
for disambPage in self.generator:
self.primaryIgnoreManager = PrimaryIgnoreManager(
@@ -1029,6 +1053,7 @@
if not self.findAlternatives(disambPage):
continue
+ pywikibot.output('\nAlternatives for %s' % disambPage)
self.makeAlternativesUnique()
# sort possible choices
if pywikibot.config.sort_ignore_case:
@@ -1051,6 +1076,14 @@
def main(*args):
+ """
+ Process command line arguments and invoke bot.
+
+ If args is an empty list, sys.argv is used.
+
+ @param args: command line arguments
+ @type args: list of unicode
+ """
# the option that's always selected when the bot wonders what to do with
# a link. If it's None, the user is prompted (default behaviour).
always = None
--
To view, visit https://gerrit.wikimedia.org/r/210924
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I52283b4533d5556d6f7c55ebce8a970a66845fe6
Gerrit-PatchSet: 3
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: [bugfix] enable version.py for SVN 1.8
......................................................................
[bugfix] enable version.py for SVN 1.8
SVN 1.8.13 does not work with setuptools. Now fall back to the old
implementation, which reads wc.db, when getversiondict retrieves the version info.
SVN 1.8.13 does not have .svn/entries file anymore. There is just
the wc.db inside the .svn folder. In that case skip retrieving the
version info from entries and assume the newer svn version.
Comparing dates with an undefined daylight saving time status (which
means the value is -1) cannot be done directly, as the comparison would
fail. Compare their items instead.
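As a hedged illustration of that comparison (the merged code below iterates over the struct fields instead), two time.struct_time values can be compared while ignoring the trailing tm_isdst field like this; equal_ignoring_dst is an invented name:

    # Illustrative sketch only: compare two time.struct_time values while
    # ignoring the daylight saving time flag (tm_isdst), which may be -1.
    import time

    def equal_ignoring_dst(a, b):
        """Compare struct_time items except the trailing tm_isdst field."""
        return a[:8] == b[:8]  # indices 0-7 cover year through yday

    t1 = time.strptime('2015-05-01T12:00:00', '%Y-%m-%dT%H:%M:%S')
    t2 = time.strptime('2015-05-01T12:00:00', '%Y-%m-%dT%H:%M:%S')
    assert equal_ignoring_dst(t1, t2)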
bug: T97241
Change-Id: Ia235b9203ef642dd5bcdd0d1d70239a23d5eeadb
---
M pywikibot/version.py
1 file changed, 39 insertions(+), 29 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/version.py b/pywikibot/version.py
index 8de383e..10c35d7 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -2,7 +2,7 @@
"""Module to determine the pywikibot version (tag, revision and date)."""
#
# (C) Merlijn 'valhallasw' van Deen, 2007-2014
-# (C) xqt, 2010-2014
+# (C) xqt, 2010-2015
# (C) Pywikibot team, 2007-2015
#
# Distributed under the terms of the MIT license.
@@ -141,36 +141,39 @@
path = os.path.join(path, '..')
_program_dir = path
+ filename = os.path.join(_program_dir, '.svn/entries')
+ if os.path.isfile(filename):
+ with open(filename) as entries:
+ version = entries.readline().strip()
+ if version != '12':
+ for i in range(3):
+ entries.readline()
+ tag = entries.readline().strip()
+ t = tag.split('://')
+ t[1] = t[1].replace('svn.wikimedia.org/svnroot/pywikipedia/',
+ '')
+ tag = '[%s] %s' % (t[0], t[1])
+ for i in range(4):
+ entries.readline()
+ date = time.strptime(entries.readline()[:19],
+ '%Y-%m-%dT%H:%M:%S')
+ rev = entries.readline()[:-1]
+ return tag, rev, date
- entries = open(os.path.join(_program_dir, '.svn/entries'))
- version = entries.readline().strip()
- # use sqlite table for new entries format
- if version == "12":
- entries.close()
- from sqlite3 import dbapi2 as sqlite
- con = sqlite.connect(os.path.join(_program_dir, ".svn/wc.db"))
- cur = con.cursor()
- cur.execute("""select
+ # We haven't found the information in entries file.
+ # Use sqlite table for new entries format
+ from sqlite3 import dbapi2 as sqlite
+ con = sqlite.connect(os.path.join(_program_dir, ".svn/wc.db"))
+ cur = con.cursor()
+ cur.execute("""select
local_relpath, repos_path, revision, changed_date, checksum from nodes
order by revision desc, changed_date desc""")
- name, tag, rev, date, checksum = cur.fetchone()
- cur.execute("select root from repository")
- tag, = cur.fetchone()
- con.close()
- tag = os.path.split(tag)[1]
- date = time.gmtime(date / 1000000)
- else:
- for i in range(3):
- entries.readline()
- tag = entries.readline().strip()
- t = tag.split('://')
- t[1] = t[1].replace('svn.wikimedia.org/svnroot/pywikipedia/', '')
- tag = '[%s] %s' % (t[0], t[1])
- for i in range(4):
- entries.readline()
- date = time.strptime(entries.readline()[:19], '%Y-%m-%dT%H:%M:%S')
- rev = entries.readline()[:-1]
- entries.close()
+ name, tag, rev, date, checksum = cur.fetchone()
+ cur.execute("select root from repository")
+ tag, = cur.fetchone()
+ con.close()
+ tag = os.path.split(tag)[1]
+ date = time.gmtime(date / 1000000)
return tag, rev, date
@@ -244,7 +247,14 @@
_program_dir = path or _get_program_dir()
tag, rev, date = svn_rev_info(_program_dir)
hsh, date2 = github_svn_rev2hash(tag, rev)
- assert(date == date2)
+ if date.tm_isdst >= 0 and date2.tm_isdst >= 0:
+ assert(date == date2)
+ # date.tm_isdst is -1 means unknown state
+ # compare its contents except daylight saving time status
+ else:
+ for i in range(date.n_fields - 1):
+ assert(date[i] == date2[i])
+
rev = 's%s' % rev
if (not date or not tag or not rev) and not path:
raise ParseError
--
To view, visit https://gerrit.wikimedia.org/r/208084
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ia235b9203ef642dd5bcdd0d1d70239a23d5eeadb
Gerrit-PatchSet: 6
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Revert "Remove .py translation files"
......................................................................
Revert "Remove .py translation files"
This reverts commit f7e046ba0c7ed829fc1598da7b7d7fea5f3082d5.
i18n .py files are still needed by compat. Deleting them may block all
compat scripts. Keep them until compat is finally closed.
T100223
Change-Id: I9caf1e14c37f7ed2339b4171fa80621c5bfa15f7
---
A .pep8
M __init__.py
A add_text.py
A archivebot.py
A basic.py
A blockpageschecker.py
A capitalize_redirects.py
A casechecker.py
A catall.py
A category.py
A category_redirect.py
A clean_sandbox.py
A commons.py
A commons_link.py
A commonscat.py
A cosmetic_changes.py
A deledpimage.py
A delete.py
A djvutext.py
A editarticle.py
A featured.py
A fixing_redirects.py
A followlive.py
A interwiki.py
A isbn.py
A lonelypages.py
A makecat.py
A misspelling.py
A movepages.py
A ndashredir.py
A noreferences.py
A pagefromfile.py
A piper.py
A protect.py
A pywikibot.py
A redirect.py
A reflinks.py
A replace.py
A revertbot.py
A selflink.py
A solve_disambiguation.py
A spamremove.py
A spellcheck.py
A table2wiki.py
A template.py
A thirdparty.py
A undelete.py
A unlink.py
A unprotect.py
A weblinkchecker.py
A welcome.py
A wiktionary.py
52 files changed, 26,820 insertions(+), 4 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
--
To view, visit https://gerrit.wikimedia.org/r/213477
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I9caf1e14c37f7ed2339b4171fa80621c5bfa15f7
Gerrit-PatchSet: 3
Gerrit-Project: pywikibot/i18n
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: [bugfix] enable version.py for SVN 1.8
......................................................................
[bugfix] enable version.py for SVN 1.8
SVN 1.8.13 does not have .svn/entries file anymore. There is just
the wc.db inside the .svn folder. In that case skip retrieving the
version info from entries and assume the newer svn version.
Update some docstrings from core, remove obsolete "size" variable.
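For context on the wc.db fallback described above, a minimal stand-alone sketch (using the same SQL query as the patch below, but with an invented function name) could look like this:

    # Illustrative sketch only: read revision info from the SVN 1.7+ wc.db
    # database instead of the removed .svn/entries file.
    import os
    from sqlite3 import dbapi2 as sqlite

    def svn_wc_db_info(program_dir):
        """Return (tag, rev, changed_date) read from .svn/wc.db."""
        con = sqlite.connect(os.path.join(program_dir, '.svn/wc.db'))
        cur = con.cursor()
        cur.execute("""select
    local_relpath, repos_path, revision, changed_date, checksum from nodes
    order by revision desc, changed_date desc""")
        name, tag, rev, date, checksum = cur.fetchone()
        cur.execute('select root from repository')
        tag, = cur.fetchone()
        con.close()
        return os.path.split(tag)[1], rev, date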
bug: T97241
Change-Id: Ie8e38cd3a31edeb967b471fcaca582708305a821
---
M pywikibot/version.py
1 file changed, 77 insertions(+), 25 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/version.py b/pywikibot/version.py
index ee49fe7..a4ccc2a 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-""" Module to determine the pywikibot version (tag, revision and date).
+"""Module to determine the pywikibot version (tag, revision and date).
This module must not be loaded before the module wikipedia, otherwise
the proxy configuration does not have any effect, and the urllib2 open
@@ -12,8 +12,8 @@
"""
#
# (C) Merlijn 'valhallasw' van Deen, 2007-2014
-# (C) xqt, 2010-2014
-# (C) Pywikibot team, 2007-2013
+# (C) xqt, 2010-2015
+# (C) Pywikibot team, 2007-2015
#
# Distributed under the terms of the MIT license.
#
@@ -31,7 +31,7 @@
class ParseError(Exception):
- """ Parsing went wrong. """
+ """Parsing went wrong."""
def _get_program_dir():
@@ -40,10 +40,11 @@
def getversion(online=True):
- """Return a pywikibot version string
+ """Return a pywikibot version string.
+
@param online: (optional) Include information obtained online
"""
- data = dict(getversiondict()) # copy dict to prevent changes in 'chache'
+ data = dict(getversiondict()) # copy dict to prevent changes in 'cache'
data['cmp_ver'] = 'n/a'
if online:
@@ -59,6 +60,15 @@
def getversiondict():
+ """Get version info for the package.
+
+ @return:
+ - tag (name for the repository),
+ - rev (current revision identifier),
+ - date (date of current revision),
+ - hash (git hash for the current revision)
+ @rtype: C{dict} of four C{str}
+ """
global cache
if cache:
return cache
@@ -122,14 +132,40 @@
def getversion_svn(path=None):
+ """Get version info for a Subversion checkout.
+
+ @param path: directory of the Subversion checkout
+ @return:
+ - tag (name for the repository),
+ - rev (current Subversion revision identifier),
+ - date (date of current revision),
+ - hash (git hash for the Subversion revision)
+ @rtype: C{tuple} of three C{str} and a C{time.struct_time}
+ """
import httplib
import xml.dom.minidom
_program_dir = path or _get_program_dir()
- entries = open(os.path.join(_program_dir, '.svn/entries'))
- version = entries.readline().strip()
+ filename = os.path.join(_program_dir, '.svn/entries')
+ found = False
+ if os.path.isfile(filename):
+ with open(filename) as entries:
+ version = entries.readline().strip()
+ if version != '12':
+ for i in range(3):
+ entries.readline()
+ tag = entries.readline().strip()
+ t = tag.split('://')
+ t[1] = t[1].replace('svn.wikimedia.org/svnroot/pywikipedia/',
+ '')
+ tag = '[%s] %s' % (t[0], t[1])
+ for i in range(4):
+ entries.readline()
+ date = time.strptime(entries.readline()[:19],
+ '%Y-%m-%dT%H:%M:%S')
+ rev = entries.readline()[:-1]
+ found = True
# use sqlite table for new entries format
- if version == "12":
- entries.close()
+ if not found:
from sqlite3 import dbapi2 as sqlite
con = sqlite.connect(os.path.join(_program_dir, ".svn/wc.db"))
cur = con.cursor()
@@ -142,18 +178,7 @@
con.close()
tag = os.path.split(tag)[1]
date = time.gmtime(date / 1000000)
- else:
- for i in range(3):
- entries.readline()
- tag = entries.readline().strip()
- t = tag.split('://')
- t[1] = t[1].replace('svn.wikimedia.org/svnroot/pywikipedia/', '')
- tag = '[%s] %s' % (t[0], t[1])
- for i in range(4):
- entries.readline()
- date = time.strptime(entries.readline()[:19], '%Y-%m-%dT%H:%M:%S')
- rev = entries.readline()[:-1]
- entries.close()
+
conn = httplib.HTTPSConnection('github.com')
conn.request('PROPFIND', '/wikimedia/%s/!svn/vcc/default' % tag,
"<?xml version='1.0' encoding='utf-8'?>"
@@ -172,6 +197,16 @@
def getversion_git(path=None):
+ """Get version info for a Git clone.
+
+ @param path: directory of the Git checkout
+ @return:
+ - tag (name for the repository),
+ - rev (current revision identifier),
+ - date (date of current revision),
+ - hash (git hash for the current revision)
+ @rtype: C{tuple} of three C{str} and a C{time.struct_time}
+ """
_program_dir = path or _get_program_dir()
cmd = 'git'
rev = None
@@ -222,6 +257,15 @@
def getversion_nightly():
+ """Get version info for a nightly release.
+
+ @return:
+ - tag (name for the repository),
+ - rev (current revision identifier),
+ - date (date of current revision),
+ - hash (git hash for the current revision)
+ @rtype: C{tuple} of three C{str} and a C{time.struct_time}
+ """
import wikipediatools
try:
data = open(os.path.join(wikipediatools.get_base_dir(), 'version'))
@@ -254,12 +298,20 @@
def getfileversion(filename):
- """ Retrieve revision number of file (__version__ variable containing Id tag)
- without importing it (thus can be done for any file)
+ """ Retrieve revision number of file.
+
+ Extracts __version__ variable containing Id tag, without importing it.
+ (thus can be done for any file)
+
+ The version variable containing the Id tag is read and
+ returned. Because it doesn't import it, the version can
+ be retrieved from any file.
+ @param filename: Name of the file to get version
+ @type filename: string
"""
_program_dir = _get_program_dir()
__version__ = None
- size, mtime = None, None
+ mtime = None
fn = os.path.join(_program_dir, filename)
if os.path.exists(fn):
for line in open(fn, 'r').readlines():
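To complement the getfileversion docstring above, a minimal sketch of reading a file's __version__ line without importing the module (regex-based, with invented names) might look like:

    # Illustrative sketch only: extract a module's __version__ string
    # without importing the module, as the docstring above describes.
    import os
    import re

    def read_version_line(path):
        """Return the __version__ value found in path, or None."""
        if not os.path.exists(path):
            return None
        with open(path, 'r') as source:
            for line in source:
                match = re.match(r"\s*__version__\s*=\s*['\"](.+?)['\"]", line)
                if match:
                    return match.group(1)
        return None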
--
To view, visit https://gerrit.wikimedia.org/r/206629
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ie8e38cd3a31edeb967b471fcaca582708305a821
Gerrit-PatchSet: 4
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>