jenkins-bot has submitted this change and it was merged.
Change subject: harvest_template: Use PreloadingGenerator
......................................................................
harvest_template: Use PreloadingGenerator
PreloadingGenerator can safely be used because we are
not writing to the pages. Using larger batches should
speed up processing, even though writing to wikidata
will still be slow.
Change-Id: Id8b411134b6337c3ab6f56f6dacb2e1500ebfa72
---
M scripts/harvest_template.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Merlijn van Deen: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py
index 05a9c52..d4bef05 100755
--- a/scripts/harvest_template.py
+++ b/scripts/harvest_template.py
@@ -40,7 +40,7 @@
* fields - A dictionary of fields that are of use to us
"""
- self.generator = generator
+ self.generator = pg.PreloadingGenerator(generator)
self.templateTitle = templateTitle.replace(u'_', u' ')
# TODO: Make it a list which also includes the redirects to the template
self.fields = fields
--
To view, visit https://gerrit.wikimedia.org/r/81016
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Id8b411134b6337c3ab6f56f6dacb2e1500ebfa72
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: Working on generate_user_files.py and I made it a little more user-friendly
......................................................................
Working on generate_user_files.py and I made it a little more user-friendly
Change-Id: I397b7c1b4ba72b00cc256c85350b2466e7634058
---
M generate_user_files.py
1 file changed, 30 insertions(+), 7 deletions(-)
Approvals:
Merlijn van Deen: Looks good to me, approved
jenkins-bot: Verified
diff --git a/generate_user_files.py b/generate_user_files.py
index 521d0b5..13245f5 100644
--- a/generate_user_files.py
+++ b/generate_user_files.py
@@ -27,11 +27,20 @@
if choice == '' and default:
return default
-
+ try:
+ choice=int(choice)
+ except ValueError:
+ pass
+ if isinstance(choice, basestring):
+ if not choice in clist:
+ print("Invalid response")
+ else:
+ return choice
try:
return clist[int(choice) - 1]
except:
- print("Invalid response")
+ if not isinstance(choice, basestring):
+ print("Invalid response")
return response
def file_exists(filename):
@@ -48,8 +57,21 @@
os.path.join(base_dir,
"families"))))
fam = listchoice(known_families,
- "Select family of sites we are working on",
+ "Select family of sites we are working on, " \
+ "just enter the number not name",
default='wikipedia')
+ codesds=codecs.open("families/%s_family.py" % fam, "r","utf-8").read()
+ rre=re.compile("self\.languages\_by\_size *\= *(.+?)\]",re.DOTALL)
+ known_langs=[]
+ if not rre.findall(codesds):
+ rre=re.compile("self\.langs *\= *(.+?)\}",re.DOTALL)
+ if rre.findall(codesds):
+ import ast
+ known_langs=ast.literal_eval(rre.findall(codesds)[0]+u"}").keys()
+ else:
+ known_langs=eval(rre.findall(codesds)[0]+u"]")
+ print "This is the list of known language(s):"
+ print ",".join(known_langs)
mylang = raw_input(
"The language code of the site we're working on (default: 'en'): ") or 'en'
username = raw_input("Username (%s %s): "
@@ -57,7 +79,8 @@
username = unicode(username, console_encoding)
while True:
choice = raw_input(
-"Which variant of user_config.py:\n[S]mall or [E]xtended (with further information)? "
+"Which variant of user_config.py:\n"\
+"[S]mall or [E]xtended (with further information)? "
).upper()
if choice in "SE":
break
@@ -137,10 +160,10 @@
print("'%s' written." % _fnf)
if __name__ == "__main__":
- print("1: Create user_config.py file")
- print("2: Create user_fixes.py file")
+ print("1: Create user_config.py file (required)")
+ print("2: Create user_fixes.py file (optional, for advanced usage)")
print("3: The two files")
- choice = raw_input("What do you do? ")
+ choice = raw_input("What do you do? Just enter the number: ")
if choice == "1":
create_user_config('')
if choice == "2":
--
To view, visit https://gerrit.wikimedia.org/r/79569
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I397b7c1b4ba72b00cc256c85350b2466e7634058
Gerrit-PatchSet: 7
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: DrTrigon <dr.trigon(a)surfeu.ch>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: pep8-ify exceptions.py
......................................................................
pep8-ify exceptions.py
Change-Id: Ibc6de40d4adef27d76a090ce0da10bfc66ecc32a
---
M pywikibot/exceptions.py
1 file changed, 2 insertions(+), 0 deletions(-)
Approvals:
Ladsgroup: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/exceptions.py b/pywikibot/exceptions.py
index 8972fed..dd97cb0 100644
--- a/pywikibot/exceptions.py
+++ b/pywikibot/exceptions.py
@@ -119,10 +119,12 @@
class ServerError(Error):
"""Got unexpected server response"""
+
class FatalServerError(ServerError):
"""A fatal server error that's not going to be corrected by just sending
the request again."""
+
class Server504Error(Error):
"""Server timed out with http 504 code"""
--
To view, visit https://gerrit.wikimedia.org/r/80770
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ibc6de40d4adef27d76a090ce0da10bfc66ecc32a
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: pep8-ified pagegenerators.py
......................................................................
pep8-ified pagegenerators.py
Change-Id: I1e847c616eaab599672ffe2dda2861e7a6fcb867
---
M pywikibot/pagegenerators.py
1 file changed, 68 insertions(+), 54 deletions(-)
Approvals:
Ladsgroup: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index d07b9e0..c6e8c7d 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -27,7 +27,6 @@
from pywikibot import deprecate_arg, i18n
-
# ported from version 1 for backwards-compatibility
# most of these functions just wrap a Site or Page method that returns
# a generator
@@ -172,7 +171,6 @@
self.step = None
self.limit = None
-
def getCombinedGenerator(self):
"""Return the combination of all accumulated generators.
@@ -189,8 +187,7 @@
self.gens[i].set_maximum_items(self.limit)
else:
if self.namespaces:
- self.gens[i] = NamespaceFilterPageGenerator(
- self.gens[i], namespaces)
+ self.gens[i] = NamespaceFilterPageGenerator(self.gens[i], namespaces)
if self.limit:
self.gens[i] = itertools.islice(self.gens[i], self.limit)
if len(self.gens) == 0:
@@ -217,8 +214,8 @@
defaultNamespace=14))
# Link constructor automatically prepends localized namespace
# if not included in user's input
- return CategorizedPageGenerator(cat,
- start=startfrom, recurse=recurse, content=content)
+ return CategorizedPageGenerator(cat, start=startfrom,
+ recurse=recurse, content=content)
def setSubCategoriesGen(self, arg, length, recurse=False, content=False):
if len(arg) == length:
@@ -235,8 +232,8 @@
cat = pywikibot.Category(pywikibot.Link(categoryname,
defaultNamespace=14))
- return SubCategoriesPageGenerator(cat,
- start=startfrom, recurse=recurse, content=content)
+ return SubCategoriesPageGenerator(cat, start=startfrom,
+ recurse=recurse, content=content)
def handleArg(self, arg):
"""Parse one argument at a time.
@@ -268,19 +265,19 @@
if len(arg) == 12:
gen = UnusedFilesGenerator()
else:
- gen = UnusedFilesGenerator(number = int(arg[13:]))
+ gen = UnusedFilesGenerator(number=int(arg[13:]))
elif arg.startswith('-unwatched'):
if len(arg) == 10:
gen = UnwatchedPagesPageGenerator()
else:
- gen = UnwatchedPagesPageGenerator(number = int(arg[11:]))
+ gen = UnwatchedPagesPageGenerator(number=int(arg[11:]))
elif arg.startswith('-usercontribs'):
gen = UserContributionsGenerator(arg[14:])
elif arg.startswith('-withoutinterwiki'):
if len(arg) == 17:
gen = WithoutInterwikiPageGenerator()
else:
- gen = WithoutInterwikiPageGenerator(number = int(arg[18:]))
+ gen = WithoutInterwikiPageGenerator(number=int(arg[18:]))
elif arg.startswith('-interwiki'):
title = arg[11:]
if not title:
@@ -327,23 +324,23 @@
self.limit = int(arg[len('-limit:'):])
return True
elif arg.startswith('-catr'):
- gen = self.getCategoryGen(arg, len('-catr'), recurse = True)
+ gen = self.getCategoryGen(arg, len('-catr'), recurse=True)
elif arg.startswith('-category'):
gen = self.getCategoryGen(arg, len('-category'))
elif arg.startswith('-cat'):
gen = self.getCategoryGen(arg, len('-cat'))
elif arg.startswith('-subcatsr'):
- gen = self.setSubCategoriesGen(arg, 9, recurse = True)
+ gen = self.setSubCategoriesGen(arg, 9, recurse=True)
elif arg.startswith('-subcats'):
gen = self.setSubCategoriesGen(arg, 8)
elif arg.startswith('-page'):
if len(arg) == len('-page'):
gen = [pywikibot.Page(
- pywikibot.Link(
- pywikibot.input(
- u'What page do you want to use?'),
- pywikibot.getSite())
- )]
+ pywikibot.Link(
+ pywikibot.input(
+ u'What page do you want to use?'),
+ pywikibot.getSite())
+ )]
else:
gen = [pywikibot.Page(pywikibot.Link(arg[len('-page:'):],
pywikibot.getSite())
@@ -382,9 +379,9 @@
transclusionPageTitle = pywikibot.input(
u'Pages that transclude which page should be processed?')
transclusionPage = pywikibot.Page(
- pywikibot.Link(transclusionPageTitle,
- defaultNamespace=10,
- source=pywikibot.Site()))
+ pywikibot.Link(transclusionPageTitle,
+ defaultNamespace=10,
+ source=pywikibot.Site()))
gen = ReferringPageGenerator(transclusionPage,
onlyTemplateInclusion=True)
elif arg.startswith('-start'):
@@ -411,9 +408,9 @@
gen = NewimagesPageGenerator(total=int(limit))
elif arg.startswith('-newpages'):
if len(arg) >= 10:
- gen = NewpagesPageGenerator(total=int(arg[10:]))
+ gen = NewpagesPageGenerator(total=int(arg[10:]))
else:
- gen = NewpagesPageGenerator(total=60)
+ gen = NewpagesPageGenerator(total=60)
elif arg.startswith('-imagesused'):
imagelinkstitle = arg[len('-imagesused:'):]
if not imagelinkstitle:
@@ -428,7 +425,7 @@
mediawikiQuery = pywikibot.input(
u'What do you want to search for?')
# In order to be useful, all namespaces are required
- gen = SearchPageGenerator(mediawikiQuery, namespaces = [])
+ gen = SearchPageGenerator(mediawikiQuery, namespaces=[])
elif arg.startswith('-google'):
gen = GoogleSearchPageGenerator(arg[8:])
elif arg.startswith('-titleregex'):
@@ -500,7 +497,7 @@
@deprecate_arg("namespace", "namespaces")
@deprecate_arg("repeat", None)
def NewpagesPageGenerator(get_redirect=False, repeat=False, site=None,
- namespaces=[0,], step=None, total=None):
+ namespaces=[0, ], step=None, total=None):
"""
Iterate Page objects for all new titles in a single namespace.
"""
@@ -582,10 +579,10 @@
step=None, total=None, content=False):
'''Yields all pages referring to a specific page.'''
return referredPage.getReferences(
- follow_redirects=followRedirects,
- withTemplateInclusion=withTemplateInclusion,
- onlyTemplateInclusion=onlyTemplateInclusion,
- step=step, total=total, content=content)
+ follow_redirects=followRedirects,
+ withTemplateInclusion=withTemplateInclusion,
+ onlyTemplateInclusion=onlyTemplateInclusion,
+ step=step, total=total, content=content)
def CategorizedPageGenerator(category, recurse=False, start=None,
@@ -612,6 +609,7 @@
for a in category.articles(**kwargs):
yield a
+
def SubCategoriesPageGenerator(category, recurse=False, start=None,
step=None, total=None, content=False):
"""Yield all subcategories in a specific category.
@@ -629,8 +627,8 @@
"""
# TODO: page generator could be modified to use cmstartsortkey ...
- for s in category.subcategories(
- recurse=recurse, step=step, total=total, content=content):
+ for s in category.subcategories(recurse=recurse, step=step,
+ total=total, content=content):
if start is None or s.title(withNamespace=False) >= start:
yield s
@@ -749,6 +747,7 @@
seenPages[page] = True
yield page
+
def RegexFilterPageGenerator(generator, regex):
"""Yield pages from another generator whose titles match regex."""
reg = re.compile(regex, re.I)
@@ -842,11 +841,13 @@
extension=extension):
yield pywikibot.ImagePage(page.site, page.title())
+
def WithoutInterwikiPageGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page in site.withoutinterwiki(number=number, repeat=repeat):
yield page
+
def UnCategorizedCategoryGenerator(number=100, repeat=False, site=None):
if site is None:
@@ -854,11 +855,13 @@
for page in site.uncategorizedcategories(number=number, repeat=repeat):
yield page
-def UnCategorizedImageGenerator(number = 100, repeat = False, site = None):
+
+def UnCategorizedImageGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page in site.uncategorizedimages(number=number, repeat=repeat):
yield page
+
def UnCategorizedPageGenerator(number=100, repeat=False, site=None):
if site is None:
@@ -866,31 +869,36 @@
for page in site.uncategorizedpages(number=number, repeat=repeat):
yield page
-def LonelyPagesPageGenerator(number = 100, repeat = False, site = None):
+
+def LonelyPagesPageGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page in site.lonelypages(number=number, repeat=repeat):
yield page
-def UnwatchedPagesPageGenerator(number = 100, repeat = False, site = None):
+
+def UnwatchedPagesPageGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page in site.unwatchedpages(number=number, repeat=repeat):
yield page
-def AncientPagesPageGenerator(number = 100, repeat = False, site = None):
+
+def AncientPagesPageGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page, date in site.ancientpages(number=number, repeat=repeat):
yield page
-def DeadendPagesPageGenerator(number = 100, repeat = False, site = None):
+
+def DeadendPagesPageGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page in site.deadendpages(number=number, repeat=repeat):
yield page
-def LongPagesPageGenerator(number = 100, repeat = False, site = None):
+
+def LongPagesPageGenerator(number=100, repeat=False, site=None):
if site is None:
site = pywikibot.Site()
for page, length in site.longpages(number=number, repeat=repeat):
@@ -902,6 +910,7 @@
site = pywikibot.Site()
for page, length in site.shortpages(total=number, repeat=repeat):
yield page
+
def LinksearchPageGenerator(link, namespaces=None, step=None, total=None,
site=None):
@@ -915,6 +924,7 @@
total=total, content=False):
yield page
+
def SearchPageGenerator(query, step=None, total=None, namespaces=None, site=None):
"""
Provides a list of results using the internal MediaWiki search engine
@@ -926,11 +936,12 @@
# following classes just ported from version 1 without revision; not tested
+
class YahooSearchPageGenerator:
'''
To use this generator, install pYsearch
'''
- def __init__(self, query = None, count = 100, site = None): # values larger than 100 fail
+ def __init__(self, query=None, count=100, site=None): # values larger than 100 fail
self.query = query or pywikibot.input(u'Please enter the search query:')
self.count = count
if site is None:
@@ -938,14 +949,14 @@
self.site = site
def queryYahoo(self, query):
- from yahoo.search.web import WebSearch
- srch = WebSearch(config.yahoo_appid, query=query, results=self.count)
+ from yahoo.search.web import WebSearch
+ srch = WebSearch(config.yahoo_appid, query=query, results=self.count)
- dom = srch.get_results()
- results = srch.parse_results(dom)
- for res in results:
- url = res.Url
- yield url
+ dom = srch.get_results()
+ results = srch.parse_results(dom)
+ for res in results:
+ url = res.Url
+ yield url
def __iter__(self):
# restrict query to local site
@@ -958,6 +969,7 @@
page = pywikibot.Page(pywikibot.Link(title, pywikibot.Site()))
yield page
+
class GoogleSearchPageGenerator:
'''
To use this generator, you must install the pyGoogle module from
@@ -965,7 +977,7 @@
http://www.google.com/apis/index.html . The google_key must be set to your
license key in your configuration.
'''
- def __init__(self, query = None, site = None):
+ def __init__(self, query=None, site=None):
self.query = query or pywikibot.input(u'Please enter the search query:')
if site is None:
site = pywikibot.Site()
@@ -993,12 +1005,12 @@
offset = 0
estimatedTotalResultsCount = None
while not estimatedTotalResultsCount \
- or offset < estimatedTotalResultsCount:
+ or offset < estimatedTotalResultsCount:
while (True):
# Google often yields 502 errors.
try:
pywikibot.output(u'Querying Google, offset %i' % offset)
- data = google.doGoogleSearch(query, start = offset, filter = False)
+ data = google.doGoogleSearch(query, start=offset, filter=False)
break
except KeyboardInterrupt:
raise
@@ -1119,7 +1131,7 @@
yield page
-def YearPageGenerator(start = 1, end = 2050, site = None):
+def YearPageGenerator(start=1, end=2050, site=None):
if site is None:
site = pywikibot.Site()
pywikibot.output(u"Starting with year %i" % start)
@@ -1128,18 +1140,20 @@
pywikibot.output(u'Preparing %i...' % i)
# There is no year 0
if i != 0:
- current_year = date.formatYear(site.lang, i )
+ current_year = date.formatYear(site.lang, i)
yield pywikibot.Page(pywikibot.Link(current_year, site))
-def DayPageGenerator(startMonth = 1, endMonth = 12, site = None):
+
+def DayPageGenerator(startMonth=1, endMonth=12, site=None):
if site is None:
site = pywikibot.Site()
fd = date.FormatDate(site)
firstPage = pywikibot.Page(site, fd(startMonth, 1))
pywikibot.output(u"Starting with %s" % firstPage.title(asLink=True))
- for month in xrange(startMonth, endMonth+1):
- for day in xrange(1, date.getNumberOfDaysInMonth(month)+1):
+ for month in xrange(startMonth, endMonth + 1):
+ for day in xrange(1, date.getNumberOfDaysInMonth(month) + 1):
yield pywikibot.Page(pywikibot.Link(fd(month, day), site))
+
def main(*args):
try:
@@ -1161,5 +1175,5 @@
pywikibot.stopme()
-if __name__=="__main__":
+if __name__ == "__main__":
main()
--
To view, visit https://gerrit.wikimedia.org/r/80771
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I1e847c616eaab599672ffe2dda2861e7a6fcb867
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: pep8-ified pywikibot/throttle.py
......................................................................
pep8-ified pywikibot/throttle.py
Change-Id: I66c331d0d4860ef76f1fcfeafd47d17f5b76ba9d
---
M pywikibot/throttle.py
1 file changed, 17 insertions(+), 17 deletions(-)
Approvals:
Ladsgroup: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py
index 0a20d0d..240b606 100644
--- a/pywikibot/throttle.py
+++ b/pywikibot/throttle.py
@@ -52,10 +52,10 @@
self.last_read = 0
self.last_write = 0
self.next_multiplicity = 1.0
- self.checkdelay = 300 # Check logfile again after this many seconds
- self.dropdelay = 600 # Ignore processes that have not made
- # a check in this many seconds
- self.releasepid = 1200 # Free the process id after this many seconds
+ self.checkdelay = 300 # Check logfile again after this many seconds
+ self.dropdelay = 600 # Ignore processes that have not made
+ # a check in this many seconds
+ self.releasepid = 1200 # Free the process id after this many seconds
self.lastwait = 0.0
self.delay = 0
self.checktime = 0
@@ -107,7 +107,7 @@
'time': ptime,
'site': this_site})
if not pid and this_pid >= my_pid:
- my_pid = this_pid+1 # next unused process id
+ my_pid = this_pid + 1 # next unused process id
if not pid:
pid = my_pid
@@ -115,7 +115,7 @@
processes.append({'pid': pid,
'time': self.checktime,
'site': mysite})
- processes.sort(key=lambda p:(p['pid'], p['site']))
+ processes.sort(key=lambda p: (p['pid'], p['site']))
try:
f = open(self.ctrlfilename, 'w')
for p in processes:
@@ -167,7 +167,7 @@
thisdelay = self.writedelay
else:
thisdelay = self.delay
- if self.multiplydelay: # We're checking for multiple processes
+ if self.multiplydelay: # We're checking for multiple processes
if time.time() > self.checktime + self.checkdelay:
self.checkMultiplicity()
if thisdelay < (self.mindelay * self.next_multiplicity):
@@ -210,7 +210,7 @@
this_pid = int(line[0])
ptime = int(line[1].split('.')[0])
this_site = line[2].rstrip()
- except (IndexError,ValueError):
+ except (IndexError, ValueError):
continue # Sometimes the file gets corrupted
# ignore that line
if now - ptime <= self.releasepid \
@@ -218,7 +218,7 @@
processes.append({'pid': this_pid,
'time': ptime,
'site': this_site})
- processes.sort(key=lambda p:p['pid'])
+ processes.sort(key=lambda p: p['pid'])
try:
f = open(self.ctrlfilename, 'w')
for p in processes:
@@ -245,23 +245,23 @@
# We want to add "one delay" for each factor of two in the
# size of the request. Getting 64 pages at once allows 6 times
# the delay time for the server.
- self.next_multiplicity = math.log(1+requestsize)/math.log(2.0)
+ self.next_multiplicity = math.log(1 + requestsize) / math.log(2.0)
# Announce the delay if it exceeds a preset limit
if wait > 0:
if wait > config.noisysleep or self.verbosedelay:
pywikibot.output(
u"Sleeping for %(wait).1f seconds, %(now)s"
% {'wait': wait,
- 'now' : time.strftime("%Y-%m-%d %H:%M:%S",
+ 'now': time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime())
- } )
+ })
else:
pywikibot.log(
u"Sleeping for %(wait).1f seconds, %(now)s"
% {'wait': wait,
- 'now' : time.strftime("%Y-%m-%d %H:%M:%S",
+ 'now': time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime())
- } )
+ })
time.sleep(wait)
if write:
@@ -282,7 +282,7 @@
try:
# start at 1/2 the current server lag time
# wait at least 5 seconds but not more than 120 seconds
- delay = min(max(5, lagtime//2), 120)
+ delay = min(max(5, lagtime // 2), 120)
# account for any time we waited while acquiring the lock
wait = delay - (time.time() - started)
if wait > 0:
@@ -292,14 +292,14 @@
% {'wait': wait,
'now': time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime())
- } )
+ })
else:
pywikibot.log(
u"Sleeping for %(wait).1f seconds, %(now)s"
% {'wait': wait,
'now': time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime())
- } )
+ })
time.sleep(wait)
finally:
self.lock.release()
--
To view, visit https://gerrit.wikimedia.org/r/80774
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I66c331d0d4860ef76f1fcfeafd47d17f5b76ba9d
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: pep8-ified pywikibot/titletranslate.py
......................................................................
pep8-ified pywikibot/titletranslate.py
Change-Id: I5c800a0d33948a27dc523c1f2377e1ad63e160bd
---
M pywikibot/titletranslate.py
1 file changed, 15 insertions(+), 12 deletions(-)
Approvals:
Ladsgroup: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/titletranslate.py b/pywikibot/titletranslate.py
index 5367e92..24ed10f 100644
--- a/pywikibot/titletranslate.py
+++ b/pywikibot/titletranslate.py
@@ -13,6 +13,7 @@
import pywikibot
import pywikibot.date as date
+
def translate(page, hints=None, auto=True, removebrackets=False, site=None, family=None):
"""
Goes through all entries in 'hints'. Returns a list of pages.
@@ -45,7 +46,7 @@
# be a page in language xy with the same title as the page
# we're currently working on ...
if page is None:
- continue
+ continue
ns = page.namespace()
if ns:
newname = u'%s:%s' % (family.namespace('_default', ns),
@@ -95,13 +96,13 @@
% (page.title(), dictName, value))
for entryLang, entry in date.formats[dictName].iteritems():
if entryLang != page.site.code:
- if dictName == 'yearsBC' and \
- entryLang in date.maxyearBC and \
- value > date.maxyearBC[entryLang]:
+ if (dictName == 'yearsBC' and
+ entryLang in date.maxyearBC and
+ value > date.maxyearBC[entryLang]):
pass
- elif dictName == 'yearsAD' and \
- entryLang in date.maxyearAD and \
- value > date.maxyearAD[entryLang]:
+ elif (dictName == 'yearsAD' and
+ entryLang in date.maxyearAD and
+ value > date.maxyearAD[entryLang]):
pass
else:
newname = entry(value)
@@ -110,14 +111,16 @@
pywikibot.getSite(code=entryLang,
fam=site.family))
if x not in result:
- result.append(x) # add new page
+ result.append(x) # add new page
return result
bcDateErrors = [u'[[ko:%d년]]']
-def appendFormatedDates( result, dictName, value ):
+
+def appendFormatedDates(result, dictName, value):
for code, func in date.formats[dictName].iteritems():
- result.append( u'[[%s:%s]]' % (code,func(value)) )
+ result.append(u'[[%s:%s]]' % (code, func(value)))
+
def getPoisonedLinks(pl):
"""Returns a list of known corrupted links that should be removed if seen
@@ -127,11 +130,11 @@
pywikibot.output(u'getting poisoned links for %s' % pl.title())
dictName, value = date.getAutoFormat(pl.site.code, pl.title())
if dictName is not None:
- pywikibot.output( u'date found in %s' % dictName )
+ pywikibot.output(u'date found in %s' % dictName)
# errors in year BC
if dictName in date.bcFormats:
for fmt in bcDateErrors:
- result.append( fmt % value )
+ result.append(fmt % value)
# i guess this is like friday the 13th for the years
if value == 398 and dictName == 'yearsBC':
appendFormatedDates(result, dictName, 399)
--
To view, visit https://gerrit.wikimedia.org/r/80775
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I5c800a0d33948a27dc523c1f2377e1ad63e160bd
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: jenkins-bot