jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/433408 )
Change subject: [IMPR] Drop a script from the throttle log while waiting ......................................................................
[IMPR] Drop a script from the throttle log while waiting
If a script is waiting at least 30 seconds, drop it from the throttle log by calling stopme() and do not throttle other scripts until the next api write call.
Change-Id: I6026336b27a4cb18681e5cb86e5a719346a93a20 --- M pywikibot/__init__.py M pywikibot/data/api.py M pywikibot/data/sparql.py M pywikibot/pagegenerators.py M pywikibot/specialbots.py M scripts/checkimages.py M scripts/flickrripper.py M scripts/imagerecat.py M scripts/interwiki.py M scripts/patrol.py M scripts/welcome.py 11 files changed, 34 insertions(+), 30 deletions(-)
Approvals: Zhuyifei1999: Looks good to me, but someone else must approve Framawiki: Looks good to me, approved jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py index b64f7ba..9ce9f11 100644 --- a/pywikibot/__init__.py +++ b/pywikibot/__init__.py @@ -17,6 +17,7 @@ import re import sys import threading +import time
from warnings import warn
@@ -1323,6 +1324,17 @@ # Throttle and thread handling
+def sleep(secs): + """Suspend execution of the current thread for the given number of seconds. + + Drop this process from the throttle log if wait time is greater than + 30 seconds. + """ + if secs >= 30: + stopme() + time.sleep(secs) + + def stopme(): """ Drop this process from the throttle log, after pending threads finish. diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py index d956df9..2a97f30 100644 --- a/pywikibot/data/api.py +++ b/pywikibot/data/api.py @@ -14,7 +14,6 @@ import os import pprint import re -import time import traceback
from collections import Container, MutableMapping @@ -2210,7 +2209,7 @@ raise TimeoutError("Maximum retries attempted without success.") pywikibot.warning(u"Waiting %s seconds before retrying." % self.retry_wait) - time.sleep(self.retry_wait) + pywikibot.sleep(self.retry_wait) # double the next wait, but do not exceed 120 seconds self.retry_wait = min(120, self.retry_wait * 2)
@@ -3074,9 +3073,10 @@ if hasattr(self, '_waituntil'): if datetime.datetime.now() < self._waituntil: diff = self._waituntil - datetime.datetime.now() - pywikibot.warning(u"Too many tries, waiting %s seconds before retrying." - % diff.seconds) - time.sleep(diff.seconds) + pywikibot.warning( + 'Too many tries, waiting {} seconds before retrying.' + .format(diff.seconds)) + pywikibot.sleep(diff.seconds)
# base login request login_request = self.site._request( diff --git a/pywikibot/data/sparql.py b/pywikibot/data/sparql.py index 1453c8d..5adf153 100644 --- a/pywikibot/data/sparql.py +++ b/pywikibot/data/sparql.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- """SPARQL Query interface.""" # -# (C) Pywikibot team, 2016-2017 +# (C) Pywikibot team, 2016-2018 # # Distributed under the terms of the MIT license. # @@ -9,11 +9,10 @@
import json import sys -import time
from requests.exceptions import Timeout
-from pywikibot import config, warning, Site +from pywikibot import config, warning, Site, sleep from pywikibot.comms import http from pywikibot.tools import UnicodeMixin, py2_encode_utf_8 from pywikibot.exceptions import Error, TimeoutError @@ -162,7 +161,7 @@ if self.max_retries < 0: raise TimeoutError('Maximum retries attempted without success.') warning('Waiting {0} seconds before retrying.'.format(self.retry_wait)) - time.sleep(self.retry_wait) + sleep(self.retry_wait) # double the next wait, but do not exceed 120 seconds self.retry_wait = min(120, self.retry_wait * 2)
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py index 8208fad..db47ebd 100644 --- a/pywikibot/pagegenerators.py +++ b/pywikibot/pagegenerators.py @@ -27,7 +27,6 @@ import json import re import sys -import time
from datetime import timedelta from warnings import warn @@ -2043,7 +2042,7 @@ return else: break - time.sleep(sleep_duration) + pywikibot.sleep(sleep_duration) for item in list(filtered_generator())[::-1]: yield item
diff --git a/pywikibot/specialbots.py b/pywikibot/specialbots.py index 0818e72..5790984 100644 --- a/pywikibot/specialbots.py +++ b/pywikibot/specialbots.py @@ -3,7 +3,7 @@ """Library containing special bots.""" # # (C) Rob W.W. Hooft, Andre Engels 2003-2004 -# (C) Pywikibot team, 2003-2017 +# (C) Pywikibot team, 2003-2018 # # Distributed under the terms of the MIT license. # @@ -11,7 +11,6 @@
import os import tempfile -import time
import pywikibot import pywikibot.data.api @@ -181,7 +180,7 @@ if valid_ranges and rlen > 0: resume = True pywikibot.output(u"Sleeping for %d seconds..." % dt) - time.sleep(dt) + pywikibot.sleep(dt) if dt <= 60: dt += 15 elif dt < 360: diff --git a/scripts/checkimages.py b/scripts/checkimages.py index a398533..44a14af 100755 --- a/scripts/checkimages.py +++ b/scripts/checkimages.py @@ -1754,8 +1754,7 @@
if repeat: pywikibot.output(u"Waiting for %s seconds," % time_sleep) - pywikibot.stopme() - time.sleep(time_sleep) + pywikibot.sleep(time_sleep) else: break
diff --git a/scripts/flickrripper.py b/scripts/flickrripper.py index e6d8004..8453231 100755 --- a/scripts/flickrripper.py +++ b/scripts/flickrripper.py @@ -26,7 +26,7 @@ """ # # (C) Multichill, 2009 -# (C) Pywikibot team, 2009-2017 +# (C) Pywikibot team, 2009-2018 # # Distributed under the terms of the MIT license. # @@ -37,7 +37,6 @@ import io import re import sys -import time
import pywikibot from pywikibot import config, textlib @@ -92,7 +91,7 @@ return photoInfo, photoSizes except flickrapi.exceptions.FlickrError: pywikibot.output(u'Flickr api problem, sleeping') - time.sleep(30) + pywikibot.sleep(30)
def isAllowedLicense(photoInfo): @@ -390,7 +389,7 @@ except flickrapi.exceptions.FlickrError: gotPhotos = False pywikibot.output(u'Flickr api problem, sleeping') - time.sleep(30) + pywikibot.sleep(30)
return
diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py index 884da46..5ba8b05 100755 --- a/scripts/imagerecat.py +++ b/scripts/imagerecat.py @@ -36,7 +36,6 @@ import re import socket import sys -import time import xml.etree.ElementTree
import pywikibot @@ -233,10 +232,10 @@ gotInfo = True except IOError: pywikibot.output(u'Got an IOError, let\'s try again') - time.sleep(30) + pywikibot.sleep(30) except socket.timeout: pywikibot.output(u'Got a timeout, let\'s try again') - time.sleep(30) + pywikibot.sleep(30) validParts = [u'hamlet', u'village', u'city', u'county', u'country'] invalidParts = [u'path', u'road', u'suburb', u'state', u'country_code'] addressparts = et.find('addressparts') diff --git a/scripts/interwiki.py b/scripts/interwiki.py index 82d4f94..d969406 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -355,7 +355,6 @@ import shelve import socket import sys -import time
import pywikibot
@@ -1922,7 +1921,7 @@ pywikibot.output('Sleeping %i seconds before trying again.' % (timeout,)) timeout *= 2 - time.sleep(timeout) + pywikibot.sleep(timeout) except pywikibot.ServerError: if timeout > 3600: raise @@ -1930,7 +1929,7 @@ pywikibot.output('Sleeping %i seconds before trying again.' % (timeout,)) timeout *= 2 - time.sleep(timeout) + pywikibot.sleep(timeout) else: break return True @@ -2177,7 +2176,7 @@ 'ERROR: could not retrieve more pages. ' 'Will try again in %d seconds' % timeout) - time.sleep(timeout) + pywikibot.sleep(timeout) timeout *= 2 else: break diff --git a/scripts/patrol.py b/scripts/patrol.py index b525b03..1f3b897 100755 --- a/scripts/patrol.py +++ b/scripts/patrol.py @@ -454,7 +454,7 @@ yield page[1] if repeat: pywikibot.output(u'Sleeping for %d seconds' % delay) - time.sleep(delay) + pywikibot.sleep(delay) else: break
diff --git a/scripts/welcome.py b/scripts/welcome.py index eaf1839..cd5ec05 100755 --- a/scripts/welcome.py +++ b/scripts/welcome.py @@ -844,8 +844,7 @@ u"%d %b %Y %H:%M:%S (UTC)", time.gmtime()) pywikibot.output(u'Sleeping %d seconds before rerun. %s' % (globalvar.timeRecur, strfstr)) - pywikibot.stopme() - time.sleep(globalvar.timeRecur) + pywikibot.sleep(globalvar.timeRecur) else: raise KeyboardInterrupt except KeyboardInterrupt:
pywikibot-commits@lists.wikimedia.org