http://www.mediawiki.org/wiki/Special:Code/pywikipedia/9456
Revision: 9456
Author:   russblau
Date:     2011-08-25 19:15:23 +0000 (Thu, 25 Aug 2011)

Log Message:
-----------
step down limits in case of server error, and use indexpageids option to iterate query results in correct order
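
To illustrate the first part of the change, here is a minimal, self-contained sketch of the limit step-down retry idea. It is not the api.py code itself: fetch, MAX_RETRIES and submit_with_stepdown are hypothetical names used only for this example; the halving of every parameter that ends in "limit" mirrors the behaviour added in this revision.

# Hypothetical illustration of the "step down limits" retry strategy.
# fetch() stands in for an HTTP call that may return non-JSON output
# when the server is overloaded; it is not part of pywikibot.
import json

MAX_RETRIES = 5

def submit_with_stepdown(fetch, params):
    """Retry a query, halving every *limit parameter on bad responses."""
    for attempt in range(MAX_RETRIES):
        rawdata = fetch(params)
        try:
            return json.loads(rawdata)
        except ValueError:
            # Non-JSON reply: the requested limit may be too large,
            # so cut every parameter ending in "limit" in half.
            # (The real api.py also sleeps between attempts via self.wait().)
            for key in params:
                if key.endswith("limit"):
                    try:
                        params[key] = str(int(params[key]) // 2)
                    except ValueError:
                        pass  # non-numeric limits (e.g. "max") are left alone
    raise RuntimeError("server kept returning non-JSON data")

Halving gradually reduces the requested batch size instead of abandoning the request outright.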
Modified Paths:
--------------
    branches/rewrite/pywikibot/data/api.py
Modified: branches/rewrite/pywikibot/data/api.py
===================================================================
--- branches/rewrite/pywikibot/data/api.py	2011-08-23 16:52:37 UTC (rev 9455)
+++ branches/rewrite/pywikibot/data/api.py	2011-08-25 19:15:23 UTC (rev 9456)
@@ -295,6 +295,16 @@
                     "Non-JSON response received from server %s; the server may be down."
                     % self.site)
                 pywikibot.debug(rawdata, _logger)
+                # there might also be an overflow, so try a smaller limit
+                for param in self.params:
+                    if param.endswith("limit"):
+                        value = self.params[param]
+                        try:
+                            self.params[param] = str(int(value) // 2)
+                            pywikibot.output(u"Set %s = %s"
+                                             % (param, self.params[param]))
+                        except:
+                            pass
                 self.wait()
                 continue
             if not result:
@@ -415,6 +425,7 @@
             if name not in _modules:
                 self.get_module()
                 break
+        kwargs["indexpageids"] = ""  # always ask for list of pageids
         self.request = Request(**kwargs)
         self.prefix = None
         self.update_limit()    # sets self.prefix
@@ -566,7 +577,14 @@
                                       resultdata.keys(),
                                       self.limit),
                                   _logger)
-                resultdata = [resultdata[k] for k in sorted(resultdata.keys())]
+                if "pageids" in self.data["query"]:
+                    # this ensures that page data will be iterated
+                    # in the same order as received from server
+                    resultdata = [resultdata[k]
+                                  for k in self.data["query"]["pageids"]]
+                else:
+                    resultdata = [resultdata[k]
+                                  for k in sorted(resultdata.keys())]
             else:
                 pywikibot.debug(u"%s received %s; limit=%s"
                                 % (self.__class__.__name__,
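
As a rough sketch of the second part of the change, the example below shows how the pageids list that MediaWiki returns when the indexpageids option is set can be used to iterate a query's pages mapping in the order the server produced it, with a sorted-key fallback when the list is absent. The response dict is invented for illustration; only the indexpageids/pageids behaviour itself comes from the API.

# Illustrative only: a hand-written response shaped like a MediaWiki
# API reply to action=query&...&indexpageids=.  A real client builds
# this dict from the server's JSON output.
response = {
    "query": {
        "pageids": ["736", "15580374", "290"],   # server-side order
        "pages": {
            "290":      {"pageid": 290,      "title": "A"},
            "736":      {"pageid": 736,      "title": "B"},
            "15580374": {"pageid": 15580374, "title": "C"},
        },
    },
}

pages = response["query"]["pages"]
if "pageids" in response["query"]:
    # indexpageids was requested: preserve the server's ordering
    ordered = [pages[pid] for pid in response["query"]["pageids"]]
else:
    # fall back to a deterministic but arbitrary sorted-key order
    ordered = [pages[k] for k in sorted(pages)]

for page in ordered:
    print(page["title"])     # prints B, C, A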
pywikipedia-svn@lists.wikimedia.org