jenkins-bot has submitted this change and it was merged.
Change subject: pep8ify site.py, ......................................................................
pep8ify site.py,
remove obsolete "return None"
Change-Id: Ia3233f6647b394be16de888c548e84dadc1d1998
---
M pywikibot/site.py
1 file changed, 154 insertions(+), 94 deletions(-)
Approvals:
  Xqt: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/pywikibot/site.py b/pywikibot/site.py index 933eb14..18416c1 100644 --- a/pywikibot/site.py +++ b/pywikibot/site.py @@ -63,7 +63,8 @@ for key, value in cls.__dict__.iteritems(): if key == key.upper() and value == search_value: return key - raise KeyError("Value %r could not be found in this enum" % search_value) + raise KeyError("Value %r could not be found in this enum" + % search_value)
def Family(fam=None, fatal=True): @@ -151,7 +152,8 @@ if sysop: sysop = sysop[0].upper() + sysop[1:] self._username = [user, sysop] - self.use_hard_category_redirects = self.code in self.family.use_hard_category_redirects + self.use_hard_category_redirects = ( + self.code in self.family.use_hard_category_redirects)
# following are for use with lock_page and unlock_page methods self._pagemutex = threading.Lock() @@ -207,7 +209,6 @@ return self._username[True] elif self.logged_in(False): return self._username[False] - return None
def username(self, sysop=False): return self._username[sysop] @@ -272,7 +273,6 @@ if namespace.lower() in [name.lower() for name in self.namespaces()[ns]]: return ns - return None
getNamespaceIndex = ns_index # for backwards-compatibility
@@ -351,7 +351,8 @@ """Return Category in which disambig pages are listed."""
try: - name = self.namespace(14) + ':' + self.family.disambcatname[self.code] + name = '%s:%s' % (self.namespace(14), + self.family.disambcatname[self.code]) except KeyError: raise Error(u"No disambiguation category name found for %(site)s" % {'site': self}) @@ -395,12 +396,17 @@ """Return True iff title1 and title2 identify the same wiki page.""" # title1 and title2 may be unequal but still identify the same page, # if they use different aliases for the same namespace + def valid_namespace(text, number): - """Return True iff text is a valid alias for namespace with given number.""" + """Return True if text is a valid alias for namespace with given + number. + + """ for alias in self.namespace(number, all=True): if text.lower() == alias.lower(): return True return False + if title1 == title2: return True # determine whether titles contain namespace prefixes @@ -469,7 +475,8 @@ # TODO: is this even needed? No family in the framework has this # dictionary defined! if self.lang in self.family.interwiki_putfirst_doubled: - if len(list_of_links) >= self.family.interwiki_putfirst_doubled[self.lang][0]: + if (len(list_of_links) >= + self.family.interwiki_putfirst_doubled[self.lang][0]): links2 = [lang.language() for lang in list_of_links] result = [] for lang in self.family.interwiki_putfirst_doubled[self.lang][1]: @@ -866,7 +873,8 @@ if hasattr(self, "_userinfo"): del self._userinfo self.getuserinfo() - if self.userinfo['name'] == self._username[sysop] and self.logged_in(sysop): + if self.userinfo['name'] == self._username[sysop] and \ + self.logged_in(sysop): return loginMan = api.LoginManager(site=self, sysop=sysop, user=self._username[sysop]) @@ -875,7 +883,8 @@ if hasattr(self, "_userinfo"): del self._userinfo self.getuserinfo() - self._loginstatus = LoginStatus.AS_SYSOP if sysop else LoginStatus.AS_USER + self._loginstatus = (LoginStatus.AS_SYSOP + if sysop else LoginStatus.AS_USER) else: self._loginstatus = LoginStatus.NOT_LOGGED_IN # failure if not hasattr(self, 
"_siteinfo"): @@ -1151,7 +1160,8 @@ self._getsiteinfo() if self._extensions is None: if isinstance(unknown, type) and issubclass(unknown, Exception): - raise unknown("Feature 'hasExtension' only available in MW 1.14+") + raise unknown( + "Feature 'hasExtension' only available in MW 1.14+") else: return unknown for ext in self._extensions: @@ -1261,8 +1271,6 @@ m = re.match(r"^MediaWiki ([0-9]+).([0-9]+)(.*)$", versionstring) if m: return (int(m.group(1)), int(m.group(2)), m.group(3)) - else: - return None
def loadpageinfo(self, page): """Load page info from api and save in page attributes""" @@ -1281,7 +1289,7 @@
def loadcoordinfo(self, page): """Load [[mw:Extension:GeoData]] info""" - #prop=coordinates&titles=Wikimedia Foundation&format=jsonfm&coprop=type|name|dim|country|region&coprimary=all + # prop=coordinates&titles=Wikimedia Foundation&format=jsonfm&coprop=type|name|dim|country|region&coprimary=all title = page.title(withSection=False) query = self._generator(api.PropertyGenerator, type_arg="coordinates", @@ -1334,7 +1342,8 @@ u"loadimageinfo: Query on %s returned data on '%s'" % (page, pageitem['title'])) api.update_page(page, pageitem) - return pageitem['imageinfo'] if history else pageitem['imageinfo'][0] + return (pageitem['imageinfo'] + if history else pageitem['imageinfo'][0])
def page_exists(self, page): """Return True if and only if page is an existing page on site.""" @@ -1461,8 +1470,8 @@ break else: pywikibot.warning( - u"preloadpages: Query returned unexpected title '%s'" - % pagedata['title']) + u"preloadpages: Query returned unexpected title" + u"'%s'" % pagedata['title']) continue except KeyError: pywikibot.debug(u"No 'title' in %s" % pagedata, _logger) @@ -1518,7 +1527,8 @@ bltitle = page.title(withSection=False).encode(self.encoding()) blargs = {"gbltitle": bltitle} if filterRedirects is not None: - blargs["gblfilterredir"] = filterRedirects and "redirects" or "nonredirects" + blargs["gblfilterredir"] = (filterRedirects and "redirects" or + "nonredirects") blgen = self._generator(api.PageGenerator, type_arg="backlinks", namespaces=namespaces, step=step, total=total, g_content=content, **blargs) @@ -1565,9 +1575,11 @@ (default False)
""" - eiargs = {"geititle": page.title(withSection=False).encode(self.encoding())} + eiargs = {"geititle": + page.title(withSection=False).encode(self.encoding())} if filterRedirects is not None: - eiargs["geifilterredir"] = filterRedirects and "redirects" or "nonredirects" + eiargs["geifilterredir"] = (filterRedirects and "redirects" or + "nonredirects") eigen = self._generator(api.PageGenerator, type_arg="embeddedin", namespaces=namespaces, step=step, total=total, g_content=content, **eiargs) @@ -1635,7 +1647,8 @@ if hasattr(page, "_pageid"): clargs['pageids'] = str(page._pageid) else: - clargs['titles'] = page.title(withSection=False).encode(self.encoding()) + clargs['titles'] = page.title( + withSection=False).encode(self.encoding()) clgen = self._generator(api.CategoryPageGenerator, type_arg="categories", step=step, total=total, g_content=content, **clargs) @@ -1736,23 +1749,23 @@ if starttime and sortby == "timestamp": cmargs["gcmstart"] = str(starttime) elif starttime: - raise ValueError( - "categorymembers: invalid combination of 'sortby' and 'starttime'") + raise ValueError("categorymembers: " + "invalid combination of 'sortby' and 'starttime'") if endtime and sortby == "timestamp": cmargs["gcmend"] = str(endtime) elif endtime: - raise ValueError( - "categorymembers: invalid combination of 'sortby' and 'endtime'") + raise ValueError("categorymembers: " + "invalid combination of 'sortby' and 'endtime'") if startsort and sortby != "timestamp": cmargs["gcmstartsortkey"] = startsort elif startsort: - raise ValueError( - "categorymembers: invalid combination of 'sortby' and 'startsort'") + raise ValueError("categorymembers: " + "invalid combination of 'sortby' and 'startsort'") if endsort and sortby != "timestamp": cmargs["gcmendsortkey"] = endsort elif endsort: - raise ValueError( - "categorymembers: invalid combination of 'sortby' and 'endsort'") + raise ValueError("categorymembers: " + "invalid combination of 'sortby' and 'endsort'")
cmgen = self._generator(api.PageGenerator, namespaces=namespaces, step=step, total=total, g_content=content, @@ -1941,7 +1954,8 @@ self.getcategoryinfo(category) if not hasattr(category, "_catinfo"): # a category that exists but has no contents returns no API result - category._catinfo = {'size': 0, 'pages': 0, 'files': 0, 'subcats': 0} + category._catinfo = {'size': 0, 'pages': 0, 'files': 0, + 'subcats': 0} return category._catinfo
@deprecate_arg("throttle", None) @@ -1985,7 +1999,8 @@ raise Error("allpages: only one namespace permitted.") if includeredirects is not None: pywikibot.debug( - u"allpages: 'includeRedirects' argument is deprecated; use 'filterredirs'.", + u"allpages: 'includeRedirects' argument is deprecated; " + u"use 'filterredirs'.", _logger) if includeredirects: if includeredirects == "only": @@ -2186,11 +2201,13 @@ if reverse: if starttime > endtime: raise pywikibot.Error( - "blocks: starttime must be before endtime with reverse=True") + "blocks: " + "starttime must be before endtime with reverse=True") else: if endtime > starttime: raise pywikibot.Error( - "blocks: endtime must be before starttime with reverse=False") + "blocks: " + "endtime must be before starttime with reverse=False") bkgen = self._generator(api.ListGenerator, type_arg="blocks", step=step, total=total) bkgen.request["bkprop"] = "id|user|by|timestamp|expiry|reason|range|flags" @@ -2263,11 +2280,13 @@ if reverse: if end < start: raise Error( - "logevents: end must be later than start with reverse=True") + "logevents: " + "end must be later than start with reverse=True") else: if start < end: raise Error( - "logevents: start must be later than end with reverse=False") + "logevents: " + "start must be later than end with reverse=False") legen = self._generator(api.LogEntryListGenerator, type_arg=logtype, step=step, total=total) if logtype is not None: @@ -2331,11 +2350,13 @@ if reverse: if end < start: raise Error( - "recentchanges: end must be later than start with reverse=True") + "recentchanges: " + "end must be later than start with reverse=True") else: if start < end: raise Error( - "recentchanges: start must be later than end with reverse=False") + "recentchanges: " + "start must be later than end with reverse=False") rcgen = self._generator(api.ListGenerator, type_arg="recentchanges", rcprop="user|comment|timestamp|title|ids" "|sizes|redirect|loginfo|flags", @@ -2363,7 +2384,8 @@ 'anon': showAnon, 
'redirect': showRedirects, } - if showPatrolled is not None and (self.has_right('patrol') or self.has_right('patrolmarks')): + if showPatrolled is not None and ( + self.has_right('patrol') or self.has_right('patrolmarks')): rcgen.request['rcprop'] += '|patrolled' filters['patrolled'] = showPatrolled rcshow = [] @@ -2394,7 +2416,8 @@ @param where: Where to search; value must be "text" or "titles" (many wikis do not support title search) @param namespaces: search only in these namespaces (defaults to 0) - @type namespaces: list of ints, or an empty list to signal all namespaces + @type namespaces: list of ints, or an empty list to signal all + namespaces @param getredirects: if True, include redirects in results @param content: if True, load the current content of each iterated page (default False) @@ -2432,7 +2455,8 @@ @param reverse: Iterate oldest contributions first (default: newest) @param showMinor: if True, iterate only minor edits; if False and not None, iterate only non-minor edits (default: iterate both) - @param top_only: if True, iterate only edits which are the latest revision + @param top_only: if True, iterate only edits which are the latest + revision
""" if not (user or userprefix): @@ -2442,11 +2466,13 @@ if reverse: if end < start: raise Error( - "usercontribs: end must be later than start with reverse=True") + "usercontribs: " + "end must be later than start with reverse=True") else: if start < end: raise Error( - "usercontribs: start must be later than end with reverse=False") + "usercontribs: " + "start must be later than end with reverse=False") ucgen = self._generator(api.ListGenerator, type_arg="usercontribs", ucprop="ids|title|timestamp|comment|flags", namespaces=namespaces, step=step, @@ -2489,11 +2515,13 @@ if reverse: if end < start: raise Error( - "watchlist_revs: end must be later than start with reverse=True") + "watchlist_revs: " + "end must be later than start with reverse=True") else: if start < end: raise Error( - "watchlist_revs: start must be later than end with reverse=False") + "watchlist_revs: " + "start must be later than end with reverse=False") wlgen = self._generator(api.ListGenerator, type_arg="watchlist", wlprop="user|comment|timestamp|title|ids|flags", wlallrev="", namespaces=namespaces, @@ -2538,11 +2566,13 @@ if reverse: if end < start: raise Error( -"deletedrevs: end must be later than start with reverse=True") + "deletedrevs: " + "end must be later than start with reverse=True") else: if start < end: raise Error( - "deletedrevs: start must be later than end with reverse=False") + "deletedrevs: " + "start must be later than end with reverse=False") if not self.logged_in(): self.login() if "deletedhistory" not in self.userinfo['rights']: @@ -2552,7 +2582,8 @@ pass if "deletedhistory" not in self.userinfo['rights']: raise Error( - "deletedrevs: User:%s not authorized to access deleted revisions." + "deletedrevs: " + "User:%s not authorized to access deleted revisions." 
% self.user()) if get_text: if "undelete" not in self.userinfo['rights']: @@ -2562,7 +2593,8 @@ pass if "undelete" not in self.userinfo['rights']: raise Error( - "deletedrevs: User:%s not authorized to view deleted content." + "deletedrevs: " + "User:%s not authorized to view deleted content." % self.user())
drgen = self._generator(api.ListGenerator, type_arg="deletedrevs", @@ -2570,7 +2602,8 @@ drprop="revid|user|comment|minor", step=step, total=total) if get_text: - drgen.request['drprop'] = drgen.request['drprop'] + "|content|token" + drgen.request['drprop'] = (drgen.request['drprop'] + + "|content|token") if start is not None: drgen.request["drstart"] = str(start) if end is not None: @@ -2729,8 +2762,10 @@ except api.APIError, err: self.unlock_page(page) if err.code.endswith("anon") and self.logged_in(): - pywikibot.debug(u"editpage: received '%s' even though bot is logged in" % err.code, - _logger) + pywikibot.debug( + u"editpage: received '%s' even though bot is logged in" + % err.code, + _logger) errdata = { 'site': self, 'title': page.title(withSection=False), @@ -2738,8 +2773,9 @@ 'info': err.info } if err.code == "spamdetected": - raise SpamfilterError(self._ep_errors[err.code] % errdata - + err.info[err.info.index("fragment: ") + 9:]) + raise SpamfilterError( + self._ep_errors[err.code] % errdata + + err.info[err.info.index("fragment: ") + 9:])
if err.code == "editconflict": raise EditConflict(self._ep_errors[err.code] % errdata) @@ -2774,25 +2810,31 @@ continue elif "url" in captcha: import webbrowser - webbrowser.open(self.family.protocol(self.code) + '://' + self.family.hostname(self.code) + captcha["url"]) + webbrowser.open('%s://%s%s' + % (self.family.protocol(self.code), + self.family.hostname(self.code), + captcha["url"])) req['captchaword'] = cap_answerwikipedia.input( -"Please view CAPTCHA in your browser, then type answer here:") + "Please view CAPTCHA in your browser, " + "then type answer here:") continue else: self.unlock_page(page) pywikibot.error( - u"editpage: unknown CAPTCHA response %s, page not saved" + u"editpage: unknown CAPTCHA response %s, " + u"page not saved" % captcha) return False else: self.unlock_page(page) - pywikibot.error(u"editpage: unknown failure reason %s" % str(result)) + pywikibot.error(u"editpage: unknown failure reason %s" + % str(result)) return False else: self.unlock_page(page) pywikibot.error( - u"editpage: Unknown result code '%s' received; page not saved" - % result["edit"]["result"]) + u"editpage: Unknown result code '%s' received; " + u"page not saved" % result["edit"]["result"]) pywikibot.log(str(result)) return False
@@ -2864,7 +2906,8 @@ except api.APIError, err: if err.code.endswith("anon") and self.logged_in(): pywikibot.debug( - u"movepage: received '%s' even though bot is logged in" % err.code, + u"movepage: received '%s' even though bot is logged in" + % err.code, _logger) errdata = { 'site': self, @@ -2888,7 +2931,8 @@ #TODO: Check for talkmove-error messages if "talkmove-error-code" in result["move"]: pywikibot.warning( - u"movepage: Talk page %s not moved" % (page.toggleTalkPage().title(asLink=True))) + u"movepage: Talk page %s not moved" + % (page.toggleTalkPage().title(asLink=True))) return pywikibot.Page(page, newtitle)
# catalog of rollback errors for use in error messages @@ -2911,7 +2955,8 @@ """ if len(page._revisions) < 2: raise pywikibot.Error( - u"Rollback of %s aborted; load revision history first." % page.title(asLink=True)) + u"Rollback of %s aborted; load revision history first." + % page.title(asLink=True)) last_rev = page._revisions[page.latestRevision()] last_user = last_rev.user for rev in sorted(page._revisions.keys(), reverse=True): @@ -2921,7 +2966,8 @@ break else: raise pywikibot.Error( - u"Rollback of %s aborted; only one user in revision history." % page.title(asLink=True)) + u"Rollback of %s aborted; only one user in revision history." + % page.title(asLink=True)) summary = summary or ( u"Reverted edits by [[Special:Contributions/%(last_user)s|%(last_user)s]] " u"([[User talk:%(last_user)s|Talk]]) to last version by %(prev_user)s" @@ -2967,7 +3013,8 @@ try: self.login(sysop=True) except pywikibot.NoUsername, e: - raise NoUsername("delete: Unable to login as sysop (%s)" % e.__class__.__name__) + raise NoUsername("delete: Unable to login as sysop (%s)" + % e.__class__.__name__) if not self.logged_in(sysop=True): raise NoUsername("delete: Unable to login as sysop") token = self.token(page, "delete") @@ -2985,7 +3032,8 @@ } if err.code in self._dl_errors: raise Error(self._dl_errors[err.code] % errdata) - pywikibot.debug(u"delete: Unexpected error code '%s' received." % err.code, + pywikibot.debug(u"delete: Unexpected error code '%s' received." + % err.code, _logger) raise finally: @@ -2996,14 +3044,14 @@ #TODO: implement patrol
@must_be(group='sysop') - def blockuser(self, user, expiry, reason, anononly=True, nocreate=True, autoblock=True, - noemail=False, reblock=False): + def blockuser(self, user, expiry, reason, anononly=True, nocreate=True, + autoblock=True, noemail=False, reblock=False):
token = self.token(user, 'block') if isinstance(expiry, pywikibot.Timestamp): expiry = expiry.toISOformat() - req = api.Request(site=self, action='block', user=user.username, expiry=expiry, - reason=reason, token=token) + req = api.Request(site=self, action='block', user=user.username, + expiry=expiry, reason=reason, token=token) if anononly: req['anononly'] = '' if nocreate: @@ -3027,7 +3075,8 @@
""" token = self.token(page, "watch") - req = api.Request(action="watch", token=token, title=page.title(withSection=False)) + req = api.Request(action="watch", token=token, + title=page.title(withSection=False)) if unwatch: req["unwatch"] = "" result = req.submit() @@ -3064,8 +3113,8 @@ NOTE 2: it returns the image title WITHOUT the image namespace.
""" - if hash_found is None: # If the hash is none return None and not continue - return None + if hash_found is None: + return return [image.title(withNamespace=False) for image in self.allimages(sha1=hash_found)]
@@ -3108,20 +3157,18 @@ # check for required user right if "upload" not in self.userinfo["rights"]: raise pywikibot.Error( - "User '%s' does not have upload rights on site %s." % (self.user(), self) - ) + "User '%s' does not have upload rights on site %s." + % (self.user(), self)) # check for required parameters if (source_filename and source_url)\ or (source_filename is None and source_url is None): - raise ValueError( -"APISite.upload: must provide either source_filename or source_url, not both." - ) + raise ValueError("APISite.upload: must provide either " + "source_filename or source_url, not both.") if comment is None: comment = imagepage.text if not comment: - raise ValueError( -"APISite.upload: cannot upload file without a summary/description." - ) + raise ValueError("APISite.upload: cannot upload file without " + "a summary/description.") if text is None: text = imagepage.text if not text: @@ -3301,7 +3348,9 @@ @deprecate_arg("number", None) @deprecate_arg("repeat", None) def unwatchedpages(self, step=None, total=None): - """Yield Pages from Special:Unwatchedpages (requires Admin privileges).""" + """Yield Pages from Special:Unwatchedpages (requires Admin privileges). + + """ uwgen = self._generator(api.PageGenerator, type_arg="querypage", gqppage="Unwatchedpages", step=step, total=total) @@ -3345,7 +3394,8 @@
@deprecate_arg("number", None) @deprecate_arg("repeat", None) - def uncategorizedtemplates(self, number=None, repeat=True, step=None, total=None): + def uncategorizedtemplates(self, number=None, repeat=True, step=None, + total=None): """Yield Pages from Special:Uncategorizedtemplates.""" utgen = self._generator(api.PageGenerator, type_arg="querypage", @@ -3445,7 +3495,8 @@ def _get_propertyitem(self, props, source, **params): """generic method to get the data for multiple Wikibase items""" wbdata = self.get_item(source, props=props, **params) - assert props in wbdata, "API wbgetentities response lacks %s key" % props + assert props in wbdata, \ + "API wbgetentities response lacks %s key" % props return wbdata[props]
@deprecated("pywikibot.WikibasePage") @@ -3457,10 +3508,13 @@ wbrequest = api.Request(site=self, action="wbgetentities", ids=ids, **params) wbdata = wbrequest.submit() - assert 'success' in wbdata, "API wbgetentities response lacks 'success' key" + assert 'success' in wbdata, \ + "API wbgetentities response lacks 'success' key" assert wbdata['success'] == 1, "API 'success' key ist not 1" - assert 'entities' in wbdata, "API wbgetentities response lacks 'entities' key" - assert ids in wbdata['entities'], "API wbgetentities response lacks %s key" % ids + assert 'entities' in wbdata, \ + "API wbgetentities response lacks 'entities' key" + assert ids in wbdata['entities'], \ + "API wbgetentities response lacks %s key" % ids return wbdata['entities'][ids] else: # not implemented yet @@ -3509,7 +3563,7 @@ for qid in data['entities']: item = pywikibot.ItemPage(self, qid) item._content = data['entities'][qid] - item.get() # This parses the json and preloads the various properties + item.get() # parses json and preloads the various properties yield item
def getPropertyType(self, prop): @@ -3553,7 +3607,8 @@ params['bot'] = 1 if 'baserevid' in kwargs and kwargs['baserevid']: params['baserevid'] = kwargs['baserevid'] - params['token'] = self.token(pywikibot.Page(self, u'Main Page'), 'edit') # Use a dummy page + params['token'] = self.token(pywikibot.Page(self, u'Main Page'), + 'edit') # Use a dummy page for arg in kwargs: if arg in ['clear', 'data', 'exclude', 'summary']: params[arg] = kwargs[arg] @@ -3632,7 +3687,7 @@ params = dict(action='wbsetreference', statement=claim.snak, ) - if claim.on_item: # I can't think of when this would be false, but lets be safe + if claim.on_item: # I think this wouldn't be false, but lets be safe params['baserevid'] = claim.on_item.lastrevid if bot: params['bot'] = 1 @@ -3654,7 +3709,8 @@ 'value': sourceclaim._formatDataValue(), } else: - raise NotImplementedError('%s datatype is not supported yet.' % sourceclaim.getType()) + raise NotImplementedError('%s datatype is not supported yet.' + % sourceclaim.getType()) valuesnaks = [] if sourceclaim.getID() in snak: valuesnaks = snak[sourceclaim.getID()] @@ -3665,7 +3721,8 @@ )
snak[sourceclaim.getID()] = valuesnaks - # set the hash if the source should be changed. if present, all claims of one source have the same hash + # set the hash if the source should be changed. + # if present, all claims of one source have the same hash if not new and hasattr(sourceclaim, 'hash'): params['reference'] = sourceclaim.hash
@@ -3693,7 +3750,7 @@ params = dict(action='wbsetqualifier', claim=claim.snak, ) - if claim.on_item: # I can't think of when this would be false, but lets be safe + if claim.on_item: # I think this wouldn't be false, but lets be safe params['baserevid'] = claim.on_item.lastrevid if bot: params['bot'] = 1 @@ -3720,7 +3777,8 @@ if bot: params['bot'] = 1 params['claim'] = '|'.join(claim.snak for claim in claims) - params['token'] = self.token(pywikibot.Page(self, u'Main Page'), 'edit') # Use a dummy page + params['token'] = self.token(pywikibot.Page(self, u'Main Page'), + 'edit') # Use a dummy page for kwarg in kwargs: if kwarg in ['baserevid', 'summary']: params[kwarg] = kwargs[kwarg] @@ -3826,12 +3884,14 @@ self._cookies[index] = None self._isLoggedIn[index] = False else: - tmp = '%s-%s-%s-login.data' % (self.family.name, self.code, username) + tmp = '%s-%s-%s-login.data' % (self.family.name, self.code, + username) fn = config.datafilepath('login-data', tmp) if not os.path.exists(fn): self._cookies[index] = None self._isLoggedIn[index] = False else: f = open(fn) - self._cookies[index] = '; '.join([x.strip() for x in f.readlines()]) + self._cookies[index] = '; '.join([x.strip() + for x in f.readlines()]) f.close()
pywikibot-commits@lists.wikimedia.org