jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/781963 )
Change subject: [W0048] Merge 'if' statements
......................................................................
[W0048] Merge 'if' statements
Change-Id: I054e3070ae5fceee7d014d723dd328f7fef16731
---
M pywikibot/comms/http.py
M pywikibot/page/_pages.py
M pywikibot/scripts/generate_user_files.py
M pywikibot/site/_upload.py
M pywikibot/site_detect.py
M pywikibot/specialbots/_upload.py
M scripts/checkimages.py
M scripts/coordinate_import.py
M scripts/download_dump.py
M scripts/imagetransfer.py
M scripts/interwiki.py
M scripts/interwikidata.py
12 files changed, 92 insertions(+), 95 deletions(-)
Approvals:
  Xqt: Looks good to me, approved
  jenkins-bot: Verified
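The pattern applied throughout this change: a nested 'if' whose outer block contains nothing but the inner 'if' is collapsed into a single short-circuiting condition. A minimal sketch of the before/after shapes (hypothetical names, not code from the patch):

    def handle(value):
        """Hypothetical handler used only for this illustration."""
        print('handling', value)

    value = 'https://example.org'

    # Before: two statements and an extra indentation level.
    if isinstance(value, str):
        if value.startswith('http'):
            handle(value)

    # After: one statement. ``and`` evaluates left to right and stops
    # at the first false operand, so the second test runs only when the
    # first succeeds, exactly the behaviour of the nested form.
    if isinstance(value, str) and value.startswith('http'):
        handle(value)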
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 84eb68f..465ef29 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -268,9 +268,9 @@
     :type response: :py:obj:`requests.Response`
     """
     # TODO: do some error correcting stuff
-    if isinstance(response, requests.exceptions.SSLError):
-        if SSL_CERT_VERIFY_FAILED_MSG in str(response):
-            raise FatalServerError(str(response))
+    if isinstance(response, requests.exceptions.SSLError) \
+       and SSL_CERT_VERIFY_FAILED_MSG in str(response):
+        raise FatalServerError(str(response))
     if isinstance(response, requests.ConnectionError):
         msg = str(response)

diff --git a/pywikibot/page/_pages.py b/pywikibot/page/_pages.py
index d398b87..893b236 100644
--- a/pywikibot/page/_pages.py
+++ b/pywikibot/page/_pages.py
@@ -1907,11 +1907,10 @@
         been retrieved earlier). If timestamp is not found, returns
         empty list.
         """
-        if hasattr(self, '_deletedRevs'):
-            if timestamp in self._deletedRevs and (
-                    not content
-                    or 'content' in self._deletedRevs[timestamp]):
-                return self._deletedRevs[timestamp]
+        if hasattr(self, '_deletedRevs') \
+           and timestamp in self._deletedRevs \
+           and (not content or 'content' in self._deletedRevs[timestamp]):
+            return self._deletedRevs[timestamp]
         for item in self.site.deletedrevs(self, start=timestamp,
                                           content=content, total=1,
                                           **kwargs):

diff --git a/pywikibot/scripts/generate_user_files.py b/pywikibot/scripts/generate_user_files.py
index 0281985..304b766 100755
--- a/pywikibot/scripts/generate_user_files.py
+++ b/pywikibot/scripts/generate_user_files.py
@@ -157,12 +157,12 @@
     mycode = None
     while not mycode:
         mycode = pywikibot.input(message, default=default_lang, force=force)
-        if known_langs and mycode and mycode not in known_langs:
-            if not pywikibot.input_yn(
-                    fill('The site code {!r} is not in the list of known '
-                         'sites. Do you want to continue?'.format(mycode)),
-                    default=False, automatic_quit=False):
-                mycode = None
+        if known_langs and mycode and mycode not in known_langs \
+                and not pywikibot.input_yn(
+                fill('The site code {!r} is not in the list of known sites. '
+                     'Do you want to continue?'.format(mycode)),
+                default=False, automatic_quit=False):
+            mycode = None
     message = 'Username on {}:{}'.format(mycode, fam.name)
     username = pywikibot.input(message, default=default_username, force=force)

diff --git a/pywikibot/site/_upload.py b/pywikibot/site/_upload.py
index c85a397..73e9e7d 100644
--- a/pywikibot/site/_upload.py
+++ b/pywikibot/site/_upload.py
@@ -170,10 +170,9 @@
         if report_success is None:
             report_success = isinstance(ignore_warnings, bool)
-        if report_success is True:
-            if not isinstance(ignore_warnings, bool):
-                raise ValueError('report_success may only be set to True '
-                                 'when ignore_warnings is a boolean')
+        if report_success is True and not isinstance(ignore_warnings, bool):
+            raise ValueError('report_success may only be set to True when '
+                             'ignore_warnings is a boolean')
         if isinstance(ignore_warnings, Iterable):
             ignored_warnings = ignore_warnings
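Merges like the _deletedRevs one in _pages.py above are only safe because 'and' short-circuits: the attribute is never touched unless hasattr has already confirmed it exists. A self-contained demonstration of that guarantee (hypothetical class, not pywikibot code):

    class Cache:
        """Hypothetical stand-in for an object with an optional cache."""

    def lookup(obj, key):
        # Safe even when the attribute is missing: ``and`` stops at the
        # first false operand, so ``obj._deletedRevs`` is never evaluated
        # unless ``hasattr`` returned True.
        if hasattr(obj, '_deletedRevs') and key in obj._deletedRevs:
            return obj._deletedRevs[key]
        return None

    cache = Cache()
    assert lookup(cache, 'x') is None       # no AttributeError raised
    cache._deletedRevs = {'x': ['rev']}
    assert lookup(cache, 'x') == ['rev']    # normal lookup once present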
diff --git a/pywikibot/site_detect.py b/pywikibot/site_detect.py
index b236b16..8bd4a78 100644
--- a/pywikibot/site_detect.py
+++ b/pywikibot/site_detect.py
@@ -240,9 +240,9 @@
         else:
             if self._parsed_url:
                 # allow upgrades to https, but not downgrades
-                if self._parsed_url.scheme == 'https':
-                    if new_parsed_url.scheme != self._parsed_url.scheme:
-                        return
+                if self._parsed_url.scheme == 'https' \
+                   and new_parsed_url.scheme != self._parsed_url.scheme:
+                    return
                 # allow http://www.brickwiki.info/ vs http://brickwiki.info/
                 if (new_parsed_url.netloc in self._parsed_url.netloc

diff --git a/pywikibot/specialbots/_upload.py b/pywikibot/specialbots/_upload.py
index 4bfb79f..9fa7dc2 100644
--- a/pywikibot/specialbots/_upload.py
+++ b/pywikibot/specialbots/_upload.py
@@ -190,9 +190,8 @@
         :return: False if this warning should cause an abort, True if it
             should be ignored or None if this warning has no default handler.
         """
-        if self.aborts is not True:
-            if warning in self.aborts:
-                return False
+        if self.aborts is not True and warning in self.aborts:
+            return False
         if self.ignore_warning is True or (self.ignore_warning is not False
                                            and warning in self.ignore_warning):
             return True

diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index d8598de..33adbe2 100755
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -1266,12 +1266,13 @@
         # Found the templates ONLY in the image's description
         for template_selected in templates_in_the_image_raw:
            tp = pywikibot.Page(self.site, template_selected)
+            page_title = tp.title(as_url=True, with_ns=False).lower()
             for template_real in self.licenses_found:
-                if (tp.title(as_url=True, with_ns=False).lower()
-                        == template_real.title(as_url=True,
-                                               with_ns=False).lower()):
-                    if template_real not in self.all_licenses:
-                        self.all_licenses.append(template_real)
+                template_title = template_real.title(as_url=True,
+                                                     with_ns=False).lower()
+                if page_title == template_title \
+                        and template_real not in self.all_licenses:
+                    self.all_licenses.append(template_real)
                     break
         if self.licenses_found:

@@ -1448,19 +1449,8 @@
             if find_tipe.lower() == 'findonly':
                 search_results = re.findall(r'{}'.format(k.lower()),
                                             self.image_check_text.lower())
-                if search_results:
-                    if search_results[0] == self.image_check_text.lower():
-                        self.some_problem = True
-                        self.text_used = text
-                        self.head_used = head_2
-                        self.imagestatus_used = imagestatus
-                        self.name_used = name
-                        self.summary_used = summary
-                        self.mex_used = mex_catched
-                        break
-            elif find_tipe.lower() == 'find':
-                if re.findall(r'{}'.format(k.lower()),
-                              self.image_check_text.lower()):
+                if search_results \
+                   and search_results[0] == self.image_check_text.lower():
                     self.some_problem = True
                     self.text_used = text
                     self.head_used = head_2
@@ -1468,7 +1458,18 @@
                     self.name_used = name
                     self.summary_used = summary
                     self.mex_used = mex_catched
-                    continue
+                    break
+            elif find_tipe.lower() == 'find' \
+                    and re.findall(r'{}'.format(k.lower()),
+                                   self.image_check_text.lower()):
+                self.some_problem = True
+                self.text_used = text
+                self.head_used = head_2
+                self.imagestatus_used = imagestatus
+                self.name_used = name
+                self.summary_used = summary
+                self.mex_used = mex_catched
+                continue
     def check_step(self) -> None:
         """Check a single file page."""

@@ -1752,14 +1753,14 @@
             continue
         # Check on commons if there's already an image with the same name
-        if commons_active and site.family.name != 'commons':
-            if not bot.check_image_on_commons():
-                continue
+        if commons_active and site.family.name != 'commons' \
+           and not bot.check_image_on_commons():
+            continue
         # Check if there are duplicates of the image on the project
-        if duplicates_active:
-            if not bot.check_image_duplicated(duplicates_rollback):
-                continue
+        if duplicates_active \
+           and not bot.check_image_duplicated(duplicates_rollback):
+            continue
         bot.check_step()
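Besides merging the 'if' statements, the first checkimages.py hunk hoists the page-title normalisation out of the inner loop, so it is computed once per template instead of once per comparison. The idiom in isolation (hypothetical data, not pywikibot code):

    templates = ['Template:GFDL', 'Template:PD-self']
    licenses = ['template:gfdl', 'template:cc-by']

    for name in templates:
        key = name.lower()        # invariant of the inner loop: hoist it
        for lic in licenses:
            if key == lic:
                print('match:', name)
                break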
diff --git a/scripts/coordinate_import.py b/scripts/coordinate_import.py
index d5cfe39..08f843e 100755
--- a/scripts/coordinate_import.py
+++ b/scripts/coordinate_import.py
@@ -118,9 +118,9 @@
         if page is None:
             # running over items, search in linked pages
             for page in item.iterlinks():
-                if page.site.has_extension('GeoData'):
-                    if self.try_import_coordinates_from_page(page, item):
-                        break
+                if page.site.has_extension('GeoData') \
+                        and self.try_import_coordinates_from_page(page, item):
+                    break
             return
         self.try_import_coordinates_from_page(page, item)

diff --git a/scripts/download_dump.py b/scripts/download_dump.py
index cc977b8..66626d8 100755
--- a/scripts/download_dump.py
+++ b/scripts/download_dump.py
@@ -102,9 +102,8 @@
                 if toolforge_dump_filepath:
                     pywikibot.output('Symlinking file from '
                                      + toolforge_dump_filepath)
-                    if non_atomic:
-                        if os.path.exists(file_final_storepath):
-                            remove(file_final_storepath)
+                    if non_atomic and os.path.exists(file_final_storepath):
+                        remove(file_final_storepath)
                     symlink(toolforge_dump_filepath, file_current_storepath)
                 else:
                     url = 'https://dumps.wikimedia.org/{}/{}/{}'.format(

diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py
index e8e08d9..96c653b 100755
--- a/scripts/imagetransfer.py
+++ b/scripts/imagetransfer.py
@@ -242,9 +242,9 @@
             reason = i18n.twtranslate(sourceSite,
                                       'imagetransfer-nowcommons_notice')
             # try to delete the original image if we have a sysop account
-            if sourceSite.has_right('delete'):
-                if sourceImagePage.delete(reason):
-                    return
+            if sourceSite.has_right('delete') \
+               and sourceImagePage.delete(reason):
+                return
             if sourceSite.lang in nowCommonsTemplate \
                and sourceSite.family.name in config.usernames \
                and sourceSite.lang in \

diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 8d6a194..2ca1cd4 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1103,12 +1103,12 @@
         elif page.isStaticRedirect():
             self.conf.note('not following static {}redirects.'.format(redir))
         elif (page.site.family == redirect_target.site.family
-              and not self.skipPage(page, redirect_target, counter)):
-            if self.addIfNew(redirect_target, counter, page):
-                if config.interwiki_shownew:
-                    pywikibot.output('{}: {} gives new {}redirect {}'
-                                     .format(self.origin, page, redir,
-                                             redirect_target))
+              and not self.skipPage(page, redirect_target, counter)
+              and self.addIfNew(redirect_target, counter, page)
+              and config.interwiki_shownew):
+            pywikibot.output('{}: {} gives new {}redirect {}'
+                             .format(self.origin, page, redir,
+                                     redirect_target))
         return True
     def check_page(self, page, counter) -> None:

@@ -1181,10 +1181,10 @@
             if self.conf.untranslatedonly:
                 # Ignore the interwiki links.
                 iw = ()
-            if self.conf.lacklanguage:
-                if self.conf.lacklanguage in (link.site.lang for link in iw):
-                    iw = ()
-                    self.workonme = False
+            if self.conf.lacklanguage \
+               and self.conf.lacklanguage in (link.site.lang for link in iw):
+                iw = ()
+                self.workonme = False
             if len(iw) < self.conf.minlinks:
                 iw = ()
                 self.workonme = False
@@ -1223,28 +1223,26 @@
                                 .format(self.origin, page, linkedPage))
                             break
-                if not self.skipPage(page, linkedPage, counter):
-                    if self.conf.followinterwiki or page == self.origin:
-                        if self.addIfNew(linkedPage, counter, page):
-                            # It is new. Also verify whether it is the second
-                            # on the same site
-                            lpsite = linkedPage.site
-                            for prevPage in self.found_in:
-                                if prevPage != linkedPage and \
-                                   prevPage.site == lpsite:
-                                    # Still, this could be "no problem" as
-                                    # either may be a redirect to the other.
-                                    # No way to find out quickly!
-                                    pywikibot.output(
-                                        'NOTE: {}: {} gives duplicate '
-                                        'interwiki on same site {}'
-                                        .format(self.origin, page, linkedPage))
-                                    break
-                            else:
-                                if config.interwiki_shownew:
-                                    pywikibot.output(
-                                        '{}: {} gives new interwiki {}'
-                                        .format(self.origin, page, linkedPage))
+                if not self.skipPage(page, linkedPage, counter) \
+                   and (self.conf.followinterwiki or page == self.origin) \
+                   and self.addIfNew(linkedPage, counter, page):
+                    # It is new. Also verify whether it is the second on the
+                    # same site
+                    lpsite = linkedPage.site
+                    for prevPage in self.found_in:
+                        if prevPage != linkedPage and prevPage.site == lpsite:
+                            # Still, this could be "no problem" as
+                            # either may be a redirect to the other.
+                            # No way to find out quickly!
+                            pywikibot.output(
+                                'NOTE: {}: {} gives duplicate interwiki '
+                                'on same site {}'
+                                .format(self.origin, page, linkedPage))
+                            break
+                    else:
+                        if config.interwiki_shownew:
+                            pywikibot.output(
+                                '{}: {} gives new interwiki {}'
+                                .format(self.origin, page, linkedPage))
                 if self.forcedStop:
                     break
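One caveat with merges that mix 'and' and 'or': 'and' binds tighter than 'or', so the 'or' group from the middle 'if' must keep explicit parentheses, as in the merged condition above; without them the test changes meaning. A short demonstration:

    a, b, c, d = False, False, True, True

    faithful = a and (b or c) and d       # mirrors the nested ifs: False
    unparenthesized = a and b or c and d  # groups as (a and b) or (c and d)

    assert faithful is False
    assert unparenthesized is True        # diverges as soon as ``a`` is False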
diff --git a/scripts/interwikidata.py b/scripts/interwikidata.py
index 3628b2b..fc068b4 100755
--- a/scripts/interwikidata.py
+++ b/scripts/interwikidata.py
@@ -161,12 +161,14 @@
         """Clean interwiki links from the page."""
         if not self.iwlangs:
             return
+
         dbnames = [iw_site.dbName() for iw_site in self.iwlangs]
-        if set(dbnames) - set(self.current_item.sitelinks.keys()):
-            if not self.handle_complicated():
-                warning('Interwiki conflict in {}, skipping...'
-                        .format(self.current_page.title(as_link=True)))
-                return
+        if set(dbnames) - set(self.current_item.sitelinks.keys()) \
+                and not self.handle_complicated():
+            warning('Interwiki conflict in {}, skipping...'
+                    .format(self.current_page.title(as_link=True)))
+            return
+
         output('Cleaning up the page')
         new_text = pywikibot.textlib.removeLanguageLinks(
             self.current_page.text, site=self.current_page.site)
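The interwikidata.py check relies on set difference to spot interwiki targets that have no matching sitelink on the Wikidata item; the idiom in isolation (hypothetical database names, not pywikibot code):

    dbnames = {'enwiki', 'dewiki', 'frwiki'}   # targets linked from the page
    sitelinks = {'enwiki', 'dewiki'}           # sitelinks on the item

    missing = dbnames - sitelinks
    if missing:
        print('conflict, no sitelink for:', sorted(missing))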