jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/769980 )
Change subject: [IMPR] Remove unused notUpdatedSites list inside post_processing
......................................................................
[IMPR] Remove unused notUpdatedSites list inside post_processing
Also split post_processing into two methods
Change-Id: I301cfc512f89dae2d095464d70b9302ba435cfa5
---
M scripts/interwiki.py
1 file changed, 70 insertions(+), 80 deletions(-)
Approvals:
  Xqt: Looks good to me, approved
  jenkins-bot: Verified
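For orientation before the diff: after this change, post_processing assembles the interwiki links and then delegates the per-site updates to one of the two new helpers. A simplified sketch of the resulting flow, paraphrased from the diff below (not the literal code; the assembly of `new` and the error handling are abbreviated):

    def post_processing(self):
        new = self.assemble()          # accepted interwiki links
        if new is None:                # user chose to give up
            return

        updatedSites = []
        self.conf.always = False
        if self.conf.limittwo:
            self.process_limit_two(new, updatedSites)
        else:
            self.process_unlimited(new, updatedSites)

        # both helpers append the sites they saved to updatedSites in place
        if config.interwiki_backlink:
            self.reportBacklinks(new, updatedSites)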
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 540280a..8d6a194 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -1444,19 +1444,11 @@
                              .format(self.origin))
             return
 
-        # The following check is not always correct and thus disabled.
-        # self.done might contain no interwiki links because of the -neverlink
-        # argument or because of disambiguation conflicts.
-#        if len(self.done) == 1:
-#            # No interwiki at all
-#            return
-
         self.post_processing()
 
     def post_processing(self):
         """Some finishing processes to be done."""
-        pywikibot.output('======Post-processing {}======'
-                         .format(self.origin))
+        pywikibot.output('======Post-processing {}======'.format(self.origin))
         # Assemble list of accepted interwiki links
         new = self.assemble()
         if new is None:  # User said give up
@@ -1474,86 +1466,84 @@
             new[self.origin.site] = self.origin
 
         updatedSites = []
-        notUpdatedSites = []
 
         # Process all languages here
         self.conf.always = False
         if self.conf.limittwo:
-            lclSite = self.origin.site
-            lclSiteDone = False
-            frgnSiteDone = False
-
-            for siteCode in lclSite.family.languages_by_size:
-                site = pywikibot.Site(siteCode, lclSite.family)
-                if (not lclSiteDone and site == lclSite) \
-                   or (not frgnSiteDone and site != lclSite and site in new):
-                    if site == lclSite:
-                        lclSiteDone = True  # even if we fail the update
-                    if (site.family.name in config.usernames
-                            and site.code in config.usernames[
-                                site.family.name]):
-                        try:
-                            if self.replaceLinks(new[site], new):
-                                updatedSites.append(site)
-                            if site != lclSite:
-                                frgnSiteDone = True
-                        except SaveError:
-                            notUpdatedSites.append(site)
-                        except GiveUpOnPage:
-                            break
-                elif (not self.conf.strictlimittwo
-                      and site in new and site != lclSite):
-                    old = {}
-                    try:
-                        for link in new[site].iterlanglinks():
-                            page = pywikibot.Page(link)
-                            old[page.site] = page
-                    except NoPageError:
-                        pywikibot.output('BUG>>> {} no longer exists?'
-                                         .format(new[site]))
-                        continue
-                    _mods, _comment, adding, removing, modifying \
-                        = compareLanguages(old, new, lclSite,
-                                           self.conf.summary)
-                    if (removing and not self.conf.autonomous
-                            or modifying and self.problemfound
-                            or not old
-                            or (self.conf.needlimit
-                                and len(adding) + len(modifying)
-                                >= self.conf.needlimit + 1)):
-                        try:
-                            if self.replaceLinks(new[site], new):
-                                updatedSites.append(site)
-                        except SaveError:
-                            notUpdatedSites.append(site)
-                        except NoUsernameError:
-                            pass
-                        except GiveUpOnPage:
-                            break
+            self.process_limit_two(new, updatedSites)
         else:
-            for (site, page) in new.items():
-                # if we have an account for this site
-                if site.family.name in config.usernames \
-                   and site.code in config.usernames[site.family.name] \
-                   and not site.has_data_repository:
-                    # Try to do the changes
-                    try:
-                        if self.replaceLinks(page, new):
-                            # Page was changed
-                            updatedSites.append(site)
-                    except SaveError:
-                        notUpdatedSites.append(site)
-                    except GiveUpOnPage:
-                        break
-
-        # disabled graph drawing for minor problems: it just takes too long
-        # if notUpdatedSites != [] and config.interwiki_graph:
-        #     # at least one site was not updated, save a conflict graph
-        #     self.createGraph()
+            self.process_unlimited(new, updatedSites)
 
         # don't report backlinks for pages we already changed
         if config.interwiki_backlink:
             self.reportBacklinks(new, updatedSites)
 
+    def process_limit_two(self, new, updated):
+        """Post process limittwo."""
+        lclSite = self.origin.site
+        lclSiteDone = False
+        frgnSiteDone = False
+
+        for code in lclSite.family.languages_by_size:
+            site = pywikibot.Site(code, lclSite.family)
+            if not lclSiteDone and site == lclSite \
+               or (not frgnSiteDone and site != lclSite and site in new):
+                if site == lclSite:
+                    lclSiteDone = True  # even if we fail the update
+                if (site.family.name in config.usernames
+                        and site.code in config.usernames[site.family.name]):
+                    try:
+                        if self.replaceLinks(new[site], new):
+                            updated.append(site)
+                        if site != lclSite:
+                            frgnSiteDone = True
+                    except SaveError:
+                        pass
+                    except GiveUpOnPage:
+                        break
+
+            elif (not self.conf.strictlimittwo
+                  and site in new and site != lclSite):
+                old = {}
+                try:
+                    for link in new[site].iterlanglinks():
+                        page = pywikibot.Page(link)
+                        old[page.site] = page
+                except NoPageError:
+                    pywikibot.error('{} no longer exists?'.format(new[site]))
+                    continue
+                *_, adding, removing, modifying = compareLanguages(
+                    old, new, lclSite, self.conf.summary)
+                if (removing and not self.conf.autonomous
+                        or modifying and self.problemfound
+                        or not old
+                        or (self.conf.needlimit
+                            and len(adding) + len(modifying)
+                            >= self.conf.needlimit + 1)):
+                    try:
+                        if self.replaceLinks(new[site], new):
+                            updated.append(site)
+                    except (NoUsernameError, SaveError):
+                        pass
+                    except GiveUpOnPage:
+                        break
+
+    def process_unlimited(self, new, updated):
+        """Post process unlimited."""
+        for (site, page) in new.items():
+            # if we have an account for this site
+            if site.family.name in config.usernames \
+               and site.code in config.usernames[site.family.name] \
+               and not site.has_data_repository:
+                # Try to do the changes
+                try:
+                    if self.replaceLinks(page, new):
+                        # Page was changed
+                        updated.append(site)
+                except SaveError:
+                    pass
+                except GiveUpOnPage:
+                    break
+
     def replaceLinks(self, page, newPages) -> bool:
         """Return True if saving was successful."""
         # In this case only continue on the Page we started with
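A side note on one idiom in the new process_limit_two: the old `_mods, _comment, adding, removing, modifying = compareLanguages(...)` assignment becomes `*_, adding, removing, modifying = compareLanguages(...)`, using starred unpacking to discard the leading return values without naming them. A stand-alone illustration of that idiom (the tuple below is made up for the example, not real compareLanguages output):

    # '*_' collects and discards everything before the last three items.
    result = ('mods', 'comment', ['en'], [], ['de'])
    *_, adding, removing, modifying = result
    assert adding == ['en'] and removing == [] and modifying == ['de']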