jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/934673 )
Change subject: [IMPR] Use f-strings within scripts ......................................................................
[IMPR] Use f-strings within scripts
Changes made by flint 0.78
Change-Id: I6e4876d2d4684e68511427dd00df450a1f679ae3 --- M scripts/redirect.py M scripts/add_text.py M scripts/transferbot.py M scripts/blockpageschecker.py M scripts/commons_information.py M scripts/harvest_template.py M scripts/category_redirect.py M scripts/interwiki.py M scripts/touch.py M scripts/weblinkchecker.py M scripts/listpages.py M scripts/maintenance/cache.py M scripts/welcome.py M scripts/interwikidata.py M scripts/reflinks.py M scripts/speedy_delete.py M scripts/protect.py M scripts/commonscat.py M scripts/noreferences.py M scripts/nowcommons.py M scripts/solve_disambiguation.py M scripts/maintenance/make_i18n_dict.py M scripts/revertbot.py M scripts/template.py M scripts/data_ingestion.py M scripts/delete.py M scripts/djvutext.py M scripts/coordinate_import.py M scripts/replicate_wiki.py M scripts/change_pagelang.py M scripts/dataextend.py M scripts/newitem.py M scripts/replace.py M scripts/claimit.py M scripts/category.py M scripts/movepages.py 36 files changed, 177 insertions(+), 243 deletions(-)
Approvals: JJMC89: Looks good to me, approved jenkins-bot: Verified
diff --git a/scripts/add_text.py b/scripts/add_text.py index 643537d..754e918 100755 --- a/scripts/add_text.py +++ b/scripts/add_text.py @@ -130,8 +130,7 @@ """Skip if -exceptUrl matches or page does not exists.""" if page.exists(): if self.opt.createonly: - pywikibot.warning('Skipping because {page} already exists' - .format(page=page)) + pywikibot.warning(f'Skipping because {page} already exists') return True
if self.opt.regex_skip_url: @@ -141,8 +140,8 @@
if result: pywikibot.warning( - 'Skipping {page} because -excepturl matches {result}.' - .format(page=page, result=result)) + f'Skipping {page} because -excepturl matches {result}.' + ) return True
elif page.isTalkPage(): diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py index 900c1a2..e60ebd0 100755 --- a/scripts/blockpageschecker.py +++ b/scripts/blockpageschecker.py @@ -44,7 +44,7 @@
""" # -# (C) Pywikibot team, 2007-2022 +# (C) Pywikibot team, 2007-2023 # # Distributed under the terms of the MIT license. # @@ -240,8 +240,8 @@ page.protection() if not page.has_permission(): pywikibot.warning( - "{} is protected: this account can't edit it! Skipping..." - .format(page)) + f"{page} is protected: this account can't edit it! Skipping..." + ) return True
return False diff --git a/scripts/category.py b/scripts/category.py index 4c3a7a9..ab915dc 100755 --- a/scripts/category.py +++ b/scripts/category.py @@ -427,8 +427,8 @@ elif not os.path.isabs(filename): filename = config.datafilepath(filename) if self.is_loaded and (self.cat_content_db or self.superclass_db): - pywikibot.info('Dumping to {}, please wait...' - .format(config.shortpath(filename))) + pywikibot.info( + f'Dumping to {config.shortpath(filename)}, please wait...') databases = { 'cat_content_db': self.cat_content_db, 'superclass_db': self.superclass_db @@ -952,8 +952,7 @@ history = self.oldcat.getVersionHistoryTable() title = i18n.twtranslate(self.site, 'category-section-title', {'oldcat': self.oldcat.title()}) - self.newtalk.text = '{}\n== {} ==\n{}'.format(self.newtalk.text, - title, history) + self.newtalk.text = f'{self.newtalk.text}\n== {title} ==\n{history}' comment = i18n.twtranslate(self.site, 'category-version-history', {'oldcat': self.oldcat.title()}) self.newtalk.save(comment) @@ -1007,8 +1006,8 @@ def run(self) -> None: """Start bot.""" if self.list.exists() and not (self.append or self.overwrite): - pywikibot.info('Page {} already exists, aborting.\n' - .format(self.list.title())) + pywikibot.info( + f'Page {self.list.title()} already exists, aborting.\n') pywikibot.info(fill( 'Use -append option to append the list to the output page or ' '-overwrite option to overwrite the output page.')) @@ -1025,8 +1024,8 @@ list_string += '{} {}'.format( self.prefix, article.title(as_link=True, textlink=textlink)) if self.talk_pages and not article.isTalkPage(): - list_string += ' -- [[{}|talk]]'.format( - article.toggleTalkPage().title()) + list_string += ( + f' -- [[{article.toggleTalkPage().title()}|talk]]') list_string += '\n'
if self.list.text and self.append: @@ -1160,19 +1159,16 @@ .format(prefix, index % i, cat, index % i2, cat_list[i2])) else: - lines.append('[{}{}] {}'.format( - prefix, index % i, cat)) + lines.append(f'[{prefix}{index % i}] {cat}') else: - lines.append('[{}{}] {}'.format( - prefix, index % i, cat)) + lines.append(f'[{prefix}{index % i}] {cat}')
# output the result for line in lines: pywikibot.info(line)
# show the title of the page where the link was found. - pywikibot.info('\n>>> <<lightpurple>>{}<<default>> <<<' - .format(member.title())) + pywikibot.info(f'\n>>> <<lightpurple>>{member.title()}<<default>> <<<')
# determine a reasonable amount of context. try: @@ -1360,7 +1356,7 @@ if current_depth > 0: result += ' ' result += cat.title(as_link=True, textlink=True, with_ns=False) - result += ' ({})'.format(int(cat.categoryinfo['pages'])) + result += f" ({int(cat.categoryinfo['pages'])})" if current_depth < self.max_depth // 2: # noisy dots pywikibot.info('.', newline=False) diff --git a/scripts/category_redirect.py b/scripts/category_redirect.py index 6cb41ee..3736740 100755 --- a/scripts/category_redirect.py +++ b/scripts/category_redirect.py @@ -272,12 +272,10 @@ # validate L10N self.template_list = self.site.category_redirects() if not self.template_list: - pywikibot.warning('No redirect templates defined for {}' - .format(self.site)) + pywikibot.warning(f'No redirect templates defined for {self.site}') return if not self.get_cat(): - pywikibot.warning('No redirect category found for {}' - .format(self.site)) + pywikibot.warning(f'No redirect category found for {self.site}') return
self.user = self.site.user() # invokes login() diff --git a/scripts/change_pagelang.py b/scripts/change_pagelang.py index 6dc7007..d7db330 100755 --- a/scripts/change_pagelang.py +++ b/scripts/change_pagelang.py @@ -21,7 +21,7 @@ can be set within a settings file which is scripts.ini by default. """ # -# (C) Pywikibot team, 2018-2022 +# (C) Pywikibot team, 2018-2023 # # Distributed under the terms of the MIT license. # @@ -100,8 +100,8 @@ f'different content language ' f'<<yellow>>{currentlang}<<default>> set; skipping.') else: - pywikibot.info('\n\n>>> <<lightpurple>>{}<<default>> <<<' - .format(page.title())) + pywikibot.info( + f'\n\n>>> <<lightpurple>>{page.title()}<<default>> <<<') choice = pywikibot.input_choice( f'The content language for this page is already set to ' f'<<yellow>>{currentlang}<<default>>, which is different from ' diff --git a/scripts/claimit.py b/scripts/claimit.py index 620b86c..72709dc 100755 --- a/scripts/claimit.py +++ b/scripts/claimit.py @@ -143,8 +143,7 @@ coord_args[0], coord_args[1], precision=precision) else: raise NotImplementedError( - '{} datatype is not yet supported by claimit.py' - .format(claim.type)) + f'{claim.type} datatype is not yet supported by claimit.py') claim.setTarget(target) claims.append(claim)
diff --git a/scripts/commons_information.py b/scripts/commons_information.py index 116eefb..c1a6bd3 100755 --- a/scripts/commons_information.py +++ b/scripts/commons_information.py @@ -132,9 +132,8 @@ if lang != '': tmp_page = pywikibot.Page(page.site, lang, ns=10) if tmp_page not in self.lang_tmps: - pywikibot.warning( - '"{lang}" is not a valid language template on {site}' - .format(lang=lang, site=page.site)) + pywikibot.warning(f'{lang!r} is not a valid language ' + f'template on {page.site}') new = mwparserfromhell.nodes.template.Template(lang, [value]) self.replace_value(desc, new) edited = True diff --git a/scripts/commonscat.py b/scripts/commonscat.py index 8f89dfe..361bb27 100755 --- a/scripts/commonscat.py +++ b/scripts/commonscat.py @@ -38,7 +38,7 @@ # *Found one template. Add this template # *Found more templates. Ask the user <- still have to implement this # -# (C) Pywikibot team, 2008-2022 +# (C) Pywikibot team, 2008-2023 # # Distributed under the terms of the MIT license. 
# @@ -317,10 +317,10 @@ commonscatLink = self.find_commons_category(page) if commonscatLink: if commonscatLink == page.title(): - text_to_add = '{{%s}}' % primaryCommonscat + text_to_add = f'{{{{{primaryCommonscat}}}}}' else: - text_to_add = '{{{{{}|{}}}}}'.format(primaryCommonscat, - commonscatLink) + text_to_add = ( + f'{{{{{primaryCommonscat}|{commonscatLink}}}}}') summary = self.opt.summary or i18n.twtranslate( page.site, 'add_text-adding', {'adding': text_to_add}) self.put_current(add_text(page.text, text_to_add), @@ -346,8 +346,7 @@ if linktitle and newcat != page.title(with_ns=False): newtext = re.sub(r'(?i){{%s|?[^{}]*(?:{{.*}})?}}' % oldtemplate, - '{{{{{}|{}|{}}}}}'.format(newtemplate, newcat, - linktitle), + f'{{{{{newtemplate}|{newcat}|{linktitle}}}}}', page.get()) elif newcat == page.title(with_ns=False): newtext = re.sub(r'(?i){{%s|?[^{}]*(?:{{.*}})?}}' diff --git a/scripts/coordinate_import.py b/scripts/coordinate_import.py index 22de794..21956a6 100755 --- a/scripts/coordinate_import.py +++ b/scripts/coordinate_import.py @@ -142,8 +142,8 @@ source = self.getSource(page.site) if source: newclaim.addSource(source) - pywikibot.info('Adding {}, {} to {}'.format( - coordinate.lat, coordinate.lon, item.title())) + pywikibot.info( + f'Adding {coordinate.lat}, {coordinate.lon} to {item.title()}') # todo: handle exceptions using self.user_add_claim try: item.addClaim(newclaim) diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py index 6f032da..02b3111 100755 --- a/scripts/data_ingestion.py +++ b/scripts/data_ingestion.py @@ -191,8 +191,7 @@ for key in sorted(params.keys()): value = params[key] if not key.startswith('_'): - description += '|{}={}\n'.format( - key, self._safe_template_value(value)) + description += f'|{key}={self._safe_template_value(value)}\n' description += '}}'
return description diff --git a/scripts/dataextend.py b/scripts/dataextend.py index d40e204..1a7a2fb 100755 --- a/scripts/dataextend.py +++ b/scripts/dataextend.py @@ -414,8 +414,7 @@ with codecs.open(self.datafile, **param) as f: for dtype in self.data: for key in self.data[dtype]: - f.write('{}:{}:{}\n'.format(dtype, key, - self.data[dtype][key])) + f.write(f'{dtype}:{key}:{self.data[dtype][key]}\n')
with codecs.open(self.nonamefile, **param) as f: for noname in self.noname: @@ -466,8 +465,7 @@ .format(self.label(prop), self.showtime(claim.getTarget()))) elif claim.type in ['external-id', 'commonsMedia']: - pywikibot.info('{}: {}'.format(self.label(prop), - claim.getTarget())) + pywikibot.info(f'{self.label(prop)}: {claim.getTarget()}') elif claim.type == 'quantity': pywikibot.info( '{}: {} {}' @@ -691,11 +689,9 @@ if day is None and month == 0: month = None if month and month > 12: - raise ValueError('Date seems to have an invalid month number {}' - .format(month)) + raise ValueError(f'Date seems to have an invalid month number {month}') if day and day > 31: - raise ValueError('Date seems to have an invalid day number {}' - .format(day)) + raise ValueError(f'Date seems to have an invalid day number {day}') if not year: raise ValueError(f"Can't interpret date {text}") return pywikibot.WbTime(year=year, month=month, day=day, precision=9 @@ -799,8 +795,7 @@ for claim in newclaims: try: pywikibot.info( - '{}: {}'.format(self.label(claim[0]), - self.label(claim[1]))) + f'{self.label(claim[0])}: {self.label(claim[1])}') except ValueError: newclaims = [nclaim for nclaim in newclaims @@ -915,8 +910,7 @@ except OtherPageSaveError as ex: if claim[1].startswith('!i!'): pywikibot.info( - 'Unable to save image {}: {}' - .format(claim[1][3:], ex)) + f'Unable to save image {claim[1][3:]}: {ex}') continue
raise @@ -1040,11 +1034,9 @@ if editdescriptions: item.editDescriptions(editdescriptions) for prop in unidentifiedprops: - pywikibot.info('Unknown external {} ({})' - .format(prop, self.label(prop))) + pywikibot.info(f'Unknown external {prop} ({self.label(prop)})') for prop in failedprops: - pywikibot.info('External failed to load: {} ({})' - .format(prop, self.label(prop))) + pywikibot.info(f'External failed to load: {prop} ({self.label(prop)})') if longtexts: if unidentifiedprops or failedprops: pywikibot.input('Press Enter to continue') @@ -2496,11 +2488,9 @@ @property def url(self): if self.isperson: - return 'http://id.loc.gov/authorities/names/{id}.html'.format( - id=self.id) + return f'http://id.loc.gov/authorities/names/{self.id}.html' if self.id.startswith('s'): - return 'http://id.loc.gov/authorities/subjects/{id}.html'.format( - id=self.id) + return f'http://id.loc.gov/authorities/subjects/{self.id}.html' return None
@property @@ -2553,9 +2543,7 @@ if result and '[' not in result: m = re.match(r'(\d+)[/-](\d+)[/-](\d+)', result) if m: - result = '{}-{}-{}'.format( - m[2], m[1], m[3] if len(m[3]) > 2 else '19' + m[3] - ) + result = f"{m[2]}-{m[1]}-{m[3] if len(m[3]) > 2 else '19' + m[3]}" return result
return None @@ -2572,9 +2560,7 @@ if result and '[' not in result: m = re.match(r'(\d+)[/-](\d+)[/-](\d+)', result) if m: - result = '{}-{}-{}'.format( - m[2], m[1], m[3] if len(m[3]) > 2 else '19' + m[3] - ) + result = f"{m[2]}-{m[1]}-{m[3] if len(m[3]) > 2 else '19' + m[3]}" return result
return None @@ -4167,8 +4153,7 @@ if section: result = [] splitter = 'et' if ' et ' in section else 'und' - for subsection in section.split('.')[0].split(' {} ' - .format(splitter)): + for subsection in section.split('.')[0].split(f' {splitter} '): result += self.findallbyre(r'([\w\s]+)', subsection, 'occupation') return result @@ -5807,8 +5792,7 @@ return None
if county: - return self.getdata('county', '{} county, {}' - .format(county, state)) + return self.getdata('county', f'{county} county, {state}') return self.getdata('state', state)
def findcoords(self, html: str): @@ -11822,8 +11806,7 @@ self.language = 'it'
def getvalue(self, field, html, dtype=None): - return self.findbyre('<B>{}:(?:<[^<>]*>)*([^<>]+)<' - .format(field), html, dtype) + return self.findbyre(f'<B>{field}:(?:<[^<>]*>)*([^<>]+)<', html, dtype)
def findnames(self, html) -> List[str]: result = [] @@ -13887,8 +13870,7 @@ if self.language in ['commons', 'species']: site = 'wikimedia' self.dbname = f'{site.title()} {self.language.upper()}' - self.urlbase = 'https://{}.{}.org/wiki/{{id}}'.format( - self.language, site) + self.urlbase = f'https://{self.language}.{site}.org/wiki/{{id}}' self.urlbase3 = 'https://{}.{}.org/w/index.php?title={{id}}&veswitched=1&... self.language, site) self.hrtre = '{{(.*?)}}' @@ -13900,7 +13882,7 @@ def prepare(self, html: str): def reworkwikilink(wikipart): parts = wikipart[1].split('|') - return '[[{}]]'.format(parts[0] if ':' in parts[0] else parts[-1]) + return f"[[{parts[0] if ':' in parts[0] else parts[-1]}]]"
if not html: return None diff --git a/scripts/delete.py b/scripts/delete.py index 39b39e1..e4938e4 100755 --- a/scripts/delete.py +++ b/scripts/delete.py @@ -51,7 +51,7 @@ python pwb.py delete -cat:"To delete" -always """ # -# (C) Pywikibot team, 2013-2022 +# (C) Pywikibot team, 2013-2023 # # Distributed under the terms of the MIT license. # @@ -160,8 +160,8 @@ pywikibot.warning('There are {} pages that link to {}.' .format(total, self.current_page)) else: - pywikibot.warning('There is a page that links to {}.' - .format(self.current_page)) + pywikibot.warning( + f'There is a page that links to {self.current_page}.')
show_n_pages = self.opt.isorphan width = len(max((ns.canonical_prefix() for ns in refs), key=len)) diff --git a/scripts/djvutext.py b/scripts/djvutext.py index 2fdad67..6b9a858 100755 --- a/scripts/djvutext.py +++ b/scripts/djvutext.py @@ -107,10 +107,7 @@ def generator(self): """Generate pages from specified page interval.""" for page_number in self.page_number_gen(): - title = '{page_ns}:{prefix}/{number}'.format( - page_ns=self._page_ns, - prefix=self._prefix, - number=page_number) + title = f'{self._page_ns}:{self._prefix}/{page_number}' page = ProofreadPage(self._index.site, title) page.page_number = page_number # remember page number in djvu file yield page @@ -194,8 +191,7 @@
site = pywikibot.Site() if not site.has_extension('ProofreadPage'): - pywikibot.error('Site {} must have ProofreadPage extension.' - .format(site)) + pywikibot.error(f'Site {site} must have ProofreadPage extension.') return
index_page = pywikibot.Page(site, index, ns=site.proofread_index_ns) diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py index 0fa7597..c6e2104 100755 --- a/scripts/harvest_template.py +++ b/scripts/harvest_template.py @@ -340,8 +340,7 @@ handler = getattr(self, 'handle_' + ppage.type.lower().replace('-', '_'), None) if not handler: - pywikibot.info('{} is not a supported datatype.' - .format(ppage.type)) + pywikibot.info(f'{ppage.type} is not a supported datatype.') return
exists_arg = set(self._get_option_with_fallback(options, 'exists')) @@ -399,9 +398,8 @@ return
if not self._get_option_with_fallback(options, 'islink'): - pywikibot.info( - '{} field {} value "{}" is not a wikilink. Skipping.' - .format(prop, field, value)) + pywikibot.info(f'{prop} field {field} value "{value}" is not a' ' wikilink. Skipping.') return
linked_item = self.template_link_target(item, site, value) diff --git a/scripts/interwiki.py b/scripts/interwiki.py index 580a9d0..66ee6b1 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -760,9 +760,8 @@ # Bug-check: Isn't there any work still in progress? We can't work on # different sites at a time! if self.pending: - raise RuntimeError( - "BUG: Can't start to work on {}; still working on {}" - .format(site, self.pending)) + raise RuntimeError(f"BUG: Can't start to work on {site}; still " + f'working on {self.pending}') # Prepare a list of suitable pages result = [] for page in self.todo.filter(site): @@ -890,8 +889,7 @@ self.addIfNew(alternativePage, counter, None) else: pywikibot.info( - 'NOTE: ignoring {} and its interwiki links' - .format(linkedPage)) + f'NOTE: ignoring {linkedPage} and its interwiki links') return True
# same namespaces, no problem @@ -1014,8 +1012,7 @@
def reportInterwikilessPage(self, page) -> None: """Report interwikiless page.""" - self.conf.note('{} does not have any interwiki links' - .format(self.origin)) + self.conf.note(f'{self.origin} does not have any interwiki links') if config.without_interwiki: with codecs.open( pywikibot.config.datafilepath('without_interwiki.txt'), @@ -1077,8 +1074,7 @@ else: return False
- self.conf.note('{} is {}redirect to {}' - .format(page, redir, redirect_target)) + self.conf.note(f'{page} is {redir}redirect to {redirect_target}') if self.origin is None or page == self.origin: # the 1st existig page becomes the origin page, if none was # supplied @@ -1200,8 +1196,8 @@ filename = interwiki_graph.getFilename( self.origin, extension=config.interwiki_graph_formats[0]) - f.write(' [{}{} graph]' - .format(config.interwiki_graph_url, filename)) + f.write( f' [{config.interwiki_graph_url}{filename} graph]') f.write('\n') # FIXME: What errors are we catching here? except Exception: @@ -1236,9 +1232,8 @@ break else: if config.interwiki_shownew: - pywikibot.info( - '{}: {} gives new interwiki {}' - .format(self.origin, page, linkedPage)) + pywikibot.info(f'{self.origin}: {page} gives new ' f'interwiki {linkedPage}') if self.forcedStop: break
@@ -1622,8 +1617,7 @@ self.conf.note(f'No changes needed on page {page}') return False
- pywikibot.info('<<lightpurple>>Updating links on page {}.' - .format(page)) + pywikibot.info(f'<<lightpurple>>Updating links on page {page}.') pywikibot.info(f'Changes to be made: {mods}') oldtext = page.get() template = (page.namespace() == 10) @@ -1758,8 +1752,7 @@ linkedPages = {pywikibot.Page(link) for link in page.iterlanglinks()} except NoPageError: - pywikibot.warning('Page {} does no longer exist?!' - .format(page)) + pywikibot.warning(f'Page {page} does no longer exist?!') break
# To speed things up, create a dictionary which maps sites @@ -1876,8 +1869,7 @@ del tmpl if loc is not None and loc in page.title(): pywikibot.info( - 'Skipping: {} is a templates subpage' - .format(page.title())) + f'Skipping: {page.title()} is a templates subpage') continue break else: # generator stopped diff --git a/scripts/interwikidata.py b/scripts/interwikidata.py index ef4f49c..b2bba9d 100755 --- a/scripts/interwikidata.py +++ b/scripts/interwikidata.py @@ -29,7 +29,7 @@ can be set within a settings file which is scripts.ini by default. """
-# (C) Pywikibot team, 2015-2022 +# (C) Pywikibot team, 2015-2023 # # Distributed under the terms of the MIT license. # @@ -98,8 +98,7 @@ self.iwlangs = pywikibot.textlib.getLanguageLinks( self.current_page.text, insite=self.current_page.site) if not self.iwlangs: - output('No interlanguagelinks on {page}'.format( - page=self.current_page.title(as_link=True))) + output(f'No interlanguagelinks on {self.current_page}') return try: item = pywikibot.ItemPage.fromPage(self.current_page) @@ -201,8 +200,7 @@ return False item = list(wd_data).pop() if self.current_page.site.dbName() in item.sitelinks: - warning('Interwiki conflict in {}, skipping...' - .format(item.title(as_link=True))) + warning(f'Interwiki conflict in {item}, skipping...') return False output('Adding link to ' + item.title()) item.setSitelink(self.current_page, summary='Added ' + ( diff --git a/scripts/listpages.py b/scripts/listpages.py index 44e8d34..469d87f 100755 --- a/scripts/listpages.py +++ b/scripts/listpages.py @@ -228,8 +228,7 @@ base_dir))
if not os.path.exists(base_dir): - pywikibot.info('Directory "{}" does not exist.' - .format(base_dir)) + pywikibot.info(f'Directory "{base_dir}" does not exist.') choice = pywikibot.input_yn('Do you want to create it ("No" ' 'to continue without saving)?') if choice: diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py index 400ff41..806d144 100755 --- a/scripts/maintenance/cache.py +++ b/scripts/maintenance/cache.py @@ -143,12 +143,11 @@ end = self.key.index(')')
if not end: - raise ParseError('End of Site() keyword not found: {}' - .format(self.key)) + raise ParseError(f'End of Site() keyword not found: {self.key}')
if 'Site' not in self.key[0:start]: - raise ParseError('Site() keyword not found at start of key: {}' - .format(self.key)) + raise ParseError( + f'Site() keyword not found at start of key: {self.key}')
site = self.key[0:end + 1] if site[0:5] == 'Site(': @@ -171,20 +170,20 @@
end = self.key.index(')', start + 5) if not end: - raise ParseError('End of User() keyword not found: {}' - .format(self.key)) + raise ParseError( + f'End of User() keyword not found: {self.key}') username = self.key[start:end] elif self.key[start:start + 12] == 'LoginStatus(': end = self.key.index(')', start + 12) if not end: - raise ParseError('End of LoginStatus() keyword not found: {}' - .format(self.key)) + raise ParseError( + f'End of LoginStatus() keyword not found: {self.key}') login_status = self.key[start:end + 1] # If the key does not contain User(..) or LoginStatus(..), # it must be the old key format which only contains Site and params elif self.key[start:start + 3] != "[('": - raise ParseError('Keyword after Site not recognised: {}...' - .format(self.key)) + raise ParseError( + f'Keyword after Site not recognised: {self.key}...')
start = end + 1
@@ -292,8 +291,8 @@ try: entry.parse_key() except ParseError as e: - pywikibot.error('Problems parsing {} with key {}' - .format(entry.filename, entry.key)) + pywikibot.error( + f'Problems parsing {entry.filename} with key {entry.key}') pywikibot.error(e) continue
@@ -405,7 +404,7 @@ """Return a pretty formatted parameters list.""" lines = '' for key, items in sorted(entry._params.items()): - lines += '{}={}\n'.format(key, ', '.join(items)) + lines += f"{key}={', '.join(items)}\n" return lines
diff --git a/scripts/maintenance/make_i18n_dict.py b/scripts/maintenance/make_i18n_dict.py index a9d806d..869a757 100755 --- a/scripts/maintenance/make_i18n_dict.py +++ b/scripts/maintenance/make_i18n_dict.py @@ -82,8 +82,7 @@ for msg in sorted(self.messages.values()): label = f'{self.scriptname}-{msg}' if label in self.dict[code]: - print(" '{}': '{}'," - .format(label, self.dict[code][label])) + print(f" '{label}': '{self.dict[code][label]}',") print(' },') print('};')
@@ -99,9 +98,8 @@ if code == 'qqq': if code not in self.dict: self.dict[code] = {} - self.dict[code][label] = ( - 'Edit summary for message {} of {} report' - .format(newmsg, self.scriptname)) + self.dict[code][label] = (f'Edit summary for message {newmsg} ' + f'of {self.scriptname} report') elif code != 'commons': if code not in self.dict: self.dict[code] = {} diff --git a/scripts/movepages.py b/scripts/movepages.py index f9f9e77..029ef2b 100755 --- a/scripts/movepages.py +++ b/scripts/movepages.py @@ -192,8 +192,7 @@ self.create_title = create_title
choice = pywikibot.input_choice( - 'Change the page title ' - 'to {!r}?'.format(create_title(page)), + f'Change the page title to {create_title(page)!r}?', choices)
if choice == 'y': @@ -236,8 +235,7 @@ for old_page, new_page in zip_longest(*page_gen, fillvalue=None): if new_page is None: pywikibot.warning( - 'file {} contains odd number ' - 'of links'.format(filename)) + f'file {filename} contains odd number of links') else: from_to_pairs.append([old_page.title(), new_page.title()]) elif opt in ('always', 'noredirect', 'skipredirects'): diff --git a/scripts/newitem.py b/scripts/newitem.py index c4a1f5c..61cdd06 100755 --- a/scripts/newitem.py +++ b/scripts/newitem.py @@ -84,11 +84,9 @@ pywikibot.info('Doing a null edit on the page.') page.touch() except (NoCreateError, NoPageError): - pywikibot.error('Page {} does not exist.'.format( - page.title(as_link=True))) + pywikibot.error(f'Page {page.title(as_link=True)} does not exist.') except LockedPageError: - pywikibot.error('Page {} is locked.'.format( - page.title(as_link=True))) + pywikibot.error(f'Page {page.title(as_link=True)} is locked.') except PageSaveRelatedError as e: pywikibot.error(f'Page {page} not saved:\n{e.args}')
diff --git a/scripts/noreferences.py b/scripts/noreferences.py index 65c8308..c361185 100755 --- a/scripts/noreferences.py +++ b/scripts/noreferences.py @@ -638,9 +638,8 @@ .format(section)) index = match.end() else: - pywikibot.info( - 'Adding references section before {} section...\n' - .format(section)) + pywikibot.info(f'Adding references section before ' + f'{section} section...\n') index = match.start() ident = match['ident'] return self.createReferenceSection(oldText, index, diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py index 2e8a1fc..f0054f0 100755 --- a/scripts/nowcommons.py +++ b/scripts/nowcommons.py @@ -356,8 +356,7 @@ 'Does the description on Commons contain all required ' 'source and license\ninformation?', default=False): local_file_page.delete( - '{} [[:commons:File:{}]]' - .format(self.summary, file_on_commons), + f'{self.summary} [[:commons:File:{file_on_commons}]]', prompt=False) self.counter['delete'] += 1 else: diff --git a/scripts/protect.py b/scripts/protect.py index e8697de..3fb97c6 100755 --- a/scripts/protect.py +++ b/scripts/protect.py @@ -57,7 +57,7 @@ # # Created by modifying delete.py # -# (C) Pywikibot team, 2008-2022 +# (C) Pywikibot team, 2008-2023 # # Distributed under the terms of the MIT license. # @@ -139,10 +139,10 @@ if level == default: default_char = first_char[-1]
- choice = pywikibot.input_choice('Choose a protection level to {}:' - .format(operation), - zip(levels, first_char), - default=default_char) + choice = pywikibot.input_choice( + f'Choose a protection level to {operation}:', + zip(levels, first_char), + default=default_char)
return levels[first_char.index(choice)]
diff --git a/scripts/redirect.py b/scripts/redirect.py index 681795d..28d85d9 100755 --- a/scripts/redirect.py +++ b/scripts/redirect.py @@ -191,9 +191,8 @@ if target_link and target_link.title: source = pywikibot.Link(entry.title, self.site) if target_link.anchor: - pywikibot.info( - 'HINT: {} is a redirect with a pipelink.' - .format(entry.title)) + pywikibot.info(f'HINT: {entry.title} is a redirect' + ' with a pipelink.') redict[space_to_underscore(source)] = ( space_to_underscore(target_link)) return redict, pageTitles @@ -406,8 +405,7 @@ elif action == 'both': self.treat_page = self.fix_double_or_delete_broken_redirect else: - raise NotImplementedError('No valid action "{}" found.' - .format(action)) + raise NotImplementedError(f'No valid action "{action}" found.')
def get_sd_template(self, site=None) -> Optional[str]: """Look for speedy deletion template and return it. diff --git a/scripts/reflinks.py b/scripts/reflinks.py index bd3de5b..fc12ffe 100755 --- a/scripts/reflinks.py +++ b/scripts/reflinks.py @@ -42,7 +42,7 @@
¶ms; """ -# (C) Pywikibot team, 2008-2022 +# (C) Pywikibot team, 2008-2023 # # Distributed under the terms of the MIT license. # @@ -484,8 +484,8 @@ @staticmethod def httpError(err_num, link, pagetitleaslink) -> None: """Log HTTP Error.""" - pywikibot.stdout('HTTP error ({}) for {} on {}' - .format(err_num, link, pagetitleaslink)) + pywikibot.stdout( + f'HTTP error ({err_num}) for {link} on {pagetitleaslink}')
@staticmethod def getPDFTitle(ref, response) -> None: @@ -542,7 +542,7 @@ return True
if not page.has_permission(): - pywikibot.warning("You can't edit page {page}" .format(page=page)) + pywikibot.warning(f"You can't edit page {page}") return True
return False diff --git a/scripts/replace.py b/scripts/replace.py index 9d9cd94..e4feecb 100755 --- a/scripts/replace.py +++ b/scripts/replace.py @@ -659,8 +659,8 @@
if self.isTitleExcepted(page.title()): pywikibot.warning( - 'Skipping {} because the title is on the exceptions list.' - .format(page)) + f'Skipping {page} because the title is on the exceptions list.' + ) return True
if not page.has_permission(): @@ -738,8 +738,7 @@ try: original_text = page.get(get_redirect=True, force=True) except NoPageError: - pywikibot.info('Page {} has been deleted.' - .format(page.title())) + pywikibot.info(f'Page {page.title()} has been deleted.') break new_text = original_text last_text = None @@ -871,13 +870,13 @@ else: except_clause = ''
- sql = """ + sql = f""" SELECT page_namespace, page_title FROM page JOIN text ON (page_id = old_id) -{} -{} -LIMIT 200""".format(where_clause, except_clause) +{where_clause} +{except_clause} +LIMIT 200"""
return pagegenerators.MySQLPageGenerator(sql)
diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py index a9512a1..ace1c14 100755 --- a/scripts/replicate_wiki.py +++ b/scripts/replicate_wiki.py @@ -153,8 +153,8 @@ for site in self.sites: sync_overview_page = Page(site, f'User:{site.user()}/sync.py overview') - output = '== {} ==\n\n'.format( - twtranslate(site, 'replicate_wiki-headline')) + output = ( + f"== {twtranslate(site, 'replicate_wiki-headline')} ==\n\n") if self.differences[site]: output += ''.join(f'* [[:{page_title}]]\n' for page_title in self.differences[site]) @@ -164,7 +164,7 @@ output += '\n\n== {} ==\n\n'.format( twtranslate(site, 'replicate_wiki-missing-users')) if self.user_diff[site]: - output += ''.join('* {}\n'.format(user_name.replace('_', ' ')) + output += ''.join(f"* {user_name.replace('_', ' ')}\n" for user_name in self.user_diff[site]) else: output += twtranslate(site, 'replicate_wiki-same-users') diff --git a/scripts/revertbot.py b/scripts/revertbot.py index 5edf9c2..8c08503 100755 --- a/scripts/revertbot.py +++ b/scripts/revertbot.py @@ -35,7 +35,7 @@
""" # -# (C) Pywikibot team, 2008-2022 +# (C) Pywikibot team, 2008-2023 # # Distributed under the terms of the MIT license. # @@ -82,11 +82,11 @@ if callback(item): result = self.revert(item) if result: - pywikibot.info('{}: {}'.format(item['title'], result)) + pywikibot.info(f"{item['title']}: {result}") else: - pywikibot.info('Skipped {}'.format(item['title'])) + pywikibot.info(f"Skipped {item['title']}") else: - pywikibot.info('Skipped {} by callback'.format(item['title'])) + pywikibot.info(f"Skipped {item['title']} by callback")
@staticmethod def callback(item: Container) -> bool: @@ -141,8 +141,8 @@ except Error: pass else: - return 'The edit(s) made in {} by {} was rollbacked'.format( - page.title(), self.user) + return (f'The edit(s) made in {page.title()} by {self.user}' + ' was rollbacked')
pywikibot.exception(exc_info=False) return False diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py index 2bd8527..8694a1b 100755 --- a/scripts/solve_disambiguation.py +++ b/scripts/solve_disambiguation.py @@ -75,7 +75,7 @@
""" # -# (C) Pywikibot team, 2003-2022 +# (C) Pywikibot team, 2003-2023 # # Distributed under the terms of the MIT license. # @@ -430,8 +430,8 @@ elif self.primaryIgnoreManager.isIgnored(refs[i]): del refs[i] if len(refs) < self.minimum: - pywikibot.info('Found only {} pages to work on; skipping.' - .format(len(refs))) + pywikibot.info( + f'Found only {len(refs)} pages to work on; skipping.') return pywikibot.info(f'Will work on {len(refs)} pages.') yield from refs @@ -835,8 +835,7 @@ 'Do you want to make redirect {} point to {}?' .format(ref_page.title(), target), default=False, automatic_quit=False): - redir_text = '#{} [[{}]]' \ - .format(self.site.redirect(), target) + redir_text = f'#{self.site.redirect()} [[{target}]]' try: ref_page.put(redir_text, summary=self.summary, asynchronous=True) @@ -844,8 +843,8 @@ pywikibot.info(f'Page not saved: {error.args}') else: choice = pywikibot.input_choice( - 'Do you want to work on pages linking to {}?' - .format(ref_page.title()), + f'Do you want to work on pages linking to ' + f'{ref_page.title()}?', [('yes', 'y'), ('no', 'n'), ('change redirect', 'c')], 'n', automatic_quit=False) if choice == 'y': @@ -860,9 +859,8 @@ text = ref_page.get(get_redirect=True) include = 'redirect' except NoPageError: - pywikibot.info( - 'Page [[{}]] does not seem to exist?! Skipping.' - .format(ref_page.title())) + pywikibot.info(f'Page [[{ref_page.title()}]] does not seem to' + ' exist?! Skipping.') else: ignore_reason = self.checkContents(text) if ignore_reason: @@ -941,9 +939,9 @@
if self.dn_template_str: # '?', '/' for old choice - options += [AliasOption('tag template %s' % - self.dn_template_str, - ['t', '?', '/'])] + options += [AliasOption( + f'tag template {self.dn_template_str}', + ['t', '?', '/'])] options += [context_option] if not edited: options += [ShowPageOption('show disambiguation page', 'd', @@ -1052,9 +1050,7 @@ new_targets.append(new_page_title)
if replaceit and trailing_chars: - newlink = '[[{}{}]]{}'.format(new_page_title, - section, - trailing_chars) + newlink = f'[[{new_page_title}{section}]]{trailing_chars}' elif replaceit or (new_page_title == link_text and not section): newlink = f'[[{new_page_title}]]' @@ -1072,8 +1068,7 @@ link_text[:len(new_page_title)], link_text[len(new_page_title):]) else: - newlink = '[[{}{}|{}]]'.format(new_page_title, - section, link_text) + newlink = f'[[{new_page_title}{section}|{link_text}]]' text = text[:m.start()] + newlink + text[m.end():] continue
diff --git a/scripts/speedy_delete.py b/scripts/speedy_delete.py index 339a095..8052635 100755 --- a/scripts/speedy_delete.py +++ b/scripts/speedy_delete.py @@ -22,7 +22,7 @@ .. note:: This script currently only works for the Wikipedia project. """ # -# (C) Pywikibot team, 2007-2022 +# (C) Pywikibot team, 2007-2023 # # Distributed under the terms of the MIT license. # @@ -331,8 +331,7 @@ self.csd_cat = self.site.page_from_repository(self.csd_cat_item) if self.csd_cat is None: raise Error( - 'No category for speedy deletion found for {}' - .format(self.site)) + f'No category for speedy deletion found for {self.site}') else: self.csd_cat = pywikibot.Category(self.site, csd_cat) self.saved_progress = None @@ -373,8 +372,8 @@ def get_reason_for_deletion(self, page): """Get a reason for speedy deletion from operator.""" suggested_reason = self.guess_reason_for_deletion(page) - pywikibot.info('The suggested reason is: <<lightred>>{}' - .format(suggested_reason)) + pywikibot.info( + f'The suggested reason is: <<lightred>>{suggested_reason}')
# We don't use i18n.translate() here because for some languages the # entry is intentionally left out. @@ -428,7 +427,7 @@ """Process one page.""" page = self.current_page
- color_line = '<<blue>>{}<<default>>'.format('_' * 80) + color_line = f"<<blue>>{'_' * 80}<<default>>" pywikibot.info(color_line) pywikibot.info(page.extract('wiki', lines=self.LINES)) pywikibot.info(color_line) diff --git a/scripts/template.py b/scripts/template.py index d44250b..94dccad 100755 --- a/scripts/template.py +++ b/scripts/template.py @@ -187,8 +187,7 @@ else: template = pywikibot.Page(self.site, new, ns=10) if not template.exists(): - pywikibot.warning('Template "{}" does not exist.' - .format(new)) + pywikibot.warning(f'Template "{new}" does not exist.') if not pywikibot.input_yn('Do you want to proceed anyway?', default=False, automatic_quit=False): diff --git a/scripts/touch.py b/scripts/touch.py index fbd082d..60174f6 100755 --- a/scripts/touch.py +++ b/scripts/touch.py @@ -57,11 +57,9 @@ try: page.touch(botflag=self.opt.botflag) except (NoCreateError, NoPageError): - pywikibot.error('Page {} does not exist.' - .format(page.title(as_link=True))) + pywikibot.error(f'Page {page.title(as_link=True)} does not exist.') except LockedPageError: - pywikibot.error('Page {} is locked.' - .format(page.title(as_link=True))) + pywikibot.error(f'Page {page.title(as_link=True)} is locked.') except PageSaveRelatedError as e: pywikibot.error(f'Page {page} not saved:\n{e.args}') else: @@ -122,8 +120,8 @@ self.counter['purge'] += length self.pages[site].clear()
- pywikibot.info('{} pages{} purged' - .format(length, '' if done else ' not')) + pywikibot.info( + f"{length} pages{'' if done else ' not'} purged") if not flush and not config.simulate: pywikibot.info('Waiting due to purge rate limit') pywikibot.sleep(62) diff --git a/scripts/transferbot.py b/scripts/transferbot.py index 67791e6..6aa75c7 100755 --- a/scripts/transferbot.py +++ b/scripts/transferbot.py @@ -149,9 +149,7 @@
if not page.exists(): pywikibot.warning( - "Page {} doesn't exist".format( - page.title(as_link=True) - ) + f"Page {page.title(as_link=True)} doesn't exist" ) continue
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py index 680a3db..a04d25d 100755 --- a/scripts/weblinkchecker.py +++ b/scripts/weblinkchecker.py @@ -352,8 +352,8 @@ self.site = site self.semaphore = threading.Semaphore() self.datfilename = pywikibot.config.datafilepath( - 'deadlinks', 'deadlinks-{}-{}.dat'.format(self.site.family.name, - self.site.code)) + 'deadlinks', + f'deadlinks-{self.site.family.name}-{self.site.code}.dat') # Count the number of logged links, so that we can insert captions # from time to time self.log_count = 0 @@ -373,8 +373,7 @@ for (page_title, date, error) in self.history_dict[url]: # ISO 8601 formulation iso_date = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(date)) - error_report += '** In [[{}]] on {}, {}\n'.format( - page_title, iso_date, error) + error_report += f'** In [[{page_title}]] on {iso_date}, {error}\n' pywikibot.info('** Logging link for deletion.') txtfilename = pywikibot.config.datafilepath('deadlinks', 'results-{}-{}.txt' @@ -385,8 +384,7 @@ self.log_count += 1 if self.log_count % 30 == 0: # insert a caption - txtfile.write('=== {} ===\n' - .format(containing_page.title()[:3])) + txtfile.write(f'=== {containing_page.title()[:3]} ===\n') txtfile.write(error_report)
if self.report_thread and not containing_page.isTalkPage(): @@ -413,8 +411,7 @@ archive_url = get_archive_url(url) except Exception as e: pywikibot.warning( - 'get_closest_memento_url({}) failed: {}'.format( - url, e)) + f'get_closest_memento_url({url}) failed: {e}') archive_url = None self.log(url, error, page, archive_url) else: @@ -513,8 +510,8 @@ count = '' # Check if there is already such a caption on # the talk page. - while re.search('= *{}{} *=' - .format(caption, count), content) is not None: + while re.search( + f'= *{caption}{count} *=', content) is not None: i += 1 count = ' ' + str(i) caption += count @@ -602,8 +599,8 @@
num = self.count_link_check_threads() if num: - pywikibot.info('<<yellow>>>Remaining {} threads will be killed.' - .format(num)) + pywikibot.info( + f'<<yellow>>>Remaining {num} threads will be killed.')
if self.history.report_thread: self.history.report_thread.shutdown() diff --git a/scripts/welcome.py b/scripts/welcome.py index 26cb1db..917d5d7 100755 --- a/scripts/welcome.py +++ b/scripts/welcome.py @@ -156,7 +156,7 @@ badwords at all but can be used for some bad-nickname. """ # -# (C) Pywikibot team, 2006-2022 +# (C) Pywikibot team, 2006-2023 # # Distributed under the terms of the MIT license. # @@ -482,9 +482,8 @@ # Raises KeyError if site is not in netext dict. site_netext = i18n.translate(self.site, netext) if site_netext is None: - raise KeyError( - 'welcome.py is not localized for site {} in netext dict.' - .format(self.site)) + raise KeyError(f'welcome.py is not localized for site {self.site}' + ' in netext dict.') self.welcome_text = site_netext
def bad_name_filer(self, name, force: bool = False) -> bool: @@ -597,9 +596,8 @@
if answer.lower() in ['yes', 'y'] or not globalvar.confirm: self.show_status() - pywikibot.info( - '{} is possibly an unwanted username. It will be reported.' - .format(name)) + pywikibot.info(f'{name} is possibly an unwanted username. It will' + ' be reported.') if hasattr(self, '_BAQueue'): self._BAQueue.append(name) else: @@ -734,9 +732,8 @@ sign_page_name = i18n.translate(self.site, random_sign) if not sign_page_name: self.show_status(Msg.WARN) - pywikibot.info( - "{} doesn't allow random signature, force disable." - .format(self.site)) + pywikibot.info(f"{self.site} doesn't allow random signature," + ' force disable.') globalvar.random_sign = False return []
@@ -857,8 +854,7 @@ pywikibot.info('Putting the log of the latest user...') else: pywikibot.info( - 'Putting the log of the latest {} users...' - .format(welcomed_count)) + f'Putting the log of the latest {welcomed_count} users...') self.makelogpage()
if hasattr(self, '_BAQueue'): @@ -885,8 +881,7 @@ # Filename and Pywikibot path # file where is stored the random signature index filename = pywikibot.config.datafilepath( - 'welcome-{}-{}.data'.format(self.site.family.name, - self.site.code)) + f'welcome-{self.site.family.name}-{self.site.code}.data') with open(filename, 'wb') as f: pickle.dump(self.welcomed_users, f, protocol=config.pickle_protocol)