jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/835290 )
Change subject: automatically upgrade syntax for python 3.6 ......................................................................
automatically upgrade syntax for python 3.6
Change-Id: I1d9532fa1b9553e3dc90a2bf25f11a2f07b89431 --- M pywikibot/__init__.py M pywikibot/_wbtypes.py M pywikibot/bot.py M pywikibot/bot_choice.py M pywikibot/comms/eventstreams.py M pywikibot/comms/http.py M pywikibot/config.py M pywikibot/cosmetic_changes.py M pywikibot/data/api/_generators.py M pywikibot/data/api/_optionset.py M pywikibot/data/api/_paraminfo.py M pywikibot/data/api/_requests.py M pywikibot/data/mysql.py M pywikibot/data/sparql.py M pywikibot/date.py M pywikibot/diff.py M pywikibot/editor.py M pywikibot/exceptions.py M pywikibot/family.py M pywikibot/i18n.py M pywikibot/interwiki_graph.py M pywikibot/logentries.py M pywikibot/logging.py M pywikibot/login.py M pywikibot/page/_collections.py M pywikibot/page/_decorators.py M pywikibot/page/_links.py M pywikibot/page/_pages.py M pywikibot/page/_revision.py M pywikibot/page/_wikibase.py M pywikibot/pagegenerators/_factory.py M pywikibot/pagegenerators/_filters.py M pywikibot/pagegenerators/_generators.py M pywikibot/proofreadpage.py M pywikibot/scripts/generate_family_file.py M pywikibot/scripts/generate_user_files.py M pywikibot/scripts/login.py M pywikibot/scripts/preload_sites.py M pywikibot/scripts/version.py M pywikibot/site/_apisite.py M pywikibot/site/_datasite.py M pywikibot/site/_extensions.py M pywikibot/site/_generators.py M pywikibot/site/_interwikimap.py M pywikibot/site/_namespace.py M pywikibot/site/_siteinfo.py M pywikibot/site/_upload.py M pywikibot/site_detect.py M pywikibot/specialbots/_upload.py M pywikibot/textlib.py M pywikibot/throttle.py M pywikibot/time.py M pywikibot/tools/__init__.py M pywikibot/tools/_deprecate.py M pywikibot/tools/chars.py M pywikibot/tools/collections.py M pywikibot/tools/djvu.py M pywikibot/tools/formatter.py M pywikibot/userinterfaces/gui.py M pywikibot/userinterfaces/terminal_interface_base.py M pywikibot/version.py M scripts/add_text.py M scripts/archivebot.py M scripts/blockpageschecker.py M scripts/category.py M scripts/category_redirect.py M 
scripts/checkimages.py M scripts/commons_information.py M scripts/commonscat.py M scripts/coordinate_import.py M scripts/cosmetic_changes.py M scripts/create_isbn_edition.py M scripts/data_ingestion.py M scripts/dataextend.py M scripts/delete.py M scripts/delinker.py M scripts/djvutext.py M scripts/download_dump.py M scripts/fixing_redirects.py M scripts/harvest_template.py M scripts/imagetransfer.py M scripts/interwiki.py M scripts/interwikidata.py M scripts/listpages.py M scripts/maintenance/cache.py M scripts/maintenance/colors.py M scripts/maintenance/make_i18n_dict.py M scripts/maintenance/sorting_order.py M scripts/maintenance/wikimedia_sites.py M scripts/movepages.py M scripts/newitem.py M scripts/noreferences.py M scripts/pagefromfile.py M scripts/parser_function_count.py M scripts/patrol.py M scripts/protect.py M scripts/redirect.py M scripts/reflinks.py M scripts/replace.py M scripts/replicate_wiki.py M scripts/revertbot.py M scripts/solve_disambiguation.py M scripts/speedy_delete.py M scripts/template.py M scripts/templatecount.py M scripts/touch.py M scripts/transferbot.py M scripts/watchlist.py M scripts/weblinkchecker.py M scripts/welcome.py M tests/__init__.py M tests/archivebot_tests.py M tests/aspects.py M tests/basesite_tests.py M tests/date_tests.py M tests/diff_tests.py M tests/djvu_tests.py M tests/dry_api_tests.py M tests/edit_tests.py M tests/eventstreams_tests.py M tests/file_tests.py M tests/i18n_tests.py M tests/link_tests.py M tests/linter_tests.py M tests/logentries_tests.py M tests/page_tests.py M tests/pagegenerators_tests.py M tests/paraminfo_tests.py M tests/patrolbot_tests.py M tests/plural_tests.py M tests/proofreadpage_tests.py M tests/pwb/print_env.py M tests/pwb/print_locals.py M tests/pwb_tests.py M tests/replacebot_tests.py M tests/site_generators_tests.py M tests/site_tests.py M tests/textlib_tests.py M tests/token_tests.py M tests/ui_options_tests.py M tests/ui_tests.py M tests/utils.py 142 files changed, 654 insertions(+), 
664 deletions(-)
Approvals: JJMC89: Looks good to me, approved jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py index a8e8889..ca3c648 100644 --- a/pywikibot/__init__.py +++ b/pywikibot/__init__.py @@ -409,7 +409,7 @@ assert isinstance(precision, str) self.precision = self.PRECISION[precision] else: - raise ValueError('Invalid precision: "{}"'.format(precision)) + raise ValueError(f'Invalid precision: "{precision}"')
@classmethod def fromTimestr(cls: Type['WbTime'], @@ -443,7 +443,7 @@ match = re.match(r'([-+]?\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z', datetimestr) if not match: - raise ValueError("Invalid format: '{}'".format(datetimestr)) + raise ValueError(f"Invalid format: '{datetimestr}'") t = match.groups() return cls(int(t[0]), int(t[1]), int(t[2]), int(t[3]), int(t[4]), int(t[5]), @@ -790,15 +790,15 @@
# validate page exists if not page.exists(): - raise ValueError('Page {} must exist.'.format(page)) + raise ValueError(f'Page {page} must exist.')
# validate page is on the right site, and that site supports the type if not data_site: raise ValueError( - 'The provided site does not support {}.'.format(label)) + f'The provided site does not support {label}.') if page.site != data_site: raise ValueError( - 'Page must be on the {} repository site.'.format(label)) + f'Page must be on the {label} repository site.')
# validate page title fulfills hard-coded Wikibase requirement # pcre regexp: '/^Data:[^\[\]#\:{|}]+.map$/u' for geo-shape @@ -1096,15 +1096,15 @@ try: tmp = __import__('pywikibot.site', fromlist=[interface]) except ImportError: - raise ValueError('Invalid interface name: {}'.format(interface)) + raise ValueError(f'Invalid interface name: {interface}') else: interface = getattr(tmp, interface)
if not issubclass(interface, BaseSite): - warning('Site called with interface={}'.format(interface.__name__)) + warning(f'Site called with interface={interface.__name__}')
user = normalize_username(user) - key = '{}:{}:{}:{}'.format(interface.__name__, fam, code, user) + key = f'{interface.__name__}:{fam}:{code}:{user}' if key not in _sites or not isinstance(_sites[key], interface): _sites[key] = interface(code=code, fam=fam, user=user) debug("Instantiated {} object '{}'" diff --git a/pywikibot/_wbtypes.py b/pywikibot/_wbtypes.py index 8484d5c..f9934a2 100644 --- a/pywikibot/_wbtypes.py +++ b/pywikibot/_wbtypes.py @@ -48,9 +48,9 @@ assert all(isinstance(item, str) for item in self._items)
values = ((attr, getattr(self, attr)) for attr in self._items) - attrs = ', '.join('{}={}'.format(attr, value) + attrs = ', '.join(f'{attr}={value}' for attr, value in values) - return '{}({})'.format(self.__class__.__name__, attrs) + return f'{self.__class__.__name__}({attrs})'
def __eq__(self, other: object) -> bool: if isinstance(other, self.__class__): diff --git a/pywikibot/bot.py b/pywikibot/bot.py index 6aaa9f0..be0d959 100644 --- a/pywikibot/bot.py +++ b/pywikibot/bot.py @@ -310,7 +310,7 @@ """ path, qualifier = name.rsplit('.', 1) root, ext = os.path.splitext(path) - return '{}.{}{}'.format(root, qualifier, ext) + return f'{root}.{qualifier}{ext}'
def init_handlers() -> None: @@ -468,10 +468,10 @@ .format(pywikibot.__version__))
# script call - log('COMMAND: {}'.format(sys.argv)) + log(f'COMMAND: {sys.argv}')
# script call time stamp - log('DATE: {} UTC'.format(datetime.datetime.utcnow())) + log(f'DATE: {datetime.datetime.utcnow()} UTC')
# new framework release/revision? (handle_args needs to be called first) try: @@ -482,10 +482,10 @@
# system if hasattr(os, 'uname'): - log('SYSTEM: {}'.format(os.uname())) + log(f'SYSTEM: {os.uname()}')
# config file dir - log('CONFIG FILE DIR: {}'.format(pywikibot.config.base_dir)) + log(f'CONFIG FILE DIR: {pywikibot.config.base_dir}')
# These are the main dependencies of pywikibot. check_package_list = [ @@ -521,10 +521,10 @@ param['timespec'] = 'seconds' mtime = version.get_module_mtime(module).isoformat(**param)
- log(' {} {}'.format(mtime, filename)) + log(f' {mtime} {filename}')
if config.log_pywiki_repo_version: - log('PYWIKI REPO VERSION: {}'.format(version.getversion_onlinerepo())) + log(f'PYWIKI REPO VERSION: {version.getversion_onlinerepo()}')
log('=' * 57)
@@ -740,7 +740,7 @@ if isinstance(c, Choice) and c.shortcut == choice: return c.handle()
- raise ValueError('Invalid choice "{}"'.format(choice)) + raise ValueError(f'Invalid choice "{choice}"')
def __call__(self, link: PageLinkType, text: str, groups: Mapping[str, str], @@ -1064,7 +1064,7 @@ """ messages = [] if exception: - messages.append('An error occurred: "{}".'.format(exception)) + messages.append(f'An error occurred: "{exception}".') if missing_generator: messages.append( 'Unable to execute script because no generator was defined.') @@ -1100,7 +1100,7 @@ """ modname = calledModuleName() # put quotation marks around all parameters - args = [modname] + ['"{}"'.format(s) for s in pywikibot.argvu[1:]] + args = [modname] + [f'"{s}"' for s in pywikibot.argvu[1:]] command_log_filename = config.datafilepath('logs', 'commands.log') try: command_log_file = codecs.open(command_log_filename, 'a', 'utf-8') @@ -1389,7 +1389,7 @@ """ if page != self._current_page: self._current_page = page - msg = 'Working on {!r}'.format(page.title()) + msg = f'Working on {page.title()!r}' if config.colorized_output: log(msg) stdout('\n\n>>> <<lightpurple>>{}<<default>> <<<' @@ -1725,7 +1725,7 @@ if not self._site: warning('Bot.site was not set before being retrieved.') self.site = pywikibot.Site() - warning('Using the default site: {}'.format(self.site)) + warning(f'Using the default site: {self.site}') assert self._site is not None return self._site
@@ -1742,11 +1742,11 @@ return
if site not in self._sites: - log('LOADING SITE {} VERSION: {}'.format(site, site.mw_version)) + log(f'LOADING SITE {site} VERSION: {site.mw_version}')
self._sites.add(site) if len(self._sites) == 2: - log('{} uses multiple sites'.format(self.__class__.__name__)) + log(f'{self.__class__.__name__} uses multiple sites') if self._site and self._site != site: log('{}: changing site from {} to {}' .format(self.__class__.__name__, self._site, site)) @@ -1893,7 +1893,7 @@ section = calledModuleName()
if (conf.read(self.INI) == [self.INI] and conf.has_section(section)): - pywikibot.output('Reading settings from {} file.'.format(self.INI)) + pywikibot.output(f'Reading settings from {self.INI} file.') options = {} for option, value in self.available_options.items(): if not conf.has_option(section, option): @@ -1908,7 +1908,7 @@ options[option] = method(section, option) for opt in set(conf.options(section)) - set(options): pywikibot.warning( - '"{}" is not a valid option. It was ignored.'.format(opt)) + f'"{opt}" is not a valid option. It was ignored.') options.update(kwargs) else: options = kwargs @@ -2009,7 +2009,7 @@ summary = i18n.twtranslate(self.current_page.site, self.summary_key, self.summary_parameters) - pywikibot.log('Use automatic summary message "{}"'.format(summary)) + pywikibot.log(f'Use automatic summary message "{summary}"') kwargs['summary'] = summary super().put_current(*args, **kwargs)
@@ -2146,7 +2146,7 @@ self.repo = self.site.data_repository() if self.repo is None: raise WikiBaseError( - '{} is not connected to a data repository'.format(self.site)) + f'{self.site} is not connected to a data repository')
def cacheSources(self) -> None: """ @@ -2397,7 +2397,7 @@ 'value': page.title(without_brackets=page.namespace() == 0) } }) - pywikibot.output('Creating item for {}...'.format(page)) + pywikibot.output(f'Creating item for {page}...') item = pywikibot.ItemPage(page.site.data_repository()) kwargs.setdefault('show_diff', False) result = self.user_edit_entity(item, data, summary=summary, **kwargs) @@ -2446,7 +2446,7 @@ item = self.create_item_for_page(page, asynchronous=False)
if not item and not self.treat_missing_item: - pywikibot.output("{} doesn't have a Wikidata item.".format(page)) + pywikibot.output(f"{page} doesn't have a Wikidata item.") return
self.treat_page_and_item(page, item) diff --git a/pywikibot/bot_choice.py b/pywikibot/bot_choice.py index 2dbbdc8..dbeacac 100644 --- a/pywikibot/bot_choice.py +++ b/pywikibot/bot_choice.py @@ -170,7 +170,7 @@ return '{}[{}]{}'.format( self.option[:index], shortcut, self.option[index + len(self.shortcut):]) - return '{} [{}]'.format(self.option, shortcut) + return f'{self.option} [{shortcut}]'
def result(self, value: str) -> Any: """Return the lowercased shortcut.""" @@ -434,7 +434,7 @@
if default is not None and self.test(default): value = self.parse(default) - default = '[{}]'.format(value) + default = f'[{value}]' else: default = ''
@@ -449,14 +449,14 @@ default = '' else: maximum = '' if self.maximum is None else str(self.maximum) - default = '-{}-'.format(default) if default else '-' + default = f'-{default}-' if default else '-' if self.minimum == self.maximum: rng = minimum else: rng = minimum + default + maximum else: rng = 'any' + default - return '{}<number> [{}]'.format(self.prefix, rng) + return f'{self.prefix}<number> [{rng}]'
def parse(self, value: str) -> int: """Return integer from value with prefix removed.""" diff --git a/pywikibot/comms/eventstreams.py b/pywikibot/comms/eventstreams.py index 21f591b..cd9c280 100644 --- a/pywikibot/comms/eventstreams.py +++ b/pywikibot/comms/eventstreams.py @@ -175,7 +175,7 @@ if kwargs['timeout'] == config.socket_timeout: kwargs.pop('timeout') return '{}({})'.format(self.__class__.__name__, ', '.join( - '{}={!r}'.format(k, v) for k, v in kwargs.items())) + f'{k}={v!r}' for k, v in kwargs.items()))
@property @cached @@ -191,7 +191,7 @@ host=self._site.eventstreams_host(), path=self._site.eventstreams_path(), streams=self._streams, - since='?since={}'.format(self._since) if self._since else '') + since=f'?since={self._since}' if self._since else '')
def set_maximum_items(self, value: int) -> None: """ @@ -282,7 +282,7 @@ if callable(func): self.filter[ftype].append(func) else: - raise TypeError('{} is not a callable'.format(func)) + raise TypeError(f'{func} is not a callable')
# register pairs of keys and items as a filter function for key, value in kwargs.items(): @@ -357,9 +357,9 @@ else: ignore_first_empty_warning = False elif event.event == 'error': - warning('Encountered error: {}'.format(event.data)) + warning(f'Encountered error: {event.data}') else: - warning('Unknown event {} occurred.'.format(event.event)) + warning(f'Unknown event {event.event} occurred.')
debug('{}: Stopped iterating due to exceeding item limit.' .format(self.__class__.__name__)) diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py index 4948f15..8b492f6 100644 --- a/pywikibot/comms/http.py +++ b/pywikibot/comms/http.py @@ -82,7 +82,7 @@ session.close()
if hasattr(sys, 'last_type'): - critical('Exiting due to uncaught exception {}'.format(sys.last_type)) + critical(f'Exiting due to uncaught exception {sys.last_type}')
log('Network session closed.')
@@ -295,7 +295,7 @@ # HTTP status 207 is also a success status for Webdav FINDPROP, # used by the version module. if response.status_code not in (HTTPStatus.OK, HTTPStatus.MULTI_STATUS): - warning('Http response status {}'.format(response.status_code)) + warning(f'Http response status {response.status_code}')
def fetch(uri: str, method: str = 'GET', headers: Optional[dict] = None, @@ -465,7 +465,7 @@ pywikibot.warning('Unknown or invalid encoding {!r}' .format(encoding)) except UnicodeDecodeError as e: - pywikibot.warning('{} found in {}'.format(e, content)) + pywikibot.warning(f'{e} found in {content}') else: return encoding
diff --git a/pywikibot/config.py b/pywikibot/config.py index 536e2e0..0862233 100644 --- a/pywikibot/config.py +++ b/pywikibot/config.py @@ -378,7 +378,7 @@ base_dir = os.path.normpath(os.path.join(os.getcwd(), base_dir)) # make sure this path is valid and that it contains user-config file if not os.path.isdir(base_dir): - raise RuntimeError("Directory '{}' does not exist.".format(base_dir)) + raise RuntimeError(f"Directory '{base_dir}' does not exist.") # check if config_file is in base_dir if not exists(base_dir): exc_text = 'No {} found in directory {!r}.\n'.format( @@ -931,7 +931,7 @@ key1 = winreg.OpenKey(winreg.HKEY_CURRENT_USER, key_name) _prog_id = winreg.EnumValue(key1, 0)[0] _key2 = winreg.OpenKey(winreg.HKEY_CLASSES_ROOT, - r'{}\shell\open\command'.format(_prog_id)) + fr'{_prog_id}\shell\open\command') _cmd = winreg.QueryValueEx(_key2, '')[0] # See T102465 for issues relating to using this value. cmd = _cmd @@ -985,14 +985,14 @@ _filemode = _filestatus[0] _fileuid = _filestatus[4] if not OSWIN32 and _fileuid not in [os.getuid(), 0]: - warning('Skipped {fn!r}: owned by someone else.'.format(fn=_filename)) + warning(f'Skipped {_filename!r}: owned by someone else.') elif OSWIN32 or _filemode & 0o02 == 0: with open(_filename, 'rb') as f: exec(compile(f.read(), _filename, 'exec'), _exec_globals) else: - warning('Skipped {fn!r}: writeable by others.'.format(fn=_filename)) + warning(f'Skipped {_filename!r}: writeable by others.') elif __no_user_config and __no_user_config != '2': - warning('{} cannot be loaded.'.format(user_config_file)) + warning(f'{user_config_file} cannot be loaded.')
class _DifferentTypeError(UserWarning, TypeError): @@ -1156,7 +1156,7 @@ if _arg == 'modified': _all = False else: - warning('Unknown arg {} ignored'.format(_arg)) + warning(f'Unknown arg {_arg} ignored')
for _name in sorted(globals()): if _name[0] != '_' \ @@ -1174,7 +1174,7 @@ + '( ...xxxxxxxx... )') else: _value = repr('xxxxxxxx') - output('{}={}'.format(_name, _value)) + output(f'{_name}={_value}')
# cleanup all locally-defined variables for __var in list(globals()): diff --git a/pywikibot/cosmetic_changes.py b/pywikibot/cosmetic_changes.py index 29d7517..04e6513 100644 --- a/pywikibot/cosmetic_changes.py +++ b/pywikibot/cosmetic_changes.py @@ -200,7 +200,7 @@ except stdnum_isbn.ValidationError as e: if strict: raise - pywikibot.log('ISBN "{}" validation error: {}'.format(isbn, e)) + pywikibot.log(f'ISBN "{isbn}" validation error: {e}') return isbn
return stdnum_isbn.format(isbn) @@ -428,7 +428,7 @@ # lowerspaced and underscored namespaces for i, item in enumerate(namespaces): item = item.replace(' ', '[ _]') - item = '[{}{}]'.format(item[0], item[0].lower()) + item[1:] + item = f'[{item[0]}{item[0].lower()}]' + item[1:] namespaces[i] = item namespaces.append(first_lower(final_ns)) if final_ns and namespaces: @@ -441,14 +441,14 @@ r'[[\s*({}) *:(?P<name>[^|]]*?.({}))' r'(?P<label>.*?)]]' .format('|'.join(namespaces), '|'.join(extensions)), - r'[[{}:\g<name>\g<label>]]'.format(final_ns), + fr'[[{final_ns}:\g<name>\g<label>]]', exceptions) else: text = textlib.replaceExcept( text, r'[[\s*({}) *:(?P<nameAndLabel>.*?)]]' .format('|'.join(namespaces)), - r'[[{}:\g<nameAndLabel>]]'.format(final_ns), + fr'[[{final_ns}:\g<nameAndLabel>]]', exceptions) return text
@@ -598,7 +598,7 @@ firstcase_label = label
if firstcase_label == firstcase_title: - newLink = '[[{}]]'.format(label) + newLink = f'[[{label}]]' # Check if we can create a link with trailing characters # instead of a pipelink elif (firstcase_label.startswith(firstcase_title) @@ -614,7 +614,7 @@ # uppercase if self.site.sitename == 'wikipedia:de': titleWithSection = first_upper(titleWithSection) - newLink = '[[{}|{}]]'.format(titleWithSection, label) + newLink = f'[[{titleWithSection}|{label}]]' # re-add spaces that were pulled out of the link. # Examples: # text[[ title ]]text -> text [[title]] text @@ -862,7 +862,7 @@ title_regex = (r'(?P<link>[^{sep}]+?)' r'(\s+(?P<title>[^\s].*?))' .format(sep=separator)) - url_regex = r'[[?{url}?\s*]]?'.format(url=url) + url_regex = fr'[[?{url}?\s*]]?' text = textlib.replaceExcept( text, url_regex.format(title=title_regex), @@ -885,7 +885,7 @@ # dash in external link, where the correct end of the URL can # be detected from the file extension. It is very unlikely that # this will cause mistakes. 
- extensions = [r'.{}'.format(ext) + extensions = [fr'.{ext}' for ext in ['pdf', 'html?', 'php', 'aspx?', 'jsp']] text = textlib.replaceExcept( text, diff --git a/pywikibot/data/api/_generators.py b/pywikibot/data/api/_generators.py index 21b39c4..5e6daa2 100644 --- a/pywikibot/data/api/_generators.py +++ b/pywikibot/data/api/_generators.py @@ -42,7 +42,7 @@ def _clean_kwargs(self, kwargs, **mw_api_args): """Clean kwargs, define site and request class.""" if 'site' not in kwargs: - warn('{} invoked without a site'.format(self.__class__.__name__), + warn(f'{self.__class__.__name__} invoked without a site', RuntimeWarning, 3) kwargs['site'] = pywikibot.Site() assert not hasattr(self, 'site') or self.site == kwargs['site'] diff --git a/pywikibot/data/api/_optionset.py b/pywikibot/data/api/_optionset.py index 7663df2..a947d06 100644 --- a/pywikibot/data/api/_optionset.py +++ b/pywikibot/data/api/_optionset.py @@ -7,6 +7,7 @@ from collections.abc import MutableMapping from typing import Optional
+ __all__ = ['OptionSet']
@@ -128,22 +129,22 @@ """Set option to enabled, disabled or neither.""" if value is True: if self._site_set and name not in self._valid_enable: - raise KeyError('Invalid name "{}"'.format(name)) + raise KeyError(f'Invalid name "{name}"') self._enabled.add(name) self._disabled.discard(name) elif value is False: if self._site_set and name not in self._valid_disable: - raise KeyError('Invalid name "{}"'.format(name)) + raise KeyError(f'Invalid name "{name}"') self._disabled.add(name) self._enabled.discard(name) elif value is None: if self._site_set and (name not in self._valid_enable or name not in self._valid_disable): - raise KeyError('Invalid name "{}"'.format(name)) + raise KeyError(f'Invalid name "{name}"') self._enabled.discard(name) self._disabled.discard(name) else: - raise ValueError('Invalid value "{}"'.format(value)) + raise ValueError(f'Invalid value "{value}"')
def __getitem__(self, name) -> Optional[bool]: """ @@ -161,7 +162,7 @@ if (self._site_set or name in self._valid_enable or name in self._valid_disable): return None - raise KeyError('Invalid name "{}"'.format(name)) + raise KeyError(f'Invalid name "{name}"')
def __delitem__(self, name) -> None: """Remove the item by setting it to None.""" @@ -176,7 +177,7 @@ """Iterate over each option as they appear in the URL.""" yield from self._enabled for disabled in self._disabled: - yield '!{}'.format(disabled) + yield f'!{disabled}'
def __len__(self) -> int: """Return the number of enabled and disabled options.""" diff --git a/pywikibot/data/api/_paraminfo.py b/pywikibot/data/api/_paraminfo.py index 49c552a..3bd2c1a 100644 --- a/pywikibot/data/api/_paraminfo.py +++ b/pywikibot/data/api/_paraminfo.py @@ -12,6 +12,7 @@ from pywikibot.backports import Dict, removeprefix from pywikibot.tools.itertools import itergroup
+ __all__ = ['ParamInfo']
@@ -422,7 +423,7 @@ pywikibot.warning('Path "{}" is ambiguous.' .format(path)) else: - pywikibot.log('Found another path "{}"'.format(path)) + pywikibot.log(f'Found another path "{path}"') result_data[path] = False else: result_data[path] = mod_data @@ -482,12 +483,12 @@ try: module = self[module] except KeyError: - raise ValueError("paraminfo for '{}' not loaded".format(module)) + raise ValueError(f"paraminfo for '{module}' not loaded")
try: params = module['parameters'] except KeyError: - pywikibot.warning("module '{}' has no parameters".format(module)) + pywikibot.warning(f"module '{module}' has no parameters") return None
param_data = [param for param in params @@ -548,7 +549,7 @@ @staticmethod def _prefix_submodules(modules, prefix): """Prefix submodules with path.""" - return {'{}+{}'.format(prefix, mod) for mod in modules} + return {f'{prefix}+{mod}' for mod in modules}
@property def prefix_map(self): diff --git a/pywikibot/data/api/_requests.py b/pywikibot/data/api/_requests.py index 5e1d564..f9c37dd 100644 --- a/pywikibot/data/api/_requests.py +++ b/pywikibot/data/api/_requests.py @@ -36,6 +36,7 @@ from pywikibot.textlib import removeHTMLParts from pywikibot.tools import PYTHON_VERSION
+ __all__ = ('CachedRequest', 'Request', 'encode_url')
# Actions that imply database updates on the server, used for various @@ -658,7 +659,7 @@ or self.site.maximum_GET_length() < len(paramstring)): use_get = False if use_get: - uri = '{}?{}'.format(uri, paramstring) + uri = f'{uri}?{paramstring}' body = None else: body = paramstring @@ -697,7 +698,7 @@ except Exception: # for any other error on the http request, wait and retry pywikibot.error(traceback.format_exc()) - pywikibot.log('{}, {}'.format(uri, paramstring)) + pywikibot.log(f'{uri}, {paramstring}') else: return response, use_get self.wait() @@ -912,7 +913,7 @@
if not delay: pywikibot.warning( - 'No rate limit found for action {}'.format(self.action)) + f'No rate limit found for action {self.action}') self.wait(delay)
def _bad_token(self, code) -> bool: @@ -980,7 +981,7 @@ self.site.throttle(write=self.write) else: pywikibot.log( - "Submitting unthrottled action '{}'.".format(self.action)) + f"Submitting unthrottled action '{self.action}'.")
use_get, uri, body, headers = self._get_request_params(use_get, paramstring) @@ -1040,8 +1041,8 @@ return {'help': {'mime': 'text/plain', 'help': error['help']}}
- pywikibot.warning('API error {}: {}'.format(code, info)) - pywikibot.log(' headers=\n{}'.format(response.headers)) + pywikibot.warning(f'API error {code}: {info}') + pywikibot.log(f' headers=\n{response.headers}')
if self._internal_api_error(code, error.copy(), result): continue @@ -1103,7 +1104,7 @@ param_repr = str(self._params) pywikibot.log('API Error: query=\n{}' .format(pprint.pformat(param_repr))) - pywikibot.log(' response=\n{}'.format(result)) + pywikibot.log(f' response=\n{result}')
raise pywikibot.exceptions.APIError(**error) except TypeError: @@ -1164,7 +1165,7 @@ :return: base directory path for cache entries """ path = os.path.join(config.base_dir, - 'apicache-py{:d}'.format(PYTHON_VERSION[0])) + f'apicache-py{PYTHON_VERSION[0]:d}') cls._make_dir(path) cls._get_cache_dir = classmethod(lambda c: path) # cache the result return path @@ -1203,7 +1204,7 @@ user_key = repr(LoginStatus(LoginStatus.NOT_LOGGED_IN))
request_key = repr(sorted(self._encoded_items().items())) - return '{!r}{}{}'.format(self.site, user_key, request_key) + return f'{self.site!r}{user_key}{request_key}'
def _create_file_name(self) -> str: """Return a unique ascii identifier for the cache entry.""" @@ -1242,7 +1243,7 @@ # file not found return False except Exception as e: - pywikibot.output('Could not load cache: {!r}'.format(e)) + pywikibot.output(f'Could not load cache: {e!r}') return False
def _write_cache(self, data) -> None: diff --git a/pywikibot/data/mysql.py b/pywikibot/data/mysql.py index 6e7a83c..b458d26 100644 --- a/pywikibot/data/mysql.py +++ b/pywikibot/data/mysql.py @@ -98,7 +98,7 @@
if pymysql_version < pkg_resources.parse_version('0.7.11'): issue_deprecation_warning( - 'pymysql package release {}'.format(pymysql_version), + f'pymysql package release {pymysql_version}', instead='pymysql >= 0.7.11', since='7.4.0') connection = _OldConnection(**args, **credentials) else: @@ -115,7 +115,7 @@ if not isinstance(_query, str): _query = str(_query, encoding='utf-8') _query = _query.strip() - _query = '\n'.join(' {}'.format(line) + _query = '\n'.join(f' {line}' for line in _query.splitlines()) pywikibot.output('Executing query:\n' + _query)
diff --git a/pywikibot/data/sparql.py b/pywikibot/data/sparql.py index c6e42f6..0f0dcc0 100644 --- a/pywikibot/data/sparql.py +++ b/pywikibot/data/sparql.py @@ -145,7 +145,7 @@ if headers is None: headers = DEFAULT_HEADERS
- url = '{}?query={}'.format(self.endpoint, quote(query)) + url = f'{self.endpoint}?query={quote(query)}' while True: try: self.last_response = http.fetch(url, headers=headers) @@ -164,7 +164,7 @@ self.max_retries -= 1 if self.max_retries < 0: raise TimeoutError('Maximum retries attempted without success.') - warning('Waiting {} seconds before retrying.'.format(self.retry_wait)) + warning(f'Waiting {self.retry_wait} seconds before retrying.') sleep(self.retry_wait) # double the next wait, but do not exceed config.retry_max seconds self.retry_wait = min(config.retry_max, self.retry_wait * 2) diff --git a/pywikibot/date.py b/pywikibot/date.py index 82a4145..21d2398 100644 --- a/pywikibot/date.py +++ b/pywikibot/date.py @@ -260,14 +260,14 @@ """ if value == ind: return match - raise ValueError('unknown value {}'.format(value)) + raise ValueError(f'unknown value {value}')
@dh_constVal.register(str) def _(value: str, ind: int, match: str) -> int: if value == match: return ind - raise ValueError('unknown value {}'.format(value)) + raise ValueError(f'unknown value {value}')
def alwaysTrue(x: Any) -> bool: @@ -339,7 +339,7 @@ def intToRomanNum(i: int) -> str: """Convert integer to roman numeral.""" if i >= len(_romanNumbers): - raise IndexError('Roman value {} is not defined'.format(i)) + raise IndexError(f'Roman value {i} is not defined') return _romanNumbers[i]
@@ -385,7 +385,7 @@
# Allows to search for '(%%)|(%d)|(%R)|...", and allows one digit 1-9 to set # the size of zero-padding for numbers -_reParameters = re.compile('|'.join('(%[1-9]?{})'.format(s) +_reParameters = re.compile('|'.join(f'(%[1-9]?{s})' for s in _digitDecoders))
# A map of sitecode+pattern to (re matching object and corresponding decoders) @@ -407,12 +407,12 @@
if len(subpattern) == 3: # enforce mandatory field size - newpattern += '([{}]{{{}}})'.format(dec[0], subpattern[1]) + newpattern += f'([{dec[0]}]{{{subpattern[1]}}})' # add the number of required digits as the last (4th) # part of the tuple decoders.append(dec + (int(s[1]),)) else: - newpattern += '([{}]+)'.format(dec[0]) + newpattern += f'([{dec[0]}]+)' decoders.append(dec)
# All encoders produce a string for strpattern. @@ -488,7 +488,7 @@ # This will be called from outside as well as recursivelly to verify # parsed value if filter and not filter(value): - raise ValueError('value {} is not allowed'.format(value)) + raise ValueError(f'value {value} is not allowed')
params = encf(value)
@@ -674,7 +674,7 @@ pattern, ucase = self.year_formats.get(key, ('{} %d', True)) func = 'dh_mnthOfYear' else: - raise KeyError("Wrong variant '{}'".format(self.variant)) + raise KeyError(f"Wrong variant '{self.variant}'")
if ucase: f = first_upper @@ -684,7 +684,7 @@ f = str
month_pattern = pattern.format(f(monthName(key, self.index))) - expression = "lambda v: {}(v, '{}')".format(func, month_pattern) + expression = f"lambda v: {func}(v, '{month_pattern}')" self.data[key] = eval(expression) return self.data[key]
@@ -1683,10 +1683,10 @@ if patterns[i] is not None: if isMnthOfYear: formats[yrMnthFmts[i]][lang] = eval( - 'lambda v: dh_mnthOfYear(v, "{}")'.format(patterns[i])) + f'lambda v: dh_mnthOfYear(v, "{patterns[i]}")') else: formats[dayMnthFmts[i]][lang] = eval( - 'lambda v: dh_dayOfMnth(v, "{}")'.format(patterns[i])) + f'lambda v: dh_dayOfMnth(v, "{patterns[i]}")')
def makeMonthList(pattern: str) -> List[str]: diff --git a/pywikibot/diff.py b/pywikibot/diff.py index 9c94077..c8f397f 100644 --- a/pywikibot/diff.py +++ b/pywikibot/diff.py @@ -7,9 +7,7 @@ import difflib import math from collections import abc -from difflib import ( # type: ignore[attr-defined] - _format_range_unified as format_range_unified, -) +from difflib import _format_range_unified # type: ignore[attr-defined] from itertools import zip_longest from typing import Optional, Union
@@ -82,8 +80,8 @@ def get_header_text(a_rng: Tuple[int, int], b_rng: Tuple[int, int], affix: str = '@@') -> str: """Provide header for any ranges.""" - a_rng = format_range_unified(*a_rng) - b_rng = format_range_unified(*b_rng) + a_rng = _format_range_unified(*a_rng) + b_rng = _format_range_unified(*b_rng) return '{0} -{1} +{2} {0}'.format(affix, a_rng, b_rng)
def create_diff(self) -> Iterable[str]: @@ -192,7 +190,7 @@ color_closed = False else: if char_ref == ' ': - char_tagged = '<<default>>{}'.format(char) + char_tagged = f'<<default>>{char}' color_closed = True colored_line += char_tagged
@@ -212,7 +210,7 @@ def __repr__(self) -> str: """Return a reconstructable representation.""" # TODO - return '{}(a, b, {})'.format(self.__class__.__name__, self.group) + return f'{self.__class__.__name__}(a, b, {self.group})'
class _SuperHunk(abc.Sequence): @@ -372,7 +370,7 @@ """Generate a diff text for the given hunks.""" def extend_context(start: int, end: int) -> str: """Add context lines.""" - return ''.join(' {}\n'.format(line.rstrip()) + return ''.join(f' {line.rstrip()}\n' for line in self.a[start:end])
context_range = self._get_context_range(hunks) @@ -512,7 +510,7 @@ position = next_hunk_position elif next_hunk: # nothing entered is silently ignored pywikibot.error( - 'Invalid hunk number "{}"'.format(next_hunk)) + f'Invalid hunk number "{next_hunk}"') elif choice == 'j': assert next_pending is not None position = next_pending @@ -528,11 +526,11 @@ + super_hunks[position].split() + super_hunks[position + 1:]) pywikibot.output( - 'Split into {} hunks'.format(len(super_hunk._hunks))) + f'Split into {len(super_hunk._hunks)} hunks') else: # choice == '?': pywikibot.output( '<<purple>>{}<<default>>'.format('\n'.join( - '{} -> {}'.format(answer, help_msg[answer]) + f'{answer} -> {help_msg[answer]}' for answer in answers)))
def apply(self) -> List[str]: diff --git a/pywikibot/editor.py b/pywikibot/editor.py index c3f53fa..e29b2de 100644 --- a/pywikibot/editor.py +++ b/pywikibot/editor.py @@ -47,27 +47,27 @@ if config.editor.startswith('kate'): command = ['-l', str(line + 1), '-c', str(column + 1)] elif config.editor.startswith(('gedit', 'emacs')): - command = ['+{}'.format(line + 1)] # columns seem unsupported + command = [f'+{line + 1}'] # columns seem unsupported elif config.editor.startswith('jedit'): - command = ['+line:{}'.format(line + 1)] # columns seem unsupported + command = [f'+line:{line + 1}'] # columns seem unsupported elif config.editor.startswith('vim'): - command = ['+{}'.format(line + 1)] # columns seem unsupported + command = [f'+{line + 1}'] # columns seem unsupported elif config.editor.startswith('nano'): - command = ['+{},{}'.format(line + 1, column + 1)] + command = [f'+{line + 1},{column + 1}'] # Windows editors elif config.editor.lower().endswith('notepad++.exe'): - command = ['-n{}'.format(line + 1)] # seems not to support columns + command = [f'-n{line + 1}'] # seems not to support columns else: command = []
# See T102465 for problems relating to using config.editor unparsed. command = [config.editor] + command + [file_name] - pywikibot.log('Running editor: {}'.format(TextEditor._concat(command))) + pywikibot.log(f'Running editor: {TextEditor._concat(command)}') return command
@staticmethod def _concat(command: Sequence[str]) -> str: - return ' '.join('{!r}'.format(part) if ' ' in part else part + return ' '.join(f'{part!r}' if ' ' in part else part for part in command)
def edit(self, text: str, jumpIndex: Optional[int] = None, diff --git a/pywikibot/exceptions.py b/pywikibot/exceptions.py index 5f03b42..654e579 100644 --- a/pywikibot/exceptions.py +++ b/pywikibot/exceptions.py @@ -225,10 +225,10 @@ self.code, self.info, ';\n '.join( - '{}: {}'.format(key, val) + f'{key}: {val}' for key, val in self.other.items()))
- return '{}: {}'.format(self.code, self.info) + return f'{self.code}: {self.info}'
class APIMWError(APIError): @@ -386,7 +386,7 @@ :param actual: title obtained by query
""" - self.message = "Query on {{}} returned data on '{}'".format(actual) + self.message = f"Query on {{}} returned data on '{actual}'" super().__init__(page)
diff --git a/pywikibot/family.py b/pywikibot/family.py index 927888a..de40fbe 100644 --- a/pywikibot/family.py +++ b/pywikibot/family.py @@ -380,7 +380,7 @@ Family._families[fam] = myfamily return Family._families[fam] else: - raise UnknownFamilyError('Family {} does not exist'.format(fam)) + raise UnknownFamilyError(f'Family {fam} does not exist')
try: # Ignore warnings due to dots in family names. @@ -391,7 +391,7 @@ sys.path.append(dirname(family_file)) mod = import_module(splitext(basename(family_file))[0]) except ImportError: - raise UnknownFamilyError('Family {} does not exist'.format(fam)) + raise UnknownFamilyError(f'Family {fam} does not exist') cls = mod.Family.instance if cls.name != fam: warnings.warn('Family name {} does not match family module name {}' @@ -561,19 +561,19 @@ protocol, host = self._hostname(code, protocol) if protocol == 'https': uri = self.ssl_pathprefix(code) + uri - return urlparse.urljoin('{}://{}'.format(protocol, host), uri) + return urlparse.urljoin(f'{protocol}://{host}', uri)
def path(self, code) -> str: """Return path to index.php.""" - return '{}/index.php'.format(self.scriptpath(code)) + return f'{self.scriptpath(code)}/index.php'
def querypath(self, code) -> str: """Return path to query.php.""" - return '{}/query.php'.format(self.scriptpath(code)) + return f'{self.scriptpath(code)}/query.php'
def apipath(self, code) -> str: """Return path to api.php.""" - return '{}/api.php'.format(self.scriptpath(code)) + return f'{self.scriptpath(code)}/api.php'
def eventstreams_host(self, code): """Hostname for EventStreams. @@ -591,7 +591,7 @@
def get_address(self, code, title) -> str: """Return the path to title using index.php with redirects disabled.""" - return '{}?title={}&redirect=no'.format(self.path(code), title) + return f'{self.path(code)}?title={title}&redirect=no'
def interface(self, code) -> str: """Return interface to use for code.""" @@ -653,7 +653,7 @@ # Use the code and family instead of the url # This is only creating a Site instance if domain matches site = pywikibot.Site(code, self.name) - pywikibot.log('Found candidate {}'.format(site)) + pywikibot.log(f'Found candidate {site}')
for iw_url in site._interwiki_urls(): iw_url, *_ = iw_url.partition('{}') @@ -677,7 +677,7 @@
def dbName(self, code) -> str: """Return the name of the MySQL database.""" - return '{}{}'.format(code, self.name) + return f'{code}{self.name}'
def encoding(self, code) -> str: """Return the encoding for a specific language wiki.""" @@ -710,7 +710,7 @@ return self.name
def __repr__(self) -> str: - return 'Family("{}")'.format(self.name) + return f'Family("{self.name}")'
def shared_image_repository(self, code): """Return the shared image repository, if any.""" @@ -825,11 +825,11 @@ codes += cls.closed_wikis
# shortcut this classproperty - cls.langs = {code: '{}.{}'.format(code, cls.domain) + cls.langs = {code: f'{code}.{cls.domain}' for code in codes}
if hasattr(cls, 'code_aliases'): - cls.langs.update({alias: '{}.{}'.format(code, cls.domain) + cls.langs.update({alias: f'{code}.{cls.domain}' for alias, code in cls.code_aliases.items()})
return cls.langs @@ -978,7 +978,7 @@ return 'wikimedia.org'
raise NotImplementedError( - "Family {} needs to define property 'domain'".format(cls.name)) + f"Family {cls.name} needs to define property 'domain'")
@classproperty def interwiki_removals(cls): @@ -1014,7 +1014,7 @@ @classproperty def domain(cls) -> str: """Return the parents domain with a subdomain prefix.""" - return '{}.wikimedia.org'.format(cls.name) + return f'{cls.name}.wikimedia.org'
def AutoFamily(name: str, url: str) -> SingleSiteFamily: diff --git a/pywikibot/i18n.py b/pywikibot/i18n.py index 7b2cb93..418a029 100644 --- a/pywikibot/i18n.py +++ b/pywikibot/i18n.py @@ -420,7 +420,7 @@
.. versionadded:: 7.0 """ - filename = '{}/{}.json'.format(dirname, lang) + filename = f'{dirname}/{lang}.json' try: data = pkgutil.get_data(_messages_package_name, filename) assert data is not None @@ -495,7 +495,7 @@ return plural_entries[index]
assert isinstance(parameters, Mapping), \ - 'parameters is not Mapping but {}'.format(type(parameters)) + f'parameters is not Mapping but {type(parameters)}'
rule = plural_rule(lang)
@@ -750,7 +750,7 @@ 'No {} translation has been defined for TranslateWiki key "{}". ' 'It can happen due to lack of i18n submodule or files or an ' 'outdated submodule. See {}/i18n' - .format('English' if 'en' in langs else "'{}'".format(lang), + .format('English' if 'en' in langs else f"'{lang}'", twtitle, __url__)))
if '{{PLURAL:' in trans: diff --git a/pywikibot/interwiki_graph.py b/pywikibot/interwiki_graph.py index cf5c5d3..209d4f2 100644 --- a/pywikibot/interwiki_graph.py +++ b/pywikibot/interwiki_graph.py @@ -97,7 +97,7 @@ :raises ImportError if pydot is not installed """ if PYDOT_ERROR: - msg = 'pydot is not installed: {}.'.format(PYDOT_ERROR) + msg = f'pydot is not installed: {PYDOT_ERROR}.' raise ImportError(msg) self.graph = None # type: Optional[pydot.Dot] self.subject = subject @@ -105,7 +105,7 @@ @staticmethod def getLabel(page: 'pywikibot.page.Page') -> str: """Get label for page.""" - return '"{}:{}"'.format(page.site.code, page.title()) + return f'"{page.site.code}:{page.title()}"'
def _octagon_site_set(self) -> Set['pywikibot.site.BaseSite']: """Build a list of sites with more than one valid page.""" @@ -227,5 +227,5 @@ page.site.code, page.title(as_filename=True))) if extension: - filename += '.{}'.format(extension) + filename += f'.{extension}' return filename diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py index 433ffd5..b295cbc 100644 --- a/pywikibot/logentries.py +++ b/pywikibot/logentries.py @@ -50,7 +50,7 @@
It also logs debugging information when a key is missing. """ - pywikibot.debug('API log entry received:\n{!r}'.format(self)) + pywikibot.debug(f'API log entry received:\n{self!r}') hidden = { 'actionhidden': [ 'action', 'logpage', 'ns', 'pageid', 'params', 'title', @@ -373,7 +373,7 @@ :raise KeyError: logtype is not valid """ if logtype not in self._site.logtypes: - raise KeyError('{} is not a valid logtype'.format(logtype)) + raise KeyError(f'{logtype} is not a valid logtype')
return LogEntryFactory.get_entry_class(logtype)
@@ -410,7 +410,7 @@ try: logtype = logdata['type'] except KeyError: - pywikibot.debug('API log entry received:\n{}'.format(logdata)) + pywikibot.debug(f'API log entry received:\n{logdata}') raise Error("Log entry has no 'type' key")
return LogEntryFactory.get_entry_class(logtype)(logdata, self._site) diff --git a/pywikibot/logging.py b/pywikibot/logging.py index e6162c2..0955418 100644 --- a/pywikibot/logging.py +++ b/pywikibot/logging.py @@ -117,8 +117,8 @@ for i, arg in enumerate(args): key = keys[i] issue_deprecation_warning( - 'Positional argument {} ({})'.format(i + 1, arg), - 'keyword argument "{}={}"'.format(key, arg), + f'Positional argument {i + 1} ({arg})', + f'keyword argument "{key}={arg}"', since='7.2.0') if key in kwargs: warning('{!r} is given as keyword argument {!r} already; ignoring ' @@ -330,6 +330,6 @@ exc_type, value, _tb = sys.exc_info() msg = str(value) if not exc_info: - msg += ' ({})'.format(exc_type.__name__) + msg += f' ({exc_type.__name__})' assert msg is not None error(msg, *args, exc_info=exc_info, **kwargs) diff --git a/pywikibot/login.py b/pywikibot/login.py index 0cac48c..0581b8b 100644 --- a/pywikibot/login.py +++ b/pywikibot/login.py @@ -67,7 +67,7 @@
def __repr__(self) -> str: """Return internal representation.""" - return 'LoginStatus({})'.format(self) + return f'LoginStatus({self})'
class LoginManager: @@ -227,7 +227,7 @@ entry = None
if not isinstance(entry, tuple): - warn('Invalid tuple in line {}'.format(line_nr), + warn(f'Invalid tuple in line {line_nr}', _PasswordFileWarning) continue
@@ -292,12 +292,12 @@ self.login_to_site() except APIError as e: error_code = e.code - pywikibot.error('Login failed ({}).'.format(error_code)) + pywikibot.error(f'Login failed ({error_code}).') if error_code in self._api_error: error_msg = 'Username "{}" {} on {}'.format( self.login_name, self._api_error[error_code], self.site) if error_code in ('Failed', 'FAIL'): - error_msg += '\n.{}'.format(e.info) + error_msg += f'\n.{e.info}' raise NoUsernameError(error_msg)
# TODO: investigate other unhandled API codes (bug T75539) @@ -341,7 +341,7 @@
:param user: username (without suffix) """ - return '{}@{}'.format(username, self.suffix) + return f'{username}@{self.suffix}'
class OauthLoginManager(LoginManager): @@ -368,7 +368,7 @@ :raises ImportError: mwoauth isn't installed """ if isinstance(mwoauth, ImportError): - raise ImportError('mwoauth is not installed: {}.'.format(mwoauth)) + raise ImportError(f'mwoauth is not installed: {mwoauth}.') assert password is not None and user is not None super().__init__(password=None, site=site, user=None) if self.password: diff --git a/pywikibot/page/_collections.py b/pywikibot/page/_collections.py index 14cc42d..2826fa9 100644 --- a/pywikibot/page/_collections.py +++ b/pywikibot/page/_collections.py @@ -64,7 +64,7 @@ return key in self._data
def __repr__(self) -> str: - return '{}({})'.format(type(self), self._data) + return f'{type(self)}({self._data})'
@staticmethod def normalizeKey(key) -> str: @@ -239,7 +239,7 @@ return key in self._data
def __repr__(self) -> str: - return '{}({})'.format(type(self), self._data) + return f'{type(self)}({self._data})'
@classmethod def normalizeData(cls, data) -> dict: diff --git a/pywikibot/page/_decorators.py b/pywikibot/page/_decorators.py index bf6722b..ad3a32a 100644 --- a/pywikibot/page/_decorators.py +++ b/pywikibot/page/_decorators.py @@ -39,7 +39,7 @@ if do_async: pywikibot.error('page {} not saved due to {}\n' .format(link, err)) - pywikibot.log('Error saving page {} ({})\n'.format(link, err), + pywikibot.log(f'Error saving page {link} ({err})\n', exc_info=True) if not callback and not do_async: if isinstance(err, PageSaveRelatedError): diff --git a/pywikibot/page/_links.py b/pywikibot/page/_links.py index e814093..6b6a806 100644 --- a/pywikibot/page/_links.py +++ b/pywikibot/page/_links.py @@ -79,7 +79,7 @@ assert isinstance(self._items, tuple) assert all(isinstance(item, str) for item in self._items)
- attrs = ('{!r}'.format(getattr(self, attr)) for attr in self._items) + attrs = (f'{getattr(self, attr)!r}' for attr in self._items) return 'pywikibot.page.{}({})'.format(type(self).__name__, ', '.join(attrs))
@@ -165,7 +165,7 @@ .format(self.namespace, onsite))
if self.namespace != Namespace.MAIN: - return '{}:{}'.format(name, self.title) + return f'{name}:{self.title}' return self.title
def astext(self, onsite=None) -> str: @@ -182,14 +182,14 @@ if self.namespace != Namespace.MAIN: title = onsite.namespace(self.namespace) + ':' + title if onsite == self.site: - return '[[{}]]'.format(title) + return f'[[{title}]]' if onsite.family == self.site.family: - return '[[{}:{}]]'.format(self.site.code, title) + return f'[[{self.site.code}:{title}]]' if self.site.family.name == self.site.code: # use this form for sites like commons, where the # code is the same as the family name - return '[[{}:{}]]'.format(self.site.code, title) - return '[[{}:{}]]'.format(self.site.sitename, title) + return f'[[{self.site.code}:{title}]]' + return f'[[{self.site.sitename}:{title}]]'
def _cmpkey(self): """ @@ -396,7 +396,7 @@ if ns: if len(self._text) <= colon_position: raise InvalidTitleError( - "'{}' has no title.".format(self._text)) + f"'{self._text}' has no title.") self._namespace = ns ns_prefix = True old_position = colon_position @@ -438,7 +438,7 @@ # 'namespace:' is not a valid title if not t: raise InvalidTitleError( - "'{}' has no title.".format(self._text)) + f"'{self._text}' has no title.")
if ':' in t and self._namespace >= 0: # < 0 don't have talk other_ns = self._site.namespaces[self._namespace - 1 @@ -476,7 +476,7 @@ .format(self._text))
if self._namespace != -1 and len(t) > 255: - raise InvalidTitleError("(over 255 bytes): '{}'".format(t)) + raise InvalidTitleError(f"(over 255 bytes): '{t}'")
# "empty" local links can only be self-links # with a fragment identifier. diff --git a/pywikibot/page/_pages.py b/pywikibot/page/_pages.py index 5a871bc..94f2434 100644 --- a/pywikibot/page/_pages.py +++ b/pywikibot/page/_pages.py @@ -279,14 +279,14 @@ else: # use this form for sites like commons, where the # code is the same as the family name - title = '{}:{}'.format(self.site.code, title) + title = f'{self.site.code}:{title}' elif textlink and (self.is_filepage() or self.is_categorypage()): - title = ':{}'.format(title) + title = f':{title}' elif self.namespace() == 0 and not section: with_ns = True if with_ns: - return '[[{}{}]]'.format(title, section) - return '[[{}{}|{}]]'.format(title, section, label) + return f'[[{title}{section}]]' + return f'[[{title}{section}|{label}]]' if not with_ns and self.namespace() != 0: title = label + section else: @@ -329,7 +329,7 @@
def __repr__(self) -> str: """Return a more complete string representation.""" - return '{}({!r})'.format(self.__class__.__name__, self.title()) + return f'{self.__class__.__name__}({self.title()!r})'
def _cmpkey(self): """ @@ -837,7 +837,7 @@ title = self.title(with_ns=False) new_ns = ns + (1, -1)[self.isTalkPage()] return Page(self.site, - '{}:{}'.format(self.site.namespace(new_ns), title)) + f'{self.site.namespace(new_ns)}:{title}')
def is_categorypage(self): """Return True if the page is a Category, False otherwise.""" @@ -1262,10 +1262,10 @@ watch=watch, bot=botflag, **kwargs) if not done: if not quiet: - pywikibot.warning('Page {} not saved'.format(link)) + pywikibot.warning(f'Page {link} not saved') raise PageSaveRelatedError(self) if not quiet: - pywikibot.output('Page {} saved'.format(link)) + pywikibot.output(f'Page {link} saved')
def _cosmetic_changes_hook(self, summary: str) -> str: """The cosmetic changes hook. @@ -1435,8 +1435,8 @@ for i, arg in enumerate(args): # pragma: no cover key = keys[i] issue_deprecation_warning( - 'Positional argument {} ({})'.format(i + 1, arg), - 'keyword argument "{}={}"'.format(key, arg), + f'Positional argument {i + 1} ({arg})', + f'keyword argument "{key}={arg}"', since='7.0.0') if key in kwargs: pywikibot.warning('{!r} is given as keyword argument {!r} ' @@ -1845,7 +1845,7 @@ -1 page was marked for deletion """ if reason is None: - pywikibot.output('Deleting {}.'.format(self.title(as_link=True))) + pywikibot.output(f'Deleting {self.title(as_link=True)}.') reason = pywikibot.input('Please enter a reason for the deletion:')
# If user has 'delete' right, delete the page @@ -1988,7 +1988,7 @@ if reason is None: warn('Not passing a reason for undelete() is deprecated.', DeprecationWarning) - pywikibot.output('Undeleting {}.'.format(self.title(as_link=True))) + pywikibot.output(f'Undeleting {self.title(as_link=True)}.') reason = pywikibot.input( 'Please enter a reason for the undeletion:') self.site.undelete(self, reason, revision=undelete_revs) @@ -2155,7 +2155,7 @@ if not with_protocol: return re.sub(PROTOCOL_REGEX, '', link) elif with_protocol: - return '{}://{}'.format(wiki.protocol(), link) + return f'{wiki.protocol()}://{link}' return link
@@ -2300,7 +2300,7 @@
target_link = target_page.title(as_link=True, textlink=True, allow_interwiki=False) - target_link = '#{} {}'.format(self.site.redirect(), target_link) + target_link = f'#{self.site.redirect()} {target_link}' self.text = prefix + target_link + suffix if save: self.save(**kwargs) @@ -2335,7 +2335,7 @@
if not self.site.has_data_repository: raise UnknownExtensionError( - 'Wikibase is not implemented for {}.'.format(self.site)) + f'Wikibase is not implemented for {self.site}.')
def get_item_page(func, *args): try: @@ -2384,7 +2384,7 @@ title_with_sort_key = self.title(with_section=False) + '|' + key else: title_with_sort_key = self.title(with_section=False) - return '[[{}]]'.format(title_with_sort_key) + return f'[[{title_with_sort_key}]]'
def subcategories(self, recurse: Union[int, bool] = False, diff --git a/pywikibot/page/_revision.py b/pywikibot/page/_revision.py index 2aef3c5..e1ce8d4 100644 --- a/pywikibot/page/_revision.py +++ b/pywikibot/page/_revision.py @@ -81,7 +81,7 @@
def __repr__(self) -> str: """String representation of Revision.""" - return '{}({})'.format(self.__class__.__name__, self._data) + return f'{self.__class__.__name__}({self._data})'
def __str__(self) -> str: """Printable representation of Revision data.""" diff --git a/pywikibot/page/_wikibase.py b/pywikibot/page/_wikibase.py index 2418cd8..1e647a9 100644 --- a/pywikibot/page/_wikibase.py +++ b/pywikibot/page/_wikibase.py @@ -327,7 +327,7 @@ entity_id = self.getID() if entity_id == '-1': raise NoWikibaseEntityError(self) - return '{}{}'.format(self.repo.concept_base_uri, entity_id) + return f'{self.repo.concept_base_uri}{entity_id}'
class MediaInfo(WikibaseEntity): @@ -365,7 +365,7 @@
page = result.pop() if page.namespace() != page.site.namespaces.FILE: - raise Error('Page with id "{}" is not a file'.format(page_id)) + raise Error(f'Page with id "{page_id}" is not a file')
self._file = FilePage(page)
@@ -962,7 +962,7 @@ item. """ if not isinstance(site, DataSite): - raise TypeError('{} is not a data repository.'.format(site)) + raise TypeError(f'{site} is not a data repository.')
base_uri, _, qid = uri.rpartition('/') if base_uri != site.concept_base_uri.rstrip('/'): @@ -1533,7 +1533,7 @@ claim.type, lambda value, site: value)(value, site) else: pywikibot.warning( - '{} datatype is not supported yet.'.format(claim.type)) + f'{claim.type} datatype is not supported yet.') claim.target = pywikibot.WbUnknown.fromWikibase(value) if 'rank' in data: # References/Qualifiers don't have ranks claim.rank = data['rank'] @@ -1910,7 +1910,7 @@ value = self.getTarget().toWikibase() else: # WbUnknown pywikibot.warning( - '{} datatype is not supported yet.'.format(self.type)) + f'{self.type} datatype is not supported yet.') value = self.getTarget().toWikibase() return value
diff --git a/pywikibot/pagegenerators/_factory.py b/pywikibot/pagegenerators/_factory.py index 61ca0d3..a67280a 100644 --- a/pywikibot/pagegenerators/_factory.py +++ b/pywikibot/pagegenerators/_factory.py @@ -53,8 +53,8 @@ WikibaseSearchItemPageGenerator, WikidataSPARQLPageGenerator, ) -from pywikibot.tools.itertools import filter_unique, intersect_generators from pywikibot.tools.collections import DequeGenerator +from pywikibot.tools.itertools import filter_unique, intersect_generators
HANDLER_RETURN_TYPE = Union[None, bool, Iterable['pywikibot.page.BasePage']] @@ -388,7 +388,7 @@ assert total is None or total > 0 except ValueError as err: pywikibot.error( - '{}. Start parameter has wrong format!'.format(err)) + f'{err}. Start parameter has wrong format!') return None except AssertionError: pywikibot.error('Total number of log ({}) events must be a ' @@ -399,7 +399,7 @@ end = pywikibot.Timestamp.fromtimestampformat(end) except ValueError as err: pywikibot.error( - '{}. End parameter has wrong format!'.format(err)) + f'{err}. End parameter has wrong format!') return None except TypeError: # end is None pass @@ -443,9 +443,9 @@ _2i = 2 * _i txt = 'Available categories of lint errors:\n' for prio, _list in cats.items(): - txt += '{indent}{prio}\n'.format(indent=_i, prio=prio) + txt += f'{_i}{prio}\n' txt += ''.join( - '{indent}{cat}\n'.format(indent=_2i, cat=c) for c in _list) + f'{_2i}{c}\n' for c in _list) pywikibot.output(txt)
if cat == 'show': # Display categories of lint errors. @@ -459,7 +459,7 @@ else: lint_cats = cat.split(',') assert set(lint_cats) <= set(valid_cats), \ - 'Invalid category of lint errors: {}'.format(cat) + f'Invalid category of lint errors: {cat}'
return self.site.linter_pages( lint_categories='|'.join(lint_cats), namespaces=self.namespaces, @@ -891,7 +891,7 @@ params = value.split(',') if params[0] not in self.site.logtypes: raise NotImplementedError( - 'Invalid -logevents parameter "{}"'.format(params[0])) + f'Invalid -logevents parameter "{params[0]}"') return self._parse_log_events(*params)
def handle_args(self, args: Iterable[str]) -> List[str]: diff --git a/pywikibot/pagegenerators/_filters.py b/pywikibot/pagegenerators/_filters.py index bf12b34..909d311 100644 --- a/pywikibot/pagegenerators/_filters.py +++ b/pywikibot/pagegenerators/_filters.py @@ -111,7 +111,7 @@ continue
if config.verbose_output: - pywikibot.output('Ignoring page {}'.format(page.title())) + pywikibot.output(f'Ignoring page {page.title()}')
def RedirectFilterPageGenerator(generator: Iterable['pywikibot.page.Page'], @@ -462,7 +462,7 @@ if bool(contribs[username]) is not bool(skip): # xor operation yield page elif show_filtered: - pywikibot.output('Skipping {}'.format(page.title(as_link=True))) + pywikibot.output(f'Skipping {page.title(as_link=True)}')
def WikibaseItemFilterPageGenerator(generator: Iterable['pywikibot.page.Page'], @@ -479,7 +479,7 @@ :return: Wrapped generator """ why = "doesn't" if has_item else 'has' - msg = '{{page}} {why} a wikidata item. Skipping.'.format(why=why) + msg = f'{{page}} {why} a wikidata item. Skipping.'
for page in generator or []: try: diff --git a/pywikibot/pagegenerators/_generators.py b/pywikibot/pagegenerators/_generators.py index 9ea56df..78996d2 100644 --- a/pywikibot/pagegenerators/_generators.py +++ b/pywikibot/pagegenerators/_generators.py @@ -848,7 +848,7 @@ changed from iterator method to generator property """ # restrict query to local site - local_query = '{} site:{}'.format(self.query, self.site.hostname()) + local_query = f'{self.query} site:{self.site.hostname()}' base = 'http://%7B%7D%7B%7D%27.format(self.site.hostname(), self.site.articlepath) pattern = base.replace('{}', '(.+)') @@ -984,10 +984,10 @@ """ if site is None: site = pywikibot.Site() - pywikibot.output('Starting with year {}'.format(start)) + pywikibot.output(f'Starting with year {start}') for i in range(start, end + 1): if i % 100 == 0: - pywikibot.output('Preparing {}...'.format(i)) + pywikibot.output(f'Preparing {i}...') # There is no year 0 if i != 0: current_year = date.formatYear(site.lang, i) @@ -1007,7 +1007,7 @@ site = pywikibot.Site() lang = site.lang first_page = pywikibot.Page(site, date.format_date(start_month, 1, lang)) - pywikibot.output('Starting with {}'.format(first_page.title(as_link=True))) + pywikibot.output(f'Starting with {first_page.title(as_link=True)}') for month in range(start_month, end_month + 1): for day in range(1, calendar.monthrange(year, month)[1] + 1): yield pywikibot.Page( @@ -1161,7 +1161,7 @@
if namespaces: for namespace in namespaces: - query['ns[{}]'.format(int(namespace))] = 1 + query[f'ns[{int(namespace)}]'] = 1
query_final = query.copy() query_final.update(extra_options) @@ -1182,12 +1182,12 @@ try: req = http.fetch(url, params=self.opts) except ReadTimeout: - raise ServerError('received ReadTimeout from {}'.format(url)) + raise ServerError(f'received ReadTimeout from {url}')
server_err = HTTPStatus.INTERNAL_SERVER_ERROR if server_err <= req.status_code < server_err + 100: raise ServerError( - 'received {} status from {}'.format(req.status_code, req.url)) + f'received {req.status_code} status from {req.url}')
data = req.json() if 'error' in data: diff --git a/pywikibot/proofreadpage.py b/pywikibot/proofreadpage.py index 6004a73..8c8432a 100644 --- a/pywikibot/proofreadpage.py +++ b/pywikibot/proofreadpage.py @@ -294,7 +294,7 @@
index_page, others = self._index if others: - pywikibot.warning('{} linked to several Index pages.'.format(self)) + pywikibot.warning(f'{self} linked to several Index pages.') pywikibot.output('{}{!s}'.format(' ' * 9, [index_page] + others))
if index_page: @@ -493,7 +493,7 @@ """ def _assert_len(len_oq: int, len_cq: int, title: str) -> None: if (len_oq != len_cq) or (len_oq < 2 or len_cq < 2): - raise Error('ProofreadPage {}: invalid format'.format(title)) + raise Error(f'ProofreadPage {title}: invalid format')
# Property force page text loading. text = self.text @@ -559,7 +559,7 @@ The edit summary shall be appended to pre_summary to highlight Status in the edit summary on wiki. """ - return '/* {0.status} */ '.format(self) + return f'/* {self.status} */ '
@property @cached @@ -582,7 +582,7 @@ try: response = http.fetch(url, charset='utf-8') except Exception: - pywikibot.error('Error fetching HTML for {}.'.format(self)) + pywikibot.error(f'Error fetching HTML for {self}.') raise
soup = _bs4_soup(response.text) # type: ignore @@ -628,32 +628,32 @@
# wrong link fail with Exceptions for retry in range(5, 30, 5): - pywikibot.debug('{}: get URI {!r}'.format(ocr_tool, cmd_uri)) + pywikibot.debug(f'{ocr_tool}: get URI {cmd_uri!r}') try: response = http.fetch(cmd_uri) except ReadTimeout as e: - pywikibot.warning('ReadTimeout {}: {}'.format(cmd_uri, e)) + pywikibot.warning(f'ReadTimeout {cmd_uri}: {e}') except Exception as e: - pywikibot.error('"{}": {}'.format(cmd_uri, e)) + pywikibot.error(f'"{cmd_uri}": {e}') return True, e else: - pywikibot.debug('{}: {}'.format(ocr_tool, response.text)) + pywikibot.debug(f'{ocr_tool}: {response.text}') break
- pywikibot.warning('retrying in {} seconds ...'.format(retry)) + pywikibot.warning(f'retrying in {retry} seconds ...') time.sleep(retry) else: return True, ReadTimeout('ReadTimeout: Could not perform OCR')
if HTTPStatus.BAD_REQUEST <= response.status_code < 600: - return True, 'Http response status {}'.format(response.status_code) + return True, f'Http response status {response.status_code}'
data = response.json()
if ocr_tool == self._PHETOOLS: # phetools - assert 'error' in data, 'Error from phetools: {}'.format(data) + assert 'error' in data, f'Error from phetools: {data}' assert data['error'] in [0, 1, 2, 3], \ - 'Error from phetools: {}'.format(data) + f'Error from phetools: {data}' error, _text = bool(data['error']), data['text'] else: # googleOCR if 'error' in data: @@ -662,7 +662,7 @@ error, _text = False, data['text']
if error: - pywikibot.error('OCR query {}: {}'.format(cmd_uri, _text)) + pywikibot.error(f'OCR query {cmd_uri}: {_text}') return error, _text return error, parser_func(_text)
@@ -704,7 +704,7 @@ try: url_image = self.url_image except ValueError: - error_text = 'No prp-page-image src found for {}.'.format(self) + error_text = f'No prp-page-image src found for {self}.' pywikibot.error(error_text) return True, error_text
@@ -765,7 +765,7 @@ if not error and isinstance(text, str): return text raise ValueError( - '{}: not possible to perform OCR. {}'.format(self, text)) + f'{self}: not possible to perform OCR. {text}')
class PurgeRequest(Request): @@ -919,7 +919,7 @@ params = {'action': 'purge', 'titles': [self.title()]} request = PurgeRequest(site=self.site, parameters=params) rawdata = request.submit() - error_message = 'Purge action failed for {}'.format(self) + error_message = f'Purge action failed for {self}' assert 'purge' in rawdata, error_message assert 'purged' in rawdata['purge'][0], error_message
@@ -1081,7 +1081,7 @@ try: return self._labels_from_page[page] except KeyError: - raise KeyError('Invalid Page: {}.'.format(page)) + raise KeyError(f'Invalid Page: {page}.')
@check_if_cached def get_label_from_page_number(self, page_number: int) -> str: @@ -1108,7 +1108,7 @@ try: return mapping_dict[label] except KeyError: - raise KeyError('No page has label: "{}".'.format(label)) + raise KeyError(f'No page has label: "{label}".')
@check_if_cached def get_page_number_from_label(self, label: str = '1') -> str: @@ -1138,7 +1138,7 @@ try: return self._page_from_numbers[page_number] except KeyError: - raise KeyError('Invalid page number: {}.'.format(page_number)) + raise KeyError(f'Invalid page number: {page_number}.')
@check_if_cached def pages(self) -> List['pywikibot.page.Page']: @@ -1155,4 +1155,4 @@ try: return self._numbers_from_page[page] except KeyError: - raise KeyError('Invalid page: {}.'.format(page)) + raise KeyError(f'Invalid page: {page}.') diff --git a/pywikibot/scripts/generate_family_file.py b/pywikibot/scripts/generate_family_file.py index e545c89..e79a650 100755 --- a/pywikibot/scripts/generate_family_file.py +++ b/pywikibot/scripts/generate_family_file.py @@ -35,9 +35,9 @@ import os import string import sys +from contextlib import suppress from typing import Optional from urllib.parse import urlparse -from contextlib import suppress
# see pywikibot.family.py @@ -201,7 +201,7 @@ print('Loading wikis... ') for lang in self.langs: key = lang['prefix'] - print(' * {}... '.format(key), end='') + print(f' * {key}... ', end='') if key not in self.wikis: try: self.wikis[key] = self.Wiki(lang['url']) @@ -214,8 +214,8 @@ def writefile(self, verify) -> None: """Write the family file.""" fn = os.path.join(self.base_dir, 'families', - '{}_family.py'.format(self.name)) - print('Writing {}... '.format(fn)) + f'{self.name}_family.py') + print(f'Writing {fn}... ')
if os.path.exists(fn) and input('{} already exists. Overwrite? (y/n)' .format(fn)).lower() == 'n': @@ -228,7 +228,7 @@ ) for k, w in self.wikis.items())
code_path_pairs = '\n '.join( - "'{code}': '{path}',".format(code=k, path=w.scriptpath) + f"'{k}': '{w.scriptpath}'," for k, w in self.wikis.items())
code_protocol_pairs = '\n '.join( diff --git a/pywikibot/scripts/generate_user_files.py b/pywikibot/scripts/generate_user_files.py index 5882a80..d88e229 100755 --- a/pywikibot/scripts/generate_user_files.py +++ b/pywikibot/scripts/generate_user_files.py @@ -70,7 +70,7 @@ try: os.mkdir(new_base, pywikibot.config.private_files_permission) except Exception as e: - pywikibot.error('directory creation failed: {}'.format(e)) + pywikibot.error(f'directory creation failed: {e}') continue pywikibot.output('Created new directory.') break @@ -142,7 +142,7 @@ .format(fam.name)) default_lang = None elif len(known_langs) == 1: - pywikibot.output('The only known site code: {}'.format(known_langs[0])) + pywikibot.output(f'The only known site code: {known_langs[0]}') default_lang = known_langs[0] else: pywikibot.output('This is the list of known site oodes:') @@ -164,7 +164,7 @@ default=False, automatic_quit=False): mycode = None
- message = 'Username on {}:{}'.format(mycode, fam.name) + message = f'Username on {mycode}:{fam.name}' username = pywikibot.input(message, default=default_username, force=force) # Escape ''s if username: @@ -305,7 +305,7 @@ if msg: pywikibot.output(msg) msg = None - message = 'BotPassword\'s "bot name" for {}'.format(username) + message = f'BotPassword\'s "bot name" for {username}' botpasswordname = pywikibot.input(message, force=force) message = 'BotPassword\'s "password" for "{}" ' \ '(no characters will be shown)' \ @@ -344,10 +344,10 @@ main_code=main_code, usernames=usernames, config_text=config_text, - botpasswords='password_file = ' + ('"{}"'.format(PASS_BASENAME) + botpasswords='password_file = ' + (f'"{PASS_BASENAME}"' if botpasswords else 'None'))) - pywikibot.output("'{}' written.".format(_fnc)) + pywikibot.output(f"'{_fnc}' written.") except BaseException: if os.path.exists(_fnc): os.remove(_fnc) @@ -372,7 +372,7 @@ with codecs.open(_fncpass, 'w', 'utf-8') as f: f.write(PASSFILE_CONFIG.format(botpasswords=botpasswords)) file_mode_checker(_fncpass, mode=0o600) - pywikibot.output("'{}' written.".format(_fncpass)) + pywikibot.output(f"'{_fncpass}' written.") except OSError: os.remove(_fncpass) raise @@ -391,7 +391,7 @@ """ global base_dir
- pywikibot.output('\nYour default user directory is "{}"'.format(base_dir)) + pywikibot.output(f'\nYour default user directory is "{base_dir}"') while True: # Show whether file exists userfile = file_exists(os.path.join(base_dir, USER_BASENAME)) diff --git a/pywikibot/scripts/login.py b/pywikibot/scripts/login.py index 79f53a6..7b01376 100755 --- a/pywikibot/scripts/login.py +++ b/pywikibot/scripts/login.py @@ -52,8 +52,8 @@ # Distributed under the terms of the MIT license. # import datetime -from contextlib import suppress from concurrent.futures import ThreadPoolExecutor +from contextlib import suppress
import pywikibot from pywikibot import config @@ -63,9 +63,9 @@
def _get_consumer_token(site) -> Tuple[str, str]: - key_msg = 'OAuth consumer key on {}:{}'.format(site.code, site.family) + key_msg = f'OAuth consumer key on {site.code}:{site.family}' key = pywikibot.input(key_msg) - secret_msg = 'OAuth consumer secret for consumer {}'.format(key) + secret_msg = f'OAuth consumer secret for consumer {key}' secret = pywikibot.input(secret_msg, password=True) return key, secret
@@ -76,7 +76,7 @@ login_manager.login() identity = login_manager.identity if identity is None: - pywikibot.error('Invalid OAuth info for {site}.'.format(site=site)) + pywikibot.error(f'Invalid OAuth info for {site}.') elif site.username() != identity['username']: pywikibot.error( 'Logged in on {site} via OAuth as {wrong}, but expect as {right}' @@ -117,11 +117,11 @@
user = site.user() if user: - pywikibot.info('Logged in on {} as {}.'.format(site, user)) + pywikibot.info(f'Logged in on {site} as {user}.') elif logout: - pywikibot.info('Logged out of {}.'.format(site)) + pywikibot.info(f'Logged out of {site}.') else: - pywikibot.info('Not logged in on {}.'.format(site)) + pywikibot.info(f'Not logged in on {site}.')
def main(*args: str) -> None: diff --git a/pywikibot/scripts/preload_sites.py b/pywikibot/scripts/preload_sites.py index 1d06a27..bdf4d7e 100755 --- a/pywikibot/scripts/preload_sites.py +++ b/pywikibot/scripts/preload_sites.py @@ -91,7 +91,7 @@ futures = {executor.submit(preload_family, family, executor) for family in families} wait(futures) - pywikibot.output('Loading time used: {}'.format(datetime.now() - start)) + pywikibot.output(f'Loading time used: {datetime.now() - start}')
if __name__ == '__main__': diff --git a/pywikibot/scripts/version.py b/pywikibot/scripts/version.py index 662be38..45b4674 100755 --- a/pywikibot/scripts/version.py +++ b/pywikibot/scripts/version.py @@ -97,9 +97,9 @@ for family, usernames in pywikibot.config.usernames.items(): if not usernames: continue - pywikibot.output('Usernames for family {!r}:'.format(family)) + pywikibot.output(f'Usernames for family {family!r}:') for lang, username in usernames.items(): - pywikibot.output('\t{}: {}'.format(lang, username)) + pywikibot.output(f'\t{lang}: {username}')
if __name__ == '__main__': diff --git a/pywikibot/site/_apisite.py b/pywikibot/site/_apisite.py index a290875..d316b81 100644 --- a/pywikibot/site/_apisite.py +++ b/pywikibot/site/_apisite.py @@ -164,7 +164,7 @@ prefixes.update(self._interwikimap.get_by_url(url)) if not prefixes: raise KeyError( - "There is no interwiki prefix to '{}'".format(site)) + f"There is no interwiki prefix to '{site}'") return sorted(prefixes, key=lambda p: (len(p), p))
def local_interwiki(self, prefix: str) -> bool: @@ -215,7 +215,7 @@ if m_site['dbname'] == dbname: url = m_site['url'] + '/w/index.php' return pywikibot.Site(url=url) - raise ValueError('Cannot parse a site out of {}.'.format(dbname)) + raise ValueError(f'Cannot parse a site out of {dbname}.')
def _generator( self, @@ -648,7 +648,7 @@ None if 'anon' in uidata['query']['userinfo'] else uidata['query']['userinfo']['name']) return {ns for ns in self.namespaces.values() if ns.id >= 0 - and self._useroptions['searchNs{}'.format(ns.id)] + and self._useroptions[f'searchNs{ns.id}'] in ['1', True]}
@property # type: ignore[misc] @@ -712,7 +712,7 @@ pattern) if letters: pattern += ''.join(letters.split('|')) - return '[{}]*'.format(pattern) + return f'[{pattern}]*'
@staticmethod def assert_valid_iter_params( @@ -905,7 +905,7 @@ msgs = self.mediawiki_messages(needed_mw_messages) except KeyError: raise NotImplementedError( - 'MediaWiki messages missing: {}'.format(needed_mw_messages)) + f'MediaWiki messages missing: {needed_mw_messages}')
args = list(args) concat = msgs['and'] + msgs['word-separator'] @@ -1220,7 +1220,7 @@ """ if not self.has_data_repository: raise UnknownExtensionError( - 'Wikibase is not implemented for {}.'.format(self)) + f'Wikibase is not implemented for {self}.')
repo = self.data_repository() dp = pywikibot.ItemPage(repo, item) @@ -1586,7 +1586,7 @@ try: parsed_text = data['parse']['text']['*'] except KeyError as e: - raise KeyError('API parse response lacks {} key'.format(e)) + raise KeyError(f'API parse response lacks {e} key') return parsed_text
def getcategoryinfo(self, category: 'pywikibot.page.Category') -> None: @@ -1847,7 +1847,7 @@ while True: try: result = req.submit() - pywikibot.debug('editpage response: {}'.format(result)) + pywikibot.debug(f'editpage response: {result}') except APIError as err: if err.code.endswith('anon') and self.logged_in(): pywikibot.debug("editpage: received '{}' even though " @@ -2048,7 +2048,7 @@ self.unlock_page(dest)
if 'mergehistory' not in result: - pywikibot.error('mergehistory: {error}'.format(error=result)) + pywikibot.error(f'mergehistory: {result}') raise Error('mergehistory: unexpected response')
# catalog of move errors for use in error messages @@ -2135,7 +2135,7 @@ req['from'] = oldtitle # "from" is a python keyword try: result = req.submit() - pywikibot.debug('movepage response: {}'.format(result)) + pywikibot.debug(f'movepage response: {result}') except APIError as err: if err.code.endswith('anon') and self.logged_in(): pywikibot.debug( @@ -2179,7 +2179,7 @@ finally: self.unlock_page(page) if 'move' not in result: - pywikibot.error('movepage: {}'.format(result)) + pywikibot.error(f'movepage: {result}') raise Error('movepage: unexpected response') # TODO: Check for talkmove-error messages if 'talkmove-error-code' in result['move']: @@ -2330,7 +2330,7 @@ if deletetalk: if self.mw_version < '1.38wmf24': pywikibot.warning( - 'deletetalk is not available on {}'.format(self.mw_version) + f'deletetalk is not available on {self.mw_version}' ) else: params['deletetalk'] = deletetalk @@ -2650,7 +2650,7 @@ result = result['purge'] except KeyError: pywikibot.error( - 'purgepages: Unexpected API response:\n{}'.format(result)) + f'purgepages: Unexpected API response:\n{result}') return False if not all('purged' in page for page in result): return False @@ -2783,8 +2783,8 @@ raise TypeError('diff parameter is of invalid type')
params = {'action': 'compare', - 'from{}'.format(old_t[0]): old_t[1], - 'to{}'.format(diff_t[0]): diff_t[1]} + f'from{old_t[0]}': old_t[1], + f'to{diff_t[0]}': diff_t[1]}
req = self.simple_request(**params) data = req.submit() diff --git a/pywikibot/site/_datasite.py b/pywikibot/site/_datasite.py index a13e8a1..4d870d8 100644 --- a/pywikibot/site/_datasite.py +++ b/pywikibot/site/_datasite.py @@ -291,7 +291,7 @@ if arg in ['clear', 'summary']: params[arg] = kwargs[arg] elif arg != 'baserevid': - warn('Unknown wbeditentity parameter {} ignored'.format(arg), + warn(f'Unknown wbeditentity parameter {arg} ignored', UserWarning, 2)
params['data'] = json.dumps(data) @@ -851,7 +851,7 @@ # Supported actions assert action in ('wbsetaliases', 'wbsetdescription', 'wbsetlabel', 'wbsetsitelink'), \ - 'action {} not supported.'.format(action) + f'action {action} not supported.'
# prefer ID over (site, title) if isinstance(itemdef, str): diff --git a/pywikibot/site/_extensions.py b/pywikibot/site/_extensions.py index 66f4d59..9ba7bf4 100644 --- a/pywikibot/site/_extensions.py +++ b/pywikibot/site/_extensions.py @@ -201,7 +201,7 @@ defined for a returned entry in API response. """ if not isinstance(page, pywikibot.FilePage): - raise TypeError('Page {} must be a FilePage.'.format(page)) + raise TypeError(f'Page {page} must be a FilePage.')
title = page.title(with_section=False) args = {'titles': title, diff --git a/pywikibot/site/_generators.py b/pywikibot/site/_generators.py index d363abc..510d4f9 100644 --- a/pywikibot/site/_generators.py +++ b/pywikibot/site/_generators.py @@ -156,7 +156,7 @@ .format(len(cache), self))
for pagedata in rvgen: - pywikibot.debug('Preloading {}'.format(pagedata)) + pywikibot.debug(f'Preloading {pagedata}') try: if pagedata['title'] not in cache: # API always returns a "normalized" title which is @@ -177,9 +177,9 @@ continue
except KeyError: - pywikibot.debug("No 'title' in {}".format(pagedata)) - pywikibot.debug('pageids={}'.format(pageids)) - pywikibot.debug('titles={}'.format(list(cache.keys()))) + pywikibot.debug(f"No 'title' in {pagedata}") + pywikibot.debug(f'pageids={pageids}') + pywikibot.debug(f'titles={list(cache.keys())}') continue
priority, page = cache[pagedata['title']] @@ -866,8 +866,8 @@ else: filterredir = None issue_deprecation_warning( - 'The value "{}" for "filterredir"'.format(old), - '"{}"'.format(filterredir), since='7.0.0') + f'The value "{old}" for "filterredir"', + f'"{filterredir}"', since='7.0.0')
apgen = self._generator(api.PageGenerator, type_arg='allpages', namespaces=namespace, @@ -2135,7 +2135,7 @@ """ if propname not in self.get_property_names(): raise NotImplementedError( - '"{}" is not a valid page property'.format(propname)) + f'"{propname}" is not a valid page property') return self._generator(api.PageGenerator, type_arg='pageswithprop', gpwppropname=propname, total=total)
diff --git a/pywikibot/site/_interwikimap.py b/pywikibot/site/_interwikimap.py index 2f00367..9669d22 100644 --- a/pywikibot/site/_interwikimap.py +++ b/pywikibot/site/_interwikimap.py @@ -71,7 +71,7 @@ :raises TypeError: Site for the prefix is of wrong type """ if prefix not in self._iw_sites: - raise KeyError("'{}' is not an interwiki prefix.".format(prefix)) + raise KeyError(f"'{prefix}' is not an interwiki prefix.") if isinstance(self._iw_sites[prefix].site, pywikibot.site.BaseSite): return self._iw_sites[prefix] if isinstance(self._iw_sites[prefix].site, Exception): diff --git a/pywikibot/site/_namespace.py b/pywikibot/site/_namespace.py index 4dd6457..dd4877e 100644 --- a/pywikibot/site/_namespace.py +++ b/pywikibot/site/_namespace.py @@ -8,9 +8,10 @@ from enum import IntEnum from typing import Optional, Union
+from pywikibot.backports import Dict from pywikibot.backports import Iterable as IterableType -from pywikibot.backports import Dict, List -from pywikibot.tools import classproperty, ComparableMixin, SelfCallMixin +from pywikibot.backports import List +from pywikibot.tools import ComparableMixin, SelfCallMixin, classproperty
NamespaceIDType = Union[int, str, 'Namespace'] @@ -248,7 +249,7 @@
if extra: kwargs = ', ' + ', '.join( - key + '={!r}'.format(value) for key, value in extra) + key + f'={value!r}' for key, value in extra) else: kwargs = ''
diff --git a/pywikibot/site/_siteinfo.py b/pywikibot/site/_siteinfo.py index b973adc..c5d0a0a 100644 --- a/pywikibot/site/_siteinfo.py +++ b/pywikibot/site/_siteinfo.py @@ -169,7 +169,7 @@ if e.code == 'siunknown_siprop': if len(props) == 1: pywikibot.log( - "Unable to get siprop '{}'".format(props[0])) + f"Unable to get siprop '{props[0]}'") return {props[0]: (Siteinfo._get_default(props[0]), False)} pywikibot.log('Unable to get siteinfo, because at least ' "one property is unknown: '{}'".format( diff --git a/pywikibot/site/_upload.py b/pywikibot/site/_upload.py index 15c286d..e6ba7da 100644 --- a/pywikibot/site/_upload.py +++ b/pywikibot/site/_upload.py @@ -11,11 +11,7 @@ from warnings import warn
import pywikibot -from pywikibot.exceptions import ( - APIError, - Error, - UploadError, -) +from pywikibot.exceptions import APIError, Error, UploadError from pywikibot.tools import compute_file_hash
@@ -232,7 +228,7 @@ if verify_stash: # The SHA1 was also requested so calculate and compare it assert 'sha1' in stash_info, \ - 'sha1 not in stash info: {}'.format(stash_info) + f'sha1 not in stash info: {stash_info}' sha1 = compute_file_hash(self.filename, bytes_to_read=offset) if sha1 != stash_info['sha1']: raise ValueError( @@ -471,7 +467,7 @@ pywikibot.debug(result)
if 'result' not in result: - raise Error('Upload: unrecognized response: {}'.format(result)) + raise Error(f'Upload: unrecognized response: {result}')
if result['result'] == 'Warning': assert 'warnings' in result and not ignore_all_warnings diff --git a/pywikibot/site_detect.py b/pywikibot/site_detect.py index 5846e38..5031518 100644 --- a/pywikibot/site_detect.py +++ b/pywikibot/site_detect.py @@ -52,7 +52,7 @@ check_response(r)
if fromurl != r.url: - pywikibot.log('{} redirected to {}'.format(fromurl, r.url)) + pywikibot.log(f'{fromurl} redirected to {r.url}') fromurl = r.url
self.fromurl = fromurl @@ -73,16 +73,16 @@ except (ServerError, RequestException): raise except Exception as e: - pywikibot.log('MW detection failed: {!r}'.format(e)) + pywikibot.log(f'MW detection failed: {e!r}')
if not self.version: self._fetch_old_version()
if not self.api: - raise RuntimeError('Unsupported url: {}'.format(self.fromurl)) + raise RuntimeError(f'Unsupported url: {self.fromurl}')
if not self.version or self.version < MIN_VERSION: - raise RuntimeError('Unsupported version: {}'.format(self.version)) + raise RuntimeError(f'Unsupported version: {self.version}')
if not self.articlepath: if self.private_wiki: @@ -296,7 +296,7 @@ m = re.search(r'\d{3}', err.args[0], flags=re.ASCII) if not m: raise err - msg = 'Generic Server Error ({})'.format(m.group()) + msg = f'Generic Server Error ({m.group()})'
raise ServerError(msg)
diff --git a/pywikibot/specialbots/_upload.py b/pywikibot/specialbots/_upload.py index 0764746..3a4184b 100644 --- a/pywikibot/specialbots/_upload.py +++ b/pywikibot/specialbots/_upload.py @@ -131,7 +131,7 @@ file_len = path.stat().st_size if file_len: pywikibot.output('Download resumed.') - headers = {'Range': 'bytes={}-'.format(file_len)} + headers = {'Range': f'bytes={file_len}-'} else: headers = {}
@@ -174,12 +174,12 @@ break try: dt = next(dt_gen) - pywikibot.output('Sleeping for {} seconds ...'.format(dt)) + pywikibot.output(f'Sleeping for {dt} seconds ...') pywikibot.sleep(dt) except StopIteration: raise FatalServerError('Download failed, too many retries!')
- pywikibot.output('Downloaded {} bytes'.format(path.stat().st_size)) + pywikibot.output(f'Downloaded {path.stat().st_size} bytes') return tempname
def _handle_warning(self, warning: str) -> Optional[bool]: @@ -268,7 +268,7 @@ if invalid: c = ''.join(invalid) pywikibot.output( - 'Invalid character(s): {}. Please try again'.format(c)) + f'Invalid character(s): {c}. Please try again') continue
if allowed_formats and ext not in allowed_formats: @@ -391,7 +391,7 @@ imagepage = pywikibot.FilePage(site, filename) # normalizes filename imagepage.text = self.description
- pywikibot.output('Uploading file to {}...'.format(site)) + pywikibot.output(f'Uploading file to {site}...')
ignore_warnings = self.ignore_warning is True or self._handle_warnings
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py index f8e1675..d4578f4 100644 --- a/pywikibot/textlib.py +++ b/pywikibot/textlib.py @@ -203,12 +203,12 @@ old = template.title(with_ns=False) else: raise ValueError( - '{} is not a template Page object'.format(template)) + f'{template} is not a template Page object') elif isinstance(template, str): old = template else: raise ValueError( - '{!r} is not a valid template'.format(template)) + f'{template!r} is not a valid template')
pattern = case_escape(namespace.case, old) # namespaces may be any mixed case @@ -237,7 +237,7 @@ `_ignore_case` becomes a public method """ return ''.join( - '[{}{}]'.format(c, s) if c != s else c + f'[{c}{s}]' if c != s else c for s, c in zip(string, string.swapcase()))
@@ -562,12 +562,12 @@ def handle_starttag(self, tag, attrs) -> None: """Add start tag to text if tag should be kept.""" if tag in self.keeptags: - self.textdata += '<{}>'.format(tag) + self.textdata += f'<{tag}>'
def handle_endtag(self, tag) -> None: """Add end tag to text if tag should be kept.""" if tag in self.keeptags: - self.textdata += '</{}>'.format(tag) + self.textdata += f'</{tag}>'
def isDisabled(text: str, index: int, tags=None) -> bool: @@ -723,7 +723,7 @@ link_pattern = re.compile( r'\[\[(?P<title>.*?)(#(?P<section>.*?))?(\|(?P<label>.*?))?\]\]' r'(?P<linktrail>{})'.format(linktrail)) - extended_label_pattern = re.compile(r'(.*?\]\])({})'.format(linktrail)) + extended_label_pattern = re.compile(fr'(.*?\]\])({linktrail})') linktrail = re.compile(linktrail) curpos = 0 # This loop will run until we have finished the current page @@ -859,7 +859,7 @@ or parsed_new_label.namespace != new_link.namespace)
if must_piped: - new_text = '[[{}|{}]]'.format(new_title, new_label) + new_text = f'[[{new_title}|{new_label}]]' else: new_text = '[[{}]]{}'.format(new_label[:len(new_title)], new_label[len(new_title):]) @@ -1215,15 +1215,15 @@ # Do we have a noinclude at the end of the template? parts = s2.split(includeOff) lastpart = parts[-1] - if re.match(r'\s*{}'.format(marker), lastpart): + if re.match(fr'\s*{marker}', lastpart): # Put the langlinks back into the noinclude's - regexp = re.compile(r'{}\s*{}'.format(includeOff, marker)) + regexp = re.compile(fr'{includeOff}\s*{marker}') newtext = regexp.sub(s + includeOff, s2) else: # Put the langlinks at the end, inside noinclude's newtext = (s2.replace(marker, '').strip() + separator - + '{}\n{}{}\n'.format(includeOn, s, includeOff)) + + f'{includeOn}\n{s}{includeOff}\n') else: newtext = s2.replace(marker, '').strip() + separator + s
@@ -1390,7 +1390,7 @@ site=site) if marker: # avoid having multiple linefeeds at the end of the text - text = re.sub(r'\s*{}'.format(re.escape(marker)), '\n' + marker, + text = re.sub(fr'\s*{re.escape(marker)}', '\n' + marker, text.strip()) return text.strip()
@@ -1593,7 +1593,7 @@ # whole word if no ":" is present prefix = category.split(':', 1)[0] if prefix not in insite.namespaces[14]: - category = '{}:{}'.format(insite.namespace(14), category) + category = f'{insite.namespace(14)}:{category}' category = pywikibot.Category(pywikibot.Link(category, insite, default_namespace=14), @@ -1698,7 +1698,7 @@ text = removeDisabledParts(text)
parser_name = wikitextparser.__name__ - pywikibot.debug('Using {!r} wikitext parser'.format(parser_name)) + pywikibot.debug(f'Using {parser_name!r} wikitext parser')
result = [] parsed = wikitextparser.parse(text) @@ -1783,7 +1783,7 @@ for items in params.items(): text += '|{}={}\n'.format(*items)
- return '{{{{{}\n{}}}}}'.format(template, text) + return f'{{{{{template}\n{text}}}}}'
# -------------------------- @@ -1808,7 +1808,7 @@ section = re.sub(r'\[\[(\?:)?', r'[[:?', re.escape(section)) # match underscores and white spaces section = re.sub(r'\?[ _]', '[ _]', section) - m = re.search("=+[ ']*{}[ ']*=+".format(section), pagetext) + m = re.search(f"=+[ ']*{section}[ ']*=+", pagetext) return bool(m)
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py index 1ac0cf7..42b9922 100644 --- a/pywikibot/throttle.py +++ b/pywikibot/throttle.py @@ -147,7 +147,7 @@ """ global pid mysite = self.mysite - pywikibot.debug('Checking multiplicity: pid = {pid}'.format(pid=pid)) + pywikibot.debug(f'Checking multiplicity: pid = {pid}') with self.lock: processes = [] used_pids = set() diff --git a/pywikibot/time.py b/pywikibot/time.py index eea240a..c129990 100644 --- a/pywikibot/time.py +++ b/pywikibot/time.py @@ -17,6 +17,7 @@ from pywikibot.backports import Tuple from pywikibot.tools import classproperty
+ __all__ = ( 'parse_duration', 'str2timedelta', @@ -228,7 +229,7 @@ :return: ISO8601 format string """ assert len(sep) == 1 - return '%Y-%m-%d{}%H:%M:%SZ'.format(sep) + return f'%Y-%m-%d{sep}%H:%M:%SZ'
@classmethod def fromISOformat(cls: Type['Timestamp'], # noqa: N802 @@ -244,7 +245,7 @@ # to create a clone. if isinstance(ts, cls): return ts.clone() - _ts = '{pre}{sep}{post}'.format(pre=ts[:10], sep=sep, post=ts[11:]) + _ts = f'{ts[:10]}{sep}{ts[11:]}' return cls._from_iso8601(_ts)
@classmethod @@ -288,7 +289,7 @@
.. versionadded: 7.5 """ - return '{ts:.6f}'.format(ts=self.posix_timestamp()) + return f'{self.posix_timestamp():.6f}'
def __str__(self) -> str: """Return a string format recognized by the API.""" diff --git a/pywikibot/tools/__init__.py b/pywikibot/tools/__init__.py index 5bdf198..7aae16d 100644 --- a/pywikibot/tools/__init__.py +++ b/pywikibot/tools/__init__.py @@ -12,7 +12,6 @@ import stat import subprocess import sys - from contextlib import suppress from functools import total_ordering, wraps from importlib import import_module @@ -317,7 +316,7 @@ return True if val in ('n', 'no', 'f', 'false', 'off', '0'): return False - raise ValueError('invalid truth value {!r}'.format(val)) + raise ValueError(f'invalid truth value {val!r}')
def normalize_username(username) -> Optional[str]: @@ -405,7 +404,7 @@ version_match = MediaWikiVersion.MEDIAWIKI_VERSION.match(version_str)
if not version_match: - raise ValueError('Invalid version number "{}"'.format(version_str)) + raise ValueError(f'Invalid version number "{version_str}"')
components = [int(n) for n in version_match.group(1).split('.')]
@@ -557,7 +556,7 @@ if mode in ('r', 'a', 'w'): mode += 'b' elif mode not in ('rb', 'ab', 'wb'): - raise ValueError('Invalid mode: "{}"'.format(mode)) + raise ValueError(f'Invalid mode: "{mode}"')
if use_extension: # if '.' not in filename, it'll be 1 character long but otherwise @@ -602,7 +601,7 @@ if stderr != b'': process.stdout.close() raise OSError( - 'Unexpected STDERR output from 7za {}'.format(stderr)) + f'Unexpected STDERR output from 7za {stderr}') binary = process.stdout
elif extension in ('lzma', 'xz'): diff --git a/pywikibot/tools/_deprecate.py b/pywikibot/tools/_deprecate.py index 885dbfc..4feb1c2 100644 --- a/pywikibot/tools/_deprecate.py +++ b/pywikibot/tools/_deprecate.py @@ -64,7 +64,7 @@ obj.__full_name__ = '{}.{}.{}'.format(obj.__module__, class_name, obj.__name__) else: - obj.__full_name__ = '{}.{}'.format(obj.__module__, obj.__name__) + obj.__full_name__ = f'{obj.__module__}.{obj.__name__}'
def manage_wrapping(wrapper, obj) -> None: @@ -174,7 +174,7 @@ :param since: a version string string when the method was deprecated """ if since and '.' not in since: - raise ValueError('{} is not a valid release number'.format(since)) + raise ValueError(f'{since} is not a valid release number')
if instead: msg = '{{0}} is deprecated{since}; use {{1}} instead.' @@ -411,7 +411,7 @@ for old_arg, new_arg in arg_pairs.items(): params[old_arg] = inspect.Parameter( old_arg, kind=inspect._POSITIONAL_OR_KEYWORD, - default='[deprecated name of {}]'.format(new_arg) + default=f'[deprecated name of {new_arg}]' if new_arg not in [True, False, None, ''] else NotImplemented) params = collections.OrderedDict(sorted(params.items(), @@ -600,7 +600,7 @@ raise ValueError('Deprecated name "{}" may not contain ' '".".'.format(name)) if name in self._deprecated: - raise ValueError('Name "{}" is already deprecated.'.format(name)) + raise ValueError(f'Name "{name}" is already deprecated.') if replacement is not None and hasattr(self._module, name): raise ValueError('Module has already an attribute named ' '"{}".'.format(name)) diff --git a/pywikibot/tools/chars.py b/pywikibot/tools/chars.py index f33b40e..06e85ea 100644 --- a/pywikibot/tools/chars.py +++ b/pywikibot/tools/chars.py @@ -37,7 +37,7 @@ codepoint = (ord(match[0]) & mask) << 10 | (ord(match[1]) & mask) else: codepoint = ord(match) - return '<{:x}>'.format(codepoint) + return f'<{codepoint:x}>'
return INVISIBLE_REGEX.sub(replace, text)
@@ -53,7 +53,7 @@ if 31 < cord < 127: html.append(c) else: - html.append('&#{};'.format(cord)) + html.append(f'&#{cord};') return ''.join(html)
diff --git a/pywikibot/tools/collections.py b/pywikibot/tools/collections.py index af9460a..e351e55 100644 --- a/pywikibot/tools/collections.py +++ b/pywikibot/tools/collections.py @@ -5,8 +5,7 @@ # Distributed under the terms of the MIT license. # import collections - -from abc import abstractmethod, ABC +from abc import ABC, abstractmethod from collections.abc import Collection, Generator, Iterator, Mapping from contextlib import suppress from itertools import chain @@ -195,7 +194,7 @@ def __repr__(self) -> str: """Provide an object representation without clearing the content.""" items = list(self) - result = '{}({})'.format(self.__class__.__name__, items) + result = f'{self.__class__.__name__}({items})' self.extend(items) return result
diff --git a/pywikibot/tools/djvu.py b/pywikibot/tools/djvu.py index 136d43f..39e4ae7 100644 --- a/pywikibot/tools/djvu.py +++ b/pywikibot/tools/djvu.py @@ -32,11 +32,11 @@ stdoutdata, stderrdata = dp.communicate()
if dp.returncode != 0: - pywikibot.error('{} error; {}'.format(lib, cmd)) + pywikibot.error(f'{lib} error; {cmd}') pywikibot.error(str(stderrdata)) return (False, stdoutdata)
- pywikibot.log('SUCCESS: {} (PID: {})'.format(cmd, dp.pid)) + pywikibot.log(f'SUCCESS: {cmd} (PID: {dp.pid})')
return (True, stdoutdata)
@@ -81,7 +81,7 @@
def __str__(self) -> str: """Return a string representation.""" - return "{}('{}')".format(self.__class__.__name__, self._filename) + return f"{self.__class__.__name__}('{self._filename}')"
def check_cache(fn): """Decorator to check if cache shall be cleared.""" @@ -227,7 +227,7 @@ if not self.has_text(force=force): raise ValueError('Djvu file {} has no text layer.' .format(self.file)) - res, stdoutdata = _call_cmd(['djvutxt', '--page={}'.format(int(n)), + res, stdoutdata = _call_cmd(['djvutxt', f'--page={int(n)}', self.file]) if not res: return False diff --git a/pywikibot/tools/formatter.py b/pywikibot/tools/formatter.py index 80f207b..768e6ca 100644 --- a/pywikibot/tools/formatter.py +++ b/pywikibot/tools/formatter.py @@ -87,12 +87,12 @@ colors = set(terminal_interface_base.colors) # Dot.product of colors to create all possible combinations of foreground # and background colors. - colors |= {'{};{}'.format(c1, c2) for c1 in colors for c2 in colors} + colors |= {f'{c1};{c2}' for c1 in colors for c2 in colors} col_pat = '|'.join(colors) - text = re.sub('(?:\03)?{{({})}}'.format(col_pat), r'<<\1>>', text) + text = re.sub(f'(?:\03)?{{({col_pat})}}', r'<<\1>>', text) replace_color = kwargs.get('color') if replace_color in colors: - text = text.replace('{color}', '<<{}>>'.format(replace_color)) + text = text.replace('{color}', f'<<{replace_color}>>') if '\03' in text: raise ValueError('\03 pattern found in color format') intersect = colors.intersection(kwargs) # kwargs use colors diff --git a/pywikibot/userinterfaces/gui.py b/pywikibot/userinterfaces/gui.py index 1dea42b..1e29c12 100644 --- a/pywikibot/userinterfaces/gui.py +++ b/pywikibot/userinterfaces/gui.py @@ -12,14 +12,15 @@ # Distributed under the terms of the MIT license. 
# import tkinter +from tkinter import simpledialog as tkSimpleDialog +from tkinter.scrolledtext import ScrolledText +from typing import Optional + from idlelib import replace as ReplaceDialog from idlelib import search as SearchDialog from idlelib.config import idleConf from idlelib.configdialog import ConfigDialog from idlelib.multicall import MultiCallCreator -from tkinter import simpledialog as tkSimpleDialog -from tkinter.scrolledtext import ScrolledText -from typing import Optional
import pywikibot from pywikibot import __url__ @@ -233,7 +234,7 @@ if not idx: break # index right after the end of the occurrence - lastidx = '{}+{}c'.format(idx, len(s)) + lastidx = f'{idx}+{len(s)}c' # tag the whole occurrence (start included, stop excluded) self.tag_add('found', idx, lastidx) # prepare to search for next occurrence @@ -261,7 +262,7 @@ if lineno <= 0: self.bell() return 'break' - self.mark_set('insert', '{}.0'.format(lineno)) + self.mark_set('insert', f'{lineno}.0') self.see('insert') return None
@@ -391,7 +392,7 @@ column = jumpIndex - (text[:jumpIndex].rfind('\n') + 1) # don't know how to place the caret, but scrolling to the right # line should already be helpful. - self.editbox.see('{}.{}'.format(line, column)) + self.editbox.see(f'{line}.{column}') # wait for user to push a button which will destroy (close) the window self.parent.mainloop() return self.text diff --git a/pywikibot/userinterfaces/terminal_interface_base.py b/pywikibot/userinterfaces/terminal_interface_base.py index a21d376..2142573 100644 --- a/pywikibot/userinterfaces/terminal_interface_base.py +++ b/pywikibot/userinterfaces/terminal_interface_base.py @@ -52,8 +52,8 @@ ]
_color_pat = '((:?{0});?(:?{0})?)'.format('|'.join(colors + ['previous'])) -old_colorTagR = re.compile('\03{{{cpat}}}'.format(cpat=_color_pat)) -new_colorTagR = re.compile('<<{cpat}>>'.format(cpat=_color_pat)) +old_colorTagR = re.compile(f'\03{{{_color_pat}}}') +new_colorTagR = re.compile(f'<<{_color_pat}>>')
class UI(ABUIC): @@ -370,7 +370,7 @@ end_marker = ':'
if default: - question += ' (default: {})'.format(default) + question += f' (default: {default})' question += end_marker
# lock stream output @@ -558,7 +558,7 @@ try: from pywikibot.userinterfaces import gui except ImportError as e: - pywikibot.warning('Could not load GUI modules: {}'.format(e)) + pywikibot.warning(f'Could not load GUI modules: {e}') return text editor = gui.EditBoxWindow() return editor.edit(text, jumpIndex=jumpIndex, highlight=highlight) diff --git a/pywikibot/version.py b/pywikibot/version.py index 661eb36..a8e2c00 100644 --- a/pywikibot/version.py +++ b/pywikibot/version.py @@ -22,7 +22,7 @@
import pywikibot from pywikibot import config -from pywikibot.backports import cache, Dict, List, Tuple +from pywikibot.backports import Dict, List, Tuple, cache from pywikibot.comms.http import fetch from pywikibot.exceptions import VersionParseError
@@ -181,7 +181,7 @@ :param rev: Subversion revision identifier :return: the git hash """ - uri = 'https://github.com/wikimedia/{}/!svn/vcc/default'.format(tag) + uri = f'https://github.com/wikimedia/{tag}/!svn/vcc/default' request = fetch(uri, method='PROPFIND', data="<?xml version='1.0' encoding='utf-8'?>" '<propfind xmlns="DAV:"><allprop/></propfind>', @@ -216,7 +216,7 @@ for i in range(len(date) - 1): assert date[i] == date2[i], 'Date of version is not consistent'
- rev = 's{}'.format(rev) + rev = f's{rev}' if (not date or not tag or not rev) and not path: raise VersionParseError return (tag, rev, date, hsh) @@ -270,7 +270,7 @@ cwd=_program_dir, stdout=subprocess.PIPE) rev, stderr = dp.communicate() - rev = 'g{}'.format(len(rev.splitlines())) + rev = f'g{len(rev.splitlines())}' hsh = info[3] # also stored in '.git/refs/heads/master' if (not date or not tag or not rev) and not path: raise VersionParseError @@ -333,7 +333,7 @@ hsh = json.loads(buf)['revision'] return hsh except Exception as e: - raise VersionParseError('{!r} while parsing {!r}'.format(e, buf)) + raise VersionParseError(f'{e!r} while parsing {buf!r}')
def get_module_filename(module) -> Optional[str]: diff --git a/scripts/add_text.py b/scripts/add_text.py index c1251be..77aaa5a 100755 --- a/scripts/add_text.py +++ b/scripts/add_text.py @@ -146,7 +146,7 @@ return True
elif page.isTalkPage(): - pywikibot.output("{} doesn't exist, creating it!".format(page)) + pywikibot.output(f"{page} doesn't exist, creating it!") return False
elif self.opt.create or self.opt.createonly: @@ -228,7 +228,7 @@ elif option == '-major': args['minor'] = False else: - raise ValueError("Argument '{}' is unrecognized".format(option)) + raise ValueError(f"Argument '{option}' is unrecognized")
if not args['text'] and not args['textfile']: raise ValueError("Either the '-text' or '-textfile' is required") diff --git a/scripts/archivebot.py b/scripts/archivebot.py index a86f4f3..33dd9d5 100755 --- a/scripts/archivebot.py +++ b/scripts/archivebot.py @@ -214,7 +214,7 @@ """ match = re.fullmatch(r'(\d{1,3}(?: \d{3})+|\d+) *([BkKMT]?)', string) if not match: - raise MalformedConfigError("Couldn't parse size: {}".format(string)) + raise MalformedConfigError(f"Couldn't parse size: {string}") val, unit = (int(match.group(1).replace(' ', '')), match.group(2)) if unit == 'M': val *= 1024 @@ -309,7 +309,7 @@
def to_text(self) -> str: """Return wikitext discussion thread.""" - return '== {} ==\n\n{}'.format(self.title, self.content) + return f'== {self.title} ==\n\n{self.content}'
class DiscussionPage(pywikibot.Page): @@ -526,7 +526,7 @@ elif attr == 'maxarchivesize': size, unit = str2size(value) if unit == 'B' and size > self.maxsize: - value = '{} K'.format(self.maxsize // 1024) + value = f'{self.maxsize // 1024} K' warn('Siteinfo "maxarticlesize" exceeded. Decreasing ' '"maxarchivesize" to ' + value, ResourceWarning, stacklevel=2) @@ -541,7 +541,7 @@ """Return a template with archiver saveable attributes.""" return '{{%s\n%s\n}}' \ % (self.tpl.title(with_ns=(self.tpl.namespace() != 10)), - '\n'.join('|{} = {}'.format(a, self.get_attr(a)) + '\n'.join(f'|{a} = {self.get_attr(a)}' for a in self.saveables()))
def key_ok(self) -> bool: @@ -822,10 +822,10 @@ pass an unspecified number of arguments to the bot using ``*args`` """ if not page.exists(): - pywikibot.info('{} does not exist, skipping...'.format(page)) + pywikibot.info(f'{page} does not exist, skipping...') return True
- pywikibot.info('\n\n>>> <<lightpurple>>{}<<default>> <<<'.format(page)) + pywikibot.info(f'\n\n>>> <<lightpurple>>{page}<<default>> <<<') # Catching exceptions, so that errors in one page do not bail out # the entire process try: @@ -910,7 +910,7 @@ pywikibot.output( 'NOTE: the specified page "{}" does not (yet) exist.' .format(calc)) - pywikibot.output('key = {}'.format(calc_md5_hexdigest(calc, salt))) + pywikibot.output(f'key = {calc_md5_hexdigest(calc, salt)}') return
if not templates: diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py index 6e4828e..36be5d7 100755 --- a/scripts/blockpageschecker.py +++ b/scripts/blockpageschecker.py @@ -191,7 +191,7 @@ 'Do you want to open the page?', [('with browser', 'b'), ('with gui', 'g'), ('no', 'n')], 'n') if choice == 'b': - webbrowser.open('{}?redirect=no'.format(page.full_url())) + webbrowser.open(f'{page.full_url()}?redirect=no') elif choice == 'g': editor = TextEditor() editor.edit(page.text) @@ -210,7 +210,7 @@ page = self.current_page if page.isRedirectPage(): if self.opt.always: - pywikibot.warning('{} is a redirect; skipping'.format(page)) + pywikibot.warning(f'{page} is a redirect; skipping') elif self.opt.show: self.invoke_editor(page) return @@ -314,11 +314,11 @@
replacement = '|'.join(ttp + tsp + (tu or '')) text, changes = re.subn( - '<noinclude>({})</noinclude>'.format(replacement), + f'<noinclude>({replacement})</noinclude>', '', text) if not changes: text, changes = re.subn( - '({})'.format(replacement), '', text) + f'({replacement})', '', text) msg = 'The page is editable for all' if not self.opt.move: msg += ', deleting the template..' @@ -385,11 +385,11 @@ # Deleting the template because the page doesn't need it. replacement = '|'.join(tsmp + ttmp + (tu or '')) text, changes = re.subn( - '<noinclude>({})</noinclude>'.format(replacement), + f'<noinclude>({replacement})</noinclude>', '', text) if not changes: text, changes = re.subn( - '({})'.format(replacement), '', text) + f'({replacement})', '', text) msg_type = 'deleting' elif move_restriction[0] == 'sysop': # move-total-protection diff --git a/scripts/category.py b/scripts/category.py index 89eb27b..ea2f008 100755 --- a/scripts/category.py +++ b/scripts/category.py @@ -504,12 +504,12 @@ else: if self.sort: catpl = self.sorted_by_last_name(catpl, self.current_page) - pywikibot.output('Adding {}'.format(catpl.title(as_link=True))) + pywikibot.output(f'Adding {catpl.title(as_link=True)}') if page.namespace() == page.site.namespaces.TEMPLATE: tagname = 'noinclude' if self.includeonly == ['includeonly']: tagname = 'includeonly' - tagnameregexp = re.compile(r'(.*)(</{}>)'.format(tagname), + tagnameregexp = re.compile(fr'(.*)(</{tagname}>)', re.I | re.DOTALL) categorytitle = catpl.title( as_link=True, allow_interwiki=False) @@ -519,7 +519,7 @@ # in the template page text = textlib.replaceExcept( text, tagnameregexp, - r'\1{}\n\2'.format(categorytitle), + fr'\1{categorytitle}\n\2', ['comment', 'math', 'nowiki', 'pre', 'syntaxhighlight'], site=self.current_page.site) @@ -1131,7 +1131,7 @@ try: full_text = member.get() except NoPageError: - pywikibot.output('Page {} not found.'.format(member.title())) + pywikibot.output(f'Page {member.title()} not found.') return
# skip initial templates, images and comments for articles. @@ -1194,7 +1194,7 @@ .format(member.title()), options, default='c')
if choice == 'c': - pywikibot.output('Saving page to {}'.format(current_cat.title())) + pywikibot.output(f'Saving page to {current_cat.title()}') if current_cat == original_cat: pywikibot.output('No changes necessary.') else: @@ -1418,7 +1418,7 @@ .format(child.title(with_ns=False)))
for grandchild in overcategorized: - pywikibot.output('\t{}'.format(grandchild.title())) + pywikibot.output(f'\t{grandchild.title()}')
for grandchild in overcategorized: msg = ('Remove "<<lightpurple>>{}<<default>>" from "{}" because ' diff --git a/scripts/category_redirect.py b/scripts/category_redirect.py index 427b416..a655898 100755 --- a/scripts/category_redirect.py +++ b/scripts/category_redirect.py @@ -289,7 +289,7 @@ localtime = time.localtime() today = '{:04d}-{:02d}-{:02d}'.format(*localtime[:3]) self.datafile = pywikibot.config.datafilepath( - '{}-catmovebot-data'.format(self.site.dbName())) + f'{self.site.dbName()}-catmovebot-data') try: with open(self.datafile, 'rb') as inp: self.record = pickle.load(inp) @@ -472,7 +472,7 @@ self.log_page.save(comment) if self.edit_requests: edit_request_page = pywikibot.Page( - self.site, 'User:{}/category edit requests'.format(self.user)) + self.site, f'User:{self.user}/category edit requests') edit_request_page.text = (self.edit_request_text % {'itemlist': '\n' + '\n'.join( (self.edit_request_item % item) diff --git a/scripts/checkimages.py b/scripts/checkimages.py index b9ee55f..f7346f4 100755 --- a/scripts/checkimages.py +++ b/scripts/checkimages.py @@ -84,7 +84,6 @@ import collections import re import time - from itertools import zip_longest from typing import Generator
@@ -483,7 +482,7 @@ def print_with_time_zone(message) -> None: """Print the messages followed by the TimeZone encoded correctly.""" time_zone = time.strftime('%d %b %Y %H:%M:%S (UTC)', time.gmtime()) - pywikibot.output('{} {}'.format(message.rstrip(), time_zone)) + pywikibot.output(f'{message.rstrip()} {time_zone}')
class CheckImagesBot: @@ -512,7 +511,7 @@ 'No report page provided in "REPORT_PAGE" dict ' 'for your project!') self.image_namespace = site.namespaces.FILE.custom_name + ':' - self.list_entry = '\n* [[:{}%s]] '.format(self.image_namespace) + self.list_entry = f'\n* [[:{self.image_namespace}%s]] '
# The summary of the report self.com = i18n.twtranslate(self.site, 'checkimages-log-comment') @@ -716,7 +715,7 @@ commentox = commento2
if second_text: - new_text = '{}\n\n{}'.format(testoattuale, self.notification2) + new_text = f'{testoattuale}\n\n{self.notification2}' else: new_text = '{}\n\n== {} ==\n{}'.format(testoattuale, self.head, self.notification) @@ -764,7 +763,7 @@ def regex_generator(self, regexp, textrun) -> Generator[pywikibot.FilePage, None, None]: """Find page to yield using regex to parse text.""" - regex = re.compile(r'{}'.format(regexp), re.DOTALL) + regex = re.compile(fr'{regexp}', re.DOTALL) results = regex.findall(textrun) for image in results: yield pywikibot.FilePage(self.site, image) @@ -775,7 +774,7 @@ # whitelist template... for key in Family.load('wikipedia').langs.keys(): self.hiddentemplates.add(pywikibot.Page( - self.site, 'Template:{}'.format(key))) + self.site, f'Template:{key}')) # Hidden template loading if self.page_hidden: try: @@ -979,7 +978,7 @@ # Delete the image in the list where we're write on image = self.image_namespace + image_to_tag text_for_the_report = re.sub( - r'\n*[[:{}]]'.format(re.escape(image)), + fr'\n*[[:{re.escape(image)}]]', '', text_for_the_report) self.report(text_for_the_report, image_to_tag, comm_image=dup_comment_image, unver=True) @@ -1345,7 +1344,7 @@ {'num': skip_number})) # If we still have pages to skip: if len(self.skip_list) < skip_number: - pywikibot.output('Skipping {}...'.format(self.image_name)) + pywikibot.output(f'Skipping {self.image_name}...') self.skip_list.append(self.image_name) if skip_number == 1: pywikibot.output() @@ -1429,7 +1428,7 @@ mex_catched = tupla[8] for k in find_list: if find_tipe.lower() == 'findonly': - search_results = re.findall(r'{}'.format(k.lower()), + search_results = re.findall(fr'{k.lower()}', self.image_check_text.lower()) if search_results \ and search_results[0] == self.image_check_text.lower(): @@ -1442,7 +1441,7 @@ self.mex_used = mex_catched break elif find_tipe.lower() == 'find' \ - and re.findall(r'{}'.format(k.lower()), + and re.findall(fr'{k.lower()}', self.image_check_text.lower()): 
self.some_problem = True self.text_used = text @@ -1535,7 +1534,7 @@ return
if delete: - pywikibot.output('{} is not a file!'.format(self.image_name)) + pywikibot.output(f'{self.image_name} is not a file!') if not di: pywikibot.output('No localized message given for ' "'DELETE_IMMEDIATELY'. Skipping.") @@ -1717,7 +1716,7 @@ try: text_regex = page.get() except NoPageError: - pywikibot.output("{} doesn't exist!".format(page.title())) + pywikibot.output(f"{page.title()} doesn't exist!") text_regex = '' # No source, so the bot will quit later. # If generator is the regex' one, use your own Generator using an url # or page and a regex. @@ -1747,7 +1746,7 @@ bot.check_step()
if repeat: - pywikibot.output('Waiting for {} seconds,'.format(time_sleep)) + pywikibot.output(f'Waiting for {time_sleep} seconds,') pywikibot.sleep(time_sleep) else: break @@ -1765,4 +1764,4 @@ if ret is not False: final = time.time() delta = int(final - start) - pywikibot.output('Execution time: {} seconds\n'.format(delta)) + pywikibot.output(f'Execution time: {delta} seconds\n') diff --git a/scripts/commons_information.py b/scripts/commons_information.py index 5402092..a75cff5 100755 --- a/scripts/commons_information.py +++ b/scripts/commons_information.py @@ -99,7 +99,7 @@ lstrip = param.value.lstrip() lspaces = param.value[:len(param.value) - len(lstrip)] rspaces = lstrip[len(lstrip.rstrip()):] - param.value = '{}{}{}'.format(lspaces, value, rspaces) + param.value = f'{lspaces}{value}{rspaces}'
def setup(self): """Raise exception if needed modules are missing.""" diff --git a/scripts/commonscat.py b/scripts/commonscat.py index 2145865..34a859e 100755 --- a/scripts/commonscat.py +++ b/scripts/commonscat.py @@ -359,7 +359,7 @@ elif oldcat.strip() != newcat: # strip trailing white space newtext = re.sub(r'(?i){{%s|?[^{}]*(?:{{.*}})?}}' % oldtemplate, - '{{{{{}|{}}}}}'.format(newtemplate, newcat), + f'{{{{{newtemplate}|{newcat}}}}}', page.get()) else: # nothing left to do return diff --git a/scripts/coordinate_import.py b/scripts/coordinate_import.py index 4e6b77d..8097fad 100755 --- a/scripts/coordinate_import.py +++ b/scripts/coordinate_import.py @@ -147,7 +147,7 @@ try: item.addClaim(newclaim) except CoordinateGlobeUnknownError as e: - pywikibot.output('Skipping unsupported globe: {}'.format(e.args)) + pywikibot.output(f'Skipping unsupported globe: {e.args}') return False else: return True diff --git a/scripts/cosmetic_changes.py b/scripts/cosmetic_changes.py index 96e6c4b..857fb02 100755 --- a/scripts/cosmetic_changes.py +++ b/scripts/cosmetic_changes.py @@ -114,7 +114,7 @@ try: options['ignore'] = getattr(CANCEL, value) except AttributeError: - raise ValueError('Unknown ignore mode {!r}!'.format(value)) + raise ValueError(f'Unknown ignore mode {value!r}!')
gen = gen_factory.getCombinedGenerator(preload=True) if not pywikibot.bot.suggest_help(missing_generator=not gen) \ diff --git a/scripts/create_isbn_edition.py b/scripts/create_isbn_edition.py index 64968f8..4f03522 100644 --- a/scripts/create_isbn_edition.py +++ b/scripts/create_isbn_edition.py @@ -1,5 +1,5 @@ #!/usr/bin/python3 -"""Pywikibot script to load ISBN related data into Wikidata. +r"""Pywikibot script to load ISBN related data into Wikidata.
Pywikibot script to get ISBN data from a digital library, and create or amend the related Wikidata item for edition @@ -255,16 +255,17 @@ # # Distributed under the terms of the MIT license. # -import os # Operating system -import re # Regular expressions (very handy!) +import os # Operating system +import re # Regular expressions (very handy!) from itertools import islice
-import pywikibot # API interface to Wikidata +import pywikibot # API interface to Wikidata from pywikibot import pagegenerators as pg # Wikidata Query interface from pywikibot.backports import List from pywikibot.config import verbose_output as verbose from pywikibot.data import api
+ try: import isbnlib except ImportError as e: @@ -394,7 +395,7 @@ if verbose: pywikibot.info() for i in isbn_data: - pywikibot.info('{}:\t{}'.format(i, isbn_data[i])) + pywikibot.info(f'{i}:\t{isbn_data[i]}')
# Get the book language from the ISBN book reference booklang = mainlang # Default language @@ -458,7 +459,7 @@ rescnt = 0 for rescnt, item in enumerate(generator, start=1): qnumber = item.getID() - pywikibot.warning('Found item: {}'.format(qnumber)) + pywikibot.warning(f'Found item: {qnumber}')
# Create or amend the item if rescnt == 1: @@ -468,9 +469,9 @@ item = pywikibot.ItemPage(repo) # Create item item.editEntity({'labels': label}, summary=transcmt) qnumber = item.getID() - pywikibot.warning('Creating item: {}'.format(qnumber)) + pywikibot.warning(f'Creating item: {qnumber}') else: - pywikibot.critical('Ambiguous ISBN number {}'.format(isbn_fmtd)) + pywikibot.critical(f'Ambiguous ISBN number {isbn_fmtd}') return
# Add all P/Q values @@ -491,7 +492,7 @@ targetx[propty].labels[booklang], target[propty])) except: # noqa: B001, E722, H201 - pywikibot.warning('Add {}:{}'.format(propty, target[propty])) + pywikibot.warning(f'Add {propty}:{target[propty]}')
claim = pywikibot.Claim(repo, propty) claim.setTarget(targetx[propty]) @@ -499,14 +500,14 @@
# Set formatted ISBN number if 'P212' not in item.claims: - pywikibot.warning('Add ISBN number (P212): {}'.format(isbn_fmtd)) + pywikibot.warning(f'Add ISBN number (P212): {isbn_fmtd}') claim = pywikibot.Claim(repo, 'P212') claim.setTarget(isbn_fmtd) item.addClaim(claim, bot=True, summary=transcmt)
# Title if 'P1476' not in item.claims: - pywikibot.warning('Add Title (P1476): {}'.format(objectname)) + pywikibot.warning(f'Add Title (P1476): {objectname}') claim = pywikibot.Claim(repo, 'P1476') claim.setTarget(pywikibot.WbMonolingualText(text=objectname, language=booklang)) @@ -514,7 +515,7 @@
# Subtitle if subtitle and 'P1680' not in item.claims: - pywikibot.warning('Add Subtitle (P1680): {}'.format(subtitle)) + pywikibot.warning(f'Add Subtitle (P1680): {subtitle}') claim = pywikibot.Claim(repo, 'P1680') claim.setTarget(pywikibot.WbMonolingualText(text=subtitle, language=booklang)) @@ -557,9 +558,9 @@ qualifier.setTarget(str(author_cnt)) claim.addQualifier(qualifier, summary=transcmt) elif not author_list: - pywikibot.warning('Unknown author: {}'.format(author_name)) + pywikibot.warning(f'Unknown author: {author_name}') else: - pywikibot.warning('Ambiguous author: {}'.format(author_name)) + pywikibot.warning(f'Ambiguous author: {author_name}')
# Get the publisher publisher_name = isbn_data['Publisher'].strip() @@ -593,13 +594,13 @@
# Book cover images for i in isbn_cover: - pywikibot.info('{}:\t{}'.format(i, isbn_cover[i])) + pywikibot.info(f'{i}:\t{isbn_cover[i]}')
# Handle ISBN classification isbn_classify = isbnlib.classify(isbn_number)
for i in isbn_classify: - pywikibot.debug('{}:\t{}'.format(i, isbn_classify[i])) + pywikibot.debug(f'{i}:\t{isbn_classify[i]}')
# ./create_isbn_edition.py '978-3-8376-5645-9' - de P407 Q188 # Q113460204 diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py index 802cf05..82109f0 100755 --- a/scripts/data_ingestion.py +++ b/scripts/data_ingestion.py @@ -234,7 +234,7 @@ """ duplicates = page.find_duplicate_images() if duplicates: - pywikibot.output('Skipping duplicate of {!r}'.format(duplicates)) + pywikibot.output(f'Skipping duplicate of {duplicates!r}') return
title = page.get_title(self.titlefmt) @@ -319,7 +319,7 @@ try: config_page.get() except NoPageError: - pywikibot.error('{} does not exist'.format(config_page)) + pywikibot.error(f'{config_page} does not exist') continue
configuration = DataIngestionBot.parse_configuration_page(config_page) @@ -328,7 +328,7 @@ try: f = codecs.open(filename, 'r', configuration['csvEncoding']) except OSError as e: - pywikibot.error('{} could not be opened: {}'.format(filename, e)) + pywikibot.error(f'{filename} could not be opened: {e}') else: with f: files = CSVReader(f, urlcolumn='url',
import pywikibot from pywikibot.backports import List, Tuple -from pywikibot.bot import input_yn, SingleSiteBot, suggest_help +from pywikibot.bot import SingleSiteBot, input_yn, suggest_help from pywikibot.comms import http from pywikibot.data import sparql from pywikibot.exceptions import ( @@ -410,7 +409,7 @@
with codecs.open(self.labelfile, **param) as f: for item in self.labels: - f.write('{}:{}\n'.format(item, self.labels[item])) + f.write(f'{item}:{self.labels[item]}\n')
with codecs.open(self.datafile, **param) as f: for dtype in self.data: @@ -420,7 +419,7 @@
with codecs.open(self.nonamefile, **param) as f: for noname in self.noname: - f.write('{}\n'.format(noname)) + f.write(f'{noname}\n')
def page(self, title): """Dispatch title and return the appropriate Page object.""" @@ -429,7 +428,7 @@ return pywikibot.ItemPage(self.site, title) if title.startswith('P'): return pywikibot.PropertyPage(self.site, title) - raise ValueError('Invalid title {}'.format(title)) + raise ValueError(f'Invalid title {title}')
@staticmethod def showtime(time): @@ -439,15 +438,15 @@ if time.precision < 9: result = 'ca. ' + result if time.precision >= 10: - result = '{}-{}'.format(time.month, result) + result = f'{time.month}-{result}' if time.precision >= 11: - result = '{}-{}'.format(time.day, result) + result = f'{time.day}-{result}' if time.precision >= 12: - result = '{} {}'.format(result, time.hour) + result = f'{result} {time.hour}' if time.precision >= 13: - result = '{}:{}'.format(result, time.minute) + result = f'{result}:{time.minute}' if time.precision >= 14: - result = '{}:{}'.format(result, time.second) + result = f'{result}:{time.second}' return result
def showclaims(self, claims): @@ -627,7 +626,7 @@ try: month = self.MONTHNUMBER[m.group(2).lower()] except KeyError: - raise ValueError("Don't know month {}".format(m.group(2))) + raise ValueError(f"Don't know month {m.group(2)}") day = int(m.group(1)) m = re.match(r"(\d+)(?:.|er|eme|ème)?[\s.]\s*(?:d'|d[aei] )?" r'([^\s.]{2,}).?[\s.]\s*(\d+)$', text) @@ -636,7 +635,7 @@ try: month = self.MONTHNUMBER[m.group(2).lower()] except KeyError: - raise ValueError("Don't know month {}".format(m.group(2))) + raise ValueError(f"Don't know month {m.group(2)}") day = int(m.group(1)) m = re.match(r'(\d{4}).?[\s.]\s*([^\s.]{3,}).?[\s.]\s*(\d+)$', text) if m: @@ -644,7 +643,7 @@ try: month = self.MONTHNUMBER[m.group(2).lower()] except KeyError: - raise ValueError("Don't know month {}".format(m.group(2))) + raise ValueError(f"Don't know month {m.group(2)}") day = int(m.group(3)) m = re.match(r"(\d+) (?:de |d')?(\w+[a-z]\w+) de (\d+)", text) if m: @@ -652,7 +651,7 @@ try: month = self.MONTHNUMBER[m.group(2).lower()] except KeyError: - raise ValueError("Don't know month {}".format(m.group(2))) + raise ValueError(f"Don't know month {m.group(2)}") day = int(m.group(1)) m = re.match(r'(\w*[a-zA-Z]\w*).? (\d+)$', text) if m: @@ -660,7 +659,7 @@ try: month = self.MONTHNUMBER[m.group(1).lower()] except KeyError: - raise ValueError("Don't know month {}".format(m.group(1))) + raise ValueError(f"Don't know month {m.group(1)}") m = re.match(r'(\w+).? (\d{1,2})(?:st|nd|rd|th)?.?\s*,\s*(\d{3,4})$', text) if m: @@ -668,7 +667,7 @@ try: month = self.MONTHNUMBER[m.group(1).lower()] except KeyError: - raise ValueError("Don't know month {}".format(m.group(1))) + raise ValueError(f"Don't know month {m.group(1)}") day = int(m.group(2)) m = re.match(r'(\d{4}),? 
(\d{1,2}) (\w+)', text) if m: @@ -676,7 +675,7 @@ try: month = self.MONTHNUMBER[m.group(3).lower()] except KeyError: - raise ValueError("Don't know month {}".format(m.group(1))) + raise ValueError(f"Don't know month {m.group(1)}") day = int(m.group(2)) m = re.match(r'(\d+)年(\d+)月(\d+)日', text) if m: @@ -697,7 +696,7 @@ raise ValueError('Date seems to have an invalid day number {}' .format(day)) if not year: - raise ValueError("Can't interpret date {}".format(text)) + raise ValueError(f"Can't interpret date {text}") return pywikibot.WbTime(year=year, month=month, day=day, precision=9 if month is None else 10 if day is None else 11) @@ -1047,7 +1046,7 @@ pywikibot.input('Press Enter to continue') pywikibot.output('== longtexts ==') for longtext in longtexts: - pywikibot.output('\n== {} =='.format(longtext[0])) + pywikibot.output(f'\n== {longtext[0]} ==') pywikibot.output(longtext[1]) pywikibot.input('(press enter)')
@@ -1060,9 +1059,9 @@ pywikibot.output('Default is to keep the old value (0)') for i, pos in enumerate(possibilities): if pos is None: - pywikibot.output('{}: No description'.format(i)) + pywikibot.output(f'{i}: No description') else: - pywikibot.output('{}: {}'.format(i, pos)) + pywikibot.output(f'{i}: {pos}') answer = pywikibot.input('Which one to choose? ') try: answer = int(answer) @@ -1091,14 +1090,14 @@ pywikibot.output('New names found:') for language in realnewnames.keys(): for name in realnewnames[language]: - pywikibot.output('{}: {}'.format(language, name)) + pywikibot.output(f'{language}: {name}') result = pywikibot.input('Add these names? (y/n/[S]elect/x) ') if not result or result[0].upper() not in 'YNX': chosennewnames = defaultdict(list) for language in realnewnames.keys(): for name in realnewnames[language]: result = pywikibot.input( - '{}: {} - '.format(language, name)) + f'{language}: {name} - ') if (not result) or result[0].upper() == 'Y': chosennewnames[language].append(name) elif result[0].upper() == 'X': @@ -1266,7 +1265,7 @@ if not ask: return None
- pywikibot.output("Trying to get a {} out of '{}'".format(dtype, text)) + pywikibot.output(f"Trying to get a {dtype} out of '{text}'") answer = pywikibot.input( 'Type Qnnn to let it point to Qnnn from now on,\n' 'Xnnn to let it point to Qnnn only now,\n' @@ -1295,19 +1294,19 @@ if used and not base: continue self.urlbase = base - pywikibot.info('Getting {}'.format(self.url)) + pywikibot.info(f'Getting {self.url}') with suppress(ServerError, ConnectionError): pagerequest = http.fetch(self.url) break else: - pywikibot.info('Unable to load {}'.format(self.url)) + pywikibot.info(f'Unable to load {self.url}') return []
if pagerequest: self.html = pagerequest.text
for extraurl in self.extraurls: - pywikibot.info('Getting {}'.format(extraurl)) + pywikibot.info(f'Getting {extraurl}') try: pagerequest = http.fetch(extraurl) except (ServerError, ConnectionError): @@ -1541,7 +1540,7 @@ result = result.strip().lstrip('(').rstrip(')') result = result.replace('–', '-').replace('‑', '-') if '-' in result: - (start, end) = [r.strip() for r in result.split('-', 1)] + (start, end) = (r.strip() for r in result.split('-', 1)) if start == end: newclaims.append(('P1317', '!date!' + start, self)) else: @@ -2045,7 +2044,7 @@ @property def url(self): # TODO: check whether this is right or needed - return 'http://www.isni.org/%7Bid%7D%27.format(id=self.id).replace(' ', '') + return f'http://www.isni.org/%7Bself.id%7D%27.replace(' ', '')
def findlanguagenames(self, html: str): # TODO: check whether this is right or needed @@ -2158,7 +2157,7 @@ }
def getid(self, name, html): - result = self.findbyre(r'>{}|([^<>]+)'.format(name), html) + result = self.findbyre(fr'>{name}|([^<>]+)', html) if result: return result.replace(' ', '') return None @@ -2256,7 +2255,7 @@ ] iccu = self.getid('ICCU', html) if iccu: - result += [('P396', r'IT\ICCU{}{}'.format(iccu[:4], iccu[4:]))] + result += [('P396', fr'IT\ICCU{iccu[:4]}{iccu[4:]}')] result += self.finddefaultmixedrefs(html) return result
@@ -2538,7 +2537,7 @@ def findbirthdate(self, html: str): result = self.findbyre(r'<li><h3>Birth Date</h3><ul[^<>]*>(\d{8})<', html) if result: - return '{}-{}-{}'.format(result[6:], result[4:6], result[:4]) + return f'{result[6:]}-{result[4:6]}-{result[:4]}'
result = ( self.findbyre(r'(?s)Birth Date</h3><.*?>(?:(.*?))?([^<>]*?)</ul>', html) @@ -2560,7 +2559,7 @@ def finddeathdate(self, html: str): result = self.findbyre(r'<li><h3>Death Date</h3><ul[^<>]*>(\d{8})<', html) if result: - return '{}-{}-{}'.format(result[6:], result[4:6], result[:4]) + return f'{result[6:]}-{result[4:6]}-{result[:4]}'
result = ( self.findbyre(r'(?s)Death Date</h3><.*?>(?:(.*?))?([^<>]*?)</ul>', html) @@ -2963,9 +2962,9 @@ @property def url(self): if self.isfilm: - return 'https://www.imdb.com/title/{id}/'.format(id=self.id) + return f'https://www.imdb.com/title/{self.id}/' if self.isperson: - return 'https://www.imdb.com/name/{id}/'.format(id=self.id) + return f'https://www.imdb.com/name/{self.id}/' return None
@property @@ -3336,7 +3335,7 @@ lat = self.findbyre(r'itemprop="latitude" content="(.*?)"', html) lon = self.findbyre(r'itemprop="longitude" content="(.*?)"', html) if lat and lon: - return '{} {}'.format(lat, lon) + return f'{lat} {lon}'
def findheights(self, html: str): return [self.findbyre(r'(?s)<td>height</td>.*<td>(.*?)</td>', html)] @@ -3524,7 +3523,7 @@ self.hrtre = r'(<h1.*?</table>)'
def getvalue(self, name, html, category=None): - return self.findbyre(r'{}: "(.*?)"'.format(name), html, category) + return self.findbyre(fr'{name}: "(.*?)"', html, category)
def findnames(self, html) -> List[str]: return [self.getvalue('shareTitle', html)] @@ -3687,7 +3686,7 @@ lat = self.findbyre(r'"LAT">(.*?)<', html) lon = self.findbyre(r'"LONGI">(.*?)<', html) if lat and lon: - return '{} {}'.format(lat, lon) + return f'{lat} {lon}'
class MathGenAnalyzer(Analyzer): @@ -3962,7 +3961,7 @@ return html.replace(' ', ' ')
def getvalue(self, field, html, dtype=None): - return self.findbyre(r'(?s)<td[^<>]*>\s*{}\s*</td>\s*<td[^<>]*>(?:<[^<>]*>)*(.*?)<'.format(field), html, dtype) + return self.findbyre(fr'(?s)<td[^<>]*>\s*{field}\s*</td>\s*<td[^<>]*>(?:<[^<>]*>)*(.*?)<', html, dtype)
def findlongtext(self, html: str): return self.getvalue(r'Biogr./Hist. .daje', html) @@ -4223,7 +4222,7 @@
class HdsAnalyzer(Analyzer): def setup(self): - self.id = '{:06d}'.format(int(self.id)) + self.id = f'{int(self.id):06d}' self.dbproperty = 'P902' self.dbid = 'Q642074' self.dbname = 'Historical Dictionary of Switzerland' @@ -5813,7 +5812,7 @@ lat = self.findbyre(r'Lat:\s*(-?\d+.\d+)', html) lon = self.findbyre(r'Long:\s*(-?\d+.\d+)', html) if lat and lon: - return '{} {}'.format(lat, lon) + return f'{lat} {lon}'
class NlpAnalyzer(Analyzer): @@ -6377,7 +6376,7 @@ def getvalues(self, field, html, dtype=None, link=False) -> List[str]: section = self.findbyre(r'(?s)>{}</span>(.*?>)[^<>]+</span><span' .format(field), html) or \ - self.findbyre(r'(?s)>{}</span>(.*)'.format(field), html) + self.findbyre(fr'(?s)>{field}</span>(.*)', html) if section: return self.findallbyre(r'<{}[^<>]*>(.*?)[(<]' .format('a ' if link else 'span'), @@ -6752,7 +6751,7 @@ self.language = 'nl'
def getentry(self, naam, html, dtype=None): - return self.findbyre(r'(?s){}<.*?>([^<>]*)</div>'.format(naam), html, dtype) + return self.findbyre(fr'(?s){naam}<.*?>([^<>]*)</div>', html, dtype)
def finddescription(self, html: str): return self.findbyre(r'<h1>(.*?)<', html) @@ -7907,7 +7906,7 @@
def getvalue(self, field, html, dtype=None): return self.findbyre( - r'(?s)<div class="InformationBoxTitle">{}:</div>\s*<div class="InformationBoxContents">(.*?)</div>'.format(field), + fr'(?s)<div class="InformationBoxTitle">{field}:</div>\s*<div class="InformationBoxContents">(.*?)</div>', html, dtype)
def getvalues(self, field, html, dtype=None) -> List[str]: @@ -11830,15 +11829,15 @@ section = self.getvalue('Nome', html) if section: result.append(self.findbyre(r'([^&]+)', section).replace(':', '')) - pywikibot.info('section: {}, result: {}'.format(section, result)) + pywikibot.info(f'section: {section}, result: {result}') section = self.getvalue('Nome su edizioni', html) if section: result += self.findallbyre(r'([^;]+)', section) - pywikibot.info('section: {}, result: {}'.format(section, result)) + pywikibot.info(f'section: {section}, result: {result}') section = self.getvalue('Fonti', html) if section: result += self.findallbyre(r'((.*?))', section) - pywikibot.info('section: {}, result: {}'.format(section, result)) + pywikibot.info(f'section: {section}, result: {result}') return result
def finddescriptions(self, html: str): @@ -11886,7 +11885,7 @@ if field: if splitter == '<': return self.findallbyre('>(.*?)<', '>' + field + '<', dtype) - return self.findallbyre('[^{}]+'.format(splitter), field, dtype) + return self.findallbyre(f'[^{splitter}]+', field, dtype) return []
def findnames(self, html) -> List[str]: @@ -13868,7 +13867,7 @@ self.id = self.id.replace(' ', '_') if self.language in ['commons', 'species']: site = 'wikimedia' - self.dbname = '{} {}'.format(site.title(), self.language.upper()) + self.dbname = f'{site.title()} {self.language.upper()}' self.urlbase = 'https://{}.{}.org/wiki/{{id}}'.format( self.language, site) self.urlbase3 = 'https://{}.{}.org/w/index.php?title={{id}}&veswitched=1&... @@ -13927,7 +13926,7 @@ sections = self.findallbyre(r'(?is)[\b|_\s]%s\s*=((?:[^|、{}]|{{[^{}]*}})+)' % name, box, alt=alt) for section in sections: result += self.findallbyre( - r'([^{}]+)'.format(splitters), section, dtype) + fr'([^{splitters}]+)', section, dtype) return result
def getinfo(self, names, html, dtype=None, splitters=None, alt=None) -> str: @@ -13946,7 +13945,7 @@ preresult = self.findallbyre(r'(?is)[\b|_\s]%s\s*=((?:[^|{}]|{{[^{}]*}})+)' % name, box, alt=alt) for section in preresult: result += self.findallbyre( - r'([^{}]+)'.format(splitters), section, dtype) + fr'([^{splitters}]+)', section, dtype) if result: return result[0]
@@ -15099,7 +15098,7 @@ self.dbproperty = None self.dbid = 'Q2013' self.urlbase = None - self.sparqlquery = 'SELECT ?a ?b WHERE {{ ?a ?b wd:{} }}'.format(self.id) + self.sparqlquery = f'SELECT ?a ?b WHERE {{ ?a ?b wd:{self.id} }}' self.skipfirst = True self.hrtre = '()' self.language = 'en' @@ -15112,7 +15111,7 @@
def findlongtext(self, html: str): matches = re.findall(r'statement/([qQ]\d+)[^{}]+statement/([pP]\d+)', html) - return '\n'.join('{} of: {}'.format(self.bot.label(m[1]), self.bot.label(m[0])) for m in matches) + return '\n'.join(f'{self.bot.label(m[1])} of: {self.bot.label(m[0])}' for m in matches)
def findspouses(self, html: str): return self.getrelations('P26', html) diff --git a/scripts/delete.py b/scripts/delete.py index fc90773..a8b9cdc 100755 --- a/scripts/delete.py +++ b/scripts/delete.py @@ -169,21 +169,21 @@ n_pages_in_ns = len(refs[ns]) plural = '' if n_pages_in_ns == 1 else 's' ns_name = ns.canonical_prefix() if ns != ns.MAIN else 'Main:' - ns_id = '[{}]'.format(ns.id) + ns_id = f'[{ns.id}]' pywikibot.output( ' {0!s:<{width}} {1:>6} {2:>10} page{pl}'.format( ns_name, ns_id, n_pages_in_ns, width=width, pl=plural)) if show_n_pages: # do not show marker if 0 pages are requested. for page in islice_with_ellipsis(refs[ns], show_n_pages): - pywikibot.output(' {!s}'.format(page.title())) + pywikibot.output(f' {page.title()!s}')
def skip_page(self, page) -> bool: """Skip the page under some conditions.""" if self.opt.undelete and page.exists(): - pywikibot.output('Skipping: {} already exists.'.format(page)) + pywikibot.output(f'Skipping: {page} already exists.') return True if not self.opt.undelete and not page.exists(): - pywikibot.output('Skipping: {} does not exist.'.format(page)) + pywikibot.output(f'Skipping: {page} does not exist.') return True return super().skip_page(page)
diff --git a/scripts/delinker.py b/scripts/delinker.py index 872f144..b4b1aa47 100644 --- a/scripts/delinker.py +++ b/scripts/delinker.py @@ -41,8 +41,8 @@ import pywikibot from pywikibot.backports import removeprefix from pywikibot.bot import ( - ConfigParserBot, AutomaticTWSummaryBot, + ConfigParserBot, SingleSiteBot, calledModuleName, ) @@ -127,7 +127,7 @@ if not hasattr(self, 'last_ts'): return
- pywikibot.output("\nUpdate 'since' to {} file".format(self.INI)) + pywikibot.output(f"\nUpdate 'since' to {self.INI} file") conf = configparser.ConfigParser(inline_comment_prefixes=[';']) conf.read(self.INI) section = calledModuleName() diff --git a/scripts/djvutext.py b/scripts/djvutext.py index 4c85336..f3f265b 100755 --- a/scripts/djvutext.py +++ b/scripts/djvutext.py @@ -181,7 +181,7 @@ djvu = DjVuFile(djvu_path)
if not djvu.has_text(): - pywikibot.error('No text layer in djvu file {}'.format(djvu.file)) + pywikibot.error(f'No text layer in djvu file {djvu.file}') return
# Parse pages param. diff --git a/scripts/download_dump.py b/scripts/download_dump.py index 009a62f..3d713a1 100755 --- a/scripts/download_dump.py +++ b/scripts/download_dump.py @@ -48,7 +48,7 @@ @staticmethod def get_dump_name(db_name, typ, dumpdate): """Check if dump file exists locally in a Toolforge server.""" - db_path = '/public/dumps/public/{}/'.format(db_name) + db_path = f'/public/dumps/public/{db_name}/' if os.path.isdir(db_path): dump_filepath_template = ( '/public/dumps/public/{db_name}/{date}/{db_name}-{date}-{typ}') diff --git a/scripts/fixing_redirects.py b/scripts/fixing_redirects.py index b147f86..459a094 100755 --- a/scripts/fixing_redirects.py +++ b/scripts/fixing_redirects.py @@ -138,7 +138,7 @@
if ((new_page_title == link_text and not section) or self.opt.overwrite): - newlink = '[[{}]]'.format(new_page_title) + newlink = f'[[{new_page_title}]]' # check if we can create a link with trailing characters instead of # a pipelink elif (len(new_page_title) <= len(link_text) diff --git a/scripts/harvest_template.py b/scripts/harvest_template.py index 36453ab..9346e18 100755 --- a/scripts/harvest_template.py +++ b/scripts/harvest_template.py @@ -118,9 +118,9 @@ from typing import Any, Iterator, Optional
import pywikibot +from pywikibot import WbTime from pywikibot import pagegenerators as pg from pywikibot import textlib -from pywikibot import WbTime from pywikibot.backports import List, Tuple from pywikibot.bot import ConfigParserBot, OptionHandler, WikidataBot from pywikibot.exceptions import ( @@ -219,7 +219,7 @@ """Fetch redirects of the title, so we can check against them.""" temp = pywikibot.Page(self.site, title, ns=10) if not temp.exists(): - sys.exit('Template {} does not exist.'.format(temp.title())) + sys.exit(f'Template {temp.title()} does not exist.')
# Put some output here since it can take a while pywikibot.output('Finding redirects...') @@ -460,7 +460,7 @@ if out is None: out = data elif out != data: - pywikibot.output('Found ambiguous date: "{}"'.format(value)) + pywikibot.output(f'Found ambiguous date: "{value}"') return
yield WbTime.fromWikibase(out, self.repo) diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py index 738484a..e2232c9 100755 --- a/scripts/imagetransfer.py +++ b/scripts/imagetransfer.py @@ -213,7 +213,7 @@ description += sourceImagePage.getFileVersionHistoryTable() # add interwiki link if sourceSite.family == self.opt.target.family: - description += '\n\n{}'.format(sourceImagePage) + description += f'\n\n{sourceImagePage}' except NoPageError: pywikibot.output( 'Image does not exist or description page is empty.') diff --git a/scripts/interwiki.py b/scripts/interwiki.py index b18c307..a303909 100755 --- a/scripts/interwiki.py +++ b/scripts/interwiki.py @@ -1023,7 +1023,7 @@ with codecs.open( pywikibot.config.datafilepath('without_interwiki.txt'), 'a', 'utf-8') as f: - f.write('# {} \n'.format(page)) + f.write(f'# {page} \n')
def askForHints(self, counter) -> None: """Ask for hints to other sites.""" @@ -1101,9 +1101,9 @@ counter.minus(site, count) self.todo.clear() elif not self.conf.followredirect: - self.conf.note('not following {}redirects.'.format(redir)) + self.conf.note(f'not following {redir}redirects.') elif page.isStaticRedirect(): - self.conf.note('not following static {}redirects.'.format(redir)) + self.conf.note(f'not following static {redir}redirects.') elif (page.site.family == redirect_target.site.family and not self.skipPage(page, redirect_target, counter) and self.addIfNew(redirect_target, counter, page) @@ -1117,7 +1117,7 @@ """Check whether any iw links should be added to the todo list.""" if not page.exists(): self.conf.remove.append(str(page)) - self.conf.note('{} does not exist. Skipping.'.format(page)) + self.conf.note(f'{page} does not exist. Skipping.') if page == self.origin: # The page we are working on is the page that does not # exist. No use in doing any work on it in that case. @@ -1136,7 +1136,7 @@ # otherwise a redirect error would be raised if page_empty_check(page): self.conf.remove.append(str(page)) - self.conf.note('{} is empty. Skipping.'.format(page)) + self.conf.note(f'{page} is empty. Skipping.') if page == self.origin: for site, count in self.todo.iter_values_len(): counter.minus(site, count) @@ -1146,7 +1146,7 @@ return
if page.section(): - self.conf.note('{} is a page section. Skipping.'.format(page)) + self.conf.note(f'{page} is a page section. Skipping.') return
# Page exists, isn't a redirect, and is a plain link (no section) @@ -1158,7 +1158,7 @@ try: iw = page.langlinks() except UnknownSiteError: - self.conf.note('site {} does not exist.'.format(page.site)) + self.conf.note(f'site {page.site} does not exist.') return
(skip, alternativePage) = self.disambigMismatch(page, counter) @@ -1392,7 +1392,7 @@ if not acceptall: pywikibot.output('=' * 30) page2 = pages[0] - pywikibot.output('Found link to {} in:'.format(page2)) + pywikibot.output(f'Found link to {page2} in:') self.whereReport(page2, indent=4)
# TODO: allow answer to repeat previous or go back @@ -1442,7 +1442,7 @@
def post_processing(self): """Some finishing processes to be done.""" - pywikibot.output('======Post-processing {}======'.format(self.origin)) + pywikibot.output(f'======Post-processing {self.origin}======') # Assemble list of accepted interwiki links new = self.assemble() if new is None: # User said give up @@ -1503,7 +1503,7 @@ page = pywikibot.Page(link) old[page.site] = page except NoPageError: - pywikibot.error('{} no longer exists?'.format(new[site])) + pywikibot.error(f'{new[site]} no longer exists?') continue *_, adding, removing, modifying = compareLanguages( old, new, lclSite, self.conf.summary) @@ -1558,7 +1558,7 @@ raise SaveError("Page doesn't exist")
if page_empty_check(page): - pywikibot.output('Not editing {}: page is empty'.format(page)) + pywikibot.output(f'Not editing {page}: page is empty') raise SaveError('Page is empty.')
# clone original newPages dictionary, so that we can modify it to the @@ -1629,12 +1629,12 @@ mods, mcomment, adding, removing, modifying = compareLanguages( old, new, page.site, self.conf.summary) if not mods: - self.conf.note('No changes needed on page {}'.format(page)) + self.conf.note(f'No changes needed on page {page}') return False
pywikibot.info('<<lightpurple>>Updating links on page {}.' .format(page)) - pywikibot.info('Changes to be made: {}'.format(mods)) + pywikibot.info(f'Changes to be made: {mods}') oldtext = page.get() template = (page.namespace() == 10) newtext = textlib.replaceLanguageLinks(oldtext, new, @@ -1643,7 +1643,7 @@ # This is for now. Later there should be different funktions for each # kind if not botMayEdit(page): - pywikibot.info('SKIPPING: {} '.format(page), newline=False) + pywikibot.info(f'SKIPPING: {page} ', newline=False) if template: msg = 'should have interwiki links on subpage.' else: @@ -2265,7 +2265,7 @@ pywikibot.error('Cannot delete {} due to\n{}\nDo it manually.' .format(tail, e)) else: - pywikibot.output('Dumpfile {} deleted'.format(tail)) + pywikibot.output(f'Dumpfile {tail} deleted')
def main(*args: str) -> None: diff --git a/scripts/interwikidata.py b/scripts/interwikidata.py index dfeebf1..7326b3d 100755 --- a/scripts/interwikidata.py +++ b/scripts/interwikidata.py @@ -143,7 +143,7 @@
item = pywikibot.ItemPage(self.repo) item.editEntity(data, new='item', summary=summary) - output('Created item {item}'.format(item=item.getID())) + output(f'Created item {item.getID()}') return item
def handle_complicated(self) -> bool: diff --git a/scripts/listpages.py b/scripts/listpages.py index 1ca7def..7489bcd 100755 --- a/scripts/listpages.py +++ b/scripts/listpages.py @@ -151,7 +151,7 @@ self.trs_title = page._link.ns_title(onsite=self.onsite) # Fallback if no corresponding namespace is found in onsite. except Error: - self.trs_title = '{}:{}'.format(default, page._link.title) + self.trs_title = f'{default}:{page._link.title}'
def output(self, num=None, fmt: str = '1') -> str: """Output formatted string.""" @@ -203,7 +203,7 @@ if self.opt.save: filename = os.path.join(self.opt.save, page.title(as_filename=True)) - pywikibot.info('Saving {} to {}'.format(page.title(), filename)) + pywikibot.info(f'Saving {page.title()} to {filename}') with open(filename, mode='wb') as f: f.write(page.text.encode(self.opt.encode)) self.counter['save'] += 1 diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py index 48da334..36b7fef 100755 --- a/scripts/maintenance/cache.py +++ b/scripts/maintenance/cache.py @@ -229,10 +229,10 @@ """ if not cache_path: cache_path = os.path.join(pywikibot.config.base_dir, - 'apicache-py{:d}'.format(PYTHON_VERSION[0])) + f'apicache-py{PYTHON_VERSION[0]:d}')
if not os.path.exists(cache_path): - pywikibot.error('{}: no such file or directory'.format(cache_path)) + pywikibot.error(f'{cache_path}: no such file or directory') return
if os.path.isdir(cache_path): @@ -320,7 +320,7 @@ except Exception as e: pywikibot.error(e) pywikibot.error( - 'Cannot compile {} command: {}'.format(name, command)) + f'Cannot compile {name} command: {command}') return None
@@ -469,7 +469,7 @@
for cache_path in cache_paths: if len(cache_paths) > 1: - pywikibot.output('Processing {}'.format(cache_path)) + pywikibot.output(f'Processing {cache_path}') process_entries(cache_path, filter_func, output_func=output_func, action_func=action_func)
diff --git a/scripts/maintenance/colors.py b/scripts/maintenance/colors.py index dd2463e..7a36606 100755 --- a/scripts/maintenance/colors.py +++ b/scripts/maintenance/colors.py @@ -32,9 +32,9 @@ line += ' ' line += '<<{color}>>{}<<default>>'.format( fg_col.ljust(max_len_fg_colors), - color='{};{}'.format(fg_col, bg_col)) + color=f'{fg_col};{bg_col}')
- line = '{} {}'.format(bg_col.ljust(max_len_bc_color), line) + line = f'{bg_col.ljust(max_len_bc_color)} {line}' pywikibot.output(line)
pywikibot.output() diff --git a/scripts/maintenance/make_i18n_dict.py b/scripts/maintenance/make_i18n_dict.py index 6778d8b..5e9768b 100755 --- a/scripts/maintenance/make_i18n_dict.py +++ b/scripts/maintenance/make_i18n_dict.py @@ -57,7 +57,7 @@ if hasattr(self.script, msg): self.messages[msg] = msg else: - print('message {} not found'.format(msg)) + print(f'message {msg} not found') for new, old in kwargs.items(): self.messages[old] = new.replace('_', '-') self.dict = {} @@ -78,9 +78,9 @@
print('msg = {') for code in keys: - print(" '{}': {{".format(code)) + print(f" '{code}': {{") for msg in sorted(self.messages.values()): - label = '{}-{}'.format(self.scriptname, msg) + label = f'{self.scriptname}-{msg}' if label in self.dict[code]: print(" '{}': '{}'," .format(label, self.dict[code][label])) @@ -95,7 +95,7 @@ if newmsg is None: newmsg = oldmsg for code in keys: - label = '{}-{}'.format(self.scriptname, newmsg) + label = f'{self.scriptname}-{newmsg}' if code == 'qqq': if code not in self.dict: self.dict[code] = {} @@ -137,7 +137,7 @@ if not os.path.exists(json_dir): os.makedirs(json_dir) for lang in self.dict: - file_name = os.path.join(json_dir, '{}.json'.format(lang)) + file_name = os.path.join(json_dir, f'{lang}.json') if os.path.isfile(file_name): with codecs.open(file_name, 'r', 'utf-8') as json_file: new_dict = json.loads(json_file.read()) diff --git a/scripts/maintenance/sorting_order.py b/scripts/maintenance/sorting_order.py index 0b21ca2..b515775 100755 --- a/scripts/maintenance/sorting_order.py +++ b/scripts/maintenance/sorting_order.py @@ -47,13 +47,13 @@ return
pywikibot.output("The lists don't match, the new list is:") - text = ' {} = [\n'.format(list_name) + text = f' {list_name} = [\n' line = ' ' * 7 for code in new: if len(line) + len(code) >= 76: text += line + '\n' line = ' ' * 7 - line += " '{}',".format(code) + line += f" '{code}'," text += line + '\n' text += ' ]' pywikibot.output(text) diff --git a/scripts/maintenance/wikimedia_sites.py b/scripts/maintenance/wikimedia_sites.py index eb4aa6a..06bcc6f 100755 --- a/scripts/maintenance/wikimedia_sites.py +++ b/scripts/maintenance/wikimedia_sites.py @@ -40,7 +40,7 @@ """Update family files.""" ws = wikistats.WikiStats() for family in families or families_list: - pywikibot.output('\nChecking family {}:'.format(family)) + pywikibot.output(f'\nChecking family {family}:')
original = Family.load(family).languages_by_size for code in exceptions.get(family, []): @@ -78,11 +78,11 @@ if len(line) + len(code) >= 76: text += line + '\n' line = ' ' * 7 - line += " '{}',".format(code) + line += f" '{code}'," text += line + '\n' text += ' ]' pywikibot.output(text) - family_file_name = 'pywikibot/families/{}_family.py'.format(family) + family_file_name = f'pywikibot/families/{family}_family.py' with codecs.open(family_file_name, 'r', 'utf8') as family_file: family_text = family_file.read() family_text = re.sub(r'(?ms)^ {4}languages_by_size.+?]', diff --git a/scripts/movepages.py b/scripts/movepages.py index 3be9e3f..a69f47d 100755 --- a/scripts/movepages.py +++ b/scripts/movepages.py @@ -111,9 +111,9 @@ def create_new_title_append(start, end, page, namespace=None): """Append helper function.""" page_title = page.title(with_ns=False) - new_page_tite = '{}{}{}'.format(start, page_title, end) + new_page_tite = f'{start}{page_title}{end}' if namespace is not None: - new_page_tite = '{}:{}'.format(namespace, new_page_tite) + new_page_tite = f'{namespace}:{new_page_tite}' return new_page_tite
def create_new_title_regex(regex, replacement, page, namespace=None): @@ -121,7 +121,7 @@ page_title = page.title(with_ns=False) new_page_title = regex.sub(replacement, page_title) if namespace is not None: - new_page_title = '{}:{}'.format(namespace, new_page_title) + new_page_title = f'{namespace}:{new_page_title}' return new_page_title
def manage_namespace(page): @@ -170,7 +170,7 @@ def create_new_title_prefix(prefix, page): """Replace prefix helper function.""" page_title = page.title(with_ns=False) - return '{}{}'.format(prefix, page_title) + return f'{prefix}{page_title}'
if prefix: handler = partial(create_new_title_prefix, prefix) @@ -247,21 +247,21 @@ options[opt.replace('no', 'move', 1)] = False elif opt == 'from': if old_name: - pywikibot.warning('-from:{} without -to:'.format(old_name)) + pywikibot.warning(f'-from:{old_name} without -to:') old_name = value elif opt == 'to': if old_name: from_to_pairs.append([old_name, value]) old_name = None else: - pywikibot.warning('{} without -from'.format(arg)) + pywikibot.warning(f'{arg} without -from') elif opt == 'prefix': options[opt] = value or pywikibot.input('Enter the prefix:') elif opt == 'summary': options[opt] = value or pywikibot.input('Enter the summary:')
if old_name: - pywikibot.warning('-from:{} without -to:'.format(old_name)) + pywikibot.warning(f'-from:{old_name} without -to:')
site = pywikibot.Site()
diff --git a/scripts/newitem.py b/scripts/newitem.py index e6469d3..ad5a7cd 100755 --- a/scripts/newitem.py +++ b/scripts/newitem.py @@ -90,7 +90,7 @@ pywikibot.error('Page {} is locked.'.format( page.title(as_link=True))) except PageSaveRelatedError as e: - pywikibot.error('Page {} not saved:\n{}'.format(page, e.args)) + pywikibot.error(f'Page {page} not saved:\n{e.args}')
def _callback(self, page, exc) -> None: if exc is None and self.opt.touch: diff --git a/scripts/noreferences.py b/scripts/noreferences.py index 59eb118..948bc9b 100755 --- a/scripts/noreferences.py +++ b/scripts/noreferences.py @@ -598,7 +598,7 @@ # Set the edit summary for this case self.comment = i18n.twtranslate(self.site, 'noreferences-add-tag') for section in i18n.translate(self.site, referencesSections): - sectionR = re.compile(r'\r?\n=+ *{} *=+ *\r?\n'.format(section)) + sectionR = re.compile(fr'\r?\n=+ *{section} *=+ *\r?\n') index = 0 while index < len(oldText): match = sectionR.search(oldText, index) @@ -617,7 +617,7 @@ new_text = ( oldText[:match.end() - 1] + templates_or_comments.sub( - r'\1\n{}\n'.format(self.referencesText), + fr'\1\n{self.referencesText}\n', oldText[match.end() - 1:])) return new_text else: @@ -658,7 +658,7 @@ # At the end, look at the length of the temp text. That's the position # where we'll insert the references section. catNamespaces = '|'.join(self.site.namespaces.CATEGORY) - categoryPattern = r'[[\s*({})\s*:[^\n]*]]\s*'.format(catNamespaces) + categoryPattern = fr'[[\s*({catNamespaces})\s*:[^\n]*]]\s*' interwikiPattern = r'[[([a-zA-Z-]+)\s?:([^[]\n]*)]]\s*' # won't work with nested templates # the negative lookahead assures that we'll match the last template @@ -700,7 +700,7 @@ :return: the amended page text with reference section added """ if self.site.code in noTitleRequired: - ref_section = '\n\n{}\n'.format(self.referencesText) + ref_section = f'\n\n{self.referencesText}\n' else: ref_section = '\n\n{ident} {title} {ident}\n{text}\n'.format( title=i18n.translate(self.site, referencesSections)[0], diff --git a/scripts/pagefromfile.py b/scripts/pagefromfile.py index 05dc9e9..2f3554e 100755 --- a/scripts/pagefromfile.py +++ b/scripts/pagefromfile.py @@ -70,7 +70,7 @@
import pywikibot from pywikibot import config, i18n -from pywikibot.backports import Tuple, Iterator +from pywikibot.backports import Iterator, Tuple from pywikibot.bot import CurrentPageBot, OptionHandler, SingleSiteBot from pywikibot.pagegenerators import PreloadingGenerator from pywikibot.tools.collections import GeneratorWrapper @@ -207,7 +207,7 @@ .. versionchanged:: 7.6 changed from iterator method to generator property """ - pywikibot.output("\n\nReading '{}'...".format(self.filename)) + pywikibot.output(f"\n\nReading '{self.filename}'...") try: with codecs.open(self.filename, 'r', encoding=config.textfile_encoding) as f: @@ -301,7 +301,7 @@ elif option in ('nocontent', 'summary'): options[option] = value else: - pywikibot.output('Disregarding unknown argument {}.'.format(arg)) + pywikibot.output(f'Disregarding unknown argument {arg}.')
options['always'] = 'showdiff' not in options
@@ -309,7 +309,7 @@ # User can quit. failed_filename = False while not os.path.isfile(filename): - pywikibot.output("\nFile '{}' does not exist. ".format(filename)) + pywikibot.output(f"\nFile '{filename}' does not exist. ") _input = pywikibot.input( 'Please enter the file name [q to quit]:') if _input == 'q': diff --git a/scripts/parser_function_count.py b/scripts/parser_function_count.py index df996e1..563efdd 100755 --- a/scripts/parser_function_count.py +++ b/scripts/parser_function_count.py @@ -157,7 +157,7 @@ .format(result=result) for result in self.results.most_common(self.opt.first)) pywikibot.output(resultlist) - pywikibot.output('{} templates were found.'.format(len(self.results))) + pywikibot.output(f'{len(self.results)} templates were found.')
# File operations: if self.opt.save: diff --git a/scripts/patrol.py b/scripts/patrol.py index e26aa52..5d15203 100755 --- a/scripts/patrol.py +++ b/scripts/patrol.py @@ -358,10 +358,10 @@ linkedpages.append(linkedpage.title())
self.linkedpages = linkedpages - verbose_output('Loaded {} page links'.format(len(linkedpages))) + verbose_output(f'Loaded {len(linkedpages)} page links')
for p in self.linkedpages: - verbose_output("Checking against '{}'".format(p)) + verbose_output(f"Checking against '{p}'") if page_title.startswith(p): verbose_output('Matched.') return True @@ -389,7 +389,7 @@ else: yield page[1] if repeat: - pywikibot.output('Sleeping for {} seconds'.format(delay)) + pywikibot.output(f'Sleeping for {delay} seconds') pywikibot.sleep(delay) else: break @@ -437,7 +437,7 @@ if usercontribs: user = pywikibot.User(site, usercontribs) if user.isAnonymous() or user.isRegistered(): - pywikibot.output('Processing user: {}'.format(usercontribs)) + pywikibot.output(f'Processing user: {usercontribs}') else: pywikibot.warning('User {} does not exist on site {}.' .format(usercontribs, site)) diff --git a/scripts/protect.py b/scripts/protect.py index 23f995a..b80d393 100755 --- a/scripts/protect.py +++ b/scripts/protect.py @@ -199,7 +199,7 @@ protections[option] = value else: if not gen_factory.handle_arg(arg): - raise ValueError('Unknown parameter "{}"'.format(arg)) + raise ValueError(f'Unknown parameter "{arg}"') if value: message_properties.update({'cat': value, 'page': value}) if 'summary' not in options: @@ -210,11 +210,11 @@ if message_type == 'simple' or message_properties: if default_level == 'all': options['summary'] = i18n.twtranslate( - site, 'unprotect-{}'.format(message_type), + site, f'unprotect-{message_type}', message_properties) else: options['summary'] = i18n.twtranslate( - site, 'protect-{}'.format(message_type), + site, f'protect-{message_type}', message_properties)
generator = gen_factory.getCombinedGenerator() diff --git a/scripts/redirect.py b/scripts/redirect.py index 9029760..402c719 100755 --- a/scripts/redirect.py +++ b/scripts/redirect.py @@ -156,7 +156,7 @@ readPagesCount += 1 # always print status message after 10000 pages if readPagesCount % 10000 == 0: - pywikibot.output('{} pages read...'.format(readPagesCount)) + pywikibot.output(f'{readPagesCount} pages read...') if self.opt.namespaces and pywikibot.Page( self.site, entry.title).namespace() not in self.opt.namespaces: @@ -265,7 +265,7 @@ 'pageids': apiQ}) data = gen.submit() if 'error' in data: - raise RuntimeError('API query error: {}'.format(data)) + raise RuntimeError(f'API query error: {data}') if data == [] or 'query' not in data: raise RuntimeError('No results given.') pages = {} @@ -435,7 +435,7 @@
pywikibot.warning( 'No speedy deletion template {}available.' - .format('"{}" '.format(title) if title else '')) + .format(f'"{title}" ' if title else '')) return None
@property @@ -465,7 +465,7 @@ :param summary_key: The message key for the deletion summary """ assert page.site == self.current_page.site, ( - 'target page is on different site {}'.format(page.site)) + f'target page is on different site {page.site}') reason = i18n.twtranslate(page.site, summary_key) if page.site.has_right('delete'): page.delete(reason, prompt=False) @@ -490,9 +490,9 @@ return page.getRedirectTarget() except (CircularRedirectError, RuntimeError) as e: pywikibot.error(e) - pywikibot.output('Skipping {}.'.format(page)) + pywikibot.output(f'Skipping {page}.') except InterwikiRedirectPageError: - pywikibot.output('{} is on another site, skipping.'.format(page)) + pywikibot.output(f'{page} is on another site, skipping.') return None
def delete_1_broken_redirect(self) -> None: @@ -583,7 +583,7 @@ .format(newRedir.title(as_link=True))) except UnsupportedPageError as e: pywikibot.error(e) - pywikibot.output('Skipping {}.'.format(newRedir)) + pywikibot.output(f'Skipping {newRedir}.') break except NoPageError: title = newRedir.title(as_link=True) @@ -593,7 +593,7 @@ .format(title)) break # skip if automatic pywikibot.warning( - "Redirect target {} doesn't exist.".format(title)) + f"Redirect target {title} doesn't exist.") except ServerError: pywikibot.output('Skipping due to server error: ' 'No textarea found') diff --git a/scripts/reflinks.py b/scripts/reflinks.py index 7cfba8e..b0383c4 100755 --- a/scripts/reflinks.py +++ b/scripts/reflinks.py @@ -227,7 +227,7 @@ else: if '%s' in tag: tag %= self.link - dead_link = '<ref{}>{}</ref>'.format(self.name, tag) + dead_link = f'<ref{self.name}>{tag}</ref>' return dead_link
def transform(self, ispdf: bool = False) -> None: @@ -370,7 +370,7 @@
# Fix references for groupname, references in found_refs.items(): - group = 'group="{}" '.format(groupname) if groupname else '' + group = f'group="{groupname}" ' if groupname else ''
for ref, v in references.items(): if len(v[IX.reflist]) == 1 and not v[IX.change_needed]: @@ -378,11 +378,11 @@
name = v[IX.name] if not name: - name = '"{}{}"'.format(self.autogen, next(free_number)) + name = f'"{self.autogen}{next(free_number)}"' elif v[IX.quoted]: - name = '"{}"'.format(name) + name = f'"{name}"'
- named = '<ref {}name={}>{}</ref>'.format(group, name, ref) + named = f'<ref {group}name={name}>{ref}</ref>' text = text.replace(v[IX.reflist][0], named, 1)
# make sure that the first (named ref) is not removed later @@ -391,7 +391,7 @@ end = text[pos:]
# replace multiple identical references with repeated ref - repeated_ref = '<ref {}name={} />'.format(group, name) + repeated_ref = f'<ref {group}name={name} />' for ref in v[IX.reflist][1:]: # Don't replace inside templates (T266411) end = replaceExcept(end, re.escape(ref), repeated_ref, @@ -403,12 +403,12 @@ # TODO : Support ref groups name = v[IX.name] if v[IX.reflist]: - name = '"{}"'.format(name) + name = f'"{name}"'
text = re.sub( r'<ref name\s*=\s*(?P<quote>["\']?)\s*{}\s*(?P=quote)\s*/>' .format(ref), - '<ref name={} />'.format(name), text) + f'<ref name={name} />', text) return text
@@ -441,7 +441,7 @@ code = alt break if code: - manual += '/{}'.format(code) + manual += f'/{code}'
if self.opt.summary: self.msg = self.opt.summary @@ -450,7 +450,7 @@
local = i18n.translate(self.site, badtitles) if local: - bad = '({}|{})'.format(globalbadtitles, local) + bad = f'({globalbadtitles}|{local})' else: bad = globalbadtitles
@@ -618,7 +618,7 @@ # purposely removed if r.status_code == HTTPStatus.GONE \ or (r.status_code == HTTPStatus.NOT_FOUND - and '\t{}\t'.format(ref.url) in self.dead_links): + and f'\t{ref.url}\t' in self.dead_links): repl = ref.refDead() new_text = new_text.replace(match.group(), repl) continue @@ -690,7 +690,7 @@ if not ref.title: repl = ref.refLink() new_text = new_text.replace(match.group(), repl) - pywikibot.output('{} : No title found...'.format(ref.link)) + pywikibot.output(f'{ref.link} : No title found...') continue
if self.titleBlackList.match(ref.title): diff --git a/scripts/replace.py b/scripts/replace.py index 3825cae..fd25330 100755 --- a/scripts/replace.py +++ b/scripts/replace.py @@ -221,7 +221,7 @@ changes to the MediaWiki server, the edit summary includes the descriptions of each replacement that you applied to the page. """ - return '-{} +{}'.format(self.old, self.new) + return f'-{self.old} +{self.new}'
@property def container(self): @@ -647,7 +647,7 @@ default_summary = comma.join( '-{} +{}'.format(*default_summary) for default_summary in default_summaries) - desc = {'description': ' ({})'.format(default_summary)} + desc = {'description': f' ({default_summary})'} summary_messages.insert(0, msg % desc)
semicolon = self.site.mediawiki_message('semicolon-separator') @@ -665,7 +665,7 @@ return True
if not page.has_permission(): - pywikibot.warning("You can't edit page {}".format(page)) + pywikibot.warning(f"You can't edit page {page}") return True
return False @@ -824,9 +824,9 @@ # strip newlines, but not other characters replacements = f.read().splitlines() if not replacements: - raise OSError('{} is empty.'.format(filename)) + raise OSError(f'{filename} is empty.') except OSError as e: - pywikibot.error('Error loading {}: {}'.format(filename, e)) + pywikibot.error(f'Error loading {filename}: {e}') return None
if len(replacements) % 2: @@ -988,7 +988,7 @@ single_summary = i18n.twtranslate( site, 'replace-replacing', {'description': - ' (-{} +{})'.format(replacement.old, replacement.new)} + f' (-{replacement.old} +{replacement.new})'} ) replacements.append(replacement)
@@ -1034,7 +1034,7 @@ summary = None if len(replacement) < 3 else replacement[2] if not set_summary and not summary: missing_fix_summaries.append( - '"{}" (replacement #{})'.format(fix_name, index)) + f'"{fix_name}" (replacement #{index})') if chars.contains_invisible(replacement[0]): pywikibot.warning('The old string "{}" contains formatting ' 'characters like U+200E'.format( @@ -1063,7 +1063,7 @@
if len(fix['replacements']) == len(missing_fix_summaries): missing_fixes_summaries.append( - '"{}" (all replacements)'.format(fix_name)) + f'"{fix_name}" (all replacements)') else: missing_fixes_summaries += missing_fix_summaries
diff --git a/scripts/replicate_wiki.py b/scripts/replicate_wiki.py index bbfaf20..922969b 100755 --- a/scripts/replicate_wiki.py +++ b/scripts/replicate_wiki.py @@ -83,7 +83,7 @@ if options.namespace and 'help' in options.namespace: for namespace in self.original.namespaces.values(): pywikibot.output( - '{} {}'.format(namespace.id, namespace.custom_name)) + f'{namespace.id} {namespace.custom_name}') sys.exit()
self.sites = [pywikibot.Site(s, family) for s in sites] @@ -133,7 +133,7 @@
def check_namespace(self, namespace) -> None: """Check an entire namespace.""" - pywikibot.output('\nCHECKING NAMESPACE {}'.format(namespace)) + pywikibot.output(f'\nCHECKING NAMESPACE {namespace}') pages = (p.title() for p in self.original.allpages( '!', namespace=namespace)) for p in pages: @@ -156,7 +156,7 @@ .format(site.user())) output = '== Pages that differ from original ==\n\n' if self.differences[site]: - output += ''.join('* [[:{}]]\n'.format(page_title) + output += ''.join(f'* [[:{page_title}]]\n' for page_title in self.differences[site]) else: output += 'All important pages are the same' @@ -206,11 +206,11 @@ if txt1 != txt_new: pywikibot.output( 'NOTE: text replaced using config.sync_replace') - pywikibot.output('{} {} {}'.format(txt1, txt_new, txt2)) + pywikibot.output(f'{txt1} {txt_new} {txt2}') txt1 = txt_new
if txt1 != txt2: - pywikibot.output('\n {} DIFFERS'.format(site)) + pywikibot.output(f'\n {site} DIFFERS') self.differences[site].append(pagename)
if self.options.replace: diff --git a/scripts/revertbot.py b/scripts/revertbot.py index 1fa571a..a36b2f0 100755 --- a/scripts/revertbot.py +++ b/scripts/revertbot.py @@ -112,7 +112,7 @@
rev = history[1]
- pywikibot.output('\n\n>>> <<lightpurple>>{0}<<default>> <<<' + pywikibot.output('\n\n>>> <<lightpurple>>{}<<default>> <<<' .format(page.title(as_link=True, force_interwiki=True, textlink=True))) diff --git a/scripts/solve_disambiguation.py b/scripts/solve_disambiguation.py index 4c818f6..c288748 100755 --- a/scripts/solve_disambiguation.py +++ b/scripts/solve_disambiguation.py @@ -385,7 +385,7 @@ """ linkupper = link.title() linklower = first_lower(linkupper) - if '[[{}]]'.format(linklower) in text or '[[{}|'.format(linklower) in text: + if f'[[{linklower}]]' in text or f'[[{linklower}|' in text: return linklower return linkupper
@@ -417,7 +417,7 @@ refs = list(self.page.getReferences(with_template_inclusion=False, namespaces=0 if self.main_only else None)) - pywikibot.output('Found {} references.'.format(len(refs))) + pywikibot.output(f'Found {len(refs)} references.') # Remove ignorables site = self.page.site if site.family.name in ignore_title \ @@ -433,7 +433,7 @@ pywikibot.output('Found only {} pages to work on; skipping.' .format(len(refs))) return - pywikibot.output('Will work on {} pages.'.format(len(refs))) + pywikibot.output(f'Will work on {len(refs)} pages.') yield from refs
@@ -662,8 +662,8 @@ for i, arg in enumerate(args): key = keys[i] issue_deprecation_warning( - 'Positional argument {} ({})'.format(i + 1, arg), - 'keyword argument "{}={}"'.format(key, arg), + f'Positional argument {i + 1} ({arg})', + f'keyword argument "{key}={arg}"', since='6.0.0') if key in kwargs: pywikibot.warning('{!r} is given as keyword argument {!r} ' @@ -677,7 +677,7 @@ if key in keymap: newkey = keymap[key] issue_deprecation_warning( - '{!r} argument of {}'.format(key, self.__class__.__name__), + f'{key!r} argument of {self.__class__.__name__}', repr(newkey), since='6.0.0') kwargs[newkey] = kwargs.pop(key)
@@ -1059,7 +1059,7 @@ trailing_chars) elif replaceit or (new_page_title == link_text and not section): - newlink = '[[{}]]'.format(new_page_title) + newlink = f'[[{new_page_title}]]' # check if we can create a link with trailing characters # instead of a pipelink elif ( @@ -1093,7 +1093,7 @@ except LockedPageError: pywikibot.output('Page not saved: page is locked') except PageSaveRelatedError as error: - pywikibot.output('Page not saved: {}'.format(error.args)) + pywikibot.output(f'Page not saved: {error.args}')
return 'done'
@@ -1199,7 +1199,7 @@ new_targets = new_targets or [] # make list of new targets comma = self.site.mediawiki_message('comma-separator') - targets = comma.join('[[{}]]'.format(page_title) + targets = comma.join(f'[[{page_title}]]' for page_title in new_targets)
if not targets: @@ -1271,7 +1271,7 @@ if not self.findAlternatives(page): return
- pywikibot.output('\nAlternatives for {}'.format(page)) + pywikibot.output(f'\nAlternatives for {page}') self.makeAlternativesUnique() # sort possible choices if config.sort_ignore_case: diff --git a/scripts/speedy_delete.py b/scripts/speedy_delete.py index 272b5f7..b10a263 100755 --- a/scripts/speedy_delete.py +++ b/scripts/speedy_delete.py @@ -457,7 +457,7 @@
# skip this page else: - pywikibot.output('Skipping page {}'.format(page)) + pywikibot.output(f'Skipping page {page}')
def setup(self) -> None: """Refresh generator.""" diff --git a/scripts/template.py b/scripts/template.py index 9fc88f0..3e0d59e 100755 --- a/scripts/template.py +++ b/scripts/template.py @@ -120,7 +120,6 @@ itergroup, roundrobin_generators, ) - from scripts.replace import ReplaceRobot as ReplaceBot
@@ -182,12 +181,12 @@ 'pagelist', ] elif self.opt.remove: separate_line_regex = re.compile( - r'^[*#:]* *{} *\n'.format(template_regex.pattern), + fr'^[*#:]* *{template_regex.pattern} *\n', re.DOTALL | re.MULTILINE) replacements.append((separate_line_regex, ''))
spaced_regex = re.compile( - r' +{} +'.format(template_regex.pattern), + fr' +{template_regex.pattern} +', re.DOTALL) replacements.append((spaced_regex, ' '))
diff --git a/scripts/templatecount.py b/scripts/templatecount.py index f4c7ace..28100be 100755 --- a/scripts/templatecount.py +++ b/scripts/templatecount.py @@ -97,7 +97,7 @@ for page in template_dict[key]: pywikibot.stdout(page.title()) total += 1 - pywikibot.output('Total page count: {}'.format(total)) + pywikibot.output(f'Total page count: {total}') pywikibot.stdout('Report generated on {}' .format(datetime.datetime.utcnow().isoformat()))
diff --git a/scripts/touch.py b/scripts/touch.py index 596e792..115a900 100755 --- a/scripts/touch.py +++ b/scripts/touch.py @@ -60,7 +60,7 @@ pywikibot.error('Page {} is locked.' .format(page.title(as_link=True))) except PageSaveRelatedError as e: - pywikibot.error('Page {} not saved:\n{}'.format(page, e.args)) + pywikibot.error(f'Page {page} not saved:\n{e.args}') else: self.counter['touch'] += 1
diff --git a/scripts/transferbot.py b/scripts/transferbot.py index ea13014..39a3196 100755 --- a/scripts/transferbot.py +++ b/scripts/transferbot.py @@ -170,7 +170,7 @@ 'history': edithistpage.title(as_link=True, insite=targetpage.site)} ) - text += '<noinclude>\n\n<small>{}</small></noinclude>'.format(note) + text += f'<noinclude>\n\n<small>{note}</small></noinclude>'
pywikibot.log('Getting edit history.') historytable = page.getVersionHistoryTable() diff --git a/scripts/watchlist.py b/scripts/watchlist.py index bd72dc8..22608d0 100755 --- a/scripts/watchlist.py +++ b/scripts/watchlist.py @@ -79,7 +79,7 @@
def refresh(site): """Fetch the watchlist.""" - pywikibot.output('Retrieving watchlist for {}.'.format(str(site))) + pywikibot.output(f'Retrieving watchlist for {str(site)}.') return list(site.watched_pages(force=True))
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py index 34944f7..8f41b83 100755 --- a/scripts/weblinkchecker.py +++ b/scripts/weblinkchecker.py @@ -370,9 +370,9 @@ def log(self, url, error, containing_page, archive_url) -> None: """Log an error report to a text file in the deadlinks subdirectory.""" if archive_url: - error_report = '* {} ([{} archive])\n'.format(url, archive_url) + error_report = f'* {url} ([{archive_url} archive])\n' else: - error_report = '* {}\n'.format(url) + error_report = f'* {url}\n' for (page_title, date, error) in self.history_dict[url]: # ISO 8601 formulation iso_date = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(date)) diff --git a/scripts/welcome.py b/scripts/welcome.py index b1f3feb..1335a43 100755 --- a/scripts/welcome.py +++ b/scripts/welcome.py @@ -776,11 +776,11 @@ """ if user.is_blocked() or user.is_locked(): self.show_status(Msg.SKIP) - pywikibot.output('{} has been blocked!'.format(user.username)) + pywikibot.output(f'{user.username} has been blocked!')
elif 'bot' in user.groups(): self.show_status(Msg.SKIP) - pywikibot.output('{} is a bot!'.format(user.username)) + pywikibot.output(f'{user.username} is a bot!')
elif 'bot' in user.username.lower(): self.show_status(Msg.SKIP) @@ -848,7 +848,7 @@ elif welcomed_count == 1: count = 'One user has' else: - count = '{} users have'.format(welcomed_count) + count = f'{welcomed_count} users have' pywikibot.output(count + ' been welcomed.')
if welcomed_count >= globalvar.dump_to_log: @@ -988,7 +988,7 @@ elif arg in mapping: setattr(globalvar, *mapping[arg]) else: - pywikibot.warning('Unknown option "{}"'.format(arg)) + pywikibot.warning(f'Unknown option "{arg}"')
def main(*args: str) -> None: diff --git a/tests/__init__.py b/tests/__init__.py index f17f05a..1966c56 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -47,7 +47,7 @@ """Return a function returning a path relative to the given directory.""" func = functools.partial(base_func, subpath) func.path = base_func.path + '/' + subpath - func.__doc__ = 'Return a path relative to `{}/`.'.format(func.path) + func.__doc__ = f'Return a path relative to `{func.path}/`.' return func
diff --git a/tests/archivebot_tests.py b/tests/archivebot_tests.py index 8593877..db1c67f 100755 --- a/tests/archivebot_tests.py +++ b/tests/archivebot_tests.py @@ -312,12 +312,12 @@ try: archivebot.PageArchiver(page, tmpl_with_ns, '') except Error as e: - self.fail('PageArchiver() raised {}!'.format(e)) + self.fail(f'PageArchiver() raised {e}!')
try: archivebot.PageArchiver(page, tmpl_without_ns, '') except Error as e: - self.fail('PageArchiver() raised {}!'.format(e)) + self.fail(f'PageArchiver() raised {e}!')
def testLoadConfigInOtherNamespace(self): """Test loading of config with TEMPLATE_PAGE not in Template ns. @@ -339,7 +339,7 @@ try: archivebot.PageArchiver(page, tmpl_with_ns, '') except Error as e: - self.fail('PageArchiver() raised {}!'.format(e)) + self.fail(f'PageArchiver() raised {e}!')
with self.assertRaises(archivebot.MissingConfigError): archivebot.PageArchiver(page, tmpl_without_ns, '') diff --git a/tests/aspects.py b/tests/aspects.py index 433c78a..b20b270 100644 --- a/tests/aspects.py +++ b/tests/aspects.py @@ -84,7 +84,7 @@ self.test_completed = time.time() duration = self.test_completed - self.test_start if duration > self.test_duration_warning_interval: - unittest_print(' {:.3f}s'.format(duration), end=' ') + unittest_print(f' {duration:.3f}s', end=' ') sys.stdout.flush()
@@ -123,7 +123,7 @@
if first_len != second_len: msg = self._formatMessage( - msg, 'len({}) != {}'.format(safe_repr(seq), second_len)) + msg, f'len({safe_repr(seq)}) != {second_len}') self.fail(msg)
def assertPageInNamespaces(self, page, namespaces): @@ -139,7 +139,7 @@ namespaces = {namespaces}
self.assertIn(page.namespace(), namespaces, - '{} not in namespace {!r}'.format(page, namespaces)) + f'{page} not in namespace {namespaces!r}')
def _get_gen_pages(self, gen, count=None, site=None): """ @@ -326,7 +326,7 @@ """Set up test.""" self.old_Site_lookup_method = pywikibot.Site pywikibot.Site = lambda *args: self.fail( - '{}: Site() not permitted'.format(self.__class__.__name__)) + f'{self.__class__.__name__}: Site() not permitted')
super().setUp()
@@ -950,9 +950,9 @@ def has_site_user(cls, family, code): """Check the user config has a user for the site.""" if not family: - raise Exception('no family defined for {}'.format(cls.__name__)) + raise Exception(f'no family defined for {cls.__name__}') if not code: - raise Exception('no site code defined for {}'.format(cls.__name__)) + raise Exception(f'no site code defined for {cls.__name__}')
usernames = config.usernames
@@ -1396,9 +1396,9 @@ @classmethod def _build_message(cls, deprecated, instead): if deprecated is not None: - msg = '{} is deprecated'.format(deprecated) + msg = f'{deprecated} is deprecated' if instead: - msg += '; use {} instead.'.format(instead) + msg += f'; use {instead} instead.' elif instead is None: msg = None elif instead is True: @@ -1552,5 +1552,5 @@ Otherwise, returns: httpbin.org """ if hasattr(self, 'httpbin'): - return '{}:{}'.format(self.httpbin.host, self.httpbin.port) + return f'{self.httpbin.host}:{self.httpbin.port}' return 'httpbin.org' diff --git a/tests/basesite_tests.py b/tests/basesite_tests.py index 7d3f3e3..70b5869 100755 --- a/tests/basesite_tests.py +++ b/tests/basesite_tests.py @@ -87,7 +87,7 @@ self.assertIsInstance(mysite.lang, str) self.assertEqual(mysite, pywikibot.Site(self.code, self.family)) self.assertIsInstance(mysite.user(), (str, type(None))) - self.assertEqual(mysite.sitename, '{}:{}'.format(self.family, code)) + self.assertEqual(mysite.sitename, f'{self.family}:{code}') self.assertIsInstance(mysite.linktrail(), str) self.assertIsInstance(mysite.redirect(), str)
diff --git a/tests/date_tests.py b/tests/date_tests.py index a859b77..2adae3d 100755 --- a/tests/date_tests.py +++ b/tests/date_tests.py @@ -48,7 +48,7 @@
for formatname in date.formats: cls.add_method(dct, 'test_' + formatname, test_method(formatname), - doc_suffix='using {} format'.format(formatname)) + doc_suffix=f'using {formatname} format')
return super().__new__(cls, name, bases, dct)
diff --git a/tests/diff_tests.py b/tests/diff_tests.py index 6ce8ca0..9c99123 100755 --- a/tests/diff_tests.py +++ b/tests/diff_tests.py @@ -203,7 +203,7 @@ # output messages expected during testing diff_message = ('<<lightred>>- old\n<<default>><<lightgreen>>+ ' 'new\n<<default>>') - none_message = '<<lightpurple>>{0: ^50}<<default>>'.format('None.') + none_message = '<<lightpurple>>{: ^50}<<default>>'.format('None.') header_base = '\n<<lightpurple>>{0:*^50}<<default>>\n' headers = [' ALL CHANGES ', ' REVIEW CHANGES ', ' APPROVED CHANGES '] diff_by_letter_message = ('<<lightred>>- o\n<<default>>' diff --git a/tests/djvu_tests.py b/tests/djvu_tests.py index 1d3ed95..e4b82fc 100755 --- a/tests/djvu_tests.py +++ b/tests/djvu_tests.py @@ -34,13 +34,13 @@ def test_repr_method(self): """Test __repr__() method.""" djvu = DjVuFile(self.file_djvu) - expected = "pywikibot.tools.djvu.DjVuFile('{}')".format(self.file_djvu) + expected = f"pywikibot.tools.djvu.DjVuFile('{self.file_djvu}')" self.assertEqual(repr(djvu), expected)
def test_str_method(self): """Test __str__() method.""" djvu = DjVuFile(self.file_djvu) - expected = "DjVuFile('{}')".format(self.file_djvu) + expected = f"DjVuFile('{self.file_djvu}')" self.assertEqual(str(djvu), expected)
def test_file_existence(self): diff --git a/tests/dry_api_tests.py b/tests/dry_api_tests.py index fdf7a12..c170f3e 100755 --- a/tests/dry_api_tests.py +++ b/tests/dry_api_tests.py @@ -81,7 +81,7 @@ self.assertFalse(self.req._expired(now)) self.assertTrue( self.req._expired(now - datetime.timedelta(days=2)), - msg='\nreq.expiry: {}, now: {}'.format(self.req.expiry, now)) + msg=f'\nreq.expiry: {self.req.expiry}, now: {now}')
def test_parameter_types(self): """Test _uniquedescriptionstr is identical using different ways.""" @@ -185,7 +185,7 @@ return 'MockSite()'
def __getattr__(self, attr): - raise Exception('Attribute {!r} not defined'.format(attr)) + raise Exception(f'Attribute {attr!r} not defined')
self.mocksite = MockSite() super().setUp() diff --git a/tests/edit_tests.py b/tests/edit_tests.py index 05303ac..6fb8834 100755 --- a/tests/edit_tests.py +++ b/tests/edit_tests.py @@ -205,11 +205,11 @@ self.assertTrue(self.site.logged_in()) ts = str(time.time()) p = pywikibot.Page(self.site, - 'User:{}/edit test'.format(self.site.username())) + f'User:{self.site.username()}/edit test') p.site.editpage(p, appendtext=ts) revision_id = p.latest_revision_id p = pywikibot.Page(self.site, - 'User:{}/edit test'.format(self.site.username())) + f'User:{self.site.username()}/edit test') self.assertEqual(revision_id, p.latest_revision_id) self.assertTrue(p.text.endswith(ts))
diff --git a/tests/eventstreams_tests.py b/tests/eventstreams_tests.py index bafe316..7f69c9f 100755 --- a/tests/eventstreams_tests.py +++ b/tests/eventstreams_tests.py @@ -10,10 +10,9 @@ from contextlib import suppress from unittest import mock
-from pywikibot import config, Site +from pywikibot import Site, config from pywikibot.comms.eventstreams import EventSource, EventStreams from pywikibot.family import WikimediaFamily - from tests.aspects import DefaultSiteTestCase, TestCase, require_modules from tests.utils import skipping
@@ -59,7 +58,7 @@ self.assertEqual(e._url, e.sse_kwargs.get('url')) self.assertIsNone(e._total) self.assertEqual(e._streams, streams) - site_repr = 'site={}, '.format(repr(site)) if site != Site() else '' + site_repr = f'site={repr(site)}, ' if site != Site() else '' self.assertEqual(repr(e), "EventStreams({}streams='{}')" .format(site_repr, streams)) diff --git a/tests/file_tests.py b/tests/file_tests.py index 8063d34..64294b1 100755 --- a/tests/file_tests.py +++ b/tests/file_tests.py @@ -94,7 +94,7 @@ self.assertFalse(commons_file.file_is_shared())
page_doesnt_exist_exc_regex = re.escape( - "Page [[commons:{}]] doesn't exist.".format(title)) + f"Page [[commons:{title}]] doesn't exist.")
with self.assertRaisesRegex( NoPageError, diff --git a/tests/i18n_tests.py b/tests/i18n_tests.py index 7d8fa6d..c8f6e0a 100755 --- a/tests/i18n_tests.py +++ b/tests/i18n_tests.py @@ -377,7 +377,7 @@ self.skipTest('Wrong content model {!r} for cosmetic_changes' .format(page.content_model))
- summary = 'Working on Test page at site {}'.format(self.site) + summary = f'Working on Test page at site {self.site}' msg = page._cosmetic_changes_hook(summary) self.assertEqual(msg, summary + '; cosmetic changes')
diff --git a/tests/link_tests.py b/tests/link_tests.py index 91d7076..67cb233 100755 --- a/tests/link_tests.py +++ b/tests/link_tests.py @@ -415,7 +415,7 @@ # switch code:family sequence en:wikipedia or wikipedia:en for first, second in [(family, code), (code, family)]: with self.subTest(colon=colon, - site='{}:{}'.format(first, second)): + site=f'{first}:{second}'): link_title = self.PATTERN.format(colon=colon, first=first, second=second, @@ -433,7 +433,7 @@ # switch code:family sequence en:wikipedia or wikipedia:en for first, second in [(family, code), (code, family)]: with self.subTest(colon=colon, - site='{}:{}'.format(first, second)): + site=f'{first}:{second}'): link_title = self.PATTERN.format(colon=colon, first=first, second=second, diff --git a/tests/linter_tests.py b/tests/linter_tests.py index 9a402a5..2f8990a 100755 --- a/tests/linter_tests.py +++ b/tests/linter_tests.py @@ -21,7 +21,7 @@ super().setUp() if not self.site.has_extension('Linter'): self.skipTest( - 'The site {} does not use Linter extension'.format(self.site)) + f'The site {self.site} does not use Linter extension')
def test_linter_pages(self): """Test the deprecated site.logpages() method.""" diff --git a/tests/logentries_tests.py b/tests/logentries_tests.py index e4468c3..4798036 100755 --- a/tests/logentries_tests.py +++ b/tests/logentries_tests.py @@ -67,7 +67,7 @@ self.assertLess(self.site.mw_version, '1.25')
with skipping(StopIteration, - msg='No entry found for {!r}'.format(logtype)): + msg=f'No entry found for {logtype!r}'): le = next(self.site.logevents(logtype=logtype, total=1)) return le
@@ -154,7 +154,7 @@
# create test methods for the support logtype classes for logtype in LogEntryFactory._logtypes: - cls.add_method(dct, 'test_{}Entry'.format(logtype.title()), + cls.add_method(dct, f'test_{logtype.title()}Entry', test_method(logtype))
return super().__new__(cls, name, bases, dct) diff --git a/tests/page_tests.py b/tests/page_tests.py index cd7415e..32dfafb 100755 --- a/tests/page_tests.py +++ b/tests/page_tests.py @@ -546,7 +546,7 @@ for page in site.allpages(filterredir=True, total=1): break else: - self.skipTest('No redirect pages on site {!r}'.format(site)) + self.skipTest(f'No redirect pages on site {site!r}') # This page is already initialised self.assertTrue(hasattr(page, '_isredir')) # call api.update_page without prop=info @@ -675,7 +675,7 @@ """Test to capture actual Python result pre unicode_literals.""" self.assertEqual(repr(self.page), "Page('Ō')") self.assertEqual('%r' % self.page, "Page('Ō')") - self.assertEqual('{!r}'.format(self.page), "Page('Ō')") + self.assertEqual(f'{self.page!r}', "Page('Ō')")
class TestPageBotMayEdit(TestCase): diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py index 37f9cce..1545b5f 100755 --- a/tests/pagegenerators_tests.py +++ b/tests/pagegenerators_tests.py @@ -178,7 +178,7 @@ site=self.site) pages = [] for p in gen: - p.text = 'This is the content of {} as a sample'.format(p.title()) + p.text = f'This is the content of {p.title()} as a sample' pages.append(p) gen = pagegenerators.RegexBodyFilterPageGenerator(iter(pages), '/doc') self.assertPageTitlesEqual(gen, @@ -1144,7 +1144,7 @@
# Get by pageids. gf = pagegenerators.GeneratorFactory(site=self.get_site()) - gf.handle_arg('-pageid:{}'.format(pageids)) + gf.handle_arg(f'-pageid:{pageids}') gen = gf.getCombinedGenerator() self.assertIsNotNone(gen) pages_from_pageid = list(gen) @@ -1224,7 +1224,7 @@ 'hiddencat', 'invalid_property'): if item in mysite.get_property_names(): gf = pagegenerators.GeneratorFactory() - gf.handle_arg('-property:{}'.format(item)) + gf.handle_arg(f'-property:{item}') gf.handle_arg('-limit:10') gen = gf.getCombinedGenerator() self.assertIsNotNone(gen) @@ -1238,7 +1238,7 @@ with self.assertRaises(NotImplementedError): mysite.pages_with_property(item) self.fail( - 'NotImplementedError not raised for {}'.format(item)) + f'NotImplementedError not raised for {item}')
def test_empty_generator(self): """Test empty generator.""" @@ -1644,7 +1644,7 @@ super().setUpClass() cls.client = 'sseclient' if not has_module(cls.client): - raise unittest.SkipTest('{} is not available'.format(cls.client)) + raise unittest.SkipTest(f'{cls.client} is not available')
def test_RC_pagegenerator_result(self): """Test RC pagegenerator.""" @@ -1720,7 +1720,7 @@
for search, expected in cases: gf = pagegenerators.GeneratorFactory(site=self.site) - gf.handle_arg('-weblink:{}'.format(search)) + gf.handle_arg(f'-weblink:{search}') gf.handle_arg('-ns:2') gf.handle_arg('-limit:1') gen = gf.getCombinedGenerator() diff --git a/tests/paraminfo_tests.py b/tests/paraminfo_tests.py index 0f26e3d..2b6d71d 100755 --- a/tests/paraminfo_tests.py +++ b/tests/paraminfo_tests.py @@ -26,12 +26,12 @@ """Perform check that a parameter matches the expected list.""" with skipping( ValueError, - msg='Paraminfo for {} could not be loaded'.format(module)): + msg=f'Paraminfo for {module} could not be loaded'): param = site._paraminfo.parameter(module, parameter)
if not param or 'type' not in param: raise unittest.SkipTest( - 'No defined values for {}.{}'.format(module, parameter)) + f'No defined values for {module}.{parameter}') return param['type']
def _check_param_values(self, site, module, parameter, expected): @@ -86,7 +86,7 @@ if self.site.mw_version >= '1.24': types.append('unread')
- known = types + ['!{}'.format(item) for item in types] + known = types + [f'!{item}' for item in types]
self._check_param_subset(self.site, 'query+watchlist', 'show', known)
diff --git a/tests/patrolbot_tests.py b/tests/patrolbot_tests.py index b69d404..aed5d9b 100755 --- a/tests/patrolbot_tests.py +++ b/tests/patrolbot_tests.py @@ -41,9 +41,9 @@ """Test parsing the page tuples from a dummy text.""" tuples = self.bot.parse_page_tuples(DUMMY_PAGE_TUPLES) for gen_user in (1, 2): - user = 'Test {}'.format(gen_user) + user = f'Test {gen_user}' self.assertIn(user, tuples) - self.assertEqual(tuples[user], {'Page {}'.format(i * gen_user) + self.assertEqual(tuples[user], {f'Page {i * gen_user}' for i in range(1, 4)}) self.assertIn('Prefixed', tuples) self.assertEqual(tuples['Prefixed'], {'Page 1', 'Page 2'}) diff --git a/tests/plural_tests.py b/tests/plural_tests.py index e397d86..c5b721f 100755 --- a/tests/plural_tests.py +++ b/tests/plural_tests.py @@ -47,7 +47,7 @@ for lang, rule in plural.plural_rules.items(): cls.add_method(dct, 'test_{}'.format(lang.replace('-', '_')), create_test(rule), - doc_suffix='for "{}"'.format(lang)) + doc_suffix=f'for "{lang}"') return super().__new__(cls, name, bases, dct)
diff --git a/tests/proofreadpage_tests.py b/tests/proofreadpage_tests.py index ad2f675..47b6965 100755 --- a/tests/proofreadpage_tests.py +++ b/tests/proofreadpage_tests.py @@ -866,17 +866,17 @@
def test_has_valid_content_prefixed(self): """Test prefixing Index template is invalid.""" - self.index.text = 'pre {}'.format(self.valid_template) + self.index.text = f'pre {self.valid_template}' self.assertFalse(self.index.has_valid_content())
def test_has_valid_content_postfixed(self): """Test postfixing Index template is invalid.""" - self.index.text = '{}post'.format(self.valid_template) + self.index.text = f'{self.valid_template}post' self.assertFalse(self.index.has_valid_content())
def test_has_valid_content_pre_and_postfixed(self): """Test pre- and postfixing Index template is invalid.""" - self.index.text = 'pre{}post'.format(self.valid_template) + self.index.text = f'pre{self.valid_template}post' self.assertFalse(self.index.has_valid_content())
def test_has_valid_content_second_template(self): diff --git a/tests/pwb/print_env.py b/tests/pwb/print_env.py index 37ae1f0..42701d1 100755 --- a/tests/pwb/print_env.py +++ b/tests/pwb/print_env.py @@ -25,7 +25,7 @@ # This only appears in subprocesses if k == 'PYWIKIBOT_DIR_PWB': continue - print('{}: {}'.format(k, v)) + print(f'{k}: {v}')
print('sys.path:') for path in sys.path: diff --git a/tests/pwb/print_locals.py b/tests/pwb/print_locals.py index 88f21e6..538f624 100755 --- a/tests/pwb/print_locals.py +++ b/tests/pwb/print_locals.py @@ -17,7 +17,7 @@ if k == '__file__': print('__file__: ' + os.path.join('.', os.path.relpath(__file__))) else: - print('{}: {}'.format(k, v)) + print(f'{k}: {v}')
if __name__ == '__main__': diff --git a/tests/pwb_tests.py b/tests/pwb_tests.py index 69f335f..268db5b 100755 --- a/tests/pwb_tests.py +++ b/tests/pwb_tests.py @@ -119,7 +119,7 @@ text = stderr.readline().strip() self.assertTrue( text.startswith(result[1]), - msg='"{}" does not start with "{}"'.format(text, result[1])) + msg=f'"{text}" does not start with "{result[1]}"') with self.subTest(line=2): self.assertEqual(stderr.readline().strip(), result[2])
diff --git a/tests/replacebot_tests.py b/tests/replacebot_tests.py index 203b5e9..531ff01 100755 --- a/tests/replacebot_tests.py +++ b/tests/replacebot_tests.py @@ -128,7 +128,7 @@ offset) if msg: self.assertEqual(replacement.edit_summary, - 'M{}'.format(offset + 1)) + f'M{offset + 1}') else: self.assertIs(replacement.edit_summary, replacement.fix_set.edit_summary) diff --git a/tests/site_generators_tests.py b/tests/site_generators_tests.py index 39516de..95b2204 100644 --- a/tests/site_generators_tests.py +++ b/tests/site_generators_tests.py @@ -20,11 +20,7 @@ ) from pywikibot.tools import suppress_warnings from tests import WARN_SITE_CODE, unittest_print -from tests.aspects import ( - DefaultSiteTestCase, - DeprecationTestCase, - TestCase, -) +from tests.aspects import DefaultSiteTestCase, DeprecationTestCase, TestCase from tests.utils import skipping
@@ -633,7 +629,7 @@ with self.assertRaises(NotImplementedError): mysite.pages_with_property(item) self.fail( - 'NotImplementedError not raised for {}'.format(item)) + f'NotImplementedError not raised for {item}')
def test_unconnected(self): """Test site.unconnected_pages method.""" @@ -767,7 +763,7 @@ page = pywikibot.Page(mysite, mysite.siteinfo['mainpage']) with skipping( StopIteration, - msg='No images on the main page of site {!r}'.format(mysite)): + msg=f'No images on the main page of site {mysite!r}'): imagepage = next(page.imagelinks()) # 1st image of page
unittest_print('site_tests.TestImageUsage found {} on {}' @@ -2201,7 +2197,7 @@ for count, page in enumerate(gen): self.assertIsInstance(page, pywikibot.Page) self.assertIsInstance(page.exists(), bool) - self.assertFalse(page.exists(), 'page {} exists'.format(page)) + self.assertFalse(page.exists(), f'page {page} exists') if count >= 5: break
diff --git a/tests/site_tests.py b/tests/site_tests.py index e2326b1..354e4dc 100755 --- a/tests/site_tests.py +++ b/tests/site_tests.py @@ -87,7 +87,7 @@ def test_repr(self): """Test __repr__.""" code = self.site.family.obsolete.get(self.code) or self.code - expect = 'Site("{}", "{}")'.format(code, self.family) + expect = f'Site("{code}", "{self.family}")' self.assertTrue(repr(self.site).endswith(expect))
def test_constructors(self): @@ -251,7 +251,7 @@ mysite = self.get_site() if mysite.lang != 'en': self.skipTest( - 'English-specific tests not valid on {}'.format(mysite)) + f'English-specific tests not valid on {mysite}')
self.assertEqual(mysite.months_names[4], ('May', 'May')) self.assertEqual(mysite.list_to_text(('Pride', 'Prejudice')), @@ -381,7 +381,7 @@ break else: self.skipTest( - '{} contains no deleted revisions.'.format(mainpage)) + f'{mainpage} contains no deleted revisions.') self.assertLessEqual(len(dr['revisions']), 10) for rev in dr['revisions']: self.assertIsInstance(rev, dict) @@ -480,7 +480,7 @@ for data in gen: break else: - self.skipTest('{} does not have deleted edits.'.format(myuser)) + self.skipTest(f'{myuser} does not have deleted edits.') self.assertIn('revisions', data) for drev in data['revisions']: for key in ('revid', 'timestamp', 'user', 'comment'): diff --git a/tests/textlib_tests.py b/tests/textlib_tests.py index adf4e4a..343ae8c 100755 --- a/tests/textlib_tests.py +++ b/tests/textlib_tests.py @@ -737,7 +737,7 @@ return pywikibot.Link( '{}#{}' .format(self._count, link.section), link.site) - return pywikibot.Link('{}'.format(self._count), link.site) + return pywikibot.Link(f'{self._count}', link.site)
return None
@@ -1276,18 +1276,18 @@ def test_replace_tag_category(self): """Test replacing not inside category links.""" for ns_name in self.site.namespaces[14]: - self.assertEqual(textlib.replaceExcept('[[{}:x]]'.format(ns_name), + self.assertEqual(textlib.replaceExcept(f'[[{ns_name}:x]]', 'x', 'y', ['category'], site=self.site), - '[[{}:x]]'.format(ns_name)) + f'[[{ns_name}:x]]')
def test_replace_tag_file(self): """Test replacing not inside file links.""" for ns_name in self.site.namespaces[6]: - self.assertEqual(textlib.replaceExcept('[[{}:x]]'.format(ns_name), + self.assertEqual(textlib.replaceExcept(f'[[{ns_name}:x]]', 'x', 'y', ['file'], site=self.site), - '[[{}:x]]'.format(ns_name)) + f'[[{ns_name}:x]]')
self.assertEqual( textlib.replaceExcept( diff --git a/tests/token_tests.py b/tests/token_tests.py index f94bdaa..5ba8cef 100755 --- a/tests/token_tests.py +++ b/tests/token_tests.py @@ -150,7 +150,7 @@ raise
if hasattr(mysite, '_patroldisabled') and mysite._patroldisabled: - self.skipTest('Patrolling is disabled on {} wiki.'.format(mysite)) + self.skipTest(f'Patrolling is disabled on {mysite} wiki.')
result = result[0] self.assertIsInstance(result, dict) diff --git a/tests/ui_options_tests.py b/tests/ui_options_tests.py index fed1970..d397329 100755 --- a/tests/ui_options_tests.py +++ b/tests/ui_options_tests.py @@ -96,9 +96,9 @@ self.assertFalse(option.test('r6')) self.assertIsNone(option.handled('r6')) for i in range(1, 6): - self.assertTrue(option.test('r{}'.format(i))) - self.assertEqual(option.handled('r{}'.format(i)), option) - self.assertEqual(option.result('r{}'.format(i)), ('r', i)) + self.assertTrue(option.test(f'r{i}')) + self.assertEqual(option.handled(f'r{i}'), option) + self.assertEqual(option.result(f'r{i}'), ('r', i))
def test_List(self): """Test ListOption.""" @@ -132,12 +132,12 @@ for prefix in ('', 'r', 'st'): option = bot.ListOption(options, prefix=prefix) self.assertEqual(message('?', [option]), - '? ({}<number> [1-3])'.format(prefix)) + f'? ({prefix}<number> [1-3])') for i, elem in enumerate(options, 1): - self.assertTrue(option.test('{}{}'.format(prefix, i))) + self.assertTrue(option.test(f'{prefix}{i}')) self.assertIs(option.handled('{}{}' .format(prefix, i)), option) - self.assertEqual(option.result('{}{}'.format(prefix, i)), + self.assertEqual(option.result(f'{prefix}{i}'), (prefix, elem)) self.assertFalse(option.test('{}{}' .format(prefix, len(options) + 1))) diff --git a/tests/ui_tests.py b/tests/ui_tests.py index cf750fa..7856f1c 100755 --- a/tests/ui_tests.py +++ b/tests/ui_tests.py @@ -266,7 +266,7 @@ self.assertEqual(self.strout.getvalue(), '') self.assertEqual( self.strerr.getvalue(), - ''.join('{}: {}\n'.format(num, items) + ''.join(f'{num}: {items}\n' for num, items in enumerate(options, start=1)) + 'question (default: 2): ') self.assertEqual(rv, 'answer 2') diff --git a/tests/utils.py b/tests/utils.py index e9031dc..397552d 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -21,7 +21,6 @@ from pywikibot.exceptions import APIError from pywikibot.login import LoginStatus from pywikibot.site import Namespace - from tests import _pwb_py
@@ -501,7 +500,7 @@ if overrides: command.append('-c') overrides = '; '.join( - '{} = {}'.format(key, value) for key, value in overrides.items()) + f'{key} = {value}' for key, value in overrides.items()) command.append( 'import pwb; import pywikibot; {}; pwb.main()' .format(overrides))