jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/1101186?usp=email )
Change subject: doc: Fix spelling mistakes
......................................................................
doc: Fix spelling mistakes
Change-Id: I8ced196e189c827e47eec6d4678d661cd746e241
---
M HISTORY.rst
M docs/conf.py
M docs/scripts/archive.rst
M docs/scripts/outdated.rst
M pywikibot/data/sparql.py
M pywikibot/family.py
M pywikibot/page/_filepage.py
M pywikibot/page/_toolforge.py
M pywikibot/pagegenerators/_generators.py
M pywikibot/scripts/generate_family_file.py
M pywikibot/site/_upload.py
M pywikibot/tools/_deprecate.py
M scripts/CHANGELOG.rst
M scripts/category_graph.py
M scripts/checkimages.py
M scripts/maintenance/unidata.py
M scripts/transwikiimport.py
M tests/dry_api_tests.py
M tests/pagegenerators_tests.py
M tests/proofreadpage_tests.py
M tests/redirect_bot_tests.py
M tests/site_login_logout_tests.py
22 files changed, 63 insertions(+), 59 deletions(-)
Approvals:
  Xqt: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/HISTORY.rst b/HISTORY.rst index 2e6ba4e..4d067ad 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -722,7 +722,7 @@ * Retry for internal_api_error_DBQueryTimeoutError errors due to :phab:`T297708` * Handle ParserError within xmlreader.XmlDump.parse() instead of raising an exception (:phab:`T306134`) * XMLDumpOldPageGenerator is deprecated in favour of a `content` parameter (:phab:`T306134`) -* `use_disambig` BaseBot attribute was added to hande disambig skipping +* `use_disambig` BaseBot attribute was added to handle disambig skipping * Deprecate RedirectPageBot and NoRedirectPageBot in favour of `use_redirects` attribute * tools.formatter.color_format is deprecated and will be removed * A new and easier color format was implemented; colors can be used like: @@ -1020,7 +1020,7 @@ **Improvements and Bugfixes**
* Use different logfiles for multiple processes of the same script (:phab:`T56685`) -* throttle.pip will be reused as soon as possbile +* throttle.pip will be reused as soon as possible * terminal_interface_base.TerminalHandler is subclassed from logging.StreamHandler * Fix iterating of SizedKeyCollection (:phab:`T282865`) * An abstract base user interface module was added diff --git a/docs/conf.py b/docs/conf.py index da75c99..f1af6c8 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -480,7 +480,7 @@ napoleon_use_admonition_for_examples = True napoleon_use_admonition_for_notes = True napoleon_use_admonition_for_references = True -napoleon_custom_sections = ['Advice', 'Advices', 'Hints', 'Rights', 'Tips'] +napoleon_custom_sections = ['Advice', 'Hints', 'Rights']
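As a side note on this Sphinx setting: ``napoleon_custom_sections`` only registers which docstring headings napoleon parses as sections, so once the docstrings themselves use the singular heading (see the ``Advices`` → ``Advice`` rename in ``scripts/transwikiimport.py`` further down) the plural aliases can be dropped. A minimal sketch with a hypothetical function::

    # Sketch only; docs/conf.py keeps the accepted headings:
    # napoleon_custom_sections = ['Advice', 'Hints', 'Rights']
    def example() -> None:
        """Do something.

        Advice
        ------
        Headings listed in napoleon_custom_sections are rendered like
        the built-in ones; an unlisted heading such as the old plural
        'Advices' would be left as plain text in the generated docs.
        """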
python_use_unqualified_type_names = True modindex_common_prefix = ['pywikibot.scripts.'] diff --git a/docs/scripts/archive.rst b/docs/scripts/archive.rst index 43cd347..867a081 100644 --- a/docs/scripts/archive.rst +++ b/docs/scripts/archive.rst @@ -2,7 +2,7 @@ Outdated core scripts *********************
-This list contains outdated scripts from :term:`core` banch which +This list contains outdated scripts from :term:`core` branch which aren't supported any longer. They are deleted from repository.
.. hint:: diff --git a/docs/scripts/outdated.rst b/docs/scripts/outdated.rst index 5e907af..4a8d2b8 100644 --- a/docs/scripts/outdated.rst +++ b/docs/scripts/outdated.rst @@ -2,7 +2,7 @@ Outdated compat scripts ***********************
-This list contains outdated scripts from :term:`compat` banch which +This list contains outdated scripts from :term:`compat` branch which haven't ported to the :term:`core` branch of Pywikibot.
Feel free to reactivate any script at any time by creating a Phabricator diff --git a/pywikibot/data/sparql.py b/pywikibot/data/sparql.py index 622c012..8086052 100644 --- a/pywikibot/data/sparql.py +++ b/pywikibot/data/sparql.py @@ -165,7 +165,7 @@ return self.last_response.json() except JSONDecodeError: # There is no proper error given but server returns HTML page - # in case login isn't valid sotry to guess what the problem is + # in case login isn't valid so try to guess what the problem is # and notify user instead of silently ignoring it. # This could be made more reliable by fixing the backend. # Note: only raise error when response starts with HTML, diff --git a/pywikibot/family.py b/pywikibot/family.py index 2d62f44..094a4ea 100644 --- a/pywikibot/family.py +++ b/pywikibot/family.py @@ -1098,7 +1098,7 @@
"""A base class for a Wikimedia Wikibase Family.
- This class holds defauls for :meth:`calendarmodel`, + This class holds defaults for :meth:`calendarmodel`, :meth:`default_globe` and :meth:`globes` to prevent code duplication.
.. warning:: Possibly you have to adjust the repository site in diff --git a/pywikibot/page/_filepage.py b/pywikibot/page/_filepage.py index 60d693e..bb4b054 100644 --- a/pywikibot/page/_filepage.py +++ b/pywikibot/page/_filepage.py @@ -292,18 +292,21 @@ :param source: Path or URL to the file to be uploaded.
:keyword comment: Edit summary; if this is not provided, then - filepage.text will be used. An empty summary is not permitted. - This may also serve as the initial page text (see below). + filepage.text will be used. An empty summary is not + permitted. This may also serve as the initial page text (see + below). :keyword text: Initial page text; if this is not set, then filepage.text will be used, or comment. - :keyword watch: If true, add filepage to the bot user's watchlist + :keyword watch: If true, add filepage to the bot user's + watchlist :keyword ignore_warnings: It may be a static boolean, a callable - returning a boolean or an iterable. The callable gets a list of - UploadError instances and the iterable should contain the warning - codes for which an equivalent callable would return True if all - UploadError codes are in thet list. If the result is False it'll - not continue uploading the file and otherwise disable any warning - and reattempt to upload the file. + returning a boolean or an iterable. The callable gets a list + of UploadError instances and the iterable should contain the + warning codes for which an equivalent callable would return + True if all UploadError codes are in that list. If the + result is False it'll not continue uploading the file and + otherwise disable any warning and reattempt to upload the + file.
.. note:: NOTE: If report_success is True or None it'll raise an UploadError exception if the static boolean is @@ -314,11 +317,12 @@ only upload in chunks, if the chunk size is positive but lower than the file size. :type chunk_size: int - :keyword report_success: If the upload was successful it'll print a - success message and if ignore_warnings is set to False it'll - raise an UploadError if a warning occurred. If it's - None (default) it'll be True if ignore_warnings is a bool and False - otherwise. If it's True or None ignore_warnings must be a bool. + :keyword report_success: If the upload was successful it'll + print a success message and if ignore_warnings is set to + False it'll raise an UploadError if a warning occurred. If + it's None (default) it'll be True if ignore_warnings is a + bool and False otherwise. If it's True or None + ignore_warnings must be a bool. :return: It returns True if the upload was successful and False otherwise. """ diff --git a/pywikibot/page/_toolforge.py b/pywikibot/page/_toolforge.py index 19a2782..3f2bbf9 100644 --- a/pywikibot/page/_toolforge.py +++ b/pywikibot/page/_toolforge.py @@ -214,7 +214,7 @@ chars = int(row[3].replace(',', '_')) percent = float(row[4].rstrip('%'))
- # take into account tht data() is ordered + # take into account that data() is ordered if n and rank > n or chars < min_chars or percent < min_pct: break
diff --git a/pywikibot/pagegenerators/_generators.py b/pywikibot/pagegenerators/_generators.py index cfd5c09..42a54c6 100644 --- a/pywikibot/pagegenerators/_generators.py +++ b/pywikibot/pagegenerators/_generators.py @@ -796,14 +796,14 @@ ) -> Iterable[pywikibot.page.Page]: """Yield all pages that link to a certain URL.
- :param url: The URL to search for (with ot without the protocol prefix); - this may include a '*' as a wildcard, only at the start of the - hostname + :param url: The URL to search for (with or without the protocol + prefix); this may include a '*' as a wildcard, only at the start + of the hostname :param namespaces: list of namespace numbers to fetch contribs from :param total: Maximum number of pages to retrieve in total :param site: Site for generator results - :param protocol: Protocol to search for, likely http or https, http by - default. Full list shown on Special:LinkSearch wikipage + :param protocol: Protocol to search for, likely http or https, http + by default. Full list shown on Special:LinkSearch wikipage. """ if site is None: site = pywikibot.Site() diff --git a/pywikibot/scripts/generate_family_file.py b/pywikibot/scripts/generate_family_file.py index c2b2e00..8b03eff 100755 --- a/pywikibot/scripts/generate_family_file.py +++ b/pywikibot/scripts/generate_family_file.py @@ -73,9 +73,9 @@ only includes site of the same domain (usually for Wikimedia sites), `N` or `n` for no and `E` or `e` if you want to edit the collection of sites. - :param verify: If a certificate verification failes, you may - pass `Y` or `y` to disable certificate validaton `N` or `n` - to keep it enabled. + :param verify: If a certificate verification fails, you may pass + `Y` or `y` to disable certificate validaton `N` or `n` to + keep it enabled. """ from pywikibot.scripts import _import_with_no_user_config
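Going back to the link-search generator whose parameter list is reflowed above, a minimal usage sketch (assuming the public ``LinksearchPageGenerator`` wrapper in ``pywikibot.pagegenerators``; the site and URL are placeholders)::

    import pywikibot
    from pywikibot import pagegenerators

    site = pywikibot.Site('wikipedia:en')
    # The '*' wildcard is only allowed at the start of the hostname and
    # the protocol defaults to http on the server side.
    gen = pagegenerators.LinksearchPageGenerator(
        '*.example.org', total=5, site=site, protocol='https')
    for page in gen:
        print(page.title())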
diff --git a/pywikibot/site/_upload.py b/pywikibot/site/_upload.py index 0386764..2477d3f 100644 --- a/pywikibot/site/_upload.py +++ b/pywikibot/site/_upload.py @@ -46,7 +46,7 @@ returning a boolean or an iterable. The callable gets a list of UploadError instances and the iterable should contain the warning codes for which an equivalent callable would return True if all - UploadError codes are in thet list. If the result is False it'll + UploadError codes are in that list. If the result is False it'll not continue uploading the file and otherwise disable any warning and reattempt to upload the file.
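Both reflowed docstrings above describe the same ``ignore_warnings`` contract, so one hedged sketch covers them (file name, local path and summary are placeholders)::

    import pywikibot

    site = pywikibot.Site('commons:commons')
    page = pywikibot.FilePage(site, 'File:Example-sketch.png')

    def ignore(warnings):
        # The callable gets a list of UploadError instances; returning
        # True disables those warnings and the upload is reattempted,
        # returning False aborts the upload.
        return all(w.code == 'duplicate' for w in warnings)

    # ignore_warnings may equally be a plain bool or an iterable of
    # warning codes such as ['duplicate'].  With a non-bool value,
    # report_success left at None evaluates to False, as described above.
    ok = page.upload('Example-sketch.png',
                     comment='Upload sketch for documentation',
                     ignore_warnings=ignore)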
diff --git a/pywikibot/tools/_deprecate.py b/pywikibot/tools/_deprecate.py index 7f7c4c0..8a05ae5 100644 --- a/pywikibot/tools/_deprecate.py +++ b/pywikibot/tools/_deprecate.py @@ -445,7 +445,7 @@ def deprecate_positionals(since: str = ''): """Decorator for methods that issues warnings for positional arguments.
- This decorator allowes positional arguments after keyword-only + This decorator allows positional arguments after keyword-only argument syntax (:pep:`3102`) but throws a FutureWarning. The decorator makes the needed argument updates before passing them to the called function or method. This decorator may be used for a @@ -476,7 +476,7 @@ def decorator(func): """Outer wrapper. Inspect the parameters of *func*.
- :param func: function or method beeing wrapped. + :param func: function or method being wrapped. """
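A short sketch of the decorator documented above, applied to a hypothetical method (assuming it is re-exported from ``pywikibot.tools`` like the other deprecation helpers; the version string is a placeholder)::

    from pywikibot.tools import deprecate_positionals

    class Sample:

        @deprecate_positionals(since='10.0.0')
        def save(self, text: str, *, force: bool = False):
            """A keyword-only parameter after PEP 3102 syntax."""
            return text, force

    Sample().save('content', True)        # still works, emits FutureWarning
    Sample().save('content', force=True)  # the intended call style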
@wraps(func) diff --git a/scripts/CHANGELOG.rst b/scripts/CHANGELOG.rst index df653b3..4254ddb 100644 --- a/scripts/CHANGELOG.rst +++ b/scripts/CHANGELOG.rst @@ -63,7 +63,7 @@ * Use :pylib:`difflib.get_close_matches()<difflib#difflib.get_close_matches>` to find the closest image match * Add ``-category`` option to work from given category and look for the - lastest file deletion first(:phab:`T372206`) + latest file deletion first (:phab:`T372206`) * Check whether image exists first (:phab:`T372106`)
unlink @@ -1329,7 +1329,7 @@ general ^^^^^^^
-* Add missing commas in string contants +* Add missing commas in string constants
4.1.0 ----- diff --git a/scripts/category_graph.py b/scripts/category_graph.py index 77c6b01..b30655b 100755 --- a/scripts/category_graph.py +++ b/scripts/category_graph.py @@ -170,7 +170,7 @@ self.dot.add_edge(e) # repeat recursively self.scan_level(subcat, level - 1, h) - # track graph's structure to reduse too big graph + # track graph's structure to reduce too big graph self.rev[e.get_destination()].append(e.get_source()) self.fw[e.get_source()].append(e.get_destination())
diff --git a/scripts/checkimages.py b/scripts/checkimages.py index ca695d8..855f0c0 100755 --- a/scripts/checkimages.py +++ b/scripts/checkimages.py @@ -1083,14 +1083,14 @@ for number, m in enumerate(SETTINGS_REGEX.finditer(page_text), start=1): name = str(m[1]) - find_tipe = str(m[2]) + find_type = str(m[2]) find = str(m[3]) imagechanges = str(m[4]) summary = str(m[5]) head = str(m[6]) text = str(m[7]) mexcatched = str(m[8]) - settings = [number, name, find_tipe, find, imagechanges, summary, + settings = [number, name, find_type, find, imagechanges, summary, head, text, mexcatched] self.settings_data.append(settings)
@@ -1371,7 +1371,7 @@ # In every tuple there's a setting configuration for tupla in self.settings_data: name = tupla[1] - find_tipe = tupla[2] + find_type = tupla[2] find = tupla[3] find_list = self.load(find) imagechanges = tupla[4] @@ -1390,7 +1390,7 @@ text = tupla[7] % self.image_name mex_catched = tupla[8] for k in find_list: - if find_tipe.lower() == 'findonly': + if find_type.lower() == 'findonly': search_results = re.findall(fr'{k.lower()}', self.image_check_text.lower()) if search_results \ @@ -1403,7 +1403,7 @@ self.summary_used = summary self.mex_used = mex_catched break - elif find_tipe.lower() == 'find' \ + elif find_type.lower() == 'find' \ and re.findall(fr'{k.lower()}', self.image_check_text.lower()): self.some_problem = True diff --git a/scripts/maintenance/unidata.py b/scripts/maintenance/unidata.py index 1c77aec..f7b5b92 100755 --- a/scripts/maintenance/unidata.py +++ b/scripts/maintenance/unidata.py @@ -70,8 +70,8 @@ data={'text': wikilinks}, timeout=10, ).json() - pased_text = j['parse']['text']['*'] - titles = findall(r'title="[^:]*:(.)', pased_text) + parsed_text = j['parse']['text']['*'] + titles = findall(r'title="[^:]*:(.)', parsed_text) site_excepts = {} for i, original_char in enumerate(chars): title_char = titles[i] diff --git a/scripts/transwikiimport.py b/scripts/transwikiimport.py index 67bcfb5..6aeb52e 100755 --- a/scripts/transwikiimport.py +++ b/scripts/transwikiimport.py @@ -60,8 +60,8 @@ -target -fullhistory -assignknownusers
-Advices -------- +Advice +------
The module gives access to all parameters of the API (and special page) and is compatible to the :mod:`scripts.transferbot` script. diff --git a/tests/dry_api_tests.py b/tests/dry_api_tests.py index 7674625..f7a512b 100755 --- a/tests/dry_api_tests.py +++ b/tests/dry_api_tests.py @@ -52,12 +52,12 @@ def setUp(self): """Initialize the fake requests.""" super().setUp() - self.parms = {'action': 'query', - 'meta': 'userinfo'} + self.params = {'action': 'query', + 'meta': 'userinfo'} self.req = CachedRequest(expiry=1, site=self.basesite, - parameters=self.parms) + parameters=self.params) self.expreq = CachedRequest(expiry=0, site=self.basesite, - parameters=self.parms) + parameters=self.params) self.diffreq = CachedRequest( expiry=1, site=self.basesite, parameters={'action': 'query', 'meta': 'siteinfo'}) @@ -69,14 +69,14 @@ self.deprecated_explicit = CachedRequest( expiry=1, site=self.basesite, action='query', meta='userinfo') self.deprecated_asterisks = CachedRequest( - expiry=1, site=self.basesite, **self.parms) + expiry=1, site=self.basesite, **self.params)
def test_expiry_formats(self): """Test using a timedelta as expiry.""" self.assertEqual(self.req.expiry, CachedRequest(datetime.timedelta(days=1), site=self.basesite, - parameters=self.parms).expiry) + parameters=self.params).expiry)
def test_expired(self): """Test if the request is expired.""" diff --git a/tests/pagegenerators_tests.py b/tests/pagegenerators_tests.py index 374f844..d72c392 100755 --- a/tests/pagegenerators_tests.py +++ b/tests/pagegenerators_tests.py @@ -911,7 +911,7 @@ gf = pagegenerators.GeneratorFactory() gf.handle_arg('-intersect')
- # check wether the generator works for both directions + # check whether the generator works for both directions patterns = ['Python 3.7-dev', 'Pywikibot 7.0.dev'] for index in range(2): with self.subTest(index=index): @@ -919,7 +919,7 @@ gen = gf.getCombinedGenerator(gen=patterns[index - 1]) self.assertEqual(''.join(gen), 'Pyot 7.dev')
- # check wether the generator works for a very long text + # check whether the generator works for a very long text patterns.append('PWB 7+ unittest developed with a very long text.') with self.subTest(patterns=patterns): gf.gens = patterns diff --git a/tests/proofreadpage_tests.py b/tests/proofreadpage_tests.py index 063b289..ab7f065 100755 --- a/tests/proofreadpage_tests.py +++ b/tests/proofreadpage_tests.py @@ -435,7 +435,7 @@
class BS4TestCase(TestCase):
- """Run tests which needs bs4 beeing installed.""" + """Run tests which needs bs4 being installed."""
@classmethod @require_modules('bs4') diff --git a/tests/redirect_bot_tests.py b/tests/redirect_bot_tests.py index 368aba5..9faa27a 100755 --- a/tests/redirect_bot_tests.py +++ b/tests/redirect_bot_tests.py @@ -1,7 +1,7 @@ #!/usr/bin/env python3 """Tests for the redirect.py script.""" # -# (C) Pywikibot team, 2017-2022 +# (C) Pywikibot team, 2017-2024 # # Distributed under the terms of the MIT license. # @@ -59,7 +59,7 @@ w.assert_called_with('No speedy deletion template available.')
def test_with_delete_and_non_existing_sdtemplate(self): - """Test with delete and non-exisitng sdtemplate.""" + """Test with delete and non-existing sdtemplate.""" options = {'delete': True, 'sdtemplate': 'txt {{n|a}} txt'} bot = RedirectTestRobot('broken', **options) with patch.object(Page, 'exists', new=Mock(return_value=False)), \ diff --git a/tests/site_login_logout_tests.py b/tests/site_login_logout_tests.py index beb1770..e08f392 100755 --- a/tests/site_login_logout_tests.py +++ b/tests/site_login_logout_tests.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 """Test for login and logout methods.
-These tests are separated from others because they should not be runned -in paralled CI test tasks. Any logout could lead other parallel tests -to fail. +These tests are separated from others because they should not be run in +paralled CI test tasks. Any logout could lead other parallel tests to +fail. """ # # (C) Pywikibot team, 2022-2024