jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/611641 )
Change subject: Update category for py3
......................................................................
Update category for py3
Bug: T257399
Change-Id: Ic7a8da9b55d769a592fd119539670ef8399f2d21
---
M scripts/category.py
1 file changed, 45 insertions(+), 49 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/category.py b/scripts/category.py
index 85d2df9..b491f99 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -117,7 +117,6 @@
#
# Distributed under the terms of the MIT license.
#
-from __future__ import absolute_import, division, unicode_literals
import codecs
import math
@@ -126,6 +125,7 @@
import re
from operator import methodcaller
+from typing import Optional, Set
import pywikibot
@@ -139,7 +139,6 @@
from pywikibot.cosmetic_changes import moved_links
from pywikibot.tools import (
deprecated_args, deprecated, ModuleDeprecationWrapper, open_archive,
- UnicodeType
)
from pywikibot.tools.formatter import color_format
@@ -178,14 +177,13 @@
self.edit_redirects = edit_redirects
self.create = create
- def determine_type_target(self, page):
+ def determine_type_target(self, page) -> Optional[pywikibot.Page]:
"""
Return page to be categorized by type.
@param page: Existing, missing or redirect page to be processed.
@type page: pywikibot.Page
@return: Page to be categorized.
- @rtype: pywikibot.Page or None
"""
if page.exists():
if page.isRedirectPage():
@@ -222,7 +220,7 @@
.format(page.title(as_link=True)))
return None
- def determine_template_target(self, page):
+ def determine_template_target(self, page) -> pywikibot.Page:
"""
Return template page to be categorized.
@@ -236,7 +234,6 @@
@param page: Page to be processed.
@type page: pywikibot.Page
@return: Page to be categorized.
- @rtype: pywikibot.Page
"""
includeonly = []
if page.namespace() == page.site.namespaces.TEMPLATE:
@@ -268,14 +265,14 @@
return page
-class CategoryDatabase(object):
+class CategoryDatabase:
"""Temporary database saving pages and subcategories for each category.
This prevents loading the category pages over and over again.
"""
- def __init__(self, rebuild=False, filename='category.dump.bz2'):
+ def __init__(self, rebuild=False, filename='category.dump.bz2') -> None:
"""Initializer."""
if not os.path.isabs(filename):
filename = config.datafilepath(filename)
@@ -284,11 +281,11 @@
self.rebuild()
@property
- def is_loaded(self):
+ def is_loaded(self) -> bool:
"""Return whether the contents have been loaded."""
return hasattr(self, 'catContentDB') and hasattr(self, 'superclassDB')
- def _load(self):
+ def _load(self) -> None:
if not self.is_loaded:
try:
if config.verbose_output:
@@ -306,12 +303,12 @@
# If something goes wrong, just rebuild the database
self.rebuild()
- def rebuild(self):
+ def rebuild(self) -> None:
"""Rebuild the dabatase."""
self.catContentDB = {}
self.superclassDB = {}
- def getSubcats(self, supercat):
+ def getSubcats(self, supercat) -> Set[pywikibot.Category]:
"""Return the list of subcategories for a given supercategory.
Saves this list in a temporary database so that it won't be loaded
@@ -328,7 +325,7 @@
self.catContentDB[supercat] = (subcatset, articleset)
return subcatset
- def getArticles(self, cat):
+ def getArticles(self, cat) -> Set[pywikibot.Page]:
"""Return the list of pages for a given category.
Saves this list in a temporary database so that it won't be loaded
@@ -345,7 +342,7 @@
self.catContentDB[cat] = (subcatset, articleset)
return articleset
- def getSupercats(self, subcat):
+ def getSupercats(self, subcat) -> Set[pywikibot.Category]:
"""Return the supercategory (or a set of) for a given subcategory."""
self._load()
# if we already know which subcategories exist here.
@@ -357,7 +354,7 @@
self.superclassDB[subcat] = supercatset
return supercatset
- def dump(self, filename=None):
+ def dump(self, filename=None) -> None:
"""Save the dictionaries to disk if not empty.
Pickle the contents of the dictionaries superclassDB and catContentDB
@@ -399,9 +396,9 @@
@deprecated_args(editSummary='comment', dry=None)
def __init__(self, generator, newcat=None, sort_by_last_name=False,
- create=False, comment='', follow_redirects=False):
+ create=False, comment='', follow_redirects=False) -> None:
"""Initializer."""
- super(CategoryAddBot, self).__init__()
+ super().__init__()
self.generator = generator
self.newcat = newcat
self.sort = sort_by_last_name
@@ -410,7 +407,7 @@
self.always = False
self.comment = comment
- def sorted_by_last_name(self, catlink, pagelink):
+ def sorted_by_last_name(self, catlink, pagelink) -> pywikibot.Page:
"""Return a Category with key that sorts persons by their last name.
Parameters: catlink - The Category to be linked.
@@ -441,7 +438,7 @@
else:
return pywikibot.Page(site, catlink.title())
- def treat(self, page):
+ def treat(self, page) -> None:
"""Process one page."""
# find correct categorization target
page = self.determine_type_target(page)
@@ -531,7 +528,7 @@
deletion_comment=DELETION_COMMENT_AUTOMATIC,
move_comment=None,
wikibase=True, allow_split=False, move_together=False,
- keep_sortkey=None):
+ keep_sortkey=None) -> None:
"""Store all given parameters in the objects attributes.
@param oldcat: The move source.
@@ -621,7 +618,7 @@
'category-removing',
template_vars)
# Set deletion reason for category page and talkpage.
- if isinstance(deletion_comment, UnicodeType):
+ if isinstance(deletion_comment, str):
# Deletion comment is set to given string.
self.deletion_comment = deletion_comment
elif deletion_comment == self.DELETION_COMMENT_SAME_AS_EDIT_COMMENT:
@@ -640,7 +637,7 @@
self.site, 'category-was-disbanded')
self.move_comment = move_comment if move_comment else self.comment
- def run(self):
+ def run(self) -> None:
"""
The main bot function that does all the work.
@@ -704,7 +701,7 @@
((self.newcat and self.move_oldcat) or not self.newcat):
self._delete(can_move_page, can_move_talk)
- def _delete(self, moved_page, moved_talk):
+ def _delete(self, moved_page, moved_talk) -> None:
"""Private function to delete the category page and its talk page.
Do not use this function from outside the class. Automatically marks
@@ -723,7 +720,7 @@
self.oldtalk.delete(self.deletion_comment, not self.batch,
mark=True)
- def _change(self, gen):
+ def _change(self, gen) -> None:
"""
Private function to move category contents.
@@ -751,7 +748,7 @@
sort_key=self.keep_sortkey)
@staticmethod
- def check_move(name, old_page, new_page):
+ def check_move(name, old_page, new_page) -> bool:
"""Return if the old page can be safely moved to the new page.
@param name: Title of the new page
@@ -762,8 +759,6 @@
@type new_page: pywikibot.page.BasePage
        @return: True if it is possible to move the page, False if the page
            move is not possible
- @rtype: bool
-
"""
move_possible = True
if new_page and new_page.exists():
@@ -779,7 +774,7 @@
move_possible = False
return move_possible
- def _movecat(self):
+ def _movecat(self) -> None:
"""Private function to move the category page by copying its contents.
Note that this method of moving category pages by copying over the raw
@@ -800,7 +795,7 @@
self.newcat.text = self.oldcat.text
self._strip_cfd_templates(summary)
- def _strip_cfd_templates(self, summary=None, commit=True):
+ def _strip_cfd_templates(self, summary=None, commit=True) -> None:
"""Private function to strip out CFD templates from the new category.
The new category is saved.
@@ -826,7 +821,7 @@
if commit:
self.newcat.save(summary=summary)
- def _movetalk(self):
+ def _movetalk(self) -> None:
"""Private function to move the category talk page.
Do not use this function from outside the class.
@@ -837,7 +832,7 @@
'title': cat_name_only})
self.oldtalk.move(self.newtalk.title(), comment)
- def _update_wikibase_item(self):
+ def _update_wikibase_item(self) -> None:
"""Private function to update the Wikibase item for the category.
Do not use this function from outside the class.
@@ -854,7 +849,7 @@
'title': cat_name_only})
item.setSitelink(self.newcat, summary=comment)
- def _hist(self):
+ def _hist(self) -> None:
"""Private function to copy the history of the to-be-deleted category.
Do not use this function from outside the class. It adds a table with
@@ -869,7 +864,7 @@
{'oldcat': self.oldcat.title()})
self.newtalk.save(comment)
- def _makecat(self, var):
+ def _makecat(self, var) -> pywikibot.Category:
"""Private helper function to get a Category object.
Checks if the instance given is a Category object and returns it.
@@ -901,9 +896,9 @@
def __init__(
self, catTitle, batchMode=False, editSummary='',
useSummaryForDeletion=CategoryMoveRobot.DELETION_COMMENT_AUTOMATIC,
- titleRegex=None, inPlace=False, pagesonly=False):
+ titleRegex=None, inPlace=False, pagesonly=False) -> None:
"""Initializer."""
- super(CategoryRemoveRobot, self).__init__(
+ super().__init__(
oldcat=catTitle,
batch=batchMode,
comment=editSummary,
@@ -913,13 +908,13 @@
pagesonly=pagesonly)
-class CategoryListifyRobot(object):
+class CategoryListifyRobot:
"""Create a list containing all of the members in a category."""
def __init__(self, catTitle, listTitle, editSummary, append=False,
overwrite=False, showImages=False, subCats=False,
- talkPages=False, recurse=False, prefix='*'):
+ talkPages=False, recurse=False, prefix='*') -> None:
"""Initializer."""
self.editSummary = editSummary
self.append = append
@@ -933,7 +928,7 @@
self.recurse = recurse
self.prefix = prefix
- def run(self):
+ def run(self) -> None:
"""Start bot."""
setOfArticles = set(self.cat.articles(recurse=self.recurse))
if self.subCats:
@@ -1010,7 +1005,8 @@
@type: str
"""
- def __init__(self, cat_title, cat_db, namespaces=None, comment=None):
+ def __init__(self, cat_title, cat_db, namespaces=None, comment=None
+ ) -> None:
"""Initializer."""
self.cat_title = cat_title
self.cat_db = cat_db
@@ -1025,7 +1021,7 @@
self.cat.articles(namespaces=namespaces)))
@deprecated_args(article='member')
- def move_to_category(self, member, original_cat, current_cat):
+ def move_to_category(self, member, original_cat, current_cat) -> None:
"""
Ask whether to move it to one of the sub- or super-categories.
@@ -1047,7 +1043,7 @@
class CatContextOption(ContextOption):
"""An option to show more and more context and categories."""
- def output_range(self, start, end):
+ def output_range(self, start, end) -> None:
"""Output a section and categories from the text."""
pywikibot.output(self.text[start:end] + '...')
@@ -1064,7 +1060,7 @@
class CatIntegerOption(IntegerOption):
"""An option allowing a range of integers."""
- def list_categories(self, cat_list, prefix=''):
+ def list_categories(self, cat_list, prefix='') -> None:
"""
Output categories in one or two columns.
@@ -1241,19 +1237,19 @@
self.move_to_category(member, original_cat,
subcatlist[choice[1]])
- def teardown(self):
+ def teardown(self) -> None:
"""Cleanups after run operation."""
if self._generator_completed and not self._treat_counter:
pywikibot.output('There are no pages or files in category {}.'
.format(self.cat_title))
- def treat(self, page):
+ def treat(self, page) -> None:
"""Process page."""
pywikibot.output('')
self.move_to_category(page, self.cat, self.cat)
-class CategoryTreeRobot(object):
+class CategoryTreeRobot:
"""Robot to create tree overviews of the category structure.
@@ -1267,7 +1263,7 @@
the tree to stdout.
"""
- def __init__(self, catTitle, catDB, filename=None, maxDepth=10):
+ def __init__(self, catTitle, catDB, filename=None, maxDepth=10) -> None:
"""Initializer."""
self.catTitle = catTitle
self.catDB = catDB
@@ -1277,7 +1273,7 @@
self.maxDepth = maxDepth
self.site = pywikibot.Site()
- def treeview(self, cat, currentDepth=0, parent=None):
+ def treeview(self, cat, currentDepth=0, parent=None) -> str:
"""Return a tree view of all subcategories of cat.
The multi-line string contains a tree view of all subcategories of cat,
@@ -1322,7 +1318,7 @@
result += '#' * (currentDepth + 1) + ' [...]\n'
return result
- def run(self):
+ def run(self) -> None:
"""Handle the multi-line string generated by treeview.
After string was generated by treeview it is either printed to the
@@ -1340,7 +1336,7 @@
pywikibot.stdout(tree)
-def main(*args):
+def main(*args) -> None:
"""
Process command line arguments and invoke bot.
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/611641
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ic7a8da9b55d769a592fd119539670ef8399f2d21
Gerrit-Change-Number: 611641
Gerrit-PatchSet: 2
Gerrit-Owner: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/611425 )
Change subject: [4.0] Remove Python 2 related code in pagegenerators.py
......................................................................
[4.0] Remove Python 2 related code in pagegenerators.py
Change-Id: I376d19b712b27db7a7d56601c725f5b8391de4d7
---
M pywikibot/pagegenerators.py
1 file changed, 26 insertions(+), 45 deletions(-)
Approvals:
Zhuyifei1999: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 1328bd4..a37e88b 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -18,8 +18,6 @@
#
# Distributed under the terms of the MIT license.
#
-from __future__ import absolute_import, division, unicode_literals
-
import calendar
import codecs
import datetime
@@ -30,6 +28,7 @@
from datetime import timedelta
from functools import partial
+from itertools import zip_longest
from requests.exceptions import ReadTimeout
from warnings import warn
@@ -57,12 +56,7 @@
UnknownExtension,
)
from pywikibot.proofreadpage import ProofreadPage
-from pywikibot.tools import PY2, UnicodeType
-if not PY2:
- from itertools import zip_longest
-else:
- from itertools import izip_longest as zip_longest
_logger = 'pagegenerators'
@@ -423,7 +417,7 @@
filter_unique, key=lambda page: '{}:{}:{}'.format(*page._cmpkey()))
-class GeneratorFactory(object):
+class GeneratorFactory:
"""Process command line arguments and return appropriate page generator.
@@ -674,7 +668,7 @@
"""
total = None
start = start or None # because start might be an empty string
- if isinstance(start, UnicodeType) and len(start) == 8:
+ if isinstance(start, str) and len(start) == 8:
start = pywikibot.Timestamp.strptime(start, '%Y%m%d')
elif start is not None:
try:
@@ -1209,11 +1203,7 @@
if value == '':
value = None
- try:
- handler = getattr(self, '_handle_' + arg[1:], None)
- except UnicodeEncodeError:
- # getattr() on py2 does implicit unicode -> str
- return False
+ handler = getattr(self, '_handle_' + arg[1:], None)
if handler:
handler_result = handler(value)
if isinstance(handler_result, bool):
@@ -1545,8 +1535,7 @@
return
return
kwargs['startprefix'] = start
- for a in category.articles(**kwargs):
- yield a
+ yield from category.articles(**kwargs)
@deprecated_args(step=None)
@@ -1633,7 +1622,7 @@
def PagesFromTitlesGenerator(iterable, site=None):
"""
- Generate pages from the titles (unicode strings) yielded by iterable.
+ Generate pages from the titles (strings) yielded by iterable.
@param site: Site for generator results.
@type site: L{pywikibot.site.BaseSite}
@@ -1641,7 +1630,7 @@
if site is None:
site = pywikibot.Site()
for title in iterable:
- if not isinstance(title, UnicodeType):
+ if not isinstance(title, str):
break
yield pywikibot.Page(pywikibot.Link(title, site))
@@ -1783,7 +1772,7 @@
% page)
-class ItemClaimFilter(object):
+class ItemClaimFilter:
"""Item claim filter."""
@@ -1876,7 +1865,7 @@
% page)
-class RegexFilter(object):
+class RegexFilter:
"""Regex filter."""
@@ -1902,7 +1891,7 @@
regex = [regex]
# Test if regex is already compiled.
# We assume that all list components have the same type
- if isinstance(regex[0], UnicodeType):
+ if isinstance(regex[0], str):
regex = [re.compile(r, flag) for r in regex]
return regex
@@ -2093,7 +2082,7 @@
@type show_filtered: bool
"""
if timestamp:
- if isinstance(timestamp, UnicodeType):
+ if isinstance(timestamp, str):
ts = pywikibot.Timestamp.fromtimestampformat(timestamp)
else:
ts = timestamp
@@ -2190,8 +2179,8 @@
else:
break
pywikibot.sleep(sleep_duration)
- for item in list(filtered_generator())[::-1]:
- yield item
+
+ yield from list(filtered_generator())[::-1]
@deprecated_args(pageNumber='groupsize', step='groupsize', lookahead=None)
@@ -2213,12 +2202,11 @@
if len(sites[site]) >= groupsize:
# if this site is at the groupsize, process it
group = sites.pop(site)
- for i in site.preloadpages(group, groupsize):
- yield i
+ yield from site.preloadpages(group, groupsize)
+
for site, pages in sites.items():
# process any leftover sites that never reached the groupsize
- for i in site.preloadpages(pages, groupsize):
- yield i
+ yield from site.preloadpages(pages, groupsize)
@deprecated_args(step='groupsize')
@@ -2232,8 +2220,7 @@
if not page_count:
return
- for page in PreloadingGenerator(generator, page_count):
- yield page
+ yield from PreloadingGenerator(generator, page_count)
@deprecated_args(step='groupsize')
@@ -2256,13 +2243,12 @@
# if this site is at the groupsize, process it
group = sites.pop(site)
repo = site.data_repository()
- for i in repo.preload_entities(group, groupsize):
- yield i
+ yield from repo.preload_entities(group, groupsize)
+
for site, pages in sites.items():
# process any leftover sites that never reached the groupsize
repo = site.data_repository()
- for i in repo.preload_entities(pages, groupsize):
- yield i
+ yield from repo.preload_entities(pages, groupsize)
@deprecated_args(number='total', step=None, repeat=None)
@@ -2667,7 +2653,7 @@
# following classes just ported from version 1 without revision; not tested
-class GoogleSearchPageGenerator(object):
+class GoogleSearchPageGenerator:
"""
Page generator using Google search results.
@@ -2719,8 +2705,7 @@
'To install, please run: pip install google.')
exit(1)
pywikibot.warning('Please read http://www.google.com/accounts/TOS')
- for url in google.search(query):
- yield url
+ yield from google.search(query)
def __iter__(self):
"""Iterate results."""
@@ -2845,10 +2830,7 @@
def __next__(self):
"""Get next Page."""
while True:
- try:
- entry = next(self.parser)
- except StopIteration:
- raise
+ entry = next(self.parser)
if self.skipping:
if entry.title < self.start:
continue
@@ -2867,7 +2849,7 @@
def __next__(self):
"""Get next Page from dump and remove the text."""
- page = super(XMLDumpPageGenerator, self).__next__()
+ page = super().__next__()
del page.text
return page
@@ -3002,7 +2984,7 @@
return (pywikibot.ItemPage(repo, item['id']) for item in data)
-class PetScanPageGenerator(object):
+class PetScanPageGenerator:
"""Queries PetScan (https://petscan.wmflabs.org/) to generate pages."""
def __init__(self, categories, subset_combination=True, namespaces=None,
@@ -3075,8 +3057,7 @@
'received {0} status from {1}'.format(req.status, req.uri))
j = json.loads(req.text)
raw_pages = j['*'][0]['a']['*']
- for raw_page in raw_pages:
- yield raw_page
+ yield from raw_pages
def __iter__(self):
for raw_page in self.query():
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/611425
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I376d19b712b27db7a7d56601c725f5b8391de4d7
Gerrit-Change-Number: 611425
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Zhuyifei1999 <zhuyifei1999(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/611295 )
Change subject: [4.0] remove Python 2 code parts in page/__init__.py
......................................................................
[4.0] remove Python 2 code parts in page/__init__.py
Change-Id: I60934fe5117e56a1cd52df21e13e6b003f7aa2f5
---
M pywikibot/page/__init__.py
1 file changed, 77 insertions(+), 111 deletions(-)
Approvals:
JJMC89: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index 04621d9..c8f678b 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -16,8 +16,6 @@
#
# Distributed under the terms of the MIT license.
#
-from __future__ import absolute_import, division, unicode_literals
-
import hashlib
import logging
import os.path
@@ -26,11 +24,10 @@
import unicodedata
from collections import Counter, defaultdict, namedtuple, OrderedDict
-try:
- from collections.abc import MutableMapping
-except ImportError:
- from collections import MutableMapping
+from collections.abc import MutableMapping
+from html.entities import name2codepoint
from itertools import chain
+from urllib.parse import quote_from_bytes, unquote_to_bytes
from warnings import warn
import pywikibot
@@ -47,27 +44,14 @@
from pywikibot.site import DataSite, Namespace, need_version
from pywikibot.tools import (
classproperty, compute_file_hash,
- UnicodeMixin, ComparableMixin, DotReadableDict,
+ ComparableMixin, DotReadableDict,
deprecated, deprecate_arg, deprecated_args, issue_deprecation_warning,
add_full_name, manage_wrapping, suppress_warnings,
- ModuleDeprecationWrapper as _ModuleDeprecationWrapper, PY2,
- first_upper, redirect_func, remove_last_args, UnicodeType,
- StringTypes
+ ModuleDeprecationWrapper as _ModuleDeprecationWrapper,
+ first_upper, redirect_func, remove_last_args
)
from pywikibot.tools import is_IP
-if not PY2:
- from html.entities import name2codepoint
- from urllib.parse import quote_from_bytes, unquote_to_bytes
-else:
- if __debug__ and not PY2:
- unichr = NotImplemented # pyflakes workaround
-
- chr = unichr
-
- from htmlentitydefs import name2codepoint
- from urllib import quote as quote_from_bytes, unquote as unquote_to_bytes
-
PROTOCOL_REGEX = r'\Ahttps?://'
@@ -144,7 +128,7 @@
# Page objects (defined here) represent the page itself, including its
# contents.
-class BasePage(UnicodeMixin, ComparableMixin):
+class BasePage(ComparableMixin):
"""
BasePage: Base object for a MediaWiki page.
@@ -301,9 +285,9 @@
def title(self, underscore=False, with_ns=True,
with_section=True, as_url=False, as_link=False,
allow_interwiki=True, force_interwiki=False, textlink=False,
- as_filename=False, insite=None, without_brackets=False):
+ as_filename=False, insite=None, without_brackets=False) -> str:
"""
- Return the title of this Page, as a Unicode string.
+ Return the title of this Page, as a string.
@param underscore: (not used with as_link) if true, replace all ' '
characters with '_'
@@ -401,21 +385,13 @@
section = None
return section
- def __unicode__(self):
- """Return a unicode string representation."""
+ def __str__(self):
+ """Return a string representation."""
return self.title(as_link=True, force_interwiki=True)
def __repr__(self):
"""Return a more complete string representation."""
- if not PY2:
- title = repr(self.title())
- else:
- try:
- title = self.title().encode(config.console_encoding)
- except UnicodeEncodeError:
- # okay console encoding didn't work, at least try something
- title = self.title().encode('unicode_escape')
- return '{0}({1})'.format(self.__class__.__name__, title)
+ return '{}({!r})'.format(self.__class__.__name__, self.title())
def _cmpkey(self):
"""
@@ -649,7 +625,7 @@
@param value: New value or None
@type value: basestring
"""
- self._text = None if value is None else UnicodeType(value)
+ self._text = None if value is None else str(value)
if hasattr(self, '_raw_extracted_templates'):
del self._raw_extracted_templates
@@ -708,14 +684,12 @@
return self.properties(force=force).get('defaultsort')
@deprecate_arg('refresh', 'force')
- def expand_text(self, force=False, includecomments=False):
+ def expand_text(self, force=False, includecomments=False) -> str:
"""Return the page text with all templates and parser words expanded.
@param force: force updating from the live site
@param includecomments: Also strip comments if includecomments
parameter is not True.
-
- @rtype unicode or None
"""
if not hasattr(self, '_expanded_text') or (
self._expanded_text is None) or force:
@@ -1110,7 +1084,7 @@
"""
return self.site.page_restrictions(self)
- def applicable_protections(self):
+ def applicable_protections(self) -> set:
"""
Return the protection types allowed for that page.
@@ -1121,8 +1095,7 @@
It is possible, that it returns an empty set, but only if original
protection types were removed.
- @return: set of unicode
- @rtype: set
+ @return: set of str
"""
# New API since commit 32083235eb332c419df2063cf966b3400be7ee8a
if self.site.mw_version >= '1.25wmf14':
@@ -1685,7 +1658,7 @@
Iterate all external URLs (not interwiki links) from this page.
@param total: iterate no more than this number of pages in total
- @return: a generator that yields unicode objects containing URLs.
+ @return: a generator that yields str objects containing URLs.
@rtype: generator
"""
return self.site.page_extlinks(self, total=total)
@@ -1897,14 +1870,15 @@
DEPRECATED: Use Page.oldest_revision.
- @rtype: tuple(username, Timestamp)
+ @return: tuple of username and timestamp in isoformat
+ @rtype: tuple[str]
"""
result = self.oldest_revision
- return result.user, UnicodeType(result.timestamp.isoformat())
+ return result.user, result.timestamp.isoformat()
@deprecated('contributors() or revisions()', since='20150206')
@deprecated_args(limit='total')
- def getLatestEditors(self, total=1):
+ def getLatestEditors(self, total=1) -> list:
"""
Get a list of revision information of the last total edits.
@@ -1912,11 +1886,10 @@
@param total: iterate no more than this number of revisions in total
@return: list of dict, each dict containing the username and Timestamp
- @rtype: list
"""
return [
{'user': rev.user,
- 'timestamp': UnicodeType(rev.timestamp.isoformat())}
+ 'timestamp': rev.timestamp.isoformat()}
for rev in self.revisions(total=total)]
def merge_history(self, dest, timestamp=None, reason=None):
@@ -2344,7 +2317,7 @@
if not title:
raise ValueError('Title must be specified and not empty '
'if source is a Site.')
- super(Page, self).__init__(source, title, ns)
+ super().__init__(source, title, ns)
@property
def raw_extracted_templates(self):
@@ -2446,7 +2419,7 @@
@param kwargs: Arguments which are used for saving the page directly
afterwards, like 'summary' for edit summary.
"""
- if isinstance(target_page, UnicodeType):
+ if isinstance(target_page, str):
target_page = pywikibot.Page(self.site, target_page)
elif self.site != target_page.site:
raise pywikibot.InterwikiRedirectPage(self, target_page)
@@ -2495,7 +2468,7 @@
def __init__(self, source, title=''):
"""Initializer."""
self._file_revisions = {} # dictionary to cache File history.
- super(FilePage, self).__init__(source, title, 6)
+ super().__init__(source, title, 6)
if self.namespace() != 6:
raise ValueError("'%s' is not in the file namespace!" % title)
@@ -2555,7 +2528,7 @@
def getImagePageHtml(self):
"""
- Download the file page, and return the HTML, as a unicode string.
+ Download the file page, and return the HTML, as a string.
Caches the HTML code, so that if you run this method twice on the
same FilePage object, the page will only be downloaded once.
@@ -2652,24 +2625,24 @@
return self.latest_file_info.sha1
@deprecated('FilePage.oldest_file_info.user', since='20150206')
- def getFirstUploader(self):
+ def getFirstUploader(self) -> list:
"""
Return a list with first uploader of the FilePage and timestamp.
For compatibility with compat only.
"""
return [self.oldest_file_info.user,
- UnicodeType(self.oldest_file_info.timestamp.isoformat())]
+ self.oldest_file_info.timestamp.isoformat()]
@deprecated('FilePage.latest_file_info.user', since='20141106')
- def getLatestUploader(self):
+ def getLatestUploader(self) -> list:
"""
Return a list with latest uploader of the FilePage and timestamp.
For compatibility with compat only.
"""
return [self.latest_file_info.user,
- UnicodeType(self.latest_file_info.timestamp.isoformat())]
+ self.latest_file_info.timestamp.isoformat()]
@deprecated('FilePage.get_file_history()', since='20141106')
def getFileVersionHistory(self):
@@ -2849,7 +2822,7 @@
@deprecated_args(
forceInterwiki=None, textlink=None, noInterwiki=None,
sortKey='sort_key')
- def aslink(self, sort_key=None):
+ def aslink(self, sort_key=None) -> str:
"""
Return a link to place a page in this Category.
@@ -2858,7 +2831,7 @@
@param sort_key: The sort key for the article to be placed in this
Category; if omitted, default sort key is used.
- @type sort_key: (optional) unicode
+ @type sort_key: (optional) str
"""
key = sort_key or self.sortKey
if key is not None:
@@ -3507,7 +3480,7 @@
Each tuple is composed of a pywikibot.Page object,
the revision id (int), the edit timestamp (as a pywikibot.Timestamp
- object), and the comment (unicode).
+ object), and the comment (str).
Pages returned are not guaranteed to be unique.
@param total: limit result to this number of pages
@@ -3560,7 +3533,7 @@
Yield tuples describing files uploaded by this user.
Each tuple is composed of a pywikibot.Page, the timestamp (str in
- ISO8601 format), comment (unicode) and a bool for pageid > 0.
+ ISO8601 format), comment (str) and a bool for pageid > 0.
Pages returned are not guaranteed to be unique.
@param total: limit result to this number of pages
@@ -3570,10 +3543,9 @@
return
for item in self.logevents(logtype='upload', total=total):
yield (item.page(),
- UnicodeType(item.timestamp()),
+ str(item.timestamp()),
item.comment(),
- item.pageid() > 0
- )
+ item.pageid() > 0)
@property
def is_thankable(self):
@@ -3600,7 +3572,7 @@
"""
def __init__(self, data=None):
- super(LanguageDict, self).__init__()
+ super().__init__()
self._data = {}
if data:
self.update(data)
@@ -3647,7 +3619,7 @@
def normalizeData(cls, data):
norm_data = {}
for key, value in data.items():
- if isinstance(value, UnicodeType):
+ if isinstance(value, str):
norm_data[key] = {'language': key, 'value': value}
else:
norm_data[key] = value
@@ -3679,7 +3651,7 @@
"""
def __init__(self, data=None):
- super(AliasesDict, self).__init__()
+ super().__init__()
self._data = {}
if data:
self.update(data)
@@ -3720,7 +3692,7 @@
if isinstance(values, list):
strings = []
for value in values:
- if isinstance(value, UnicodeType):
+ if isinstance(value, str):
strings.append({'language': key, 'value': value})
else:
strings.append(value)
@@ -3750,7 +3722,7 @@
"""A structure holding claims for a Wikibase entity."""
def __init__(self, repo):
- super(ClaimCollection, self).__init__()
+ super().__init__()
self.repo = repo
self._data = {}
@@ -3844,7 +3816,7 @@
@param repo: the Wikibase site on which badges are defined
@type repo: pywikibot.site.DataSite
"""
- super(SiteLinkCollection, self).__init__()
+ super().__init__()
self.repo = repo
self._data = {}
if data:
@@ -3883,7 +3855,7 @@
@type val: dict or str
@rtype: pywikibot.page.SiteLink
"""
- if isinstance(val, UnicodeType):
+ if isinstance(val, str):
val = SiteLink(val, key)
else:
val = SiteLink.fromJSON(val, self.repo)
@@ -3999,7 +3971,7 @@
return data
-class WikibaseEntity(object):
+class WikibaseEntity:
"""
The base interface for Wikibase entities.
@@ -4342,7 +4314,7 @@
'WikibasePage.lastrevid', 'latest_revision_id',
since='20150607')
name = '_revid'
- return super(WikibasePage, self).__getattribute__(name)
+ return super().__getattribute__(name)
def __setattr__(self, attr, value):
"""Attribute setter. Deprecates lastrevid."""
@@ -4351,7 +4323,7 @@
'WikibasePage.lastrevid', 'latest_revision_id',
since='20150607')
attr = '_revid'
- return super(WikibasePage, self).__setattr__(attr, value)
+ return super().__setattr__(attr, value)
def __delattr__(self, attr):
"""Attribute deleter. Deprecates lastrevid."""
@@ -4360,7 +4332,7 @@
'WikibasePage.lastrevid', 'latest_revision_id',
since='20150607')
attr = '_revid'
- return super(WikibasePage, self).__delattr__(attr)
+ return super().__delattr__(attr)
def namespace(self):
"""
@@ -4488,8 +4460,7 @@
else:
baserevid = None
- super(WikibasePage, self).editEntity(
- data, baserevid=baserevid, **kwargs)
+ super().editEntity(data, baserevid=baserevid, **kwargs)
def editLabels(self, labels, **kwargs):
"""
@@ -4620,7 +4591,7 @@
ns = site.item_namespace
# Special case for empty item.
if title is None or title == '-1':
- super(ItemPage, self).__init__(site, '-1', ns=ns)
+ super().__init__(site, '-1', ns=ns)
assert self.id == '-1'
return
@@ -4628,7 +4599,7 @@
if not title:
raise pywikibot.InvalidTitle("Item's title cannot be empty")
- super(ItemPage, self).__init__(site, title, ns=ns)
+ super().__init__(site, title, ns=ns)
assert self.id == self._link.title
@@ -4714,7 +4685,7 @@
del self._title
del self._site
- return super(ItemPage, self).title(**kwargs)
+ return super().title(**kwargs)
def getID(self, numeric=False, force=False):
"""
@@ -4727,7 +4698,7 @@
"""
if not hasattr(self, 'id') or force:
self.get(force=force)
- return super(WikibasePage, self).getID(numeric=numeric)
+ return super().getID(numeric=numeric)
@classmethod
def fromPage(cls, page, lazy_load=False):
@@ -4821,7 +4792,7 @@
entity and their modifying may indirectly cause unwanted change to
the live content
"""
- data = super(ItemPage, self).get(force, *args, **kwargs)
+ data = super().get(force, *args, **kwargs)
if self.isRedirectPage() and not get_redirect:
raise pywikibot.IsRedirectPage(self)
@@ -4830,7 +4801,7 @@
def getRedirectTarget(self):
"""Return the redirect target for this page."""
- target = super(ItemPage, self).getRedirectTarget()
+ target = super().getRedirectTarget()
cmodel = target.content_model
if cmodel != 'wikibase-item':
raise pywikibot.Error('%s has redirect target %s with content '
@@ -4947,7 +4918,7 @@
is not redirect.
@type force: bool
"""
- if isinstance(target_page, UnicodeType):
+ if isinstance(target_page, str):
target_page = pywikibot.ItemPage(self.repo, target_page)
elif self.repo != target_page.repo:
raise pywikibot.InterwikiRedirectPage(self, target_page)
@@ -4968,7 +4939,7 @@
if hasattr(self, '_content') and not hasattr(self, '_isredir'):
self._isredir = self.id != self._content.get('id', self.id)
return self._isredir
- return super(ItemPage, self).isRedirectPage()
+ return super().isRedirectPage()
# alias for backwards compatibility
@@ -4977,7 +4948,7 @@
since='20170222')
-class Property(object):
+class Property:
"""
A Wikibase property.
@@ -4993,15 +4964,15 @@
types = {'wikibase-item': ItemPage,
# 'wikibase-property': PropertyPage, must be declared first
- 'string': UnicodeType,
+ 'string': str,
'commonsMedia': FilePage,
'globe-coordinate': pywikibot.Coordinate,
- 'url': UnicodeType,
+ 'url': str,
'time': pywikibot.WbTime,
'quantity': pywikibot.WbQuantity,
'monolingualtext': pywikibot.WbMonolingualText,
- 'math': UnicodeType,
- 'external-id': UnicodeType,
+ 'math': str,
+ 'external-id': str,
'geo-shape': pywikibot.WbGeoShape,
'tabular-data': pywikibot.WbTabularData,
}
@@ -5675,7 +5646,7 @@
@rtype: bool
"""
if (isinstance(self.target, WikibasePage)
- and isinstance(value, UnicodeType)):
+ and isinstance(value, str)):
return self.target.id == value
if (isinstance(self.target, pywikibot.WbTime)
@@ -5683,7 +5654,7 @@
return self.target.year == int(value)
if (isinstance(self.target, pywikibot.Coordinate)
- and isinstance(value, UnicodeType)):
+ and isinstance(value, str)):
coord_args = [float(x) for x in value.split(',')]
if len(coord_args) >= 3:
precision = coord_args[2]
@@ -5699,7 +5670,7 @@
and abs(self.target.lon - coord_args[1]) <= precision)
if (isinstance(self.target, pywikibot.WbMonolingualText)
- and isinstance(value, UnicodeType)):
+ and isinstance(value, str)):
return self.target.text == value
return self.target == value
@@ -5969,14 +5940,14 @@
return self.__dict__ == other.__dict__
-class BaseLink(UnicodeMixin, ComparableMixin):
+class BaseLink(ComparableMixin):
"""
A MediaWiki link (local or interwiki).
Has the following attributes:
- - title: The title of the page linked to (unicode); does not include
+ - title: The title of the page linked to (str); does not include
namespace or section
- namespace: The Namespace object of the page linked to
- site: The Site object for the wiki linked to
@@ -5985,13 +5956,12 @@
# Components used for __repr__
_items = ('title', 'namespace', '_sitekey')
- def __init__(self, title, namespace=None, site=None):
+ def __init__(self, title: str, namespace=None, site=None):
"""
Initializer.
- @param title: the title of the page linked to (unicode); does not
+ @param title: the title of the page linked to (str); does not
include namespace or section
- @type title: unicode
@param namespace: the namespace of the page linked to. Can be provided
as either an int, a Namespace instance or a str, defaults to the
MAIN namespace.
@@ -6018,7 +5988,7 @@
def __repr__(self):
"""Return a more complete string representation."""
assert isinstance(self._items, tuple)
- assert all(isinstance(item, StringTypes) for item in self._items)
+ assert all(isinstance(item, (bytes, str)) for item in self._items)
attrs = ('{0!r}'.format(getattr(self, attr)) for attr in self._items)
return 'pywikibot.page.{0}({1})'.format(
@@ -6033,7 +6003,7 @@
default_nskey = Namespace.MAIN
self._nskey = self._nskey or default_nskey
- if isinstance(self._nskey, UnicodeType):
+ if isinstance(self._nskey, str):
ns = self.site.namespaces.lookup_name(self._nskey)
if ns:
return ns
@@ -6146,12 +6116,8 @@
"""
return (self.site, self.namespace, self.title)
- def __unicode__(self):
- """
- Return a unicode string representation.
-
- @rtype: str
- """
+ def __str__(self) -> str:
+ """Return a str string representation."""
return self.astext()
def __hash__(self):
@@ -6184,9 +6150,9 @@
Extends BaseLink by the following attributes:
- - section: The section of the page linked to (unicode or None); this
+ - section: The section of the page linked to (str or None); this
contains any text following a '#' character in the title
- - anchor: The anchor text (unicode or None); this contains any text
+ - anchor: The anchor text (str or None); this contains any text
following a '|' character inside the link
"""
@@ -6503,7 +6469,7 @@
"""
if onsite is None:
onsite = self._source
- text = super(Link, self).astext(onsite)
+ text = super().astext(onsite)
if self.section:
text = '{0}#{1}]]'.format(text.rstrip(']'), self.section)
@@ -6649,7 +6615,7 @@
if ':' in title:
site, namespace, title = SiteLink._parse_namespace(title, site)
- super(SiteLink, self).__init__(title, namespace, site)
+ super().__init__(title, namespace, site)
badges = badges or []
self._badges = set(badges)
@@ -6856,7 +6822,7 @@
@raise UnicodeError: Could not convert using any encoding.
"""
- if isinstance(encodings, UnicodeType):
+ if isinstance(encodings, str):
encodings = [encodings]
elif isinstance(encodings, pywikibot.site.BaseSite):
# create a list of all possible encodings for both hint sites
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/611295
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I60934fe5117e56a1cd52df21e13e6b003f7aa2f5
Gerrit-Change-Number: 611295
Gerrit-PatchSet: 4
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: JJMC89 <JJMC89.Wikimedia(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/610392 )
Change subject: [4.0] replace str('string') by 'string'
......................................................................
[4.0] replace str('string') by 'string'
Change-Id: I2ac37eb9827b741708cedc08d82092a34f4a874f
---
M pywikibot/config2.py
M pywikibot/page/__init__.py
M pywikibot/userinterfaces/terminal_interface_win32.py
M pywikibot/xmlreader.py
M tests/api_tests.py
5 files changed, 10 insertions(+), 17 deletions(-)
Approvals:
Zhuyifei1999: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/config2.py b/pywikibot/config2.py
index c193e8f..d74c505 100644
--- a/pywikibot/config2.py
+++ b/pywikibot/config2.py
@@ -50,11 +50,10 @@
from distutils.version import StrictVersion
from locale import getdefaultlocale
from os import getenv, environ
+from requests import __version__ as requests_version
from textwrap import fill
from warnings import warn
-from requests import __version__ as requests_version
-
from pywikibot import __version__ as pwb_version
from pywikibot.logging import error, output, warning
from pywikibot.tools import PY2, issue_deprecation_warning
@@ -313,7 +312,7 @@
base_dir = ''
for arg in sys.argv[1:]:
- if arg.startswith(str('-dir:')):
+ if arg.startswith('-dir:'):
base_dir = arg[5:]
base_dir = os.path.expanduser(base_dir)
break
@@ -378,7 +377,7 @@
base_dir = get_base_dir()
for arg in sys.argv[1:]:
- if arg.startswith(str('-verbose')) or arg == str('-v'):
+ if arg.startswith('-verbose') or arg == '-v':
output('The base directory is ' + base_dir)
break
family_files = {}
diff --git a/pywikibot/page/__init__.py b/pywikibot/page/__init__.py
index 0c5a31d..04621d9 100644
--- a/pywikibot/page/__init__.py
+++ b/pywikibot/page/__init__.py
@@ -373,7 +373,7 @@
title = title.replace(' ', '_')
if as_url:
encoded_title = title.encode(self.site.encoding())
- title = quote_from_bytes(encoded_title, safe=str(''))
+ title = quote_from_bytes(encoded_title, safe='')
if as_filename:
# Replace characters that are not possible in file names on some
# systems, but still are valid in MediaWiki titles:
@@ -415,7 +415,7 @@
except UnicodeEncodeError:
# okay console encoding didn't work, at least try something
title = self.title().encode('unicode_escape')
- return str('{0}({1})').format(self.__class__.__name__, title)
+ return '{0}({1})'.format(self.__class__.__name__, title)
def _cmpkey(self):
"""
diff --git a/pywikibot/userinterfaces/terminal_interface_win32.py b/pywikibot/userinterfaces/terminal_interface_win32.py
index 78f18c9..898a847 100755
--- a/pywikibot/userinterfaces/terminal_interface_win32.py
+++ b/pywikibot/userinterfaces/terminal_interface_win32.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""User interface for Win32 terminals."""
#
-# (C) Pywikibot team, 2003-2019
+# (C) Pywikibot team, 2003-2020
#
# Distributed under the terms of the MIT license.
#
@@ -77,9 +77,7 @@
def _raw_input(self):
data = self.stdin.readline()
- # data is in both Python versions str but '\x1a' is unicode in Python 2
- # so explicitly convert into str as it otherwise tries to decode data
- if str('\x1a') in data:
+ if '\x1a' in data:
raise EOFError()
return data.strip()
diff --git a/pywikibot/xmlreader.py b/pywikibot/xmlreader.py
index b46315b..9c1f883 100644
--- a/pywikibot/xmlreader.py
+++ b/pywikibot/xmlreader.py
@@ -118,10 +118,7 @@
def parse(self):
"""Generator using ElementTree iterparse function."""
with open_archive(self.filename) as source:
- # iterparse's event must be a str but they are unicode with
- # unicode_literals in Python 2
- context = iterparse(source, events=(str('start'), str('end'),
- str('start-ns')))
+ context = iterparse(source, events=('start', 'end', 'start-ns'))
self.root = None
for event, elem in context:
@@ -131,8 +128,7 @@
if event == 'start' and self.root is None:
self.root = elem
continue
- for rev in self._parse(event, elem):
- yield rev
+ yield from self._parse(event, elem)
def _parse_only_latest(self, event, elem):
"""Parser that yields only the latest revision."""
diff --git a/tests/api_tests.py b/tests/api_tests.py
index 55c32b7..1774800 100644
--- a/tests/api_tests.py
+++ b/tests/api_tests.py
@@ -1102,7 +1102,7 @@
def test_url_encoding_from_basestring(self):
"""Test encoding basestring values."""
query = {'token': 'test\xe2\x80\x94test'}
- expect = str('token=test%C3%A2%C2%80%C2%94test')
+ expect = 'token=test%C3%A2%C2%80%C2%94test'
result = api.encode_url(query)
self.assertEqual(result, expect)
self.assertIsInstance(result, str)
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/610392
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I2ac37eb9827b741708cedc08d82092a34f4a874f
Gerrit-Change-Number: 610392
Gerrit-PatchSet: 4
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Zhuyifei1999 <zhuyifei1999(a)gmail.com>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/i18n/+/611235 )
Change subject: Localisation updates from https://translatewiki.net.
......................................................................
Localisation updates from https://translatewiki.net.
Change-Id: Ia2d17d0b1e615a1eb0d5c3a3cb2a34d72ee78482
---
M archivebot/vi.json
M category/vi.json
M category_redirect/vi.json
M checkimages/vi.json
M pywikibot/vi.json
A standardize_interwiki/vi.json
M unusedfiles/vi.json
7 files changed, 26 insertions(+), 5 deletions(-)
Approvals:
L10n-bot: Looks good to me, approved
jenkins-bot: Verified
diff --git a/archivebot/vi.json b/archivebot/vi.json
index cb15aee..e9965d2 100644
--- a/archivebot/vi.json
+++ b/archivebot/vi.json
@@ -7,6 +7,6 @@
"archivebot-archive-full": "(LƯU TRỮ ĐÃ ĐẦY)",
"archivebot-archive-summary": "Lưu trữ %(count)d luồng từ [[%(from)s]]",
"archivebot-archiveheader": "{{Thảo luận lưu}}",
- "archivebot-older-than": "cũ hơn %(duration)s",
+ "archivebot-older-than": "{{PLURAL:%(count)d|cũ}} hơn %(duration)s",
"archivebot-page-summary": "Lưu trữ %(count)d luồng (%(why)s) qua %(archives)s"
}
diff --git a/category/vi.json b/category/vi.json
index b0f66ef..ac0f3dc 100644
--- a/category/vi.json
+++ b/category/vi.json
@@ -12,6 +12,8 @@
"category-replacing": "Bot: Thay thể loại %(oldcat)s bằng %(newcat)s",
"category-section-title": "Lịch sử các trang từng nằm trong %(oldcat)s",
"category-strip-cfd-templates": "Bot: Gỡ bản mẫu đề nghị xóa thể loại vì đã được giải quyết",
+ "category-strip-sort-keys": "Bot: Gỡ từ khóa sắp xếp vì tác vụ đã được giải quyết",
+ "category-strip-both": "Bot: Gỡ bản mẫu đề nghị xóa thể loại vì đã được giải quyết",
"category-version-history": "Bot: Lưu lịch sử phiên bản của các trang từng nằm trong %(oldcat)s",
"category-was-disbanded": "Bot: Thể loại bị giải tán",
"category-was-moved": "Bot: Di chuyển thể loại qua [[:Category:%(newcat)s|%(title)s]]"
diff --git a/category_redirect/vi.json b/category_redirect/vi.json
index 7114f64..dace53b 100644
--- a/category_redirect/vi.json
+++ b/category_redirect/vi.json
@@ -11,5 +11,11 @@
"category_redirect-edit-request": "Các trang đã khóa sau cần phải cập nhật liên kết thể loại: %(itemlist)s",
"category_redirect-edit-request-item": "* %(title)s đang thuộc %(oldcat)s, là thể loại đổi hướng đến %(newcat)s",
"category_redirect-fix-double": "Bot: Sửa thể loại đổi hướng kép",
- "category_redirect-log-failed": "** Không thành công: %(error)s"
+ "category_redirect-log-added": "* Đã xếp [[%(ns)s%(template)s]] vào %(oldcat)s",
+ "category_redirect-log-add-failed": "* Thất bại khi xếp [[%(ns)s%(template)s]] vào %(oldcat)s",
+ "category_redirect-log-double": "* Đã sửa đổi hướng kép: %(oldcat)s → %(newcat)s → %(targetcat)s",
+ "category_redirect-log-failed": "** Không thành công: %(error)s",
+ "category_redirect-log-ignoring": "* Đã bỏ qua %(oldcat)s",
+ "category_redirect-log-loop": "* Vòng đổi hướng từ %(oldcat)s",
+ "category_redirect-older-logs": "'''[%(oldlogs)s Nhật trình cũ hơn]'''"
}
diff --git a/checkimages/vi.json b/checkimages/vi.json
index 8bfa345..ea785fa 100644
--- a/checkimages/vi.json
+++ b/checkimages/vi.json
@@ -4,6 +4,10 @@
"Minh Nguyen"
]
},
+ "checkimages-deletion-comment": "Bot: Thêm %(adding)s",
+ "checkimages-log-comment": "Bot: Cập nhật nhật trình",
+ "checkimages-no-license-head": "Hình thiếu giấy phép",
"checkimages-source-tag-comment": "Bot: Đánh dấu một tập tin mới được tải lên mà chưa gắn thẻ",
- "checkimages-source-notice-comment": "Bot: Yêu cầu thông tin nguồn gốc"
+ "checkimages-source-notice-comment": "Bot: Yêu cầu thông tin nguồn gốc",
+ "checkimages-unknown-extension-head": "Phần mở rộng không rõ!"
}
diff --git a/pywikibot/vi.json b/pywikibot/vi.json
index 67ad3a5..767c784 100644
--- a/pywikibot/vi.json
+++ b/pywikibot/vi.json
@@ -13,5 +13,6 @@
"pywikibot-enter-xml-filename": "Xin vui lòng nhập tên tập tin của bản sao lưu XML:",
"pywikibot-fixes-fckeditor": "Bot: Sửa mã HTML của trình soạn thảo có định dạng",
"pywikibot-fixes-html": "Bot: Chuyển đổi/sửa HTML",
- "pywikibot-fixes-syntax": "Bot: Sửa cú pháp wiki"
+ "pywikibot-fixes-syntax": "Bot: Sửa cú pháp wiki",
+ "pywikibot-touch": "Pywikibot sửa đổi chạm"
}
diff --git a/standardize_interwiki/vi.json b/standardize_interwiki/vi.json
new file mode 100644
index 0000000..1ac2aae
--- /dev/null
+++ b/standardize_interwiki/vi.json
@@ -0,0 +1,8 @@
+{
+ "@metadata": {
+ "authors": [
+ "Minh Nguyen"
+ ]
+ },
+ "standardize_interwiki-comment": "Bot: Chuẩn hóa liên kết liên wiki"
+}
diff --git a/unusedfiles/vi.json b/unusedfiles/vi.json
index a0041f2..438dbd6 100644
--- a/unusedfiles/vi.json
+++ b/unusedfiles/vi.json
@@ -4,5 +4,5 @@
"Minh Nguyen"
]
},
- "unusedfiles-comment": "Bot: Hình ảnh để loại bỏ"
+ "unusedfiles-comment": "Bot: Gắn thẻ tập tin mồ côi"
}
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/i18n/+/611235
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/i18n
Gerrit-Branch: master
Gerrit-Change-Id: Ia2d17d0b1e615a1eb0d5c3a3cb2a34d72ee78482
Gerrit-Change-Number: 611235
Gerrit-PatchSet: 1
Gerrit-Owner: L10n-bot <l10n-bot(a)translatewiki.net>
Gerrit-Reviewer: L10n-bot <l10n-bot(a)translatewiki.net>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged