jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/914362 )
Change subject: [IMPR] Solve stylistic checks for transwikiimport.py
......................................................................
[IMPR] Solve stylistic checks for transwikiimport.py
- solve flake8 and hacking errors
- add script to CHANGELOG.rst and README.rst
- add scripts to documentation
- update AUTHORS.rst
Change-Id: I6f4ca2378be7adc047cb512b35fc9ab61fa847e1
---
M AUTHORS.rst
M scripts/CHANGELOG.rst
M scripts/README.rst
M scripts/transwikiimport.py
M tox.ini
5 files changed, 73 insertions(+), 44 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/AUTHORS.rst b/AUTHORS.rst
index e057adc..f7b56f3 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -79,6 +79,7 @@
Diwanshu Mittal
Dmytro Dziuma
Dr03ramos
+ Draco flavus
DrTrigon
Dvorapa
diff --git a/scripts/CHANGELOG.rst b/scripts/CHANGELOG.rst
index c747382..34f96c8 100644
--- a/scripts/CHANGELOG.rst
+++ b/scripts/CHANGELOG.rst
@@ -5,9 +5,16 @@
-----
replicate_wiki
+~~~~~~~~~~~~~~
* i18n support was added (:phab:`T333759`)
+transwikiimport
+~~~~~~~~~~~~~~~
+
+* Script was added (:phab:`T335246`)
+
+
8.1.0
-----
diff --git a/scripts/README.rst b/scripts/README.rst
index 0ed2066..562694b 100644
--- a/scripts/README.rst
+++ b/scripts/README.rst
@@ -143,6 +143,9 @@
+--------------------------+---------------------------------------------------------+
| transferbot.py | Transfers pages from a source wiki to a target wiki. |
+--------------------------+---------------------------------------------------------+
+| transwikiimport.py | Transfers pages from a source wiki to a target wiki |
+| | including edit history using :api:`Import`. |
++--------------------------+---------------------------------------------------------+
| unusedfiles.py | Bot appends some text to all unused images and other |
| | text to the respective uploaders. |
+--------------------------+---------------------------------------------------------+
diff --git a/scripts/transwikiimport.py b/scripts/transwikiimport.py
index 2f80c48..6bd9335 100644
--- a/scripts/transwikiimport.py
+++ b/scripts/transwikiimport.py
@@ -1,10 +1,7 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-"""
-This script transfers pages from a source wiki to a target wiki
- over the transwiki import mechanism.
+#!/usr/bin/env python3
+"""This script transfers pages from a source wiki to a target wiki.
-It is also able to copy the full edit history.
+It uses :api:`Import` and it is also able to copy the full edit history.
The following parameters are supported:
@@ -24,8 +21,9 @@
-summary: Log entry import summary.
--tags: Change tags to apply to the entry in the import log
- and to the null revision on the imported pages.
+-tags: Change tags to apply to the entry in the import
+ log and to the null revision on the imported
+ pages.
-test: No import, the names of the pages are output.
@@ -35,8 +33,7 @@
-target Use page generator of the target site
This also affects the correspondingnamespace.
-
-Internal links are *not* repaired!
+.. warning:: Internal links are *not* repaired!
Pages to work on can be specified using any of:
@@ -45,22 +42,24 @@
Examples
--------
-Transfer all pages in category "Query service" from the English Wikipedia to
-the home Wikipedia, adding "Wikipedia:Import enwp/" as prefix:
+Transfer all pages in category "Query service" from the English
+Wikipedia to the home Wikipedia, adding "Wikipedia:Import enwp/" as
+prefix:
python pwb.py transwikiimport -interwikisource:en -cat:"Query service" \
--prefix:"Wikipedia:Import enwp/" -fullhistory -assignknownusers
+ -prefix:"Wikipedia:Import enwp/" -fullhistory -assignknownusers
-Copy the template "Query service" from the English Wikipedia to the
-home Wiktionary:
+Copy the template "Query service" from the English Wikipedia to the home
+Wiktionary:
python pwb.py transwikiimport -interwikisource:w:en \
--page:"Template:Query service" -fullhistory -assignknownusers
+ -page:"Template:Query service" -fullhistory -assignknownusers
-Copy 10 wanted templates of the home Wikipedia from English Wikipedia \
-to the home Wikipedia
+Copy 10 wanted templates of the home Wikipedia from English Wikipedia to
+the home Wikipedia
+
python pwb.py transwikiimport -interwikisource:en \
--wantedtemplates:10 -target -fullhistory -assignknownusers
+ -wantedtemplates:10 -target -fullhistory -assignknownusers
Advice
------
@@ -136,24 +135,25 @@
the appropriate flag on the account
must be set (usually administrator, transwiki importer or importer).
-
+.. versionadded:: 8.2
"""
#
-# (C) Draco flavus
+# (C) Pywikibot team, 2023
#
# Distributed under the terms of the MIT license.
#
import pywikibot
from pywikibot import pagegenerators
+from pywikibot.backports import Dict
from pywikibot.bot import suggest_help
-# from pywikibot.i18n import twtranslate
from pywikibot.data import api
docuReplacements = {'&params;': pagegenerators.parameterHelp} # noqa: N816
-def api_query(site, params):
+def api_query(site, params: Dict[str, str]):
+ """Request data from given site."""
query = api.Request(site, parameters=params)
datas = query.submit()
return datas
@@ -227,7 +227,7 @@
return
gen_args = ' '.join(gen_args)
- pywikibot.output("""
+ pywikibot.info("""
Page transfer configuration
---------------------------
Source: {fromsite}
@@ -240,10 +240,11 @@
rootpage=rootpage if rootpage else '(none)',
target='from target site\n' if target else ''))
- if correspondingnamespace != 'all' and rootpage != '':
- pywikibot.output('Both the correspondingnamespace and the rootpage are set! Exiting.')
- elif target and rootpage != '':
- pywikibot.output('Both the target and the rootpage are set! Exiting.')
+ if correspondingnamespace != 'all' and rootpage:
+ pywikibot.info('Both the correspondingnamespace and the rootpage are '
+ 'set! Exiting.')
+ elif target and rootpage:
+ pywikibot.info('Both the target and the rootpage are set! Exiting.')
else:
params = {
'action': 'import',
@@ -256,24 +257,32 @@
}
if correspondingnamespace != 'all':
params['namespace'] = correspondingnamespace
- if rootpage != '':
+ if rootpage:
params['rootpage'] = rootpage
- if tags != '':
+ if tags:
params['tags'] = tags
+
for page in gen:
if target:
if correspondingnamespace == 'all':
- fromtitle = page.namespace().canonical_prefix() + page.title(with_ns=False)
+ fromtitle = (page.namespace().canonical_prefix()
+ + page.title(with_ns=False))
else:
- fromtitle = str(fromsite.namespaces[int(correspondingnamespace)]) + page.title(with_ns=False)
+ fromtitle = str(
+ fromsite.namespaces[int(correspondingnamespace)]) \
+ + page.title(with_ns=False)
targetpage = page
else:
fromtitle = page.title(with_ns=True)
if correspondingnamespace == 'all':
- totitle = page.namespace().canonical_prefix() + page.title(with_ns=False)
+ totitle = (page.namespace().canonical_prefix()
+ + page.title(with_ns=False))
else:
- totitle = str(tosite.namespaces[int(correspondingnamespace)]) + page.title(with_ns=False)
+ totitle = str(
+ tosite.namespaces[int(correspondingnamespace)]) \
+ + page.title(with_ns=False)
targetpage = pywikibot.Page(tosite, totitle)
+
if not overwrite:
if targetpage.exists():
pywikibot.warning(
@@ -291,19 +300,15 @@
)
)
continue
+
params['interwikipage'] = fromtitle
if test:
- pywikibot.output('Simulation: {} → {}'.format(
- fromtitle,
- targetpage.title(with_ns=True)
- )
- )
+ pywikibot.info(f'Simulation: {fromtitle} → '
+ f'{targetpage.title(with_ns=True)}')
else:
- # For testing, comment out the following line.
api_query(tosite, params)
- # For testing, uncomment the following two lines.
- # pywikibot.output(params)
- # pywikibot.output(fromtitle + ' → ' + page.title(with_ns=True) if target else totitle)
+ pywikibot.info(fromtitle + ' → ' + page.title(with_ns=True)
+ if target else totitle)
if __name__ == '__main__':
diff --git a/tox.ini b/tox.ini
index 7069a31..17b9aca 100644
--- a/tox.ini
+++ b/tox.ini
@@ -192,7 +192,6 @@
scripts/reflinks.py: N802, N816
scripts/replace.py: N802, N803, N806, N816
scripts/solve_disambiguation.py: N802, N806
- scripts/transwikiimport.py: C103, D103, D205, D400, E123, E501
setup.py: T001, T201
tests/api_tests.py: N802
tests/archivebot_tests.py: N802
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/914362
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I6f4ca2378be7adc047cb512b35fc9ab61fa847e1
Gerrit-Change-Number: 914362
Gerrit-PatchSet: 7
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
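For orientation, the import request that transwikiimport assembles boils
down to a single action=import API call. The following is a minimal
sketch, not part of the change itself; the parameter names mirror the
params dict in the diff above, while the source wiki and page title are
illustrative values:

    import pywikibot
    from pywikibot.data import api

    # Build the same kind of action=import request the script assembles.
    tosite = pywikibot.Site()  # home (target) site from the user config
    params = {
        'action': 'import',
        'token': tosite.tokens['csrf'],  # import requires a CSRF token
        'interwikisource': 'en',  # cf. -interwikisource
        'interwikipage': 'Template:Query service',  # illustrative page
        'fullhistory': True,  # cf. -fullhistory
        'assignknownusers': True,  # cf. -assignknownusers
        'summary': 'Importing page from en',
    }
    api.Request(tosite, parameters=params).submit()

Running such a request needs an account with import rights on the target
wiki, as the Rights section of the script's docstring notes.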
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/914364 )
Change subject: [tests] Skip QueryStringParamsTestCase on ServerError
......................................................................
[tests] Skip QueryStringParamsTestCase on ServerError
Change-Id: Iddc503bceacc6d0684b86d35bf860782d2810d0f
---
M tests/http_tests.py
1 file changed, 19 insertions(+), 11 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/tests/http_tests.py b/tests/http_tests.py
index b84eaa5..580ff4f 100755
--- a/tests/http_tests.py
+++ b/tests/http_tests.py
@@ -494,8 +494,7 @@
class QueryStringParamsTestCase(HttpbinTestCase):
- """
- Test the query string parameter of request methods.
+ """Test the query string parameter of request methods.
The /get endpoint of httpbin returns JSON that can include an
'args' key with urldecoded query string parameters.
@@ -508,12 +507,12 @@
def test_no_params(self):
"""Test fetch method with no parameters."""
- r = http.fetch(self.url, params={})
+ r = self.fetch(self.url, params={})
fail_status = HTTPStatus.SERVICE_UNAVAILABLE
if r.status_code == fail_status: # T203637
- self.skipTest('{status.value}: {status.description} for {url}'
- .format(status=fail_status, url=self.url))
+ self.skipTest(f'{fail_status.value}: {fail_status.description} '
+ f'for {self.url}')
self.assertEqual(r.status_code, HTTPStatus.OK)
self.assertEqual(r.json()['args'], {})
@@ -525,12 +524,12 @@
HTTPBin returns the args in their urldecoded form, so what we put in
should be the same as what we get out.
"""
- r = http.fetch(self.url, params={'fish&chips': 'delicious'})
+ r = self.fetch(self.url, params={'fish&chips': 'delicious'})
fail_status = HTTPStatus.SERVICE_UNAVAILABLE
if r.status_code == fail_status: # T203637
- self.skipTest('{status.value}: {status.description} for {url}'
- .format(status=fail_status, url=self.url))
+ self.skipTest(f'{fail_status.value}: {fail_status.description} '
+ f'for {self.url}')
self.assertEqual(r.status_code, HTTPStatus.OK)
self.assertEqual(r.json()['args'], {'fish&chips': 'delicious'})
@@ -542,12 +541,12 @@
HTTPBin returns the args in their urldecoded form, so what we put in
should be the same as what we get out.
"""
- r = http.fetch(self.url, params={'fish%26chips': 'delicious'})
+ r = self.fetch(self.url, params={'fish%26chips': 'delicious'})
fail_status = HTTPStatus.SERVICE_UNAVAILABLE
if r.status_code == fail_status: # T203637
- self.skipTest('{status.value}: {status.description} for {url}'
- .format(status=fail_status, url=self.url))
+ self.skipTest(f'{fail_status.value}: {fail_status.description} '
+ f'for {self.url}')
self.assertEqual(r.status_code, HTTPStatus.OK)
self.assertEqual(r.json()['args'], {'fish%26chips': 'delicious'})
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/914364
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Iddc503bceacc6d0684b86d35bf860782d2810d0f
Gerrit-Change-Number: 914364
Gerrit-PatchSet: 1
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
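The pattern applied three times above generalizes: probe the response
status and skip, rather than fail, when httpbin answers with 503. A
standalone sketch, assuming HttpbinTestCase lives in tests.aspects and
provides the fetch() and get_httpbin_url() helpers the existing tests
rely on:

    from http import HTTPStatus

    from tests.aspects import HttpbinTestCase  # assumed import path


    class SkipOnServerErrorExample(HttpbinTestCase):

        """Illustrative test using the skip-on-503 guard from above."""

        def test_get_args(self):
            """Fetch /get and skip instead of fail if httpbin is down."""
            url = self.get_httpbin_url('/get')
            r = self.fetch(url, params={'key': 'value'})
            fail_status = HTTPStatus.SERVICE_UNAVAILABLE
            if r.status_code == fail_status:  # T203637
                self.skipTest(f'{fail_status.value}: '
                              f'{fail_status.description} for {url}')
            self.assertEqual(r.status_code, HTTPStatus.OK)
            self.assertEqual(r.json()['args'], {'key': 'value'})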
Xqt has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/914038 )
Change subject: Add transwikiimport.py script
......................................................................
Add transwikiimport.py script
Patch submitted by Draco flavus
Bug: T335246
Change-Id: Ic4353c3863d8d6abc60200dc7707981884c7f055
---
A scripts/transwikiimport.py
M tox.ini
2 files changed, 323 insertions(+), 0 deletions(-)
Approvals:
Xqt: Verified; Looks good to me, approved
diff --git a/scripts/transwikiimport.py b/scripts/transwikiimport.py
new file mode 100644
index 0000000..2f80c48
--- /dev/null
+++ b/scripts/transwikiimport.py
@@ -0,0 +1,310 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+"""
+This script transfers pages from a source wiki to a target wiki
+ over the transwiki import mechanism.
+
+It is also able to copy the full edit history.
+
+The following parameters are supported:
+
+-interwikisource: The interwiki code of the source wiki.
+
+-fullhistory: Include all versions of the page.
+
+-includealltemplates: All templates and transcluded pages will
+ be copied (dangerous).
+
+-assignknownusers: If user exists on target wiki, assign the
+ edits to them.
+
+-correspondingnamespace: The number of the corresponding namespace.
+
+-rootpage: Import as subpages of ...
+
+-summary: Log entry import summary.
+
+-tags: Change tags to apply to the entry in the import log
+ and to the null revision on the imported pages.
+
+-test: No import, the names of the pages are output.
+
+-overwrite: Existing pages are skipped by default.
+ Use this option to overwrite pages.
+
+-target Use page generator of the target site
+ This also affects the correspondingnamespace.
+
+
+Internal links are *not* repaired!
+
+Pages to work on can be specified using any of:
+
+&params;
+
+Examples
+--------
+
+Transfer all pages in category "Query service" from the English Wikipedia to
+the home Wikipedia, adding "Wikipedia:Import enwp/" as prefix:
+
+ python pwb.py transwikiimport -interwikisource:en -cat:"Query service" \
+-prefix:"Wikipedia:Import enwp/" -fullhistory -assignknownusers
+
+Copy the template "Query service" from the English Wikipedia to the
+home Wiktionary:
+
+ python pwb.py transwikiimport -interwikisource:w:en \
+-page:"Template:Query service" -fullhistory -assignknownusers
+
+Copy 10 wanted templates of the home Wikipedia from English Wikipedia \
+to the home Wikipedia
+ python pwb.py transwikiimport -interwikisource:en \
+-wantedtemplates:10 -target -fullhistory -assignknownusers
+
+Advice
+------
+
+The module gives access to all parameters of the API (and specialpage)
+ and is compatible with the transferbot module.
+However for most scenarios the following parameters should be avoided:
+ -overwrite (by default set to False)
+ -target (by default set to False)
+ -includealltemplates (by default set to False)
+
+The correspondingnamespace is used only if the namespaces on both wikis do not
+ correspond with one another.
+
+Correspondingnamespace and rootpage are mutually exclusive.
+
+Target and rootpage are mutually exclusive.
+ (This combination does not seem to be feasible.)
+
+If the target page already exists, the target page will be overwritten
+ if -overwrite is set or skipped otherwise.
+
+The list of pages to be imported can be generated outside of pywikibot:
+
+ for i in {1..10} ; do python3 pwb.py transwikiimport -interwikisource:mul \
+-page:"Page:How to become famous.djvu/$i" -fullhistory \
+-assignknownusers ; done
+
+(The pages Page:How to become famous.djvu/1, Page:How to become famous.djvu/2
+.. Page:How to become famous.djvu/10 will be copied
+ from wikisource (mul) to the home-wikisource, all versions will be imported
+ and the usernames will be identified;
+ existing pages will be skipped.)
+
+Or generated using the usual pywikibot generators:
+
+ python3 pwb.py transwikiimport -interwikisource:mul \
+-prefixindex:"Page:How to become famous.djvu" \
+-fullhistory -assignknownusers \
+-summary:"Book copied from oldwiki."
+
+(All pages like Page:How to become famous.djvu... will be copied
+ from wikisource (mul) to the home-wikisource, all versions will be
+ imported and the usernames will be identified;
+ existing pages will be skipped.)
+
+The parameter -test disables the import and the bot prints the names
+ of the pages that would be imported.
+Since the import of pages is a quite exceptional process and potentially
+ dangerous, it should be done carefully and tested in advance.
+The -test parameter can help to find out which pages would be imported
+ and what the target of the import would be.
+However it does not print the titles of the transcluded pages (e.g. templates)
+ if -includealltemplates is set.
+This option is quite *dangerous*. If the title of an existing page on the
+ home wiki clashes with the title of one of the linked pages it would be
+ *overwritten*. The histories would be merged. (If the imported version
+ is newer.) Even if -overwrite is not set the linked page *can be overwritten*.
+
+
+Interwikisource
+---------------
+
+The list of wikis that can be used as a source
+ is defined in the variable $wgImportSources.
+It can be viewed on Special:Import.
+
+
+Rights
+------
+
+For transwikiimport (and even to access Special:Import)
+ the appropriate flag on the account
+ must be set (usually administrator, transwiki importer or importer).
+
+
+"""
+#
+# (C) Draco flavus
+#
+# Distributed under the terms of the MIT license.
+#
+import pywikibot
+from pywikibot import pagegenerators
+from pywikibot.bot import suggest_help
+# from pywikibot.i18n import twtranslate
+from pywikibot.data import api
+
+
+docuReplacements = {'&params;': pagegenerators.parameterHelp} # noqa: N816
+
+
+def api_query(site, params):
+ query = api.Request(site, parameters=params)
+ datas = query.submit()
+ return datas
+
+
+def main(*args: str) -> None:
+ """
+ Process command line arguments and invoke bot.
+
+ If args is an empty list, sys.argv is used.
+
+ :param args: command line arguments
+ """
+ local_args = pywikibot.handle_args(args)
+
+ interwikisource = ''
+ correspondingnamespace = 'all'
+ rootpage = ''
+ tags = ''
+ summary = 'Importing page from '
+ test = False
+ overwrite = False
+ target = False
+ fullhistory = False
+ includealltemplates = False
+ assignknownusers = False
+ gen_args = []
+
+ for arg in local_args:
+ if arg.startswith('-interwikisource'):
+ interwikisource = arg[len('-interwikisource:'):]
+ summary += interwikisource
+ elif arg.startswith('-correspondingnamespace'):
+ correspondingnamespace = arg[len('-correspondingnamespace:'):]
+ elif arg.startswith('-rootpage'):
+ rootpage = arg[len('-rootpage:'):]
+ elif arg.startswith('-tags'):
+ tags = arg[len('-tags:'):]
+ elif arg.startswith('-summary'):
+ summary = arg[len('-summary:'):]
+ elif arg == '-test':
+ test = True
+ elif arg == '-overwrite':
+ overwrite = True
+ elif arg == '-target':
+ target = True
+ elif arg == '-fullhistory':
+ fullhistory = True
+ elif arg == '-includealltemplates':
+ includealltemplates = True
+ elif arg == '-assignknownusers':
+ assignknownusers = True
+ else:
+ gen_args.append(arg)
+
+ tosite = pywikibot.Site()
+ csrf = tosite.tokens['csrf']
+ fromsite = pywikibot.Site().interwiki(interwikisource)
+ additional_text = ('Target site not different from source site.'
+ if fromsite == tosite else '')
+
+ gen_factory = pagegenerators.GeneratorFactory(site=tosite if target
+ else fromsite)
+ unknown_args = [arg for arg in gen_args if not gen_factory.handle_arg(arg)]
+
+ gen = gen_factory.getCombinedGenerator()
+
+ if suggest_help(missing_generator=not gen,
+ additional_text=additional_text,
+ unknown_parameters=unknown_args):
+ return
+
+ gen_args = ' '.join(gen_args)
+ pywikibot.output("""
+ Page transfer configuration
+ ---------------------------
+ Source: {fromsite}
+ Target: {tosite}
+
+ Generator of pages to transfer: {gen_args}
+ {target}
+ Prefix for transferred pages: {rootpage}
+ """.format(fromsite=fromsite, tosite=tosite, gen_args=gen_args,
+ rootpage=rootpage if rootpage else '(none)',
+ target='from target site\n' if target else ''))
+
+ if correspondingnamespace != 'all' and rootpage != '':
+ pywikibot.output('Both the correspondingnamespace and the rootpage are set! Exiting.')
+ elif target and rootpage != '':
+ pywikibot.output('Both the target and the rootpage are set! Exiting.')
+ else:
+ params = {
+ 'action': 'import',
+ 'token': csrf,
+ 'interwikisource': interwikisource,
+ 'fullhistory': fullhistory,
+ 'assignknownusers': assignknownusers,
+ 'templates': includealltemplates,
+ 'summary': summary
+ }
+ if correspondingnamespace != 'all':
+ params['namespace'] = correspondingnamespace
+ if rootpage != '':
+ params['rootpage'] = rootpage
+ if tags != '':
+ params['tags'] = tags
+ for page in gen:
+ if target:
+ if correspondingnamespace == 'all':
+ fromtitle = page.namespace().canonical_prefix() + page.title(with_ns=False)
+ else:
+ fromtitle = str(fromsite.namespaces[int(correspondingnamespace)]) + page.title(with_ns=False)
+ targetpage = page
+ else:
+ fromtitle = page.title(with_ns=True)
+ if correspondingnamespace == 'all':
+ totitle = page.namespace().canonical_prefix() + page.title(with_ns=False)
+ else:
+ totitle = str(tosite.namespaces[int(correspondingnamespace)]) + page.title(with_ns=False)
+ targetpage = pywikibot.Page(tosite, totitle)
+ if not overwrite:
+ if targetpage.exists():
+ pywikibot.warning(
+ 'Skipped {} (target page {} exists)'.format(
+ page.title(as_link=True, force_interwiki=True),
+ targetpage.title(as_link=True)
+ )
+ )
+ continue
+ else:
+ if not targetpage.botMayEdit():
+ pywikibot.warning(
+ 'Target page {} is not editable by bots'.format(
+ targetpage.title(as_link=True)
+ )
+ )
+ continue
+ params['interwikipage'] = fromtitle
+ if test:
+ pywikibot.output('Simulation: {} → {}'.format(
+ fromtitle,
+ targetpage.title(with_ns=True)
+ )
+ )
+ else:
+ # For testing, comment out the following line.
+ api_query(tosite, params)
+ # For testing, uncomment the following two lines.
+ # pywikibot.output(params)
+ # pywikibot.output(fromtitle + ' → ' + page.title(with_ns=True) if target else totitle)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/tox.ini b/tox.ini
index 17b9aca..7069a31 100644
--- a/tox.ini
+++ b/tox.ini
@@ -192,6 +192,7 @@
scripts/reflinks.py: N802, N816
scripts/replace.py: N802, N803, N806, N816
scripts/solve_disambiguation.py: N802, N806
+ scripts/transwikiimport.py: C103, D103, D205, D400, E123, E501
setup.py: T001, T201
tests/api_tests.py: N802
tests/archivebot_tests.py: N802
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/914038
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: Ic4353c3863d8d6abc60200dc7707981884c7f055
Gerrit-Change-Number: 914038
Gerrit-PatchSet: 4
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: D3r1ck01 <xsavitar.wiki(a)aol.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
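One detail worth illustrating is the title mapping inside the script's
main loop: with -correspondingnamespace, the target title keeps the bare
page name but swaps in the target wiki's namespace prefix. A hedged
sketch mirroring the code above; the sites and the namespace number 10
(Template) are illustrative values:

    import pywikibot

    # Map an English Wikipedia template title onto the home wiki, the
    # same way the loop above builds totitle.
    tosite = pywikibot.Site()
    fromsite = pywikibot.Site('en', 'wikipedia')
    page = pywikibot.Page(fromsite, 'Template:Query service')

    correspondingnamespace = '10'
    totitle = (str(tosite.namespaces[int(correspondingnamespace)])
               + page.title(with_ns=False))
    targetpage = pywikibot.Page(tosite, totitle)
    print(page.title(with_ns=True), '→', targetpage.title(with_ns=True))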
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/913914 )
Change subject: [bugfix] Take into account that a repository clone may have no tags
......................................................................
[bugfix] Take into account that a repository clone may have no tags
The Pywikibot repository may be cloned without tags, for example in a
shallow CI checkout. This patch solves the IndexError which is raised in
that case. This is necessary for CI tests.
Call sys.exit(1) if an error occurs to signal CI tools to fail the test.
Also update coverage and use f-strings
Bug: T335676
Change-Id: I9d8b2990e286772efe4827c2a11fee50af598308
---
M make_dist.py
M setup.py
2 files changed, 62 insertions(+), 39 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/make_dist.py b/make_dist.py
index 1fca5b1..0453ba1 100755
--- a/make_dist.py
+++ b/make_dist.py
@@ -98,9 +98,12 @@
def cleanup(self) -> None:
"""Cleanup copied files."""
- def run(self) -> None: # pragma: no cover
- """Run the installer script."""
- if self.upgrade:
+ def run(self) -> bool:
+ """Run the installer script.
+
+ :return: True if no error occurs, else False
+ """
+ if self.upgrade: # pragma: no cover
check_call('python -m pip install --upgrade pip', shell=True)
check_call(
'pip install --upgrade setuptools wheel twine ', shell=True)
@@ -108,20 +111,20 @@
if self.local or self.remote or self.clear:
self.clear_old_dist()
if self.clear:
- return
+ return True # pragma: no cover
self.copy_files()
try:
setup.main() # create a new package
- except SystemExit as e:
+ except SystemExit as e: # pragma: no cover
error(e)
- return
+ return False
finally:
self.cleanup()
# check description
if run('twine check dist/*', shell=True).returncode:
- return
+ return False # pragma: no cover
if self.local:
check_call('pip uninstall pywikibot -y', shell=True)
@@ -131,7 +134,8 @@
if self.remote and input_yn(
'<<lightblue>>Upload dist to pypi', automatic_quit=False):
- check_call('twine upload dist/*', shell=True)
+ check_call('twine upload dist/*', shell=True) # pragma: no cover
+ return True
class SetupPywikibot(SetupBase):
@@ -149,7 +153,7 @@
self.target = target
self.source = source
- def copy_files(self) -> None: # pragma: no cover
+ def copy_files(self) -> None:
"""Copy i18n files to pywikibot.scripts folder.
Pywikibot i18n files are used for some translations. They are copied
@@ -162,7 +166,7 @@
shutil.copytree(self.source, self.target)
info('done')
- def cleanup(self) -> None: # pragma: no cover
+ def cleanup(self) -> None:
"""Remove all copied files from pywikibot scripts folder."""
info('Remove copied files... ', newline=False)
shutil.rmtree(self.target)
@@ -182,7 +186,7 @@
:return: Return whether dist is to be installed locally or to be
uploaded
"""
- if '-help' in sys.argv: # pragma: no cover
+ if '-help' in sys.argv:
info(__doc__)
info(setup.__doc__)
sys.exit()
@@ -192,7 +196,7 @@
clear = '-clear' in sys.argv
upgrade = '-upgrade' in sys.argv
- if remote and 'dev' in __version__:
+ if remote and 'dev' in __version__: # pragma: no cover
warning('Distribution must not be a developmental release to upload.')
remote = False
@@ -200,11 +204,12 @@
return local, remote, clear, upgrade
-def main() -> None: # pragma: no cover
+def main() -> bool:
"""Script entry point."""
args = handle_args()
- SetupPywikibot(*args).run()
+ return SetupPywikibot(*args).run()
-if __name__ == '__main__': # pragma: no cover
- main()
+if __name__ == '__main__':
+ if not main():
+ sys.exit(1) # pragma: no cover
diff --git a/setup.py b/setup.py
index 8ac1364..de00fc5 100755
--- a/setup.py
+++ b/setup.py
@@ -120,7 +120,7 @@
assert metadata.__name__ == name
-def get_validated_version() -> str: # pragma: no cover
+def get_validated_version() -> str:
"""Get a validated pywikibot module version string.
The version number from pywikibot.__metadata__.__version__ is used.
@@ -134,7 +134,7 @@
"""
version = metadata.__version__
if 'sdist' not in sys.argv:
- return version
+ return version # pragma: no cover
# validate version for sdist
from contextlib import suppress
@@ -144,33 +144,34 @@
try:
tags = run(['git', 'tag'], check=True, stdout=PIPE,
universal_newlines=True).stdout.splitlines()
- except Exception as e:
+ except Exception as e: # pragma: no cover
print(e)
sys.exit('Creating source distribution canceled.')
- for tag in ('stable', 'python2'):
- with suppress(ValueError):
- tags.remove(tag)
+ last_tag = None
+ if tags: # pragma: no cover
+ for tag in ('stable', 'python2'):
+ with suppress(ValueError):
+ tags.remove(tag)
- last_tag = tags[-1]
+ last_tag = tags[-1]
warnings = []
- if parse_version(version) < parse_version('0'):
+ if parse_version(version) < parse_version('0'): # pragma: no cover
# any version which is not a valid PEP 440 version will be considered
# less than any valid PEP 440 version
warnings.append(
version + ' is not a valid version string following PEP 440.')
- elif safe_version(version) != version:
- warnings.append(
- '{} does not follow PEP 440. Use {} as version string instead.'
- .format(version, safe_version(version)))
+ elif safe_version(version) != version: # pragma: no cover
+ warnings.append(f'{version} does not follow PEP 440. Use '
+ f'{safe_version(version)} as version string instead.')
- if parse_version(version) <= parse_version(last_tag):
- warnings.append(
- 'New version "{}" is not higher than last version "{}".'
- .format(version, last_tag))
+ if last_tag and parse_version(version) <= parse_version(last_tag):
+ warnings.append( # pragma: no cover
+ f'New version {version!r} is not higher than last version '
+ f'{last_tag!r}.')
- if warnings:
+ if warnings: # pragma: no cover
print(__doc__)
print('\n\n'.join(warnings))
sys.exit('\nBuild of distribution package canceled.')
@@ -178,7 +179,7 @@
return version
-def read_desc(filename) -> str: # pragma: no cover
+def read_desc(filename) -> str:
"""Read long description.
Combine included restructured text files which must be done before
@@ -193,14 +194,14 @@
if os.path.exists(include):
with open(include) as g:
desc.append(re.sub(pattern[0], pattern[1], g.read()))
- else:
- print('Cannot include {}; file not found'.format(include))
+ else: # pragma: no cover
+ print(f'Cannot include {include}; file not found')
else:
desc.append(re.sub(pattern[0], pattern[1], line))
return ''.join(desc)
-def get_packages(name) -> List[str]: # pragma: no cover
+def get_packages(name) -> List[str]:
"""Find framework packages."""
try:
from setuptools import find_namespace_packages
@@ -211,7 +212,7 @@
return [str(name)] + packages
-def main() -> None: # pragma: no cover
+def main() -> None:
"""Setup entry point."""
version = get_validated_version()
setup(
@@ -350,7 +351,7 @@
# Finally show distribution version before uploading
if 'sdist' in sys.argv:
- print('\nDistribution package created for version {}'.format(version))
+ print(f'\nDistribution package created for version {version}')
if __name__ == '__main__': # pragma: no cover
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/core/+/913914
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Change-Id: I9d8b2990e286772efe4827c2a11fee50af598308
Gerrit-Change-Number: 913914
Gerrit-PatchSet: 5
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged
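Reduced to its core, the guard added to get_validated_version() behaves
as below. This is only a sketch of the logic visible in the hunk above,
runnable inside any git checkout:

    from contextlib import suppress
    from subprocess import PIPE, run

    # A shallow or tagless clone yields an empty list here.
    tags = run(['git', 'tag'], check=True, stdout=PIPE,
               universal_newlines=True).stdout.splitlines()

    last_tag = None  # stays None when the clone has no tags (T335676)
    if tags:
        for tag in ('stable', 'python2'):  # drop non-release tags
            with suppress(ValueError):
                tags.remove(tag)
        last_tag = tags[-1]  # formerly raised IndexError on an empty list

    # Any later version comparison must be guarded on last_tag.
    if last_tag:
        print(f'Last release tag: {last_tag}')
    else:
        print('No tags found; skipping the version comparison.')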
jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/i18n/+/913917 )
Change subject: Localisation updates from https://translatewiki.net.
......................................................................
Localisation updates from https://translatewiki.net.
Change-Id: I28d26842d90847e07f2384fd758753e83b686701
---
M category/br.json
M delinker/de.json
M protect/de.json
M redirect/br.json
M redirect/de.json
M redirect/fi.json
M redirect/mk.json
A replicate_wiki/br.json
A replicate_wiki/de.json
M replicate_wiki/fr.json
A replicate_wiki/mk.json
A replicate_wiki/sr.json
M undelete/br.json
M unprotect/de.json
14 files changed, 73 insertions(+), 16 deletions(-)
Approvals:
L10n-bot: Looks good to me, approved
jenkins-bot: Verified
diff --git a/category/br.json b/category/br.json
index a0dd777..32f4dde 100644
--- a/category/br.json
+++ b/category/br.json
@@ -1,6 +1,7 @@
{
"@metadata": {
"authors": [
+ "Adriendelucca",
"Fulup",
"Gwenn-Ael",
"Huñvreüs"
@@ -8,7 +9,7 @@
},
"category-adding": "Robot : Oc'h ouzhpennañ ar rummad [[:Category:%(newcat)s|%(newcat)s]]",
"category-also-in": "(ivez e %(alsocat)s)",
- "category-clean": "Robot: o tilemel rummad %(category)s a zo rummataet e %(child)s dija",
+ "category-clean": "Robot: O tilemel ar rummad %(category)s rak rummet eo e %(child)s c'hoazh",
"category-listifying": "Robot : Roll eus %(fromcat)s ({{PLURAL:%(num)d|1 elfenn|%(num)d pajenn}})",
"category-removing": "Robot : Tennet diwar %(oldcat)s",
"category-renamed": "Robot : dilec'hiet adalek %(oldcat)s. Aozerien : %(authors)s",
diff --git a/delinker/de.json b/delinker/de.json
index 67d8c84..1ecf5f9 100644
--- a/delinker/de.json
+++ b/delinker/de.json
@@ -4,5 +4,5 @@
"Justman10000"
]
},
- "delinker-delink": "Bot: Die Datei [[%(title)s]] wurde entfernt, da sie von [[:User:%(user)s]]: ''%(comment)s'' gelöscht wurde."
+ "delinker-delink": "Bot: Die Datei [[%(title)s]] wurde entfernt, da sie von [[:User:%(user)s]] gelöscht wurde: ''%(comment)s''"
}
diff --git a/protect/de.json b/protect/de.json
index 20acbae..45554fe 100644
--- a/protect/de.json
+++ b/protect/de.json
@@ -1,12 +1,13 @@
{
"@metadata": {
"authors": [
+ "Justman10000",
"Metalhead64"
]
},
"protect-category": "Bot: Schütze alle Seiten der Kategorie %(cat)s",
"protect-images": "Bot: Schütze alle Bilder auf der Seite %(page)s",
"protect-links": "Bot: Schütze alle Seiten, die von %(page)s verlinkt sind",
- "protect-ref": "Bot: Schütze alle Seiten, die auf %(page)s verlinken",
+ "protect-ref": "Bot: Schutz aller Seiten, die auf %(page)s verweisen",
"protect-simple": "Bot: Schütze eine Liste von Dateien."
}
diff --git a/redirect/br.json b/redirect/br.json
index 2a2a9f2..23bd67d 100644
--- a/redirect/br.json
+++ b/redirect/br.json
@@ -1,15 +1,16 @@
{
"@metadata": {
"authors": [
+ "Adriendelucca",
"Fohanno",
"Fulup",
"Gwenn-Ael",
"Y-M D"
]
},
- "redirect-fix-broken-moved": "Robot : O reizhañ an adkasoù torret war-zu ar bajenn bal %(to)s",
- "redirect-fix-double": "Kempennet adkas doubl gant robot → %(to)s",
- "redirect-fix-loop": "Robot : O kempenn al lagadenn adkas war-zu %(to)s",
- "redirect-remove-broken": "Robot : Ar bajenn ma vezer adkaset n'eus ket anezhi",
+ "redirect-fix-broken-moved": "O reizhañ an adkasoù torret war-zu ar bajenn bal %(to)s",
+ "redirect-fix-double": "Dresañ an adkas doubl da %(to)s",
+ "redirect-fix-loop": "O tresañ al lagadenn adkas war-zu %(to)s",
+ "redirect-remove-broken": "Adkas war-zu ur bajenn n'eus ket anezhi pe a zo bet dilamet",
"redirect-remove-loop": "Stumm ur c'helc'h-tro born zo gant an [[Wikipedia:Redirect|adkas]]"
}
diff --git a/redirect/de.json b/redirect/de.json
index dd97581..cafe3f1 100644
--- a/redirect/de.json
+++ b/redirect/de.json
@@ -2,6 +2,7 @@
"@metadata": {
"authors": [
"Geitost",
+ "Justman10000",
"Metalhead64",
"The Evil IP address"
]
@@ -10,6 +11,6 @@
"redirect-fix-broken-moved": "Korrigiere defekte Weiterleitung auf Verschiebeziel %(to)s",
"redirect-fix-double": "Korrigiere doppelte Weiterleitung auf %(to)s",
"redirect-fix-loop": "Korrigiere Weiterleitungschleife auf %(to)s",
- "redirect-remove-broken": "Weiterleitungsziel existiert nicht",
+ "redirect-remove-broken": "Weiterleitung zu einer gelöschten oder nicht existierenden Seite",
"redirect-remove-loop": "Weiterleitungsziel auf sich selbst"
}
diff --git a/redirect/fi.json b/redirect/fi.json
index 7d9139c..b98c0f1 100644
--- a/redirect/fi.json
+++ b/redirect/fi.json
@@ -6,13 +6,14 @@
"Nedergard",
"Nike",
"Olli",
+ "Pyscowicz",
"Silvonen"
]
},
"redirect-broken-redirect-template": "{{Pikapoisto|1=|2=O2}}",
- "redirect-fix-broken-moved": "Botti korjasi rikkinäisen ohjauksen siirrettyyn kohdesivuun %(to)s",
- "redirect-fix-double": "Botti korjasi kaksinkertaisen ohjauksen sivulle %(to)s",
- "redirect-fix-loop": "Botti korjasi ohjaussilmukan sivulle %(to)s",
+ "redirect-fix-broken-moved": "Korjasi rikkinäisen ohjauksen siirrettyyn kohdesivuun %(to)s",
+ "redirect-fix-double": "Korjasi kaksinkertaisen ohjauksen sivulle %(to)s",
+ "redirect-fix-loop": "Korjasi ohjaussilmukan sivulle %(to)s",
"redirect-remove-broken": "Ohjaus poistetulle tai olemattomalle sivulle",
"redirect-remove-loop": "Ohjauksen kohde muodostaa ohjaussilmukan"
}
diff --git a/redirect/mk.json b/redirect/mk.json
index 1750639..03c001e 100644
--- a/redirect/mk.json
+++ b/redirect/mk.json
@@ -10,6 +10,6 @@
"redirect-fix-broken-moved": "Исправка на прекинато пренасочување кон преместена целна страница %(to)s",
"redirect-fix-double": "Исправка на двојни пренасочувања → %(to)s",
"redirect-fix-loop": "Поправа јамка на пренасочување кон %(to)s",
- "redirect-remove-broken": "[[ВП:КББ|О6]: [[Википедија:Пренасочување|Пренасочување]] кон избришана или непостоечка страница",
- "redirect-remove-loop": "[[ВП:КББ|О6]]: Одредницата за [[Википедија:Пренасочување|пренасочување]] образува јамка"
+ "redirect-remove-broken": "Пренасочување кон избришана или непостоечка страница",
+ "redirect-remove-loop": "Одредницата за пренасочување образува јамка"
}
diff --git a/replicate_wiki/br.json b/replicate_wiki/br.json
new file mode 100644
index 0000000..25ba846
--- /dev/null
+++ b/replicate_wiki/br.json
@@ -0,0 +1,9 @@
+{
+ "@metadata": {
+ "authors": [
+ "Adriendelucca"
+ ]
+ },
+ "replicate_wiki-same-pages": "An holl bajennoù a-bouez a zo heñvel",
+ "replicate_wiki-summary": "Robot: Goubredañ ar wiki diwar %(source)s"
+}
diff --git a/replicate_wiki/de.json b/replicate_wiki/de.json
new file mode 100644
index 0000000..a80303c
--- /dev/null
+++ b/replicate_wiki/de.json
@@ -0,0 +1,12 @@
+{
+ "@metadata": {
+ "authors": [
+ "Justman10000"
+ ]
+ },
+ "replicate_wiki-headline": "Seiten, die vom Original abweichen",
+ "replicate_wiki-missing-users": "Admins vom Original, die hier fehlen",
+ "replicate_wiki-same-pages": "Alle wichtigen Seiten sind die gleichen",
+ "replicate_wiki-same-users": "Alle Benutzer aus dem Original sind zudem in diesem Wiki vertreten",
+ "replicate_wiki-summary": "Bot: Wiki-Synchronisation von %(source)s"
+}
diff --git a/replicate_wiki/fr.json b/replicate_wiki/fr.json
index 7128ec2..be7b787 100644
--- a/replicate_wiki/fr.json
+++ b/replicate_wiki/fr.json
@@ -8,5 +8,5 @@
"replicate_wiki-missing-users": "Administrateurs de l’original qui manquent ici",
"replicate_wiki-same-pages": "Toutes les pages importantes sont identiques",
"replicate_wiki-same-users": "Tous les utilisateurs de l’original sont également présents sur ce wiki",
- "replicate_wiki-summary": "Bot : synchronisation du wiki à partir de %(source)s"
+ "replicate_wiki-summary": "Robot : synchronisation du wiki à partir de %(source)s"
}
diff --git a/replicate_wiki/mk.json b/replicate_wiki/mk.json
new file mode 100644
index 0000000..c9c167f
--- /dev/null
+++ b/replicate_wiki/mk.json
@@ -0,0 +1,12 @@
+{
+ "@metadata": {
+ "authors": [
+ "Bjankuloski06"
+ ]
+ },
+ "replicate_wiki-headline": "Страницата што се разликува од оригиналот",
+ "replicate_wiki-missing-users": "Администратори од оригиналот што недостасуваат овде",
+ "replicate_wiki-same-pages": "Сите важни страници се исти",
+ "replicate_wiki-same-users": "Сите корисници од оригиналот се присутни и на ова вики",
+ "replicate_wiki-summary": "Бот: Викиусогласување од %(source)s"
+}
diff --git a/replicate_wiki/sr.json b/replicate_wiki/sr.json
new file mode 100644
index 0000000..5053d4d
--- /dev/null
+++ b/replicate_wiki/sr.json
@@ -0,0 +1,8 @@
+{
+ "@metadata": {
+ "authors": [
+ "Milicevic01"
+ ]
+ },
+ "replicate_wiki-summary": "Бот: усклађено са %(source)s."
+}
diff --git a/undelete/br.json b/undelete/br.json
index 33eeed5..6f88c6d 100644
--- a/undelete/br.json
+++ b/undelete/br.json
@@ -1,10 +1,11 @@
{
"@metadata": {
"authors": [
+ "Adriendelucca",
"Gwenn-Ael"
]
},
"undelete-from-file": "Robot : nullañ dilamadur ul listennad pajennoù",
- "undelete-images": "Robot : o tiverkañ dilamadur an holl skeudennoù diwar ar bajenn %(page)s\")",
+ "undelete-images": "Robot : O nullañ dilamadur an holl skeudennoù diwar ar bajenn %(page)s",
"undelete-linked-pages": "Robot : O tiverkañ dilamadur an holl bajennoù liammet adalek %(page)s"
}
diff --git a/unprotect/de.json b/unprotect/de.json
index c7cf4e1..31d8e52 100644
--- a/unprotect/de.json
+++ b/unprotect/de.json
@@ -1,6 +1,7 @@
{
"@metadata": {
"authors": [
+ "Justman10000",
"Metalhead64",
"Xqt"
]
@@ -8,6 +9,6 @@
"unprotect-category": "Bot: Hebe den Schutz aller Seiten aus der Kategorie %(cat)s auf",
"unprotect-images": "Bot: Hebe den Schutz aller Dateien auf der Seite %(page)s auf",
"unprotect-links": "Bot: Gebe alle Seiten frei, die von %(page)s verlinkt sind",
- "unprotect-ref": "Bot: Gebe alle Seiten frei, die von %(page)s verweisen",
+ "unprotect-ref": "Bot: Aufhebung des Schutzes aller Seiten, die auf %(page)s verweisen",
"unprotect-simple": "Bot: Gebe eine Liste mit Dateien frei"
}
--
To view, visit https://gerrit.wikimedia.org/r/c/pywikibot/i18n/+/913917
To unsubscribe, or for help writing mail filters, visit https://gerrit.wikimedia.org/r/settings
Gerrit-Project: pywikibot/i18n
Gerrit-Branch: master
Gerrit-Change-Id: I28d26842d90847e07f2384fd758753e83b686701
Gerrit-Change-Number: 913917
Gerrit-PatchSet: 1
Gerrit-Owner: L10n-bot <l10n-bot(a)translatewiki.net>
Gerrit-Reviewer: L10n-bot <l10n-bot(a)translatewiki.net>
Gerrit-Reviewer: jenkins-bot
Gerrit-MessageType: merged