Xqt has submitted this change and it was merged.
Change subject: Wrap api search test in skip wrapper
......................................................................
Wrap api search test in skip wrapper
If the search method returns a timeout, such as in [1], the build fails. This
changes the behavior, and marks the test as 'skipped'. Non-timeouts will
still cause the build to fail.
[1] https://travis-ci.org/wikimedia/pywikibot-core/builds/11205092
Change-Id: I59b2c576e156c3896c7b1743c7d046af7386834b
---
M tests/site_tests.py
1 file changed, 23 insertions(+), 19 deletions(-)
Approvals:
Xqt: Looks good to me, approved
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 6d41c98..d9811bb 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -686,25 +686,29 @@
def testSearch(self):
"""Test the site.search() method"""
-
- se = list(mysite.search("wiki", total=10))
- self.assertTrue(len(se) <= 10)
- self.assertTrue(all(isinstance(hit, pywikibot.Page)
- for hit in se))
- self.assertTrue(all(hit.namespace() == 0 for hit in se))
- for hit in mysite.search("common", namespaces=4, total=5):
- self.assertType(hit, pywikibot.Page)
- self.assertEqual(hit.namespace(), 4)
- for hit in mysite.search("word", namespaces=[5, 6, 7], total=5):
- self.assertType(hit, pywikibot.Page)
- self.assertTrue(hit.namespace() in [5, 6, 7])
- for hit in mysite.search("another", namespaces="8|9|10", total=5):
- self.assertType(hit, pywikibot.Page)
- self.assertTrue(hit.namespace() in [8, 9, 10])
- for hit in mysite.search("wiki", namespaces=0, total=10,
- getredirects=True):
- self.assertType(hit, pywikibot.Page)
- self.assertEqual(hit.namespace(), 0)
+ try:
+ se = list(mysite.search("wiki", total=10))
+ self.assertTrue(len(se) <= 10)
+ self.assertTrue(all(isinstance(hit, pywikibot.Page)
+ for hit in se))
+ self.assertTrue(all(hit.namespace() == 0 for hit in se))
+ for hit in mysite.search("common", namespaces=4, total=5):
+ self.assertType(hit, pywikibot.Page)
+ self.assertEqual(hit.namespace(), 4)
+ for hit in mysite.search("word", namespaces=[5, 6, 7], total=5):
+ self.assertType(hit, pywikibot.Page)
+ self.assertTrue(hit.namespace() in [5, 6, 7])
+ for hit in mysite.search("another", namespaces="8|9|10", total=5):
+ self.assertType(hit, pywikibot.Page)
+ self.assertTrue(hit.namespace() in [8, 9, 10])
+ for hit in mysite.search("wiki", namespaces=0, total=10,
+ getredirects=True):
+ self.assertType(hit, pywikibot.Page)
+ self.assertEqual(hit.namespace(), 0)
+ except pywikibot.data.api.APIError as e:
+ if e.code == "gsrsearch-error" and "timed out" in e.info:
+ raise unittest.SkipTest("gsrsearch returned timeout on site: %r" % e)
+ raise
def testUsercontribs(self):
"""Test the site.usercontribs() method"""
--
To view, visit https://gerrit.wikimedia.org/r/83669
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I59b2c576e156c3896c7b1743c7d046af7386834b
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Xqt has submitted this change and it was merged.
Change subject: Ported delete.py to core
......................................................................
Ported delete.py to core
Change-Id: Ifbab6b64fcf6e58c0da3daed9ed21aeea7db4a7b
---
A scripts/delete.py
1 file changed, 132 insertions(+), 0 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/delete.py b/scripts/delete.py
new file mode 100644
index 0000000..2114e57
--- /dev/null
+++ b/scripts/delete.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+"""
+This script can be used to delete and undelete pages en masse.
+Of course, you will need an admin account on the relevant wiki.
+
+Syntax: python delete.py [-category categoryName]
+
+Command line options:
+
+-page: Delete specified page
+-cat: Delete all pages in the given category.
+-nosubcats: Don't delete pages in the subcategories.
+-links: Delete all pages linked from a given page.
+-file: Delete all pages listed in a text file.
+-ref: Delete all pages referring from a given page.
+-images: Delete all images used on a given page.
+-always: Don't prompt to delete pages, just do it.
+-summary: Supply a custom edit summary.
+-undelete: Actually undelete pages instead of deleting.
+ Obviously makes sense only with -page and -file.
+
+Examples:
+
+Delete everything in the category "To delete" without prompting.
+
+ python delete.py -cat:"To delete" -always
+"""
+__version__ = '$Id$'
+#
+# Distributed under the terms of the MIT license.
+#
+import pywikibot
+from pywikibot import i18n, config, catlib, pagegenerators
+
+class DeletionRobot:
+ """ This robot allows deletion of pages en masse. """
+
+ def __init__(self, generator, summary, always = False, undelete=True):
+ """ Arguments:
+ * generator - A page generator.
+ * always - Delete without prompting?
+
+ """
+ self.generator = generator
+ self.summary = summary
+ self.always = always
+ self.undelete = undelete
+
+ def run(self):
+ """ Starts the robot's action. """
+ #Loop through everything in the page generator and delete it.
+ for page in self.generator:
+ pywikibot.output(u'Processing page %s' % page.title())
+ if self.undelete:
+ page.undelete(self.summary, throttle = True)
+ else:
+ page.delete(self.summary, not self.always, throttle = True)
+
+def main():
+ genFactory = pagegenerators.GeneratorFactory()
+ pageName = ''
+ singlePage = ''
+ summary = ''
+ always = False
+ doSinglePage = False
+ doCategory = False
+ deleteSubcategories = True
+ doRef = False
+ doLinks = False
+ doImages = False
+ undelete = False
+ fileName = ''
+ generator = None
+
+ # read command line parameters
+ for arg in pywikibot.handleArgs():
+ if arg == '-always':
+ always = True
+ elif arg.startswith('-summary'):
+ if len(arg) == len('-summary'):
+ summary = pywikibot.input(u'Enter a reason for the deletion:')
+ else:
+ summary = arg[len('-summary:'):]
+ elif arg.startswith('-nosubcats'):
+ deleteSubcategories = False
+ elif arg.startswith('-images'):
+ doImages = True
+ if len(arg) == len('-images'):
+ pageName = pywikibot.input(
+ u'Enter the page with the images to delete:')
+ else:
+ pageName = arg[len('-images'):]
+ elif arg.startswith('-undelete'):
+ undelete = True
+ else:
+ genFactory.handleArg(arg)
+ if not summary:
+ if arg.startswith('-category'):
+ summary = i18n.twtranslate(mysite, 'delete-from-category',{'page': pageName})
+ elif arg.startswith('-links'):
+ summary = i18n.twtranslate(mysite, 'delete-linked-pages', {'page': pageName})
+ elif arg.startswith('-ref'):
+ summary = i18n.twtranslate(mysite, 'delete-referring-pages', {'page': pageName})
+ elif arg.startswith('-file'):
+ summary = i18n.twtranslate(mysite, 'delete-from-file')
+ mysite = pywikibot.getSite()
+ if doImages:
+ if not summary:
+ summary = i18n.twtranslate(mysite, 'delete-images',
+ {'page': pageName})
+ page = pywikibot.Page(mysite, pageName)
+ generator = pagegenerators.ImagesPageGenerator(page)
+ if not summary:
+ summary = pywikibot.input(u'Enter a reason for the %sdeletion:'
+ % ['', 'un'][undelete])
+ if not generator:
+ generator = genFactory.getCombinedGenerator()
+ if not generator:
+ # syntax error, show help text from the top of this file
+ pywikibot.showHelp('delete')
+ return
+ if generator:
+ pywikibot.setAction(summary)
+ # We are just deleting pages, so we have no need of using a preloading
+ # page generator to actually get the text of those pages.
+ bot = DeletionRobot(generator, summary, always, undelete)
+ bot.run()
+if __name__ == "__main__":
+ try:
+ main()
+ finally:
+ pywikibot.stopme()
--
To view, visit https://gerrit.wikimedia.org/r/82892
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ifbab6b64fcf6e58c0da3daed9ed21aeea7db4a7b
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Xqt has submitted this change and it was merged.
Change subject: Inline help if base_dir does not contain user-config.py
......................................................................
Inline help if base_dir does not contain user-config.py
Change-Id: Ib05d843f6e07a94e92b65a56f8495dca5dad46ea
---
M pywikibot/config2.py
1 file changed, 7 insertions(+), 2 deletions(-)
Approvals:
Xqt: Looks good to me, approved
diff --git a/pywikibot/config2.py b/pywikibot/config2.py
index 8fec8c0..6c2687c 100644
--- a/pywikibot/config2.py
+++ b/pywikibot/config2.py
@@ -139,8 +139,13 @@
raise RuntimeError("Directory '%(base_dir)s' does not exist."
% locals())
if not os.path.exists(os.path.join(base_dir, "user-config.py")):
- raise RuntimeError("No user-config.py found in directory '%(base_dir)s'."
- % locals())
+ exc_text = "No user-config.py found in directory '%(base_dir)s'.\n" % locals()
+ exc_text += " Please check that user-config.py is stored in the correct location.\n"
+ exc_text += " Directory where user-config.py is searched is determined as follows:\n\n"
+ exc_text += " " + _get_base_dir.__doc__
+
+ raise RuntimeError(exc_text)
+
return base_dir
_base_dir = _get_base_dir()
--
To view, visit https://gerrit.wikimedia.org/r/84223
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ib05d843f6e07a94e92b65a56f8495dca5dad46ea
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Xqt has submitted this change and it was merged.
Change subject: proposed bug fixing of https://sourceforge.net/p/pywikipediabot/bugs/1482/ (archivebot.py)
......................................................................
proposed bug fixing of https://sourceforge.net/p/pywikipediabot/bugs/1482/ (archivebot.py)
Change-Id: I20aa6d57c51c230f2adb4408ffc9bcf27f1e6bca
---
M archivebot.py
1 file changed, 22 insertions(+), 14 deletions(-)
Approvals:
Xqt: Looks good to me, approved
diff --git a/archivebot.py b/archivebot.py
index d6483a5..d0f79ee 100644
--- a/archivebot.py
+++ b/archivebot.py
@@ -81,7 +81,7 @@
import pagegenerators, query
Site = pywikibot.getSite()
-import os, re, time, locale, traceback, string, urllib
+import os, re, time, locale, traceback, string, urllib, unicodedata
try: #Get a constructor for the MD5 hash object
import hashlib
@@ -234,33 +234,41 @@
if not TM:
TM = re.search(r'(\d\d?) (\S+) (\d\d\d\d) (\d\d):(\d\d) \(.*?\)', line)
if TM:
- TIME = txt2timestamp(TM.group(0),"%d. %b %Y kl. %H:%M (%Z)")
+ # Strip away all diacritics in the Mn ('Mark, non-spacing') category
+ # NFD decomposition splits combined characters (e.g. 'ä', LATIN SMALL
+ # LETTER A WITH DIAERESIS) into two entities: LATIN SMALL LETTER A
+ # and COMBINING DIAERESIS. The latter falls in the Mn category and is
+ # filtered out, resulting in 'a'.
+ _TM = ''.join(c for c in unicodedata.normalize('NFD', TM.group(0))
+ if unicodedata.category(c) != 'Mn')
+
+ TIME = txt2timestamp(_TM,"%d. %b %Y kl. %H:%M (%Z)")
if not TIME:
- TIME = txt2timestamp(TM.group(0), "%Y. %B %d., %H:%M (%Z)")
+ TIME = txt2timestamp(_TM, "%Y. %B %d., %H:%M (%Z)")
if not TIME:
- TIME = txt2timestamp(TM.group(0), "%d. %b %Y kl.%H:%M (%Z)")
+ TIME = txt2timestamp(_TM, "%d. %b %Y kl.%H:%M (%Z)")
if not TIME:
- TIME = txt2timestamp(re.sub(' *\([^ ]+\) *', '', TM.group(0)),
+ TIME = txt2timestamp(re.sub(' *\([^ ]+\) *', '', _TM),
"%H:%M, %d %B %Y")
if not TIME:
- TIME = txt2timestamp(TM.group(0), "%H:%M, %d %b %Y (%Z)")
+ TIME = txt2timestamp(_TM, "%H:%M, %d %b %Y (%Z)")
if not TIME:
- TIME = txt2timestamp(re.sub(' *\([^ ]+\) *', '', TM.group(0)),
+ TIME = txt2timestamp(re.sub(' *\([^ ]+\) *', '', _TM),
"%H:%M, %d %b %Y")
if not TIME:
- TIME = txt2timestamp(TM.group(0), "%H:%M, %b %d %Y (%Z)")
+ TIME = txt2timestamp(_TM, "%H:%M, %b %d %Y (%Z)")
if not TIME:
- TIME = txt2timestamp(TM.group(0), "%H:%M, %B %d %Y (%Z)")
+ TIME = txt2timestamp(_TM, "%H:%M, %B %d %Y (%Z)")
if not TIME:
- TIME = txt2timestamp(TM.group(0), "%H:%M, %b %d, %Y (%Z)")
+ TIME = txt2timestamp(_TM, "%H:%M, %b %d, %Y (%Z)")
if not TIME:
- TIME = txt2timestamp(TM.group(0), "%H:%M, %B %d, %Y (%Z)")
+ TIME = txt2timestamp(_TM, "%H:%M, %B %d, %Y (%Z)")
if not TIME:
- TIME = txt2timestamp(TM.group(0),"%d. %Bta %Y kello %H.%M (%Z)")
+ TIME = txt2timestamp(_TM,"%d. %Bta %Y kello %H.%M (%Z)")
if not TIME:
- TIME = txt2timestamp(TM.group(0), "%d %B %Y %H:%M (%Z)")
+ TIME = txt2timestamp(_TM, "%d %B %Y %H:%M (%Z)")
if not TIME:
- TIME = txt2timestamp(re.sub(' *\([^ ]+\) *', '', TM.group(0)),
+ TIME = txt2timestamp(re.sub(' *\([^ ]+\) *', '', _TM),
"%H:%M, %d. %b. %Y")
if TIME:
self.timestamp = max(self.timestamp, time.mktime(TIME))
--
To view, visit https://gerrit.wikimedia.org/r/84204
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I20aa6d57c51c230f2adb4408ffc9bcf27f1e6bca
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
Xqt has submitted this change and it was merged.
Change subject: Correcting original message of delete-from-file which is the general message of deletion
......................................................................
Correcting original message of delete-from-file which is the general message of deletion
Change-Id: Ib4961b7946d51dfcbdca2b2d1d78738d3f2957fb
---
M delete.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/delete.py b/delete.py
index e300859..64ff24c 100644
--- a/delete.py
+++ b/delete.py
@@ -2,7 +2,7 @@
msg = {
'en': {
'delete-from-category': u'Robot: Deleting all pages from category %(page)s',
- 'delete-from-file': u'Robot: Deleting a list of files.',
+ 'delete-from-file': u'Robot: Deleting a list of pages.',
'delete-images': u'Robot: Deleting all images on page %(page)s',
'delete-linked-pages': u'Robot: Deleting all pages linked from %(page)s',
'delete-referring-pages': u'Robot: Deleting all pages referring from %(page)s',
--
To view, visit https://gerrit.wikimedia.org/r/84199
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ib4961b7946d51dfcbdca2b2d1d78738d3f2957fb
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/i18n
Gerrit-Branch: master
Gerrit-Owner: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: DrTrigon <dr.trigon(a)surfeu.ch>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Multichill <maarten(a)mdammers.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot
jenkins-bot has submitted this change and it was merged.
Change subject: Support initial text separate from summary
......................................................................
Support initial text separate from summary
When a new file is uploaded, the page text defaults to the summary,
or vice versa.
This change should provide the ability to have them different.
Change-Id: Idd927c0e3256ca610ae418afdfbbaa3afb5ba9d9
---
M pywikibot/site.py
1 file changed, 10 insertions(+), 3 deletions(-)
Approvals:
Legoktm: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 1c0b144..59eebf5 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -3037,7 +3037,7 @@
return self.getFilesFromAnHash(hash_found)
def upload(self, imagepage, source_filename=None, source_url=None,
- comment=None, watch=False, ignore_warnings=False):
+ comment=None, text=None, watch=False, ignore_warnings=False):
"""Upload a file to the wiki.
Either source_filename or source_url, but not both, must be provided.
@@ -3048,6 +3048,9 @@
@param source_url: URL of the file to be uploaded
@param comment: Edit summary; if this is not provided, then
imagepage.text will be used. An empty summary is not permitted.
+ This may also serve as the initial page text (see below).
+ @param text: Initial page text; if this is not set, then
+ imagepage.text will be used, or comment.
@param watch: If true, add imagepage to the bot user's watchlist
@param ignore_warnings: if true, ignore API warnings and force
upload (for example, to overwrite an existing file); default False
@@ -3089,6 +3092,10 @@
raise ValueError(
"APISite.upload: cannot upload file without a summary/description."
)
+ if text is None:
+ text = imagepage.text
+ if not text:
+ text = comment
token = self.token(imagepage, "edit")
if source_filename:
# upload local file
@@ -3102,7 +3109,7 @@
req = api.Request(site=self, action="upload", token=token,
filename=imagepage.title(withNamespace=False),
file=source_filename, comment=comment,
- mime=True)
+ text=text, mime=True)
else:
# upload by URL
if "upload_by_url" not in self.userinfo["rights"]:
@@ -3111,7 +3118,7 @@
% (self.user(), self))
req = api.Request(site=self, action="upload", token=token,
filename=imagepage.title(withNamespace=False),
- url=source_url, comment=comment)
+ url=source_url, comment=comment, text=text)
if watch:
req["watch"] = ""
if ignore_warnings:
--
To view, visit https://gerrit.wikimedia.org/r/82560
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Idd927c0e3256ca610ae418afdfbbaa3afb5ba9d9
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Hazard-SJ <hazard_sj(a)yahoo.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: jenkins-bot