jenkins-bot has submitted this change and it was merged.
Change subject: Use custom_name for file namespace
......................................................................
Use custom_name for file namespace
Right now '%s' % FILE object returns ':File:' instead of 'File',
causing this script to behave really badly by changing [[File:Foo.jpg]]
to [[:File::Bar.jpg]]
Change-Id: I699dbcf5474a683871f20c2d548924ce7bdb137b
---
M scripts/image.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/image.py b/scripts/image.py
index b99f28d..c826178 100755
--- a/scripts/image.py
+++ b/scripts/image.py
@@ -160,7 +160,7 @@
if not self.getOption('loose'):
replacements.append((image_regex,
u'[[%s:%s\\g<parameters>]]'
- % (self.site.namespaces.FILE,
+ % (self.site.namespaces.FILE.custom_name,
self.new_image)))
else:
replacements.append((image_regex, self.new_image))
--
To view, visit https://gerrit.wikimedia.org/r/282105
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I699dbcf5474a683871f20c2d548924ce7bdb137b
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: [cleanup] Add a separate method delete_redirect()
......................................................................
[cleanup] Add a separate method delete_redirect()
- Both actions double and delete use code to delete a page, but for the
double action this part is not enabled yet, neither in core nor in compat.
But this should be solved later and consider that a redirect loop may be
created by accident or by vandalism and the page has a version history.
Anyway we could split off that code and recycle it. This prevents code
duplication and gives a better readability.
- change action comment because deleting is not the only way to solve
broken redirects
Change-Id: I49f39bae584b7c62fd1a99cf80407a5987d9a5a3
---
M scripts/redirect.py
1 file changed, 36 insertions(+), 48 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/redirect.py b/scripts/redirect.py
index cb12475..4bc9c60 100755
--- a/scripts/redirect.py
+++ b/scripts/redirect.py
@@ -395,6 +395,35 @@
self.generator = generator
self.exiting = False
+ def delete_redirect(self, page, summary_key):
+ """Delete the redirect page."""
+ assert page.site == self.site, (
+ 'target page is on different site {0}'.format(page.site))
+ reason = i18n.twtranslate(self.site, summary_key)
+ if page.site.logged_in(sysop=True):
+ page.delete(reason, prompt=False)
+ elif i18n.twhas_key(page.site,
+ 'redirect-broken-redirect-template'):
+ pywikibot.output(u"No sysop in user-config.py, "
+ u"put page to speedy deletion.")
+ try:
+ content = page.get(get_redirect=True)
+ except pywikibot.SectionError:
+ content_page = pywikibot.Page(page.site,
+ page.title(withSection=False))
+ content = content_page.get(get_redirect=True)
+ # TODO: Add bot's signature if needed (Bug: T131517)
+ content = i18n.twtranslate(
+ page.site,
+ 'redirect-broken-redirect-template') + '\n' + content
+ try:
+ page.put(content, reason)
+ except pywikibot.PageSaveRelatedError as e:
+ pywikibot.error(e)
+ else:
+ pywikibot.output(
+ u'No speedy deletion template available')
+
def delete_broken_redirects(self):
"""Process all broken redirects."""
# get reason for deletion text
@@ -483,32 +512,7 @@
u'Do you want to delete %s?'
% (targetPage.title(asLink=True),
redir_page.title(asLink=True))):
- reason = i18n.twtranslate(self.site,
- 'redirect-remove-broken')
- if self.site.logged_in(sysop=True):
- redir_page.delete(reason, prompt=False)
- else:
- assert targetPage.site == self.site, (
- u'target page is on different site %s'
- % targetPage.site)
- if i18n.twhas_key(self.site,
- 'redirect-broken-redirect-template'):
- pywikibot.output(u"No sysop in user-config.py, "
- u"put page to speedy deletion.")
- content = redir_page.get(get_redirect=True)
- # TODO: Add bot's signature if needed
- # Not supported via TW yet
- content = i18n.twtranslate(
- targetPage.site,
- 'redirect-broken-redirect-template'
- ) + "\n" + content
- try:
- redir_page.put(content, reason)
- except pywikibot.PageSaveRelatedError as e:
- pywikibot.error(e)
- else:
- pywikibot.output(
- u'No speedy deletion template available')
+ self.delete_redirect(redir_page, 'redirect-remove-broken')
elif not (self.getOption('delete') or movedTarget):
pywikibot.output(u'Cannot fix or delete the broken redirect')
except pywikibot.IsRedirectPage:
@@ -617,28 +621,12 @@
u'Redirect target %s forms a redirect loop.'
% targetPage.title(asLink=True))
break # FIXME: doesn't work. edits twice!
- try:
- content = targetPage.get(get_redirect=True)
- except pywikibot.SectionError:
- content_page = pywikibot.Page(
- targetPage.site,
- targetPage.title(withSection=False))
- content = content_page.get(get_redirect=True)
- if i18n.twhas_key(
- targetPage.site,
- 'redirect-broken-redirect-template') and \
- i18n.twhas_key(targetPage.site,
- 'redirect-remove-loop'):
- pywikibot.output(u"Tagging redirect for deletion")
+ if self.getOption('delete'):
# Delete the two redirects
- content = i18n.twtranslate(
- targetPage.site,
- 'redirect-broken-redirect-template'
- ) + "\n" + content
- summ = i18n.twtranslate(targetPage.site,
- 'redirect-remove-loop')
- targetPage.put(content, summ)
- redir.put(content, summ)
+ # TODO: Check whether pages aren't vandalized
+ # and (maybe) do not have a version history
+ self.delete_redirect(targetPage, 'redirect-remove-loop')
+ self.delete_redirect(redir, 'redirect-remove-loop')
break
else: # redirect target found
if targetPage.isStaticRedirect():
@@ -726,7 +714,7 @@
@type args: list of unicode
"""
options = {}
- # what the bot should do (either resolve double redirs, or delete broken
+ # what the bot should do (either resolve double redirs, or process broken
# redirs)
action = None
# where the bot should get his infos from (either None to load the
--
To view, visit https://gerrit.wikimedia.org/r/279940
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I49f39bae584b7c62fd1a99cf80407a5987d9a5a3
Gerrit-PatchSet: 3
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: [pep8] keep lines beneath 79 chars
......................................................................
[pep8] keep lines beneath 79 chars
Keep lines beneath 79 characters.
Otherwise there is an ugly line wrap when the text is printed.
Change-Id: I8c4fdd33ae8a12e2c67279f22338f5aaceec8775
---
M pywikibot/bot.py
M pywikibot/pagegenerators.py
2 files changed, 13 insertions(+), 12 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 3c76689..ed2c451 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -996,8 +996,8 @@
-nolog Disable the log file (if it is enabled by default).
-maxlag Sets a new maxlag parameter to a number of seconds. Defer bot
- edits during periods of database server lag. Default is set by
- config.py
+ edits during periods of database server lag. Default is set
+ by config.py
-putthrottle:n Set the minimum time (in seconds) the bot will wait between
-pt:n saving pages.
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index fdc4be9..0bc7bd6 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -136,7 +136,7 @@
-logevents:review,Usr,20121231 gives review pages by user
Usr since the beginning till the 31 Dec 2012
- In some cases it must be written as -logevents:"patrol,Usr,20"
+ In some cases it must be written as -logevents:"move,Usr,20"
-namespaces Filter the page generator to only yield pages in the
-namespace specified namespaces. Separate multiple namespace
@@ -174,7 +174,7 @@
-imagesused Work on all images that contained on a certain page.
Argument can also be given as "-imagesused:linkingpagetitle".
--newimages If given as -newimages:x, it will work on the x newest images.
+-newimages If given as -newimages:x, it will work on x newest images.
Otherwise asks to input the number of wanted images.
-newpages Work on the most recent new pages. If given as -newpages:x,
@@ -182,15 +182,15 @@
-recentchanges Work on the pages with the most recent changes. If
given as -recentchanges:x, will work on the x most recently
- changed pages. If given as -recentchanges:offset,duration it will
- work on pages changed from 'offset' minutes with 'duration'
- minutes of timespan.
+ changed pages. If given as -recentchanges:offset,duration it
+ will work on pages changed from 'offset' minutes with
+ 'duration' minutes of timespan.
By default, if no values follow -recentchanges, then we pass
-recentchanges:x where x = 60
Examples:
- -recentchanges:20 will give the 20 most recently changed pages
+ -recentchanges:20 gives the 20 most recently changed pages
-recentchanges:120,70 will give pages with 120 offset
minutes and 70 minutes of timespan
@@ -214,13 +214,14 @@
-prefixindex Work on pages commencing with a common prefix.
-subpage:n Filters pages to only those that have depth n
- i.e. a depth of 0 filters out all pages that are subpages, and
- a depth of 1 filters out all pages that are subpages of subpages.
+ i.e. a depth of 0 filters out all pages that are subpages,
+ and a depth of 1 filters out all pages that are subpages of
+ subpages.
-titleregex A regular expression that needs to match the article title
otherwise the page won't be returned.
- Multiple -titleregex:regexpr can be provided and the page will
- be returned if title is matched by any of the regexpr
+ Multiple -titleregex:regexpr can be provided and the page
+ will be returned if title is matched by any of the regexpr
provided.
Case insensitive regular expressions will be used and
dot matches any character.
--
To view, visit https://gerrit.wikimedia.org/r/281897
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I8c4fdd33ae8a12e2c67279f22338f5aaceec8775
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Fix https proxy auth
......................................................................
Fix https proxy auth
urllib2 https proxy auth has not yet been fixed in
Proxy(Basic|Digest)AuthHandler.
http://bugs.python.org/issue7291
To workaround this bug, we must use ProxyHandler which has been fixed.
For the proxy support to be effective throughout the Pywikibot code,
MyURLopener must be set as the default opener with urllib2.install_opener.
Bug: T76292
Change-Id: Ib61587c9e3cfb920bdab117b139d1e3d06b8b3c7
---
M wikipedia.py
1 file changed, 15 insertions(+), 9 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/wikipedia.py b/wikipedia.py
index 2823641..78de31d 100644
--- a/wikipedia.py
+++ b/wikipedia.py
@@ -10468,17 +10468,21 @@
MyURLopener = urllib2.build_opener(U2RedirectHandler)
if config.proxy['host']:
- proxyHandler = urllib2.ProxyHandler({'http': 'http://%s/' % config.proxy['host'],
- 'https': 'https://%s' % config.proxy['host']})
+ if config.proxy['auth']:
+ proxy = {
+ 'host': config.proxy['host'],
+ 'user': urllib.quote(config.proxy['auth'][0], safe=''),
+ 'pass': urllib.quote(config.proxy['auth'][1], safe='')
+ }
+ credentials_and_host = '%(user)s:%(pass)s@%(host)s' % proxy
+ else:
+ credentials_and_host = config.proxy['host']
+
+ proxyHandler = urllib2.ProxyHandler(
+ {'http': 'http://%s/' % credentials_and_host,
+ 'https': 'https://%s/' % credentials_and_host})
MyURLopener.add_handler(proxyHandler)
- if config.proxy['auth']:
- proxyAuth = urllib2.HTTPPasswordMgrWithDefaultRealm()
- proxyAuth.add_password(None, config.proxy['host'],
- config.proxy['auth'][0], config.proxy['auth'][1])
- proxyAuthHandler = urllib2.ProxyBasicAuthHandler(proxyAuth)
-
- MyURLopener.add_handler(proxyAuthHandler)
if config.authenticate:
passman = urllib2.HTTPPasswordMgrWithDefaultRealm()
@@ -10491,6 +10495,8 @@
MyURLopener.addheaders = [('User-agent', useragent)]
+urllib2.install_opener(MyURLopener)
+
# The following will monkey-patch the pywikibot module to contain the same
# functions and variables as wikipedia itself. This means we no longer have
# to import wikipedia as pywikibot - instead, we can just import pywikibot
--
To view, visit https://gerrit.wikimedia.org/r/147073
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ib61587c9e3cfb920bdab117b139d1e3d06b8b3c7
Gerrit-PatchSet: 4
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>