jenkins-bot has submitted this change and it was merged.
Change subject: imagetransfer.py: remove unused messages
......................................................................
imagetransfer.py: remove unused messages
nowCommonsThis and nowCommonsThisMessage are commented out, so just drop
them entirely.
Change-Id: I7c201bbe7270747ae00ac9f1b62934be00b74a76
---
M scripts/imagetransfer.py
1 file changed, 0 insertions(+), 16 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/imagetransfer.py b/scripts/imagetransfer.py
index 42b6203..72d2c91 100644
--- a/scripts/imagetransfer.py
+++ b/scripts/imagetransfer.py
@@ -82,13 +82,6 @@
'zh': u'{{NowCommons|Image:%s}}',
}
-#nowCommonsThis = {
- #'en': u'{{NowCommonsThis|%s}}',
- #'it': u'{{NowCommons omonima|%s}}',
- #'kk': u'{{NowCommonsThis|%s}}',
- #'pt': u'{{NowCommonsThis|%s}}',
-#}
-
nowCommonsMessage = {
'ar': u'الملف الآن متوفر في ويكيميديا كومنز.',
'de': u'Datei ist jetzt auf Wikimedia Commons verfügbar.',
@@ -109,15 +102,6 @@
'sr': u'Слика је сада доступна и на Викимедија Остави.',
'zh': u'檔案已存在於維基共享資源。',
}
-
-#nowCommonsThisMessage = {
- #'ar': u'الملف الآن متوفر في كومنز بنفس الاسم.',
- #'en': u'File is now available on Commons with the same name.',
- #'he': u'הקובץ זמין כעת בוויקישיתוף בשם זהה.',
- #'it': u'L\'immagine è adesso disponibile su Wikimedia Commons con lo stesso nome.',
- #'kk': u'Файлды дәл сол атауымен енді Ортаққордан қатынауға болады.',
- #'pt': u'Esta imagem está agora no Commons com o mesmo nome.',
-#}
# Translations for license templates.
# Must only be given when they are in fact different.
--
To view, visit https://gerrit.wikimedia.org/r/131695
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I7c201bbe7270747ae00ac9f1b62934be00b74a76
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Hashar <hashar@free.fr>
Gerrit-Reviewer: Ladsgroup <ladsgroup@gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw@arctus.nl>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Fix some indentation issues
......................................................................
Fix some indentation issues
E111 indentation is not a multiple of four
E112 expected an indented block
E113 unexpected indentation
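For illustration, a minimal sketch of code that would trigger each of these
checks (hypothetical snippets, not taken from the patch):

    # E111: the body is indented three spaces, not a multiple of four
    if True:
       x = 1

    # E112: an indented block (or indented comment) was expected after the
    # "if", but this comment sits at the outer level
    if True:
    # misplaced comment
        pass

    # E113: this comment is indented although no new block was opened
    y = 1
        # stray indented comment
    z = 2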
Change-Id: I017555315f266aeb851fb874e0224be6013ed038
---
M pywikibot/__init__.py
M pywikibot/bot.py
M pywikibot/botirc.py
M pywikibot/comms/threadedhttp.py
M pywikibot/site.py
M pywikibot/throttle.py
M scripts/checkimages.py
M scripts/interwiki.py
M scripts/redirect.py
M scripts/unusedfiles.py
M tests/site_tests.py
11 files changed, 91 insertions(+), 72 deletions(-)
Approvals:
Siebrand: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index ec27878..ff2f61a 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -519,7 +519,8 @@
def remaining():
remainingPages = page_put_queue.qsize() - 1
- # -1 because we added a None element to stop the queue
+ # -1 because we added a None element to stop the queue
+
remainingSeconds = datetime.timedelta(
seconds=(remainingPages * config.put_throttle))
return (remainingPages, remainingSeconds)
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 7beda63..e8487e2 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -16,7 +16,8 @@
import logging
import logging.handlers
- # all output goes thru python std library "logging" module
+# all output goes thru python std library "logging" module
+
import os
import os.path
import sys
@@ -188,12 +189,14 @@
moduleName = "terminal-interface"
logging.addLevelName(VERBOSE, "VERBOSE")
- # for messages to be displayed on terminal at "verbose" setting
- # use INFO for messages to be displayed even on non-verbose setting
+ # for messages to be displayed on terminal at "verbose" setting
+ # use INFO for messages to be displayed even on non-verbose setting
+
logging.addLevelName(STDOUT, "STDOUT")
- # for messages to be displayed to stdout
+ # for messages to be displayed to stdout
+
logging.addLevelName(INPUT, "INPUT")
- # for prompts requiring user response
+ # for prompts requiring user response
root_logger = logging.getLogger("pywiki")
root_logger.setLevel(DEBUG + 1) # all records except DEBUG go to logger
diff --git a/pywikibot/botirc.py b/pywikibot/botirc.py
index d374a93..9a7073c 100644
--- a/pywikibot/botirc.py
+++ b/pywikibot/botirc.py
@@ -19,7 +19,8 @@
import logging.handlers
- # all output goes thru python std library "logging" module
+# all output goes thru python std library "logging" module
+
import re
from ircbot import SingleServerIRCBot
diff --git a/pywikibot/comms/threadedhttp.py b/pywikibot/comms/threadedhttp.py
index 38636cd..1acb1b0 100644
--- a/pywikibot/comms/threadedhttp.py
+++ b/pywikibot/comms/threadedhttp.py
@@ -358,35 +358,36 @@
# Metaweb Technologies, Inc. License:
- # ========================================================================
- # The following dummy classes are:
- # ========================================================================
- # Copyright (c) 2007, Metaweb Technologies, Inc.
- # All rights reserved.
- #
- # Redistribution and use in source and binary forms, with or without
- # modification, are permitted provided that the following conditions
- # are met:
- # * Redistributions of source code must retain the above copyright
- # notice, this list of conditions and the following disclaimer.
- # * Redistributions in binary form must reproduce the above
- # copyright notice, this list of conditions and the following
- # disclaimer in the documentation and/or other materials provided
- # with the distribution.
- #
- # THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES AND CONTRIBUTORS
- # ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
- # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL METAWEB
- # TECHNOLOGIES OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
- # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
- # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
- # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
- # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
- # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
- # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
- # POSSIBILITY OF SUCH DAMAGE.
- # ========================================================================
+#
+# ========================================================================
+# The following dummy classes are:
+# ========================================================================
+# Copyright (c) 2007, Metaweb Technologies, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following
+# disclaimer in the documentation and/or other materials provided
+# with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES AND CONTRIBUTORS
+# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL METAWEB
+# TECHNOLOGIES OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+# POSSIBILITY OF SUCH DAMAGE.
+# ========================================================================
class DummyRequest(object):
"""Simulated urllib2.Request object for httplib2
diff --git a/pywikibot/site.py b/pywikibot/site.py
index c5d647f..ef20492 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -1341,7 +1341,7 @@
# no "pages" element indicates a circular redirect
raise pywikibot.CircularRedirect(redirmap[title])
pagedata = list(result['query']['pages'].values())[0]
- # there should be only one value in 'pages', and it is the target
+ # there should be only one value in 'pages', and it is the target
if self.sametitle(pagedata['title'], target_title):
target = pywikibot.Page(self, pagedata['title'], pagedata['ns'])
api.update_page(target, pagedata)
@@ -1391,13 +1391,13 @@
pywikibot.debug(u"Preloading %s" % pagedata, _logger)
try:
if pagedata['title'] not in cache:
-# API always returns a "normalized" title which is
-# usually the same as the canonical form returned by
-# page.title(), but sometimes not (e.g.,
-# gender-specific localizations of "User" namespace).
-# This checks to see if there is a normalized title in
-# the response that corresponds to the canonical form
-# used in the query.
+ # API always returns a "normalized" title which is
+ # usually the same as the canonical form returned by
+ # page.title(), but sometimes not (e.g.,
+ # gender-specific localizations of "User" namespace).
+ # This checks to see if there is a normalized title in
+ # the response that corresponds to the canonical form
+ # used in the query.
for key in cache:
if self.sametitle(key, pagedata['title']):
cache[pagedata['title']] = cache[key]
diff --git a/pywikibot/throttle.py b/pywikibot/throttle.py
index 4e56464..aebeb94 100644
--- a/pywikibot/throttle.py
+++ b/pywikibot/throttle.py
@@ -19,10 +19,12 @@
_logger = "wiki.throttle"
-pid = False # global process identifier
- # when the first Throttle is instantiated, it will set this
- # variable to a positive integer, which will apply to all
- # throttle objects created by this process.
+# global process identifier
+#
+# When the first Throttle is instantiated, it will set this variable to a
+# positive integer, which will apply to all throttle objects created by this
+# process.
+pid = False
class Throttle(object):
@@ -52,10 +54,16 @@
self.last_read = 0
self.last_write = 0
self.next_multiplicity = 1.0
- self.checkdelay = 300 # Check logfile again after this many seconds
- self.dropdelay = 600 # Ignore processes that have not made
- # a check in this many seconds
- self.releasepid = 1200 # Free the process id after this many seconds
+
+ # Check logfile again after this many seconds:
+ self.checkdelay = 300
+
+ # Ignore processes that have not made a check in this many seconds:
+ self.dropdelay = 600
+
+ # Free the process id after this many seconds:
+ self.releasepid = 1200
+
self.lastwait = 0.0
self.delay = 0
self.checktime = 0
@@ -94,8 +102,8 @@
ptime = int(line[1].split('.')[0])
this_site = line[2].rstrip()
except (IndexError, ValueError):
- continue # Sometimes the file gets corrupted
- # ignore that line
+ # Sometimes the file gets corrupted ignore that line
+ continue
if now - ptime > self.releasepid:
continue # process has expired, drop from file
if now - ptime <= self.dropdelay \
@@ -208,8 +216,8 @@
ptime = int(line[1].split('.')[0])
this_site = line[2].rstrip()
except (IndexError, ValueError):
- continue # Sometimes the file gets corrupted
- # ignore that line
+ # Sometimes the file gets corrupted ignore that line
+ continue
if now - ptime <= self.releasepid \
and this_pid != pid:
processes.append({'pid': this_pid,
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 2e69333..64ae192 100644
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -1450,10 +1450,12 @@
if self.allLicenses:
self.license_found = self.allLicenses[0].title()
- self.some_problem = False # If it has "some_problem" it must check
- # the additional settings.
- # if self.settingsData, use addictional settings
+
+ # If it has "some_problem" it must check the additional settings.
+ self.some_problem = False
+
if self.settingsData:
+ # use additional settings
self.findAdditionalProblems()
if self.some_problem:
@@ -1601,9 +1603,9 @@
newGen.append(imageData[0])
return newGen
else:
-## pywikibot.output(
-## u"The wait option is available only with the standard "
-## u"generator.")
+ #pywikibot.output(
+ # u"The wait option is available only with the standard "
+ # u"generator.")
pywikibot.output(
u"The wait option is not available at core yet.")
return generator
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 019b478..772ef0e 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -2370,10 +2370,11 @@
useFrom = False
if adding or removing or modifying:
- #Version info marks bots without unicode error
- #This also prevents abuse filter blocking on de-wiki
-## if not pywikibot.unicode_error:
-## mcomment += u'r%s) (' % sys.version.split()[0]
+ # Version info marks bots without unicode error
+ # This also prevents abuse filter blocking on de-wiki
+
+ #if not pywikibot.unicode_error:
+ # mcomment += u'r%s) (' % sys.version.split()[0]
mcomment += globalvar.summary
diff --git a/scripts/redirect.py b/scripts/redirect.py
index a4b625d..80de7bb 100755
--- a/scripts/redirect.py
+++ b/scripts/redirect.py
@@ -129,7 +129,7 @@
if target.startswith('%s:' % code) \
or target.startswith(':%s:' % code):
if code == self.site.language():
- # link to our wiki, but with the lang prefix
+ # link to our wiki, but with the lang prefix
target = target[(len(code) + 1):]
if target.startswith(':'):
target = target[1:]
@@ -777,15 +777,17 @@
elif arg.startswith('-namespace:'):
ns = arg[11:]
if ns == '':
- ## "-namespace:" does NOT yield -namespace:0 further down the road!
+ # "-namespace:" does NOT yield -namespace:0 further down the road!
ns = i18n.input('pywikibot-enter-namespace-number')
-# TODO! at least for some generators enter a namespace by its name or number
+ # TODO! at least for some generators enter a namespace by its name
+ # or number
if ns == '':
ns = '0'
try:
ns = int(ns)
except ValueError:
-#-namespace:all Process all namespaces. Works only with the API read interface.
+ # -namespace:all Process all namespaces.
+ # Only works with the API read interface.
pass
if ns not in namespaces:
namespaces.append(ns)
diff --git a/scripts/unusedfiles.py b/scripts/unusedfiles.py
index 9529e70..86628f9 100644
--- a/scripts/unusedfiles.py
+++ b/scripts/unusedfiles.py
@@ -122,7 +122,7 @@
usertalkname = 'User Talk:%s' % uploader
usertalkpage = pywikibot.Page(mysite, usertalkname)
msg2uploader = template_user % {'title': page.title()}
- # msg2uploader = msg2uploader.encode("utf-8")
+ # msg2uploader = msg2uploader.encode("utf-8")
appendtext(usertalkpage, msg2uploader)
if __name__ == "__main__":
main()
diff --git a/tests/site_tests.py b/tests/site_tests.py
index 739d103..f70ef0e 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -106,7 +106,7 @@
ns = mysite.namespaces()
self.assertType(ns, dict)
self.assertTrue(all(x in ns for x in range(0, 16)))
- # built-in namespaces always present
+ # built-in namespaces always present
self.assertType(mysite.ns_normalize("project"), basestring)
self.assertTrue(all(isinstance(key, int)
for key in ns))
--
To view, visit https://gerrit.wikimedia.org/r/131694
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I017555315f266aeb851fb874e0224be6013ed038
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Hashar <hashar@free.fr>
Gerrit-Reviewer: Ladsgroup <ladsgroup@gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw@arctus.nl>
Gerrit-Reviewer: Siebrand <siebrand@kitano.nl>
Gerrit-Reviewer: Xqt <info@gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Fix E265 block comment should start with '# '
......................................................................
Fix E265 block comment should start with '# '
Normalize comments so that # is followed by a space (flake8 error E265).
I have left some behind because they are commenting out code.
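As a minimal illustration of the rule (hypothetical lines, not from the
patch):

    x = 1
    #comment without a space after the hash: flagged as E265
    # comment with a space after the hash: passes
    #print(x)  # commented-out code; lines like this were deliberately left alone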
Change-Id: I6d7919c90ff66c9b7184dac427995cbf01d5023e
---
M pywikibot/comms/http.py
M pywikibot/config2.py
M pywikibot/data/__init__.py
M pywikibot/data/api.py
M pywikibot/data/wikidataquery.py
M pywikibot/families/oldwikivoyage_family.py
M pywikibot/families/wikipedia_family.py
M pywikibot/family.py
M pywikibot/fixes.py
M pywikibot/i18n.py
M pywikibot/logentries.py
M pywikibot/login.py
M pywikibot/page.py
M pywikibot/pagegenerators.py
M pywikibot/site.py
M pywikibot/textlib.py
M pywikibot/userinterfaces/transliteration.py
M pywikibot/version.py
M tests/page_tests.py
M tests/site_tests.py
M tests/wikidataquery_tests.py
21 files changed, 293 insertions(+), 284 deletions(-)
Approvals:
Siebrand: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/comms/http.py b/pywikibot/comms/http.py
index 7766460..86fefc6 100644
--- a/pywikibot/comms/http.py
+++ b/pywikibot/comms/http.py
@@ -145,12 +145,12 @@
while not request.lock.acquire(False):
time.sleep(0.1)
- #TODO: do some error correcting stuff
+ # TODO: do some error correcting stuff
if isinstance(request.data, SSLHandshakeError):
if SSL_CERT_VERIFY_FAILED in str(request.data):
raise FatalServerError(str(request.data))
- #if all else fails
+ # if all else fails
if isinstance(request.data, Exception):
raise request.data
diff --git a/pywikibot/config2.py b/pywikibot/config2.py
index d83009d..42fdd3b 100644
--- a/pywikibot/config2.py
+++ b/pywikibot/config2.py
@@ -23,7 +23,7 @@
# variables that are intended only for internal use and not to be exported
# to other modules.
-############## ACCOUNT SETTINGS ##############
+# ############# ACCOUNT SETTINGS ##############
# The family of sites we are working on. wikipedia.py will import
# families/xxx_family.py so if you want to change this variable,
@@ -185,7 +185,7 @@
# Set to True to override the {{bots}} exclusion protocol (at your own risk!)
ignore_bot_templates = False
-############## USER INTERFACE SETTINGS ##############
+# ############# USER INTERFACE SETTINGS ##############
# The encoding that's used in the user's console, i.e. how strings are encoded
# when they are read by raw_input(). On Windows systems' DOS box, this should
@@ -196,8 +196,8 @@
try:
console_encoding = __sys.stdout.encoding
except:
- #When using pywikipedia inside a daemonized twisted application,
- #we get "StdioOnnaStick instance has no attribute 'encoding'"
+ # When using pywikipedia inside a daemonized twisted application,
+ # we get "StdioOnnaStick instance has no attribute 'encoding'"
console_encoding = None
# The encoding the user would like to see text transliterated to. This can be
@@ -254,7 +254,7 @@
except:
colorized_output = False
-############## EXTERNAL EDITOR SETTINGS ##############
+# ############# EXTERNAL EDITOR SETTINGS ##############
# The command for the editor you want to use. If set to None, a simple Tkinter
# editor will be used.
# On Windows systems, this script tries to determine the default text editor.
@@ -286,7 +286,7 @@
# highlighting in your text editor.
editor_filename_extension = 'wiki'
-############## LOGFILE SETTINGS ##############
+# ############# LOGFILE SETTINGS ##############
# Defines for which scripts a logfile should be enabled. Logfiles will be
# saved in the 'logs' subdirectory.
@@ -319,7 +319,7 @@
# (overrides log setting above)
debug_log = []
-############## INTERWIKI SETTINGS ##############
+# ############# INTERWIKI SETTINGS ##############
# Should interwiki.py report warnings for missing links between foreign
# languages?
@@ -366,7 +366,7 @@
# them in RAM.
interwiki_contents_on_disk = False
-############## SOLVE_DISAMBIGUATION SETTINGS ############
+# ############# SOLVE_DISAMBIGUATION SETTINGS ############
#
# Set disambiguation_comment[FAMILY][LANG] to a non-empty string to override
# the default edit comment for the solve_disambiguation bot.
@@ -378,12 +378,12 @@
sort_ignore_case = False
-############## IMAGE RELATED SETTINGS ##############
+# ############# IMAGE RELATED SETTINGS ##############
# If you set this to True, images will be uploaded to Wikimedia
# Commons by default.
upload_to_commons = False
-############## SETTINGS TO AVOID SERVER OVERLOAD ##############
+# ############# SETTINGS TO AVOID SERVER OVERLOAD ##############
# Slow down the robot such that it never requests a second page within
# 'minthrottle' seconds. This can be lengthened if the server is slow,
@@ -423,7 +423,7 @@
# Minimum time to wait before resubmitting a failed API request.
retry_wait = 5
-############## TABLE CONVERSION BOT SETTINGS ##############
+# ############# TABLE CONVERSION BOT SETTINGS ##############
# will split long paragraphs for better reading the source.
# only table2wiki.py use it by now
@@ -435,7 +435,7 @@
table2wikiAskOnlyWarnings = True
table2wikiSkipWarnings = False
-############## WEBLINK CHECKER SETTINGS ##############
+# ############# WEBLINK CHECKER SETTINGS ##############
# How many external links should weblinkchecker.py check at the same time?
# If you have a fast connection, you might want to increase this number so
@@ -444,12 +444,12 @@
report_dead_links_on_talk = False
-############## DATABASE SETTINGS ##############
+# ############# DATABASE SETTINGS ##############
db_hostname = 'localhost'
db_username = 'wikiuser'
db_password = ''
-############## SEARCH ENGINE SETTINGS ##############
+# ############# SEARCH ENGINE SETTINGS ##############
# Some scripts allow querying Google via the Google Web API. To use this
# feature, you must install the pyGoogle module from http://pygoogle.sf.net/
@@ -466,7 +466,7 @@
# http://search.msn.com/developer
msn_appid = ''
-############## COPYRIGHT SETTINGS ##############
+# ############# COPYRIGHT SETTINGS ##############
# Enable/disable search engine in copyright.py script
copyright_google = True
@@ -519,18 +519,19 @@
# number of results.
copyright_economize_query = True
-############## HTTP SETTINGS ##############
+# ############# HTTP SETTINGS ##############
# Use a persistent http connection. An http connection has to be established
# only once per site object, making stuff a whole lot faster. Do NOT EVER
# use this if you share Site objects across threads without proper locking.
-## DISABLED FUNCTION. Setting this variable will not have any effect.
+#
+# DISABLED FUNCTION. Setting this variable will not have any effect.
persistent_http = False
# Default socket timeout. Set to None to disable timeouts.
socket_timeout = 120 # set a pretty long timeout just in case...
-############## COSMETIC CHANGES SETTINGS ##############
+# ############# COSMETIC CHANGES SETTINGS ##############
# The bot can make some additional changes to each page it edits, e.g. fix
# whitespace or positioning of interwiki and category links.
@@ -570,7 +571,7 @@
cosmetic_changes_deny_script = ['category_redirect', 'cosmetic_changes',
'touch']
-############## REPLICATION BOT ################
+# ############# REPLICATION BOT ################
# You can add replicate_replace to your user_config.py, which has the following
# format:
#
@@ -582,9 +583,10 @@
# liwiki. Note that this does not take the origin wiki into account.
replicate_replace = {}
-############## FURTHER SETTINGS ##############
+# ############# FURTHER SETTINGS ##############
-### Proxy configuration ###
+# Proxy configuration
+
# assign prox = None to connect directly
# For proxy support first run: apt-get install python-socks.py
# then change your user-config.py like:
@@ -596,7 +598,8 @@
# Configuration variable 'socks' is defined but unknown. Misspelled?proxy = None
proxy = None
-### Simulate settings ###
+# Simulate settings
+
# Defines what actions the bots are NOT allowed to do (e.g. 'edit') on wikipedia
# servers. Allows simulation runs of bots to be carried out without changing any
# page on the server side. This setting may be overridden in user_config.py.
diff --git a/pywikibot/data/__init__.py b/pywikibot/data/__init__.py
index 0c879ef..93e17a7 100644
--- a/pywikibot/data/__init__.py
+++ b/pywikibot/data/__init__.py
@@ -9,5 +9,5 @@
#
__version__ = '$Id$'
-#Import the classes exposed by this module:
+# Import the classes exposed by this module:
# TODO
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index 5d86adb..b2e9442 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -289,9 +289,9 @@
rawdata = http.request(self.site, uri, ssl, method="POST",
headers={'Content-Type': 'application/x-www-form-urlencoded'},
body=paramstring)
-## import traceback
-## traceback.print_stack()
-## print rawdata
+# import traceback
+# traceback.print_stack()
+# print rawdata
except Server504Error:
pywikibot.log(u"Caught HTTP 504 error; retrying")
self.wait()
@@ -300,7 +300,7 @@
# This error is not going to be fixed by just waiting
pywikibot.error(traceback.format_exc())
raise
- #TODO: what other exceptions can occur here?
+ # TODO: what other exceptions can occur here?
except Exception as e:
# for any other error on the http request, wait and retry
pywikibot.error(traceback.format_exc())
diff --git a/pywikibot/data/wikidataquery.py b/pywikibot/data/wikidataquery.py
index fdd66c7..1a3e0bf 100644
--- a/pywikibot/data/wikidataquery.py
+++ b/pywikibot/data/wikidataquery.py
@@ -520,7 +520,7 @@
fullQueryString = self.getQueryString(q, labels, props)
- #try to get cached data first
+ # try to get cached data first
data = self.readFromCache(fullQueryString)
if data:
@@ -534,7 +534,7 @@
if not data:
return None
- #cache data for next time
+ # cache data for next time
self.saveToCache(fullQueryString, data)
return data
diff --git a/pywikibot/families/oldwikivoyage_family.py b/pywikibot/families/oldwikivoyage_family.py
index 9872d01..1b8e528 100644
--- a/pywikibot/families/oldwikivoyage_family.py
+++ b/pywikibot/families/oldwikivoyage_family.py
@@ -4,7 +4,7 @@
__version__ = '$Id$'
-#Family file for the original wikivoyage
+# Family file for the original wikivoyage
class Family(family.Family):
def __init__(self):
family.Family.__init__(self)
diff --git a/pywikibot/families/wikipedia_family.py b/pywikibot/families/wikipedia_family.py
index 24406fc..b20aaee 100644
--- a/pywikibot/families/wikipedia_family.py
+++ b/pywikibot/families/wikipedia_family.py
@@ -390,7 +390,7 @@
'km': {
'_default': [0, 4, 12],
},
- #wrong wikipedia namespace alias
+ # wrong wikipedia namespace alias
'mzn': {
'_default': [0, 4],
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 86929f3..d9af922 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -332,7 +332,7 @@
'dmoz': 'dmoz',
'dmozs': 'dmozs',
'docbook': 'docbook',
-## 'doi': 'doi',
+# 'doi': 'doi',
'doom_wiki': 'doom_wiki',
'download': 'download',
'drae': 'drae',
@@ -819,15 +819,19 @@
# '_default'; values are a list of namespace numbers
self.crossnamespace = collections.defaultdict(dict)
##
- ## Examples :
- ## Allowing linking to pt' 102 NS from any other lang' 0 NS is
- # self.crossnamespace[0] = {
- # '_default': { 'pt': [102]}
- # }
- ## While allowing linking from pt' 102 NS to any other lang' = NS is
- # self.crossnamespace[102] = {
- # 'pt': { '_default': [0]}
- # }
+ # Examples :
+ #
+ # Allowing linking to pt' 102 NS from any other lang' 0 NS is
+ #
+ # self.crossnamespace[0] = {
+ # '_default': { 'pt': [102]}
+ # }
+ #
+ # While allowing linking from pt' 102 NS to any other lang' = NS is
+ #
+ # self.crossnamespace[102] = {
+ # 'pt': { '_default': [0]}
+ # }
@property
def iwkeys(self):
@@ -841,8 +845,8 @@
"""
self.langs[code] = location
-## for num, val in namespaces.items():
-## self.namespaces[num][code] = val
+# for num, val in namespaces.items():
+# self.namespaces[num][code] = val
def get_known_families(self, site):
return self.known_families
diff --git a/pywikibot/fixes.py b/pywikibot/fixes.py
index 02d6b31..34f2b1d 100644
--- a/pywikibot/fixes.py
+++ b/pywikibot/fixes.py
@@ -452,8 +452,8 @@
}
},
- #Corrections for Arabic Wikipedia and any Arabic wiki.
- #python replace.py -fix:correct-ar -start:! -always
+ # Corrections for Arabic Wikipedia and any Arabic wiki.
+ # python replace.py -fix:correct-ar -start:! -always
'correct-ar': {
'regex': True,
@@ -461,7 +461,7 @@
'ar': u'تدقيق إملائي',
},
'replacements': [
- #(u' ,', u' ،'), #FIXME: Do not replace comma in non-Arabic text, interwiki, image links or <math> syntax.
+ #(u' ,', u' ،'), # FIXME: Do not replace comma in non-Arabic text, interwiki, image links or <math> syntax.
(r'\b' + u'إمرأة' + r'\b', u'امرأة'),
(r'\b' + u'الى' + r'\b', u'إلى'),
(r'\b' + u'إسم' + r'\b', u'اسم'),
diff --git a/pywikibot/i18n.py b/pywikibot/i18n.py
index 40fed49..879093e 100644
--- a/pywikibot/i18n.py
+++ b/pywikibot/i18n.py
@@ -38,24 +38,24 @@
This code is used by other translating methods below.
"""
- #Akan
+ # Akan
if code in ['ak', 'tw']:
return ['ak', 'tw']
- #Amharic
+ # Amharic
if code in ['aa', 'ti']:
return ['am']
- #Arab
+ # Arab
if code in ['arc', 'arz', 'so']:
return ['ar']
if code == 'kab':
return ['ar', 'fr']
- #Bulgarian
+ # Bulgarian
if code in ['cu', 'mk']:
return ['bg', 'sr', 'sh']
- #Czech
+ # Czech
if code in ['cs', 'sk']:
return ['cs', 'sk']
- #German
+ # German
if code in ['bar', 'frr', 'ksh', 'pdc', 'pfl']:
return ['de']
if code == 'lb':
@@ -72,13 +72,13 @@
return ['de', 'it']
if code == 'stq':
return ['nds', 'de']
- #Greek
+ # Greek
if code in ['grc', 'pnt']:
return ['el']
- #Esperanto
+ # Esperanto
if code in ['io', 'nov']:
return ['eo']
- #Spanish
+ # Spanish
if code in ['an', 'arn', 'ast', 'ay', 'ca', 'ext', 'lad', 'nah', 'nv', 'qu',
'yua']:
return ['es']
@@ -88,22 +88,22 @@
return ['es', 'fr']
if code == 'cbk-zam':
return ['es', 'tl']
- #Estonian
+ # Estonian
if code == 'fiu-vro':
return ['et']
if code == 'liv':
return ['et', 'lv']
- #Persian (Farsi)
+ # Persian (Farsi)
if code == 'ps':
return ['fa']
if code in ['glk', 'mzn']:
return ['glk', 'mzn', 'fa', 'ar']
- #Finnish
+ # Finnish
if code == 'vep':
return ['fi', 'ru']
if code == 'fit':
return ['fi', 'sv']
- #French
+ # French
if code in ['bm', 'br', 'ht', 'kg', 'ln', 'mg', 'nrm', 'pcd',
'rw', 'sg', 'ty', 'wa']:
return ['fr']
@@ -111,46 +111,46 @@
return ['fr', 'ca', 'es']
if code in ['co', 'frp']:
return ['fr', 'it']
- #Hindi
+ # Hindi
if code in ['sa']:
return ['hi']
if code in ['ne', 'new']:
return ['ne', 'new', 'hi']
- #Indonesian and Malay
+ # Indonesian and Malay
if code in ['ace', 'bug', 'bjn', 'id', 'jv', 'ms', 'su']:
return ['id', 'ms', 'jv']
if code == 'map-bms':
return ['jv', 'id', 'ms']
- #Inuit languages
+ # Inuit languages
if code in ['ik', 'iu']:
return ['iu', 'kl']
if code == 'kl':
return ['da', 'iu', 'no']
- #Italian
+ # Italian
if code in ['eml', 'fur', 'lij', 'lmo', 'nap', 'pms', 'roa-tara', 'sc',
'scn', 'vec']:
return ['it']
- #Lithuanian
+ # Lithuanian
if code in ['bat-smg']:
return ['lt']
- #Latvian
+ # Latvian
if code == 'ltg':
return ['lv']
- #Dutch
+ # Dutch
if code in ['af', 'fy', 'li', 'pap', 'srn', 'vls', 'zea']:
return ['nl']
if code == ['nds-nl']:
return ['nds', 'nl']
- #Polish
+ # Polish
if code in ['csb', 'szl']:
return ['pl']
- #Portuguese
+ # Portuguese
if code in ['fab', 'mwl', 'tet']:
return ['pt']
- #Romanian
+ # Romanian
if code in ['mo', 'roa-rup']:
return ['ro']
- #Russian and Belarusian
+ # Russian and Belarusian
if code in ['ab', 'av', 'ba', 'bxr', 'ce', 'cv', 'inh', 'kk', 'koi', 'krc',
'kv', 'ky', 'lbe', 'lez', 'mdf', 'mhr', 'mn', 'mrj', 'myv',
'os', 'sah', 'tg', 'udm', 'uk', 'xal']:
@@ -163,32 +163,32 @@
return ['be', 'be-x-old', 'ru']
if code == 'kaa':
return ['uz', 'ru']
- #Serbocroatian
+ # Serbocroatian
if code in ['bs', 'hr', 'sh']:
return ['sh', 'hr', 'bs', 'sr', 'sr-el']
if code == 'sr':
return ['sr-el', 'sh', 'hr', 'bs']
- #Tagalog
+ # Tagalog
if code in ['bcl', 'ceb', 'ilo', 'pag', 'pam', 'war']:
return ['tl']
- #Turkish and Kurdish
+ # Turkish and Kurdish
if code in ['diq', 'ku']:
return ['ku', 'ku-latn', 'tr']
if code == 'gag':
return ['tr']
if code == 'ckb':
return ['ku']
- #Ukrainian
+ # Ukrainian
if code in ['crh', 'rue']:
return ['uk', 'ru']
- #Chinese
+ # Chinese
if code in ['minnan', 'zh', 'zh-classical', 'zh-min-nan', 'zh-tw',
'zh-hans', 'zh-hant']:
return ['zh', 'zh-tw', 'zh-cn', 'zh-classical']
if code in ['cdo', 'gan', 'hak', 'ii', 'wuu', 'za', 'zh-cdo',
'zh-classical', 'zh-cn', 'zh-yue']:
return ['zh', 'zh-cn', 'zh-tw', 'zh-classical']
- #Scandinavian languages
+ # Scandinavian languages
if code in ['da', 'sv']:
return ['da', 'no', 'nb', 'sv', 'nn']
if code in ['fo', 'is']:
@@ -199,7 +199,7 @@
return ['no', 'nb', 'da', 'nn', 'sv']
if code == 'se':
return ['sv', 'no', 'nb', 'nn', 'fi']
- #Other languages
+ # Other languages
if code in ['bi', 'tpi']:
return ['bi', 'tpi']
if code == 'yi':
@@ -216,7 +216,7 @@
return ['meu', 'hmo']
if code == ['as']:
return ['bn']
- #Default value
+ # Default value
return []
diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py
index 05df835..fcdf029 100644
--- a/pywikibot/logentries.py
+++ b/pywikibot/logentries.py
@@ -67,7 +67,7 @@
return self.data['action']
def user(self):
- #TODO use specific User class ?
+ # TODO use specific User class ?
return self.data['user']
def timestamp(self):
@@ -99,7 +99,7 @@
if this block action targets a username or IP.
* Returns the blockid if this log reflects the removal of an autoblock
"""
- #TODO what for IP ranges ?
+ # TODO what for IP ranges ?
if self.isAutoblockRemoval:
return self._blockid
else:
@@ -201,7 +201,7 @@
class NewUsersEntry(LogEntry):
_expectedType = 'newusers'
-#TODO entries for merge,suppress,makebot,gblblock,renameuser,globalauth,gblrights ?
+# TODO entries for merge,suppress,makebot,gblblock,renameuser,globalauth,gblrights ?
class LogEntryFactory(object):
diff --git a/pywikibot/login.py b/pywikibot/login.py
index 43e8123..8f167ac 100644
--- a/pywikibot/login.py
+++ b/pywikibot/login.py
@@ -189,17 +189,17 @@
return False
self.storecookiedata(cookiedata)
pywikibot.log(u"Should be logged in now")
-## # Show a warning according to the local bot policy
-## FIXME: disabled due to recursion; need to move this to the Site object after
-## login
-## if not self.botAllowed():
-## logger.error(
-## u"Username '%(name)s' is not listed on [[%(page)s]]."
-## % {'name': self.username,
-## 'page': botList[self.site.family.name][self.site.code]})
-## logger.error(
-##"Please make sure you are allowed to use the robot before actually using it!")
-## return False
+# # Show a warning according to the local bot policy
+# FIXME: disabled due to recursion; need to move this to the Site object after
+# login
+# if not self.botAllowed():
+# logger.error(
+# u"Username '%(name)s' is not listed on [[%(page)s]]."
+# % {'name': self.username,
+# 'page': botList[self.site.family.name][self.site.code]})
+# logger.error(
+#"Please make sure you are allowed to use the robot before actually using it!")
+# return False
return True
def showCaptchaWindow(self, url):
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 25e9046..66371e2 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -1581,7 +1581,7 @@
% (self.title(asLink=True), error.message))
return False
-######## DEPRECATED METHODS ########
+# ####### DEPRECATED METHODS ########
@deprecated("Site.encoding()")
def encoding(self):
@@ -1619,7 +1619,7 @@
"""
return self.title(asUrl=True)
-####### DISABLED METHODS (warnings provided) ######
+# ###### DISABLED METHODS (warnings provided) ######
# these methods are easily replaced by editing the page's text using
# textlib methods and then using put() on the result.
@@ -2052,7 +2052,7 @@
"""
return self.site.categoryinfo(self)
-#### DEPRECATED METHODS ####
+# ### DEPRECATED METHODS ####
@deprecated("list(Category.subcategories(...))")
def subcategoriesList(self, recurse=False):
"""DEPRECATED: Equivalent to list(self.subcategories(...))"""
@@ -2668,9 +2668,9 @@
for dbname in self._content['sitelinks']:
# Due to issues with locked/obsolete sites
# this part is commented out
-## site = self.__make_site(dbname)
-## self.sitelinks[site] = pywikibot.Page(
-## site, self._content['sitelinks'][dbname]['title'])
+ #site = self.__make_site(dbname)
+ #self.sitelinks[site] = pywikibot.Page(
+ # site, self._content['sitelinks'][dbname]['title'])
self.sitelinks[dbname] = self._content[
'sitelinks'][dbname]['title']
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 3922333..d0a64b8 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -966,7 +966,7 @@
yield pywikibot.ItemPage.fromPage(page)
-#TODO below
+# TODO below
@deprecate_arg("extension", None)
@deprecate_arg("number", "total")
@deprecate_arg("repeat", None)
@@ -1245,42 +1245,42 @@
estimatedTotalResultsCount = data.meta.estimatedTotalResultsCount
offset += 10
-#############
-## commented out because it is probably not in compliance with Google's
-## "Terms of service" (see 5.3, http://www.google.com/accounts/TOS?loc=US)
-##
-## def queryViaWeb(self, query):
-## """
-## Google has stopped giving out API license keys, and sooner or later
-## they will probably shut down the service.
-## This is a quick and ugly solution: we just grab the search results from
-## the normal web interface.
-## """
-## linkR = re.compile(r'<a href="([^>"]+?)" class=l>', re.IGNORECASE)
-## offset = 0
-##
-## while True:
-## pywikibot.output("Google: Querying page %d" % (offset / 100 + 1))
-## address = "http://www.google.com/search?q=%s&num=100&hl=en&start=%d" \
-## % (urllib.quote_plus(query), offset)
-## # we fake being Firefox because Google blocks unknown browsers
-## request = urllib2.Request(
-## address, None,
-## {'User-Agent':
-## 'Mozilla/5.0 (X11; U; Linux i686; de; rv:1.8) Gecko/20051128 '
-## 'SUSE/1.5-0.1 Firefox/1.5'})
-## urlfile = urllib2.urlopen(request)
-## page = urlfile.read()
-## urlfile.close()
-## for url in linkR.findall(page):
-## yield url
-##
-## # Is there a "Next" link for next page of results?
-## if "<div id=nn>" in page:
-## offset += 100 # Yes, go to next page of results.
-## else:
-## return
-#############
+# ############
+# commented out because it is probably not in compliance with Google's
+# "Terms of service" (see 5.3, http://www.google.com/accounts/TOS?loc=US)
+#
+# def queryViaWeb(self, query):
+# """
+# Google has stopped giving out API license keys, and sooner or later
+# they will probably shut down the service.
+# This is a quick and ugly solution: we just grab the search results from
+# the normal web interface.
+# """
+# linkR = re.compile(r'<a href="([^>"]+?)" class=l>', re.IGNORECASE)
+# offset = 0
+#
+# while True:
+# pywikibot.output("Google: Querying page %d" % (offset / 100 + 1))
+# address = "http://www.google.com/search?q=%s&num=100&hl=en&start=%d" \
+# % (urllib.quote_plus(query), offset)
+# # we fake being Firefox because Google blocks unknown browsers
+# request = urllib2.Request(
+# address, None,
+# {'User-Agent':
+# 'Mozilla/5.0 (X11; U; Linux i686; de; rv:1.8) Gecko/20051128 '
+# 'SUSE/1.5-0.1 Firefox/1.5'})
+# urlfile = urllib2.urlopen(request)
+# page = urlfile.read()
+# urlfile.close()
+# for url in linkR.findall(page):
+# yield url
+#
+# # Is there a "Next" link for next page of results?
+# if "<div id=nn>" in page:
+# offset += 100 # Yes, go to next page of results.
+# else:
+# return
+# ###########
def __iter__(self):
# restrict query to local site
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 804fd0a..c5d647f 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -580,27 +580,27 @@
Do not use directly; use pywikibot.Site function.
"""
-## Site methods from version 1.0 (as these are implemented in this file,
-## or declared deprecated/obsolete, they will be removed from this list)
-##########
-## cookies: return user's cookies as a string
-##
-## urlEncode: Encode a query to be sent using an http POST request.
-## postForm: Post form data to an address at this site.
-## postData: Post encoded form data to an http address at this site.
-##
-## version: Return MediaWiki version string from Family file.
-## versionnumber: Return int identifying the MediaWiki version.
-## live_version: Return version number read from Special:Version.
-## checkCharset(charset): Warn if charset doesn't match family file.
-##
-## linktrail: Return regex for trailing chars displayed as part of a link.
-## disambcategory: Category in which disambiguation pages are listed.
-##
-## Methods that yield Page objects derived from a wiki's Special: pages
-## (note, some methods yield other information in a tuple along with the
-## Pages; see method docs for details) --
-##
+# Site methods from version 1.0 (as these are implemented in this file,
+# or declared deprecated/obsolete, they will be removed from this list)
+#########
+# cookies: return user's cookies as a string
+#
+# urlEncode: Encode a query to be sent using an http POST request.
+# postForm: Post form data to an address at this site.
+# postData: Post encoded form data to an http address at this site.
+#
+# version: Return MediaWiki version string from Family file.
+# versionnumber: Return int identifying the MediaWiki version.
+# live_version: Return version number read from Special:Version.
+# checkCharset(charset): Warn if charset doesn't match family file.
+#
+# linktrail: Return regex for trailing chars displayed as part of a link.
+# disambcategory: Category in which disambiguation pages are listed.
+#
+# Methods that yield Page objects derived from a wiki's Special: pages
+# (note, some methods yield other information in a tuple along with the
+# Pages; see method docs for details) --
+#
def __init__(self, code, fam=None, user=None, sysop=None):
BaseSite.__init__(self, code, fam, user, sysop)
@@ -1007,7 +1007,7 @@
Group 1 in the regex match object will be the target title.
"""
- #NOTE: this is needed, since the API can give false positives!
+ # NOTE: this is needed, since the API can give false positives!
try:
keywords = set(s.lstrip("#")
for s in self.getmagicwords("redirect"))
@@ -2465,7 +2465,7 @@
wlprop="user|comment|timestamp|title|ids|flags",
wlallrev="", namespaces=namespaces,
step=step, total=total)
- #TODO: allow users to ask for "patrol" as well?
+ # TODO: allow users to ask for "patrol" as well?
if start is not None:
wlgen.request["wlstart"] = str(start)
if end is not None:
@@ -2687,10 +2687,10 @@
pywikibot.warning(
u"editpage: Invalid watch value '%(watch)s' ignored."
% locals())
-## FIXME: API gives 'badmd5' error
-## md5hash = md5()
-## md5hash.update(urllib.quote_plus(text.encode(self.encoding())))
-## params['md5'] = md5hash.digest()
+# FIXME: API gives 'badmd5' error
+# md5hash = md5()
+# md5hash.update(urllib.quote_plus(text.encode(self.encoding())))
+# params['md5'] = md5hash.digest()
req = api.Request(site=self, **params)
while True:
try:
@@ -2866,7 +2866,7 @@
if "move" not in result:
pywikibot.error(u"movepage: %s" % result)
raise Error("movepage: unexpected response")
- #TODO: Check for talkmove-error messages
+ # TODO: Check for talkmove-error messages
if "talkmove-error-code" in result["move"]:
pywikibot.warning(
u"movepage: Talk page %s not moved"
@@ -3019,9 +3019,9 @@
finally:
self.unlock_page(page)
- #TODO: implement undelete
+ # TODO: implement undelete
- #TODO: implement patrol
+ # TODO: implement patrol
@must_be(group='sysop')
def blockuser(self, user, expiry, reason, anononly=True, nocreate=True,
@@ -3292,7 +3292,7 @@
this is derived from the "upload" log events instead.
"""
- #TODO: update docstring
+ # TODO: update docstring
for event in logevents(self, logtype="upload", user=user,
start=start, end=end, reverse=reverse,
step=step, total=total):
@@ -3403,7 +3403,7 @@
step=step, total=total)
return uigen
- #synonym
+ # synonym
uncategorizedfiles = uncategorizedimages
@deprecate_arg("number", None)
@@ -3448,7 +3448,7 @@
step=step, total=total)
return uigen
- #synonym
+ # synonym
unusedimages = unusedfiles
@deprecate_arg("number", None)
@@ -3601,7 +3601,7 @@
props='datatype',
)
expiry = datetime.timedelta(days=365 * 100)
- #Store it for 100 years
+ # Store it for 100 years
req = api.CachedRequest(expiry, site=self, **params)
data = req.submit()
@@ -3616,7 +3616,7 @@
if dtype == 'globe-coordinate':
dtype = 'globecoordinate'
- #TODO Fix this
+ # TODO Fix this
return dtype
@must_be(group='user')
@@ -3660,7 +3660,7 @@
req = api.Request(site=self, **params)
data = req.submit()
claim.snak = data['claim']['id']
- #Update the item
+ # Update the item
if claim.getID() in item.claims:
item.claims[claim.getID()].append(claim)
else:
@@ -3676,7 +3676,7 @@
if claim.isReference or claim.isQualifier:
raise NotImplementedError
if not claim.snak:
- #We need to already have the snak value
+ # We need to already have the snak value
raise pywikibot.NoPage(claim)
params = dict(action='wbsetclaimvalue',
claim=claim.snak,
@@ -3716,7 +3716,7 @@
if bot:
params['bot'] = 1
params['token'] = self.token(claim, 'edit')
- #build up the snak
+ # build up the snak
if isinstance(source, list):
sources = source
else:
@@ -3786,7 +3786,7 @@
qualifier.hash is not None):
params['snakhash'] = qualifier.hash
params['token'] = self.token(claim, 'edit')
- #build up the snak
+ # build up the snak
if qualifier.getSnakType() == 'value':
params['value'] = json.dumps(qualifier._formatDataValue())
params['snaktype'] = qualifier.getSnakType()
@@ -3962,10 +3962,10 @@
raise NotImplementedError
-#### METHODS NOT IMPLEMENTED YET ####
+# ### METHODS NOT IMPLEMENTED YET ####
class NotImplementedYet:
- #TODO: is this needed any more? can it be obtained from the http module?
+ # TODO: is this needed any more? can it be obtained from the http module?
def cookies(self, sysop=False):
"""Return a string containing the user's current cookies."""
self._loadCookies(sysop=sysop)
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index b1de87f..e161ad2 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -103,7 +103,7 @@
'invoke': re.compile(r'(?i)\{\{\s*#invoke:.*?}\}'),
# categories
'category': re.compile(u'\[\[ *(?:%s)\s*:.*?\]\]' % u'|'.join(site.namespace(14, all=True))),
- #files
+ # files
'file': re.compile(u'\[\[ *(?:%s)\s*:.*?\]\]' % u'|'.join(site.namespace(6, all=True))),
}
@@ -219,8 +219,9 @@
# group references such as \2 or \g<name>.
# On the other hand, this approach does not work because it
# can't handle lookahead or lookbehind (see bug #1731008):
- #replacement = old.sub(new, text[match.start():match.end()])
- #text = text[:match.start()] + replacement + text[match.end():]
+ #
+ # replacement = old.sub(new, text[match.start():match.end()])
+ # text = text[:match.start()] + replacement + text[match.end():]
# So we have to process the group references manually.
replacement = new
@@ -374,9 +375,9 @@
return marker
-#-------------------------------------------------
+# -----------------------------------------------
# Functions dealing with interwiki language links
-#-------------------------------------------------
+# -----------------------------------------------
# Note - MediaWiki supports several kinds of interwiki links; two kinds are
# interlanguage links. We deal here with those kinds only.
# A family has by definition only one kind of interlanguage links:
@@ -523,7 +524,7 @@
if s:
if site.language() in site.family.interwiki_attop or \
u'<!-- interwiki at top -->' in oldtext:
- #do not add separator if interwiki links are on one line
+ # do not add separator if interwiki links are on one line
newtext = s + (u'' if site.language()
in site.family.interwiki_on_one_line
else separator) + s2.replace(marker, '').strip()
@@ -625,7 +626,7 @@
sites.sort()
putfirst = insite.interwiki_putfirst()
if putfirst:
- #In this case I might have to change the order
+ # In this case I might have to change the order
firstsites = []
for code in putfirst:
if code in insite.validLanguageLinks():
@@ -635,14 +636,14 @@
firstsites = firstsites + [site]
sites = firstsites + sites
if insite.interwiki_putfirst_doubled(sites):
- #some (all?) implementations return False
+ # some (all?) implementations return False
sites = insite.interwiki_putfirst_doubled(sites) + sites
return sites
-#---------------------------------------
+# -------------------------------------
# Functions dealing with category links
-#---------------------------------------
+# -------------------------------------
def getCategoryLinks(text, site=None):
"""Return a list of category links found in text.
@@ -690,7 +691,7 @@
['nowiki', 'comment', 'math', 'pre', 'source'],
marker=marker)
if marker:
- #avoid having multiple linefeeds at the end of the text
+ # avoid having multiple linefeeds at the end of the text
text = re.sub('\s*%s' % re.escape(marker), config.LS + marker,
text.strip())
return text.strip()
@@ -856,9 +857,9 @@
return sep.join(catLinks) + config.line_separator
-#---------------------------------------
+# -------------------------------------
# Functions dealing with external links
-#---------------------------------------
+# -------------------------------------
def compileLinkR(withoutBracketed=False, onlyBracketed=False):
"""Return a regex that matches external links."""
@@ -890,9 +891,9 @@
return linkR
-#----------------------------------
+# --------------------------------
# Functions dealing with templates
-#----------------------------------
+# --------------------------------
def extract_templates_and_params(text):
"""Return a list of templates found in text.
@@ -1015,29 +1016,29 @@
if not name or name.startswith('#'):
continue
-## TODO: merged from wikipedia.py - implement the following
-## if self.site().isInterwikiLink(name):
-## continue
-## # {{DEFAULTSORT:...}}
-## defaultKeys = self.site().versionnumber() > 13 and \
-## self.site().getmagicwords('defaultsort')
-## # It seems some wikis does not have this magic key
-## if defaultKeys:
-## found = False
-## for key in defaultKeys:
-## if name.startswith(key):
-## found = True
-## break
-## if found: continue
-##
-## try:
-## name = Page(self.site(), name).title()
-## except InvalidTitle:
-## if name:
-## output(
-## u"Page %s contains invalid template name {{%s}}."
-## % (self.title(), name.strip()))
-## continue
+# TODO: merged from wikipedia.py - implement the following
+# if self.site().isInterwikiLink(name):
+# continue
+# # {{DEFAULTSORT:...}}
+# defaultKeys = self.site().versionnumber() > 13 and \
+# self.site().getmagicwords('defaultsort')
+# # It seems some wikis does not have this magic key
+# if defaultKeys:
+# found = False
+# for key in defaultKeys:
+# if name.startswith(key):
+# found = True
+# break
+# if found: continue
+#
+# try:
+# name = Page(self.site(), name).title()
+# except InvalidTitle:
+# if name:
+# output(
+# u"Page %s contains invalid template name {{%s}}."
+# % (self.title(), name.strip()))
+# continue
# Parameters
paramString = m.group('params')
@@ -1099,9 +1100,9 @@
return u'{{%s\n%s}}' % (template, text)
-#----------------------------------
+# --------------------------
# Page parsing functionality
-#----------------------------------
+# --------------------------
def does_text_contain_section(pagetext, section):
"""
@@ -1127,9 +1128,9 @@
return bool(m)
-#---------------------------------
+# ---------------------------------------
# Time parsing functionality (Archivebot)
-#---------------------------------
+# ---------------------------------------
class tzoneFixedOffset(datetime.tzinfo):
"""
@@ -1189,7 +1190,7 @@
self.pmonthR = re.compile(monthR, re.U)
self.pdayR = re.compile(dayR)
- #order is important to avoid mismatch when searching
+ # order is important to avoid mismatch when searching
self.patterns = [
self.ptimeR,
self.timeznR,
@@ -1227,32 +1228,32 @@
"""
_line = line
- #match date fields
+ # match date fields
dateDict = dict()
for pat in self.patterns:
line, matchDict = self.last_match_and_replace(line, pat)
if matchDict:
dateDict.update(matchDict)
- #all fields matched -> date valid
+ # all fields matched -> date valid
if all(g in dateDict for g in self.groups):
- #remove 'time' key, now splitted in hour/minute and not needed by datetime
+ # remove 'time' key, now splitted in hour/minute and not needed by datetime
del dateDict['time']
- #replace month name in original language with month number
+ # replace month name in original language with month number
try:
dateDict['month'] = self.origNames2monthNum[dateDict['month']]
except KeyError:
pywikibot.output(u'incorrect month name in page')
- #convert to integers
+ # convert to integers
for k, v in dateDict.items():
try:
dateDict[k] = int(v)
except ValueError:
pass
- #find timezone
+ # find timezone
dateDict['tzinfo'] = tzoneFixedOffset(self.site.siteinfo['timeoffset'],
self.site.siteinfo['timezone'])
diff --git a/pywikibot/userinterfaces/transliteration.py b/pywikibot/userinterfaces/transliteration.py
index 690bd44..288348d 100644
--- a/pywikibot/userinterfaces/transliteration.py
+++ b/pywikibot/userinterfaces/transliteration.py
@@ -218,7 +218,7 @@
for char in u"Cʗǃ":
self.trans[char] = u"!"
- #Punctuation and typography
+ # Punctuation and typography
for char in u"«»“”„¨":
self.trans[char] = u'"'
for char in u"‘’′":
@@ -2025,15 +2025,15 @@
def transliterate(self, char, default="?", prev="-", next="-"):
if char in self.trans:
return self.trans[char]
- #Arabic
+ # Arabic
if char == u"◌":
return prev
- #Japanese
+ # Japanese
if char == u"ッ":
return self.transliterate(next)[0]
if char in u"々仝ヽヾゝゞ〱〲〳〵〴〵":
return prev
- #Lao
+ # Lao
if char == u"ຫ":
if next in u"ງຍນຣລຼຼວ":
return ""
diff --git a/pywikibot/version.py b/pywikibot/version.py
index 7c25ac2..d2be01d 100644
--- a/pywikibot/version.py
+++ b/pywikibot/version.py
@@ -85,7 +85,7 @@
_program_dir = path or _get_program_dir()
entries = open(os.path.join(_program_dir, '.svn/entries'))
version = entries.readline().strip()
- #use sqlite table for new entries format
+ # use sqlite table for new entries format
if version == "12":
entries.close()
from sqlite3 import dbapi2 as sqlite
diff --git a/tests/page_tests.py b/tests/page_tests.py
index 558c06e..7a023d6 100644
--- a/tests/page_tests.py
+++ b/tests/page_tests.py
@@ -319,7 +319,7 @@
def testReferences(self):
count = 0
- #Ignore redirects for time considerations
+ # Ignore redirects for time considerations
for p in mainpage.getReferences(follow_redirects=False):
count += 1
self.assertType(p, pywikibot.Page)
@@ -363,24 +363,24 @@
# methods that still need tests implemented or expanded:
-## def autoFormat(self):
-## def isAutoTitle(self):
-## def getOldVersion(self, oldid, force=False, get_redirect=False,
-## sysop=False):
-## text = property(_textgetter, _textsetter, _cleartext,
-## "The edited wikitext (unicode) of this Page")
-## def getReferences(self, follow_redirects=True, withTemplateInclusion=True,
-## onlyTemplateInclusion=False, redirectsOnly=False,
-## namespaces=None):
-## def backlinks(self, followRedirects=True, filterRedirects=None,
-## namespaces=None):
-## def embeddedin(self, filter_redirects=None, namespaces=None):
-## def getVersionHistory(self, reverseOrder=False, getAll=False,
-## revCount=500):
-## def getVersionHistoryTable(self, forceReload=False, reverseOrder=False,
-## getAll=False, revCount=500):
-## def fullVersionHistory(self):
-## def contributingUsers(self):
+# def autoFormat(self):
+# def isAutoTitle(self):
+# def getOldVersion(self, oldid, force=False, get_redirect=False,
+# sysop=False):
+# text = property(_textgetter, _textsetter, _cleartext,
+# "The edited wikitext (unicode) of this Page")
+# def getReferences(self, follow_redirects=True, withTemplateInclusion=True,
+# onlyTemplateInclusion=False, redirectsOnly=False,
+# namespaces=None):
+# def backlinks(self, followRedirects=True, filterRedirects=None,
+# namespaces=None):
+# def embeddedin(self, filter_redirects=None, namespaces=None):
+# def getVersionHistory(self, reverseOrder=False, getAll=False,
+# revCount=500):
+# def getVersionHistoryTable(self, forceReload=False, reverseOrder=False,
+# getAll=False, revCount=500):
+# def fullVersionHistory(self):
+# def contributingUsers(self):
if __name__ == '__main__':
diff --git a/tests/site_tests.py b/tests/site_tests.py
index bc6b2d7..739d103 100644
--- a/tests/site_tests.py
+++ b/tests/site_tests.py
@@ -336,14 +336,14 @@
self.assertTrue(mysite.page_exists(page))
self.assertEqual(page.namespace(), 0)
self.assertFalse(page.isRedirectPage())
-## for page in mysite.allpages(filterlanglinks=True, total=5):
-## self.assertType(page, pywikibot.Page)
-## self.assertTrue(mysite.page_exists(page))
-## self.assertEqual(page.namespace(), 0)
-## for page in mysite.allpages(filterlanglinks=False, total=5):
-## self.assertType(page, pywikibot.Page)
-## self.assertTrue(mysite.page_exists(page))
-## self.assertEqual(page.namespace(), 0)
+# for page in mysite.allpages(filterlanglinks=True, total=5):
+# self.assertType(page, pywikibot.Page)
+# self.assertTrue(mysite.page_exists(page))
+# self.assertEqual(page.namespace(), 0)
+# for page in mysite.allpages(filterlanglinks=False, total=5):
+# self.assertType(page, pywikibot.Page)
+# self.assertTrue(mysite.page_exists(page))
+# self.assertEqual(page.namespace(), 0)
for page in mysite.allpages(minsize=100, total=5):
self.assertType(page, pywikibot.Page)
self.assertTrue(mysite.page_exists(page))
@@ -403,10 +403,10 @@
for cat in mysite.allcategories(total=5, prefix="Def"):
self.assertType(cat, pywikibot.Category)
self.assertTrue(cat.title(withNamespace=False).startswith("Def"))
-## # Bug # 15985
-## for cat in mysite.allcategories(total=5, start="Hij", reverse=True):
-## self.assertType(cat, pywikibot.Category)
-## self.assertTrue(cat.title(withNamespace=False) <= "Hij")
+# # Bug # 15985
+# for cat in mysite.allcategories(total=5, start="Hij", reverse=True):
+# self.assertType(cat, pywikibot.Category)
+# self.assertTrue(cat.title(withNamespace=False) <= "Hij")
def testAllUsers(self):
"""Test the site.allusers() method"""
@@ -449,11 +449,11 @@
self.assertType(impage, pywikibot.ImagePage)
self.assertTrue(mysite.page_exists(impage))
self.assertTrue(impage.title(withNamespace=False) >= "Ba")
-## # Bug # 15985
-## for impage in mysite.allimages(start="Da", reverse=True, total=5):
-## self.assertType(impage, pywikibot.ImagePage)
-## self.assertTrue(mysite.page_exists(impage))
-## self.assertTrue(impage.title() <= "Da")
+# # Bug # 15985
+# for impage in mysite.allimages(start="Da", reverse=True, total=5):
+# self.assertType(impage, pywikibot.ImagePage)
+# self.assertTrue(mysite.page_exists(impage))
+# self.assertTrue(impage.title() <= "Da")
for impage in mysite.allimages(prefix="Ch", total=5):
self.assertType(impage, pywikibot.ImagePage)
self.assertTrue(mysite.page_exists(impage))
@@ -1015,19 +1015,19 @@
def testLoadRevisions_revids(self):
"""Test the site.loadrevisions() method, listing based on revid."""
- #revids as list of int
+ # revids as list of int
self.mysite.loadrevisions(self.mainpage, revids=[139992, 139993])
self.assertTrue(all(rev in self.mainpage._revisions for rev in [139992, 139993]))
- #revids as list of str
+ # revids as list of str
self.mysite.loadrevisions(self.mainpage, revids=['139994', '139995'])
self.assertTrue(all(rev in self.mainpage._revisions for rev in [139994, 139995]))
- #revids as int
+ # revids as int
self.mysite.loadrevisions(self.mainpage, revids=140000)
self.assertTrue(140000 in self.mainpage._revisions)
- #revids as str
+ # revids as str
self.mysite.loadrevisions(self.mainpage, revids='140001')
self.assertTrue(140001 in self.mainpage._revisions)
- #revids belonging to a different page raises Exception
+ # revids belonging to a different page raises Exception
self.assertRaises(pywikibot.Error, self.mysite.loadrevisions,
self.mainpage, revids=130000)
diff --git a/tests/wikidataquery_tests.py b/tests/wikidataquery_tests.py
index e2965c4..8c9d83f 100644
--- a/tests/wikidataquery_tests.py
+++ b/tests/wikidataquery_tests.py
@@ -62,18 +62,18 @@
q = query.Tree(92, [1], 2)
self.assertEqual(str(q), 'tree[92][1][2]')
- #missing third arg
+ # missing third arg
q = query.Tree(92, 1)
self.assertEqual(str(q), 'tree[92][1][]')
- #missing second arg
+ # missing second arg
q = query.Tree(92, reverse=3)
self.assertEqual(str(q), 'tree[92][][3]')
q = query.Tree([92, 93], 1, [2, 7])
self.assertEqual(str(q), 'tree[92,93][1][2,7]')
- #bad tree arg types
+ # bad tree arg types
self.assertRaises(TypeError, lambda: query.Tree(99, "hello"))
q = query.Link("enwiki")
@@ -82,11 +82,11 @@
q = query.NoLink(["enwiki", "frwiki"])
self.assertEqual(str(q), 'nolink[enwiki,frwiki]')
- #bad link arg types
+ # bad link arg types
self.assertRaises(TypeError, lambda: query.Link(99))
self.assertRaises(TypeError, lambda: query.Link([99]))
- #HasClaim with tree as arg
+ # HasClaim with tree as arg
q = query.HasClaim(99, query.Tree(1, 2, 3))
self.assertEqual(str(q), "claim[99:(tree[1][2][3])]")
@@ -129,7 +129,7 @@
begin = pywikibot.WbTime(site=self.repo, year=1999)
end = pywikibot.WbTime(site=self.repo, year=2010, hour=1)
- #note no second comma
+ # note no second comma
q = query.Between(PropertyPage(self.repo, "P569"), begin)
self.assertEqual(str(q), 'between[569,+00000001999-01-01T00:00:00Z]')
@@ -189,13 +189,14 @@
q1 = query.HasClaim(99, 100)
q2 = query.HasClaim(99, 101)
- #different joiners get explicit grouping parens (the api also allows implicit, but we don't do that)
+ # different joiners get explicit grouping parens (the api also allows
+ # implicit, but we don't do that)
qs1 = q1.AND(q2)
qs2 = q1.OR(qs1).AND(query.HasClaim(98))
self.assertEqual(str(qs2), '(claim[99:100] OR (claim[99:100] AND claim[99:101])) AND claim[98]')
- #if the joiners are the same, no need to group
+ # if the joiners are the same, no need to group
qs1 = q1.AND(q2)
qs2 = q1.AND(qs1).AND(query.HasClaim(98))
@@ -222,7 +223,7 @@
self.assertEqual(w.getUrl(qs), "http://example.com/api?q=link%5Benwiki%5D")
- #check labels and props work OK
+ # check labels and props work OK
qs = w.getQueryString(query.Link("enwiki"), ['en', 'fr'], ['prop'])
self.assertEqual(qs, "q=link%5Benwiki%5D&labels=en,fr&props=prop")
@@ -236,10 +237,10 @@
w = query.WikidataQuery(cacheMaxAge=0)
- #this query doesn't return any items, save a bit of bandwidth!
+ # this query doesn't return any items, save a bit of bandwidth!
q = query.HasClaim(105).AND([query.NoClaim(225), query.HasClaim(100)])
- #check that the cache file is created
+ # check that the cache file is created
cacheFile = w.getCacheFilename(w.getQueryString(q, [], []))
# remove existing cache file
--
To view, visit https://gerrit.wikimedia.org/r/131684
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I6d7919c90ff66c9b7184dac427995cbf01d5023e
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Hashar <hashar(a)free.fr>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Siebrand <siebrand(a)kitano.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Clarify Python 2.6.5 or higher is required
......................................................................
Clarify Python 2.6.5 or higher is required
Due to http://bugs.python.org/issue2646, calling
f(**{u'a': 'b'})
is not allowed on Python versions older than 2.6.5.
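For reference, a minimal sketch of the failure mode issue2646 describes
(the function f and the dict literal are illustrative, not taken from
the patch):

    def f(a):
        return a

    kwargs = {u'a': 'b'}  # unicode keyword name, e.g. from json.loads()
    f(**kwargs)  # TypeError ("keywords must be strings") on CPython
                 # releases before 2.6.5; accepted on 2.6.5 and later

Hence the patch below raises the minimum supported version from 2.6 to
2.6.5.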
Change-Id: Ib19247ba7fd21a673497f213bdb0f13bf785969b
---
M pwb.py
1 file changed, 4 insertions(+), 4 deletions(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pwb.py b/pwb.py
index b07b0e9..14178a8 100644
--- a/pwb.py
+++ b/pwb.py
@@ -79,16 +79,16 @@
if not os.environ.get("PY3", False):
if sys.version_info[0] != 2:
raise RuntimeError("ERROR: Pywikibot only runs under Python 2")
- if sys.version_info[1] < 6:
- raise RuntimeError("ERROR: Pywikibot only runs under Python 2.6 "
+ if sys.version_info < (2, 6, 5):
+ raise RuntimeError("ERROR: Pywikibot only runs under Python 2.6.5 "
"or higher")
else:
if sys.version_info[0] not in (2, 3):
raise RuntimeError("ERROR: Pywikipediabot only runs under Python 2 "
"or Python 3")
version = tuple(sys.version_info)[:3]
- if version < (2, 6):
- raise RuntimeError("ERROR: Pywikibot only runs under Python 2.6 "
+ if version < (2, 6, 5):
+ raise RuntimeError("ERROR: Pywikibot only runs under Python 2.6.5 "
"or higher")
if version >= (3, ) and version < (3, 3):
raise RuntimeError("ERROR: Pywikibot only runs under Python 3.3 "
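The new checks lean on Python's lexicographic tuple comparison, so one
expression covers major, minor and micro at once. A standalone sketch
of that behaviour (not part of the patch):

    import sys

    # Tuples compare element by element, so a single comparison
    # against (2, 6, 5) handles all three version components:
    assert (2, 6, 4) < (2, 6, 5) < (2, 7)
    print(sys.version_info >= (2, 6, 5))  # True on any interpreter
                                          # this patch accepts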
--
To view, visit https://gerrit.wikimedia.org/r/131343
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ib19247ba7fd21a673497f213bdb0f13bf785969b
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: update mw version
......................................................................
update mw version
Change-Id: I6e9c2c619d0ed5fe677a8a3befc2d26201851f42
---
M family.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/family.py b/family.py
index 9d9b926..1071207 100644
--- a/family.py
+++ b/family.py
@@ -4960,7 +4960,7 @@
"""Return Wikimedia projects version number as a string."""
# Don't use this, use versionnumber() instead. This only exists
# to not break family files.
- return '1.24wmf1'
+ return '1.24wmf2'
def shared_image_repository(self, code):
return ('commons', 'commons')
--
To view, visit https://gerrit.wikimedia.org/r/131022
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I6e9c2c619d0ed5fe677a8a3befc2d26201851f42
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/compat
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: update mw version
......................................................................
update mw version
Change-Id: I292ea408cf9e48e8dc97948e05971a3420b4cc97
---
M pywikibot/family.py
1 file changed, 1 insertion(+), 1 deletion(-)
Approvals:
Xqt: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/family.py b/pywikibot/family.py
index ca98266..86929f3 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -1065,7 +1065,7 @@
"""Return Wikimedia projects version number as a string."""
# Don't use this, use versionnumber() instead. This only exists
# to not break family files.
- return '1.24wmf1'
+ return '1.24wmf2'
def shared_image_repository(self, code):
return ('commons', 'commons')
--
To view, visit https://gerrit.wikimedia.org/r/131021
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I292ea408cf9e48e8dc97948e05971a3420b4cc97
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Xqt <info(a)gno.de>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>