jenkins-bot has submitted this change and it was merged.
Change subject: Fixing links
......................................................................
Fixing links
Fixed a few links to domains that have been redirected,
and converted most to either https or protocol-relative URLs.
Change-Id: I2c3c58d0a52d1a80c1871bb9c281a09ce27024d9
---
M .travis.yml
M README-conversion.txt
M README.rst
M externals/README
M generate_family_file.py
M pywikibot/__init__.py
M pywikibot/bot.py
M pywikibot/comms/threadedhttp.py
M pywikibot/config2.py
M pywikibot/data/api.py
M pywikibot/data/wikidataquery.py
M pywikibot/families/wikibooks_family.py
M pywikibot/families/wikinews_family.py
M pywikibot/families/wikipedia_family.py
M pywikibot/families/wikiquote_family.py
M pywikibot/families/wikisource_family.py
M pywikibot/families/wikiversity_family.py
M pywikibot/families/wikivoyage_family.py
M pywikibot/families/wiktionary_family.py
M pywikibot/family.py
M pywikibot/fixes.py
M pywikibot/page.py
M pywikibot/pagegenerators.py
M pywikibot/site.py
M pywikibot/textlib.py
M pywikibot/userinterfaces/transliteration.py
M pywikibot/userinterfaces/win32_unicode.py
M pywikibot/weblib.py
M pywikibot/xmlreader.py
M scripts/add_text.py
M scripts/blockpageschecker.py
M scripts/casechecker.py
M scripts/checkimages.py
M scripts/cosmetic_changes.py
M scripts/data_ingestion.py
M scripts/flickrripper.py
M scripts/freebasemappingupload.py
M scripts/imagerecat.py
M scripts/interwiki.py
M scripts/isbn.py
M scripts/lonelypages.py
M scripts/maintenance/wikimedia_sites.py
M scripts/misspelling.py
M scripts/noreferences.py
M scripts/nowcommons.py
M scripts/protect.py
M scripts/redirect.py
M scripts/reflinks.py
M scripts/replace.py
M scripts/script_wui.py
M scripts/selflink.py
M scripts/template.py
M scripts/weblinkchecker.py
M scripts/welcome.py
M tests/data/article-pear.xml
M tests/data/article-pyrus.xml
M tests/ipregex_tests.py
M tests/pages/enwiki_help_editing.meta
M tests/weblib_tests.py
M tests/wikibase_tests.py
60 files changed, 231 insertions(+), 228 deletions(-)
Approvals:
Merlijn van Deen: Looks good to me, approved
jenkins-bot: Verified
diff --git a/.travis.yml b/.travis.yml
index 857344d..9bc903e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -27,8 +27,8 @@
env:
global:
# This is the encrypted password, which can only be decrypted by Travis itself
- # See
http://about.travis-ci.org/docs/user/encryption-keys/
- # And
http://about.travis-ci.org/docs/user/build-configuration/#Secure-environment-variables
+ # See
http://docs.travis-ci.com/user/encryption-keys/
+ # And
http://docs.travis-ci.com/user/build-configuration/#Secure-environment-variables
# Command use to generate: travis encrypt USER_PASSWORD=<pwd> -r
wikimedia/pywikibot-core
- secure:
kofInMlisiTBt9o/Ustc/vySlkKfxGzGCX2LwA1D2waym8sDTS0o5gMJ5LsrT/BUKwZbe1vLozPHqZrrkQvsdTml+DpZuotzdILs0m0f3BUoexEC6OON5IDljuxFyETrD1Ug44ih5Mc4lVFOdTcBzg501ZmswGwQrBvg/OyEFfE=
diff --git a/README-conversion.txt b/README-conversion.txt
index 24d62f4..c034db8 100644
--- a/README-conversion.txt
+++ b/README-conversion.txt
@@ -50,7 +50,7 @@
To run pywikibot, you will need the httplib2 and simplejson:
packages--
-* httplib2 :
http://code.google.com/p/httplib2/
+* httplib2 :
https://github.com/jcgregorio/httplib2
* simplejson :
http://svn.red-bean.com/bob/simplejson/tags/simplejson-1.7.1/docs/index.html
or, if you already have setuptools installed, just execute
diff --git a/README.rst b/README.rst
index 0d2a900..e340093 100644
--- a/README.rst
+++ b/README.rst
@@ -42,4 +42,4 @@
.. image::
https://secure.travis-ci.org/wikimedia/pywikibot-core.png?branch=master
:alt: Build Status
- :target:
http://travis-ci.org/wikimedia/pywikibot-core
+ :target:
https://travis-ci.org/wikimedia/pywikibot-core
diff --git a/externals/README b/externals/README
index 4ca2ba0..2e9b788 100644
--- a/externals/README
+++ b/externals/README
@@ -20,10 +20,10 @@
$ rm -rf parse-crontab-master
* lunatic-python: Two-way bridge between Python and Lua - Version 1.0
- (from
http://labix.org/lunatic-python)
+ (from
https://labix.org/lunatic-python)
Download the zip archive and unpack the 'lunatic-python-1.0' dir into the
path 'externals/_lua' afterwards.
- $ wget
http://labix.org/download/lunatic-python/lunatic-python-1.0.tar.bz2
+ $ wget
https://labix.org/download/lunatic-python/lunatic-python-1.0.tar.bz2
$ tar -xvf lunatic-python-1.0.tar.bz2
$ mv lunatic-python-1.0 _lua
Patch package dir in order to work with newer lua (>= 5.1) versions
diff --git a/generate_family_file.py b/generate_family_file.py
index 6ad7fc3..dfcdaf0 100644
--- a/generate_family_file.py
+++ b/generate_family_file.py
@@ -84,8 +84,8 @@
self.base_url = url
self.name = name
- self.wikis = {} # {'http://wiki/$1': Wiki('http://wiki/$1'),
...}
- self.langs = [] # [Wiki('http://wiki/$1'), ...]
+ self.wikis = {} # {'https://wiki/$1': Wiki('https://wiki/$1'),
...}
+ self.langs = [] # [Wiki('https://wiki/$1'), ...]
def run(self):
print("Generating family file from %s" % self.base_url)
@@ -307,7 +307,7 @@
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: %s <url> <short name>" % sys.argv[0])
- print("Example: %s
http://www.mywiki.bogus/wiki/Main_Page mywiki"
+ print("Example: %s
https://www.mywiki.bogus/wiki/Main_Page mywiki"
% sys.argv[0])
print("This will create the file families/mywiki_family.py")
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index 1a4101c..83093f5 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -508,7 +508,7 @@
"""
# This is probably not portable to non-terminal interfaces....
- # For information on difflib, see
http://pydoc.org/2.3/difflib.html
+ # For information on difflib, see
http://pydoc.org/2.1/difflib.html
color = {
'+': 'lightgreen',
'-': 'lightred',
diff --git a/pywikibot/bot.py b/pywikibot/bot.py
index 78e291a..a98eba8 100644
--- a/pywikibot/bot.py
+++ b/pywikibot/bot.py
@@ -389,7 +389,7 @@
If toStdout is True, the text will be sent to standard output,
so that it can be piped to another process. All other text will
- be sent to stderr. See:
http://en.wikipedia.org/wiki/Pipeline_%28Unix%29
+ be sent to stderr. See:
https://en.wikipedia.org/wiki/Pipeline_%28Unix%29
text can contain special sequences to create colored output. These
consist of the escape character \03 and the color name in curly braces,
diff --git a/pywikibot/comms/threadedhttp.py b/pywikibot/comms/threadedhttp.py
index 7f6efec..9c85179 100644
--- a/pywikibot/comms/threadedhttp.py
+++ b/pywikibot/comms/threadedhttp.py
@@ -299,7 +299,7 @@
>> import Queue
>> queue = Queue.Queue()
- >>> request =
HttpRequest('http://www.google.com')
+ >>> request = HttpRequest('https://www.google.com')
>> queue.put(request)
>> request.lock.acquire()
>> print request.data
diff --git a/pywikibot/config2.py
b/pywikibot/config2.py
index 9a0efc0..281c44a 100644
--- a/pywikibot/config2.py
+++ b/pywikibot/config2.py
@@ -217,7 +217,7 @@
# The encoding in which textfiles are stored, which contain lists of page
# titles. The most used is: 'utf-8'. 'utf-8-sig' recognizes BOM but it
is
# available on Python 2.5 or higher. For a complete list please see:
-#
http://docs.python.org/library/codecs.html#standard-encodings
+#
https://docs.python.org/2/library/codecs.html#standard-encodings
textfile_encoding = 'utf-8'
# tkinter isn't yet ready
@@ -332,7 +332,9 @@
interwiki_shownew = True
# Should interwiki.py output a graph PNG file on conflicts?
-# You need pydot for this:
http://dkbza.org/pydot.html
+# You need pydot for this:
+#
https://pypi.python.org/pypi/pydot/1.0.2
+#
https://code.google.com/p/pydot/
interwiki_graph = False
# Specifies that the robot should process that amount of subjects at a time,
@@ -354,11 +356,11 @@
interwiki_graph_formats = ['png']
# You can post the contents of your autonomous_problems.dat to the wiki,
-# e.g. to
http://de.wikipedia.org/wiki/Wikipedia:Interwiki-Konflikte .
+# e.g. to
https://de.wikipedia.org/wiki/Wikipedia:Interwiki-Konflikte .
# This allows others to assist you in resolving interwiki problems.
# To help these people, you can upload the interwiki graphs to your
# webspace somewhere. Set the base URL here, e.g.:
-# 'http://www.example.org/~yourname/interwiki-graphs/'
+# 'https://www.example.org/~yourname/interwiki-graphs/'
interwiki_graph_url = None
# Save file with local articles without interwikis.
@@ -407,7 +409,7 @@
noisysleep = 3.0
# Defer bot edits during periods of database server lag. For details, see
-#
http://www.mediawiki.org/wiki/Maxlag_parameter
+#
https://www.mediawiki.org/wiki/Maxlag_parameter
# You can set this variable to a number of seconds, or to None (or 0) to
# disable this behavior. Higher values are more aggressive in seeking
# access to the wiki.
@@ -455,18 +457,18 @@
# ############# SEARCH ENGINE SETTINGS ##############
# Some scripts allow querying Google via the Google Web API. To use this
-# feature, you must install the pyGoogle module from
http://pygoogle.sf.net/
+# feature, you must install the pyGoogle module from
http://pygoogle.sourceforge.net
# and have a Google Web API license key. Note that Google doesn't give out
# license keys anymore.
google_key = ''
# Some scripts allow using the Yahoo! Search Web Services. To use this feature,
-# you must install the pYsearch module from
http://pysearch.sourceforge.net/
-# and get a Yahoo AppID from
http://developer.yahoo.com
+# you must install the pYsearch module from
http://pysearch.sourceforge.net
+# and get a Yahoo AppID from
https://developer.yahoo.com/
yahoo_appid = ''
# To use Windows Live Search web service you must get an AppID from
-#
http://search.msn.com/developer
+#
http://www.bing.com/dev/en-us/dev-center
msn_appid = ''
# ############# COPYRIGHT SETTINGS ##############
@@ -621,7 +623,7 @@
line_separator = LS = u'\n'
# Settings to enable mwparserfromhell
-# <http://mwparserfromhell.readthedocs.org/en/latest/>
+# <https://mwparserfromhell.readthedocs.org/en/latest/>
# Currently used in textlib.extract_templates_and_params
# This is more accurate than our current regex, but only works
# if the user has already installed the library.
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index e3b7872..f7ad4c5 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -86,7 +86,7 @@
This is the lowest-level interface to the API, and can be used for any
request that a particular site's API supports. See the API documentation
- (
http://www.mediawiki.org/wiki/API) and site-specific settings for
+ (
https://www.mediawiki.org/wiki/API) and site-specific settings for
details on what parameters are accepted for each request type.
Uploading files is a special case: to upload, the parameter "mime" must
@@ -101,7 +101,7 @@
>> r = Request(site=mysite,
action="query", meta="userinfo")
>> # This is equivalent to
- >>> #
http://{path}/api.php?action=query&meta=userinfo&format=json
+ >>> # https://{path}/api.php?action=query&meta=userinfo&format=json
>> # change a parameter
>> r['meta'] = "userinfo|siteinfo"
>> # add a new parameter
diff --git a/pywikibot/data/wikidataquery.py
b/pywikibot/data/wikidataquery.py
index 239dbbb..a46b83e 100644
--- a/pywikibot/data/wikidataquery.py
+++ b/pywikibot/data/wikidataquery.py
@@ -397,7 +397,7 @@
class WikidataQuery():
"""
An interface to the WikidataQuery API. Default host is
-
http://wdq.wmflabs.org, but you can substitute
+
https://wdq.wmflabs.org/, but you can substitute
a different one.
Caching defaults to a subdir of the system temp directory with a
@@ -406,7 +406,7 @@
Set a zero or negative maxCacheAge to disable caching
"""
- def __init__(self, host="http://wdq.wmflabs.org", cacheDir=None,
+ def __init__(self, host="https://wdq.wmflabs.org", cacheDir=None,
cacheMaxAge=60):
self.host = host
self.cacheMaxAge = cacheMaxAge
diff --git a/pywikibot/families/wikibooks_family.py
b/pywikibot/families/wikibooks_family.py
index 0321bd9..d9d7c48 100644
--- a/pywikibot/families/wikibooks_family.py
+++ b/pywikibot/families/wikibooks_family.py
@@ -24,7 +24,7 @@
self.langs = dict([(lang, '%s.wikibooks.org' % lang)
for lang in self.languages_by_size])
- # Global bot allowed languages on
http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
+ # Global bot allowed languages on
https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = [
'af', 'ang', 'ca', 'fa', 'fy',
'it', 'nl', 'ru', 'th', 'zh',
]
@@ -46,51 +46,51 @@
}
self.obsolete = {
- 'aa': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Af…
- 'ak': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ak…
- 'als': None, #
http://als.wikipedia.org/wiki/Wikipedia:Stammtisch/Archiv_2008-1#Afterwards…
- 'as': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_As…
+ 'aa': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'ak': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'als': None, #
https://als.wikipedia.org/wiki/Wikipedia:Stammtisch/Archiv_2008-1#Afterward…
+ 'as': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
'ast': None,
- 'ay': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ay…
- 'ba': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ba…
- 'bi': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Bi…
- 'bm': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ba…
- 'bo': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ti…
- 'ch': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ch…
+ 'ay': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'ba': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
+ 'bi': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
+ 'bm': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
+ 'bo': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_T…
+ 'ch': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_C…
'co': None, #
https://bugzilla.wikimedia.org/show_bug.cgi?id=28644
'dk': 'da',
- 'ga': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ga…
- 'got': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Go…
- 'gn': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Gu…
- 'gu': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Gu…
+ 'ga': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_G…
+ 'got': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_G…
+ 'gn': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_G…
+ 'gu': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_G…
'jp': 'ja',
'kn': None, #
https://bugzilla.wikimedia.org/show_bug.cgi?id=20325
- 'ks': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ka…
- 'lb': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_L%…
- 'ln': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Li…
- 'lv': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_La…
- 'mi': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ma…
+ 'ks': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_K…
+ 'lb': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_L…
+ 'ln': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_L…
+ 'lv': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_L…
+ 'mi': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_M…
'minnan': 'zh-min-nan',
'mn': None,
'my': None,
- 'na': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Na…
- 'nah': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Na…
+ 'na': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_N…
+ 'nah': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_N…
'nb': 'no',
- 'nds': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Pl…
- 'ps': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Pa…
- 'qu': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Qu…
- 'rm': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ru…
- 'se': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Sa…
+ 'nds': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_P…
+ 'ps': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_P…
+ 'qu': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Q…
+ 'rm': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_R…
+ 'se': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_S…
'simple': 'en', #
https://bugzilla.wikimedia.org/show_bug.cgi?id=20325
- 'su': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ba…
+ 'su': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
'sw': None, #
https://bugzilla.wikimedia.org/show_bug.cgi?id=25170
'tk': None,
'tokipona': None,
- 'ug': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Uy…
+ 'ug': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_U…
'vo': None, #
https://bugzilla.wikimedia.org/show_bug.cgi?id=37413
- 'wa': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Wa…
- 'xh': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Xh…
- 'yo': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Yo…
+ 'wa': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_W…
+ 'xh': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_X…
+ 'yo': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Y…
'za': None, #
https://bugzilla.wikimedia.org/show_bug.cgi?id=20325
'zh-tw': 'zh',
'zh-cn': 'zh',
diff --git a/pywikibot/families/wikinews_family.py
b/pywikibot/families/wikinews_family.py
index bdaf23b..0cdcf7e 100644
--- a/pywikibot/families/wikinews_family.py
+++ b/pywikibot/families/wikinews_family.py
@@ -19,7 +19,7 @@
self.langs = dict([(lang, '%s.wikinews.org' % lang)
for lang in self.languages_by_size])
- # Global bot allowed languages on
http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
+ # Global bot allowed languages on
https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = ['ca', 'cs', 'en', 'fa',
'ko', ]
# Which languages have a special order for putting interlanguage links,
diff --git a/pywikibot/families/wikipedia_family.py
b/pywikibot/families/wikipedia_family.py
index a00a05c..912f8c8 100644
--- a/pywikibot/families/wikipedia_family.py
+++ b/pywikibot/families/wikipedia_family.py
@@ -242,7 +242,7 @@
]
# Global bot allowed languages on
- #
http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
+ #
https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = [
'ab', 'ace', 'af', 'ak', 'als',
'am', 'an', 'ang', 'ar', 'arc',
'arz', 'as', 'ast', 'av', 'ay',
'az', 'ba', 'bar', 'bat-smg', 'bcl',
@@ -343,24 +343,24 @@
}
self.obsolete = {
- 'aa': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Af…
- 'cho': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ch…
+ 'aa': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'cho': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_C…
'dk': 'da',
- 'ho': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Hi…
- 'hz': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_He…
- 'ii': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Yi…
- 'kj': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Kw…
- 'kr': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ka…
- 'mh': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ma…
+ 'ho': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_H…
+ 'hz': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_H…
+ 'ii': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Y…
+ 'kj': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_K…
+ 'kr': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_K…
+ 'mh': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_M…
'minnan': 'zh-min-nan',
- 'mo': 'ro', #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Mo…
- 'mus': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Mu…
+ 'mo': 'ro', #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_M…
+ 'mus': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_M…
'nan': 'zh-min-nan',
'nl_nds': 'nl-nds', # miss-spelling
'nb': 'no',
- 'ng': None, # (not reachable)
http://meta.wikimedia.org/wiki/Inactive_wikis
+ 'ng': None, # (not reachable)
https://meta.wikimedia.org/wiki/Inactive_wikis
'jp': 'ja',
- 'ru-sib': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Si…
+ 'ru-sib': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_S…
'tlh': None,
'tokipona': None,
'zh-tw': 'zh',
diff --git a/pywikibot/families/wikiquote_family.py
b/pywikibot/families/wikiquote_family.py
index 659ba15..3c1242b 100644
--- a/pywikibot/families/wikiquote_family.py
+++ b/pywikibot/families/wikiquote_family.py
@@ -23,7 +23,7 @@
self.langs = dict([(lang, '%s.wikiquote.org' % lang)
for lang in self.languages_by_size])
- # Global bot allowed languages on
http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
+ # Global bot allowed languages on
https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = [
'af', 'am', 'ar', 'az', 'be',
'bg', 'br', 'bs', 'ca', 'cs', 'da',
'el', 'eo', 'es', 'et', 'eu',
'fa', 'fi', 'fr', 'gl', 'he', 'hi',
@@ -51,32 +51,32 @@
}
self.obsolete = {
- 'als': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Al…
+ 'als': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
'ang': None, #
https://bugzilla.wikimedia.org/show_bug.cgi?id=29150
'ast': None, #
https://bugzilla.wikimedia.org/show_bug.cgi?id=28964
- 'bm': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ba…
+ 'bm': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
'co': None,
- 'cr': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ne…
+ 'cr': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_N…
'dk': 'da',
- 'ga': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ga…
+ 'ga': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_G…
'jp': 'ja',
'kk': None, #
https://bugzilla.wikimedia.org/show_bug.cgi?id=20325
- 'kr': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ka…
- 'ks': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ka…
- 'kw': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ke…
+ 'kr': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_K…
+ 'ks': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_K…
+ 'kw': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_K…
'lb': None,
'minnan': 'zh-min-nan',
- 'na': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Na…
+ 'na': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_N…
'nb': 'no',
- 'nds': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Lo…
- 'qu': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Qu…
- 'simple': 'en', #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Si…
- 'tk': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Tu…
+ 'nds': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_L…
+ 'qu': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Q…
+ 'simple': 'en', #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_S…
+ 'tk': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_T…
'tokipona': None,
- 'tt': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ta…
- 'ug': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Oy…
- 'vo': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Vo…
- 'za': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Zh…
+ 'tt': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_T…
+ 'ug': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_O…
+ 'vo': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_V…
+ 'za': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Z…
'zh-tw': 'zh',
'zh-cn': 'zh'
}
diff --git a/pywikibot/families/wikisource_family.py
b/pywikibot/families/wikisource_family.py
index 36bc326..a81d7f8 100644
--- a/pywikibot/families/wikisource_family.py
+++ b/pywikibot/families/wikisource_family.py
@@ -23,7 +23,7 @@
for lang in self.languages_by_size])
self.langs['-'] = 'wikisource.org'
- # Global bot allowed languages on
http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
+ # Global bot allowed languages on
https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = [
'ca', 'el', 'fa', 'it', 'ko',
'no', 'pl', 'vi', 'zh',
]
@@ -45,9 +45,9 @@
}
self.obsolete = {
- 'ang': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ol…
+ 'ang': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_O…
'dk': 'da',
- 'ht': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ha…
+ 'ht': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_H…
'jp': 'ja',
'minnan': 'zh-min-nan',
'nb': 'no',
diff --git a/pywikibot/families/wikiversity_family.py
b/pywikibot/families/wikiversity_family.py
index 49d94c5..812da21 100644
--- a/pywikibot/families/wikiversity_family.py
+++ b/pywikibot/families/wikiversity_family.py
@@ -18,5 +18,5 @@
self.langs = dict([(lang, '%s.wikiversity.org' % lang)
for lang in self.languages_by_size])
- # Global bot allowed languages on
http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
+ # Global bot allowed languages on
https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = ['ja', ]
diff --git a/pywikibot/families/wikivoyage_family.py
b/pywikibot/families/wikivoyage_family.py
index 10d6daf..6932561 100644
--- a/pywikibot/families/wikivoyage_family.py
+++ b/pywikibot/families/wikivoyage_family.py
@@ -18,7 +18,7 @@
self.langs = dict([(lang, '%s.wikivoyage.org' % lang)
for lang in self.languages_by_size])
- # Global bot allowed languages on
http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
+ # Global bot allowed languages on
https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = ['es', 'ru', ]
def shared_data_repository(self, code, transcluded=False):
diff --git a/pywikibot/families/wiktionary_family.py
b/pywikibot/families/wiktionary_family.py
index 0b63546..25516af 100644
--- a/pywikibot/families/wiktionary_family.py
+++ b/pywikibot/families/wiktionary_family.py
@@ -31,7 +31,7 @@
for lang in self.languages_by_size])
# Global bot allowed languages on
- #
http://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
+ #
https://meta.wikimedia.org/wiki/Bot_policy/Implementation#Current_implementation
self.cross_allowed = [
'am', 'an', 'ang', 'ast', 'ay',
'az', 'be', 'bg', 'bn', 'br', 'bs',
'ca', 'chr', 'co', 'cy', 'da',
'dv', 'eo', 'es', 'et', 'eu', 'fa',
@@ -88,38 +88,38 @@
}
self.obsolete = {
- 'aa': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Af…
- 'ab': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ab…
- 'ak': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ak…
- 'als': None, #
http://als.wikipedia.org/wiki/Wikipedia:Stammtisch/Archiv_2008-1#Afterwards…
- 'as': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_As…
- 'av': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Av…
- 'ba': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ba…
- 'bh': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Bi…
- 'bi': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Bi…
- 'bm': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ba…
- 'bo': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ti…
- 'ch': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ch…
- 'cr': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ne…
+ 'aa': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'ab': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'ak': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'als': None, #
https://als.wikipedia.org/wiki/Wikipedia:Stammtisch/Archiv_2008-1#Afterward…
+ 'as': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'av': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_A…
+ 'ba': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
+ 'bh': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
+ 'bi': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
+ 'bm': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_B…
+ 'bo': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_T…
+ 'ch': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_C…
+ 'cr': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_N…
'dk': 'da',
'dz': None,
- 'ik': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_In…
+ 'ik': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_I…
'jp': 'ja',
- 'mh': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ma…
- 'mo': 'ro', #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Mo…
+ 'mh': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_M…
+ 'mo': 'ro', #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_M…
'minnan': 'zh-min-nan',
'nb': 'no',
- 'pi': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Pa…
- 'rm': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Rh…
- 'rn': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Ki…
- 'sc': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Sa…
- 'sn': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Sh…
- 'to': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_To…
- 'tlh': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Kl…
- 'tw': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Tw…
+ 'pi': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_P…
+ 'rm': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_R…
+ 'rn': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_K…
+ 'sc': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_S…
+ 'sn': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_S…
+ 'to': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_T…
+ 'tlh': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_K…
+ 'tw': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_T…
'tokipona': None,
- 'xh': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Xh…
- 'yo': None, #
http://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Yo…
+ 'xh': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_X…
+ 'yo': None, #
https://meta.wikimedia.org/wiki/Proposals_for_closing_projects/Closure_of_Y…
'zh-tw': 'zh',
'zh-cn': 'zh'
}
diff --git a/pywikibot/family.py b/pywikibot/family.py
index 0273063..0fab3c1 100644
--- a/pywikibot/family.py
+++ b/pywikibot/family.py
@@ -25,7 +25,7 @@
self.name = None
# For interwiki sorting order see
- #
http://meta.wikimedia.org/wiki/Interwiki_sorting_order
+ #
https://meta.wikimedia.org/wiki/Interwiki_sorting_order
# The sorting order by language name from meta
# MediaWiki:Interwiki_config-sorting_order-native-languagename
@@ -806,7 +806,7 @@
}
# LDAP domain if your wiki uses LDAP authentication,
- #
http://www.mediawiki.org/wiki/Extension:LDAP_Authentication
+ #
https://www.mediawiki.org/wiki/Extension:LDAP_Authentication
self.ldapDomain = ()
# Allows crossnamespace interwiki linking.
diff --git a/pywikibot/fixes.py b/pywikibot/fixes.py
index 34f2b1d..b5e8002 100644
--- a/pywikibot/fixes.py
+++ b/pywikibot/fixes.py
@@ -33,7 +33,7 @@
disabled, see
* fckeditor - Try to convert FCKeditor HTML tags to wiki
syntax.
-
http://lists.wikimedia.org/pipermail/wikibots-l/2009-February/000290.html
+
https://lists.wikimedia.org/pipermail/wikibots-l/2009-February/000290.html
"""
@@ -128,7 +128,7 @@
# Leerzeichen und Komma vertauscht
(u'([a-zäöüß](\]\])?) ,((\[\[)?[a-zäöüA-ZÄÖÜ])',
r'\1, \3'),
# Plenks (d. h. Leerzeichen auch vor dem
Komma/Punkt/Ausrufezeichen/Fragezeichen)
- # Achtung bei Französisch:
http://de.wikipedia.org/wiki/Plenk#Sonderfall_Franz.C3.B6sisch
+ # Achtung bei Französisch:
https://de.wikipedia.org/wiki/Plenk#Sonderfall_Franz.C3.B6sisch
# Leerzeichen vor Doppelpunkt/Semikolon kann korrekt sein, nach irgendeiner
Norm für Zitationen.
(u'([a-zäöüß](\]\])?) ([,\.!\?]) ((\[\[)?[a-zäöüA-ZÄÖÜ])',
r'\1\3 \4'),
#(u'([a-z]\.)([A-Z])',
r'\1 \2'),
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 6e658d0..d97430a 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -1744,7 +1744,7 @@
u'http://wikitravel.org/upload/shared/')
else:
return self.fileUrl().startswith(
- 'http://upload.wikimedia.org/wikipedia/commons/')
+ 'https://upload.wikimedia.org/wikipedia/commons/')
@deprecated("ImagePage.getFileSHA1Sum()")
def getFileMd5Sum(self):
@@ -3239,7 +3239,7 @@
# Normalize unicode string to a NFC (composed) format to allow
# proper string comparisons. According to
- #
http://svn.wikimedia.org/viewvc/mediawiki/branches/REL1_6/phase3/includes/n…
+ #
https://svn.wikimedia.org/viewvc/mediawiki/branches/REL1_6/phase3/includes/…
# the MediaWiki code normalizes everything to NFC, not NFKC
# (which might result in information loss).
t = unicodedata.normalize('NFC', t)
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 3148e32..e69ef05 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""
This module offers a wide variety of page generators. A page generator is an
-object that is iterable (see
http://www.python.org/dev/peps/pep-0255/ ) and
+object that is iterable (see
http://legacy.python.org/dev/peps/pep-0255/ ) and
that yields page objects on which other scripts can then work.
In general, there is no need to run this script directly. It can, however,
@@ -1197,10 +1197,10 @@
def UntaggedPageGenerator(untaggedProject, limit=500):
""" Function to get the pages returned by this tool:
-
http://toolserver.org/~daniel/WikiSense/UntaggedImages.php
+
https://toolserver.org/~daniel/WikiSense/UntaggedImages.php
"""
- URL = "http://toolserver.org/~daniel/WikiSense/UntaggedImages.php?"
- REGEXP = r"<td valign='top' title='Name'><a
href='http://.*?" \
+ URL = "https://toolserver.org/~daniel/WikiSense/UntaggedImages.php?"
+ REGEXP = r"<td valign='top' title='Name'><a
href='http[s]?://.*?" \
"\.org/w/index\.php\?title=(.*?)'>.*?</a></td>"
lang, project = untaggedProject.split('.', 1)
if lang == 'commons':
@@ -1326,14 +1326,14 @@
title = url[len(base):]
page = pywikibot.Page(pywikibot.Link(title, self.site))
# Google contains links in the format
- #
http://de.wikipedia.org/wiki/en:Foobar
+ #
https://de.wikipedia.org/wiki/en:Foobar
if page.site == self.site:
yield page
def MySQLPageGenerator(query, site=None):
"""
- Requires oursql <http://pythonhosted.org/oursql/> or
+ Requires oursql <https://pythonhosted.org/oursql/> or
MySQLdb <https://sourceforge.net/projects/mysql-python/>
Yields a list of pages based on a MySQL query. Each query
should provide the page namespace and page title. An example
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 59e7635..5f5ba7e 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -1567,7 +1567,7 @@
namespaces=namespaces, step=step, total=total,
g_content=content, **blargs)
if followRedirects:
- # bug: see
http://bugzilla.wikimedia.org/show_bug.cgi?id=7304
+ # bug: see
https://bugzilla.wikimedia.org/show_bug.cgi?id=7304
# links identified by MediaWiki as redirects may not really be,
# so we have to check each "redirect" page and see if it
# really redirects to this page
@@ -2255,7 +2255,7 @@
Note that logevents only logs user blocks, while this method
iterates all blocks including IP ranges. The iterator yields dicts
containing keys corresponding to the block properties (see
-
http://www.mediawiki.org/wiki/API:Query_-_Lists for documentation).
+
https://www.mediawiki.org/wiki/API:Query_-_Lists for documentation).
@param starttime: start iterating at this Timestamp
@param endtime: stop iterating at this Timestamp
@@ -2863,7 +2863,7 @@
% page.title())
return True
page._revid = result["edit"]["newrevid"]
- # see
http://www.mediawiki.org/wiki/API:Wikimania_2006_API_discussion#Notes
+ # see
https://www.mediawiki.org/wiki/API:Wikimania_2006_API_discussion#Notes
# not safe to assume that saved text is the same as sent
self.loadrevisions(page, getText=True)
return True
@@ -3742,7 +3742,7 @@
# the IDs returned from the API can be upper or lowercase, depending
# on the version. See for more information:
#
https://bugzilla.wikimedia.org/show_bug.cgi?id=53894
- #
http://lists.wikimedia.org/pipermail/wikidata-tech/2013-September/000296.ht…
+ #
https://lists.wikimedia.org/pipermail/wikidata-tech/2013-September/000296.h…
try:
dtype = data['entities'][prop.getID()]['datatype']
except KeyError:
diff --git a/pywikibot/textlib.py b/pywikibot/textlib.py
index 182359e..d561b73 100644
--- a/pywikibot/textlib.py
+++ b/pywikibot/textlib.py
@@ -304,7 +304,7 @@
"""
# try to merge with 'removeDisabledParts()' above into one generic function
- # thanks to
http://www.hellboundhackers.org/articles/841-using-python-39;s-htmlparser-c…
+ # thanks to
https://www.hellboundhackers.org/articles/read-article.php?article_id=841
parser = _GetDataHTML()
parser.keeptags = keeptags
parser.feed(text)
@@ -312,7 +312,7 @@
return parser.textdata
-# thanks to
http://docs.python.org/library/htmlparser.html
+# thanks to
https://docs.python.org/2/library/htmlparser.html
class _GetDataHTML(HTMLParser):
textdata = u''
keeptags = []
@@ -780,7 +780,7 @@
The Pywikibot is no longer allowed to touch categories on the German
Wikipedia on pages that contain the Personendaten template because of the
non-standard placement of that template.
-See
http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Positi…
+See
https://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Posit…
""")
separator = site.family.category_text_separator
iseparator = site.family.interwiki_text_separator
@@ -877,7 +877,7 @@
notInside = '\]\s<>"'
# The first half of this regular expression is required because '' is
# not allowed inside links. For example, in this wiki text:
- # ''Please see
http://www.example.org.''
+ # ''Please see
https://www.example.org.''
# .'' shouldn't be considered as part of the link.
regex = r'(?P<url>http[s]?://[^%(notInside)s]*?[^%(notAtEnd)s]' \
r'(?=[%(notAtEnd)s]*\'\')|http[s]?://[^%(notInside)s]*' \
diff --git a/pywikibot/userinterfaces/transliteration.py
b/pywikibot/userinterfaces/transliteration.py
index 288348d..5389a2f 100644
--- a/pywikibot/userinterfaces/transliteration.py
+++ b/pywikibot/userinterfaces/transliteration.py
@@ -1657,7 +1657,7 @@
self.trans[u"໗"] = "7"
self.trans[u"໘"] = "8"
self.trans[u"໙"] = "9"
- # from:
http://www.wikidata.org/wiki/MediaWiki:Gadget-SimpleTransliterate.js
+ # from:
https://www.wikidata.org/wiki/MediaWiki:Gadget-SimpleTransliterate.js
self.trans[u"ଂ"] = "anusvara"
self.trans[u"ઇ"] = "i"
self.trans[u"എ"] = "e"
diff --git a/pywikibot/userinterfaces/win32_unicode.py
b/pywikibot/userinterfaces/win32_unicode.py
index 5e452c6..42714e2 100755
--- a/pywikibot/userinterfaces/win32_unicode.py
+++ b/pywikibot/userinterfaces/win32_unicode.py
@@ -6,12 +6,12 @@
# Question: "Windows cmd encoding change causes Python crash" [3] by Alex [4],
# Answered [5] by David-Sarah Hopwood [6].
#
-# [1]
http://stackoverflow.com
-# [2]
http://creativecommons.org/licenses/by-sa/3.0/
-# [3]
http://stackoverflow.com/questions/878972
-# [4]
http://stackoverflow.com/users/85185
-# [4]
http://stackoverflow.com/a/3259271/118671
-# [5]
http://stackoverflow.com/users/393146
+# [1]
https://stackoverflow.com
+# [2]
https://creativecommons.org/licenses/by-sa/3.0/
+# [3]
https://stackoverflow.com/questions/878972
+# [4]
https://stackoverflow.com/users/85185
+# [4]
https://stackoverflow.com/a/3259271/118671
+# [5]
https://stackoverflow.com/users/393146
#
################################################
#
@@ -51,16 +51,16 @@
# This also fixes <http://bugs.python.org/issue1602>.
# Credit to Michael Kaplan
<http://blogs.msdn.com/b/michkap/archive/2010/04/07/9989346.aspx>
# and TZOmegaTZIOY
- #
<http://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
+ #
<https://stackoverflow.com/questions/878972/windows-cmd-encoding-change-causes-python-crash/1432462#1432462>.
try:
- # <http://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
+ # <https://msdn.microsoft.com/en-us/library/ms683231(VS.85).aspx>
# HANDLE WINAPI GetStdHandle(DWORD nStdHandle);
# returns INVALID_HANDLE_VALUE, NULL, or a valid handle
#
- # <http://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
+ # <https://msdn.microsoft.com/en-us/library/aa364960(VS.85).aspx>
# DWORD WINAPI GetFileType(DWORD hFile);
#
- # <http://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
+ # <https://msdn.microsoft.com/en-us/library/ms683167(VS.85).aspx>
# BOOL WINAPI GetConsoleMode(HANDLE hConsole, LPDWORD lpMode);
GetStdHandle = WINFUNCTYPE(HANDLE, DWORD)(("GetStdHandle",
windll.kernel32))
@@ -184,7 +184,7 @@
n = DWORD(0)
# There is a shorter-than-documented limitation on the
# length of the string passed to WriteConsoleW (see
- #
<http://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232>.
+ #
<https://tahoe-lafs.org/trac/tahoe-lafs/ticket/1232>.
retval = WriteConsoleW(self._hConsole, text,
min(remaining, 10000),
byref(n), None)
diff --git a/pywikibot/weblib.py b/pywikibot/weblib.py
index c2ad86e..11054dd 100644
--- a/pywikibot/weblib.py
+++ b/pywikibot/weblib.py
@@ -23,11 +23,11 @@
timestamp - requested archive date. The version closest to that moment
is returned. Format: YYYYMMDDhhmmss or part thereof.
- See [[:mw:Archived Pages]] and
http://archive.org/help/wayback_api.php
+ See [[:mw:Archived Pages]] and
https://archive.org/help/wayback_api.php
for more details.
"""
import json
- uri =
u'http://archive.org/wayback/available?'
+ uri =
u'https://archive.org/wayback/available?'
query = {'url': url}
diff --git a/pywikibot/xmlreader.py b/pywikibot/xmlreader.py
index 1ad4e2e..4f7988e 100644
--- a/pywikibot/xmlreader.py
+++ b/pywikibot/xmlreader.py
@@ -4,7 +4,7 @@
Each XmlEntry object represents a page, as read from an XML source
The XmlDump class reads a pages_current XML dump (like the ones offered on
-http://dumps.wikimedia.org/backup-index.html) and offers a generator over
+https://dumps.wikimedia.org/backup-index.html) and offers a generator over
XmlEntry objects which can be used by other bots.
"""
#
diff --git a/scripts/add_text.py b/scripts/add_text.py
index 7d17d17..d5c6d3a 100644
--- a/scripts/add_text.py
+++ b/scripts/add_text.py
@@ -58,7 +58,7 @@
This script has been written by Botwiki's staff, if you want to help us
or you need some help regarding this script, you can find us here:
-*
http://botwiki.sno.cc
+*
http://botwiki.sno.cc/wiki/Main_Page
"""
diff --git a/scripts/blockpageschecker.py b/scripts/blockpageschecker.py
index 62b1d26..c21625b 100755
--- a/scripts/blockpageschecker.py
+++ b/scripts/blockpageschecker.py
@@ -14,7 +14,7 @@
¶ms;
-xml Retrieve information from a local XML dump (pages-articles
- or pages-meta-current, see
http://download.wikimedia.org).
+ or pages-meta-current, see
https://download.wikimedia.org).
Argument can also be given as "-xml:filename".
-protectedpages: Check all the blocked pages; useful when you have not
@@ -41,7 +41,7 @@
You have to edit this script in order to add your preferences
otherwise the script won't work!
-If you have problems, ask on botwiki (
http://botwiki.sno.cc )
+If you have problems, ask on botwiki (
http://botwiki.sno.cc/wiki/Main_Page )
or on IRC (#pywikipediabot)
--- Example of how to use the script ---
diff --git a/scripts/casechecker.py b/scripts/casechecker.py
index 2f0852c..afaf2a2 100644
--- a/scripts/casechecker.py
+++ b/scripts/casechecker.py
@@ -21,7 +21,7 @@
#
# Permutations code was taken from
-#
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/190465
+#
https://code.activestate.com/recipes/190465/
#
def xuniqueCombinations(items, n):
if n == 0:
@@ -38,7 +38,7 @@
# This code makes this script Windows ONLY!!!
# Feel free to adapt it to another platform
#
-# Adapted from
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/496901
+# Adapted from
https://code.activestate.com/recipes/496901/
#
STD_OUTPUT_HANDLE = -11
diff --git a/scripts/checkimages.py b/scripts/checkimages.py
index 20f2a6c..e361c38 100644
--- a/scripts/checkimages.py
+++ b/scripts/checkimages.py
@@ -47,7 +47,7 @@
-url[:#] Define the url where are the images
-untagged[:#] Use daniel's tool as generator:
-
http://toolserver.org/~daniel/WikiSense/UntaggedImages.php
+
https://toolserver.org/~daniel/WikiSense/UntaggedImages.php
-nologerror If given, this option will disable the error that is risen
when the log is full.
@@ -831,7 +831,7 @@
lang = untaggedProject.split('.', 1)[0]
project = '.%s' % untaggedProject.split('.', 1)[1]
- URL =
u'http://toolserver.org/~daniel/WikiSense/UntaggedImages.php?'
+ URL =
u'https://toolserver.org/~daniel/WikiSense/UntaggedImages.php?'
if lang == 'commons':
link = (
URL +
@@ -845,7 +845,7 @@
)
text = self.site.getUrl(link, no_hostname=True)
results = re.findall(
        - r"<td valign='top' title='Name'><a
href='http://.*?\.org/w/index\.php\?title=(.*?)'>.*?</a></td>",
        + r"<td valign='top' title='Name'><a
href='http[s]?://.*?\.org/w/index\.php\?title=(.*?)'>.*?</a></td>",
text)
if results:
for result in results:
diff --git a/scripts/cosmetic_changes.py b/scripts/cosmetic_changes.py
index 67bb549..33ae271 100755
--- a/scripts/cosmetic_changes.py
+++ b/scripts/cosmetic_changes.py
@@ -258,7 +258,7 @@
# The PyWikipediaBot is no longer allowed to touch categories on the
# German Wikipedia. See
- #
http://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Positi…
+ #
https://de.wikipedia.org/wiki/Hilfe_Diskussion:Personendaten/Archiv/1#Posit…
# ignoring nn-wiki of cause of the comment line above iw section
if not self.template and '{{Personendaten' not in text and \
'{{SORTIERUNG' not in text and '{{DEFAULTSORT' not in text and
\
@@ -526,7 +526,7 @@
8206, # left-to-right mark (<r;)
8207, # right-to-left mark (&rtl;)
]
- # ignore ' see
http://eo.wikipedia.org/w/index.php?title=Liberec&diff=next&oldid=2…
+ # ignore ' see
https://eo.wikipedia.org/w/index.php?title=Liberec&diff=next&oldid=…
#if self.site.lang == 'eo':
# ignore += [39]
if self.template:
@@ -620,7 +620,7 @@
'startspace']
# link to the wiki working on
## TODO: disable this for difflinks and titled links
- ##
http://de.wikipedia.org/w/index.php?title=Wikipedia%3aVandalismusmeldung&am…
+ ##
https://de.wikipedia.org/w/index.php?title=Wikipedia%3aVandalismusmeldung&a…
## text = pywikibot.replaceExcept(text,
##
r'\[https?://%s\.%s\.org/wiki/(?P<link>\S+)\s+(?P<title>.+?…
## % (self.site.lang, self.site.family.name),
@@ -684,7 +684,7 @@
return text
def fixReferences(self, text):
-
#http://en.wikipedia.org/wiki/User:AnomieBOT/source/tasks/OrphanReferenceFixer.pm
+
#https://en.wikipedia.org/wiki/User:AnomieBOT/source/tasks/OrphanReferenceFixer.pm
exceptions = ['nowiki', 'comment', 'math', 'pre',
'source',
'startspace']
@@ -791,7 +791,7 @@
text = pywikibot.replaceExcept(text, str(i), new[i], exceptions)
return text
- # Retrieved from
"http://commons.wikimedia.org/wiki/Commons:Tools/pywiki_file_description_cleanup"
+ # Retrieved from
"https://commons.wikimedia.org/wiki/Commons:Tools/pywiki_file_description_cleanup"
def commonsfiledesc(self, text):
# section headers to {{int:}} versions
exceptions = ['comment', 'includeonly', 'math',
'noinclude', 'nowiki',
diff --git a/scripts/data_ingestion.py b/scripts/data_ingestion.py
index 4f11224..ecc6052 100755
--- a/scripts/data_ingestion.py
+++ b/scripts/data_ingestion.py
@@ -266,7 +266,7 @@
database = {}
reader = csv.DictReader(open(self.configuration.get('csvFile'),
"rb"), dialect=self.configuration.get('csvDialect'),
delimiter=self.configuration.csvDelimiter)
- # FIXME : Encoding problems
http://docs.python.org/library/csv.html#csv-examples
+ # FIXME : Encoding problems
https://docs.python.org/2/library/csv.html#csv-examples
for row in reader:
self.metadataCSV(row)
self.processFile(metadata)
diff --git a/scripts/flickrripper.py b/scripts/flickrripper.py
index 8cfdf7b..e9816b9 100644
--- a/scripts/flickrripper.py
+++ b/scripts/flickrripper.py
@@ -395,7 +395,7 @@
retry = False
found_start_id = not start_id
- #
http://www.flickr.com/services/api/flickr.groups.pools.getPhotos.html
+ #
https://www.flickr.com/services/api/flickr.groups.pools.getPhotos.html
# Get the photos in a group
if group_id:
#First get the total number of photo's in the group
@@ -427,7 +427,7 @@
pywikibot.output(u'Flickr api problem, sleeping')
time.sleep(30)
- #
http://www.flickr.com/services/api/flickr.photosets.getPhotos.html
+ #
https://www.flickr.com/services/api/flickr.photosets.getPhotos.html
# Get the photos in a photoset
elif photoset_id:
photos = flickr.photosets_getPhotos(photoset_id=photoset_id,
@@ -456,7 +456,7 @@
pywikibot.output(u'Flickr api problem, sleeping')
time.sleep(30)
- #
http://www.flickr.com/services/api/flickr.people.getPublicPhotos.html
+ #
https://www.flickr.com/services/api/flickr.people.getPublicPhotos.html
# Get the (public) photos uploaded by a user
elif user_id:
photos = flickr.people_getPublicPhotos(user_id=user_id,
@@ -511,7 +511,7 @@
if not config.flickr['api_key']:
pywikibot.output('Flickr api key not found! Get yourself an api key')
pywikibot.output(
- 'Any flickr user can get a key at
http://www.flickr.com/services/api/keys/apply/')
+ 'Any flickr user can get a key at
https://www.flickr.com/services/api/keys/apply/')
return
if 'api_secret' in config.flickr and config.flickr['api_secret']:
diff --git a/scripts/freebasemappingupload.py b/scripts/freebasemappingupload.py
index 6a7a334..9aae10a 100644
--- a/scripts/freebasemappingupload.py
+++ b/scripts/freebasemappingupload.py
@@ -57,14 +57,14 @@
if not line or line.startswith('#'):
return
mid, sameas, qid, dot = line.split()
- if sameas != '<http://www.w3.org/2002/07/owl#sameAs>':
+ if sameas != '<https://www.w3.org/2002/07/owl#sameAs>':
return
if dot != '.':
return
        - if not
mid.startswith('<http://rdf.freebase.com/ns/m'):
        + if not
mid.startswith('<https://rdf.freebase.com/ns/m'):
return
mid = '/m/' + mid[30:-1]
        - if not
qid.startswith('<http://www.wikidata.org/entity/Q'):
        + if not
qid.startswith('<https://www.wikidata.org/entity/Q'):
return
qid = 'Q' + qid[33:-1]
data = pywikibot.ItemPage(self.repo, qid)
diff --git a/scripts/imagerecat.py b/scripts/imagerecat.py
index c6f4c36..748985e 100644
--- a/scripts/imagerecat.py
+++ b/scripts/imagerecat.py
@@ -150,7 +150,7 @@
if tries < maxtries:
tries += 1
commonsHelperPage = urllib.urlopen(
-
"http://toolserver.org/~daniel/WikiSense/CommonSense.php?%s" % parameters)
+
"https://toolserver.org/~daniel/WikiSense/CommonSense.php?%s" % parameters)
matches = commonsenseRe.search(
commonsHelperPage.read().decode('utf-8'))
gotInfo = True
@@ -209,7 +209,7 @@
def getOpenStreetMap(latitude, longitude):
"""
- Get the result from
http://nominatim.openstreetmap.org/reverse
+ Get the result from
https://nominatim.openstreetmap.org/reverse
and put it in a list of tuples to play around with
"""
result = []
@@ -217,7 +217,7 @@
parameters = urllib.urlencode({'lat': latitude, 'lon': longitude,
'accept-language': 'en'})
while not gotInfo:
try:
- page =
urllib.urlopen("http://nominatim.openstreetmap.org/reverse?format=xml&… %
parameters)
+ page =
urllib.urlopen("https://nominatim.openstreetmap.org/reverse?format=xml… %
parameters)
et = xml.etree.ElementTree.parse(page)
gotInfo = True
except IOError:
@@ -375,7 +375,7 @@
filterCategoriesRe = re.compile('\[\[Category:([^\]]*)\]\]')
try:
filterCategoriesPage = urllib.urlopen(
- "http://toolserver.org/~multichill/filtercats.php?%s" %
parameters)
+ "https://toolserver.org/~multichill/filtercats.php?%s" %
parameters)
result = filterCategoriesRe.findall(
filterCategoriesPage.read().decode('utf-8'))
except IOError:
diff --git a/scripts/interwiki.py b/scripts/interwiki.py
index 38b1c43..4e56937 100755
--- a/scripts/interwiki.py
+++ b/scripts/interwiki.py
@@ -315,7 +315,8 @@
interwiki_shownew: should interwiki.py display every new link it discovers?
interwiki_graph: output a graph PNG file on conflicts? You need pydot for
- this:
http://dkbza.org/pydot.html
+ this:
https://pypi.python.org/pypi/pydot/1.0.2
+
https://code.google.com/p/pydot/
interwiki_graph_format: the file format for interwiki graphs
@@ -1706,7 +1707,7 @@
and page.namespace() == 10 and
u'Country data' in
page.title(withNamespace=False))
# edit restriction on is-wiki
- #
http://is.wikipedia.org/wiki/Wikipediaspjall:V%C3%A9lmenni
+ #
https://is.wikipedia.org/wiki/Wikipediaspjall:V%C3%A9lmenni
# and zh-wiki for template namespace which prevents increasing the queue
# allow edits for the same conditions as -whenneeded
# or the last edit wasn't a bot
diff --git a/scripts/isbn.py b/scripts/isbn.py
index 36ba819..660c84e 100755
--- a/scripts/isbn.py
+++ b/scripts/isbn.py
@@ -52,7 +52,7 @@
}
# Maps each group number to the list of its publisher number ranges.
-# Taken from
http://www.isbn-international.org/converter/ranges.htm
+# Taken from
https://www.isbn-international.org/converter/ranges.htm
ranges = {
'0': [ # English speaking area
('00', '19'),
@@ -1263,7 +1263,7 @@
% self.code)
def calculateChecksum(self):
- # See
http://en.wikipedia.org/wiki/ISBN#Check_digit_in_ISBN_13
+ # See
https://en.wikipedia.org/wiki/ISBN#Check_digit_in_ISBN_13
sum = 0
for i in range(0, 13 - 1, 2):
sum += self.digits()[i]
@@ -1298,7 +1298,7 @@
Raises an InvalidIsbnException if the checksum shows that the
ISBN is incorrect.
"""
- # See
http://en.wikipedia.org/wiki/ISBN#Check_digit_in_ISBN_10
+ # See
https://en.wikipedia.org/wiki/ISBN#Check_digit_in_ISBN_10
sum = 0
for i in range(0, 9):
sum += (i + 1) * int(self.digits()[i])
diff --git a/scripts/lonelypages.py b/scripts/lonelypages.py
index 2dfa570..66a7dc6 100644
--- a/scripts/lonelypages.py
+++ b/scripts/lonelypages.py
@@ -10,7 +10,7 @@
¶ms;
-xml Retrieve information from a local XML dump (pages-articles
- or pages-meta-current, see
http://download.wikimedia.org).
+ or pages-meta-current, see
https://download.wikimedia.org).
Argument can also be given as "-xml:filename".
-page Only edit a specific page.
diff --git a/scripts/maintenance/wikimedia_sites.py
b/scripts/maintenance/wikimedia_sites.py
index fa48954..275237b 100644
--- a/scripts/maintenance/wikimedia_sites.py
+++ b/scripts/maintenance/wikimedia_sites.py
@@ -20,7 +20,7 @@
import pywikibot
from pywikibot.site import Family
-URL =
'http://wikistats.wmflabs.org/api.php?action=dump&table=%s&format=xml'
+URL =
'https://wikistats.wmflabs.org/api.php?action=dump&table=%s&format=xml'
familiesDict = {
'anarchopedia': 'anarchopedias',
diff --git a/scripts/misspelling.py b/scripts/misspelling.py
index 62bb918..13f6b08 100644
--- a/scripts/misspelling.py
+++ b/scripts/misspelling.py
@@ -94,7 +94,7 @@
correctSpelling = params[-1]
# On de.wikipedia, there are some cases where the
# misspelling is ambigous, see for example:
- #
http://de.wikipedia.org/wiki/Buthan
+ #
https://de.wikipedia.org/wiki/Buthan
for match in self.linkR.finditer(correctSpelling):
self.alternatives.append(match.group('title'))
diff --git a/scripts/noreferences.py b/scripts/noreferences.py
index 488393c..f1aedb3 100755
--- a/scripts/noreferences.py
+++ b/scripts/noreferences.py
@@ -11,7 +11,7 @@
¶ms;
-xml Retrieve information from a local XML dump (pages-articles
- or pages-meta-current, see
http://download.wikimedia.org).
+ or pages-meta-current, see
https://download.wikimedia.org).
Argument can also be given as "-xml:filename".
-namespace:n Number or name of namespace to process. The parameter can be
diff --git a/scripts/nowcommons.py b/scripts/nowcommons.py
index 2d80e02..ca91c9d 100644
--- a/scripts/nowcommons.py
+++ b/scripts/nowcommons.py
@@ -194,17 +194,17 @@
return nowCommons['_default']
def useHashGenerator(self):
- #
http://toolserver.org/~multichill/nowcommons.php?language=it&page=2&…
+ #
https://toolserver.org/~multichill/nowcommons.php?language=it&page=2&am…
lang = self.site.lang
num_page = 0
word_to_skip_translated = i18n.translate(self.site, word_to_skip)
images_processed = list()
while 1:
- url = ('http://toolserver.org/~multichill/nowcommons.php?'
+ url = ('https://toolserver.org/~multichill/nowcommons.php?'
'language=%s&page=%s&filter=') % (lang, num_page)
HTML_text = self.site.getUrl(url, no_hostname=True)
reg = r'<[Aa]
href="(?P<urllocal>.*?)">(?P<imagelocal>.*?)</[Aa]>
+?</td><td>\n\s*?'
- reg += r'<[Aa]
href="(?P<urlcommons>http://commons.wikimedia.org/.*?)" \
+ reg += r'<[Aa]
href="(?P<urlcommons>http[s]?://commons.wikimedia.org/.*?)" \
Image:(?P<imagecommons>.*?)</[Aa]>
+?</td><td>'
regex = re.compile(reg, re.UNICODE)
found_something = False
diff --git a/scripts/protect.py b/scripts/protect.py
index 9f2c383..cdee924 100644
--- a/scripts/protect.py
+++ b/scripts/protect.py
@@ -39,7 +39,7 @@
"""
#
-# Written by
http://it.wikisource.org/wiki/Utente:Qualc1
+# Written by
https://it.wikisource.org/wiki/Utente:Qualc1
# Created by modifying delete.py
#
# (c) Pywikibot team, 2008-2014
diff --git a/scripts/redirect.py b/scripts/redirect.py
index 016677a..d5649e9 100755
--- a/scripts/redirect.py
+++ b/scripts/redirect.py
@@ -22,7 +22,7 @@
and arguments can be:
-xml Retrieve information from a local XML dump
- (
http://download.wikimedia.org). Argument can also be given as
+ (
https://download.wikimedia.org). Argument can also be given as
"-xml:filename.xml". Cannot be used with -fullscan or -moves.
-fullscan Retrieve redirect pages from live wiki, not from a special page
diff --git a/scripts/reflinks.py b/scripts/reflinks.py
index 3d1b71e..225f6f1 100644
--- a/scripts/reflinks.py
+++ b/scripts/reflinks.py
@@ -1,10 +1,10 @@
# -*- coding: utf-8 -*-
"""
This bot will search for references which are only made of a link without title,
-(i.e. <ref>[http://www.google.fr/]</ref> or
<ref>http://www.google.fr/</ref>)
+(i.e. <ref>[https://www.google.fr/]</ref> or
<ref>https://www.google.fr/</ref>)
and will fetch the html title from the link to use it as the title of the wiki
link in the reference, i.e.
-<ref>[http://www.google.fr/search?q=test test - Google Search]</ref>
+<ref>[https://www.google.fr/search?q=test test - Google Search]</ref>
The bot checks every 20 edits a special stop page : if the page has been edited,
it stops.
diff --git a/scripts/replace.py b/scripts/replace.py
index f83b642..3e2f788 100755
--- a/scripts/replace.py
+++ b/scripts/replace.py
@@ -10,7 +10,7 @@
&params;
-xml Retrieve information from a local XML dump (pages-articles
- or pages-meta-current, see
http://download.wikimedia.org).
+ or pages-meta-current, see
https://download.wikimedia.org).
Argument can also be given as "-xml:filename".
-page Only edit a specific page.
@@ -91,7 +91,7 @@
If you want to change templates from the old syntax, e.g. {{msg:Stub}}, to the
new syntax, e.g. {{Stub}}, download an XML dump file (pages-articles) from
-http://download.wikimedia.org, then use this command:
+https://download.wikimedia.org, then use this command:
python replace.py -xml -regex "{{msg:(.*?)}}" "{{\\1}}"
diff --git a/scripts/script_wui.py b/scripts/script_wui.py
index 13ea70f..3bbe3fd 100755
--- a/scripts/script_wui.py
+++ b/scripts/script_wui.py
@@ -28,13 +28,13 @@
# @section FRAMEWORK
#
# Python wikipedia bot framework, DrTrigonBot.
-# @see
http://pywikipediabot.sourceforge.net/
-# @see
http://de.wikipedia.org/wiki/Benutzer:DrTrigonBot
+# @see
https://www.mediawiki.org/wiki/Pywikibot
+# @see
https://de.wikipedia.org/wiki/Benutzer:DrTrigonBot
#
# @section LICENSE
#
# Distributed under the terms of the MIT license.
-# @see
http://de.wikipedia.org/wiki/MIT-Lizenz
+# @see
https://de.wikipedia.org/wiki/MIT-Lizenz
#
# --- --- --- --- --- --- --- --- --- --- --- --- --- --- ---
# @todo Simulationen werden ausgeführt und das Resultat mit eindeutiger
@@ -71,7 +71,7 @@
import resource
import re
-#
http://labix.org/lunatic-python
+#
https://labix.org/lunatic-python
try:
import lua # installed packages (on f15: 'lua',
'lunatic-python')
except ImportError:
@@ -199,7 +199,7 @@
# Define a function for the thread
def main_script(page, rev=None, params=None):
#
http://opensourcehacker.com/2011/02/23/temporarily-capturing-python-logging…
- #
http://docs.python.org/release/2.6/library/logging.html
+ #
https://docs.python.org/release/2.6/library/logging.html
from StringIO import StringIO
import logging
diff --git a/scripts/selflink.py b/scripts/selflink.py
index 87ee18b..f618aaa 100644
--- a/scripts/selflink.py
+++ b/scripts/selflink.py
@@ -146,7 +146,7 @@
oldText = page.text
# Inside image maps, don't touch selflinks, as they're used
# to create tooltip labels. See for example:
- #
http://de.wikipedia.org/w/index.php?diff=next&oldid=35721641
+ #
https://de.wikipedia.org/w/index.php?diff=next&oldid=35721641
if '<imagemap>' in page.text:
pywikibot.output(
u'Skipping page %s because it contains an image map.'
diff --git a/scripts/template.py b/scripts/template.py
index 02d45f9..67c2bd0 100755
--- a/scripts/template.py
+++ b/scripts/template.py
@@ -24,7 +24,7 @@
the same effect.
-xml retrieve information from a local dump
- (
http://download.wikimedia.org). If this argument isn\'t given,
+ (
https://download.wikimedia.org). If this argument isn\'t given,
info will be loaded from the maintenance page of the live wiki.
argument can also be given as "-xml:filename.xml".
diff --git a/scripts/weblinkchecker.py b/scripts/weblinkchecker.py
index 3ee013e..d69c4a7 100644
--- a/scripts/weblinkchecker.py
+++ b/scripts/weblinkchecker.py
@@ -122,7 +122,7 @@
ignorelist = [
# Officially reserved for testing, documentation, etc. in
- #
http://tools.ietf.org/html/rfc2606#page-2
+ #
https://tools.ietf.org/html/rfc2606#page-2
# top-level domains:
re.compile('.*[\./@]test(/.*)?'),
re.compile('.*[\./@]example(/.*)?'),
@@ -244,7 +244,7 @@
self.header = {
# 'User-agent': pywikibot.useragent,
# we fake being Firefox because some webservers block unknown
- # clients, e.g.
http://images.google.de/images?q=Albit gives a 403
+ # clients, e.g.
https://images.google.de/images?q=Albit gives a 403
# when using the PyWikipediaBot user agent.
'User-agent': 'Mozilla/5.0 (X11; U; Linux i686; de; rv:1.8)
Gecko/20051128 SUSE/1.5-0.1 Firefox/1.5',
'Accept':
'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
@@ -392,7 +392,7 @@
except httplib.error, error:
return False, u'HTTP Error: %s' % error.__class__.__name__
except socket.error, error:
- #
http://docs.python.org/lib/module-socket.html :
+ #
https://docs.python.org/2/library/socket.html :
# socket.error :
# The accompanying value is either a string telling what went
# wrong or a pair (errno, string) representing an error
@@ -519,7 +519,7 @@
Example:
dict = {
- 'http://www.example.org/page': [
+ 'https://www.example.org/page': [
('WikiPageTitle', DATE, '404: File not found'),
('WikiPageName2', DATE, '404: File not found'),
]
diff --git a/scripts/welcome.py b/scripts/welcome.py
index 4b9ead3..86aeaa7 100644
--- a/scripts/welcome.py
+++ b/scripts/welcome.py
@@ -5,17 +5,17 @@
Albanian, Italian Wikipedia, Wikimedia Commons and English Wikiquote.
Note: You can download the latest version available
-from here:
http://botwiki.sno.cc/wiki/Python:Welcome.py
+from here:
https://www.mediawiki.org/wiki/Manual:Pywikibot/welcome.py
Ensure you have community support before running this bot!
URLs to current implementations:
-* Wikimedia Commons:
http://commons.wikimedia.org/wiki/Commons:Welcome_log
-* Dutch Wikipedia:
http://nl.wikipedia.org/wiki/Wikipedia:Logboek_welkom
-* Italian Wikipedia:
http://it.wikipedia.org/wiki/Wikipedia:Benvenuto_log
-* English Wikiquote:
http://en.wikiquote.org/wiki/Wikiquote:Welcome_log
-* Persian Wikipedia:
http://fa.wikipedia.org/wiki/ویکیپدیا:سیاهه_خوشامد
-* Korean Wikipedia:
http://ko.wikipedia.org/wiki/위키백과:Welcome_log
+* Wikimedia Commons:
https://commons.wikimedia.org/wiki/Commons:Welcome_log
+* Dutch Wikipedia:
https://nl.wikipedia.org/wiki/Wikipedia:Logboek_welkom
+* Italian Wikipedia:
https://it.wikipedia.org/wiki/Wikipedia:Benvenuto_log
+* English Wikiquote:
https://en.wikiquote.org/wiki/Wikiquote:Welcome_log
+* Persian Wikipedia:
https://fa.wikipedia.org/wiki/ویکیپدیا:سیاهه_خوشامد
+* Korean Wikipedia:
https://ko.wikipedia.org/wiki/위키백과:Welcome_log
Everything that needs customisation to support additional projects is
indicated by comments.
diff --git a/tests/data/article-pear.xml b/tests/data/article-pear.xml
index 0203a26..0ab16a6 100644
--- a/tests/data/article-pear.xml
+++ b/tests/data/article-pear.xml
@@ -1,7 +1,7 @@
-<mediawiki
xmlns="http://www.mediawiki.org/xml/export-0.3/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.mediawiki.org/xml/export-0.3/
http://www.mediawiki.org/xml/export-0.3.xsd" version="0.3"
xml:lang="en">
+<mediawiki
xmlns="https://www.mediawiki.org/xml/export-0.3/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://www.mediawiki.org/xml/export-0.3/
https://www.mediawiki.org/xml/export-0.3.xsd" version="0.3"
xml:lang="en">
<siteinfo>
<sitename>Wikipedia</sitename>
- <base>http://en.wikipedia.org/wiki/Main_Page</base>
+ <base>https://en.wikipedia.org/wiki/Main_Page</base>
<generator>MediaWiki 1.15alpha</generator>
<case>first-letter</case>
<namespaces>
diff --git a/tests/data/article-pyrus.xml b/tests/data/article-pyrus.xml
index dfa82d3..38ae49c 100644
--- a/tests/data/article-pyrus.xml
+++ b/tests/data/article-pyrus.xml
@@ -1,7 +1,7 @@
-<mediawiki
xmlns="http://www.mediawiki.org/xml/export-0.3/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.mediawiki.org/xml/export-0.3/
http://www.mediawiki.org/xml/export-0.3.xsd" version="0.3"
xml:lang="en">
+<mediawiki
xmlns="https://www.mediawiki.org/xml/export-0.3/"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="https://www.mediawiki.org/xml/export-0.3/
https://www.mediawiki.org/xml/export-0.3.xsd" version="0.3"
xml:lang="en">
<siteinfo>
<sitename>Wikipedia</sitename>
- <base>http://en.wikipedia.org/wiki/Main_Page</base>
+ <base>https://en.wikipedia.org/wiki/Main_Page</base>
<generator>MediaWiki 1.16alpha-wmf</generator>
<case>first-letter</case>
<namespaces>
diff --git a/tests/ipregex_tests.py b/tests/ipregex_tests.py
index 1e05474..2d97459 100644
--- a/tests/ipregex_tests.py
+++ b/tests/ipregex_tests.py
@@ -240,7 +240,7 @@
self.ipv6test(False, ":::")
# Additional test cases
- # from
http://rt.cpan.org/Public/Bug/Display.html?id=50693
+ # from
https://rt.cpan.org/Public/Bug/Display.html?id=50693
self.ipv6test(True, "2001:0db8:85a3:0000:0000:8a2e:0370:7334")
self.ipv6test(True, "2001:db8:85a3:0:0:8a2e:370:7334")
diff --git a/tests/pages/enwiki_help_editing.meta b/tests/pages/enwiki_help_editing.meta
index e3a154e..64c439c 100644
--- a/tests/pages/enwiki_help_editing.meta
+++ b/tests/pages/enwiki_help_editing.meta
@@ -1,2 +1,2 @@
-Help:Editing. (2012, March 9). In Wikipedia, The Free Encyclopedia. Retrieved 17:43,
March 18, 2012, from
http://en.wikipedia.org/w/index.php?title=Help:Editing&oldid=480978372
+Help:Editing. (2012, March 9). In Wikipedia, The Free Encyclopedia. Retrieved 17:43,
March 18, 2012, from
https://en.wikipedia.org/w/index.php?title=Help:Editing&oldid=480978372
Text is available under the Creative Commons Attribution-ShareAlike License; additional
terms may apply.
diff --git a/tests/weblib_tests.py b/tests/weblib_tests.py
index e532f07..8d2a97c 100644
--- a/tests/weblib_tests.py
+++ b/tests/weblib_tests.py
@@ -18,14 +18,14 @@
class TestArchiveSites(unittest.TestCase):
def testInternetArchiveNewest(self):
- archivedversion = weblib.getInternetArchiveURL('http://google.com')
+ archivedversion = weblib.getInternetArchiveURL('https://google.com')
parsed = urlparse(archivedversion)
self.assertIn(parsed.scheme, [u'http', u'https'])
self.assertEqual(parsed.netloc, u'web.archive.org')
self.assertTrue(parsed.path.strip('/').endswith('www.google.com'),
parsed.path)
def testInternetArchiveOlder(self):
- archivedversion = weblib.getInternetArchiveURL('http://google.com',
'200606')
+ archivedversion = weblib.getInternetArchiveURL('https://google.com',
'200606')
parsed = urlparse(archivedversion)
self.assertIn(parsed.scheme, [u'http', u'https'])
self.assertEqual(parsed.netloc, u'web.archive.org')
@@ -33,7 +33,7 @@
self.assertIn('200606', parsed.path)
def testWebCiteOlder(self):
- archivedversion = weblib.getWebCitationURL('http://google.com',
'20130101')
+ archivedversion = weblib.getWebCitationURL('https://google.com',
'20130101')
self.assertEqual(archivedversion,
'http://www.webcitation.org/6DHSeh2L0')
if __name__ == '__main__':
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index 0f3cd5c..7422f14 100644
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -285,8 +285,8 @@
def test_set_website(self):
claim = pywikibot.Claim(wikidata, 'P856')
self.assertEquals(claim.type, 'url')
-
claim.setTarget('http://en.wikipedia.org/')
- self.assertEquals(claim.target, 'http://en.wikipedia.org/')
+
claim.setTarget('https://en.wikipedia.org/')
+ self.assertEquals(claim.target, 'https://en.wikipedia.org/')
def test_set_date(self):
claim = pywikibot.Claim(wikidata, 'P569')
--
To view, visit
https://gerrit.wikimedia.org/r/141726
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I2c3c58d0a52d1a80c1871bb9c281a09ce27024d9
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Withoutaname <drevitchi(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Withoutaname <drevitchi(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>