jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/351714 )
Change subject: Build GeoShape and TabularData from shared base class
......................................................................
Build GeoShape and TabularData from shared base class
Follow-up to 351697.
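
A minimal usage sketch for reviewers (the Data: page titles below are
placeholders and must exist on Commons for validation to pass); the public
constructors and toWikibase() behaviour are unchanged by the refactor:

    import pywikibot

    repo = pywikibot.Site('wikidata', 'wikidata')       # Wikibase repository
    commons = pywikibot.Site('commons', 'commons')      # data page host

    # Both classes now go through _WbDataPage._validate(): the page must
    # exist, live on the type's data repository, and sit in the 'Data:'
    # namespace with the type-specific ending ('.map' or '.tab').
    shape = pywikibot.WbGeoShape(pywikibot.Page(commons, 'Data:Example.map'), repo)
    table = pywikibot.WbTabularData(pywikibot.Page(commons, 'Data:Example.tab'), repo)
    print(shape.toWikibase())  # 'Data:Example.map'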
Change-Id: I7d1fbf4a1759b29618e8a72be76a40bd73021c42
---
M pywikibot/__init__.py
1 file changed, 155 insertions(+), 104 deletions(-)
Approvals:
Multichill: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index 8a1ea57..5393f06 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -884,144 +884,195 @@
return cls(wb['text'], wb['language'])
-class WbGeoShape(_WbRepresentation):
+class _WbDataPage(_WbRepresentation):
+ """
+ A Wikibase representation for data pages.
+
+ A temporary implementation until T162336 has been resolved.
+
+ Note that this class cannot be used directly.
+ """
+
+ _items = ('page', )
+
+ @classmethod
+ def _get_data_site(cls, repo_site):
+ """
+ Return the site serving as a repository for a given data type.
+
+ Must be implemented in the extended class.
+
+ @param repo_site: The Wikibase site
+ @type repo_site: pywikibot.site.APISite
+ @rtype: pywikibot.site.APISite
+ """
+ raise NotImplementedError
+
+ @classmethod
+ def _get_type_specifics(cls, site):
+ """
+ Return the specifics for a given data type.
+
+ Must be implemented in the extended class.
+
+ The dict should have three keys:
+ * ending: str, required filetype-like ending in page titles.
+ * label: str, describing the data type for use in error messages.
+ * data_site: pywikibot.site.APISite, site serving as a repository for
+ the given data type.
+
+ @param site: The Wikibase site
+ @type site: pywikibot.site.APISite
+ @rtype: dict
+ """
+ raise NotImplementedError
+
+ @staticmethod
+ def _validate(page, data_site, ending, label):
+ """
+ Validate the provided page against general and type specific rules.
+
+ @param page: Page containing the data.
+ @type page: pywikibot.Page
+ @param data_site: The site serving as a repository for the given
+ data type.
+ @type data_site: pywikibot.site.APISite
+ @param ending: Required filetype-like ending in page titles.
+ E.g. '.map'
+ @type ending: str
+ @param label: Label describing the data type in error messages.
+ @type label: str
+ """
+ if not isinstance(page, Page):
+ raise ValueError('Page must be a pywikibot.Page object.')
+
+ # validate page exists
+ if not page.exists():
+ raise ValueError('Page must exist.')
+
+ # validate page is on the right site, and that site supports the type
+ if not data_site:
+ raise ValueError(
+ 'The provided site does not support {0}.'.format(label))
+ if page.site != data_site:
+ raise ValueError(
+ 'Page must be on the {0} repository site.'.format(label))
+
+ # validate page title fulfills hard-coded Wikibase requirement
+ # pcre regexp: '/^Data:[^\\[\\]#\\\:{|}]+\.map$/u' for geo-shape
+ # pcre regexp: '/^Data:[^\\[\\]#\\\:{|}]+\.tab$/u' for tabular-data
+ # As we have already checked for existence the following simplified
+ # check should be enough.
+ if not page.title().startswith('Data:') or \
+ not page.title().endswith(ending):
+ raise ValueError(
+ "Page must be in 'Data:' namespace and end in '{0}' "
+ "for {1}.".format(ending, label))
+
+ def __init__(self, page, site=None):
+ """
+ Create a new _WbDataPage object.
+
+ @param page: page containing the data
+ @type page: pywikibot.Page
+ @param site: The Wikibase site
+ @type site: pywikibot.site.DataSite
+ """
+ site = site or Site().data_repository()
+ specifics = type(self)._get_type_specifics(site)
+ _WbDataPage._validate(page, specifics['data_site'],
+ specifics['ending'], specifics['label'])
+ self.page = page
+
+ def toWikibase(self):
+ """
+ Convert the data to the value required by the Wikibase API.
+
+ @return: title of the data page incl. namespace
+ @rtype: str
+ """
+ return self.page.title()
+
+ @classmethod
+ def fromWikibase(cls, page_name, site, data_site):
+ """
+ Create a _WbDataPage from the JSON data given by the Wikibase API.
+
+ @param page_name: page name from Wikibase value
+ @type page_name: str
+ @param site: The Wikibase site
+ @type site: pywikibot.site.DataSite
+ @rtype: pywikibot._WbDataPage
+ """
+ data_site = cls._get_data_site(site)
+ page = Page(data_site, page_name)
+ return cls(page, site)
+
+
+class WbGeoShape(_WbDataPage):
"""
A Wikibase geo-shape representation.
-
- A temporary implementation until T162336 has been resolved.
"""
- _items = ('page', )
-
- def __init__(self, page, site=None):
+ @classmethod
+ def _get_data_site(cls, site):
"""
- Create a new WbGeoShape object.
+ Return the site serving as a geo-shape repository.
- @param page: page containing the map data
- @type text: pywikibot.Page
@param site: The Wikibase site
@type site: pywikibot.site.DataSite
+ @rtype: pywikibot.site.APISite
"""
- site = site or Site().data_repository()
- if not isinstance(page, Page):
- raise ValueError('page must be a pywikibot.Page object.')
-
- # validate page exists
- if not page.exists():
- raise ValueError('page must exist.')
-
- # validate page is on the right site, and that site supports geo-shapes
- geo_shape_site = site.geo_shape_repository()
- if not geo_shape_site:
- raise ValueError('the provided site does not support geo-shapes.')
- if page.site != geo_shape_site:
- raise ValueError('page must be on the geo-shape repository site.')
-
- # validate page title fulfills hard-coded Wikibase requirement
- # pcre regexp: '/^Data:[^\\[\\]#\\\:{|}]+\.map$/u'
- # As we have already checked for existence the following simplified
- # check should be enough.
- if not page.title().startswith('Data:') or \
- not page.title().endswith('.map'):
- raise ValueError(
- "page must be a '.map' page in the 'Data:' namespace.")
-
- self.page = page
-
- def toWikibase(self):
- """
- Convert the data to the value required by the Wikibase API.
-
- @return: title of the geo-shape page incl. namespace
- @rtype: str
- """
- return self.page.title()
+ return site.geo_shape_repository()
@classmethod
- def fromWikibase(cls, page_name, site):
+ def _get_type_specifics(cls, site):
"""
- Create a WbGeoShape from the JSON data given by the Wikibase API.
+ Return the specifics for WbGeoShape.
- @param page_name: page name from Wikibase value
- @type page_name: str
@param site: The Wikibase site
@type site: pywikibot.site.DataSite
- @rtype: pywikibot.WbGeoShape
+ @rtype: dict
"""
- geo_shape_site = site.geo_shape_repository()
- page = Page(geo_shape_site, page_name)
- return cls(page, site)
+ specifics = {
+ 'ending': '.map',
+ 'label': 'geo-shape',
+ 'data_site': cls._get_data_site(site)
+ }
+ return specifics
-class WbTabularData(_WbRepresentation):
+class WbTabularData(_WbDataPage):
"""
A Wikibase tabular-data representation.
-
- A temporary implementation until T162336 has been resolved.
"""
- _items = ('page', )
-
- def __init__(self, page, site=None):
+ @classmethod
+ def _get_data_site(cls, site):
"""
- Create a new WbTabularData object.
+ Return the site serving as a tabular-data repository.
- @param page: page containing the tabular data
- @type text: pywikibot.Page
@param site: The Wikibase site
@type site: pywikibot.site.DataSite
+ @rtype: pywikibot.site.APISite
"""
- site = site or Site().data_repository()
- if not isinstance(page, Page):
- raise ValueError('page must be a pywikibot.Page object.')
-
- # validate page exists
- if not page.exists():
- raise ValueError('page must exist.')
-
- # validate page is on the right site, and site supports tabular-data
- tabular_data_site = site.tabular_data_repository()
- if not tabular_data_site:
- raise ValueError(
- 'the provided site does not support tabular-data.')
- if page.site != tabular_data_site:
- raise ValueError(
- 'page must be on the tabular-data repository site.')
-
- # validate page title fulfills hard-coded Wikibase requirement
- # pcre regexp: '/^Data:[^\\[\\]#\\\:{|}]+\.tab$/u'
- # As we have already checked for existence the following simplified
- # check should be enough.
- if not page.title().startswith('Data:') or \
- not page.title().endswith('.tab'):
- raise ValueError(
- "page must be a '.tab' page in the 'Data:' namespace.")
-
- self.page = page
-
- def toWikibase(self):
- """
- Convert the data to the value required by the Wikibase API.
-
- @return: title of the tabular-data page incl. namespace
- @rtype: str
- """
- return self.page.title()
+ return site.tabular_data_repository()
@classmethod
- def fromWikibase(cls, page_name, site):
+ def _get_type_specifics(cls, site):
"""
- Create a WbTabularData from the JSON data given by the Wikibase API.
+ Return the specifics for WbTabularData.
- @param page_name: page name from Wikibase value
- @type page_name: str
@param site: The Wikibase site
@type site: pywikibot.site.DataSite
- @rtype: pywikibot.WbTabularData
+ @rtype: dict
"""
- tabular_data_site = site.tabular_data_repository()
- page = Page(tabular_data_site, page_name)
- return cls(page, site)
+ specifics = {
+ 'ending': '.tab',
+ 'label': 'tabular-data',
+ 'data_site': cls._get_data_site(site)
+ }
+ return specifics
_sites = {}
--
To view, visit https://gerrit.wikimedia.org/r/351714
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I7d1fbf4a1759b29618e8a72be76a40bd73021c42
Gerrit-PatchSet: 6
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Lokal Profil <lokal.profil(a)gmail.com>
Gerrit-Reviewer: Dalba <dalba.wiki(a)gmail.com>
Gerrit-Reviewer: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Multichill <maarten(a)mdammers.nl>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged. ( https://gerrit.wikimedia.org/r/293626 )
Change subject: Get thumburl information in FilePage()
......................................................................
Get thumburl information in FilePage()
Add get_file_url() to retrieve the file url and generate thumburl information.
Deprecate page.fileUrl(), as the file url is returned by get_file_url() by default.
When a thumburl is requested, the method relies directly on the API
implementation and makes one API call per requested thumburl,
which might not be efficient if several thumburls are needed.
Tests added.
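
An illustrative usage sketch (assuming network access; the file is the one
used in the new tests):

    import pywikibot

    site = pywikibot.Site('commons', 'commons')
    image = pywikibot.FilePage(site, 'File:Albert Einstein Head.jpg')

    # Without arguments the plain file url from latest_file_info is returned.
    print(image.get_file_url())
    # Requesting a thumburl triggers one API call per invocation.
    print(image.get_file_url(url_width=100))
    # Thumb dimensions are afterwards cached on latest_file_info.
    print(image.latest_file_info.thumbwidth, image.latest_file_info.thumbheight)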
Bug: T137011
Change-Id: I3054cd96292f8976f60f3a3f470339305f223efe
---
M pywikibot/page.py
M pywikibot/site.py
M tests/file_tests.py
3 files changed, 115 insertions(+), 4 deletions(-)
Approvals:
jenkins-bot: Verified
Xqt: Looks good to me, approved
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 8e4a40f..78c2f79 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -2437,11 +2437,44 @@
self._imagePageHtml = http.request(self.site, path)
return self._imagePageHtml
+ @deprecated('get_file_url')
def fileUrl(self):
"""Return the URL for the file described on this page."""
- # TODO add scaling option?
return self.latest_file_info.url
+ def get_file_url(self, url_width=None, url_height=None, url_param=None):
+ """
+ Return the url or the thumburl of the file described on this page.
+
+ Fetch the information if not available.
+
+ Once retrieved, thumburl information will also be accessible as
+ latest_file_info attributes, named as in [1]:
+ - url, thumburl, thumbwidth and thumbheight
+
+ Parameters correspond to iiprops in:
+ [1] U{https://www.mediawiki.org/wiki/API:Imageinfo}
+
+ Parameter validation and error handling are left to the API call.
+
+ @param url_width: see iiurlwidth in [1]
+ @param url_height: see iiurlheight in [1]
+ @param url_param: see iiurlparam in [1]
+
+ @return: latest file url or thumburl
+ @rtype: unicode
+
+ """
+ # Plain url is requested.
+ if url_width is None and url_height is None and url_param is None:
+ return self.latest_file_info.url
+
+ # Thumburl is requested.
+ self.site.loadimageinfo(self, history=not self._file_revisions,
+ url_width=url_width, url_height=url_height,
+ url_param=url_param)
+ return self.latest_file_info.thumburl
+
@deprecated("fileIsShared")
def fileIsOnCommons(self):
"""
diff --git a/pywikibot/site.py b/pywikibot/site.py
index 34a774a..7c778c9 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -2987,13 +2987,27 @@
)
self._update_page(page, query)
- def loadimageinfo(self, page, history=False):
+ def loadimageinfo(self, page, history=False,
+ url_width=None, url_height=None, url_param=None):
"""Load image info from api and save in page attributes.
+ Parameters correspond to iiprops in:
+ [1] U{https://www.mediawiki.org/wiki/API:Imageinfo}
+
+ Parameter validation and error handling are left to the API call.
+
@param history: if true, return the image's version history
+ @param url_width: see iiurlwidth in [1]
+ @param url_height: see iiurlheight in [1]
+ @param url_param: see iiurlparam in [1]
+
"""
title = page.title(withSection=False)
- args = {"titles": title}
+ args = {'titles': title,
+ 'iiurlwidth': url_width,
+ 'iiurlheight': url_height,
+ 'iiurlparam': url_param,
+ }
if not history:
args["total"] = 1
query = self._generator(api.PropertyGenerator,
diff --git a/tests/file_tests.py b/tests/file_tests.py
index 2065bbb..483b484 100644
--- a/tests/file_tests.py
+++ b/tests/file_tests.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
"""FilePage tests."""
#
-# (C) Pywikibot team, 2014
+# (C) Pywikibot team, 2017
#
# Distributed under the terms of the MIT license.
#
@@ -136,6 +136,8 @@
family = 'wikipedia'
code = 'test'
+ file_name = 'File:Albert Einstein Head.jpg'
+
cached = True
def test_file_info_with_no_page(self):
@@ -155,6 +157,68 @@
image = image.latest_file_info
+class TestFilePageLatestFileInfo(TestCase):
+
+ """Test FilePage.latest_file_info.
+
+ These tests cover properties and methods in FilePage that rely
+ on site.loadimageinfo.
+
+ """
+
+ family = 'commons'
+ code = 'commons'
+
+ file_name = 'File:Albert Einstein Head.jpg'
+
+ cached = True
+
+ def setUp(self):
+ """Create File page."""
+ super(TestFilePageLatestFileInfo, self).setUp()
+ self.image = pywikibot.FilePage(self.site, self.file_name)
+
+ def test_get_file_url(self):
+ """Get File url."""
+ self.assertTrue(self.image.exists())
+ self.assertEqual(self.image.get_file_url(),
+ 'https://upload.wikimedia.org/wikipedia/commons/'
+ 'd/d3/Albert_Einstein_Head.jpg')
+ self.assertEqual(self.image.latest_file_info.url,
+ 'https://upload.wikimedia.org/wikipedia/commons/'
+ 'd/d3/Albert_Einstein_Head.jpg')
+
+ def test_get_file_url_thumburl_from_width(self):
+ """Get File thumburl from width."""
+ self.assertTrue(self.image.exists())
+ # url_param has no precedence over height/width.
+ self.assertEqual(self.image.get_file_url(url_width=100, url_param='1000px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
+ self.assertEqual(self.image.latest_file_info.thumbwidth, 100)
+ self.assertEqual(self.image.latest_file_info.thumbheight, 133)
+
+ def test_get_file_url_thumburl_from_height(self):
+ """Get File thumburl from height."""
+ self.assertTrue(self.image.exists())
+ # url_param has no precedence over height/width.
+ self.assertEqual(self.image.get_file_url(url_height=100, url_param='1000px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/75px-Albert_Einstein_Head.jpg')
+ self.assertEqual(self.image.latest_file_info.thumbwidth, 75)
+ self.assertEqual(self.image.latest_file_info.thumbheight, 100)
+
+ def test_get_file_url_thumburl_from_url_param(self):
+ """Get File thumburl from height."""
+ self.assertTrue(self.image.exists())
+ # Only url_param is given; it alone determines the thumb size.
+ self.assertEqual(self.image.get_file_url(url_param='100px'),
+ 'https://upload.wikimedia.org/wikipedia/commons/thumb/'
+ 'd/d3/Albert_Einstein_Head.jpg/100px-Albert_Einstein_Head.jpg')
+ self.assertEqual(self.image.latest_file_info.thumbwidth, 100)
+ self.assertEqual(self.image.latest_file_info.thumbheight, 133)
+
+
class TestDeprecatedFilePage(DeprecationTestCase):
"""Test deprecated parts of FilePage."""
--
To view, visit https://gerrit.wikimedia.org/r/293626
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I3054cd96292f8976f60f3a3f470339305f223efe
Gerrit-PatchSet: 10
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Magul <tomasz.magulski(a)gmail.com>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Xqt <info(a)gno.de>
Gerrit-Reviewer: jenkins-bot <>