jenkins-bot has submitted this change and it was merged.
Change subject: Fix ItemPage editEntity, and test cases
......................................................................
Fix ItemPage editEntity, and test cases
Iba1fdce1cc199cda0b7f83febdf48ce6a11100e0 resulted in data being normalized again even when it was already normalized, making the data unrecognizable to the Wikibase API.
The normalization routines __normalizeLanguages and __normalizeData have been converted to static methods with a single underscore prefix so they can be accessed easily from unit tests.
Also fix docstrings to be PEP 257 compliant.
Bug: 69401
Change-Id: Ia2ff456c550670de924099c5993f109c676f9d39
---
M pywikibot/page.py
M tests/wikibase_tests.py
2 files changed, 100 insertions(+), 16 deletions(-)

Approvals:
  Ladsgroup: Looks good to me, approved
  jenkins-bot: Verified
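[Editor's note: a minimal sketch, not part of the change, of the expansion the relocated helper performs; the method name and the sample data are taken from the diff and the new test cases below.]

from pywikibot.page import WikibasePage

# Plain strings are expanded into the structure the Wikibase API expects.
data = {'labels': {'en': 'Foo'}, 'aliases': {'en': ['Bah']}}
normalized = WikibasePage._normalizeData(data)
# {'labels': {'en': {'language': 'en', 'value': 'Foo'}},
#  'aliases': {'en': [{'language': 'en', 'value': 'Bah'}]}}

# A second pass now leaves the structure untouched; previously,
# already-normalized data was wrapped again, which is the bug being fixed.
assert WikibasePage._normalizeData(normalized) == normalized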
diff --git a/pywikibot/page.py b/pywikibot/page.py
index fb5bffe..bb33ccf 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -2756,14 +2756,16 @@
         self.get()
         return self.lastrevid

-    def __normalizeLanguages(self, data):
+    @staticmethod
+    def _normalizeLanguages(data):
         """
         Helper function to replace site objects with their language codes.

-        @param data: The dict to check
+        @param data: The dict to normalize.
         @type data: dict

-        @return: dict
+        @return: the altered dict from parameter data.
+        @rtype: dict
         """
         for key in data:
             if isinstance(key, pywikibot.site.BaseSite):
@@ -2771,18 +2773,32 @@
                 del data[key]
         return data

-    def __normalizeData(self, data):
-        for prop in ('labels', 'descriptions', 'aliases'):
-            if prop in data:
-                data[prop] = self.__normalizeLanguages(data[prop])
-                if prop == 'aliases':
-                    for key, values in data[prop].iteritems():
-                        for index, value in enumerate(values):
-                            data[prop][key][index] = {'language': key,
-                                                      'value': value}
-                else:
-                    for key, value in data[prop].iteritems():
-                        data[prop][key] = {'language': key, 'value': value}
+    @staticmethod
+    def _normalizeData(data):
+        """
+        Helper function to expand data into the Wikibase API structure.
+
+        @param data: The dict to normalize
+        @type data: dict
+
+        @return: the altered dict from parameter data.
+        @rtype: dict
+        """
+        for prop in ('labels', 'descriptions'):
+            if prop not in data:
+                continue
+            data[prop] = WikibasePage._normalizeLanguages(data[prop])
+            for key, value in data[prop].iteritems():
+                if isinstance(value, basestring):
+                    data[prop][key] = {'language': key, 'value': value}
+
+        if 'aliases' in data:
+            for key, values in data['aliases'].iteritems():
+                if (isinstance(values, list) and
+                        isinstance(values[0], basestring)):
+                    data['aliases'][key] = [{'language': key, 'value': value}
+                                            for value in values]
+        return data

     def getdbName(self, site):
@@ -2814,7 +2830,7 @@
         else:
             baserevid = None

-        data = self.__normalizeData(data)
+        data = WikibasePage._normalizeData(data)

         updates = self.repo.editEntity(self._defined_by(singular=True), data,
                                        baserevid=baserevid, **kwargs)
@@ -3473,6 +3489,9 @@
     def _formatDataValue(self):
         """
         Format the target into the proper JSON datavalue that Wikibase wants.
+
+        @return: Wikibase API representation with type and value.
+        @rtype: dict
         """
         return {'value': self._formatValue(),
                 'type': self.value_types.get(self.type, self.type)
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index a9142dd..9772da4 100644
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -13,8 +13,10 @@
 import os
 import pywikibot
 from pywikibot import pagegenerators
+from pywikibot.page import WikibasePage
 from pywikibot.data.api import APIError
 import json
+import copy

 from tests.utils import PywikibotTestCase, unittest

@@ -444,6 +446,69 @@
         self.assertEquals(len(wvlinks), 2)


+class TestWriteNormalizeLang(PywikibotTestCase):
+    """Test cases for routines that normalize languages in a dict.
+
+    Exercises WikibasePage._normalizeLanguages with data that is
+    not normalized and data which is already normalized.
+    """
+
+    def setUp(self):
+        super(TestWriteNormalizeLang, self).setUp()
+        self.site = pywikibot.Site('en', 'wikipedia')
+        self.lang_out = {'en': 'foo'}
+
+    def test_normalize_lang(self):
+        lang_in = {self.site: 'foo'}
+
+        response = WikibasePage._normalizeLanguages(lang_in)
+        self.assertEquals(response, self.lang_out)
+
+    def test_normalized_lang(self):
+        response = WikibasePage._normalizeData(
+            copy.deepcopy(self.lang_out))
+        self.assertEquals(response, self.lang_out)
+
+
+class TestWriteNormalizeData(PywikibotTestCase):
+    """Test cases for routines that normalize data for writing to Wikidata.
+
+    Exercises WikibasePage._normalizeData with data that is not normalized
+    and data which is already normalized.
+    """
+
+    def setUp(self):
+        super(TestWriteNormalizeData, self).setUp()
+        self.data_out = {'aliases':
+                         {'en':
+                             [
+                                 {'language': 'en',
+                                  'value': 'Bah'}
+                             ],
+                          },
+                         'labels':
+                         {'en':
+                             {'language': 'en',
+                              'value': 'Foo'},
+                          }
+                         }
+
+    def test_normalize_data(self):
+        data_in = {'aliases':
+                   {'en': ['Bah']},
+                   'labels':
+                   {'en': 'Foo'},
+                   }
+
+        response = WikibasePage._normalizeData(data_in)
+        self.assertEquals(response, self.data_out)
+
+    def test_normalized_data(self):
+        response = WikibasePage._normalizeData(
+            copy.deepcopy(self.data_out))
+        self.assertEquals(response, self.data_out)
+
+
 if __name__ == '__main__':
     try:
         unittest.main()
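[Editor's note: a hypothetical usage sketch of the fixed behaviour; the sandbox item Q4115189 is used only for illustration and is not part of this change.]

import pywikibot

site = pywikibot.Site('wikidata', 'wikidata')
repo = site.data_repository()
item = pywikibot.ItemPage(repo, 'Q4115189')

# Not yet normalized: editEntity() expands this via _normalizeData().
item.editEntity({'labels': {'en': 'Foo'}})

# Already normalized: with this fix the dict is passed through unchanged
# instead of being wrapped a second time.
item.editEntity({'labels': {'en': {'language': 'en', 'value': 'Foo'}}})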