jenkins-bot has submitted this change. ( https://gerrit.wikimedia.org/r/c/pywikibot/core/+/883275 )
Change subject: [Bugfix] Copy snak IDs/hashes when using editEntity
......................................................................
[Bugfix] Copy snak IDs/hashes when using editEntity
Previously, any new claims added manually to the item via direct access to `item.claims` were added again every time editEntity was called. By copying back the snak IDs/hashes that the API assigns when a new claim/qualifier/reference is created, duplicates should no longer occur.
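For context, a minimal sketch of the workflow that used to produce duplicates (the site, item ID, property ID and target value below are placeholders and not part of the patch; 'P12345' is assumed to be a string-datatype property on the repository):

    import pywikibot

    repo = pywikibot.Site('test', 'wikidata').data_repository()
    item = pywikibot.ItemPage(repo, 'Q42')
    item.get()

    claim = pywikibot.Claim(repo, 'P12345')
    claim.setTarget('example value')
    # claim attached via direct access to item.claims, not item.addClaim()
    item.claims.setdefault('P12345', []).append(claim)

    item.editEntity()  # first save: the claim is created on the repo
    item.editEntity()  # used to re-add the same claim, because claim.snak
                       # was never filled in locally after the first save
    # With this change the new statement ID is copied back from the API
    # response, so the second call no longer creates a duplicate:
    assert claim.snak is not None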
Note: To allow flake8 to pass, I had to shorten some variable names. The original names can be found in a comment on the patchset.
Bug: T327607
Change-Id: I26d043702d7cef63afcfc103ce7206ca9c8e5c41
---
M pywikibot/page/_wikibase.py
M tests/wikibase_edit_tests.py
2 files changed, 75 insertions(+), 0 deletions(-)
Approvals:
  Matěj Suchánek: Looks good to me, approved
  jenkins-bot: Verified
diff --git a/pywikibot/page/_wikibase.py b/pywikibot/page/_wikibase.py
index 405af2b..ca4e2f1 100644
--- a/pywikibot/page/_wikibase.py
+++ b/pywikibot/page/_wikibase.py
@@ -299,8 +299,12 @@
         :param data: Data to be saved
         """
+        update_self = False
         if data is None:
             data = self.toJSON(diffto=getattr(self, '_content', None))
+            # We only want to copy the IDs from the updates
+            # if data was not passed in.
+            update_self = True
         else:
             data = self._normalizeData(data)
@@ -321,6 +325,27 @@
         if hasattr(self, '_content'):
             del self._content
         self.latest_revision_id = updates['entity'].get('lastrevid')
+        if update_self and 'claims' in updates['entity']:
+            updated_claims = updates['entity']['claims']
+            for claim_prop_id, statements in updated_claims.items():
+                for claim_index, statement in enumerate(statements):
+                    claim = self.claims[claim_prop_id][claim_index]
+                    claim.snak = statement['id']
+                    claim.on_item = self
+                    updated_qualifiers = statement.get('qualifiers', {})
+                    for qual_propid, qualifier in updated_qualifiers.items():
+                        for qual_index, qual_statement in enumerate(qualifier):
+                            target_qual_prop = claim.qualifiers[qual_propid]
+                            target_qual = target_qual_prop[qual_index]
+                            target_qual.hash = qual_statement['hash']
+                    updated_references = statement.get('references', [])
+                    for ref_grp_idx, ref_grp in enumerate(updated_references):
+                        for ref_propid, reference in ref_grp['snaks'].items():
+                            for ref_index, ref_stat in enumerate(reference):
+                                target_ref_grp = claim.references[ref_grp_idx]
+                                target_ref_prop = target_ref_grp[ref_propid]
+                                target_ref = target_ref_prop[ref_index]
+                                target_ref.hash = ref_stat['hash']
     def concept_uri(self) -> str:
         """
diff --git a/tests/wikibase_edit_tests.py b/tests/wikibase_edit_tests.py
index 0de943f..c0892cf 100755
--- a/tests/wikibase_edit_tests.py
+++ b/tests/wikibase_edit_tests.py
@@ -127,6 +127,37 @@
         item = pywikibot.ItemPage(testsite)
         item.editEntity(data)
+    def test_edit_entity_propogation(self):
+        """Test that ``ItemPage.editEntity`` propagates changes to claims."""
+        testsite = self.get_repo()
+        item = pywikibot.ItemPage(testsite)
+        claim = pywikibot.Claim(testsite, 'P97339')
+        claim.setTarget('test')
+        qual = pywikibot.Claim(testsite, 'P97339')
+        qual.setTarget('test')
+        ref = pywikibot.Claim(testsite, 'P97339')
+        ref.setTarget('test')
+        claim.addQualifier(qual)
+        claim.addSource(ref)
+        item.editEntity()
+        self.assertIsNotNone(claim.snak)
+        self.assertIsNotNone(qual.hash)
+        self.assertIsNotNone(ref.hash)
+        self.assertSame(claim.on_item, item)
+        self.assertSame(qual.on_item, item)
+        self.assertSame(ref.on_item, item)
+        qual = pywikibot.Claim(testsite, 'P97339')
+        qual.setTarget('test')
+        ref = pywikibot.Claim(testsite, 'P97339')
+        ref.setTarget('test')
+        claim.qualifiers[qual.id].append(qual)
+        claim.sources[0][ref.id].append(ref)
+        item.editEntity()
+        self.assertIsNotNone(qual.hash)
+        self.assertIsNotNone(ref.hash)
+        self.assertSame(qual.on_item, item)
+        self.assertSame(ref.on_item, item)
+
     def test_edit_entity_new_property(self):
         """Test creating a new property using ``PropertyPage.editEntity``."""
         testsite = self.get_repo()