http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11362
Revision: 11362
Author: legoktm
Date: 2013-04-08 20:07:32 +0000 (Mon, 08 Apr 2013)
Log Message:
-----------
New script which adds properties to pages yielded by a pagegenerator
Added Paths:
-----------
branches/rewrite/scripts/claimit.py
Added: branches/rewrite/scripts/claimit.py
===================================================================
--- branches/rewrite/scripts/claimit.py (rev 0)
+++ branches/rewrite/scripts/claimit.py 2013-04-08 20:07:32 UTC (rev 11362)
@@ -0,0 +1,54 @@
+# -*- coding: utf-8 -*-
+"""
+Copyright (C) 2013 Legoktm
+Copyright (C) 2013 Pywikipediabot team
+
+Distributed under the MIT License
+
+Usage:
+
+python claimit.py [pagegenerators] P1 Q2 P123 Q456
+
+You can use any typical pagegenerator to provide a list of pages
+
+Then list the property-->target pairs to add.
+"""
+import pywikibot
+from pywikibot import pagegenerators
+repo = pywikibot.Site().data_repository()
+
+
+def main():
+ gen = pagegenerators.GeneratorFactory()
+ claims = list()
+ for arg in pywikibot.handleArgs():
+ if gen.handleArg(arg):
+ continue
+ claims.append(arg)
+ if len(claims) % 2 != 0:
+ raise ValueError # or something.
+ real_claims = list()
+ c = 0
+ while c != len(claims):
+ claim = pywikibot.Claim(repo, claims[c])
+ claim.setTarget(pywikibot.ItemPage(repo, claims[c+1]))
+ real_claims.append(claim)
+ c += 2
+
+ generator = gen.getCombinedGenerator()
+
+ for page in generator:
+ item = pywikibot.ItemPage.fromPage(page)
+ pywikibot.output('Processing %s' % page)
+ if not item.exists():
+ pywikibot.output('%s doesn\'t have a wikidata item :(' % page)
+ #TODO FIXME: We should provide an option to create the page
+ continue
+
+ for claim in real_claims:
+ pywikibot.output('Adding %s --> %s' % (claim.getID(), claim.getTarget().getID()))
+ item.addClaim(claim)
+ #TODO FIXME: We should add a source for each claim that is added
+ #TODO FIXME: We need to check that we aren't adding a duplicate
+
+
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11358
Revision: 11358
Author: legoktm
Date: 2013-04-08 12:56:05 +0000 (Mon, 08 Apr 2013)
Log Message:
-----------
Rename getdbName since it was conflicting
Modified Paths:
--------------
branches/rewrite/pywikibot/page.py
Modified: branches/rewrite/pywikibot/page.py
===================================================================
--- branches/rewrite/pywikibot/page.py 2013-04-08 08:11:24 UTC (rev 11357)
+++ branches/rewrite/pywikibot/page.py 2013-04-08 12:56:05 UTC (rev 11358)
@@ -2312,7 +2312,7 @@
del data[key]
return data
- def __getdbName(self, site):
+ def getdbName(self, site):
"""
Helper function to normalize site
objects into dbnames
@@ -2446,7 +2446,7 @@
"""
if force or not hasattr(self, '_content'):
self.get(force=force)
- dbname = self.__getdbName(site)
+ dbname = self.getdbName(site)
if not dbname in self.sitelinks:
raise pywikibot.NoPage(self)
else:
@@ -2471,10 +2471,10 @@
Sites should be a list, with values either
being Site objects, or dbNames.
"""
- data = {}
+ data = list()
for site in sites:
- site = self.__getdbName(site)
- data[site] = {'site': site, 'title': ''}
+ site = self.getdbName(site)
+ data.append({'site': site, 'title': ''})
self.setSitelinks(data, **kwargs)
def setSitelinks(self, sitelinks, **kwargs):
@@ -2487,7 +2487,7 @@
data = {}
for obj in sitelinks:
if isinstance(obj, Page):
- dbName = self.__getdbName(obj.site)
+ dbName = self.getdbName(obj.site)
data[dbName] = {'site': dbName, 'title': obj.title()}
else:
#TODO: Do some verification here
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/11356
Revision: 11356
Author: siebrand
Date: 2013-04-07 14:50:30 +0000 (Sun, 07 Apr 2013)
Log Message:
-----------
Localisation updates from http://translatewiki.net.
Modified Paths:
--------------
branches/rewrite/scripts/i18n/redirect.py
branches/rewrite/scripts/i18n/weblinkchecker.py
Modified: branches/rewrite/scripts/i18n/redirect.py
===================================================================
--- branches/rewrite/scripts/i18n/redirect.py 2013-04-07 11:39:46 UTC (rev 11355)
+++ branches/rewrite/scripts/i18n/redirect.py 2013-04-07 14:50:30 UTC (rev 11356)
@@ -162,6 +162,7 @@
'bs': {
'redirect-fix-double': u'Bot: Popravlja dvostruka preusmjerenja na %(to)s',
'redirect-remove-broken': u' [[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Preusmjerenje]] na obrisanu ili nepostojeću stranicu',
+ 'redirect-fix-broken-moved': u'Bot: Neispravno preusmjerenje prema premještenoj stranici %(to)s',
'redirect-fix-loop': u'Robot: Popravlja petlje preusmjerenja na %(to)s',
'redirect-remove-loop': u' [[WP:CSD#G8|G8]]: [[Wikipedia:Redirect|Preusmjerenje]] pravi petlju na samo sebe',
'redirect-broken-redirect-template': u'{{Brisanje}}',
Modified: branches/rewrite/scripts/i18n/weblinkchecker.py
===================================================================
--- branches/rewrite/scripts/i18n/weblinkchecker.py 2013-04-07 11:39:46 UTC (rev 11355)
+++ branches/rewrite/scripts/i18n/weblinkchecker.py 2013-04-07 14:50:30 UTC (rev 11356)
@@ -45,6 +45,13 @@
'weblinkchecker-summary': u'Robot : A gemenn ez eus ul liamm diavaez diziraezus',
'weblinkchecker-report': u'E-pad meur a batrouilhenn robot n\'eus ket bet gallet diaraez arliamm da-heul. Gwiriit mat eo dall al liamm e gwirionez ha mard eo, reizhit-eñ pe lamit-eñ.',
},
+ # Author: Edinwiki
+ 'bs': {
+ 'weblinkchecker-archive_msg': u'Ova stranica je arhivirana na "Internet Archive". Pokušajte staviti vezu prema arhiviranoj verziji ove stranice: [%(URL)s].',
+ 'weblinkchecker-caption': u'Mrtav link',
+ 'weblinkchecker-summary': u'Bot: Prijavljam nedostupnu vanjsku vezu',
+ 'weblinkchecker-report': u'Poslije nekoliko automatskih pokušaja bota, ustanovljeno je da je sljedeća vanjska veza nedostupna. Provjerite da li je veza zaista neispravna i zatim je ispravite ako je ovo slučaj!',
+ },
# Author: Anskar
# Author: Pitort
'ca': {