http://www.mediawiki.org/wiki/Special:Code/pywikipedia/10968
Revision: 10968
Author: xqt
Date: 2013-01-23 11:29:02 +0000 (Wed, 23 Jan 2013)
Log Message:
-----------
some fixes for weblinkchecker i18n
Modified Paths:
--------------
branches/rewrite/scripts/i18n/weblinkchecker.py
Modified: branches/rewrite/scripts/i18n/weblinkchecker.py
===================================================================
--- branches/rewrite/scripts/i18n/weblinkchecker.py 2013-01-23 11:17:25 UTC (rev 10967)
+++ branches/rewrite/scripts/i18n/weblinkchecker.py 2013-01-23 11:29:02 UTC (rev 10968)
@@ -31,7 +31,7 @@
         'weblinkchecker-report': u'بر طبق بررسیهای رباتیکی من چندین پیوند غیرقابل دسترس پیدا شد. لطفا آنها بررسی و در صورت لزوم درستش کنید.تشکر!',
     },
     'fr': {
-        'weblinkchecker-archive_msg': u'La page a été sauvegardée dans l’''Internet Archive''. Il serait peut-être utile de faire pointer le lien vers une des versions archivées : [%(URL)s].',
+        'weblinkchecker-archive_msg': u'La page a été sauvegardée dans l’Internet Archive. Il serait peut-être utile de faire pointer le lien vers une des versions archivées : [%(URL)s].',
         'weblinkchecker-caption': u'Lien mort',
         'weblinkchecker-summary': u'Robot : Rapporte lien externe inaccessible',
         'weblinkchecker-report': u'Pendant plusieurs patrouilles par un robot, le lien suivant a été inaccessible. Veuillez vérifier si le lien est effectivement mort et si oui corrigez ou retirez-le.',
@@ -60,7 +60,7 @@
         'weblinkchecker-report': u'Өздікті бот бірнеше жегілгенде келесі сыртқы сілтемеге қатынай алмады. Бұл сілтеменің қатыналуын тексеріп шығыңыз да, не түзетіңіз, не аластаңыз!',
     },
     'ksh': {
-        'weblinkchecker-archive_msg': u'De Websick es em ''Internet Archive'' faßjehallde. Kannß jo felleijsj_obb_en Koppi doh verlengke, süsh hee: [%(URL)s].',
+        'weblinkchecker-archive_msg': u'De Websick es em Internet Archive faßjehallde. Kannß jo felleijsj_obb_en Koppi doh verlengke, süsh hee: [%(URL)s].',
         'weblinkchecker-caption': u'Han enne kappodde Weblengk jefonge',
         'weblinkchecker-summary': u'Bot: Ene Weblengk jeijt nit mih.',
         'weblinkchecker-report': u'Esch han bonge die Weblingks paa Mol jetschäck. Se han allemoolde nit jedon Doht ens donnoh loore, un dä Lengk reparreere odo eruß nämme.',
@@ -78,12 +78,10 @@
     },
     'no': {
         'weblinkchecker-archive_msg': u'Denne nettsiden er lagra i Internet Archive. Vurder om lenka kan endres til å peke til en av de arkiverte versjonene: [%(URL)s].',
-        'weblinkchecker-caption': u'',
         'weblinkchecker-summary': u'bot: Rapporter død eksternlenke',
     },
     'pl': {
         'weblinkchecker-archive_msg': u'%(URL)s',
-        'weblinkchecker-caption': u'',
         'weblinkchecker-summary': u'Robot zgłasza niedostępny link zewnętrzny',
     },
     'pt': {
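Both kinds of edits above follow one pattern. The '' markup around Internet Archive in the French and Colognian messages was wiki italic syntax, but inside a Python u'...' literal the doubled quotes just close and reopen the string, so implicit literal concatenation silently swallowed them; removing them makes the literal say what is actually posted. The empty u'' captions for 'no' and 'pl' are dropped because an empty entry shadows the fallback: the lookup finds the key, returns the empty string, and the report gets an empty heading. A minimal sketch of that fallback behaviour, assuming a simplified message table and a hypothetical translate() helper (the real i18n machinery differs):

    # Sketch only: msg and translate() are simplified stand-ins for the
    # real pywikipedia i18n lookup.
    msg = {
        'en': {'weblinkchecker-caption': u'Dead link'},
        'pl': {'weblinkchecker-summary': u'Robot zgłasza niedostępny link zewnętrzny'},
    }

    def translate(lang, key, fallback='en'):
        """Return the message for lang; fall back to English if the key is absent."""
        try:
            return msg[lang][key]
        except KeyError:
            return msg[fallback][key]

    # With the u'' entry removed rather than left empty, 'pl' inherits the
    # English caption instead of producing an empty section heading:
    print translate('pl', 'weblinkchecker-caption')   # -> Dead link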
http://www.mediawiki.org/wiki/Special:Code/pywikipedia/10962
Revision: 10962
Author: xqt
Date: 2013-01-20 16:20:49 +0000 (Sun, 20 Jan 2013)
Log Message:
-----------
rename wikidataPage to DataPage; use _originSite for site parameter for getentity
Modified Paths:
--------------
trunk/pywikipedia/wikipedia.py
Modified: trunk/pywikipedia/wikipedia.py
===================================================================
--- trunk/pywikipedia/wikipedia.py 2013-01-20 16:08:06 UTC (rev 10961)
+++ trunk/pywikipedia/wikipedia.py 2013-01-20 16:20:49 UTC (rev 10962)
@@ -4035,7 +4035,7 @@
         return (u'purged' in r)
 
 
-class wikidataPage(Page):
+class DataPage(Page):
     """A subclass of Page representing a page on wikidata.
 
     Supports the same interface as Page, with the following added methods:
@@ -4228,20 +4228,21 @@
         return
 
     def getentity(self,force=False, get_redirect=False, throttle=True,
-                sysop=False, change_edit_time=True):
+                  sysop=False, change_edit_time=True):
         """Returns items of a entity in a dictionary
         """
         params = {
             'action': 'query',
             'titles': self.title(),
             'prop': ['revisions', 'info'],
-            'rvprop': ['content', 'ids', 'flags', 'timestamp', 'user', 'comment', 'size'],
+            'rvprop': ['content', 'ids', 'flags', 'timestamp', 'user',
+                       'comment', 'size'],
             'rvlimit': 1,
             'inprop': ['protection', 'subjectid'],
         }
         params1=params.copy()
-        params['action']='wbgetentities'
-        params['sites']='enwiki'
+        params['action'] = 'wbgetentities'
+        params['sites'] = self._originSite.dbName().split('_')[0]
         del params['prop']
         del params['rvprop']
         del params['rvlimit']
@@ -4249,11 +4250,11 @@
         textareaFound = False
         # retrying loop is done by query.GetData
         data = query.GetData(params, self.site(), sysop=sysop)
-        data['query']={'pages':data['entities']}
+        data['query'] = {'pages': data['entities']}
         for pageid in data['entities'].keys():
-            if pageid=="-1":
+            if pageid == "-1":
                 continue #Means the page does not exist
-            params1['titles']=pageid
+            params1['titles'] = pageid
             ndata=query.GetData(params1, self.site(), sysop=sysop)
             data['entities'].update(ndata['query']['pages'])
         data['query']['pages'].update(data['entities'])
@@ -4265,7 +4266,8 @@
         if data['query']['pages'].keys()[0] == "-1":
             if 'missing' in pageInfo:
                 raise NoPage(self.site(), unicode(self),
-"Page does not exist. In rare cases, if you are certain the page does exist, look into overriding family.RversionTab")
+"Page does not exist. In rare cases, if you are certain the page does exist, "
+ "look into overriding family.RversionTab")
             elif 'invalid' in pageInfo:
                 raise BadTitle('BadTitle: %s' % self)
         elif 'revisions' in pageInfo: #valid Title
@@ -4349,10 +4351,10 @@
         pageInfo = entities
         if 'missing' in pageInfo:
             raise NoPage(self.site(), unicode(self),
-"Page does not exist. In rare cases, if you are certain the page does exist, look into overriding family.RversionTab")
+"Page does not exist. In rare cases, if you are certain the page does exist, "
+ "look into overriding family.RversionTab")
         elif 'invalid' in pageInfo:
             raise BadTitle('BadTitle: %s' % self)
-
         return entities
 
     def searchentities(self, search, sysop=False):
@@ -4382,7 +4384,9 @@
         return search
 
 
+wikidataPage = DataPage #keep compatible
+
 class ImagePage(Page):
     """A subclass of Page representing an image descriptor wiki page.