jenkins-bot has submitted this change and it was merged.
Change subject: [WIP] improve methods not bound to an instance
......................................................................
[WIP] improve methods not bound to an instance
- turned several staticmethods into classmethods
- fixed DataSite's inheritance from APISite
Change-Id: I5c8024bb086c4302a5f02dbed26fd3e24696e699
---
M pywikibot/__init__.py
M pywikibot/data/api.py
M pywikibot/logentries.py
M pywikibot/page.py
M pywikibot/site.py
M scripts/templatecount.py
M tests/__init__.py
7 files changed, 76 insertions(+), 76 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
XZise: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/__init__.py b/pywikibot/__init__.py
index 38ee92c..b93f53f 100644
--- a/pywikibot/__init__.py
+++ b/pywikibot/__init__.py
@@ -268,8 +268,8 @@
'precision': self.precision,
}
- @staticmethod
- def fromWikibase(data, site):
+ @classmethod
+ def fromWikibase(cls, data, site):
"""Constructor to create an object from Wikibase's JSON
output."""
globes = {}
for k in site.globes():
@@ -282,9 +282,9 @@
# Default to earth or should we use None here?
globe = 'earth'
- return Coordinate(data['latitude'], data['longitude'],
- data['altitude'], data['precision'],
- globe, site=site, entity=data['globe'])
+ return cls(data['latitude'], data['longitude'],
+ data['altitude'], data['precision'],
+ globe, site=site, entity=data['globe'])
@property
def precision(self):
@@ -395,17 +395,17 @@
else:
raise ValueError('Invalid precision: "%s"' %
precision)
- @staticmethod
- def fromTimestr(datetimestr, precision=14, before=0, after=0, timezone=0,
- calendarmodel=None, site=None):
+ @classmethod
+ def fromTimestr(cls, datetimestr, precision=14, before=0, after=0,
+ timezone=0, calendarmodel=None, site=None):
match = re.match('([-+]?\d+)-(\d+)-(\d+)T(\d+):(\d+):(\d+)Z',
datetimestr)
if not match:
raise ValueError(u"Invalid format: '%s'" % datetimestr)
t = match.groups()
- return WbTime(long(t[0]), int(t[1]), int(t[2]),
- int(t[3]), int(t[4]), int(t[5]),
- precision, before, after, timezone, calendarmodel, site)
+ return cls(long(t[0]), int(t[1]), int(t[2]),
+ int(t[3]), int(t[4]), int(t[5]),
+ precision, before, after, timezone, calendarmodel, site)
def toTimestr(self):
"""
@@ -431,11 +431,11 @@
}
return json
- @staticmethod
- def fromWikibase(ts):
- return WbTime.fromTimestr(ts[u'time'], ts[u'precision'],
- ts[u'before'], ts[u'after'],
- ts[u'timezone'], ts[u'calendarmodel'])
+ @classmethod
+ def fromWikibase(cls, ts):
+ return cls.fromTimestr(ts[u'time'], ts[u'precision'],
+ ts[u'before'], ts[u'after'],
+ ts[u'timezone'], ts[u'calendarmodel'])
def __str__(self):
return json.dumps(self.toWikibase(), indent=4, sort_keys=True,
@@ -493,8 +493,8 @@
}
return json
- @staticmethod
- def fromWikibase(wb):
+ @classmethod
+ def fromWikibase(cls, wb):
"""
Create a WbQuantity from the JSON data given by the Wikibase API.
@@ -504,7 +504,7 @@
upperBound = eval(wb['upperBound'])
lowerBound = eval(wb['lowerBound'])
error = (upperBound - amount, amount - lowerBound)
- return WbQuantity(amount, wb['unit'], error)
+ return cls(amount, wb['unit'], error)
def __str__(self):
return json.dumps(self.toWikibase(), indent=4, sort_keys=True,
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index e746af1..b93582e 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -1089,8 +1089,8 @@
submsg.set_payload(content)
return submsg
- @staticmethod
- def _build_mime_request(params, mime_params):
+ @classmethod
+ def _build_mime_request(cls, params, mime_params):
"""Construct a MIME multipart form post.
@param params: HTTP request params
@@ -1103,10 +1103,10 @@
# construct a MIME message containing all API key/values
container = MIMEMultipart(_subtype='form-data')
for key, value in params.items():
- submsg = Request._generate_MIME_part(key, value)
+ submsg = cls._generate_MIME_part(key, value)
container.attach(submsg)
for key, value in mime_params.items():
- submsg = Request._generate_MIME_part(key, *value)
+ submsg = cls._generate_MIME_part(key, *value)
container.attach(submsg)
# strip the headers to get the HTTP message body
@@ -1366,8 +1366,8 @@
self._data = None
self._cachetime = None
- @staticmethod
- def _get_cache_dir():
+ @classmethod
+ def _get_cache_dir(cls):
"""Return the base directory path for cache entries.
The directory will be created if it does not already exist.
@@ -1375,7 +1375,7 @@
@return: basestring
"""
path = os.path.join(pywikibot.config2.base_dir, 'apicache')
- CachedRequest._make_dir(path)
+ cls._make_dir(path)
return path
@staticmethod
diff --git a/pywikibot/logentries.py b/pywikibot/logentries.py
index 791330e..1a8a9bb 100644
--- a/pywikibot/logentries.py
+++ b/pywikibot/logentries.py
@@ -294,8 +294,8 @@
"""
return self._creator(logdata)
- @staticmethod
- def _getEntryClass(logtype):
+ @classmethod
+ def _getEntryClass(cls, logtype):
"""
Return the class corresponding to the @logtype string parameter.
@@ -303,7 +303,7 @@
@rtype: class
"""
try:
- return LogEntryFactory._logtypes[logtype]
+ return cls._logtypes[logtype]
except KeyError:
return LogEntry
diff --git a/pywikibot/page.py b/pywikibot/page.py
index ad0a643..3687982 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -3036,8 +3036,8 @@
del data[key]
return data
- @staticmethod
- def _normalizeData(data):
+ @classmethod
+ def _normalizeData(cls, data):
"""
Helper function to expand data into the Wikibase API structure.
@@ -3050,7 +3050,7 @@
for prop in ('labels', 'descriptions'):
if prop not in data:
continue
- data[prop] = WikibasePage._normalizeLanguages(data[prop])
+ data[prop] = cls._normalizeLanguages(data[prop])
for key, value in data[prop].items():
if isinstance(value, basestring):
data[prop][key] = {'language': key, 'value': value}
@@ -3656,8 +3656,8 @@
self.rank = 'normal'
self.on_item = None # The item it's on
- @staticmethod
- def fromJSON(site, data):
+ @classmethod
+ def fromJSON(cls, site, data):
"""
Create a claim object from JSON returned in the API call.
@@ -3666,8 +3666,8 @@
@return: Claim
"""
- claim = Claim(site, data['mainsnak']['property'],
- datatype=data['mainsnak'].get('datatype', None))
+ claim = cls(site, data['mainsnak']['property'],
+ datatype=data['mainsnak'].get('datatype', None))
if 'id' in data:
claim.snak = data['id']
elif 'hash' in data:
@@ -3685,15 +3685,15 @@
claim.rank = data['rank']
if 'references' in data:
for source in data['references']:
- claim.sources.append(Claim.referenceFromJSON(site, source))
+ claim.sources.append(cls.referenceFromJSON(site, source))
if 'qualifiers' in data:
for prop in data['qualifiers-order']:
- claim.qualifiers[prop] = [Claim.qualifierFromJSON(site, qualifier)
+ claim.qualifiers[prop] = [cls.qualifierFromJSON(site, qualifier)
for qualifier in
data['qualifiers'][prop]]
return claim
- @staticmethod
- def referenceFromJSON(site, data):
+ @classmethod
+ def referenceFromJSON(cls, site, data):
"""
Create a dict of claims from reference JSON returned in the API call.
@@ -3714,15 +3714,15 @@
for prop in prop_list:
for claimsnak in data['snaks'][prop]:
- claim = Claim.fromJSON(site, {'mainsnak': claimsnak,
- 'hash': data['hash']})
+ claim = cls.fromJSON(site, {'mainsnak': claimsnak,
+ 'hash': data['hash']})
if claim.getID() not in source:
source[claim.getID()] = []
source[claim.getID()].append(claim)
return source
- @staticmethod
- def qualifierFromJSON(site, data):
+ @classmethod
+ def qualifierFromJSON(cls, site, data):
"""
Create a Claim for a qualifier from JSON.
@@ -3732,8 +3732,8 @@
@return: Claim
"""
- return Claim.fromJSON(site, {'mainsnak': data,
- 'hash': data['hash']})
+ return cls.fromJSON(site, {'mainsnak': data,
+ 'hash': data['hash']})
def toJSON(self):
data = {
@@ -4502,8 +4502,8 @@
self.site.code,
self.title))
- @staticmethod
- def fromPage(page, source=None):
+ @classmethod
+ def fromPage(cls, page, source=None):
"""
Create a Link to a Page.
@@ -4514,7 +4514,7 @@
@return: Link
"""
- link = Link.__new__(Link)
+ link = cls.__new__(cls)
link._site = page.site
link._section = page.section()
link._namespace = page.namespace()
@@ -4526,8 +4526,8 @@
return link
- @staticmethod
- def langlinkUnsafe(lang, title, source):
+ @classmethod
+ def langlinkUnsafe(cls, lang, title, source):
"""
Create a "lang:title" Link linked from source.
@@ -4542,7 +4542,7 @@
@return: Link
"""
- link = Link.__new__(Link)
+ link = cls.__new__(cls)
if source.family.interwiki_forward:
link._site = pywikibot.Site(lang, source.family.interwiki_forward)
else:
diff --git a/pywikibot/site.py b/pywikibot/site.py
index b3923d7..c9e75b9 100644
--- a/pywikibot/site.py
+++ b/pywikibot/site.py
@@ -362,10 +362,10 @@
% (self.__class__.__name__, self.id, self.custom_name,
self.canonical_name, self.aliases, kwargs)
- @staticmethod
- def builtin_namespaces(use_image_name=False):
+ @classmethod
+ def builtin_namespaces(cls, use_image_name=False):
"""Return a dict of the builtin namespaces."""
- return dict([(i, Namespace(i, use_image_name=use_image_name))
+ return dict([(i, cls(i, use_image_name=use_image_name))
for i in range(-2, 16)])
@staticmethod
@@ -392,8 +392,8 @@
return parts[0].strip()
return False
- @staticmethod
- def lookup_name(name, namespaces=None):
+ @classmethod
+ def lookup_name(cls, name, namespaces=None):
"""Find the Namespace for a name.
@param name: Name of the namespace.
@@ -404,9 +404,9 @@
@return: Namespace or None
"""
if not namespaces:
- namespaces = Namespace.builtin_namespaces()
+ namespaces = cls.builtin_namespaces()
- name = Namespace.normalize_name(name)
+ name = cls.normalize_name(name)
if name is False:
return None
name = name.lower()
@@ -1532,8 +1532,8 @@
super(APISite, self).__setstate__(attrs)
self.tokens = TokenWallet(self)
- @staticmethod
- def fromDBName(dbname):
+ @classmethod
+ def fromDBName(cls, dbname):
# TODO this only works for some WMF sites
req = api.CachedRequest(datetime.timedelta(days=10),
site=pywikibot.Site('meta', 'meta'),
@@ -1548,11 +1548,11 @@
if site['dbname'] == dbname:
if site['code'] == 'wiki':
site['code'] = 'wikipedia'
- return APISite(lang, site['code'])
+ return cls(lang, site['code'])
else:
for site in val:
if site['dbname'] == dbname:
- return APISite(site['code'], site['code'])
+ return cls(site['code'], site['code'])
raise ValueError("Cannot parse a site out of %s." % dbname)
def _generator(self, gen_class, type_arg=None, namespaces=None,
@@ -5200,9 +5200,9 @@
"""Wikibase data capable site."""
- def __init__(self, code, fam=None, user=None, sysop=None):
+ def __init__(self, *args, **kwargs):
"""Constructor."""
- APISite.__init__(self, code, fam, user, sysop)
+ super(DataSite, self).__init__(*args, **kwargs)
self._item_namespace = None
self._property_namespace = None
diff --git a/scripts/templatecount.py b/scripts/templatecount.py
index 8e36b94..9ca5120 100644
--- a/scripts/templatecount.py
+++ b/scripts/templatecount.py
@@ -48,9 +48,9 @@
"""Template count bot."""
- @staticmethod
- def countTemplates(templates, namespaces):
- templateDict = TemplateCountRobot.template_dict(templates, namespaces)
+ @classmethod
+ def countTemplates(cls, templates, namespaces):
+ templateDict = cls.template_dict(templates, namespaces)
pywikibot.output(u'\nNumber of transclusions per template',
toStdout=True)
pywikibot.output(u'-' * 36, toStdout=True)
@@ -65,9 +65,9 @@
% datetime.datetime.utcnow().isoformat(),
toStdout=True)
- @staticmethod
- def listTemplates(templates, namespaces):
- templateDict = TemplateCountRobot.template_dict(templates, namespaces)
+ @classmethod
+ def listTemplates(cls, templates, namespaces):
+ templateDict = cls.template_dict(templates, namespaces)
pywikibot.output(u'\nList of pages transcluding templates:',
toStdout=True)
for key in templates:
@@ -83,9 +83,9 @@
% datetime.datetime.utcnow().isoformat(),
toStdout=True)
- @staticmethod
- def template_dict(templates, namespaces):
- gen = TemplateCountRobot.template_dict_generator(templates, namespaces)
+ @classmethod
+ def template_dict(cls, templates, namespaces):
+ gen = cls.template_dict_generator(templates, namespaces)
templateDict = {}
for template, transcludingArray in gen:
templateDict[template] = transcludingArray
diff --git a/tests/__init__.py b/tests/__init__.py
index 3cf20fd..1ffcabc 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -187,8 +187,8 @@
return collector(loader)
-CachedRequest._get_cache_dir = staticmethod(
- lambda *args: CachedRequest._make_dir(_cache_dir))
+CachedRequest._get_cache_dir = classmethod(
+ lambda cls, *args: cls._make_dir(_cache_dir))
# Travis-CI builds are set to retry twice, which aims to reduce the number
--
To view, visit
https://gerrit.wikimedia.org/r/168948
To unsubscribe, visit
https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I5c8024bb086c4302a5f02dbed26fd3e24696e699
Gerrit-PatchSet: 7
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Ricordisamoa <ricordisamoa(a)openmailbox.org>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Ricordisamoa <ricordisamoa(a)openmailbox.org>
Gerrit-Reviewer: XZise <CommodoreFabianus(a)gmx.de>
Gerrit-Reviewer: jenkins-bot <>