jenkins-bot has submitted this change and it was merged.
Change subject: Namespace filtering in replace.py
......................................................................
Namespace filtering in replace.py
The namespace filtering (-namespace, -ns) was broken in core for
replace.py, leading to changes intended for one (or more) namespace(s)
bleeding into others.
This commit reintroduces the logic from the compat tree.
This commit also removes the redundant handling of -page/PageTitles;
that argument is handled by pagegenerators.
Bug: 57303
Change-Id: I05845e6962fbae92719e2a128bb93de6044c2ed5
---
M pywikibot/pagegenerators.py
M scripts/replace.py
2 files changed, 7 insertions(+), 13 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/pagegenerators.py b/pywikibot/pagegenerators.py
index 025c732..b525839 100644
--- a/pywikibot/pagegenerators.py
+++ b/pywikibot/pagegenerators.py
@@ -208,11 +208,15 @@
if self.site is None:
self.site = pywikibot.Site()
- def getCombinedGenerator(self):
+ def getCombinedGenerator(self, gen=None):
"""Return the combination of all accumulated generators.
Only call this after all arguments have been parsed.
"""
+
+ if gen:
+ self.gens.insert(0, gen)
+
namespaces = [int(n) for n in self.namespaces]
for i in range(len(self.gens)):
if isinstance(self.gens[i], pywikibot.data.api.QueryGenerator):
diff --git a/scripts/replace.py b/scripts/replace.py
index 75b0ccf..d109a36 100755
--- a/scripts/replace.py
+++ b/scripts/replace.py
@@ -464,7 +464,6 @@
# if -xml flag is present
xmlFilename = None
useSql = False
- PageTitles = []
# will become True when the user presses a ('yes to all') or uses the
# -always flag.
acceptall = False
@@ -505,12 +504,6 @@
xmlFilename = arg[5:]
elif arg == '-sql':
useSql = True
- elif arg.startswith('-page'):
- if len(arg) == 5:
- PageTitles.append(pywikibot.input(
- u'Which page do you want to change?'))
- else:
- PageTitles.append(arg[6:])
elif arg.startswith('-excepttitle:'):
exceptions['title'].append(arg[13:])
elif arg.startswith('-requiretitle:'):
@@ -545,7 +538,6 @@
else:
commandline_replacements.append(arg)
pywikibot.Site().login()
- gen = genFactory.getCombinedGenerator()
if (len(commandline_replacements) % 2):
raise pywikibot.Error('require even number of replacements.')
elif (len(commandline_replacements) == 2 and fix is None):
@@ -677,10 +669,8 @@
%s
LIMIT 200""" % (whereClause, exceptClause)
gen = pagegenerators.MySQLPageGenerator(query)
- elif PageTitles:
- pages = [pywikibot.Page(pywikibot.Site(), PageTitle)
- for PageTitle in PageTitles]
- gen = iter(pages)
+
+ gen = genFactory.getCombinedGenerator(gen)
if not gen:
# syntax error, show help text from the top of this file
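A minimal usage sketch of the new gen parameter follows (hypothetical
script code, not part of this patch):

    # Sketch: route a locally built generator through GeneratorFactory
    # so that options collected from the command line (e.g. -namespace)
    # still apply to it. All titles here are made up.
    import pywikibot
    from pywikibot import pagegenerators

    genFactory = pagegenerators.GeneratorFactory()
    for arg in pywikibot.handleArgs():
        genFactory.handleArg(arg)  # consumes -namespace:0, -cat:..., etc.

    # A generator created outside the factory, e.g. from an SQL query:
    local_gen = iter([pywikibot.Page(pywikibot.Site(), u'Example')])

    # Passing it to getCombinedGenerator() inserts it ahead of the
    # factory's own generators instead of bypassing their filtering.
    gen = genFactory.getCombinedGenerator(local_gen)
    for page in gen or []:
        pywikibot.output(page.title())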
--
To view, visit https://gerrit.wikimedia.org/r/150265
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I05845e6962fbae92719e2a128bb93de6044c2ed5
Gerrit-PatchSet: 6
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Bep <bjorn.erik.pedersen(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: workaround: restore CategoryMoveRobot.oldcat after move
......................................................................
workaround: restore CategoryMoveRobot.oldcat after move
pywikibot.data.api.update_page would otherwise make oldcat point to
newcat, making CategoryMoveRobot._change() almost useless.
I would surely welcome a more thorough solution.
Change-Id: I8842fbba10c9026a9087dfb1503708644f2992fd
---
M scripts/category.py
1 file changed, 2 insertions(+), 0 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/category.py b/scripts/category.py
index 3923ca0..f7fcbcb 100755
--- a/scripts/category.py
+++ b/scripts/category.py
@@ -470,8 +470,10 @@
"""
if self.newcat and self.move_oldcat and not self.newcat.exists():
if "move-categorypages" in self.site.userinfo["rights"]:
+ oldcattitle = self.oldcat.title()
self.oldcat.move(self.newcat.title(), reason=self.comment,
movetalkpage=True)
+ self.oldcat = pywikibot.Category(self.oldcat.site, oldcattitle)
else:
self._movecat()
self._movetalk()
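The pattern behind the workaround, as a standalone sketch (hypothetical
category names; not part of the patch):

    # Sketch: move() refreshes the moved object's data in place, so a
    # handle to the old title must be rebuilt for any follow-up edits.
    import pywikibot

    site = pywikibot.Site()
    oldcat = pywikibot.Category(site, 'Category:Old name')
    oldcattitle = oldcat.title()
    oldcat.move('Category:New name', reason='example merge',
                movetalkpage=True)
    # oldcat may now describe the new location; restore the old handle:
    oldcat = pywikibot.Category(site, oldcattitle)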
--
To view, visit https://gerrit.wikimedia.org/r/149608
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I8842fbba10c9026a9087dfb1503708644f2992fd
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: Ricordisamoa <ricordisamoa(a)openmailbox.org>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: (merge fixup) cache maintenance script
......................................................................
(merge fixup) cache maintenance script
Ibc7ec0a90a130fb1c834fd632af7872f1325b3e0 altered
CachedRequest._cachefile_path() in a way that made it unusable for the
cache.py script introduced in I727af9b6fa10d2231e5d777a1bc6f8602ca03f50.
Because the two were committed at the same time, cache.py failed
for directories other than the one which CachedRequest uses.
Change-Id: I36360fd041da952375bc88a4599a21451878baa7
---
M scripts/maintenance/cache.py
1 file changed, 4 insertions(+), 0 deletions(-)
Approvals:
John Vandenberg: Looks good to me, approved
jenkins-bot: Verified
diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py
index 702e034..2c51070 100644
--- a/scripts/maintenance/cache.py
+++ b/scripts/maintenance/cache.py
@@ -82,6 +82,10 @@
""" Directory of the cached entry. """
return self.directory
+ def _cachefile_path(self):
+ return os.path.join(self._get_cache_dir(),
+ self._create_file_name())
+
def _load_cache(self):
""" Load the cache entry. """
with open(self._cachefile_path(), 'rb') as f:
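The dispatch problem behind this fixup, reduced to a toy example
(simplified stand-in classes, not the real ones):

    # The parent calls its _get_cache_dir() statically, so a subclass
    # override of _get_cache_dir is ignored unless _cachefile_path
    # itself is also overridden, which is what this commit adds.
    import os

    class Parent(object):
        @staticmethod
        def _get_cache_dir():
            return '/base/apicache'

        def _cachefile_path(self):
            # Static call: never dispatches to a subclass override.
            return os.path.join(Parent._get_cache_dir(), 'entry')

    class Child(Parent):
        def _get_cache_dir(self):
            return '/custom/dir'

    print(Child()._cachefile_path())  # '/base/apicache/entry'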
--
To view, visit https://gerrit.wikimedia.org/r/150204
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I36360fd041da952375bc88a4599a21451878baa7
Gerrit-PatchSet: 1
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Report site&title for lazy loaded ItemPage
......................................................................
Report site&title for lazy loaded ItemPage
When lazily loading the ID of an ItemPage, a NoPage exception is raised if the
item is 'missing'. Currently the Wikibase API does not distinguish
between a) the title not existing on a site, and b) the site&title not having
a wikibase item associated with it. Both are 'missing'. Bug 68251.
If the site&title does not exist, the NoPage exception should report
the site&title that does not exist, so that it appears in the logging
rather than reporting that 'Item -1' does not exist.
Change-Id: I057bb8e752a6cb190e392847292269cbee1f7194
---
M pywikibot/page.py
M tests/wikibase_tests.py
2 files changed, 39 insertions(+), 2 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
Merlijn van Deen: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/page.py b/pywikibot/page.py
index 3396ed1..6d45fba 100644
--- a/pywikibot/page.py
+++ b/pywikibot/page.py
@@ -2645,6 +2645,7 @@
@type force: bool
@param args: may be used to specify custom props.
"""
+ lazy_loading_id = not hasattr(self, 'id') and hasattr(self, '_site')
if force or not hasattr(self, '_content'):
data = self.repo.loadcontent(self._defined_by(), *args)
self.id = list(data.keys())[0]
@@ -2652,7 +2653,12 @@
if 'lastrevid' in self._content:
self.lastrevid = self._content['lastrevid']
else:
+ if lazy_loading_id:
+ p = Page(self._site, self._title)
+ if not p.exists():
+ raise pywikibot.NoPage(p)
raise pywikibot.NoPage(self)
+
# aliases
self.aliases = {}
if 'aliases' in self._content:
diff --git a/tests/wikibase_tests.py b/tests/wikibase_tests.py
index ba91af6..b4bbb96 100644
--- a/tests/wikibase_tests.py
+++ b/tests/wikibase_tests.py
@@ -224,6 +224,15 @@
self.assertRaises(pywikibot.InvalidTitle, pywikibot.ItemPage.fromPage, page)
def _test_fromPage_noitem(self, link):
+ """Helper function to test a page without an associated item.
+
+ It tests two of the ways to fetch an item:
+ 1. the Page already has props, which should contain an item id if
+ present, and that item id is used to instantiate the item, and
+ 2. the page doesn't have props, in which case the site&title is
+ used to look up the item id, but that lookup occurs after
+ instantiation, during the first attempt to use the data item.
+ """
for props in [True, False]:
for method in ['title', 'get', 'getID', 'exists']:
page = pywikibot.Page(link)
@@ -243,8 +252,15 @@
else:
self.assertRaises(pywikibot.NoPage, getattr(item, method))
- # invoking any of those methods changes the title to '-1'
- self.assertEquals(item._link._title, '-1')
+ # The invocation above of a fetching method shouldn't change
+ # the local item, but it does! The title changes to '-1'.
+ #
+ # However when identifying the item for 'en:Test page'
+ # (a deleted page), the exception handling is smarter, and no
+ # local data is modified in this scenario. This case is
+ # separately tested in test_fromPage_missing_lazy.
+ if link.title != 'Test page':
+ self.assertEquals(item._link._title, '-1')
self.assertEquals(hasattr(item, '_content'), True)
@@ -266,6 +282,21 @@
link = page._link
self._test_fromPage_noitem(link)
+ def test_fromPage_missing_lazy(self):
+ """Test lazy loading of item from nonexistent source page."""
+ # this is a deleted page, and should not have a wikidata item
+ link = pywikibot.page.Link("Test page", site)
+ page = pywikibot.Page(link)
+ item = pywikibot.ItemPage.fromPage(page)
+
+ # Now verify that delay loading will result in the desired semantics.
+ # It should not raise NoPage on the wikibase item which has a title
+ # like '-1' or 'Null', as that is useless to determine the cause
+ # without a full debug log.
+ # It should raise NoPage on the page, as that is what the
+ # bot operator needs to see in the log output.
+ self.assertRaisesRegexp(pywikibot.NoPage, 'Test page', item.get)
+
class TestPropertyPage(PywikibotTestCase):
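The effect of the change from a bot operator's point of view, sketched
with a hypothetical page title (not from the patch):

    # With this change, a missing source page is reported under its own
    # title instead of as 'Item -1'.
    import pywikibot

    site = pywikibot.Site('en', 'wikipedia')
    page = pywikibot.Page(site, 'Some deleted page')  # assumed missing
    item = pywikibot.ItemPage.fromPage(page)  # lazy: no API call yet
    try:
        item.get()  # first use triggers the lookup
    except pywikibot.NoPage as e:
        pywikibot.output(u'No item: %s' % e)  # names the source page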
--
To view, visit https://gerrit.wikimedia.org/r/147711
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I057bb8e752a6cb190e392847292269cbee1f7194
Gerrit-PatchSet: 3
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: Mpaa <mpaa.wiki(a)gmail.com>
Gerrit-Reviewer: Ricordisamoa <ricordisamoa(a)openmailbox.org>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Add maintenance script for the API cache
......................................................................
Add maintenance script for the API cache
Replace the cache entry deletion in TestRequest with a new
script, 'cache.py', which reports cache entries containing a password
and allows them to be fixed.
cache.py also allows cache entries to be queried and deleted
based on other conditions.
Change-Id: I727af9b6fa10d2231e5d777a1bc6f8602ca03f50
---
M pywikibot/data/api.py
A scripts/maintenance/cache.py
M tests/__init__.py
3 files changed, 340 insertions(+), 7 deletions(-)
Approvals:
John Vandenberg: Looks good to me, but someone else must approve
Merlijn van Deen: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index bb5c1cb..1410205 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -454,6 +454,12 @@
pass
def _uniquedescriptionstr(self):
+ """ Unique description for the cache entry.
+
+ If this is modified, please also update
+ scripts/maintenance/cache.py to support
+ the new key and all previous keys. """
+
login_status = self.site._loginstatus
if login_status > pywikibot.site.LoginStatus.NOT_LOGGED_IN and \
diff --git a/scripts/maintenance/cache.py b/scripts/maintenance/cache.py
new file mode 100644
index 0000000..702e034
--- /dev/null
+++ b/scripts/maintenance/cache.py
@@ -0,0 +1,334 @@
+# -*- coding: utf-8 -*-
+"""
+This script runs commands on each entry in the API caches.
+
+Syntax: cache.py [-password] [-delete] [-c '...'] [dir ...]
+
+If no directories are specified, it will detect the API caches.
+
+If no command is specified, it will print the filename of all entries.
+If only -delete is specified, it will delete all entries.
+
+The option '-c' must be followed by a command in python syntax.
+
+Example commands:
+ Print the filename of any entry with 'wikidata' in the key:
+
+ entry if "wikidata" in entry._uniquedescriptionstr() else None
+
+ Customised output if the site code is 'ar':
+
+ entry.site.code == "ar" and \
+pywikibot.output("%s" % entry._uniquedescriptionstr())
+
+ Or, based on the state of the login:
+ entry.site._loginstatus == LoginStatus.NOT_ATTEMPTED and \
+pywikibot.output("%s" % entry._uniquedescriptionstr())
+
+ These functions can be used as a command:
+ has_password(entry)
+ is_logout(entry)
+ empty_response(entry)
+ not_accessed(entry)
+ incorrect_hash(entry)
+ older_than_one_day(entry)
+ recent(entry)
+
+ There are helper functions which can be part of a command:
+ older_than(entry, interval)
+ newer_than(entry, interval)
+"""
+#
+# (C) Pywikibot team, 2014
+#
+# Distributed under the terms of the MIT license.
+#
+__version__ = '$Id$'
+#
+
+import os
+import datetime
+import pickle
+import hashlib
+import pywikibot
+from pywikibot.data import api
+
+from pywikibot.site import APISite, DataSite, LoginStatus # noqa
+from pywikibot.page import User # noqa
+
+
+class ParseError(Exception):
+ """ Error parsing. """
+
+
+class CacheEntry(api.CachedRequest):
+
+ def __init__(self, directory, filename):
+ """ Constructor. """
+ self.directory = directory
+ self.filename = filename
+
+ def __str__(self):
+ return self.filename
+
+ def __repr__(self):
+ return self._cachefile_path()
+
+ def _create_file_name(self):
+ """ Filename of the cached entry. """
+ return self.filename
+
+ def _get_cache_dir(self):
+ """ Directory of the cached entry. """
+ return self.directory
+
+ def _load_cache(self):
+ """ Load the cache entry. """
+ with open(self._cachefile_path(), 'rb') as f:
+ self.key, self._data, self._cachetime = pickle.load(f)
+ return True
+
+ def parse_key(self):
+ """ Parse the key loaded from the cache entry. """
+
+ # find the start of the first parameter
+ start = self.key.index('(')
+ # find the end of the first object
+ end = self.key.index(')')
+
+ if not end:
+ raise ParseError('End of Site() keyword not found: %s' % self.key)
+
+ if 'Site' not in self.key[0:start]:
+ raise ParseError('Site() keyword not found at start of key: %s'
+ % self.key)
+
+ site = self.key[0:end + 1]
+ if site[0:5] == 'Site(':
+ site = 'APISite(' + site[5:]
+
+ username = None
+ login_status = None
+
+ start = end + 1
+ if self.key[start:start + 5] == 'User(':
+ # The addition of user to the cache key used:
+ # repr(User)
+ # which includes namespaces resulting in:
+ # User(User:<username>)
+ # This also accepts User(<username>)
+ if self.key[start:start + 10] == 'User(User:':
+ start += 10
+ else:
+ start += 5
+
+ end = self.key.index(')', start + 5)
+ if not end:
+ raise ParseError('End of User() keyword not found: %s'
+ % self.key)
+ username = self.key[start:end]
+ elif self.key[start:start + 12] == 'LoginStatus(':
+ end = self.key.index(')', start + 12)
+ if not end:
+ raise ParseError('End of LoginStatus() keyword not found: %s'
+ % self.key)
+ login_status = self.key[start:end + 1]
+ # If the key does not contain User(..) or LoginStatus(..),
+ # it must be the old key format which only contains Site and params
+ elif self.key[start:start + 3] != "[('":
+ raise ParseError('Keyword after Site not recognised: %s...'
+ % self.key)
+
+ start = end + 1
+
+ params = self.key[start:]
+
+ self._parsed_key = (site, username, login_status, params)
+ return self._parsed_key
+
+ def _rebuild(self):
+ """ Reconstruct the original Request from the key. """
+ if hasattr(self, '_parsed_key'):
+ (site, username, login_status, params) = self._parsed_key
+ else:
+ (site, username, login_status, params) = self.parse_key()
+ if site:
+ self.site = eval(site)
+ if login_status:
+ self.site._loginstatus = eval('LoginStatus.%s'
+ % login_status[12:-1])
+ if username:
+ self.site._username = [username, username]
+ if params:
+ self.params = dict(eval(params))
+
+ def _delete(self):
+ """ Delete the cache entry. """
+ os.remove(self._cachefile_path())
+
+
+def process_entries(cache_dir, func):
+ """ Check the contents of the cache. """
+
+ # This program tries to use file access times to determine
+ # whether cache files are being used.
+ # However file access times are not always usable.
+ # On many modern filesystems, they have been disabled.
+ # On unix, check the filesystem mount options. You may
+ # need to remount with 'strictatime'.
+ # - None = detect
+ # - False = don't use
+ # - True = always use
+ use_accesstime = None
+
+ if not cache_dir:
+ cache_dir = os.path.join(pywikibot.config2.base_dir, 'apicache')
+ for filename in os.listdir(cache_dir):
+ filepath = os.path.join(cache_dir, filename)
+ if use_accesstime is not False:
+ stinfo = os.stat(filepath)
+
+ entry = CacheEntry(cache_dir, filename)
+ entry._load_cache()
+
+ if use_accesstime is None:
+ stinfo2 = os.stat(filepath)
+ use_accesstime = stinfo.st_atime != stinfo2.st_atime
+
+ if use_accesstime:
+ # Reset access times to values before loading cache entry.
+ os.utime(filepath, (stinfo.st_atime, stinfo.st_mtime))
+ entry.stinfo = stinfo
+
+ try:
+ entry.parse_key()
+ except ParseError:
+ pywikibot.error(u'Problems parsing %s with key %s'
+ % (entry.filename, entry.key))
+ pywikibot.exception()
+ continue
+
+ try:
+ entry._rebuild()
+ except Exception:
+ pywikibot.error(u'Problems loading %s with key %s, %r'
+ % (entry.filename, entry.key, entry._parsed_key))
+ pywikibot.exception()
+ continue
+
+ func(entry)
+
+
+def has_password(entry):
+ """ has a password in the entry """
+ if 'lgpassword' in entry._uniquedescriptionstr():
+ return entry
+
+
+def is_logout(entry):
+ """ is a logout entry """
+ if not entry._data and 'logout' in entry.key:
+ return entry
+
+
+def empty_response(entry):
+ """ has no data """
+ if not entry._data and 'logout' not in entry.key:
+ return entry
+
+
+def not_accessed(entry):
+ """ has never been accessed """
+ if not hasattr(entry, 'stinfo'):
+ return
+
+ if entry.stinfo.st_atime <= entry.stinfo.st_mtime:
+ return entry
+
+
+def incorrect_hash(entry):
+ if hashlib.sha256(entry.key.encode('utf-8')).hexdigest() != entry.filename:
+ return entry
+
+
+def older_than(entry, interval):
+ if entry._cachetime + interval < datetime.datetime.now():
+ return entry
+
+
+def newer_than(entry, interval):
+ if entry._cachetime + interval >= datetime.datetime.now():
+ return entry
+
+
+def older_than_one_day(entry):
+ if older_than(entry, datetime.timedelta(days=1)):
+ return entry
+
+
+def recent(entry):
+ if newer_than(entry, datetime.timedelta(hours=1)):
+ return entry
+
+
+def main():
+ local_args = pywikibot.handleArgs()
+ cache_dirs = []
+ delete = False
+ command = None
+
+ for arg in local_args:
+ if command == '':
+ command = arg
+ elif arg == '-delete':
+ delete = True
+ elif arg == '-password':
+ command = 'has_password(entry)'
+ elif arg == '-c':
+ if command:
+ pywikibot.error('Only one command may be executed.')
+ exit(1)
+ command = ''
+ else:
+ cache_dirs.append(arg)
+
+ func = None
+
+ if not cache_dirs:
+ cache_dirs = ['apicache', 'tests/apicache']
+
+ # Also process the base directory, if it isn't the current directory
+ if os.path.abspath(os.getcwd()) != pywikibot.config2.base_dir:
+ cache_dirs += [
+ os.path.join(pywikibot.config2.base_dir, 'apicache')]
+
+ # Also process the user home cache, if it isn't the config directory
+ if os.path.expanduser('~/.pywikibot') != pywikibot.config2.base_dir:
+ cache_dirs += [
+ os.path.join(os.path.expanduser('~/.pywikibot'), 'apicache')]
+
+ if delete:
+ action_func = lambda entry: entry._delete()
+ else:
+ action_func = lambda entry: pywikibot.output(entry)
+
+ if command:
+ try:
+ command_func = eval('lambda entry: ' + command)
+ except:
+ pywikibot.exception()
+ pywikibot.error(u'Can not compile command: %s' % command)
+ exit(1)
+
+ func = lambda entry: command_func(entry) and action_func(entry)
+ else:
+ func = action_func
+
+ for cache_dir in cache_dirs:
+ if os.path.isdir(cache_dir):
+ if len(cache_dirs) > 1:
+ pywikibot.output(u'Processing %s' % cache_dir)
+ process_entries(cache_dir, func)
+
+if __name__ == '__main__':
+ main()
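Example invocations of the script, using the options defined in its
docstring (run from the root of the core checkout; cache paths
hypothetical):

    python scripts/maintenance/cache.py -password
    python scripts/maintenance/cache.py -password -delete
    python scripts/maintenance/cache.py -c 'older_than_one_day(entry)' tests/apicache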
diff --git a/tests/__init__.py b/tests/__init__.py
index 3ac2361..4d3ce5a 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -34,17 +34,10 @@
return False
if 'lgpassword' in self._uniquedescriptionstr():
- self._delete_cache()
self._data = None
return False
return True
-
- def _delete_cache(self):
- """Delete cached response if it exists."""
- self._load_cache()
- if self._cachetime:
- os.remove(self._cachefile_path())
def _write_cache(self, data):
"""Write data except login details."""
--
To view, visit https://gerrit.wikimedia.org/r/144144
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: I727af9b6fa10d2231e5d777a1bc6f8602ca03f50
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: jenkins-bot <>
jenkins-bot has submitted this change and it was merged.
Change subject: Always store cache in tests/apicache for tests
......................................................................
Always store cache in tests/apicache for tests
Under the test framework, API responses were cached in
tests/apicache when the request would be uncached in normal
operation, but in <base_dir>/apicache when the request would be
processed by CachedRequest in normal operation.
This change forces all cached API requests to be stored
in tests/apicache, irrespective of how the test was invoked,
or whether the request would normally be cached or not.
CachedRequest methods _get_cache_dir and _make_dir converted
to static methods for easy monkey-patching, and docstrings added.
Change-Id: Ibc7ec0a90a130fb1c834fd632af7872f1325b3e0
---
M pywikibot/data/api.py
M tests/__init__.py
2 files changed, 28 insertions(+), 9 deletions(-)
Approvals:
Merlijn van Deen: Looks good to me, approved
jenkins-bot: Verified
diff --git a/pywikibot/data/api.py b/pywikibot/data/api.py
index bb5c1cb..865932c 100644
--- a/pywikibot/data/api.py
+++ b/pywikibot/data/api.py
@@ -441,17 +441,35 @@
self._data = None
self._cachetime = None
- def _get_cache_dir(self):
+ @staticmethod
+ def _get_cache_dir():
+ """The base directory path for cache entries.
+
+ The directory will be created if it does not already exist.
+
+ @return: basestring
+ """
path = os.path.join(pywikibot.config2.base_dir, 'apicache')
- self._make_dir(path)
+ CachedRequest._make_dir(path)
return path
- def _make_dir(self, dir):
+ @staticmethod
+ def _make_dir(dir):
+ """Create directory if it does not exist already.
+
+ The directory name (dir) is returned unmodified.
+
+ @param dir: directory path
+ @type dir: basestring
+
+ @return: basestring
+ """
try:
os.makedirs(dir)
except OSError:
# directory already exists
pass
+ return dir
def _uniquedescriptionstr(self):
login_status = self.site._loginstatus
@@ -475,7 +493,8 @@
).hexdigest()
def _cachefile_path(self):
- return os.path.join(self._get_cache_dir(), self._create_file_name())
+ return os.path.join(CachedRequest._get_cache_dir(),
+ self._create_file_name())
def _expired(self, dt):
return dt + self.expiry < datetime.datetime.now()
diff --git a/tests/__init__.py b/tests/__init__.py
index 3ac2361..e725492 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -11,6 +11,11 @@
from pywikibot.data.api import Request as _original_Request
from pywikibot.data.api import CachedRequest
+_cache_dir = os.path.join(os.path.split(__file__)[0], 'apicache')
+
+CachedRequest._get_cache_dir = staticmethod(
+ lambda *args: CachedRequest._make_dir(_cache_dir))
+
class TestRequest(CachedRequest):
@@ -18,11 +23,6 @@
def __init__(self, *args, **kwargs):
super(TestRequest, self).__init__(0, *args, **kwargs)
-
- def _get_cache_dir(self):
- path = os.path.join(os.path.split(__file__)[0], 'apicache')
- self._make_dir(path)
- return path
def _expired(self, dt):
"""Never invalidate cached data."""
--
To view, visit https://gerrit.wikimedia.org/r/143274
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: merged
Gerrit-Change-Id: Ibc7ec0a90a130fb1c834fd632af7872f1325b3e0
Gerrit-PatchSet: 2
Gerrit-Project: pywikibot/core
Gerrit-Branch: master
Gerrit-Owner: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: John Vandenberg <jayvdb(a)gmail.com>
Gerrit-Reviewer: Ladsgroup <ladsgroup(a)gmail.com>
Gerrit-Reviewer: Legoktm <legoktm.wikipedia(a)gmail.com>
Gerrit-Reviewer: Merlijn van Deen <valhallasw(a)arctus.nl>
Gerrit-Reviewer: jenkins-bot <>