Commit b031c0e4 authored by Ivan Tyagov, committed by Julien Muchembled

Rename getCacheUid to getCacheId method to avoid confusion.

portal_caches tool is expected to allow only Cache Factory instances inside.
Fix tests to use Cache Factory instead of Cache Bag.
parent d92311f7
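
The constraint described above — only Cache Factory objects directly inside portal_caches, with Cache Bags nested in a factory — is what the updated tests below now set up. A minimal sketch of that layout, assuming an ERP5TypeTestCase environment where self.portal is available; the test class and method names are hypothetical:

from Products.ERP5Type.tests.ERP5TypeTestCase import ERP5TypeTestCase

class TestCacheLayoutSketch(ERP5TypeTestCase):
  # Hypothetical test mirroring the containment used by the updated tests below.
  def test_cache_factory_contains_cache_bag(self):
    portal_caches = self.portal.portal_caches
    # Cache Factory is the only portal type expected directly in portal_caches.
    cache_factory = portal_caches.newContent(portal_type='Cache Factory',
                                             cache_duration=3600)
    # Cache Bags (multi-level caching) live inside a Cache Factory.
    cache_bag = cache_factory.newContent(portal_type='Cache Bag',
                                         cache_duration=3600)
    # The renamed accessor returns an id relative to portal_caches,
    # which CacheTool uses as the cache scope.
    cache_scope = cache_factory.getCacheId()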
@@ -133,14 +133,14 @@ class TestDocumentMixin(ERP5TypeTestCase):
     preference_list = self.portal.portal_preferences.contentValues(
                                                   portal_type=portal_type)
     if not preference_list:
-      # create a Cache Bag for tests
-      cache_bag = self.portal.portal_caches.newContent(portal_type = 'Cache Bag')
-      cache_bag.cache_duration = 36000
-      cache_plugin = cache_bag.newContent(portal_type='Ram Cache')
+      # create a Cache Factory for tests
+      cache_factory = self.portal.portal_caches.newContent(portal_type = 'Cache Factory')
+      cache_factory.cache_duration = 36000
+      cache_plugin = cache_factory.newContent(portal_type='Ram Cache')
       cache_plugin.cache_expire_check_interval = 54000
       preference = self.portal.portal_preferences.newContent(title="Default System Preference",
                             # use local RAM based cache as some tests need it
-                            preferred_conversion_cache_factory = cache_bag.getId(),
+                            preferred_conversion_cache_factory = cache_factory.getId(),
                             portal_type=portal_type)
     else:
       preference = preference_list[0]
...
@@ -200,12 +200,14 @@ class CacheFactory:
         return cp
     return None

-  def getCachePluginByUid(self, uid):
-    """ get cache plugin by its Uid """
+  def getCachePluginById(self, id, default=None):
+    """ get cache plugin by its id """
     for cp in self.cache_plugins:
-      if uid == cp.uid:
+      if id == cp.id:
         return cp
-    return None
+    if default is not None:
+      return default
+    raise KeyError("No such plugin exists %s" % id)

   def clearCache(self):
...
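
The new getCachePluginById only falls back when a default is explicitly passed (and not None); a missing id otherwise raises KeyError. A small standalone sketch of those lookup semantics — the _Plugin and _Factory classes and the plugin ids are made up for illustration and are not the ERP5 classes:

class _Plugin(object):
  # Stand-in for a cache plugin that only carries an id.
  def __init__(self, id):
    self.id = id

class _Factory(object):
  # Stand-in mirroring getCachePluginById() from the hunk above.
  def __init__(self, cache_plugins):
    self.cache_plugins = cache_plugins

  def getCachePluginById(self, id, default=None):
    for cp in self.cache_plugins:
      if id == cp.id:
        return cp
    if default is not None:
      return default
    raise KeyError("No such plugin exists %s" % id)

ram = _Plugin('ram_cache')
factory = _Factory([ram, _Plugin('distributed_ram_cache')])
assert factory.getCachePluginById('ram_cache') is ram
assert factory.getCachePluginById('missing', default=ram) is ram
# factory.getCachePluginById('missing') would raise KeyError

Note that because the fallback check is "default is not None", passing default=None behaves like passing no default at all and still raises.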
@@ -94,11 +94,11 @@ class BaseCache(object):
   ## Time interval (s) to check for expired objects
   cache_expire_check_interval = 60

-  def __init__(self, uid, params={}):
+  def __init__(self, id, params={}):
     self._next_cache_expire_check_at = time()
     self._cache_hit_count = 0
     self._cache_miss_count = 0
-    self.uid = uid
+    self.id = id

   def markCacheHit(self, delta=1):
     """ Mark a read operation from cache """
...
@@ -35,7 +35,7 @@ from Products.ERP5Type.Core.CacheFactory import CacheFactory
 class CacheBag(CacheFactory):
   """
-    CacheBag is a special type of CacheFactory that allows multi level caching
+    CacheBag is a special type of a CacheFactory that allows multi level caching
     in different backends describe by CachePlugin.

       CacheBag 1
@@ -59,7 +59,7 @@ class CacheBag(CacheFactory):
       data_dict = cache_plugin.get(cache_id, DEFAULT_CACHE_SCOPE, default)
       if data_dict is not None:
         value = data_dict.getValue()
-        if ram_cache_factory_plugin_list.index(cache_plugin)>0:
+        if ram_cache_factory_plugin_list.index(cache_plugin) > 0:
           # update first plugin as it's the one to be used
           # XXX: JPS we can have different update policy here based on a project requirements.
           # c0 c1 c2....cN where c0 is filled from cN
...
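
The comments in the hunk above describe the Cache Bag read path: plugins are ordered c0..cN, and a value found in a slower plugin (index > 0) is copied back into c0 so the fastest backend answers next time. A standalone sketch of that backfill pattern, using plain dicts in place of the ERP5 plugin objects (names here are illustrative only):

def bag_get(plugin_list, cache_id, default=None):
  # plugin_list is an ordered list c0..cN of dict-like backends.
  for index, plugin in enumerate(plugin_list):
    if cache_id in plugin:
      value = plugin[cache_id]
      if index > 0:
        # update first plugin as it's the one to be used next time
        plugin_list[0][cache_id] = value
      return value
  return default

c0, c1 = {}, {'x': 'value_for_x'}
assert bag_get([c0, c1], 'x') == 'value_for_x'
assert c0['x'] == 'value_for_x'  # the hit was backfilled into the first level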
@@ -61,9 +61,9 @@ class CacheFactory(XMLObject):
                     , PropertySheet.SortIndex
                     )

-  def getCacheUid(self):
+  def getCacheId(self):
     """
-      Get a common Cache Factory / Cache Bag UID in this
+      Get a common Cache Factory / Cache Bag ID in this
       case relative to portal_caches.
       It's required to use relative url (i.e. mainly ID) due
       to CachingMethod legacy.
@@ -94,12 +94,12 @@ class CacheFactory(XMLObject):
     for cache_plugin in cache_plugin_list:
       cache_plugin.set(cache_id, value)

-  def getCachePluginList(self, allowed_types=None):
+  def getCachePluginList(self, allowed_type_list=None):
     """ get ordered list of installed cache plugins in ZODB """
-    if allowed_types is None:
+    if allowed_type_list is None:
       # fall back to default ones
-      allowed_types = self.allowed_types
-    cache_plugins = self.objectValues(allowed_types)
+      allowed_type_list = self.allowed_types
+    cache_plugins = self.objectValues(allowed_type_list)
     cache_plugins = map(None, cache_plugins)
     cache_plugins.sort(key=lambda x: x.getIntIndex(0))
     return cache_plugins
@@ -107,7 +107,7 @@ class CacheFactory(XMLObject):
   security.declareProtected(Permissions.AccessContentsInformation, 'getRamCacheFactory')
   def getRamCacheFactory(self):
     """ Return RAM based cache factory """
-    cache_factory_name = self.getCacheUid()
+    cache_factory_name = self.getCacheId()
     cache_tool = self.portal_caches
     cache_factory = CachingMethod.factories.get(cache_factory_name)
     #XXX This conditional statement should be remove as soon as
...
@@ -69,9 +69,9 @@ class CacheTool(BaseTool):
     def getRamCachePlugin(cp):
       cp_meta_type = cp.meta_type
-      uid = cp.getCacheUid()
+      id = cp.getCacheId()
       if cp_meta_type == 'ERP5 Ram Cache':
-        return RamCache(uid)
+        return RamCache(id)
       if cp_meta_type == 'ERP5 Distributed Ram Cache':
         ## even thougn we have such plugin in ZODB that doens't mean
         ## we have corresponding memcache module installed
@@ -85,11 +85,11 @@ class CacheTool(BaseTool):
                      'server_max_key_length': memcached_plugin.getServerMaxKeyLength(),
                      'server_max_value_length': memcached_plugin.getServerMaxValueLength(),
                      'key_prefix': getattr(self, 'erp5_site_global_id', '')}
-        return DistributedRamCache(uid, init_dict)
+        return DistributedRamCache(id, init_dict)

     rd = {}
-    for cf in self.objectValues(['ERP5 Cache Factory', 'ERP5 Cache Bag']):
-      cache_scope = cf.getCacheUid()
+    for cf in self.objectValues('ERP5 Cache Factory'):
+      cache_scope = cf.getCacheId()
       rd[cache_scope] = {}
       rd[cache_scope]['cache_plugins'] = []
       rd[cache_scope]['cache_params'] = {}
@@ -104,7 +104,7 @@ class CacheTool(BaseTool):
       # support for cache bags which are like Cache Factory
       # i.e. provide Cache Plugins
       for cache_bag in cf.objectValues('ERP5 Cache Bag'):
-        cache_scope = cache_bag.getCacheUid()
+        cache_scope = cache_bag.getCacheId()
         rd[cache_scope] = {}
         rd[cache_scope]['cache_plugins'] = []
         rd[cache_scope]['cache_params'] = {}
...
@@ -42,7 +42,7 @@ class CacheProviderMixIn:
     """
      Get RAM based cache plugin for this ZODB cache plugin.
     """
-    return self.getParentValue().getRamCacheFactory().getCachePluginByUid(self.getCacheUid())
+    return self.getParentValue().getRamCacheFactory().getCachePluginById(self.getCacheId())

   security.declareProtected(Permissions.AccessContentsInformation, 'get')
   def get(self, cache_id, default=None):
@@ -64,9 +64,9 @@ class CacheProviderMixIn:
     cache_plugin = self._getRamCachePlugin()
     cache_plugin.set(cache_id, DEFAULT_CACHE_SCOPE, value, cache_duration)

-  def getCacheUid(self):
+  def getCacheId(self):
     """
-      Get a common Cache Factory / Cache Bag UID in this
+      Get a common Cache Factory / Cache Bag ID in this
       case relative to portal_caches.
       It's required to use relative url (i.e. mainly ID) due
       to CachingMethod legacy.
...
@@ -488,8 +488,9 @@ return 'a' * 1024 * 1024 * 25
       Check Cache Bag
     """
     portal_caches = self.portal.portal_caches
-    cache_bag = portal_caches.newContent(portal_type="Cache Bag",
+    cache_factory = portal_caches.newContent(portal_type="Cache Factory",
+                                             cache_duration=3600)
+    cache_bag = cache_factory.newContent(portal_type="Cache Bag",
                                          cache_duration=3600)
     cache_plugin1 = cache_bag.newContent(portal_type="Ram Cache")
@@ -498,6 +499,7 @@ return 'a' * 1024 * 1024 * 25
     cache_plugin2 = cache_bag.newContent(portal_type="Ram Cache")
     cache_plugin2.setIntIndex(1)
     self.tic()
+    portal_caches.updateCache()

     # test proper init
     ram_cache_factory_plugin_list = cache_bag.getRamCacheFactoryPluginList()
@@ -535,6 +537,7 @@ return 'a' * 1024 * 1024 * 25
     ram_cache1 = cache_bag1.newContent(portal_type="Ram Cache")
     ram_cache2 = cache_bag1.newContent(portal_type="Ram Cache")
     self.tic()
+    portal_caches.updateCache()

     # test get / set API
     cache_factory.set('x', 'value_for_x')
...