Commit 1a08de99 authored by Vincent Pelletier

Reindent mq.py with 4 spaces.


git-svn-id: https://svn.erp5.org/repos/neo/branches/prototype3@925 71dcc9de-d417-0410-9af5-da40c76e7ee4
parent 8efbb110
@@ -33,231 +33,231 @@ Multi-Queue Cache Algorithm.
from math import log

class Element(object):
    """
    This class defines an element of a FIFO buffer.
    """
    pass

class FIFO(object):
    """
    This class implements a FIFO buffer.
    """

    def __init__(self):
        self._head = None
        self._tail = None
        self._len = 0

    def __len__(self):
        return self._len

    def append(self):
        element = Element()
        element.next = None
        element.prev = self._tail
        if self._tail is not None:
            self._tail.next = element
        self._tail = element
        if self._head is None:
            self._head = element
        self._len += 1
        return element

    def head(self):
        return self._head
    def tail(self):
        return self._tail

    def shift(self):
        element = self._head
        if element is None:
            return None
        del self[element]
        del element.next
        del element.prev
        return element

    def __delitem__(self, element):
        if element.next is None:
            self._tail = element.prev
        else:
            element.next.prev = element.prev
        if element.prev is None:
            self._head = element.next
        else:
            element.prev.next = element.next
        self._len -= 1
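For readers skimming the diff: the FIFO above is an intrusive doubly linked list, so append() hands back an Element that __delitem__ can later unlink in O(1). A quick interactive sketch of that behaviour (illustrative only, not part of this commit):

    >>> fifo = FIFO()
    >>> first = fifo.append()     # each append() returns an Element handle
    >>> second = fifo.append()
    >>> len(fifo)
    2
    >>> del fifo[first]           # O(1) removal by handle, no search needed
    >>> fifo.head() is second
    True
    >>> fifo.shift() is second
    True
    >>> len(fifo)
    0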
class Data(object):
    """
    Data for each element in a FIFO buffer.
    """
    pass

def sizeof(o):
    """This function returns the estimated size of an object."""
    if isinstance(o, tuple):
        return sum((len(s)+16 for s in o))
    else:
        return len(o)+16
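The estimate is just the payload length plus a fixed 16-byte overhead per string, summed over the members of a tuple; for example (illustrative, not part of this commit):

    >>> sizeof("abcde")            # one string: len + 16 bytes of overhead
    21
    >>> sizeof(("ab", "cde"))      # tuple: overhead counted per member
    37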
class MQ(object):
    """
    This class manages cached data by a variant of Multi-Queue.

    This class caches various sizes of objects. Here are some considerations:

    - Expired objects are not really deleted immediately. But if GC is invoked too often,
      it degrades the performance significantly.

    - If large objects are cached, the number of cached objects decreases. This might affect
      the cache hit ratio. It might be better to tweak a buffer level according to the size of
      an object.

    - Stored values must be strings.

    - The size calculation is not accurate.
    """
    def __init__(self, life_time=10000, buffer_levels=9, max_history_size=100000, max_size=20*1024*1024):
        self._history_buffer = FIFO()
        self._cache_buffers = []
        for level in range(buffer_levels):
            self._cache_buffers.append(FIFO())
        self._data = {}
        self._time = 0
        self._life_time = life_time
        self._buffer_levels = buffer_levels
        self._max_history_size = max_history_size
        self._max_size = max_size
        self._size = 0

    def has_key(self, id):
        if id in self._data:
            data = self._data[id]
            if data.level >= 0:
                return 1
        return 0

    __contains__ = has_key

    def fetch(self, id):
        """
        Fetch a value associated with the id.
        """
        data = self._data[id]
        if data.level >= 0:
            value = data.value
            self._size -= sizeof(value) # XXX inaccurate
            self.store(id, value)
            return value
        raise KeyError(id)

    __getitem__ = fetch

    def get(self, id, d=None):
        try:
            return self.fetch(id)
        except KeyError:
            return d
    def _evict(self, id):
        """
        Evict an element to the history buffer.
        """
        data = self._data[id]
        self._size -= sizeof(data.value) # XXX inaccurate
        del self._cache_buffers[data.level][data.element]
        element = self._history_buffer.append()
        data.level = -1
        data.element = element
        del data.value
        del data.expire_time
        element.data = data
        if len(self._history_buffer) > self._max_history_size:
            element = self._history_buffer.shift()
            del self._data[element.data.id]
    def store(self, id, value):
        cache_buffers = self._cache_buffers

        try:
            data = self._data[id]
            level, element, counter = data.level, data.element, data.counter + 1
            if level >= 0:
                del cache_buffers[level][element]
            else:
                del self._history_buffer[element]
        except KeyError:
            counter = 1

        # XXX It might be better to adjust the level according to the object size.
        level = min(int(log(counter, 2)), self._buffer_levels - 1)
        element = cache_buffers[level].append()
        data = Data()
        data.id = id
        data.expire_time = self._time + self._life_time
        data.level = level
        data.element = element
        data.value = value
        data.counter = counter
        element.data = data
        self._data[id] = data
        self._size += sizeof(value) # XXX inaccurate
        del value

        self._time += 1

        # Expire old elements.
        time = self._time
        for level in xrange(self._buffer_levels):
            cache_buffer = cache_buffers[level]
            head = cache_buffer.head()
            if head is not None and head.data.expire_time < time:
                del cache_buffer[head]
                data = head.data
                if level > 0:
                    new_level = level - 1
                    element = cache_buffers[new_level].append()
                    element.data = data
                    data.expire_time = time + self._life_time
                    data.level = new_level
                    data.element = element
                else:
                    self._evict(data.id)

        # Limit the size.
        size = self._size
        max_size = self._max_size
        if size > max_size:
            for cache_buffer in cache_buffers:
                while size > max_size:
                    element = cache_buffer.shift()
                    if element is None:
                        break
                    data = element.data
                    del self._data[data.id]
                    size -= sizeof(data.value) # XXX inaccurate
                    del data.value
                if size <= max_size:
                    break
            self._size = size

    __setitem__ = store
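Within store(), the queue level grows with the logarithm of the access counter and is capped at buffer_levels - 1, so with the default nine levels an entry needs roughly twice as many accesses for each promotion (illustrative values, not part of this commit):

    >>> from math import log
    >>> [min(int(log(counter, 2)), 8) for counter in (1, 2, 3, 5, 1000)]
    [0, 1, 1, 2, 8]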
    def invalidate(self, id):
        if id in self._data:
            data = self._data[id]
            if data.level >= 0:
                del self._cache_buffers[data.level][data.element]
                self._evict(id)
                return
        raise KeyError, "%s was not found in the cache" % id
    __delitem__ = invalidate

# Here is a test.
...
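To make the mapping-style API of the class above concrete, here is a rough usage sketch (the key and value strings are made up; this is not the test referenced above):

    >>> cache = MQ(max_size=1024)                     # small limit, just for the sketch
    >>> cache['some-oid'] = 'serialized object data'  # store() via __setitem__
    >>> 'some-oid' in cache                           # has_key() via __contains__
    True
    >>> cache['some-oid']                             # fetch() re-stores and promotes the entry
    'serialized object data'
    >>> cache.get('missing-oid') is None              # get() swallows the KeyError
    True
    >>> del cache['some-oid']                         # invalidate() pushes it to the history buffer
    >>> 'some-oid' in cache
    False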