Commit 6ef5cd2e authored by Chris Withers

- replace nasty and ineffective hack for dynamic connection ids in ZSQL Method cache with one that is tested and works
- simplify tests as a result
- make sure the full chain of DA.__call__ and DA._cached_result is exercised
parent 01cd806e
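At its heart, the change swaps a string-munged cache key for a plain tuple. A minimal standalone sketch of the two keying schemes the hunks below remove and add (the helper names here are illustrative, not part of the patch):

```python
# Old scheme (removed below): append a pseudo-column naming the hook
# *attribute* to the query tuple. Two different connections resolved
# by the same hook produced the same key, so the hack never actually
# discriminated between databases.
def old_cache_key(query, connection_hook):
    return query + ('\nDBConnId: %s' % connection_hook,)

# New scheme (added below): key on exactly what determines the result:
# the SQL text, the row limit, and the resolved connection id.
def new_cache_key(query, max_rows, conn_id):
    return (query, max_rows, conn_id)

print(old_cache_key(('select 1',), 'hook_method'))
# ('select 1', '\nDBConnId: hook_method')
print(new_cache_key('select 1', 1000, 'conn1'))
# ('select 1', 1000, 'conn1')
```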
@@ -352,11 +352,8 @@ class DA(
     def _searchable_result_columns(self): return self._col
 
-    def _cached_result(self, DB__, query):
-        pure_query = query
-        # we need to munge the incoming query key in the cache
-        # so that the same request to a different db is returned
-        query = query + ('\nDBConnId: %s' % self.connection_hook, )
+    def _cached_result(self, DB__, query, max_rows, conn_id):
+        cache_key = query,max_rows,conn_id
 
         # Try to fetch from cache
         if hasattr(self,'_v_cache'): cache=self._v_cache
@@ -376,15 +373,15 @@ class DA(
                     del cache[q]
                 del keys[-1]
 
-        if cache.has_key(query):
-            k, r = cache[query]
+        if cache.has_key(cache_key):
+            k, r = cache[cache_key]
             if k > t: return r
 
-        # call the pure query
-        result=apply(DB__.query, pure_query)
+        result=DB__.query(query,max_rows)
         if self.cache_time_ > 0:
-            tcache[int(now)]=query
-            cache[query]= now, result
+            tcache[int(now)]=cache_key
+            cache[cache_key]= now, result
 
         return result
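Read together, the two hunks above give `_cached_result` the following shape. This is a simplified, self-contained model rather than the patch itself: pruning is elided, and the freshness cutoff `t = now - cache_time_` is assumed from surrounding code the diff does not show:

```python
import time

class TinyQueryCache:
    """Model of the keying and expiry logic in _cached_result."""

    def __init__(self, cache_time):
        self.cache_time = cache_time
        self.cache = {}   # cache_key -> (timestamp, result)
        self.tcache = {}  # int(timestamp) -> cache_key

    def cached_result(self, db_query, query, max_rows, conn_id):
        cache_key = query, max_rows, conn_id
        now = time.time()
        t = now - self.cache_time            # oldest timestamp still fresh
        if cache_key in self.cache:
            k, r = self.cache[cache_key]
            if k > t:                        # hit, and still fresh
                return r
        result = db_query(query, max_rows)   # miss: ask the database
        if self.cache_time > 0:
            self.tcache[int(now)] = cache_key
            self.cache[cache_key] = now, result
        return result
```

Note that `tcache` is keyed by whole seconds, so distinct queries arriving within one second overwrite each other's `tcache` slot while their `cache` entries linger; that is the leak the XXX comments in the tests below point at.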
@@ -450,8 +447,9 @@ class DA(
         if src__: return query
 
         if self.cache_time_ > 0 and self.max_cache_ > 0:
-            result=self._cached_result(DB__, (query, self.max_rows_))
-        else: result=DB__.query(query, self.max_rows_)
+            result=self._cached_result(DB__, query, self.max_rows_, c)
+        else:
+            result=DB__.query(query, self.max_rows_)
 
         if hasattr(self, '_v_brain'): brain=self._v_brain
         else:
@@ -6,8 +6,12 @@ class DummyDB:
 
     conn_num = 1
 
-    def query(self,*args):
-        return ('result for:',)+args
+    result = None
+
+    def query(self,query,max_rows):
+        if self.result:
+            return self.result
+        return 'result for ' + query
 
     def hook_method(self):
         conn_to_use = 'conn'+str(self.conn_num)
@@ -34,13 +38,13 @@ class TestCaching(TestCase):
         self.da.cache_time_ = 10
         self.da.max_cache_ = 2
 
-    def _do_query(self,query,time):
+    def _do_query(self,query,t):
         try:
-            self.DA.time = DummyTime(time)
-            result = self.da._cached_result(DummyDB(),query)
+            self.DA.time = DummyTime(t)
+            result = self.da._cached_result(DummyDB(),query,1,'conn_id')
         finally:
             self.DA.time = time
-        self.assertEqual(result,('result for:',)+query)
+        self.assertEqual(result,'result for '+query)
 
     def _check_mapping(self,expected,actual):
         missing = []
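`_do_query` swaps the `time` function used by the DA module for a frozen double so cache timestamps are deterministic. The rename from `time` to `t` also fixes a subtlety: the old parameter name shadowed the real `time` function, so the `finally:` clause appears to have restored the float argument rather than the function. The double itself is defined elsewhere in the test module; its assumed shape:

```python
class DummyTime:
    # a callable frozen at a fixed moment, standing in for the time
    # function the DA module uses to stamp cache entries
    def __init__(self, t):
        self.t = float(t)
    def __call__(self):
        return self.t
```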
@@ -101,10 +105,10 @@ class TestCaching(TestCase):
         # query, but where the item returned is always in the cache
         self._check_cache({},{})
         for t in range(1,6):
-            self._do_query(('query',),t)
+            self._do_query('query',t)
         self._check_cache(
-            {('query', '\nDBConnId: None'): (1,('result for:', 'query'))},
-            {1: ('query', '\nDBConnId: None')}
+            {('query',1,'conn_id'): (1,'result for query')},
+            {1: ('query',1,'conn_id')}
             )
 
     def test_same_query_same_second(self):
@@ -115,10 +119,10 @@
         self._check_cache({},{})
         for t in range(11,16,1):
             t = float(t)/10
-            self._do_query(('query',),t)
+            self._do_query('query',t)
         self._check_cache(
-            {('query', '\nDBConnId: None'): (1.1,('result for:', 'query'))},
-            {1: ('query', '\nDBConnId: None')}
+            {('query',1,'conn_id'): (1.1,'result for query')},
+            {1: ('query',1,'conn_id')}
             )
 
     def test_different_queries_different_second(self):
@@ -128,49 +132,49 @@
         # dumped due to the replacement of Bucket with dict
         self._check_cache({},{})
         # one
-        self._do_query(('query1',),1.1)
+        self._do_query('query1',1.1)
         self._check_cache(
-            {('query1', '\nDBConnId: None'): (1.1,('result for:', 'query1'))},
-            {1: ('query1', '\nDBConnId: None')}
+            {('query1',1,'conn_id'): (1.1,'result for query1')},
+            {1: ('query1',1,'conn_id')}
             )
         # two
-        self._do_query( ('query2',),3.2)
+        self._do_query( 'query2',3.2)
         self._check_cache(
-            {('query1', '\nDBConnId: None'): (1.1,('result for:', 'query1')),
-             ('query2', '\nDBConnId: None'): (3.2,('result for:', 'query2')),},
-            {1: ('query1', '\nDBConnId: None'),
-             3: ('query2', '\nDBConnId: None'),}
+            {('query1',1,'conn_id'): (1.1,'result for query1'),
+             ('query2',1,'conn_id'): (3.2,'result for query2'),},
+            {1: ('query1',1,'conn_id'),
+             3: ('query2',1,'conn_id'),}
             )
         # three
-        self._do_query(('query3',),4.3)
+        self._do_query('query3',4.3)
         self._check_cache(
-            {('query1', '\nDBConnId: None'): (1.1,('result for:', 'query1')),
-             ('query2', '\nDBConnId: None'): (3.2,('result for:', 'query2')),
-             ('query3', '\nDBConnId: None'): (4.3,('result for:', 'query3')),},
-            {1: ('query1', '\nDBConnId: None'),
-             3: ('query2', '\nDBConnId: None'),
-             4: ('query3', '\nDBConnId: None'),}
+            {('query1',1,'conn_id'): (1.1,'result for query1'),
+             ('query2',1,'conn_id'): (3.2,'result for query2'),
+             ('query3',1,'conn_id'): (4.3,'result for query3'),},
+            {1: ('query1',1,'conn_id'),
+             3: ('query2',1,'conn_id'),
+             4: ('query3',1,'conn_id'),}
             )
         # four - now we drop our first cache entry, this is an off-by-one error
-        self._do_query(('query4',),8.4)
+        self._do_query('query4',8.4)
         self._check_cache(
-            {('query2', '\nDBConnId: None'): (3.2,('result for:', 'query2')),
-             ('query3', '\nDBConnId: None'): (4.3,('result for:', 'query3')),
-             ('query4', '\nDBConnId: None'): (8.4,('result for:', 'query4')),},
-            {3: ('query2', '\nDBConnId: None'),
-             4: ('query3', '\nDBConnId: None'),
-             8: ('query4', '\nDBConnId: None'),}
+            {('query2',1,'conn_id'): (3.2,'result for query2'),
+             ('query3',1,'conn_id'): (4.3,'result for query3'),
+             ('query4',1,'conn_id'): (8.4,'result for query4'),},
+            {3: ('query2',1,'conn_id'),
+             4: ('query3',1,'conn_id'),
+             8: ('query4',1,'conn_id'),}
             )
         # five - now we drop another cache entry
-        self._do_query(('query5',),9.5)
+        self._do_query('query5',9.5)
         # XXX oops - because dicts have an arbitrary ordering, we dumped the wrong key!
         self._check_cache(
-            {('query3', '\nDBConnId: None'): (4.3,('result for:', 'query3')),
-             ('query2', '\nDBConnId: None'): (3.2,('result for:', 'query2')),
-             ('query5', '\nDBConnId: None'): (9.5,('result for:', 'query5')),},
-            {4: ('query3', '\nDBConnId: None'),
-             3: ('query2', '\nDBConnId: None'),
-             9: ('query5', '\nDBConnId: None'),}
+            {('query3',1,'conn_id'): (4.3,'result for query3'),
+             ('query2',1,'conn_id'): (3.2,'result for query2'),
+             ('query5',1,'conn_id'): (9.5,'result for query5'),},
+            {4: ('query3',1,'conn_id'),
+             3: ('query2',1,'conn_id'),
+             9: ('query5',1,'conn_id'),}
             )
 
     def test_different_queries_same_second(self):
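The wrong-key eviction flagged above comes from the pruning loop, partially visible as context in the second hunk (`del cache[q]` / `del keys[-1]`). A sketch of its assumed shape: it walks `tcache.keys()` from the back, but the plain dicts that replaced the BTree Bucket (see the comment at the top of the hunk above) return keys in arbitrary order, so the back of the list is not necessarily the oldest entry:

```python
def prune(cache, tcache, t, max_cache):
    # Sketch under assumptions, not the patch: drop entries that are
    # stale or over capacity. A Bucket returned keys() sorted; a plain
    # dict makes no such promise, so the wrong entry can be evicted,
    # as test_different_queries_different_second demonstrates.
    keys = list(tcache.keys())
    keys.reverse()
    while keys and (len(keys) > max_cache or keys[-1] < t):
        key = keys[-1]
        q = tcache[key]
        del tcache[key]
        if q in cache and int(cache[q][0]) == key:
            del cache[q]
        del keys[-1]
```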
@@ -179,77 +183,54 @@
         # XXX This demonstrates a memory leak in the cache code
         self._check_cache({},{})
         # one
-        self._do_query(('query1',),1.0)
+        self._do_query('query1',1.0)
         self._check_cache(
-            {('query1', '\nDBConnId: None'): (1.0,('result for:', 'query1'))},
-            {1: ('query1', '\nDBConnId: None')}
+            {('query1',1,'conn_id'): (1.0,'result for query1')},
+            {1: ('query1',1,'conn_id')}
             )
         # two
-        self._do_query( ('query2',),1.1)
+        self._do_query( 'query2',1.1)
         self._check_cache(
-            {('query1', '\nDBConnId: None'): (1.0,('result for:', 'query1')),
-             ('query2', '\nDBConnId: None'): (1.1,('result for:', 'query2')),},
-            {1.0: ('query2', '\nDBConnId: None'),}
+            {('query1',1,'conn_id'): (1.0,'result for query1'),
+             ('query2',1,'conn_id'): (1.1,'result for query2'),},
+            {1.0: ('query2',1,'conn_id'),}
             )
         # three
-        self._do_query(('query3',),1.2)
+        self._do_query('query3',1.2)
         self._check_cache(
-            {('query1', '\nDBConnId: None'): (1,('result for:', 'query1')),
-             ('query2', '\nDBConnId: None'): (1.1,('result for:', 'query2')),
-             ('query3', '\nDBConnId: None'): (1.2,('result for:', 'query3')),},
-            {1: ('query3', '\nDBConnId: None'),}
+            {('query1',1,'conn_id'): (1,'result for query1'),
+             ('query2',1,'conn_id'): (1.1,'result for query2'),
+             ('query3',1,'conn_id'): (1.2,'result for query3'),},
+            {1: ('query3',1,'conn_id'),}
             )
         # four - now we drop our first cache entry, this is an off-by-one error
-        self._do_query(('query4',),1.3)
+        self._do_query('query4',1.3)
         self._check_cache(
             # XXX - oops, why is query1 here still?
-            {('query1', '\nDBConnId: None'): (1,('result for:', 'query1')),
-             ('query2', '\nDBConnId: None'): (1.1,('result for:', 'query2')),
-             ('query3', '\nDBConnId: None'): (1.2,('result for:', 'query3')),
-             ('query4', '\nDBConnId: None'): (1.3,('result for:', 'query4')),},
-            {1: ('query4', '\nDBConnId: None'),}
+            {('query1',1,'conn_id'): (1,'result for query1'),
+             ('query2',1,'conn_id'): (1.1,'result for query2'),
+             ('query3',1,'conn_id'): (1.2,'result for query3'),
+             ('query4',1,'conn_id'): (1.3,'result for query4'),},
+            {1: ('query4',1,'conn_id'),}
             )
         # five - now we drop another cache entry
-        self._do_query(('query5',),1.4)
+        self._do_query('query5',1.4)
         self._check_cache(
             # XXX - oops, why are query1 and query2 here still?
-            {('query1', '\nDBConnId: None'): (1,('result for:', 'query1')),
-             ('query2', '\nDBConnId: None'): (1.1,('result for:', 'query2')),
-             ('query3', '\nDBConnId: None'): (1.2,('result for:', 'query3')),
-             ('query4', '\nDBConnId: None'): (1.3,('result for:', 'query4')),
-             ('query5', '\nDBConnId: None'): (1.4,('result for:', 'query5')),},
-            {1: ('query5', '\nDBConnId: None'),}
-            )
-
-    def test_connection_hook(self):
-        # XXX exercises the nonsense of the connection id cache discriminator
-        self._do_query(('query1',),1.1)
-        # XXX this should be '\nDBConnId: conn_id'
-        self._check_cache(
-            {('query1', '\nDBConnId: None'): (1.1,('result for:', 'query1'))},
-            {1: ('query1', '\nDBConnId: None')}
-            )
-        del self.da._v_cache
-        self.da.connection_hook='hook_method'
-        # XXX this should be '\nDBConnId: conn1'
-        self._do_query(('query1',),1.1)
-        self._check_cache(
-            {('query1', '\nDBConnId: hook_method'): (1.1,('result for:', 'query1'))},
-            {1: ('query1', '\nDBConnId: hook_method')}
-            )
-        del self.da._v_cache
-        # XXX this should be '\nDBConnId: conn2'
-        self._do_query(('query1',),1.1)
-        self._check_cache(
-            {('query1', '\nDBConnId: hook_method'): (1.1,('result for:', 'query1'))},
-            {1: ('query1', '\nDBConnId: hook_method')}
+            {('query1',1,'conn_id'): (1,'result for query1'),
+             ('query2',1,'conn_id'): (1.1,'result for query2'),
+             ('query3',1,'conn_id'): (1.2,'result for query3'),
+             ('query4',1,'conn_id'): (1.3,'result for query4'),
+             ('query5',1,'conn_id'): (1.4,'result for query5'),},
+            {1: ('query5',1,'conn_id'),}
             )
 
 class DummyDA:
 
     def __call__(self):
-        # we return None here, because this should never actually be called
-        return None
+        conn = DummyDB()
+        conn.result = ((),())
+        return conn
 
     sql_quote__ = "I don't know what this is."
@@ -264,8 +245,8 @@ class Hook:
 
 class TestCacheKeys(TestCase):
 
-    def _cached_result(self,DB__,query):
-        self.cache_key = query
+    def _cached_result(self,DB__,query,row_count,conn_id):
+        self.cache_key = query,row_count,conn_id
         # we return something that can be safely turned into an empty Result
         return ((),())
@@ -280,27 +261,40 @@ class TestCacheKeys(TestCase):
     def test_default(self):
         self.da()
-        self.assertEqual(self.cache_key,('some sql',1000))
+        self.assertEqual(self.cache_key,('some sql',1000,'conn_id'))
 
     def test_different_max_rows(self):
         self.da.max_rows_ = 123
         self.da()
-        self.assertEqual(self.cache_key,('some sql',123))
+        self.assertEqual(self.cache_key,('some sql',123,'conn_id'))
 
     def test_connection_hook(self):
         self.da.connection_hook = 'hook_method'
         self.da.hook_method = Hook()
         self.da.conn1 = DummyDA()
         self.da()
-        # XXX the connection id should be added to the cache key here
-        self.assertEqual(self.cache_key,('some sql',1000))
+        self.assertEqual(self.cache_key,('some sql',1000,'conn1'))
         self.da.conn2 = DummyDA()
         self.da()
-        # XXX the connection id should be added to the cache key here
-        self.assertEqual(self.cache_key,('some sql',1000))
+        self.assertEqual(self.cache_key,('some sql',1000,'conn2'))
+
+class TestFullChain(TestCase):
+
+    def test_full_chain(self):
+        from Shared.DC.ZRDB.DA import DA
+        self.da = DA('da','title','conn_id','arg1 arg2','some sql')
+        self.da.conn_id = DummyDA()
+        # These need to be set so DA.__call__ tries for a cached result
+        self.da.cache_time_ = 1
+        self.da.max_cache_ = 1
+        # run the method, exercising both DA.__call__ and DA._cached_result
+        # currently all this checks is that DA._cached_result's call signature
+        # matches that expected by DA.__call__
+        self.da()
 
 def test_suite():
     suite = TestSuite()
     suite.addTest(makeSuite(TestCaching))
     suite.addTest(makeSuite(TestCacheKeys))
+    suite.addTest(makeSuite(TestFullChain))
     return suite
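For reference, the `c` handed to `_cached_result` in the first file is the resolved connection id. A condensed sketch of the resolution these tests drive, assuming `connection_hook` names a method on the DA that returns the id of the connection to use (the Hook double here answers 'conn1', then 'conn2'):

```python
def resolve_conn_id(da):
    # hypothetical condensation of the logic in DA.__call__
    if getattr(da, 'connection_hook', None):
        # the hook is a method on the DA returning the id to use
        return getattr(da, da.connection_hook)()
    return da.connection_id   # default: the id given at construction
```

Because the resolved id, rather than the hook's name, now lands in the cache key, a result fetched through conn1 can no longer be served to a call that resolves to conn2, which is exactly what TestCacheKeys.test_connection_hook pins down.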