Commit 196a98b1 authored by Yoshinori Okuji

Do not use hasattr, because hasattr swallows exceptions.


git-svn-id: https://svn.erp5.org/repos/public/erp5/trunk@8156 20353a03-c40f-0410-a6d1-a30d3c3de9de
parent fe27aada
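For context on the change below: under Python 2 (which ERP5 ran on at the time), hasattr() returns False whenever any exception is raised during the attribute lookup, so a real failure while acquiring a ZSQL method is silently reported as "the method does not exist". getattr() with a default only maps AttributeError to that default; every other exception propagates. The minimal sketch below is not ERP5 code; the Tool class and the RuntimeError are hypothetical stand-ins used only to illustrate why each if hasattr(activity_tool, 'SQL...') guard in the diff is replaced by a getattr(activity_tool, 'SQL...', None) check.

# Minimal sketch, not part of ERP5: Tool and the RuntimeError below are
# hypothetical stand-ins for an ActivityTool whose ZSQL method lookup
# fails for a reason unrelated to the attribute being missing.
class Tool(object):
    @property
    def SQLDict_readMessage(self):
        # Simulate a genuine failure during attribute lookup,
        # e.g. a temporary database error raised during acquisition.
        raise RuntimeError('database temporarily unavailable')

tool = Tool()

# hasattr: on Python 2 it catches *every* exception and returns False,
# so the RuntimeError is discarded and the caller wrongly concludes
# that the SQL method is simply not installed.
try:
    print(hasattr(tool, 'SQLDict_readMessage'))   # Python 2 prints False
except RuntimeError:
    # Python 3 narrowed hasattr to AttributeError only, so the
    # error escapes here instead of being swallowed.
    print('hasattr let the error escape (Python 3 behaviour)')

# getattr with a default: only AttributeError becomes the default value;
# any other exception propagates, so real problems stay visible.
try:
    readMessage = getattr(tool, 'SQLDict_readMessage', None)
except RuntimeError as e:
    print('getattr surfaced the real error: %s' % e)
else:
    if readMessage is None:
        print('method genuinely missing; nothing to dequeue')

With the getattr(..., None) form used in the diff, a genuinely missing method still short-circuits (for example the early return 1 in dequeueMessage), while database or conflict errors propagate to the caller instead of being mistaken for an absent method.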
@@ -198,16 +198,19 @@ class SQLDict(RAMDict):
# Queue semantic
def dequeueMessage(self, activity_tool, processing_node):
if hasattr(activity_tool,'SQLDict_readMessage'):
readMessage = getattr(activity_tool, 'SQLDict_readMessage', None)
if readMessage is None:
return 1
now_date = DateTime()
priority = random.choice(priority_weight)
# Try to find a message at given priority level which is scheduled for now
result = activity_tool.SQLDict_readMessage(processing_node=processing_node, priority=priority,
result = readMessage(processing_node=processing_node, priority=priority,
to_date=now_date)
if len(result) == 0:
# If empty, take any message which is scheduled for now
priority = None
result = activity_tool.SQLDict_readMessage(processing_node=processing_node, priority=priority, to_date=now_date)
result = readMessage(processing_node=processing_node, priority=priority, to_date=now_date)
if len(result) == 0:
# If the result is still empty, shift the dates so that SQLDict can dispatch pending active
# objects quickly.
@@ -251,7 +254,7 @@ class SQLDict(RAMDict):
if count < MAX_GROUPED_OBJECTS:
# Retrieve objects which have the same group method.
result = activity_tool.SQLDict_readMessage(processing_node = processing_node, priority = priority,
result = readMessage(processing_node = processing_node, priority = priority,
to_date = now_date, group_method_id = group_method_id,
order_validation_text = order_validation_text)
#LOG('SQLDict dequeueMessage', 0, 'result = %d' % (len(result)))
@@ -344,10 +347,11 @@ class SQLDict(RAMDict):
return 1
def hasActivity(self, activity_tool, object, **kw):
if hasattr(activity_tool,'SQLDict_readMessageList'):
hasMessage = getattr(activity_tool, 'SQLDict_hasMessage', None)
if hasMessage is not None:
if object is not None:
my_object_path = '/'.join(object.getPhysicalPath())
result = activity_tool.SQLDict_hasMessage(path=my_object_path, **kw)
result = hasMessage(path=my_object_path, **kw)
if len(result) > 0:
return result[0].message_count > 0
else:
@@ -369,7 +373,8 @@ class SQLDict(RAMDict):
path = '/'.join(object_path)
# LOG('Flush', 0, str((path, invoke, method_id)))
method_dict = {}
if hasattr(activity_tool,'SQLDict_readMessageList'):
readMessageList = getattr(activity_tool, 'SQLDict_readMessageList', None)
if readMessageList is not None:
# Parse each message in registered
for m in activity_tool.getRegisteredMessageList(self):
if list(m.object_path) == list(object_path) and (method_id is None or method_id == m.method_id):
@@ -391,7 +396,7 @@ class SQLDict(RAMDict):
raise ActivityFlushError, (
'The document %s does not exist' % path)
# Parse each message in SQL dict
result = activity_tool.SQLDict_readMessageList(path=path, method_id=method_id,
result = readMessageList(path=path, method_id=method_id,
processing_node=None,include_processing=0)
for line in result:
path = line.path
@@ -422,8 +427,10 @@ class SQLDict(RAMDict):
def getMessageList(self, activity_tool, processing_node=None,include_processing=0,**kw):
# YO: reading all lines might cause a deadlock
message_list = []
if hasattr(activity_tool,'SQLDict_readMessageList'):
result = activity_tool.SQLDict_readMessageList(path=None, method_id=None, processing_node=None, to_processing_date=None,include_processing=include_processing)
readMessageList = getattr(activity_tool, 'SQLDict_readMessageList', None)
if readMessageList is not None:
result = readMessageList(path=None, method_id=None, processing_node=None,
to_processing_date=None,include_processing=include_processing)
for line in result:
m = self.loadMessage(line.message, uid = line.uid)
m.processing_node = line.processing_node
@@ -435,8 +442,9 @@ class SQLDict(RAMDict):
def dumpMessageList(self, activity_tool):
# Dump all messages in the table.
message_list = []
if hasattr(activity_tool, 'SQLDict_dumpMessageList'):
result = activity_tool.SQLDict_dumpMessageList()
dumpMessageList = getattr(activity_tool, 'SQLDict_dumpMessageList', None)
if dumpMessageList is not None:
result = dumpMessageList()
for line in result:
m = self.loadMessage(line.message, uid = line.uid)
message_list.append(m)
@@ -444,7 +452,8 @@ class SQLDict(RAMDict):
def distribute(self, activity_tool, node_count):
processing_node = 1
if hasattr(activity_tool,'SQLDict_readMessageList'):
readMessageList = getattr(activity_tool, 'SQLDict_readMessageList', None)
if readMessageList is not None:
now_date = DateTime()
if (now_date - self.max_processing_date) > MAX_PROCESSING_TIME:
# Sticky processing messages should be set back to non processing
@@ -452,7 +461,7 @@ class SQLDict(RAMDict):
self.max_processing_date = now_date
else:
max_processing_date = None
result = activity_tool.SQLDict_readMessageList(path=None, method_id=None, processing_node = -1,
result = readMessageList(path=None, method_id=None, processing_node = -1,
to_processing_date = max_processing_date,
include_processing=0) # Only assign non assigned messages
get_transaction().commit() # Release locks before starting a potentially long calculation
@@ -74,17 +74,20 @@ class SQLQueue(RAMQueue):
activity_tool.SQLQueue_delMessage(uid = m.uid)
def dequeueMessage(self, activity_tool, processing_node):
if hasattr(activity_tool,'SQLQueue_readMessageList'):
readMessage = getattr(activity_tool, 'SQLQueue_readMessage', None)
if readMessage is None:
return 1
now_date = DateTime()
# Next processing date in case of error
next_processing_date = now_date + float(VALIDATION_ERROR_DELAY)/86400
priority = random.choice(priority_weight)
# Try to find a message at given priority level
result = activity_tool.SQLQueue_readMessage(processing_node=processing_node, priority=priority,
result = readMessage(processing_node=processing_node, priority=priority,
to_date=now_date)
if len(result) == 0:
# If empty, take any message
result = activity_tool.SQLQueue_readMessage(processing_node=processing_node, priority=None,to_date=now_date)
result = readMessage(processing_node=processing_node, priority=None,to_date=now_date)
if len(result) > 0:
line = result[0]
path = line.path
@@ -136,10 +139,11 @@ class SQLQueue(RAMQueue):
return 1
def hasActivity(self, activity_tool, object, **kw):
if hasattr(activity_tool,'SQLQueue_readMessageList'):
hasMessage = getattr(activity_tool, 'SQLQueue_hasMessage', None)
if hasMessage is not None:
if object is not None:
my_object_path = '/'.join(object.getPhysicalPath())
result = activity_tool.SQLQueue_hasMessage(path=my_object_path, **kw)
result = hasMessage(path=my_object_path, **kw)
if len(result) > 0:
return result[0].message_count > 0
else:
@@ -158,7 +162,8 @@ class SQLQueue(RAMQueue):
NOTE: committing is very likely nonsense here. We should just avoid flushing as much as possible
"""
if hasattr(activity_tool,'SQLQueue_readMessageList'):
readMessageList = getattr(activity_tool, 'SQLQueue_readMessageList', None)
if readMessageList is not None:
#return # Do nothing here to prevent overlocking
path = '/'.join(object_path)
# Parse each message in registered
@@ -168,7 +173,7 @@ class SQLQueue(RAMQueue):
activity_tool.unregisterMessage(self, m)
# Parse each message in SQL queue
#LOG('Flush', 0, str((path, invoke, method_id)))
result = activity_tool.SQLQueue_readMessageList(path=path, method_id=method_id,processing_node=None)
result = readMessageList(path=path, method_id=method_id,processing_node=None)
#LOG('Flush', 0, str(len(result)))
method_dict = {}
for line in result:
@@ -202,8 +207,9 @@ class SQLQueue(RAMQueue):
def getMessageList(self, activity_tool, processing_node=None,**kw):
message_list = []
if hasattr(activity_tool,'SQLQueue_readMessageList'):
result = activity_tool.SQLQueue_readMessageList(path=None, method_id=None, processing_node=None)
readMessageList = getattr(activity_tool, 'SQLQueue_readMessageList', None)
if readMessageList is not None:
result = readMessageList(path=None, method_id=None, processing_node=None)
for line in result:
m = self.loadMessage(line.message)
m.processing_node = line.processing_node
@@ -214,8 +220,9 @@ class SQLQueue(RAMQueue):
def dumpMessageList(self, activity_tool):
# Dump all messages in the table.
message_list = []
if hasattr(activity_tool, 'SQLQueue_dumpMessageList'):
result = activity_tool.SQLQueue_dumpMessageList()
dumpMessageList = getattr(activity_tool, 'SQLQueue_dumpMessageList', None)
if dumpMessageList is not None:
result = dumpMessageList()
for line in result:
m = self.loadMessage(line.message, uid = line.uid)
message_list.append(m)
@@ -223,8 +230,9 @@ class SQLQueue(RAMQueue):
def distribute(self, activity_tool, node_count):
processing_node = 1
if hasattr(activity_tool,'SQLQueue_readMessageList'):
result = activity_tool.SQLQueue_readMessageList(path=None, method_id=None, processing_node = -1) # Only assign non assigned messages
readMessageList = getattr(activity_tool, 'SQLQueue_readMessageList', None)
if readMessageList is not None:
result = readMessageList(path=None, method_id=None, processing_node = -1) # Only assign non assigned messages
#LOG('distribute count',0,str(len(result)) )
#LOG('distribute count',0,str(map(lambda x:x.uid, result)))
#get_transaction().commit() # Release locks before starting a potentially long calculation