Commit cb889047 authored by Sebastien Robin

performance: add performance testing for workflows and improve testPerformance for reusability

- use mixin class to allow reusing common performance functions
- remove usage of quiet and run_all_test
parent 610148db
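
For context, the reuse pattern introduced here looks roughly like the sketch below. This is a minimal, hypothetical illustration, not part of the commit: only TestPerformanceMixin, ZopeTestCase._print and the foo_module attribute set up by the mixin's afterSetUp come from the actual code; the test class and the timed scenario are invented. A new suite inherits the shared setup, teardown and profiling helpers and only writes its own timed scenario, without the old quiet/run_all_test flags.

from time import time

from Products.ERP5Type.tests.testPerformance import TestPerformanceMixin
from Testing import ZopeTestCase

class TestSomeFeaturePerformance(TestPerformanceMixin):
  """Hypothetical suite reusing the shared performance helpers."""

  def getTitle(self):
    return "Some Feature Performance"

  def testCreationIsFastEnough(self):
    # afterSetUp()/beforeTearDown() inherited from the mixin already
    # provide self.foo_module and re-enable gc after the test.
    start = time()
    for _ in xrange(100):
      self.foo_module.newContent()  # the operation being measured
    message = "\n%s took %.4gs" % (self._testMethodName, time() - start)
    ZopeTestCase._print(message)    # always logged; no quiet flag needed

The actual TestWorkflowPerformance suite added by this commit (below) follows the same shape.
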
##############################################################################
#
# Copyright (c) 2002-2016 Nexedi SA and Contributors. All Rights Reserved.
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company.
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
##############################################################################
from time import time

from Products.ERP5Type.tests.testPerformance import TestPerformanceMixin
from Products.DCWorkflow.DCWorkflow import ValidationFailed
from Testing import ZopeTestCase


class TestWorkflowPerformance(TestPerformanceMixin):

  maxDiff = None

  def getTitle(self):
    return "Workflow Performance"

  def afterSetUp(self):
    super(TestWorkflowPerformance, self).afterSetUp()
    self.foo_module.manage_delObjects(list(self.foo_module.objectIds()))

  def testWorkflowActionAndGetState(self):
    foo_list = []
    foo_list_append = foo_list.append
    range_10 = range(10)
    portal_workflow = self.portal.portal_workflow
    foo_count = 100
    for x in xrange(foo_count):
      foo = self.foo_module.newContent()
      foo_list_append(foo)
    self.assertEqual('draft', foo_list[0].getSimulationState())
    start = time()
    for foo in foo_list:
      foo.getSimulationState()
      action_list = portal_workflow.listActions(object=foo)
      for x in range_10:
        try:
          portal_workflow.doActionFor(foo, 'dummy_failing_action')
        except ValidationFailed:
          pass
      portal_workflow.doActionFor(foo, 'dummy_action')
      portal_workflow.doActionFor(foo, 'validate_action')
      foo.getSimulationState()
    end = time()
    message = "\n%s took %.4gs (%s foo(s))" % (self._testMethodName,
                                               end - start, foo_count)
    print message
    ZopeTestCase._print(message)
    # some checking to make sure we tested something relevant
    self.assertEqual('validated', foo.getSimulationState())
    expected_action_id_list = ['custom_action_no_dialog', 'custom_dialog_action',
                               'display_status_action', 'dummy_action',
                               'dummy_failing_action', 'validate_action']
    expected_action_id_list.sort()
    found_action_id_list = [x['id'] for x in action_list if x['category'] == 'workflow']
    found_action_id_list.sort()
    self.assertEqual(expected_action_id_list, found_action_id_list)
    self.assertEqual(23, len(foo.Base_getWorkflowHistoryItemList('foo_workflow', display=0)))
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="Test Component" module="erp5.portal_type"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>_recorded_property_dict</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAI=</string> </persistent>
</value>
</item>
<item>
<key> <string>default_reference</string> </key>
<value> <string>testWorkflowPerformance</string> </value>
</item>
<item>
<key> <string>description</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>test.erp5.testWorkflowPerformance</string> </value>
</item>
<item>
<key> <string>portal_type</string> </key>
<value> <string>Test Component</string> </value>
</item>
<item>
<key> <string>sid</string> </key>
<value>
<none/>
</value>
</item>
<item>
<key> <string>text_content_error_message</string> </key>
<value>
<tuple/>
</value>
</item>
<item>
<key> <string>text_content_warning_message</string> </key>
<value>
<tuple/>
</value>
</item>
<item>
<key> <string>version</string> </key>
<value> <string>erp5</string> </value>
</item>
<item>
<key> <string>workflow_history</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAM=</string> </persistent>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="2" aka="AAAAAAAAAAI=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary/>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="3" aka="AAAAAAAAAAM=">
<pickle>
<global name="PersistentMapping" module="Persistence.mapping"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>data</string> </key>
<value>
<dictionary>
<item>
<key> <string>component_validation_workflow</string> </key>
<value>
<persistent> <string encoding="base64">AAAAAAAAAAQ=</string> </persistent>
</value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</pickle>
</record>
<record id="4" aka="AAAAAAAAAAQ=">
<pickle>
<global name="WorkflowHistoryList" module="Products.ERP5Type.patches.WorkflowTool"/>
</pickle>
<pickle>
<tuple>
<none/>
<list>
<dictionary>
<item>
<key> <string>action</string> </key>
<value> <string>validate</string> </value>
</item>
<item>
<key> <string>validation_state</string> </key>
<value> <string>validated</string> </value>
</item>
</dictionary>
</list>
</tuple>
</pickle>
</record>
</ZopeData>
erp5_ui_test
\ No newline at end of file
test.erp5.testWorkflowPerformance
\ No newline at end of file
erp5_performance_test
\ No newline at end of file
@@ -113,14 +113,7 @@ DO_TEST = 1
# set 1 to get profiler's result (unit_test/tests/<func_name>)
PROFILE=0

class TestPerformance(ERP5TypeTestCase, LogInterceptor):

  # Some helper methods
  quiet = 0
  run_all_test = 1

  def getTitle(self):
    return "Performance"

class TestPerformanceMixin(ERP5TypeTestCase, LogInterceptor):

  def getBusinessTemplateList(self):
    """
@@ -129,12 +122,6 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
    return ('erp5_base',
            'erp5_ui_test',)

  def getBarModule(self):
    """
    Return the bar module
    """
    return self.portal['bar_module']

  def afterSetUp(self):
    """
    Executed before each test_*.
@@ -154,10 +141,35 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
    self.bar_module = self.getBarModule()
    self.foo_module = self.portal.foo_module

  def getBarModule(self):
    """
    Return the bar module
    """
    return self.portal['bar_module']

  def profile(self, func, suffix=''):
    from cProfile import Profile
    prof_file = '%s%s' % (func.__name__, suffix)
    try:
      os.unlink(prof_file)
    except OSError:
      pass
    prof = Profile()
    prof.runcall(func)
    prof.dump_stats(prof_file)

  def beforeTearDown(self):
    # Re-enable gc at teardown.
    gc.enable()
    self.abort()

class TestPerformance(TestPerformanceMixin):

  def getTitle(self):
    return "Performance"

  def beforeTearDown(self):
    super(TestPerformance, self).beforeTearDown()
    self.bar_module.manage_delObjects(list(self.bar_module.objectIds()))
    self.foo_module.manage_delObjects(list(self.foo_module.objectIds()))
    gender = self.portal.portal_categories['gender']
@@ -165,7 +177,7 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
    gender = self.portal.portal_caches.clearAllCache()
    self.tic()

  def checkViewBarObject(self, min, max, quiet=quiet, prefix=None):
  def checkViewBarObject(self, min, max, prefix=None):
    # Some init to display form with some value
    if prefix is None:
      prefix = ''
@@ -193,7 +205,6 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
    bar.Bar_viewPerformance()
    after_view = time()
    req_time = (after_view - before_view)/100.
    if not quiet:
      print "%s time to view object form %.4f < %.4f < %.4f\n" % \
            (prefix, min, req_time, max)
    if PROFILE:
@@ -202,46 +213,19 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
    self.assertTrue(min < req_time < max,
                    '%.4f < %.4f < %.4f' % (min, req_time, max))

  def profile(self, func, suffix=''):
    from cProfile import Profile
    prof_file = '%s%s' % (func.__name__, suffix)
    try:
      os.unlink(prof_file)
    except OSError:
      pass
    prof = Profile()
    prof.runcall(func)
    prof.dump_stats(prof_file)

  def test_00_viewBarObject(self, quiet=quiet, run=run_all_test,
                            min=None, max=None):
  def test_00_viewBarObject(self, min=None, max=None):
    """
    Estimate average time to render object view
    """
    if not run : return
    if not quiet:
      message = 'Test form to view Bar object'
      LOG('Testing... ', 0, message)
    self.checkViewBarObject(MIN_OBJECT_VIEW, MAX_OBJECT_VIEW,
                            prefix='objective')

# def test_00b_currentViewBarObject(self, quiet=quiet, run=run_all_test):
#   """
#   Estimate average time to render object view and check with current values
#   """
#   if not run : return
#   if not quiet:
#     message = 'Test form to view Bar object with current values'
#     LOG('Testing... ', 0, message)
#   self.checkViewBarObject(CURRENT_MIN_OBJECT_VIEW, CURRENT_MAX_OBJECT_VIEW,
#                           prefix='current')

  def test_01_viewBarModule(self, quiet=quiet, run=run_all_test):
  def test_01_viewBarModule(self):
    """
    Estimate average time to render module view
    """
    if not run : return
    if not quiet:
      message = 'Test form to view Bar module'
      LOG('Testing... ', 0, message)
    self.tic()
@@ -293,7 +277,6 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
      add_value = add_result[key]
      min_view = MIN_MODULE_VIEW + LISTBOX_COEF * i
      max_view = MAX_MODULE_VIEW + LISTBOX_COEF * i
      if not quiet:
        print "nb objects = %s\n\tadd = %.4f < %.4f < %.4f" %(key, MIN_OBJECT_CREATION, add_value, MAX_OBJECT_CREATION)
        print "\ttic = %.4f < %.4f < %.4f" %(MIN_TIC, tic_value, MAX_TIC)
        print "\tview = %.4f < %.4f < %.4f" %(min_view, module_value, max_view)
@@ -321,7 +304,7 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
      i += 1

  def test_viewProxyField(self, quiet=quiet):
  def test_viewProxyField(self):
    # render a form with proxy fields: Foo_viewProxyField
    foo = self.foo_module.newContent(
      portal_type='Foo',
@@ -341,7 +324,6 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
    after_view = time()
    req_time = (after_view - before_view)/100.
    if not quiet:
      print "time to view proxyfield form %.4f < %.4f < %.4f\n" % \
            ( MIN_OBJECT_PROXYFIELD_VIEW,
              req_time,
@@ -356,7 +338,7 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
                      req_time,
                      MAX_OBJECT_PROXYFIELD_VIEW))

  def test_02_viewFooObjectWithManyLines(self, quiet=quiet):
  def test_02_viewFooObjectWithManyLines(self):
    """
    Estimate average time to render object view with many lines
    """
@@ -373,7 +355,6 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
    after_view = time()
    req_time = (after_view - before_view)/100.
    if not quiet:
      print "time to view object form with many lines %.4f < %.4f < %.4f\n" % \
            ( MIN_OBJECT_MANY_LINES_VIEW,
              req_time,
@@ -387,8 +368,3 @@ class TestPerformance(ERP5TypeTestCase, LogInterceptor):
                      MIN_OBJECT_MANY_LINES_VIEW,
                      req_time,
                      MAX_OBJECT_MANY_LINES_VIEW))

def test_suite():
  suite = unittest.TestSuite()
  suite.addTest(unittest.makeSuite(TestPerformance))
  return suite