Commit 283859d2 authored by Benjamin Blanc

scalability: runScalabilityTestSuite: add simplistic scalability measure

parent 0eb4e8a6
@@ -6,6 +6,8 @@ import os
 import time
 import sys
 import multiprocessing
+import subprocess
+import signal
 import errno
 import json
 import logging
@@ -19,6 +21,10 @@ from erp5.util.testnode import Utils
 from subprocess import call
 LOG_FILE_PREFIX = "performance_tester_erp5"
+# Duration of a test case
+TEST_CASE_DURATION = 60
+# Maximum limit of documents to create during a test case
+MAX_DOCUMENTS = 100000
 class ScalabilityTest(object):
   def __init__(self, data, test_result):
@@ -116,7 +122,7 @@ class ScalabilityLauncher(object):
     """
     """
     complete_scheme = os.path.join(path, scheme)
-    file_path_list = glob.glob(scheme)
+    file_path_list = glob.glob(complete_scheme)
     content_list = []
     for file_path in file_path_list:
       opened_file = open(file_path, 'r')
@@ -131,6 +137,14 @@ class ScalabilityLauncher(object):
   def returnCsvList(self):
     return self._returnFileContentList(self.__argumentNamespace.log_path,
                                        "%s*.csv" %LOG_FILE_PREFIX)
+  def getCreatedDocumentNumber(self):
+    number = 0
+    complete_scheme = os.path.join(self.__argumentNamespace.log_path,
+                                   "%s*.csv" %LOG_FILE_PREFIX)
+    file_path_list = glob.glob(complete_scheme)
+    for file_path in file_path_list:
+      number = number + sum(1 for line in open(file_path))
+    return number
   def cleanUplogAndCsv(self):
     files_to_delete = glob.glob(os.path.join(self.__argumentNamespace.log_path,
@@ -154,6 +168,30 @@ class ScalabilityLauncher(object):
     next_test = ScalabilityTest(decoded_data, self.test_result)
     return next_test
+  def getCreatedDocumentNumber(self):
+    # First file line is corresponding to header
+    number = -1
+    complete_scheme = os.path.join(self.__argumentNamespace.log_path,
+                                   "%s*.csv" %LOG_FILE_PREFIX)
+    file_path_list = glob.glob(complete_scheme)
+    for file_path in file_path_list:
+      number = number + sum(1 for line in open(file_path))
+    return number
+  def getFailedDocumentNumber(self):
+    number = 0
+    complete_scheme = os.path.join(self.__argumentNamespace.log_path,
+                                   "%s*.csv" %LOG_FILE_PREFIX)
+    file_path_list = glob.glob(complete_scheme)
+    for file_path in file_path_list:
+      opened_file = open(file_path, 'r')
+      lines = opened_file.readlines()
+      for line in lines:
+        if '-1' in line:
+          number = number + 1
+      opened_file.close()
+    return number
   def run(self):
     self.log("Scalability Launcher started, with:")
     self.log("Test suite master url: %s" %self.__argumentNamespace.test_suite_master_url)
@@ -178,11 +216,12 @@ class ScalabilityLauncher(object):
         self.log("No Test Case Ready")
         time.sleep(5)
       else:
+        error_count = 1
         # Here call a runScalabilityTest ( placed on product/ERP5Type/tests ) ?
         self.log("Test Case %s is running..." %(current_test.title))
+        # Call the performance_tester_erp5
         try:
-          call([tester_path,
+          tester_process = subprocess.Popen([tester_path,
                 self.__argumentNamespace.erp5_url,
                 '1',
                 test_suites,
@@ -190,16 +229,26 @@ class ScalabilityLauncher(object):
                 '--users-file-path', user_file_path,
                 '--filename-prefix', "%s_%s_" %(LOG_FILE_PREFIX, current_test.title),
                 '--report-directory', self.__argumentNamespace.log_path,
-                '--repeat', '100',
+                '--repeat', "%s" %str(MAX_DOCUMENTS),
                 ])
+          test_case_duration = TEST_CASE_DURATION
+          time.sleep(test_case_duration)
+          #tester_process.kill()
+          tester_process.send_signal(signal.SIGINT)
+          error_count = 0
         except:
           self.log("Error during tester call.")
           raise ValueError("Tester call failed")
         self.log("Test Case %s is finish" %(current_test.title))
-        log_contents = self.returnLogList()
-        csv_contents = self.returnCsvList()
-        #self.cleanUplogAndCsv()
+        failed_document_number = self.getFailedDocumentNumber()
+        created_document_number = self.getCreatedDocumentNumber() - failed_document_number
+        created_document_per_hour_number = ( (float(created_document_number)*60*60) / float(test_case_duration) )
+        #log_contents = self.returnLogList()
+        #csv_contents = self.returnCsvList()
+        self.cleanUplogAndCsv()
         retry_time = 2.0
         proxy = taskdistribution.ServerProxy(
@@ -211,14 +260,13 @@ class ScalabilityLauncher(object):
             current_test.relative_path,
             current_test.title
           )
-        stdout = "LOG:\n""\n====\n====\n====\n====\n"
-        for log_content in log_contents:
-          stdout = stdout + log_content + "\n====\n====\n"
-        stdout = stdout + "CSV:\n""\n====\n====\n====\n====\n"
-        for csv_content in csv_contents:
-          stdout = stdout + csv_content + "\n====\n====\n"
-        test_result_line_test.stop(stdout=stdout)
+        output = "%s doc in %s secs = %s docs per hour" %(created_document_number, test_case_duration, created_document_per_hour_number)
+        test_result_line_test.stop(stdout=output,
+                                   test_count=created_document_number,
+                                   failure_count=failed_document_number,
+                                   error_count=error_count,
+                                   duration=test_case_duration)
         self.log("Test Case Stopped")
         return error_message_set, exit_status
...
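
Taken together, the measure added by this commit is deliberately simple: start performance_tester_erp5 with subprocess.Popen, let it run for TEST_CASE_DURATION seconds, interrupt it with SIGINT, count the lines in the generated CSV files (skipping headers and treating '-1' entries as failures), and report the result as documents per hour. Below is a minimal standalone sketch of that computation; the ERP5 URL, test suite name, report directory, and the assumption that the tester script is on the PATH are illustrative placeholders, not values taken from this commit.

# Standalone sketch of the scalability measure (hypothetical values:
# ERP5_URL, TEST_SUITE and LOG_PATH are placeholders for illustration).
import glob
import os
import signal
import subprocess
import time

LOG_FILE_PREFIX = "performance_tester_erp5"
TEST_CASE_DURATION = 60                       # seconds a test case is allowed to run
ERP5_URL = "http://localhost:8080/erp5/"      # placeholder ERP5 site URL
TEST_SUITE = "createPerson"                   # placeholder test suite name
LOG_PATH = "/tmp/scalability_logs"            # placeholder report directory

def count_documents(log_path, prefix):
  # Count created and failed documents from the tester CSV files.
  # One header line per CSV is skipped; lines containing '-1' are
  # treated as failed documents, mirroring getFailedDocumentNumber().
  created, failed = 0, 0
  for file_path in glob.glob(os.path.join(log_path, "%s*.csv" % prefix)):
    with open(file_path) as csv_file:
      lines = csv_file.readlines()
    created += max(len(lines) - 1, 0)
    failed += sum(1 for line in lines if '-1' in line)
  return created - failed, failed

# Run the tester for a fixed duration, then interrupt it with SIGINT,
# as the commit does; this assumes performance_tester_erp5 is installed.
tester = subprocess.Popen(["performance_tester_erp5", ERP5_URL, "1", TEST_SUITE,
                           "--report-directory", LOG_PATH,
                           "--filename-prefix", "%s_test_" % LOG_FILE_PREFIX,
                           "--repeat", "100000"])
time.sleep(TEST_CASE_DURATION)
tester.send_signal(signal.SIGINT)

created, failed = count_documents(LOG_PATH, LOG_FILE_PREFIX)
docs_per_hour = created * 3600.0 / TEST_CASE_DURATION
print("%s doc in %s secs = %s docs per hour" % (created, TEST_CASE_DURATION, docs_per_hour))

Under these assumptions, a run that leaves 500 valid CSV result lines after 60 seconds would be reported as 500 * 3600 / 60 = 30000 docs per hour.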