Eteri / erp5 / Commits

Commit 31804f68
Authored Sep 13, 2018 by Bryton Lacquement
Committed by Julien Muchembled, Jul 31, 2019
erp5.util: add support for Python 3

/reviewed-on nexedi/erp5!830

Parent: 5abb074d
Showing 23 changed files with 307 additions and 259 deletions (+307 / -259)
erp5/tests/testERP5TestNode.py                      +63  -64
erp5/util/benchmark/performance_tester.py            +3   -2
erp5/util/benchmark/report.py                        +4   -5
erp5/util/benchmark/scalability_tester.py            +2   -1
erp5/util/scalability/requestUrl.py                  +2   -2
erp5/util/scalability/runScalabilityTestSuite.py    +11  -22
erp5/util/taskdistribution/__init__.py              +16   -6
erp5/util/testbrowser/examples/createERP5User.py     +3   -5
erp5/util/testbrowser/examples/createPerson.py       +2   -1
erp5/util/testnode/NodeTestSuite.py                  +5   -3
erp5/util/testnode/ProcessManager.py                 +8   -7
erp5/util/testnode/ScalabilityTestRunner.py         +15  -16
erp5/util/testnode/SlapOSControler.py                +7   -5
erp5/util/testnode/SlapOSMasterCommunicator.py      +12   -6
erp5/util/testnode/Updater.py                        +4   -3
erp5/util/testnode/Utils.py                         +26  -11
erp5/util/testnode/__init__.py                       +2   -2
erp5/util/testnode/testnode.py                       +1   -1
erp5/util/testsuite/__init__.py                     +38  -33
erp5/util/timinglogparser/__init__.py               +35  -34
erp5/util/timinglogplotter/__init__.py              +11   -9
erp5/util/webchecker/__init__.py                     +8   -7
product/ERP5/bin/genbt5list                         +29  -14
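Most of the hunks below apply a small set of mechanical Python 2/3 compatibility idioms rather than behavioural changes. As orientation before the per-file diffs, here is a minimal, self-contained sketch (not taken from the patch itself) of the most frequent ones: the print_function future import, "except ... as ..." syntax, and six.moves.range in place of xrange. The function and values are hypothetical.

    # Illustrative sketch only -- report_errors and the sample values are hypothetical.
    from __future__ import print_function   # print() behaves the same on Python 2 and 3

    import sys
    from six.moves import range             # xrange on Python 2, built-in range on Python 3

    def report_errors(error_message_set):
        for error_message in error_message_set:
            print("ERROR: %s" % error_message, file=sys.stderr)

    try:
        values = [1.0 / n for n in range(1, 4)]
    except ZeroDivisionError as exc:         # 'as' replaces the Python-2-only 'except X, e:'
        report_errors([exc])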
erp5/tests/testERP5TestNode.py  (View file @ 31804f68)

@@ -23,7 +23,6 @@ import sys
 import tempfile
 import json
 import time
-import types
 import re
 @contextmanager

@@ -144,7 +143,8 @@ class ERP5TestNode(TestCase):
        self.__dict__.update(**kw)
      def __call__(self, command):
-       return subprocess.check_output(command, **self.__dict__)
+       return subprocess.check_output(command, universal_newlines=True,
+                                      **self.__dict__)
     return Caller(**kw)

   def generateTestRepositoryList(self, add_third_repository=False):

@@ -172,10 +172,10 @@ class ERP5TestNode(TestCase):
       output = call(['git', 'log', '--format=%H %s'])
       output = output.strip()
       output_line_list = output.split("\n")
-      self.assertEquals(2, len(output_line_list))
+      self.assertEqual(2, len(output_line_list))
       expected_commit_subject_list = ["next_commit", "first_commit"]
       commit_subject_list = [x.split()[1] for x in output_line_list]
-      self.assertEquals(expected_commit_subject_list, commit_subject_list)
+      self.assertEqual(expected_commit_subject_list, commit_subject_list)
       commit_dict['rep%i' % i] = [x.split() for x in output_line_list]
       if repository_path == self.remote_repository2:
         output = call('git checkout master -b foo'.split())

@@ -192,13 +192,13 @@ class ERP5TestNode(TestCase):
     """
     test_node = self.getTestNode()
     node_test_suite = test_node.getNodeTestSuite('foo')
-    self.assertEquals(0, node_test_suite.retry_software_count)
+    self.assertEqual(0, node_test_suite.retry_software_count)
     node_test_suite.retry_software_count = 2
     self.assertIs(node_test_suite, test_node.getNodeTestSuite('foo'))
-    self.assertEquals(2, node_test_suite.retry_software_count)
+    self.assertEqual(2, node_test_suite.retry_software_count)
     del test_node.node_test_suite_dict['foo']
     node_test_suite = test_node.getNodeTestSuite('foo')
-    self.assertEquals(0, node_test_suite.retry_software_count)
+    self.assertEqual(0, node_test_suite.retry_software_count)

   def test_02_NodeTestSuiteWorkingDirectory(self):
     """

@@ -206,9 +206,9 @@ class ERP5TestNode(TestCase):
     """
     test_node = self.getTestNode()
     node_test_suite = test_node.getNodeTestSuite('foo')
-    self.assertEquals("%s/foo" % self.working_directory,
-                      node_test_suite.working_directory)
-    self.assertEquals("%s/foo/test_suite" % self.working_directory,
-                      node_test_suite.test_suite_directory)
+    self.assertEqual("%s/foo" % self.working_directory,
+                     node_test_suite.working_directory)
+    self.assertEqual("%s/foo/test_suite" % self.working_directory,
+                     node_test_suite.test_suite_directory)

   def test_03_NodeTestSuiteCheckDataAfterEdit(self):

@@ -219,13 +219,13 @@ class ERP5TestNode(TestCase):
     test_node = self.getTestNode()
     node_test_suite = test_node.getNodeTestSuite('foo')
     self.updateNodeTestSuiteData(node_test_suite)
-    self.assertEquals(2, len(node_test_suite.vcs_repository_list))
+    self.assertEqual(2, len(node_test_suite.vcs_repository_list))
     repository_path_list = []
     for vcs_repository in node_test_suite.vcs_repository_list:
       repository_path_list.append(vcs_repository['repository_path'])
     expected_list = ["%s/rep0" % node_test_suite.working_directory,
                      "%s/rep1" % node_test_suite.working_directory]
-    self.assertEquals(expected_list, repository_path_list)
+    self.assertEqual(expected_list, repository_path_list)

   def test_04_constructProfile(self, my_test_type='UnitTest'):
     """

@@ -239,7 +239,7 @@ class ERP5TestNode(TestCase):
     node_test_suite.revision_list = (('rep1', (1234, 'azerty')),
                                      ('rep2', (3456, 'qwerty')))
     test_node.constructProfile(node_test_suite, my_test_type)
-    self.assertEquals("%s/software.cfg" % (node_test_suite.working_directory,),
-                      node_test_suite.custom_profile_path)
+    self.assertEqual("%s/software.cfg" % (node_test_suite.working_directory,),
+                     node_test_suite.custom_profile_path)
     profile = open(node_test_suite.custom_profile_path, 'r')
     if my_test_type == 'UnitTest':

@@ -282,7 +282,7 @@ ignore-ssl-certificate = true
 develop = false
 shared = true
 """ % {'temp_dir': self._temp_dir, 'revision1': revision1, 'revision2': revision2}
-    self.assertEquals(expected_profile, profile.read())
+    self.assertEqual(expected_profile, profile.read())
     profile.close()

   def getAndUpdateFullRevisionList(self, test_node, node_test_suite):

@@ -298,9 +298,9 @@ shared = true
     node_test_suite = test_node.getNodeTestSuite('foo')
     self.updateNodeTestSuiteData(node_test_suite)
     rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-    self.assertEquals(2, len(rev_list))
-    self.assertEquals(rev_list[0], 'rep0=2-%s' % commit_dict['rep0'][0][0])
-    self.assertEquals(rev_list[1], 'rep1=2-%s' % commit_dict['rep1'][0][0])
+    self.assertEqual(2, len(rev_list))
+    self.assertEqual(rev_list[0], 'rep0=2-%s' % commit_dict['rep0'][0][0])
+    self.assertEqual(rev_list[1], 'rep1=2-%s' % commit_dict['rep1'][0][0])
     my_file = open(os.path.join(self.remote_repository1, 'first_file'), 'w')
     my_file.write("next_content")
     my_file.close()

@@ -309,7 +309,7 @@ shared = true
     rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
     self.assertTrue(rev_list[0].startswith('rep0=2-'))
     self.assertTrue(rev_list[1].startswith('rep1=3-'))
-    self.assertEquals(2, len(node_test_suite.vcs_repository_list))
+    self.assertEqual(2, len(node_test_suite.vcs_repository_list))
     for vcs_repository in node_test_suite.vcs_repository_list:
       self.assertTrue(os.path.exists(vcs_repository['repository_path']))

@@ -323,8 +323,8 @@ shared = true
     node_test_suite = test_node.getNodeTestSuite('foo')
     self.updateNodeTestSuiteData(node_test_suite, add_third_repository=True)
     rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-    self.assertEquals(3, len(rev_list))
-    self.assertEquals(3, len(node_test_suite.vcs_repository_list))
+    self.assertEqual(3, len(rev_list))
+    self.assertEqual(3, len(node_test_suite.vcs_repository_list))
     rep2_clone_path = [x['repository_path'] for x in \
                        node_test_suite.vcs_repository_list \
                        if x['repository_path'].endswith("rep2")][0]

@@ -332,13 +332,13 @@ shared = true
     output = call("git branch".split()).strip()
     self.assertTrue("* foo" in output.split('\n'))
     vcs_repository_info = node_test_suite.vcs_repository_list[0]
-    self.assertEquals(vcs_repository_info['repository_id'], 'rep2')
-    self.assertEquals(vcs_repository_info['branch'], 'foo')
+    self.assertEqual(vcs_repository_info['repository_id'], 'rep2')
+    self.assertEqual(vcs_repository_info['branch'], 'foo')
     # change it to master
     vcs_repository_info['branch'] = 'master'
     rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
     output = call("git branch".split()).strip()
-    print output
+    print(output)
     self.assertTrue("* master" in output.split('\n'))
     # Add a third branch on remote, make sure we could switch to it
     remote_call = self.getCaller(cwd=self.remote_repository2)

@@ -368,8 +368,8 @@ shared = true
     node_test_suite = test_node.getNodeTestSuite('foo')
     self.updateNodeTestSuiteData(node_test_suite)
     rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-    self.assertEquals(2, len(rev_list))
-    self.assertEquals(2, len(node_test_suite.vcs_repository_list))
+    self.assertEqual(2, len(rev_list))
+    self.assertEqual(2, len(node_test_suite.vcs_repository_list))
     # patch deleteRepository to make sure it will be called once for the wrong
     # repos, and not for the repos which has not changed
     deleted_repository_path_list = []

@@ -386,12 +386,12 @@ shared = true
                          node_test_suite.vcs_repository_list \
                          if x['repository_path'].endswith("rep0")][0]
       call = self.getCaller(cwd=rep0_clone_path)
-      self.assertEquals(call("git config --get remote.origin.url".split()).strip(),
-                        self.remote_repository0)
+      self.assertEqual(call("git config --get remote.origin.url".split()).strip(),
+                       self.remote_repository0)
       rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-      self.assertEquals(call("git config --get remote.origin.url".split()).strip(),
-                        self.remote_repository2)
-      self.assertEquals([rep0_clone_path], deleted_repository_path_list)
+      self.assertEqual(call("git config --get remote.origin.url".split()).strip(),
+                       self.remote_repository2)
+      self.assertEqual([rep0_clone_path], deleted_repository_path_list)
     finally:
       Updater.deleteRepository = original_deleteRepository

@@ -407,8 +407,8 @@ shared = true
     node_test_suite = test_node.getNodeTestSuite('foo')
     self.updateNodeTestSuiteData(node_test_suite)
     rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)
-    self.assertEquals(2, len(rev_list))
-    self.assertEquals(2, len(node_test_suite.vcs_repository_list))
+    self.assertEqual(2, len(rev_list))
+    self.assertEqual(2, len(node_test_suite.vcs_repository_list))
     rep0_clone_path = [x['repository_path'] for x in \
                        node_test_suite.vcs_repository_list \
                        if x['repository_path'].endswith("rep0")][0]

@@ -457,10 +457,9 @@ shared = true
                        node_test_suite.vcs_repository_list \
                        if x['repository_path'].endswith("rep0")][0]
     # simulate a data corruption on rep0's index
-    index_file = open(os.path.join(rep0_clone_path, '.git', 'index'), 'a')
-    index_file.seek(10, os.SEEK_END)
-    index_file.truncate()
-    index_file.close()
+    with open(os.path.join(rep0_clone_path, '.git', 'index'), 'ab') as index_file:
+      index_file.seek(10, os.SEEK_END)
+      index_file.truncate()
     # we get rev list with corrupted repository, we get None, but in the same
     # time the bad repository is deleted
     rev_list = self.getAndUpdateFullRevisionList(test_node, node_test_suite)

@@ -490,8 +489,8 @@ shared = true
       info_list.append(
         call("git log -n1 --format=%H".split()).strip())
       return info_list
-    self.assertEquals(['2', '2'], getRepInfo(count=1))
-    self.assertEquals([commit_dict['rep0'][0][0], commit_dict['rep1'][0][0]],
-                      getRepInfo(hash=1))
+    self.assertEqual(['2', '2'], getRepInfo(count=1))
+    self.assertEqual([commit_dict['rep0'][0][0], commit_dict['rep1'][0][0]],
+                     getRepInfo(hash=1))
     class TestResult(object):
       revision = NodeTestSuite.revision

@@ -501,25 +500,25 @@ shared = true
     test_result.revision_list = (('rep0', (2, commit_dict['rep0'][0][0])),
                                  ('rep1', (1, commit_dict['rep1'][1][0])))
     test_node.checkRevision(test_result, node_test_suite)
-    self.assertEquals(['2', '1'], getRepInfo(count=1))
-    self.assertEquals([commit_dict['rep0'][0][0], commit_dict['rep1'][1][0]],
-                      getRepInfo(hash=1))
+    self.assertEqual(['2', '1'], getRepInfo(count=1))
+    self.assertEqual([commit_dict['rep0'][0][0], commit_dict['rep1'][1][0]],
+                     getRepInfo(hash=1))

   def test_07_checkExistingTestSuite(self):
     test_node = self.getTestNode()
     test_suite_data = self.getTestSuiteData(add_third_repository=True)
-    self.assertEquals([], os.listdir(self.working_directory))
+    self.assertEqual([], os.listdir(self.working_directory))
     test_node.purgeOldTestSuite(test_suite_data)
-    self.assertEquals([], os.listdir(self.working_directory))
+    self.assertEqual([], os.listdir(self.working_directory))
     os.mkdir(os.path.join(self.working_directory, 'foo'))
-    self.assertEquals(['foo'], os.listdir(self.working_directory))
+    self.assertEqual(['foo'], os.listdir(self.working_directory))
     test_node.purgeOldTestSuite(test_suite_data)
-    self.assertEquals(['foo'], os.listdir(self.working_directory))
+    self.assertEqual(['foo'], os.listdir(self.working_directory))
     os.mkdir(os.path.join(self.working_directory, 'bar'))
-    self.assertEquals(set(['bar', 'foo']),
-                      set(os.listdir(self.working_directory)))
+    self.assertEqual(set(['bar', 'foo']),
+                     set(os.listdir(self.working_directory)))
     test_node.purgeOldTestSuite(test_suite_data)
-    self.assertEquals(['foo'], os.listdir(self.working_directory))
+    self.assertEqual(['foo'], os.listdir(self.working_directory))

   def test_purgeOldTestSuiteChmodNonWriteable(self):
     """Old test suites can be deleted even when some files/directories have

@@ -633,11 +632,11 @@ shared = true
     method_list_for_prepareSlapOSForTestSuite = ["initializeSlapOSControler",
                    "runSoftwareRelease", "runComputerPartition"]
     runner.prepareSlapOSForTestNode(test_node_slapos)
-    self.assertEquals(method_list_for_prepareSlapOSForTestNode,
-                      [x["method_name"] for x in call_list])
+    self.assertEqual(method_list_for_prepareSlapOSForTestNode,
+                     [x["method_name"] for x in call_list])
     call_list = []
     runner.prepareSlapOSForTestSuite(node_test_suite)
-    self.assertEquals(method_list_for_prepareSlapOSForTestSuite,
-                      [x["method_name"] for x in call_list])
+    self.assertEqual(method_list_for_prepareSlapOSForTestSuite,
+                     [x["method_name"] for x in call_list])
     call_list = []
     SlapOSControler.runSoftwareRelease = Patch("runSoftwareRelease", status_code=1)

@@ -681,7 +680,7 @@ shared = true
       return json.dumps([])
     def _checkExistingTestSuite(reference_set):
-      test_self.assertEquals(set(reference_set),
-                             set(os.listdir(test_node.working_directory)))
+      test_self.assertEqual(set(reference_set),
+                            set(os.listdir(test_node.working_directory)))
       for x in reference_set:
         test_self.assertTrue(os.path.exists(os.path.join(

@@ -761,7 +760,7 @@ shared = true
       SlapOSControler.initializeSlapOSControler = doNothing
     # Inside test_node a runner is created using new UnitTestRunner methods
     test_node.run()
-    self.assertEquals(5, counter)
+    self.assertEqual(5, counter)
     time.sleep = original_sleep
     # Restore old class methods
     if my_test_type == "ScalabilityTest":

@@ -797,23 +796,23 @@ shared = true
     file_name = 'AC_Ra\xc3\xadzertic\xc3\xa1ma'
     non_ascii_file = open(os.path.join(controler.software_root, file_name), 'w')
     non_ascii_file.close()
-    self.assertEquals([file_name], os.listdir(controler.software_root))
+    self.assertEqual([file_name], os.listdir(controler.software_root))
     controler._resetSoftware()
-    self.assertEquals([], os.listdir(controler.software_root))
+    self.assertEqual([], os.listdir(controler.software_root))

   def test_14_createFolder(self):
     test_node = self.getTestNode()
     node_test_suite = test_node.getNodeTestSuite('foo')
     folder = node_test_suite.test_suite_directory
-    self.assertEquals(False, os.path.exists(folder))
+    self.assertFalse(os.path.exists(folder))
     createFolder(folder)
-    self.assertEquals(True, os.path.exists(folder))
+    self.assertTrue(os.path.exists(folder))
     to_drop_path = os.path.join(folder, 'drop')
     to_drop = open(to_drop_path, 'w')
     to_drop.close()
-    self.assertEquals(True, os.path.exists(to_drop_path))
+    self.assertTrue(os.path.exists(to_drop_path))
     createFolder(folder, clean=True)
-    self.assertEquals(False, os.path.exists(to_drop_path))
+    self.assertFalse(os.path.exists(to_drop_path))

   def test_15_suite_log_directory(self, my_test_type='UnitTest', grade='master'):
     def doNothing(self, *args, **kw):

@@ -861,7 +860,7 @@ shared = true
     def checkTestSuite(test_node):
       test_node.node_test_suite_dict
       rand_part_set = set()
-      self.assertEquals(2, len(test_node.node_test_suite_dict))
+      self.assertEqual(2, len(test_node.node_test_suite_dict))
       for ref, suite in test_node.node_test_suite_dict.items():
         self.assertTrue('var/log/testnode/%s' % suite.reference in \
                          suite.suite_log_path,

@@ -925,7 +924,7 @@ shared = true
       RunnerClass._prepareSlapOS = patch_prepareSlapOS
       SlapOSControler.initializeSlapOSControler = doNothing
     test_node.run()
-    self.assertEquals(counter, 3)
+    self.assertEqual(counter, 3)
     checkTestSuite(test_node)
     time.sleep = original_sleep
     # Restore old class methods

@@ -1021,18 +1020,18 @@ shared = true
     def callRaisingPrepareSlapos():
       self.assertRaises(SubprocessError, callPrepareSlapOS)
-    self.assertEquals(node_test_suite.retry_software_count, 0)
-    for x in xrange(0, 11):
+    self.assertEqual(node_test_suite.retry_software_count, 0)
+    for x in range(11):
       callRaisingPrepareSlapos()
-    self.assertEquals(len(init_call_kw_list), 11)
-    self.assertEquals(init_call_kw_list[-1]['reset_software'], False)
-    self.assertEquals(node_test_suite.retry_software_count, 11)
+    self.assertEqual(len(init_call_kw_list), 11)
+    self.assertEqual(init_call_kw_list[-1]['reset_software'], False)
+    self.assertEqual(node_test_suite.retry_software_count, 11)
     callRaisingPrepareSlapos()
-    self.assertEquals(init_call_kw_list[-1]['reset_software'], True)
-    self.assertEquals(node_test_suite.retry_software_count, 1)
+    self.assertEqual(init_call_kw_list[-1]['reset_software'], True)
+    self.assertEqual(node_test_suite.retry_software_count, 1)
     callRaisingPrepareSlapos()
-    self.assertEquals(init_call_kw_list[-1]['reset_software'], False)
-    self.assertEquals(node_test_suite.retry_software_count, 2)
+    self.assertEqual(init_call_kw_list[-1]['reset_software'], False)
+    self.assertEqual(node_test_suite.retry_software_count, 2)
     SlapOSControler.initializeSlapOSControler = \
       initial_initializeSlapOSControler
     SlapOSControler.runSoftwareRelease = initial_runSoftwareRelease
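Beyond the mechanical assertEquals -> assertEqual renames (and assertEquals(True/False, ...) -> assertTrue/assertFalse), the one substantive change in this test file is passing universal_newlines=True to subprocess.check_output, so the git helper returns text rather than bytes under Python 3. A minimal sketch of why that matters, assuming it runs inside a git checkout:

    import subprocess

    # Without universal_newlines=True, Python 3 would return bytes and the string
    # operations below (strip, split, comparisons against str) would fail.
    output = subprocess.check_output(['git', 'log', '--format=%H %s'],
                                     universal_newlines=True)
    output_line_list = output.strip().split("\n")   # works because output is str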
erp5/util/benchmark/performance_tester.py  (View file @ 31804f68)

@@ -28,6 +28,7 @@
 #
 ##############################################################################
+from __future__ import print_function
 import argparse
 import os
 import sys

@@ -264,7 +265,7 @@ class PerformanceTester(object):
         error_message = exit_msg_queue.get()
       except KeyboardInterrupt, e:
-        print >>sys.stderr, "\nInterrupted by user, stopping gracefully..."
+        print("\nInterrupted by user, stopping gracefully...", file=sys.stderr)
         exit_status = 2
       # An IOError may be raised when receiving a SIGINT which interrupts the

@@ -337,7 +338,7 @@ class PerformanceTester(object):
 def main():
   error_message_set, exit_status = PerformanceTester().run()
   for error_message in error_message_set:
-    print >>sys.stderr, "ERROR: %s" % error_message
+    print("ERROR: %s" % error_message, file=sys.stderr)
   sys.exit(exit_status)
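The only change here besides the new __future__ import is replacing the Python-2-only "print >> sys.stderr, ..." statement with the function form. A minimal sketch, using a hypothetical message:

    from __future__ import print_function  # must appear before other statements in the module

    import sys

    error_message = "something went wrong"   # hypothetical value for illustration
    # Python 2 only:   print >> sys.stderr, "ERROR: %s" % error_message
    # Python 2 and 3:
    print("ERROR: %s" % error_message, file=sys.stderr)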
erp5/util/benchmark/report.py  (View file @ 31804f68)

@@ -31,6 +31,7 @@
 #
 ##############################################################################
+from __future__ import print_function
 import argparse
 import re

@@ -537,7 +538,7 @@ def generateReport():
   for filename in filename_iter:
     # There may be no results at all in case of errors
     if not os.stat(filename).st_size:
-      print >>sys.stderr, "Ignoring empty file %s" % filename
+      print("Ignoring empty file %s" % filename, file=sys.stderr)
       continue
     report_dict = per_nb_users_report_dict.setdefault(

@@ -546,10 +547,8 @@ def generateReport():
     report_dict['filename'].append(filename)
   if not per_nb_users_report_dict:
-    print >>sys.stderr, "ERROR: No result file found, perhaps " \
-                        "``--filename-prefix'' should be specified?"
-    sys.exit(1)
+    sys.exit("ERROR: No result file found, perhaps ``--filename-prefix'' should"
+             "be specified?")
   pdf = PdfPages(argument_namespace.output_filename)
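The last hunk folds a print-to-stderr plus sys.exit(1) into a single sys.exit(message) call; passing a string to sys.exit prints it to stderr and exits with status 1, so the behaviour is equivalent on both Python versions. A small sketch with a hypothetical check:

    import sys

    def require_results(per_nb_users_report_dict):
        # sys.exit("message") writes the message to stderr and exits with status 1,
        # replacing the explicit print >> sys.stderr followed by sys.exit(1).
        if not per_nb_users_report_dict:
            sys.exit("ERROR: No result file found, perhaps "
                     "``--filename-prefix'' should be specified?")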
erp5/util/benchmark/scalability_tester.py  (View file @ 31804f68)

@@ -28,6 +28,7 @@
 #
 ##############################################################################
+from __future__ import print_function
 from .result import CSVBenchmarkResult, NothingFlushedException

 class CSVScalabilityBenchmarkResult(CSVBenchmarkResult):

@@ -60,7 +61,7 @@ class ScalabilityTester(PerformanceTester):
         urllib.urlencode({'error_message_set': '|'.join(error_message_set)})).close()
     except:
-      print >>sys.stderr, "ERROR: %s" % Formatter().formatException(sys.exc_info())
+      print("ERROR: %s" % Formatter().formatException(sys.exc_info()), file=sys.stderr)

   def getResultClass(self):
     if not self._argument_namespace.erp5_publish_url:
erp5/util/scalability/requestUrl.py  (View file @ 31804f68)

@@ -27,7 +27,7 @@ def main():
   if error_message_set:
     exit_status = 1
     for error in error_message_set:
-      print error
+      print(error)
   elif result:
-    print result
+    print(result)
   sys.exit(exit_status)
erp5/util/scalability/runScalabilityTestSuite.py  (View file @ 31804f68)

@@ -5,23 +5,15 @@ import os
 import shutil
 import time
 import sys
-import multiprocessing
-import signal
-import errno
 import json
 import logging
 import logging.handlers
 import glob
-import urlparse
-import httplib
-import base64
 import threading
-from erp5.util.benchmark.argument import ArgumentType
-from erp5.util.benchmark.performance_tester import PerformanceTester
 from erp5.util.benchmark.thread import TestThread, TestMetricThread
 from erp5.util import taskdistribution
 from erp5.util.testnode import Utils
-from erp5.util.testnode.ProcessManager import SubprocessError, ProcessManager, CancellationError
+from erp5.util.testnode.ProcessManager import ProcessManager
 import datetime

 MAX_INSTALLATION_TIME = 60 * 50

@@ -179,31 +171,28 @@ class ScalabilityLauncher(object):
     """
     data_array = self.__argumentNamespace.current_test_data.split(',')
     data = json.dumps({"count": data_array[0],
                        "title": data_array[1],
                        "relative_path": data_array[2]})
-    decoded_data = Utils.deunicodeData(json.loads(data))
-    return ScalabilityTest(decoded_data, self.test_result)
+    encoded_data = Utils.deunicodeData(json.loads(data))
+    return ScalabilityTest(encoded_data, self.test_result)

   def clearUsersFile(self, user_file_path):
     self.log("Clearing users file: %s" % user_file_path)
     os.remove(user_file_path)
-    users_file = open(user_file_path, "w")
+    with open(user_file_path, "w") as users_file:
       for line in self.users_file_original_content:
         users_file.write(line)
-    users_file.close()

   def updateUsersFile(self, user_quantity, password, user_file_path):
     self.log("Updating users file: %s" % user_file_path)
-    users_file = open(user_file_path, "r")
+    with open(user_file_path, "r") as users_file:
       file_content = users_file.readlines()
     self.users_file_original_content = file_content
     new_file_content = []
     for line in file_content:
       new_file_content.append(line.replace('<password>', password).replace('<user_quantity>', str(user_quantity)))
-    users_file.close()
     os.remove(user_file_path)
-    users_file = open(user_file_path, "w")
+    with open(user_file_path, "w") as users_file:
       for line in new_file_content:
         users_file.write(line)
-    users_file.close()

   def run(self):
     self.log("Scalability Launcher started, with:")
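Besides dropping unused imports, this file replaces open()/close() pairs with "with" blocks, which close the file even when a write raises. A standalone sketch of the pattern (the helper name is hypothetical):

    def rewrite_file(path, line_list):
        # The 'with' block closes the file on success and on error alike,
        # making the explicit close() calls used before the patch unnecessary.
        with open(path, "w") as output_file:
            for line in line_list:
                output_file.write(line)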
erp5/util/taskdistribution/__init__.py  (View file @ 31804f68)

@@ -40,11 +40,15 @@ Example use:
     test_line.stop()
 """
 from __future__ import print_function
-import httplib
+import six
+from six.moves import (
+  map,
+  http_client as httplib,
+  xmlrpc_client as xmlrpclib,
+)
 import socket
 import threading
 import time
-import xmlrpclib

 __all__ = ['TaskDistributor', 'TestResultProxy', 'TestResultLineProxy', 'patchRPCParser']

@@ -89,11 +93,17 @@ def patchRPCParser(error_handler):
   def verbose_feed(self, data):
     try:
       return original_feed(self, data)
-    except Exception, exc:
+    except Exception as exc:
       if not error_handler(data, exc):
         raise
   parser_klass.feed = verbose_feed

+try: # PY3
+  basestring
+except NameError:
+  basestring = bytes, str
+  unicode = str
+
 def binarize_args(arg):
   # Converts recursively basestring arg into xmlrpclib.Binary, as they can
   # contain non-XML allowed characters

@@ -102,9 +112,9 @@ def binarize_args(arg):
       arg = arg.encode('utf-8')
     return xmlrpclib.Binary(arg)
   if isinstance(arg, (list, tuple, set)):
-    return map(binarize_args, arg)
+    return list(map(binarize_args, arg))
   if isinstance(arg, dict):
-    return {k: binarize_args(v) for k, v in arg.iteritems()}
+    return {k: binarize_args(v) for k, v in six.iteritems(arg)}
   return arg

 class RPCRetry(object):

@@ -350,7 +360,7 @@ class TestResultProxy(RPCRetry):
     caption_list = []
     append = caption_list.append
     for name, (stream, max_history_bytes) in \
-        self._watcher_dict.iteritems():
+        six.iteritems(self._watcher_dict):
       append('==> %s <==' % (name, ))
       start = stream.tell()
       stream.seek(0, 2)
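This module shows the library-level strategy of the port: six.moves aliases for renamed standard-library modules, a basestring/unicode fallback defined once, list(map(...)) because map is lazy on Python 3, and six.iteritems instead of dict.iteritems. A compressed sketch of those pieces (the input dict is hypothetical and assumed to hold bytes values):

    import six
    from six.moves import xmlrpc_client as xmlrpclib  # 'xmlrpclib' on Python 2

    try: # PY3: basestring and unicode no longer exist
        basestring
    except NameError:
        basestring = bytes, str
        unicode = str

    def binarize_values(arg_dict):
        # dict.iteritems() is gone on Python 3; six.iteritems() works on both.
        return {k: xmlrpclib.Binary(v) for k, v in six.iteritems(arg_dict)}

    binary_dict = binarize_values({'payload': b'\x00\xff'})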
erp5/util/testbrowser/examples/createERP5User.py  (View file @ 31804f68)

@@ -8,6 +8,7 @@
 #
 # TODO: There must be a better way than the code below to do that though...
+from __future__ import print_function
 import sys
 from erp5.util.testbrowser.browser import Browser

@@ -19,11 +20,8 @@ try:
   user_nbr = int(user_nbr)
 except ValueError:
-  print >>sys.stderr, "ERROR: Missing arguments: %s URL USERNAME " \
-      "PASSWORD NUMBER_OF_USERS NEW_USERNAME_PREFIX NEW_USERS_PASSWORD" % \
-      sys.argv[0]
-  sys.exit(1)
+  sys.exit("ERROR: Missing arguments: %s URL USERNAME PASSWORD NUMBER_OF_USERS "
+           "NEW_USERNAME_PREFIX NEW_USERS_PASSWORD" % sys.argv[0])

 # Create a browser instance
 browser = Browser(url, username, password)
erp5/util/testbrowser/examples/createPerson.py  (View file @ 31804f68)

 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
+from __future__ import division, print_function
 from erp5.util.testbrowser.browser import Browser

 ITERATION = 20

@@ -89,4 +90,4 @@ if __name__ == '__main__':
       counter += 1

 for title, time_list in result_dict.iteritems():
-  print "%s: %.4fs" % (title, float(sum(time_list)) / ITERATION)
+  print("%s: %.4fs" % (title, sum(time_list) / ITERATION))
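With "from __future__ import division", the / operator performs true division on Python 2 as well, which is why the float() wrapper around sum(time_list) could be dropped. A tiny worked example with hypothetical timings:

    from __future__ import division, print_function

    ITERATION = 20
    time_list = [1, 2, 3]   # hypothetical integer timings for illustration

    # With true division the result is 0.3 on Python 2 and 3 alike,
    # so the old float(sum(time_list)) wrapper is no longer needed.
    print("%s: %.4fs" % ("example", sum(time_list) / ITERATION))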
erp5/util/testnode/NodeTestSuite.py  (View file @ 31804f68)

@@ -32,6 +32,8 @@ import string
 import random
 from .Utils import createFolder
+from six.moves import range

 class SlapOSInstance(object):
   """
   Base of an software instance,

@@ -69,14 +71,14 @@ class NodeTestSuite(SlapOSInstance):
   def createSuiteLog(self):
     # /srv/slapgrid/slappartXX/srv/var/log/testnode/az-D27KqX7FxJ/suite.log
-    alphabets = string.digits + string.letters
+    alphabets = string.digits + string.ascii_letters
     while 1:
       log_folder_name = '%s-%s' % (self.reference,
-        ''.join(random.choice(alphabets) for i in xrange(10)))
+        ''.join(random.choice(alphabets) for i in range(10)))
       log_folder_path = os.path.join(self.log_directory, log_folder_name)
       try:
         os.makedirs(log_folder_path)
-      except OSError, e:
+      except OSError as e:
         if e.errno != errno.EEXIST:
           raise
       else:
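string.letters was locale-dependent and removed in Python 3; string.ascii_letters exists in both, and six.moves.range covers the xrange rename. A sketch of the random-suffix generation this file performs (the 'foo' reference is hypothetical):

    import random
    import string
    from six.moves import range   # xrange on Python 2, built-in range on Python 3

    alphabets = string.digits + string.ascii_letters   # string.letters is Python 2 only
    suffix = ''.join(random.choice(alphabets) for i in range(10))
    log_folder_name = '%s-%s' % ('foo', suffix)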
erp5/util/testnode/ProcessManager.py  (View file @ 31804f68)

@@ -79,7 +79,8 @@ def subprocess_capture(p, log_prefix, get_output=True):
         break
       if get_output:
         buffer.append(data)
-      log(log_prefix + data.rstrip('\n'))
+      log(log_prefix + (data if str is bytes else
+                        data.decode('utf-8', errors='replace')).rstrip('\n'))
   if p.stdout:
     stdout = []
     stdout_thread = threading.Thread(target=readerthread,

@@ -97,8 +98,8 @@ def subprocess_capture(p, log_prefix, get_output=True):
     stdout_thread.join()
   if p.stderr:
     stderr_thread.join()
-  return (p.stdout and ''.join(stdout),
-          p.stderr and ''.join(stderr))
+  return (p.stdout and b''.join(stdout),
+          p.stderr and b''.join(stderr))

 def killCommand(pid):
   """

@@ -109,7 +110,7 @@ def killCommand(pid):
   try:
     process = psutil.Process(pid)
     process.suspend()
-  except psutil.Error, e:
+  except psutil.Error as e:
     return
   process_list = [process]
   new_list = process.children(recursive=True)

@@ -118,19 +119,19 @@ def killCommand(pid):
     for child in new_list:
       try:
         child.suspend()
-      except psutil.Error, e:
+      except psutil.Error as e:
        logger.debug("killCommand/suspend: %s", e)
   time.sleep(1)
   new_list = set(process.children(recursive=True)).difference(process_list)
   for process in process_list:
     try:
       process.kill()
-    except psutil.Error, e:
+    except psutil.Error as e:
       logger.debug("killCommand/kill: %s", e)

 class ProcessManager(object):

-  stdin = file(os.devnull)
+  stdin = open(os.devnull)

   def __init__(self, max_timeout=MAX_TIMEOUT):
     self.process_pid_set = set()
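On Python 3 the pipes read by subprocess_capture deliver bytes, so the captured chunks are joined with b'' and only decoded for logging; on Python 2, str is bytes, so no decoding happens. A standalone sketch of that guard (the sample chunks are hypothetical):

    def to_text(data):
        # On Python 2, str is bytes, so data is returned unchanged; on Python 3 the
        # bytes read from the pipe are decoded, replacing undecodable sequences.
        return data if str is bytes else data.decode('utf-8', errors='replace')

    chunks = [b'first line\n', b'second line\n']   # hypothetical captured output
    joined = b''.join(chunks)                      # keep the raw capture as bytes
    print(to_text(joined).rstrip('\n'))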
erp5/util/testnode/ScalabilityTestRunner.py  (View file @ 31804f68)

@@ -30,30 +30,31 @@ import subprocess
 import sys
 import time
 import glob
-import SlapOSControler
-import SlapOSMasterCommunicator
+from . import SlapOSControler, SlapOSMasterCommunicator
 import json
 import time
 import shutil
 import logging
 import string
 import random
-import urlparse
+from six.moves.urllib.parse import urlparse
 import base64
-import httplib
+from six.moves import http_client as httplib
-import Utils
+from . import Utils
 import requests
 import slapos.slap
-import cPickle as pickle
+from six.moves import cPickle as pickle
-from ProcessManager import SubprocessError, ProcessManager, CancellationError
+from .ProcessManager import SubprocessError, ProcessManager, CancellationError
 from subprocess import CalledProcessError
-from Updater import Updater
+from .Updater import Updater
 from erp5.util import taskdistribution
 from erp5.util.benchmark.thread import TestThread
 # for dummy slapos answer
 import signal
 from . import logger
+from six.moves import range

 # max time to generate frontend instance: 1.5 hour
 MAX_FRONTEND_TIME = 60 * 90
 # max time to register instance to slapOSMaster: 5 minutes

@@ -322,18 +323,16 @@ ces or already launched.")
     software_hash_directory = self.testnode.config['slapos_binary'].rsplit("bin/slapos", 1)[0]
     apache_htpasswd = software_hash_directory + "parts/apache/bin/htpasswd"
     testsuite_directory = self.testnode.config['repository_path_list'][0].rsplit('/', 1)[0]
-    htaccess_file = open(testsuite_directory + HTACCESS, "w")
-    file_content = """
+    with open(testsuite_directory + HTACCESS, "w") as htaccess_file:
+      htaccess_file.write("""
 AuthType Basic
 AuthName "Password Protected Area"
 AuthUserFile "%s%s"
 Require valid-user
-""" % (testsuite_directory, HTPASSWD)
-    htaccess_file.write(file_content)
-    htaccess_file.close()
+""" % (testsuite_directory, HTPASSWD))
     password_path = testsuite_directory + PASSWORD_FILE
     with open(password_path, "w") as password_file:
-      password = ''.join(random.choice(string.digits + string.letters) for i in xrange(PASSWORD_LENGTH))
+      password = ''.join(random.choice(string.digits + string.ascii_letters) for i in range(PASSWORD_LENGTH))
       password_file.write(password)
     user = TESTNODE_USER
     command = [apache_htpasswd, "-bc", testsuite_directory + HTPASSWD, user, password]

@@ -363,7 +362,7 @@ Require valid-user
     user, password = self.generateProfilePasswordAccess()
     logger.info("Software Profile password: %s" % password)
     self.reachable_profile = "https://%s:%s@%s" % (user, password,
-        os.path.join(urlparse.urlparse(self.testnode.config['frontend_url']).netloc,
+        os.path.join(urlparse(self.testnode.config['frontend_url']).netloc,
         "software", self.randomized_path, "software.cfg"))

   def prepareSlapOSForTestSuite(self, node_test_suite):

@@ -526,7 +525,7 @@ Require valid-user
     if not self.launchable:
       return {'status_code': 1, 'error_message': "Current test_suite is not actually launchable."}
     configuration_list = node_test_suite.configuration_list
-    test_list = range(0, len(configuration_list))
+    test_list = list(range(len(configuration_list)))
     try:
       test_result_proxy = self.testnode.taskdistribution.createTestResult(
                             node_test_suite.revision, test_list,
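Python 3 removed implicit relative imports, hence the switch to "from . import ...", and renamed several standard-library modules, which six.moves papers over; range() also became lazy, so code that needs a real list wraps it in list(). A small sketch of the renamed imports and the list(range(...)) idiom (the URL and configuration names are hypothetical):

    from six.moves.urllib.parse import urlparse       # 'urlparse' module on Python 2
    from six.moves import http_client as httplib      # 'httplib' on Python 2
    from six.moves import cPickle as pickle           # 'cPickle' on Python 2

    netloc = urlparse('https://frontend.example.com/software/software.cfg').netloc

    configuration_list = ['conf-a', 'conf-b']          # hypothetical configurations
    test_list = list(range(len(configuration_list)))   # range() is lazy on Python 3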
erp5/util/testnode/SlapOSControler.py
View file @
31804f68
...
@@ -35,6 +35,8 @@ from slapos import client
...
@@ -35,6 +35,8 @@ from slapos import client
from
.
import
logger
from
.
import
logger
from
.Utils
import
createFolder
from
.Utils
import
createFolder
from
six.moves
import
range
MAX_PARTITIONS
=
10
MAX_PARTITIONS
=
10
MAX_SR_RETRIES
=
3
MAX_SR_RETRIES
=
3
...
@@ -243,7 +245,7 @@ class SlapOSControler(object):
...
@@ -243,7 +245,7 @@ class SlapOSControler(object):
computer
=
slap
.
registerComputer
(
config
[
'computer_id'
])
computer
=
slap
.
registerComputer
(
config
[
'computer_id'
])
# Call a method to ensure connection to master can be established
# Call a method to ensure connection to master can be established
computer
.
getComputerPartitionList
()
computer
.
getComputerPartitionList
()
except
slapos
.
slap
.
ConnectionError
,
e
:
except
slapos
.
slap
.
ConnectionError
as
e
:
retries
+=
1
retries
+=
1
if
retries
>=
60
:
if
retries
>=
60
:
raise
raise
...
@@ -270,7 +272,7 @@ class SlapOSControler(object):
     # MySQL DB content) from previous runs. To support changes of partition
     # naming scheme (which already happened), do this at instance_root level.
     createFolder(instance_root, True)
-    for i in xrange(MAX_PARTITIONS):
+    for i in range(MAX_PARTITIONS):
       # create partition and configure computer
       # XXX: at the moment all partitions do share same virtual interface address
       # this is not a problem as usually all services are on different ports
...
@@ -278,7 +280,7 @@ class SlapOSControler(object):
       partition_path = os.path.join(instance_root, partition_reference)
       if not (os.path.exists(partition_path)):
         os.mkdir(partition_path)
-        os.chmod(partition_path, 0750)
+        os.chmod(partition_path, 0o750)
       computer.updateConfiguration(xml_marshaller.xml_marshaller.dumps({
         'address': config['ipv4_address'],
         'instance_root': instance_root,
...
@@ -318,7 +320,7 @@ class SlapOSControler(object):
     os.environ['PATH'] = environment['PATH']
     # a SR may fail for number of reasons (incl. network failures)
     # so be tolerant and run it a few times before giving up
-    for _ in xrange(MAX_SR_RETRIES):
+    for _ in range(MAX_SR_RETRIES):
       status_dict = self.spawn(config['slapos_binary'],
         'node', 'software', '--all',
         '--pidfile', os.path.join(self.software_root, 'slapos-node.pid'),
...
@@ -346,7 +348,7 @@ class SlapOSControler(object):
     # try to run for all partitions as one partition may in theory request another one
     # this not always is required but curently no way to know how "tree" of partitions
     # may "expand"
-    for _ in xrange(max_quantity):
+    for _ in range(max_quantity):
       status_dict = self.spawn(config['slapos_binary'], 'node', 'instance',
         '--pidfile', os.path.join(self.instance_root, 'slapos-node.pid'),
         '--cfg', self.slapos_config, raise_error_if_fail=False,
...
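The pattern in this file is the standard Python-2/3 compatibility rewrite: `except E, e` becomes `except E as e`, `xrange` becomes `range`, and old octal literals such as `0750` become `0o750`. A minimal sketch of the same idioms, with made-up names rather than code from the repository:

import os
import tempfile

def make_partitions(instance_root, count):
    # range() exists on both Python 2 and 3; xrange() is Python-2 only
    for i in range(count):
        path = os.path.join(instance_root, 'slappart%d' % i)
        if not os.path.exists(path):
            os.mkdir(path)
        # 0o750 is accepted by both interpreters; 0750 is a SyntaxError on Python 3
        os.chmod(path, 0o750)

try:
    make_partitions(tempfile.mkdtemp(), 2)
except OSError as e:  # "except E as e" replaces the Python-2-only "except E, e"
    print(e)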
erp5/util/testnode/SlapOSMasterCommunicator.py View file @ 31804f68
+from __future__ import print_function
+
 import datetime
 import json
 import traceback
...
@@ -12,6 +14,8 @@ from requests.exceptions import HTTPError
 from ..taskdistribution import SAFE_RPC_EXCEPTION_LIST
 from . import logger
+import six
+
 # max time to instance changing state: 3 hour
 MAX_INSTANCE_TIME = 60*60*3
...
@@ -52,7 +56,7 @@ def retryOnNetworkFailure(func,
     except _except_list:
       traceback.print_exc()
-      print 'Network failure. Retry method %s in %i seconds' % (func, retry_time)
+      print('Network failure. Retry method %s in %i seconds' % (func, retry_time))
       time.sleep(retry_time)
       retry_time = min(retry_time*1.5, 640)
...
@@ -92,8 +96,9 @@ class SlapOSMasterCommunicator(object):
     if instance_title is not None:
       self.name = instance_title
     if request_kw is not None:
-      if isinstance(request_kw, basestring) or \
-         isinstance(request_kw, unicode):
+      if isinstance(request_kw, bytes):
+        self.request_kw = json.loads(request_kw.decode('utf-8'))
+      elif isinstance(request_kw, six.text_type):
         self.request_kw = json.loads(request_kw)
       else:
         self.request_kw = request_kw
...
@@ -214,7 +219,7 @@ class SlapOSMasterCommunicator(object):
     result = self.hateoas_navigator.GET(url)
     result = json.loads(result)
     if result['_links'].get('action_object_slap', None) is None:
-      print result['links']
+      print(result['links'])
       return None
     object_link = self.hateoas_navigator.hateoasGetLinkFromLinks(
...
@@ -385,8 +390,9 @@ class SlapOSTester(SlapOSMasterCommunicator):
     self.name = name
     self.computer_guid = computer_guid
-    if isinstance(request_kw, str) or \
-       isinstance(request_kw, unicode):
+    if isinstance(request_kw, bytes):
+      self.request_kw = json.loads(request_kw.decode('utf-8'))
+    elif isinstance(request_kw, six.text_type):
       self.request_kw = json.loads(request_kw)
     else:
       self.request_kw = request_kw
...
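On Python 3 a serialized request may arrive as bytes or as text, so the constructors above decode bytes before calling json.loads and accept six.text_type (unicode on 2, str on 3) directly. A hedged sketch of the same normalization; the helper name is made up:

import json
import six

def load_request_kw(request_kw):
    # bytes must be decoded before json.loads
    if isinstance(request_kw, bytes):
        return json.loads(request_kw.decode('utf-8'))
    # six.text_type is unicode on Python 2 and str on Python 3
    if isinstance(request_kw, six.text_type):
        return json.loads(request_kw)
    return request_kw  # assume it is already a mapping

assert load_request_kw(b'{"a": 1}') == load_request_kw(u'{"a": 1}') == {'a': 1}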
erp5/util/testnode/Updater.py View file @ 31804f68
...
@@ -30,6 +30,7 @@ import re
 from . import logger
 from .ProcessManager import SubprocessError
 from .Utils import rmtree
+from slapos.util import bytes2str, str2bytes

 SVN_UP_REV = re.compile(r'^(?:At|Updated to) revision (\d+).$')
 SVN_CHANGED_REV = re.compile(r'^Last Changed Rev.*:\s*(\d+)', re.MULTILINE)
...
@@ -82,7 +83,7 @@ class Updater(object):
       # allow several processes clean the same folder at the same time
       try:
         os.remove(os.path.join(path, file))
-      except OSError, e:
+      except OSError as e:
         if e.errno != errno.ENOENT:
           raise
...
@@ -96,7 +97,7 @@ class Updater(object):
                       **kw)

   def _git(self, *args, **kw):
-    return self.spawn(self.git_binary, *args, **kw)['stdout'].strip()
+    return bytes2str(self.spawn(self.git_binary, *args, **kw)['stdout'].strip())

   def git_update_server_info(self):
     return self._git('update-server-info', '-f')
...
@@ -219,7 +220,7 @@ class Updater(object):
       self.deletePycFiles(path)
       try:
         status_dict = self.spawn(*args)
-      except SubprocessError, e:
+      except SubprocessError as e:
         if 'cleanup' not in e.stderr:
           raise
         self.spawn('svn', 'cleanup', path)
...
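spawn() captures raw subprocess output, which is bytes on Python 3, so git output now goes through slapos.util.bytes2str before being handled as text. A rough sketch of the idea, assuming bytes2str simply decodes UTF-8 (the helper below is a stand-in, not the real implementation):

import subprocess

def bytes2str(s):
    # assumption: decode bytes to native str on Python 3, pass str through unchanged
    return s.decode('utf-8') if isinstance(s, bytes) else s

def git_head(repo_path):
    out = subprocess.check_output(['git', '-C', repo_path, 'rev-parse', 'HEAD'])
    return bytes2str(out).strip()  # bytes on Python 3, already str on Python 2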
erp5/util/testnode/Utils.py View file @ 31804f68
...
@@ -3,6 +3,9 @@ import stat
 import shutil
 import errno
+import six
+from six.moves import map

 def rmtree(path):
   """Delete a path recursively.
...
@@ -11,14 +14,22 @@ def rmtree(path):
   def chmod_retry(func, failed_path, exc_info):
     """Make sure the directories are executable and writable.
     """
+    # Depending on the Python version, the following items differ.
+    if six.PY3:
+      expected_error_type = PermissionError
+      expected_func = os.lstat
+    else:
+      expected_error_type = OSError
+      expected_func = os.listdir
     e = exc_info[1]
-    if isinstance(e, OSError):
+    if isinstance(e, expected_error_type):
       if e.errno == errno.ENOENT:
         # because we are calling again rmtree on listdir errors, this path might
         # have been already deleted by the recursive call to rmtree.
         return
       if e.errno == errno.EACCES:
-        if func is os.listdir:
+        if func is expected_func:
           os.chmod(failed_path, 0o700)
           # corner case to handle errors in listing directories.
           # https://bugs.python.org/issue8523
...
@@ -39,12 +50,16 @@ def createFolder(folder, clean=False):
       rmtree(folder)
   os.mkdir(folder)

-def deunicodeData(data):
-  if isinstance(data, list):
-    return map(deunicodeData, data)
-  if isinstance(data, unicode):
-    return data.encode('utf8')
-  if isinstance(data, dict):
-    return {deunicodeData(key): deunicodeData(value)
-            for key, value in data.iteritems()}
-  return data
+if six.PY3:
+  def deunicodeData(data):
+    return data
+else:
+  def deunicodeData(data):
+    if isinstance(data, list):
+      return list(map(deunicodeData, data))
+    if isinstance(data, unicode):
+      return data.encode('utf8')
+    if isinstance(data, dict):
+      return {deunicodeData(key): deunicodeData(value)
+              for key, value in six.iteritems(data)}
+    return data
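shutil.rmtree reports failures through its onerror callback, and what it passes differs between interpreters: on Python 2 a permission problem typically shows up as an OSError raised from os.listdir, while on Python 3 it arrives as a PermissionError (compared above against os.lstat). A simplified, hedged sketch of the dispatch, not the exact testnode code:

import errno
import os
import six

if six.PY3:
    expected_error_type = PermissionError
else:
    expected_error_type = OSError

def chmod_retry(func, failed_path, exc_info):
    e = exc_info[1]
    if isinstance(e, expected_error_type) and e.errno == errno.EACCES:
        os.chmod(failed_path, 0o700)  # make the entry traversable/writable again
        return func(failed_path)      # retry the call that failed
    raise e

# usage: shutil.rmtree(path, onerror=chmod_retry)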
erp5/util/testnode/__init__.py View file @ 31804f68
...
@@ -24,7 +24,7 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
-import ConfigParser
+from six.moves import configparser
 import argparse
 import logging
 import logging.handlers
...
@@ -64,7 +64,7 @@ def main(*args):
   CONFIG = {
     'partition_reference': 'test0',
   }
-  config = ConfigParser.SafeConfigParser()
+  config = configparser.SafeConfigParser()
   # do not change case of option keys
   config.optionxform = str
   config.readfp(parsed_argument.configuration_file[0])
...
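The ConfigParser module was renamed to configparser in Python 3; importing it through six.moves keeps one spelling for both interpreters. A small hedged sketch (section and option names are invented):

import io
from six.moves import configparser

config = configparser.SafeConfigParser()  # resolves to the right module on 2 and 3
config.optionxform = str                  # keep option keys case-sensitive
config.readfp(io.StringIO(u"[testnode]\nnode_quantity = 2\n"))
print(config.get('testnode', 'node_quantity'))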
erp5/util/testnode/testnode.py View file @ 31804f68
...
@@ -171,7 +171,7 @@ shared = true
           # only limit to particular error, if we run that code for all errors,
           # then if server having most repositories is down for some time, we would
           # erase all repositories and facing later hours of downloads
-          if getattr(error, 'stderr', '').find('index') >= 0:
+          if b'index' in getattr(error, 'stderr', b''):
             rmtree(repository_path)
           logger.warning("Error while getting repository, ignoring this test suite",
                          exc_info=1)
...
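Since spawn() now returns stderr as bytes, the substring test uses a bytes literal; a str needle would raise TypeError (and never match) on Python 3. A tiny illustration with a made-up message:

# hypothetical captured stderr from a failed git command
stderr = b'error: unable to write index file'
if b'index' in stderr:
    print('corrupted checkout, remove the repository and re-clone')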
erp5/util/testsuite/__init__.py View file @ 31804f68
+from __future__ import print_function
+
 import argparse
 import re, os, shlex, glob
 import sys, threading, subprocess
 import traceback
 import errno
+import pprint
+import six
+from six.moves import range
 from erp5.util import taskdistribution
-from pprint import pprint
+
+if six.PY3:
+  stdbin = lambda x: x.buffer
+else:
+  stdbin = lambda x: x

+# PY3: use shlex.quote
 _format_command_search = re.compile("[[\\s $({?*\\`#~';<>&|]").search
 _format_command_escape = lambda s: "'%s'" % r"'\''".join(s.split("'"))

 def format_command(*args, **kw):
...
@@ -31,7 +41,7 @@ def subprocess_capture(p, quiet=False):
       buffer.append(data)
   if p.stdout:
     stdout = []
-    output = quiet and (lambda data: None) or sys.stdout.write
+    output = (lambda data: None) if quiet else stdbin(sys.stdout).write
     stdout_thread = threading.Thread(target=readerthread,
                                      args=(p.stdout, output, stdout))
     stdout_thread.setDaemon(True)
...
@@ -39,7 +49,7 @@ def subprocess_capture(p, quiet=False):
   if p.stderr:
     stderr = []
     stderr_thread = threading.Thread(target=readerthread,
-                                     args=(p.stderr, sys.stderr.write, stderr))
+                                     args=(p.stderr, stdbin(sys.stderr).write, stderr))
     stderr_thread.setDaemon(True)
     stderr_thread.start()
   if p.stdout:
...
@@ -47,8 +57,8 @@ def subprocess_capture(p, quiet=False):
   if p.stderr:
     stderr_thread.join()
   p.wait()
-  return (p.stdout and ''.join(stdout),
-          p.stderr and ''.join(stderr))
+  return (p.stdout and b''.join(stdout),
+          p.stderr and b''.join(stderr))

 class SubprocessError(EnvironmentError):
   def __init__(self, status_dict):
...
@@ -72,15 +82,15 @@ class Persistent(object):
   def __getattr__(self, attr):
     if attr == '_db':
       try:
-        db = file(self._filename, 'r+')
-      except IOError, e:
+        db = open(self._filename, 'r+')
+      except IOError as e:
         if e.errno != errno.ENOENT:
           raise
-        db = file(self._filename, 'w+')
+        db = open(self._filename, 'w+')
       else:
         try:
           self.__dict__.update(eval(db.read()))
-        except StandardError:
+        except Exception:
           pass
       self._db = db
       return db
...
@@ -89,7 +99,7 @@ class Persistent(object):
   def sync(self):
     self._db.seek(0)
-    db = dict(x for x in self.__dict__.iteritems() if x[0][:1] != '_')
+    db = dict(x for x in six.iteritems(self.__dict__) if x[0][:1] != '_')
     pprint.pprint(db, self._db)
     self._db.truncate()
...
@@ -103,10 +113,10 @@ class TestSuite(object):
   """
   RUN_RE = re.compile(
-    r'Ran (?P<all_tests>\d+) tests? in (?P<seconds>\d+\.\d+)s',
+    br'Ran (?P<all_tests>\d+) tests? in (?P<seconds>\d+\.\d+)s',
     re.DOTALL)

-  STATUS_RE = re.compile(r"""
+  STATUS_RE = re.compile(br"""
     (OK|FAILED)\s+\(
       (failures=(?P<failures>\d+),?\s*)?
       (errors=(?P<errors>\d+),?\s*)?
...
@@ -117,7 +127,7 @@ class TestSuite(object):
     """, re.DOTALL | re.VERBOSE)

   SUB_STATUS_RE = re.compile(
-    r"""SUB\s+RESULT:\s+(?P<all_tests>\d+)\s+Tests,\s+
+    br"""SUB\s+RESULT:\s+(?P<all_tests>\d+)\s+Tests,\s+
     (?P<failures>\d+)\s+Failures\s*
     \(?
       (skipped=(?P<skips>\d+),?\s*)?
...
@@ -130,7 +140,10 @@ class TestSuite(object):
   mysql_db_count = 1
   allow_restart = False
   realtime_output = True
-  stdin = file(os.devnull)
+  try: # PY3
+    stdin = subprocess.DEVNULL
+  except AttributeError:
+    stdin = open(os.devnull, 'rb')

   def __init__(self, max_instance_count, **kw):
     self.__dict__.update(kw)
...
@@ -139,8 +152,8 @@ class TestSuite(object):
     self.acquire = pool.acquire
     self.release = pool.release
     self._instance = threading.local()
-    self._pool = max_instance_count == 1 and [None] or \
-                 range(1, max_instance_count + 1)
+    self._pool = [None] if max_instance_count == 1 else \
+                 list(range(1, max_instance_count + 1))
     self._ready = set()
     self.running = {}
     if max_instance_count != 1:
...
@@ -185,13 +198,14 @@ class TestSuite(object):
   def spawn(self, *args, **kw):
     quiet = kw.pop('quiet', False)
+    cwd = kw.pop('cwd', None)
     env = kw and dict(os.environ, **kw) or None
     command = format_command(*args, **kw)
-    print '\n$ ' + command
+    print('\n$ ' + command)
     sys.stdout.flush()
     try:
       p = subprocess.Popen(args, stdin=self.stdin, stdout=subprocess.PIPE,
-                           stderr=subprocess.PIPE, env=env)
+                           stderr=subprocess.PIPE, env=env, cwd=cwd)
     except Exception:
       # Catch any exception here, to warn user instead of beeing silent,
       # by generating fake error result
...
@@ -229,20 +243,11 @@ class EggTestSuite(TestSuite):
   The python interpreter is ``python_interpreter``
   """
   def run(self, test):
-    print test
-    original_dir = os.getcwd()
-    try:
-      os.chdir(self.egg_test_path_dict[test])
-      return self.runUnitTest(test)
-    finally:
-      os.chdir(original_dir)
-
-  def runUnitTest(self, *args, **kw):
+    print(test)
     try:
-      runUnitTest = "{python} setup.py test".format(python=self.python_interpreter)
-      args = tuple(shlex.split(runUnitTest))
-      status_dict = self.spawn(*args, **kw)
-    except SubprocessError, e:
+      status_dict = self.spawn(self.python_interpreter, 'setup.py', 'test',
+                               cwd=self.egg_test_path_dict[test])
+    except SubprocessError as e:
       status_dict = e.status_dict
     test_log = status_dict['stderr']
     search = self.RUN_RE.search(test_log)
...
@@ -270,7 +275,7 @@ class EggTestSuite(TestSuite):
     return status_dict

   def getTestList(self):
-    return self.egg_test_path_dict.keys()
+    return list(self.egg_test_path_dict)

 def runTestSuite():
   parser = argparse.ArgumentParser(description='Run a test suite.')
...
@@ -327,7 +332,7 @@ def runTestSuite():
   if test_result is not None:
     assert revision == test_result.revision, (revision, test_result.revision)
     while suite.acquire():
-      test = test_result.start(suite.running.keys())
+      test = test_result.start(list(suite.running))
       if test is not None:
         suite.start(test.name, lambda status_dict, __test=test:
           __test.stop(**status_dict))
...
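Pipes opened by subprocess deliver bytes, so on Python 3 captured chunks are written to sys.stdout.buffer / sys.stderr.buffer instead of the text streams; on Python 2 the plain streams accept bytes, hence the stdbin helper. A hedged sketch of the same trick:

from __future__ import print_function
import subprocess
import sys
import six

stdbin = (lambda f: f.buffer) if six.PY3 else (lambda f: f)

p = subprocess.Popen(['echo', 'hello'], stdout=subprocess.PIPE)
data = p.stdout.read()          # bytes on both interpreters
stdbin(sys.stdout).write(data)  # a text-mode stream would reject bytes on Python 3
p.wait()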
erp5/util/timinglogparser/__init__.py View file @ 31804f68
...
@@ -27,6 +27,8 @@
 #
 ##############################################################################
+from __future__ import division, print_function
+
 import os
 import sys
 import imp
...
@@ -126,7 +128,7 @@ def parseFile(filename, measure_dict):
       sys.stderr.flush()
     match_list = LINE_PATTERN.findall(line)
     if len(match_list) != 1:
-      print >> sys.stderr, 'Unparseable line: %s:%i %r' % (filename, line_number, line)
+      print('Unparseable line: %s:%i %r' % (filename, line_number, line), file=sys.stderr)
     else:
       result, filter_id, date, duration = processLine(match_list[0], filename, line_number)
       # Possible result values & meaning:
...
@@ -135,20 +137,21 @@ def parseFile(filename, measure_dict):
       # (string): use & skip to next line
       if result is False:
         if debug:
-          print >> sys.stderr, '? %s:%i %r' % (filename, line_number, match_list[0])
+          print('? %s:%i %r' % (filename, line_number, match_list[0]), file=sys.stderr)
       elif result is True:
         if debug:
-          print >> sys.stderr, '- %s:%i %r' % (filename, line_number, match_list[0])
+          print('- %s:%i %r' % (filename, line_number, match_list[0]), file=sys.stderr)
         skip_count += 1
       else:
         measure_dict.setdefault(filter_id, {}).setdefault(result, {}).setdefault(date, []).append(int(duration))
         match_count += 1
     line = logfile.readline()
-  print >> sys.stderr, '%i' % (line_number, )
+  print('%i' % (line_number, ), file=sys.stderr)
   if line_number > 0:
     duration = time() - begin
-    print >> sys.stderr, "Matched %i lines (%.2f%%), %i skipped (%.2f%%), %i unmatched (%.2f%%) in %.2fs (%i lines per second)." % \
-      (match_count, (float(match_count) / line_number) * 100, skip_count, (float(skip_count) / line_number) * 100, (line_number - match_count - skip_count), (1 - (float(match_count + skip_count) / line_number)) * 100, duration, line_number / duration)
+    print("Matched %i lines (%.2f%%), %i skipped (%.2f%%), %i unmatched (%.2f%%) in %.2fs (%i lines per second)." % \
+      (match_count, (match_count / line_number) * 100, skip_count, (skip_count / line_number) * 100, (line_number - match_count - skip_count), (1 - (match_count + skip_count) / line_number) * 100, duration, line_number // duration), file=sys.stderr)

 debug = False
 outfile_prefix = None
...
@@ -161,9 +164,9 @@ decimate_count = 1
 try:
   opts, file_list = getopt.getopt(sys.argv[1:], '', ['debug', 'config=', 'prefix=', 'no-average', 'sum', 'load=', 'save=', 'decimate='])
-except Exception, reason:
-  print >> sys.stderr, reason
-  print >> sys.stderr, usage
+except Exception as reason:
+  print(reason, file=sys.stderr)
+  print(usage, file=sys.stderr)
   sys.exit(1)
 for name, value in opts:
...
@@ -185,7 +188,7 @@ for name, value in opts:
     decimate_count = int(value)
 if configuration is None:
-  raise ValueError, '--config is mandatory'
+  raise ValueError('--config is mandatory')
 config_file = os.path.splitext(os.path.basename(configuration))[0]
 config_path = [os.path.dirname(os.path.abspath(configuration))] + sys.path
...
@@ -203,26 +206,24 @@ file_number = 0
 measure_dict = {}
 if len(load_file_name_list):
   for load_file_name in load_file_name_list:
-    load_file = open(load_file_name)
-    temp_measure_dict = eval(load_file.read(), {})
-    load_file.close()
+    with open(load_file_name) as load_file:
+      temp_measure_dict = eval(load_file.read(), {})
     assert isinstance(measure_dict, dict)
     for filter_id, result_dict in temp_measure_dict.iteritems():
       for result, date_dict in result_dict.iteritems():
         for date, duration_list in date_dict.iteritems():
           measure_dict.setdefault(filter_id, {}).setdefault(result, {}).setdefault(date, []).extend(duration_list)
-    print >> sys.stderr, 'Previous processing result restored from %r' % (load_file_name, )
+    print('Previous processing result restored from %r' % (load_file_name, ), file=sys.stderr)
 for filename in file_list:
   file_number += 1
-  print >> sys.stderr, 'Processing %s [%i/%i]...' % (filename, file_number, file_count)
+  print('Processing %s [%i/%i]...' % (filename, file_number, file_count), file=sys.stderr)
   parseFile(filename, measure_dict)
 if save_file_name is not None:
-  save_file = open(save_file_name, 'w')
-  save_file.write(repr(measure_dict))
-  save_file.close()
-  print >> sys.stderr, 'Processing result saved to %r' % (save_file_name, )
+  with open(save_file_name, 'w') as save_file:
+    save_file.write(repr(measure_dict))
+  print('Processing result saved to %r' % (save_file_name, ), file=sys.stderr)
 if outfile_prefix is not None:
   ## Generate a list of all measures and a 2-levels dictionnary with date as key and measure dictionnary as value
...
@@ -252,9 +253,9 @@ if outfile_prefix is not None:
   def renderOutput(data_format, filename_suffix):
     for sheet_id, sheet_column_list in sheet_dict.iteritems():
       outfile_name = '%s_%s_%s.csv' % (outfile_prefix, sheet_id, filename_suffix)
-      print >> sys.stderr, 'Writing to %r...' % (outfile_name, )
-      outfile = open(outfile_name, 'w')
-      print >> outfile, '"date",%s' % (','.join(['"%s"' % (x[0], ) for x in sheet_column_list]), )
+      print('Writing to %r...' % (outfile_name, ), file=sys.stderr)
+      with open(outfile_name, 'w') as outfile:
+        print('"date",%s' % (','.join(['"%s"' % (x[0], ) for x in sheet_column_list]), ), file=outfile)
       decimate_dict = {}
       decimate = 0
       for date in date_list:
...
@@ -262,11 +263,11 @@ if outfile_prefix is not None:
           decimate_dict.setdefault(key, []).extend(value)
         decimate += 1
         if decimate == decimate_count:
-          print >> outfile, '"%s",%s' % (date, ','.join([render_cell(decimate_dict.get(x[1], ''), data_format) for x in sheet_column_list]))
+          print('"%s",%s' % (date, ','.join([render_cell(decimate_dict.get(x[1], ''), data_format) for x in sheet_column_list])), file=outfile)
           decimate_dict = {}
           decimate = 0
       if len(decimate_dict):
-        print >> outfile, '"%s",%s' % (date, ','.join([render_cell(decimate_dict.get(x[1], ''), data_format) for x in sheet_column_list]))
+        print('"%s",%s' % (date, ','.join([render_cell(decimate_dict.get(x[1], ''), data_format) for x in sheet_column_list])), file=outfile)
   if do_average:
     renderOutput('=%(sum)i/%(count)i', 'avg')
...
View file @
31804f68
...
@@ -27,6 +27,8 @@
...
@@ -27,6 +27,8 @@
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
##############################################################################
##############################################################################
from
__future__
import
division
,
print_function
from
datetime
import
date
from
datetime
import
date
from
os
import
path
from
os
import
path
import
rpy2.robjects
as
robjects
import
rpy2.robjects
as
robjects
...
@@ -82,9 +84,9 @@ class CSVFile(object):
...
@@ -82,9 +84,9 @@ class CSVFile(object):
if
cell
>
value_max
.
get
(
key
,
0
):
if
cell
>
value_max
.
get
(
key
,
0
):
value_max
[
key
]
=
cell
value_max
[
key
]
=
cell
column_dict
[
key
].
append
(
cell
)
column_dict
[
key
].
append
(
cell
)
line_num
=
float
(
line_num
)
/
100
line_num
=
line_num
/
100
for
key
in
ratio_dict
:
for
key
in
ratio_dict
:
ratio_dict
[
key
]
/=
line_num
ratio_dict
[
key
]
/
/
=
line_num
def
getColumn
(
self
,
column_id
):
def
getColumn
(
self
,
column_id
):
return
self
.
column_dict
[
self
.
column_list
[
column_id
]]
return
self
.
column_dict
[
self
.
column_list
[
column_id
]]
...
@@ -101,7 +103,7 @@ def computeExpr(expr):
...
@@ -101,7 +103,7 @@ def computeExpr(expr):
if
expr
:
if
expr
:
assert
expr
[
0
]
==
'='
assert
expr
[
0
]
==
'='
num
,
denom
=
expr
[
1
:].
split
(
'/'
)
num
,
denom
=
expr
[
1
:].
split
(
'/'
)
result
=
float
(
int
(
num
)
)
/
int
(
denom
)
result
=
int
(
num
)
/
int
(
denom
)
else
:
else
:
result
=
None
result
=
None
return
result
return
result
...
@@ -121,7 +123,7 @@ def main():
...
@@ -121,7 +123,7 @@ def main():
current_dir
=
os
.
getcwd
()
current_dir
=
os
.
getcwd
()
for
file_name
in
file_name_list
:
for
file_name
in
file_name_list
:
print
'Loading %s...'
%
(
file_name
,
)
print
(
'Loading %s...'
%
(
file_name
,
)
)
file
=
CSVFile
(
file_name
)
file
=
CSVFile
(
file_name
)
date_string_list
=
file
.
getColumn
(
0
)
date_string_list
=
file
.
getColumn
(
0
)
...
@@ -134,7 +136,7 @@ def main():
...
@@ -134,7 +136,7 @@ def main():
# date_list will be like ['2009/07/01', '2009/07/05', '2009/07/10', ...]
# date_list will be like ['2009/07/01', '2009/07/05', '2009/07/10', ...]
factor
=
1
factor
=
1
if
len
(
date_string_list
)
>
20
:
if
len
(
date_string_list
)
>
20
:
factor
=
int
(
len
(
date_string_list
)
/
20
)
factor
=
int
(
len
(
date_string_list
)
/
/
20
)
i
=
0
i
=
0
for
date_string
in
date_string_list
:
for
date_string
in
date_string_list
:
if
i
%
factor
==
0
:
if
i
%
factor
==
0
:
...
@@ -183,13 +185,13 @@ def main():
...
@@ -183,13 +185,13 @@ def main():
y_data
.
append
(
value
)
y_data
.
append
(
value
)
i
+=
1
i
+=
1
if
len
(
x_data
)
==
0
:
if
len
(
x_data
)
==
0
:
print
'Nothing to plot for %s...'
%
(
out_file_name
,
)
print
(
'Nothing to plot for %s...'
%
(
out_file_name
,
)
)
continue
continue
if
options
.
minimal_non_empty_ratio
is
not
None
:
if
options
.
minimal_non_empty_ratio
is
not
None
:
column_len
=
len
(
column
)
column_len
=
len
(
column
)
if
column_len
:
if
column_len
:
if
float
(
len
(
x_data
))
/
column_len
<
options
.
minimal_non_empty_ratio
:
if
len
(
x_data
)
/
column_len
<
options
.
minimal_non_empty_ratio
:
print
'Not enough values to plot for %s...'
%
(
out_file_name
,
)
print
(
'Not enough values to plot for %s...'
%
(
out_file_name
,
)
)
continue
continue
r_y_data
=
robjects
.
FloatVector
(
y_data
)
r_y_data
=
robjects
.
FloatVector
(
y_data
)
r_x_data
=
robjects
.
FloatVector
(
x_data
)
r_x_data
=
robjects
.
FloatVector
(
x_data
)
...
@@ -220,7 +222,7 @@ def main():
...
@@ -220,7 +222,7 @@ def main():
# stop changing the out-type file
# stop changing the out-type file
r
(
"""dev.off()"""
)
r
(
"""dev.off()"""
)
print
'Saving %s...'
%
(
out_file_name
,
)
print
(
'Saving %s...'
%
(
out_file_name
,
)
)
if
__name__
==
'__main__'
:
if
__name__
==
'__main__'
:
main
()
main
()
...
...
erp5/util/webchecker/__init__.py View file @ 31804f68
...
@@ -26,6 +26,8 @@
 # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
 #
 ##############################################################################
+from __future__ import print_function
+
 import os
 import shutil
 import sys
...
@@ -543,7 +545,7 @@ class HTTPCacheCheckerTestSuite(object):
 from optparse import OptionParser
-import ConfigParser
+from six.moves import configparser

 def _formatConfiguration(configuration):
   """ format the configuration"""
...
@@ -559,11 +561,11 @@ def web_checker_utility():
   (options, args) = parser.parse_args()
   if len(args) != 1 :
-    print parser.print_help()
+    print(parser.print_help())
     parser.error('incorrect number of arguments')
   config_path = args[0]
-  config = ConfigParser.RawConfigParser()
+  config = configparser.RawConfigParser()
   config.read(config_path)
   working_directory = config.get('web_checker', 'working_directory')
   url = config.get('web_checker', 'url')
...
@@ -615,10 +617,9 @@ def web_checker_utility():
   result = instance.start(prohibited_file_name_list=prohibited_file_name_list,
                           prohibited_folder_name_list=prohibited_folder_name_list)
   if options.output_file:
-    file_object = open(options.output_file, 'w')
-    file_object.write(result)
-    file_object.close()
+    with open(options.output_file, 'w') as file_object:
+      file_object.write(result)
   else:
-    print result
+    print(result)
product/ERP5/bin/genbt5list View file @ 31804f68
...
@@ -35,12 +35,24 @@ import posixpath
 import tarfile
 import os
 import sys
-import cgi
+try:
+  from html import escape
+except ImportError:
+  from cgi import escape # Deprecated since version 3.2
 from base64 import b64encode
-from cStringIO import StringIO
+from io import BytesIO
 from hashlib import sha1
-from urllib import unquote
+try:
+  from urllib.parse import unquote
+except ImportError:
+  from urllib import unquote
+
+if sys.version_info[0] == 3:
+  def iteritems(d):
+    return iter(d.items())
+else:
+  def iteritems(d):
+    return d.iteritems()

 # Order is important for installation
 # We want to have:
...
@@ -109,11 +121,11 @@ item_name_list = tuple('_%s_item' % x for x in item_name_list)
 class BusinessTemplateRevision(list):

   def hash(self, path, text):
-    self.append((path, sha1(text).digest()))
+    self.append((path.encode('utf-8'), sha1(text).digest()))

   def digest(self):
     self.sort()
-    return b64encode(sha1('\0'.join(h + p for (h, p) in self)).digest())
+    return b64encode(sha1(b'\0'.join(h + p for (h, p) in self)).digest())

 class BusinessTemplate(dict):
...
@@ -151,7 +163,7 @@ force_install
   def __iter__(self):
     self['revision'] = self.revision.digest()
-    return iter(sorted(self.iteritems()))
+    return iter(sorted(iteritems(self)))

   @classmethod
   def fromTar(cls, tar):
...
@@ -179,8 +191,8 @@ force_install
     return iter(self)

 def generateInformation(dir, info=id, err=None):
-  xml = StringIO()
-  xml.write('<?xml version="1.0"?>\n<repository>\n')
+  xml = BytesIO()
+  xml.write(b'<?xml version="1.0"?>\n<repository>\n')
   for name in sorted(os.listdir(dir)):
     path = os.path.join(dir, name)
     if name.endswith('.bt5'):
...
@@ -201,13 +213,16 @@ def generateInformation(dir, info=id, err=None):
       property_list = BusinessTemplate.fromDir(path)
     else:
       continue
-    xml.write(' <template id="%s">\n' % name)
+    xml.write(b' <template id="%s">\n' % name.encode())
     for k, v in property_list:
-      for v in (v,) if type(v) is str else v:
-        xml.write('  <%s>%s</%s>\n' % (k, cgi.escape(v), k))
-    xml.write(' </template>\n')
+      if str is not bytes:
+        k = k.encode()
+      for v in (v,) if type(v) is bytes else v:
+        xml.write(b'  <%s>%s</%s>\n' % (k, escape(v) if str is bytes else escape(v.decode()).encode(), k))
+    xml.write(b' </template>\n')
     info('done\n')
-  xml.write('</repository>\n')
+  xml.write(b'</repository>\n')
   return xml

 def main(dir_list=None, **kw):
...
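genbt5list now builds the repository index as bytes: io.BytesIO replaces cStringIO.StringIO, the literals are bytes literals, and text values are escaped with html.escape (cgi.escape on Python 2) and then encoded. A small hedged sketch of the same approach; the tag names and values are invented:

from io import BytesIO
try:
    from html import escape
except ImportError:      # Python 2
    from cgi import escape

def render(template_id, title):
    xml = BytesIO()
    xml.write(b'<template id="%s">\n' % template_id.encode())
    xml.write(b'  <title>%s</title>\n' % escape(title).encode())
    xml.write(b'</template>\n')
    return xml.getvalue()

print(render('erp5_core', 'Core & Base'))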