Commit 9d8ba356 authored Sep 18, 2024 by Paul Graydon

wendelin_telecom_test: Update and improve tests

parent ebef772d

Showing 1 changed file with 480 additions and 312 deletions:
bt5/wendelin_telecom_test/TestTemplateItem/portal_components/test.erp5.testWendelinTelecom.py (+480, -312)
@@ -34,10 +34,14 @@ import string

 from Products.ERP5Type.tests.SecurityTestCase import SecurityTestCase

-def generateRandomString(length=32):
-  return ''.join([random.choice(string.ascii_letters + string.digits) for _ in xrange(length)])
+def generateRandomString(length=24, only_digits=False, hexadecimal=False):
+  character_list = string.digits
+  if not only_digits:
+    if hexadecimal:
+      character_list += 'ABCDEF'
+    else:
+      character_list += string.ascii_letters
+  return ''.join([random.choice(character_list) for _ in xrange(length)])

 class WendelinTelecomTest(SecurityTestCase):
   """
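Note (not part of the diff): a minimal sketch of how the reworked helper's new keyword arguments are exercised further down in this same file, assuming the Python 2 / ERP5 test environment the file targets; the example values are illustrative only.

  # Illustration of the new generateRandomString() keyword arguments
  hostname_seed = generateRandomString(length=3, only_digits=True)   # e.g. '482'
  enb_id_seed = generateRandomString(length=5, hexadecimal=True)     # e.g. '0F3A9' (digits plus 'ABCDEF')
  login_seed = generateRandomString()                                # 24 mixed letters and digits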
@@ -50,8 +54,16 @@ class WendelinTelecomTest(SecurityTestCase):

   def afterSetUp(self):
     # Set up variables for ORS ingestion testing
     self.ors_enb_log_ingestion = self.portal.portal_ingestion_policies.ors_enb_log_ingestion
-    self.test_ors_example_log_valid = {'log': self.portal.web_page_module.test_example_ors_enb_log_valid.getTextContent()}
-    self.test_ors_example_log_invalid = {'log': self.portal.web_page_module.test_example_ors_enb_log_invalid.getTextContent()}
+    self.test_ors_example_log_valid = {
+      'log': self.portal.web_page_module.test_example_ors_enb_log_valid.getTextContent()
+    }
+    self.test_ors_example_log_invalid = {
+      'log': self.portal.web_page_module.test_example_ors_enb_log_invalid.getTextContent()
+    }
+    self.test_ors_example_log_empty = {'log': ""}
+
+    # Set up ingestor user for performing ingestions
+    self.ingestor_user = self.createWendelinTelecomUser(
+      'test_ingestor_%s' % generateRandomString(), None, 'ingestor')

   def beforeTearDown(self):
     self.abort()
@@ -67,16 +79,19 @@ class WendelinTelecomTest(SecurityTestCase):

       (self.portal.data_analysis_module, 'Data Analysis'),
       (self.portal.data_array_module, 'Data Array'),
     ):
-      objects = module.objectValues(portal_type=portal_type)
-      if objects:
-        test_object_ids = [obj.getId() for obj in objects
-          if ('test' in obj.getReference() and 'default' not in obj.getId())]
-        if test_object_ids:
-          module.manage_delObjects(ids=test_object_ids)
+      object_list = module.objectValues(portal_type=portal_type)
+      if object_list:
+        test_object_id_list = [obj.getId() for obj in object_list \
+          if ('test' in obj.getReference().lower() and 'default' not in obj.getId())
+        ]
+        if test_object_id_list:
+          module.manage_delObjects(ids=test_object_id_list)
     self.tic()

   def createWendelinTelecomUser(self, reference, project, function):
-    # Create and validate a new Person with an assignment associated to the provided project and function
-    # Also generate a validated ERP5 login for the Person
+    # Create and validate a new Person with an assignment linked to the provided project and function
+    # Also generate and validate an ERP5 login for the Person
     user = self.portal.person_module.newContent(
       portal_type='Person',
       reference=reference
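Note (not part of the diff): a minimal call-site sketch of this helper, mirroring the way the tests further down invoke it; the project object is assumed to come from registerOrsClientProject().

  # Client user with no project and the 'user' function
  client_user = self.createWendelinTelecomUser('test_user_%s' % generateRandomString(), None, 'user')
  # Administrator assigned to an existing project
  admin_user = self.createWendelinTelecomUser(
    'test_user_%s' % generateRandomString(),
    project_item_dict['project'].getRelativeUrl(),
    'administrator')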
@@ -88,94 +103,114 @@ class WendelinTelecomTest(SecurityTestCase):

     return user

-  def createOrsClientProject(self, reference=None, user_reference=None, ors_tag_reference=None, ors_title_reference=None):
-    # Create a client project with the provided reference. The reference will be used for all items linked to the project
-    # If set, user_reference overrides reference for the client user account
-    # If set, ors_tag_reference overrides reference for the Data Acquisition Unit (ORS)
-    # If set, ors_title_reference overrides reference for the title of the Data Acquisition Unit
-    if reference is None:
-      reference = generateRandomString()
-    ors_tag = 'test_%s' % (ors_tag_reference or reference)
-    parameter_dict = {
-      'project': 'test_project_%s' % reference,
-      'project_title': 'Test Project %s' % reference,
-      'username': 'test_user_%s' % (user_reference or reference),
-      'ors_tag': ors_tag,
-      'ors_data_acquisition_unit': 'ors.%s' % ors_tag,
-      'ors_title': 'Test ORS %s' % (ors_title_reference or reference)
-    }
-    # Call the script responsible for creating the project and all associated items and store the JSON response
-    response = self.portal.Base_registerOrsClientProject(
-      parameter_dict['project'],
-      parameter_dict['project_title'],
-      parameter_dict['username'],
-      parameter_dict['ors_tag'],
-      parameter_dict['ors_title']
-    )
+  def registerOrs(self, tag_hostname_seed=None, tag_comp_id_seed=None, tag_enb_id_seed=None):
+    # Create a Data Acquisition Unit and related Data Supply with a tag constructed from the provided seeds.
+    # If any seed is NOT defined, it is generated at random.
+    if tag_hostname_seed is None:
+      tag_hostname_seed = generateRandomString(length=3, only_digits=True)
+    if tag_comp_id_seed is None:
+      tag_comp_id_seed = generateRandomString(length=4, only_digits=True)
+    if tag_enb_id_seed is None:
+      tag_enb_id_seed = generateRandomString(length=5, hexadecimal=True)
+    ors_tag = 'ors%s_COMP-%s_e0x%sTest' % (tag_hostname_seed, tag_comp_id_seed, tag_enb_id_seed)
+    response = self.portal.ERP5Site_registerOrs(ors_tag)
+    self.tic()
+    # Fetch created items from the catalog
+    data_acquisition_unit = self.portal.portal_catalog.getResultValue(
+      portal_type='Data Acquisition Unit',
+      reference=ors_tag,
+      validation_state='validated'
+    )
+    data_supply = None
+    if data_acquisition_unit is not None:
+      data_supply = data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
+    # Return all associated items
+    return {
+      'response': response,
+      'data_acquisition_unit': data_acquisition_unit,
+      'data_supply': data_supply
+    }
+
+  def registerOrsClientProject(self, reference_seed=None, client_user_reference_seed=None):
+    # Create a client project with the provided reference seed,
+    # as well as a related ERP5 Person with the same reference seed, and an ERP5 login.
+    # If NOT defined, reference_seed is generated at random.
+    # If defined, client_user_reference_seed overrides reference_seed for the client user.
+    if reference_seed is None:
+      reference_seed = generateRandomString()
+    project_reference = 'test_project_%s' % reference_seed
+    project_title = 'Test Project %s' % reference_seed
+    client_email = 'test_user_%s@test.wendelin-tele.com' % (client_user_reference_seed or reference_seed)
+    client_user_reference = client_email.split('@')[0]
+    # Call the script responsible for creating the project and the associated user, and store the JSON response
+    response = self.portal.ProjectModule_registerOrsClientProject(
+      project_reference, project_title, client_email, form_id='testing')
     self.tic()
-    # Fetch all associated items from the catalog
+    # Fetch created items from the catalog
     project = self.portal.portal_catalog.getResultValue(
       portal_type='Project',
-      reference=parameter_dict['project'],
-      title=parameter_dict['project_title'],
+      reference=project_reference,
+      title=project_title,
       validation_state='validated'
     )
     client_user = self.portal.portal_catalog.getResultValue(
       portal_type='Person',
-      reference=parameter_dict['username'],
-      validation_state='validated'
-    )
-    data_acquisition_unit = self.portal.portal_catalog.getResultValue(
-      portal_type='Data Acquisition Unit',
-      reference=parameter_dict['ors_data_acquisition_unit'],
-      title=parameter_dict['ors_title'],
+      reference=client_user_reference,
+      default_email_text=client_email,
       validation_state='validated'
     )
-    data_supply = None
-    if data_acquisition_unit:
-      data_supply = data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
     # Return all associated items
     return {
       'response': response,
       'project': project,
-      'client_user': client_user,
-      'data_acquisition_unit': data_acquisition_unit,
-      'data_supply': data_supply
+      'client_user': client_user
     }

-  def ingestOrsLogDataFromFluentd(self, log_data, reference):
+  def ingestOrsLogDataFromFluentd(self, log_data, ors_tag):
     # Simulate a fluentd instance sending the provided log data to Wendelin for ingestion
+    reference = 'ors.%s' % ors_tag
     body = msgpack.packb([0, log_data], use_bin_type=True)
     env = {'CONTENT_TYPE': 'application/octet-stream'}
     path = self.ors_enb_log_ingestion.getPath() + '/ingest?reference=' + reference
-    publish_kw = dict(user='ERP5TypeTestCase', env=env, request_method='POST', stdin=StringIO(body))
+    publish_kw = dict(
+      env=env,
+      user=self.ingestor_user.Person_getUserId(),
+      request_method='POST',
+      stdin=StringIO(body)
+    )
     return self.publish(path, **publish_kw)

   def getDataStream(self, data_acquisition_unit):
-    # Get a Data Stream linked to the provided Data Acquisition Unit
+    # Retrieve a Data Stream linked to the provided Data Acquisition Unit
     for line in data_acquisition_unit.getAggregateRelatedValueList(portal_type='Data Ingestion Line'):
       data_stream = line.getAggregateValue(portal_type='Data Stream')
       if data_stream:
         return data_stream

   def getDataAnalysis(self, data_supply):
-    # Get a Data Analysis linked to the provided Data Supply
+    # Retrieve a Data Analysis linked to the provided Data Supply
     for data_analysis in data_supply.getSpecialiseRelatedValueList(portal_type='Data Analysis'):
       if data_analysis:
         return data_analysis

   def getDataArrays(self, data_analysis):
-    # Get the Data Arrays linked to the provided Data Analysis
-    data_arrays = []
+    # Retrieve the Data Arrays linked to the provided Data Analysis
+    data_array_list = []
     for line in data_analysis.contentValues(portal_type='Data Analysis Line'):
       data_array = line.getAggregateValue(portal_type='Data Array')
       if data_array:
-        data_arrays.append(data_array)
-    return data_arrays
+        data_array_list.append(data_array)
+    return data_array_list

   def getOrsLogIngestionItems(self, log_data, reference, stop_data_analysis=False):
     # Simulate an ingestion of the provided log data
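Note (not part of the diff): a small standalone sketch of the payload ingestOrsLogDataFromFluentd builds. The tag and log values below are illustrative assumptions; only the 'ors%s_COMP-%s_e0x%sTest' pattern, the 'ors.' prefix and the msgpack-encoded [timestamp, record] pair come from the code above.

  import msgpack

  ors_tag = 'ors482_COMP-1234_e0x0F3A9Test'        # matches 'ors%s_COMP-%s_e0x%sTest'
  log_data = {'log': '... raw eNB log text ...'}    # same shape as self.test_ors_example_log_valid
  # fluentd forwards records as a msgpack-encoded [timestamp, record] pair
  body = msgpack.packb([0, log_data], use_bin_type=True)
  # The POST targets <ingestion policy path>/ingest?reference=ors.<tag>
  reference = 'ors.%s' % ors_tag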
@@ -183,7 +218,10 @@ class WendelinTelecomTest(SecurityTestCase):

     self.tic()

     # Retrieve all items linked to the ingestion
-    data_acquisition_unit = self.portal.portal_catalog.getResultValue(portal_type='Data Acquisition Unit', reference=reference)
+    data_acquisition_unit = self.portal.portal_catalog.getResultValue(
+      portal_type='Data Acquisition Unit',
+      reference=reference
+    )
     data_supply = data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
     self.tic()
@@ -200,7 +238,7 @@ class WendelinTelecomTest(SecurityTestCase):

       data_analysis.stop()
       self.tic()

-    data_arrays = self.getDataArrays(data_analysis)
+    data_array_list = self.getDataArrays(data_analysis)

     # Return all associated items
     return {
@@ -209,7 +247,7 @@ class WendelinTelecomTest(SecurityTestCase):

       'data_supply': data_supply,
       'data_stream': data_stream,
       'data_analysis': data_analysis,
-      'data_arrays': data_arrays
+      'data_array_list': data_array_list
     }

   def checkDocumentPermissions(self, user, document, user_can_view, user_can_modify, user_can_add):
@@ -235,61 +273,140 @@ class WendelinTelecomTest(SecurityTestCase):

     else:
       self.failIfUserCanAddDocument(user_id, document)

-  def checkIngestionDocumentsPermissions(self, user, ingestion_items, user_is_admin, user_is_assigned_to_same_project):
+  def checkIngestionDocumentsPermissions(self, user, ingestion_item_dict):
+    user_destination_project = None
+    user_function_list = None
+    for assignment in user.contentValues(portal_type='Assignment'):
+      if assignment.getValidationState() == 'open':
+        user_destination_project = assignment.getDestinationProject()
+        user_function_list = assignment.getFunctionList()
+    ors_destination_project = ingestion_item_dict['data_supply'].getDestinationProject()
+    same_project = (user_destination_project is not None) \
+      and (ors_destination_project is not None) \
+      and (user_destination_project == ors_destination_project)
+    user_is_admin = 'administrator' in user_function_list
+    user_is_ingestor = 'ingestor' in user_function_list
+
     # A client can only view a Data Acquisition Unit (ORS) if they are related to the same project
     # An administrator can view and edit all of them, as well as add one
-    self.checkDocumentPermissions(
-      user, ingestion_items['data_acquisition_unit'],
-      user_is_admin or user_is_assigned_to_same_project, user_is_admin, user_is_admin)
+    # An ingestor can view any of them
+    self.checkDocumentPermissions(
+      user, ingestion_item_dict['data_acquisition_unit'],
+      same_project or user_is_admin or user_is_ingestor, user_is_admin, user_is_admin)
     # Same as above for a Data Supply (required for computing security roles on users)
-    self.checkDocumentPermissions(
-      user, ingestion_items['data_supply'],
-      user_is_admin or user_is_assigned_to_same_project, user_is_admin, user_is_admin)
-    # Only an administrator can view a Data Stream, and nothing else
-    self.checkDocumentPermissions(user, ingestion_items['data_stream'], user_is_admin, False, False)
+    self.checkDocumentPermissions(
+      user, ingestion_item_dict['data_supply'],
+      same_project or user_is_admin or user_is_ingestor, user_is_admin, user_is_admin)
+    # An administrator can view a Data Stream
+    # An ingestor has all rights to a Data Stream (in order to append new log data to it)
+    self.checkDocumentPermissions(
+      user, ingestion_item_dict['data_stream'],
+      user_is_admin or user_is_ingestor, user_is_ingestor, user_is_ingestor)
     # A client can only view a Data Analysis if they are related to the same project (required for KPI graphing)
     # An administrator can view all of them
-    self.checkDocumentPermissions(
-      user, ingestion_items['data_analysis'],
-      user_is_admin or user_is_assigned_to_same_project, False, False)
+    self.checkDocumentPermissions(
+      user, ingestion_item_dict['data_analysis'],
+      user_is_admin or same_project, False, False)
     # A client can only view a Data Array if they are related to the same project
     # An administrator can view all of them
-    for data_array in ingestion_items['data_arrays']:
-      self.checkDocumentPermissions(
-        user, data_array,
-        user_is_admin or user_is_assigned_to_same_project, False, False)
+    for data_array in ingestion_item_dict['data_array_list']:
+      self.checkDocumentPermissions(
+        user, data_array,
+        user_is_admin or same_project, False, False)

-  def checkModulePermissions(self, user, user_is_admin):
-    # Everyone can view the Data Acquisition Unit and Data Supply modules
-    # Only administrators can add documents to them
-    self.checkDocumentPermissions(user, self.portal.data_acquisition_unit_module, True, False, user_is_admin)
-    self.checkDocumentPermissions(user, self.portal.data_supply_module, True, False, user_is_admin)
+  def checkModulePermissions(self, user):
+    user_function_list = None
+    for assignment in user.contentValues(portal_type='Assignment'):
+      if assignment.getValidationState() == 'open':
+        user_function_list = assignment.getFunctionList()
+    user_is_client = 'user' in user_function_list
+    user_is_admin = 'administrator' in user_function_list
+    user_is_ingestor = 'ingestor' in user_function_list
+
     # Everyone can view the Data Product module (required for KPI graphing)
     # Everyone can also view the two data products used in the KPI calculation process (required for KPI graphing)
     self.checkDocumentPermissions(user, self.portal.data_product_module, True, False, False)
     ors_kpi = self.portal.portal_catalog.getResultValue(
       portal_type='Data Product',
       reference='ors_kpi',
       validation_state='validated'
     )
     self.checkDocumentPermissions(user, ors_kpi, True, False, False)
     ors_enb_log_data = self.portal.portal_catalog.getResultValue(
       portal_type='Data Product',
       reference='ors_enb_log_data',
       validation_state='validated'
     )
     self.checkDocumentPermissions(user, ors_enb_log_data, True, False, False)
-    # Everyone can view the Data Transformation module (required for KPI graphing)
-    # Everyone can also view the data transformation used to produce the KPIs (required for KPI graphing)
-    self.checkDocumentPermissions(user, self.portal.data_transformation_module, True, False, False)
-    data_transformation = self.portal.portal_catalog.getResultValue(
-      portal_type='Data Transformation',
-      reference='ors_enb_log_data_transformation',
-      validation_state='validated'
-    )
-    self.checkDocumentPermissions(user, data_transformation, True, False, False)
+    # Only ingestors can view the Data Operation Module, as well as the two Data Operations required
+    # for ORS eNB log ingestion
+    self.checkDocumentPermissions(user, self.portal.data_operation_module, user_is_ingestor, False, False)
+    ingest_ors_enb_log_data = self.portal.portal_catalog.getResultValue(
+      portal_type='Data Operation',
+      reference='ingest_ors_enb_log_data',
+      validation_state='validated'
+    )
+    self.checkDocumentPermissions(user, ingest_ors_enb_log_data, user_is_ingestor, False, False)
+    calculate_ors_kpi = self.portal.portal_catalog.getResultValue(
+      portal_type='Data Operation',
+      reference='calculate_ors_kpi',
+      validation_state='validated'
+    )
+    self.checkDocumentPermissions(user, calculate_ors_kpi, user_is_ingestor, False, False)
+    # Everyone can view the Data Acquisition Unit and Data Supply modules
+    # Only administrators can add items to them
+    self.checkDocumentPermissions(user, self.portal.data_acquisition_unit_module, True, False, user_is_admin)
+    self.checkDocumentPermissions(user, self.portal.data_supply_module, True, False, user_is_admin)
-    # Only administrators can view the Data Ingestion and Data Stream modules
-    self.checkDocumentPermissions(user, self.portal.data_ingestion_module, user_is_admin, False, False)
-    self.checkDocumentPermissions(user, self.portal.data_stream_module, user_is_admin, False, False)
+    # Only clients and administrator can view the Data Transformation module (required for KPI graphing)
+    # Only they can also view the data transformation used to produce the KPIs (required for KPI graphing)
+    self.checkDocumentPermissions(user, self.portal.data_transformation_module, user_is_client or user_is_admin, False, False)
+    data_transformation = self.portal.portal_catalog.getResultValue(
+      portal_type='Data Transformation',
+      reference='ors_enb_log_data_transformation',
+      validation_state='validated'
+    )
+    self.checkDocumentPermissions(user, data_transformation, user_is_client or user_is_admin, False, False)
+    # Only ingestors and administrators can view the Data Ingestion and Data Stream modules
+    # Only ingestors can add new Data Ingestions and Data Streams
+    self.checkDocumentPermissions(user, self.portal.data_ingestion_module, user_is_ingestor or user_is_admin, False, user_is_ingestor)
+    self.checkDocumentPermissions(user, self.portal.data_stream_module, user_is_ingestor or user_is_admin, False, user_is_ingestor)
     # Only administrators can view the Data Analysis module
     self.checkDocumentPermissions(user, self.portal.data_analysis_module, user_is_admin, False, False)
-    # Everyone can view the Data Array module (required for KPI graphing)
-    self.checkDocumentPermissions(user, self.portal.data_array_module, True, False, False)
+    # Only clients and administrators can view the Data Array module (required for KPI graphing)
+    self.checkDocumentPermissions(user, self.portal.data_array_module, user_is_client or user_is_admin, False, False)
     # Only administrators have access to the Person and Project modules and can add items to them for client management purposes
     self.checkDocumentPermissions(user, self.portal.project_module, user_is_admin, False, user_is_admin)
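Note (not part of the diff): with this refactoring the callers no longer pass permission booleans; both helpers derive them from the user's own open Assignment. A minimal call-site sketch, mirroring test_05 further down:

  # Permissions are now derived from the user's open Assignment (function and destination project)
  self.checkModulePermissions(client_user_a)
  self.checkIngestionDocumentsPermissions(client_user_a, ingestion_a_item_dict)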
@@ -300,289 +417,340 @@ class WendelinTelecomTest(SecurityTestCase):

     Test the action which creates an ORS Data Supply from a Data Acquisition Unit.
     Check that the Data Supply is indeed created and validated.
     '''
-    ors_reference = 'ors.test_%s' % generateRandomString()
+    reference = 'test_%s' % generateRandomString()
     # Create and validate a Data Acquisition Unit
     data_acquisition_unit = self.portal.data_acquisition_unit_module.newContent(
       portal_type='Data Acquisition Unit',
-      reference=ors_reference
+      reference=reference
     )
     data_acquisition_unit.validate()
     self.tic()
-    # Call the tested script which creates a related Data Supply
-    # No need to assign a project to it here: that is implicitly tested later
-    data_supply = data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
+    # Call the script which creates a related Data Supply
+    created_data_supply = data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
     self.tic()
     # Check that the Data Supply exists and is validated
-    self.assertTrue(data_supply is not None)
-    self.assertTrue(data_supply.getValidationState() == 'validated')
+    self.assertTrue(created_data_supply is not None)
+    self.assertTrue(created_data_supply.getValidationState() == 'validated')
+    # Call the script again to retrieve the same Data Supply
+    retrieved_data_supply = data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
+    # Check that both Data Supplies are identical
+    self.assertTrue(created_data_supply == retrieved_data_supply)
   def test_02_registerOrsClientProject(self):
     '''
-    Test the script called during slave instantiation in SlapOS to register a new client project.
+    Test the action performed by Administrator users in the Project module to register a new client project.
     Check the successful case as well as all error cases.
     '''
-    # Generate a random reference, call the tested script and retrieve all associated items
+    # Generate a random reference seed, call the script and retrieve the associated items
     # This first call should succeed
-    reference = generateRandomString()
-    project_items = self.createOrsClientProject(reference=reference)
-    # Parse the JSON response and check that the user credentials exist and are valid
-    response_dict = json.loads(project_items['response'])
-    self.assertTrue(response_dict['username'] == 'test_user_%s' % reference)
-    self.assertTrue(len(response_dict['init_password']) == 16)
-    # Check that all items created by the script have been created
-    self.assertTrue(project_items['project'] is not None)
-    self.assertTrue(project_items['client_user'] is not None)
-    self.assertTrue(project_items['data_acquisition_unit'] is not None)
-    self.assertTrue(project_items['data_supply'] is not None)
+    reference_seed = generateRandomString()
+    project_item_dict = self.registerOrsClientProject(reference_seed=reference_seed)
+    # Check that both the project and the client user have been created
+    self.assertTrue(project_item_dict['project'] is not None)
+    self.assertTrue(project_item_dict['client_user'] is not None)
     # Call the script a second time with the same reference
-    # This should not do anything and respond with an error as the project already exists
-    project_items = self.createOrsClientProject(reference=reference)
-    # Parse the JSON response and check that the error message is valid
-    response_dict = json.loads(project_items['response'])
-    self.assertTrue('error_msg' in response_dict)
-    self.assertTrue(response_dict['error_msg'] == "Client project %s already registered." % project_items['project'].getReference())
-    # Create a new reference for the project, but reuse the previous reference for the client user account
-    new_project_reference = generateRandomString()
-    while new_project_reference == reference:
-      new_project_reference = generateRandomString()
-    # Call the script a third time, keeping the same reference as before ONLY for the client user account
+    # This should not do anything as the project already exists
+    repeated_project_item_dict = self.registerOrsClientProject(reference_seed=reference_seed)
+    # Check that both the project and the client user are identical to the previous ones
+    self.assertTrue(repeated_project_item_dict['project'] == project_item_dict['project'])
+    self.assertTrue(repeated_project_item_dict['client_user'] == repeated_project_item_dict['client_user'])
+    # Create a new reference seed for the project, but reuse the previous reference for the client user account
+    new_project_reference_seed = generateRandomString()
+    while new_project_reference_seed == reference_seed:
+      new_project_reference_seed = generateRandomString()
+    # Call the script a third time, keeping the same reference seed as before ONLY for the client user account
     # This should also error out as the client user account already exists
-    project_items = self.createOrsClientProject(reference=new_project_reference, user_reference=reference)
-    # Parse the JSON response and check that the error message is valid
-    response_dict = json.loads(project_items['response'])
-    self.assertTrue('error_msg' in response_dict)
-    self.assertTrue(response_dict['error_msg'] == "Client account username not available: test_user_%s." % reference)
-    # Check that the new project is NOT created
-    self.assertTrue(project_items['project'] is None)
-    # Same thing as above, but reuse the original reference for the Data Acquisition Unit (ORS)
-    new_project_reference_2 = generateRandomString()
-    while (new_project_reference_2 == reference) or (new_project_reference_2 == new_project_reference):
-      new_project_reference_2 = generateRandomString()
-    # Call the script a fourth time, keeping the same reference as before ONLY for the Data Acquisition Unit
-    # This should also error out as it already exists
-    project_items = self.createOrsClientProject(reference=new_project_reference_2, ors_tag_reference=reference)
-    # Parse the JSON response and check that the error message is valid
-    response_dict = json.loads(project_items['response'])
-    self.assertTrue('error_msg' in response_dict)
-    self.assertTrue(response_dict['error_msg'] == "ORS with tag test_%s already registered." % reference)
-    # Check that the new project is NOT created
-    self.assertTrue(project_items['project'] is None)
-    # Generate new reference again, but reuse the original reference for the Data Acquisition Unit's title
-    new_project_reference_3 = generateRandomString()
-    while (new_project_reference_3 == reference):
-      new_project_reference_3 = generateRandomString()
-    # Call the script a fifth time, using the new reference for everything except the ORS title
-    # This should succeed as the title is not checked for collision
-    project_items = self.createOrsClientProject(reference=new_project_reference_3, ors_title_reference=reference)
-    # Parse the JSON response and check that the user credentials exist and are valid
-    response_dict = json.loads(project_items['response'])
-    self.assertTrue(response_dict['username'] == 'test_user_%s' % new_project_reference_3)
-    self.assertTrue(len(response_dict['init_password']) == 16)
-    # Check that all items created by the script have been created
-    self.assertTrue(project_items['project'] is not None)
-    self.assertTrue(project_items['client_user'] is not None)
-    self.assertTrue(project_items['data_acquisition_unit'] is not None)
-    self.assertTrue(project_items['data_supply'] is not None)
+    new_project_item_dict = self.registerOrsClientProject(
+      reference_seed=new_project_reference_seed, client_user_reference_seed=reference_seed)
+    # Check that the new project is NOT created and that the client user is the same as previously
+    self.assertTrue(new_project_item_dict['project'] is None)
+    self.assertTrue(new_project_item_dict['client_user'] == project_item_dict['client_user'])
-  def test_03_registerNewOrsToProject(self):
+  def test_03_registerOrs(self):
     '''
-    Test the action which configures a Data Acquisition Unit and Data Supply representing an ORS
-    assigned to a given client project.
+    Test the script called during slave instantiation in SlapOS by an ORS to automatically register itself.
+    Check all detected cases.
     '''
-    project_items = self.createOrsClientProject()
-    # Check that all items related to the project exist
-    self.assertTrue(project_items['project'] is not None)
-    self.assertTrue(project_items['client_user'] is not None)
-    self.assertTrue(project_items['data_acquisition_unit'] is not None)
-    self.assertTrue(project_items['data_supply'] is not None)
-    # Add a new ORS to the project, with a new reference
-    reference = generateRandomString()
-    ors_tag = 'test_%s' % reference
-    ors_reference = 'ors.test_%s' % reference
-    ors_title = 'ORS Test %s' % reference
-    project_items['project'].Project_newOrs(ors_tag, ors_title)
-    self.tic()
-    # Check that the Data Acquisition Unit exists and is validated
-    data_acquisition_unit = self.portal.portal_catalog.getResultValue(
-      portal_type='Data Acquisition Unit',
-      reference=ors_reference,
-      title=ors_title,
-      validation_state='validated'
-    )
-    self.assertTrue(data_acquisition_unit is not None)
-    # Check that the Data Supply exists and is validated
-    data_supply = data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
-    self.assertTrue(data_supply is not None)
-    # Generate new references
-    new_reference = generateRandomString()
-    new_generic_reference = 'test_%s' % new_reference
-    new_ors_reference = 'ors.test_%s' % new_reference
-    new_ors_title = 'ORS Test %s' % new_reference
-    # Add a new ORS to the project, with a different reference but the same title
-    project_items['project'].Project_newOrs(new_generic_reference, ors_title)
-    self.tic()
-    # Check that the Data Acquisition Unit exists and is validated
-    new_data_acquisition_unit = self.portal.portal_catalog.getResultValue(
-      portal_type='Data Acquisition Unit',
-      reference=new_ors_reference,
-      title=ors_title,
-      validation_state='validated'
-    )
-    self.assertTrue(new_data_acquisition_unit is not None)
-    # Check that the Data Supply exists and is validated
-    new_data_supply = new_data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
-    self.assertTrue(new_data_supply is not None)
-    # Add another ORS to the project, with the same reference and a new title
-    # This should fail as there can be no reference collision
-    project_items['project'].Project_newOrs(ors_tag, new_ors_title)
-    self.tic()
-    # Check that the Data Acquisition Unit does not exist
-    new_data_acquisition_unit = self.portal.portal_catalog.getResultValue(
-      portal_type='Data Acquisition Unit',
-      reference=ors_reference,
-      title=new_ors_title,
-      validation_state='validated'
-    )
-    self.assertTrue(new_data_acquisition_unit is None)
+    tag_hostname_seed = generateRandomString(length=3, only_digits=True)
+    tag_comp_id_seed = generateRandomString(length=4, only_digits=True)
+    tag_enb_id_seed = generateRandomString(length=5, hexadecimal=True)
+    ors_item_dict = self.registerOrs(
+      tag_hostname_seed=tag_hostname_seed,
+      tag_comp_id_seed=tag_comp_id_seed,
+      tag_enb_id_seed=tag_enb_id_seed
+    )
+    # Parse the JSON response and check that it is empty, indicating a success
+    response_dict = json.loads(ors_item_dict['response'])
+    self.assertTrue(response_dict == {})
+    # Check that the Data Acquisition Unit and Data Supply have been created
+    self.assertTrue(ors_item_dict['data_acquisition_unit'] is not None)
+    self.assertTrue(ors_item_dict['data_supply'] is not None)
+    # Call the script a second time with the same seeds
+    # This should not do anything as the items already exist
+    repeated_ors_item_dict = self.registerOrs(
+      tag_hostname_seed=tag_hostname_seed,
+      tag_comp_id_seed=tag_comp_id_seed,
+      tag_enb_id_seed=tag_enb_id_seed
+    )
+    # Parse the JSON response and check the error message
+    response_dict = json.loads(repeated_ors_item_dict['response'])
+    self.assertTrue('error_msg' in response_dict)
+    self.assertTrue(response_dict['error_msg'] == "ORS with tag %s already exists." % ors_item_dict['data_acquisition_unit'].getReference())
+    # Generate a new seed that will cause the tag to be invalid
+    invalid_tag_hostname_seed = 'invalid_hostname'
+    # Call the script a third time with the new seed
+    # This should error out as the tag is invalid
+    invalid_ors_item_dict = self.registerOrs(
+      tag_hostname_seed=invalid_tag_hostname_seed,
+      tag_comp_id_seed=tag_comp_id_seed,
+      tag_enb_id_seed=tag_enb_id_seed
+    )
+    # Parse the JSON response and check the error message
+    response_dict = json.loads(invalid_ors_item_dict['response'])
+    self.assertTrue('error_msg' in response_dict)
+    self.assertTrue(response_dict['error_msg'] == "Invalid ORS tag ors%s_COMP-%s_e0x%sTest found" % (invalid_tag_hostname_seed, tag_comp_id_seed, tag_enb_id_seed))
+    # Check that the Data Acquisition Unit and Data Supply have NOT been created
+    self.assertTrue(invalid_ors_item_dict['data_acquisition_unit'] is None)
+    self.assertTrue(invalid_ors_item_dict['data_supply'] is None)
+    # Now, link the original Data Supply to a client project
+    project_a_item_dict = self.registerOrsClientProject()
+    project_a_url = project_a_item_dict['project'].getRelativeUrl()
+    ors_item_dict['data_supply'].setDestinationProject(project_a_url)
+    # Generate a new valid enb_id seed
+    new_tag_enb_id_seed = generateRandomString(length=5, hexadecimal=True)
+    # Call the script to simulate an ORS re-registering with another eNB identifier
+    new_enb_id_ors_item_dict = self.registerOrs(
+      tag_hostname_seed=tag_hostname_seed,
+      tag_comp_id_seed=tag_comp_id_seed,
+      tag_enb_id_seed=new_tag_enb_id_seed
+    )
+    # Check that the Data Acquisition Unit and Data Supply have been created
+    self.assertTrue(new_enb_id_ors_item_dict['data_acquisition_unit'] is not None)
+    self.assertTrue(new_enb_id_ors_item_dict['data_supply'] is not None)
+    self.assertTrue(new_enb_id_ors_item_dict['data_supply'].getDestinationProject() == project_a_url)
+    # Now, link the above Data Supply to a second project
+    project_b_item_dict = self.registerOrsClientProject()
+    new_enb_id_ors_item_dict['data_supply'].setDestinationProject(project_b_item_dict['project'].getRelativeUrl())
+    # Generate another valid enb_id seed
+    another_tag_enb_id_seed = generateRandomString(length=5, hexadecimal=True)
+    while another_tag_enb_id_seed == new_tag_enb_id_seed:
+      another_tag_enb_id_seed = generateRandomString(length=5, hexadecimal=True)
+    # Call the script to simulate the same ORS registering a third time with another eNB identifier
+    another_enb_id_ors_item_dict = self.registerOrs(
+      tag_hostname_seed=tag_hostname_seed,
+      tag_comp_id_seed=tag_comp_id_seed,
+      tag_enb_id_seed=another_tag_enb_id_seed
+    )
+    # Check that the Data Acquisition Unit and Data Supply have been created
+    self.assertTrue(another_enb_id_ors_item_dict['data_acquisition_unit'] is not None)
+    self.assertTrue(another_enb_id_ors_item_dict['data_supply'] is not None)
+    # As the ORS has been linked to two different projects already,
+    # it cannot be automatically decided to which project this version should be assigned to
+    self.assertTrue(another_enb_id_ors_item_dict['data_supply'].getDestinationProject() is None)
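Note (not part of the diff): test_03 above pins down the JSON contract of ERP5Site_registerOrs as exercised by these tests; a caller-side sketch of the three observed outcomes, using only names taken from the test code:

  # Success: empty JSON object
  ors_item_dict = self.registerOrs()
  self.assertEqual(json.loads(ors_item_dict['response']), {})
  # Duplicate tag: {"error_msg": "ORS with tag <tag> already exists."}
  # Invalid tag:   {"error_msg": "Invalid ORS tag <tag> found"}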
-  def test_04_1_ingestValidOrsLogDataFromFluentd(self, valid_data=True):
+  def test_04_1_ingestValidOrsLogDataFromFluentd(self, data_key="valid"):
     '''
-    Test a simple valid ORS log ingestion: simulate a fluentd gateway forwarding valid ORS logs to a project on the platform,
+    Test a simple valid ORS log ingestion: simulate a fluentd gateway forwarding valid ORS logs to the platform,
     and check that all items related to the ingestion are valid.
     '''
-    project_items = self.createOrsClientProject()
-    ors_reference = project_items['data_acquisition_unit'].getReference()
-    test_ors_example_log = self.test_ors_example_log_valid
-    if not valid_data:
-      test_ors_example_log = self.test_ors_example_log_invalid
-    ingestion_items = self.getOrsLogIngestionItems(test_ors_example_log, ors_reference)
+    # Register the ORS
+    ors_item_dict = self.registerOrs()
+    ors_tag = ors_item_dict['data_acquisition_unit'].getReference()
+    # Get the correct data logs according to what is being tested
+    test_ors_example_log = None
+    if data_key == "valid":
+      test_ors_example_log = self.test_ors_example_log_valid
+    elif data_key == "invalid":
+      test_ors_example_log = self.test_ors_example_log_invalid
+    elif data_key == "empty":
+      test_ors_example_log = self.test_ors_example_log_empty
+    # Perform ingestion
+    ingestion_item_dict = self.getOrsLogIngestionItems(test_ors_example_log, ors_tag)
     # In all cases, check that all items related to the ingestion exist
-    self.assertEqual(NO_CONTENT, ingestion_items['response'].getStatus())
-    self.assertTrue(ingestion_items['data_acquisition_unit'] is not None)
-    self.assertTrue(ingestion_items['data_supply'] is not None)
-    self.assertTrue(ingestion_items['data_stream'].getData() is not None)
-    self.assertTrue(ingestion_items['data_analysis'] is not None)
-    self.assertTrue(all(data_array is not None for data_array in ingestion_items['data_arrays']))
-    # Check that the data arrays containing the KPI data have correctly been initialized
-    self.assertTrue(ingestion_items['data_acquisition_unit'].Base_getERabDataArrayKey() is not None)
-    self.assertTrue(ingestion_items['data_acquisition_unit'].Base_getEUtranDataArrayKey() is not None)
-    if valid_data:
-      # If all data is valid: check that the arrays contain some data
-      self.assertTrue(all(data_array.getArrayShape() is not None for data_array in ingestion_items['data_arrays']))
-      self.assertTrue(all(data_array.getArrayDtype() is not None for data_array in ingestion_items['data_arrays']))
-    else:
-      # If some data is invalid: check that the whole log (one data chunk in this case) is ignored
-      # and the data arrays are empty
-      self.assertTrue(all(data_array.getArrayShape() is None for data_array in ingestion_items['data_arrays']))
-      self.assertTrue(all(data_array.getArrayDtype() is None for data_array in ingestion_items['data_arrays']))
+    self.assertEqual(NO_CONTENT, ingestion_item_dict['response'].getStatus())
+    self.assertTrue(ingestion_item_dict['data_acquisition_unit'] is not None)
+    self.assertTrue(ingestion_item_dict['data_supply'] is not None)
+    self.assertTrue(ingestion_item_dict['data_stream'].getData() is not None)
+    self.assertTrue(ingestion_item_dict['data_analysis'] is not None)
+    self.assertTrue(all(data_array is not None for data_array in ingestion_item_dict['data_array_list']))
+    # Check that the data arrays containing the KPI data have correctly been initialized
+    self.assertTrue(ingestion_item_dict['data_acquisition_unit'].DataAcquisitionUnit_getERabDataArrayKey() is not None)
+    self.assertTrue(ingestion_item_dict['data_acquisition_unit'].DataAcquisitionUnit_getEUtranDataArrayKey() is not None)
+    e_rab_shape = (0,)
+    e_rab_dtype = [
+      ('vt', '<f8'),
+      ('vInitialEPSBEstabSR_lo', '<f8'), ('vInitialEPSBEstabSR_hi', '<f8'),
+      ('vAddedEPSBEstabSR_lo', '<f8'), ('vAddedEPSBEstabSR_hi', '<f8')
+    ]
+    e_utran_shape = (0,)
+    e_utran_dtype = [('evt', '<f8'), ('qci', '<f8'), ('dl_lo', '<f8'), ('dl_hi', '<f8'), ('ul_lo', '<f8'), ('ul_hi', '<f8')]
+    if data_key == "valid":
+      e_rab_shape = (24,)
+    elif data_key == "invalid":
+      e_rab_shape = (30,)
+    elif data_key == "empty":
+      e_rab_shape = None
+      e_rab_dtype = None
+      e_utran_shape = None
+      e_utran_dtype = None
+    for data_array in ingestion_item_dict['data_array_list']:
+      if 'e_rab' in data_array.getReference():
+        self.assertTrue(data_array.getArrayDtype() == e_rab_dtype)
+        self.assertTrue(data_array.getArrayShape() == e_rab_shape)
+      elif 'e_utran' in data_array.getReference():
+        self.assertTrue(data_array.getArrayDtype() == e_utran_dtype)
+        self.assertTrue(data_array.getArrayShape() == e_utran_shape)
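Note (not part of the diff): the expected dtypes above describe numpy structured arrays; a standalone illustration, assuming plain numpy outside the ERP5 Data Array wrappers, of what a (24,)-shaped E-RAB array with that dtype looks like.

  import numpy as np

  e_rab_dtype = [
    ('vt', '<f8'),
    ('vInitialEPSBEstabSR_lo', '<f8'), ('vInitialEPSBEstabSR_hi', '<f8'),
    ('vAddedEPSBEstabSR_lo', '<f8'), ('vAddedEPSBEstabSR_hi', '<f8')
  ]
  # One row per KPI measurement interval; 24 rows matches the "valid" fixture expectation above
  e_rab_array = np.zeros((24,), dtype=e_rab_dtype)
  assert e_rab_array.shape == (24,) and e_rab_array.dtype.names[0] == 'vt'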
   def test_04_2_ingestInvalidOrsLogDataFromFluentd(self):
     '''
-    Test an invalid ORS log ingestion: simulate a fluentd gateway forwarding invalid ORS logs to a project on the platform.
-    Check that all items are valid, but that the data arrays contain no data due to the invalid data chunk being ignored.
+    Test an invalid ORS log ingestion: simulate a fluentd gateway forwarding invalid ORS logs to the platform.
+    Check that all items are still valid, as only the invalid measurements are ignored.
     '''
     # Call the above test, but test with invalid data
-    self.test_04_1_ingestValidOrsLogDataFromFluentd(valid_data=False)
+    self.test_04_1_ingestValidOrsLogDataFromFluentd(data_key="invalid")
-  def test_05_wendelinTelecomSecurityModel(self):
+  def test_04_3_ingestEmptyOrsLogDataFromFluentd(self):
     '''
-    Test Wendelin Telecom's custom security model: check that different users have the correct permissions according to their function and project.
+    Test an empty ORS log ingestion: simulate a fluentd gateway forwarding empty ORS logs to the platform.
+    Check that all items are valid, but the data arrays remain uninitialized.
     '''
-    # Create two distinct projects
-    project_a_items = self.createOrsClientProject()
-    ors_a_reference = project_a_items['data_acquisition_unit'].getReference()
-    project_b_items = self.createOrsClientProject()
-    ors_b_reference = project_b_items['data_acquisition_unit'].getReference()
-    # Perform ingestions for the ORSs of both projects
-    ingestion_a_items = self.getOrsLogIngestionItems(self.test_ors_example_log_valid, ors_a_reference, stop_data_analysis=True)
-    ingestion_b_items = self.getOrsLogIngestionItems(self.test_ors_example_log_valid, ors_b_reference, stop_data_analysis=True)
+    # Call the above test, but test with empty data
+    self.test_04_1_ingestValidOrsLogDataFromFluentd(data_key="empty")
+
+  def test_04_4_ingestOrsLogDataWithoutPrefix(self):
+    '''
+    Simulate an entity trying to send data to the platform for ingestion
+    without using the 'ors.' prefix added by fluentd.
+    Check that the ingestion is refused.
+    '''
+    # No need to register an ORS here
+    entity_tag = generateRandomString()
+    # Call the script that parses the ingestion tag
+    # Check that it raises the expected error
+    self.assertRaises(ValueError, self.portal.IngestionPolicy_parseOrsFluentdTag, entity_tag)
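Note (not part of the diff): IngestionPolicy_parseOrsFluentdTag itself is not shown in this commit; the following is a hypothetical sketch of the kind of check it performs, inferred only from the tag pattern used above and the ValueError the test expects. The real script may differ.

  import re

  # Hypothetical: accept 'ors<hostname>_COMP-<comp-id>_e0x<enb-id><suffix>' style tags
  ORS_TAG_PATTERN = re.compile(r'^ors(?P<hostname>[^_]+)_COMP-(?P<comp_id>[^_]+)_e0x(?P<enb_id>[0-9A-F]+)')

  def parse_ors_fluentd_tag(tag):
    match = ORS_TAG_PATTERN.match(tag)
    if match is None:
      raise ValueError("Invalid ORS tag %s found" % tag)
    return match.group('hostname'), match.group('comp_id'), match.group('enb_id')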
+  def test_05_wendelinTelecomSecurityModel(self):
+    '''
+    Test Wendelin Telecom's custom security model:
+    check that different users have the correct permissions according to their function and project.
+    '''
-    ors_n_reference = 'ors.test_%s' % generateRandomString()
-    # Generate a Data Acquisition Unit without a project (should not happen in practice)
-    data_acquisition_unit = self.portal.data_acquisition_unit_module.newContent(
-      portal_type='Data Acquisition Unit',
-      reference=ors_n_reference
-    )
-    data_acquisition_unit.validate()
-    self.tic()
-    data_acquisition_unit.DataAcquisitionUnit_createOrsDataSupply(batch=1)
-    self.tic()
-    # Perform an ingestion for the ORS not associated to a project
-    ingestion_n_items = self.getOrsLogIngestionItems(self.test_ors_example_log_valid, ors_n_reference, stop_data_analysis=True)
+    # Setup two distinct projects with one linked ORS each
+    project_a_item_dict = self.registerOrsClientProject()
+    ors_a_item_dict = self.registerOrs()
+    ors_a_tag = ors_a_item_dict['data_acquisition_unit'].getReference()
+    ors_a_item_dict['data_supply'].setDestinationProject(project_a_item_dict['project'].getRelativeUrl())
+    project_b_item_dict = self.registerOrsClientProject()
+    ors_b_item_dict = self.registerOrs()
+    ors_b_tag = ors_b_item_dict['data_acquisition_unit'].getReference()
+    ors_b_item_dict['data_supply'].setDestinationProject(project_b_item_dict['project'].getRelativeUrl())
+    # Register a third ORS without linking it to a project
+    ors_n_item_dict = self.registerOrs()
+    ors_n_tag = ors_n_item_dict['data_acquisition_unit'].getReference()
+    # Perform ingestions for all three ORSs
+    ingestion_a_item_dict = self.getOrsLogIngestionItems(self.test_ors_example_log_valid, ors_a_tag, stop_data_analysis=True)
+    ingestion_b_item_dict = self.getOrsLogIngestionItems(self.test_ors_example_log_valid, ors_b_tag, stop_data_analysis=True)
+    ingestion_n_item_dict = self.getOrsLogIngestionItems(self.test_ors_example_log_valid, ors_n_tag, stop_data_analysis=True)
-    # Create a client user not associated to a project (should not happen in practice)
-    client_user_n = self.createWendelinTelecomUser('test_user_%s' % generateRandomString(), None, 'user')
+    # Create a client user not associated to a project
+    client_user_n = self.createWendelinTelecomUser('test_user_%s' % generateRandomString(), None, 'user')
     # Create two administrator users: one associated to Project A and the second not associated to a project
-    admin_user_a = self.createWendelinTelecomUser('test_user_%s' % generateRandomString(), project_a_items['project'].getRelativeUrl(), 'administrator')
-    admin_user_n = self.createWendelinTelecomUser('test_user_%s' % generateRandomString(), None, 'administrator')
+    admin_user_a = self.createWendelinTelecomUser('test_user_%s' % generateRandomString(), project_a_item_dict['project'].getRelativeUrl(), 'administrator')
+    admin_user_n = self.createWendelinTelecomUser('test_administrator_%s' % generateRandomString(), None, 'administrator')
     # Check that the client of Project A only has access to Project A documents
-    client_user_a = project_a_items['client_user']
-    self.checkModulePermissions(client_user_a, False)
-    self.checkIngestionDocumentsPermissions(client_user_a, ingestion_a_items, False, True)
-    self.checkIngestionDocumentsPermissions(client_user_a, ingestion_b_items, False, False)
-    self.checkIngestionDocumentsPermissions(client_user_a, ingestion_n_items, False, False)
+    client_user_a = project_a_item_dict['client_user']
+    self.checkModulePermissions(client_user_a)
+    self.checkIngestionDocumentsPermissions(client_user_a, ingestion_a_item_dict)
+    self.checkIngestionDocumentsPermissions(client_user_a, ingestion_b_item_dict)
+    self.checkIngestionDocumentsPermissions(client_user_a, ingestion_n_item_dict)
     # Check that the client of project_B only has access to project_B documents
-    client_user_b = project_b_items['client_user']
-    self.checkModulePermissions(client_user_b, False)
-    self.checkIngestionDocumentsPermissions(client_user_b, ingestion_a_items, False, False)
-    self.checkIngestionDocumentsPermissions(client_user_b, ingestion_b_items, False, True)
-    self.checkIngestionDocumentsPermissions(client_user_b, ingestion_n_items, False, False)
+    client_user_b = project_b_item_dict['client_user']
+    self.checkModulePermissions(client_user_b)
+    self.checkIngestionDocumentsPermissions(client_user_b, ingestion_a_item_dict)
+    self.checkIngestionDocumentsPermissions(client_user_b, ingestion_b_item_dict)
+    self.checkIngestionDocumentsPermissions(client_user_b, ingestion_n_item_dict)
     # Check that the client without a project does not have access to any document
-    self.checkModulePermissions(client_user_n, False)
-    self.checkIngestionDocumentsPermissions(client_user_n, ingestion_a_items, False, False)
-    self.checkIngestionDocumentsPermissions(client_user_n, ingestion_b_items, False, False)
-    self.checkIngestionDocumentsPermissions(client_user_n, ingestion_n_items, False, False)
+    self.checkModulePermissions(client_user_n)
+    self.checkIngestionDocumentsPermissions(client_user_n, ingestion_a_item_dict)
+    self.checkIngestionDocumentsPermissions(client_user_n, ingestion_b_item_dict)
+    self.checkIngestionDocumentsPermissions(client_user_n, ingestion_n_item_dict)
     # Check that both administrators, whether assigned to a project, have access to all documents
-    self.checkModulePermissions(admin_user_a, True)
-    self.checkIngestionDocumentsPermissions(admin_user_a, ingestion_a_items, True, True)
-    self.checkIngestionDocumentsPermissions(admin_user_a, ingestion_b_items, True, False)
-    self.checkIngestionDocumentsPermissions(admin_user_a, ingestion_n_items, True, False)
-    self.checkModulePermissions(admin_user_a, True)
-    self.checkIngestionDocumentsPermissions(admin_user_n, ingestion_a_items, True, False)
-    self.checkIngestionDocumentsPermissions(admin_user_n, ingestion_b_items, True, False)
-    self.checkIngestionDocumentsPermissions(admin_user_n, ingestion_n_items, True, False)
+    self.checkModulePermissions(admin_user_a)
+    self.checkIngestionDocumentsPermissions(admin_user_a, ingestion_a_item_dict)
+    self.checkIngestionDocumentsPermissions(admin_user_a, ingestion_b_item_dict)
+    self.checkIngestionDocumentsPermissions(admin_user_a, ingestion_n_item_dict)
+    self.checkModulePermissions(admin_user_a)
+    self.checkIngestionDocumentsPermissions(admin_user_n, ingestion_a_item_dict)
+    self.checkIngestionDocumentsPermissions(admin_user_n, ingestion_b_item_dict)
+    self.checkIngestionDocumentsPermissions(admin_user_n, ingestion_n_item_dict)
+    # Check that the ingestor user only has access to documents needed for ingestion
+    self.checkModulePermissions(self.ingestor_user)
+    self.checkIngestionDocumentsPermissions(self.ingestor_user, ingestion_a_item_dict)
+    self.checkIngestionDocumentsPermissions(self.ingestor_user, ingestion_b_item_dict)
+    self.checkIngestionDocumentsPermissions(self.ingestor_user, ingestion_n_item_dict)