Commit 85d8329a authored by Ophélie Gagnard

slapos_metadata_transform_agent&test: multi data array

parent e5024fd6
...@@ -4,7 +4,6 @@ portal = context.getPortalObject()
portal.portal_catalog.searchAndActivate(
  portal_type="Data Array",
  publication_section_relative_url="publication_section/file_system_image/node_image",
  simulation_state="converted",
  method_id='DataArray_generateDiffND',
  activate_kw={'tag': tag},
...
portal = context.getPortalObject()
operation = None
use = None
parameter_dict = {}

context.checkConsistency(fixit=True)

initial_product = context.getSpecialiseValue(portal_type="Data Transformation").getResourceValue()

for analysis_line in context.objectValues(portal_type="Data Analysis Line"):
  resource = analysis_line.getResourceValue()
  if resource == initial_product:
    use = analysis_line.getUse()
  if resource is not None:
    resource_portal_type = resource.getPortalType()
  else:
    resource_portal_type = ''
  if resource_portal_type == 'Data Operation':
    operation_analysis_line = analysis_line
    operation = analysis_line.getResourceValue()
  else:
    parameter = {}
    for portal_type in ["Data Array", "Progress Indicator"] + \
        list(portal.getPortalDataSinkTypeList()) + \
        list(portal.getPortalDataDescriptorTypeList()):
      value = analysis_line.getAggregateValue(portal_type=portal_type)
      if value is not None:
        parameter[portal_type] = value
    # data array: prefer an aggregated Data Array that is not yet processed
    for value in analysis_line.getAggregateValueList(portal_type='Data Array'):
      if value.getSimulationState() != 'processed':
        parameter['Data Array'] = value
    for base_category in analysis_line.getVariationRangeBaseCategoryList():
      parameter[base_category] = analysis_line.getVariationCategoryItemList(
        base_category_list=(base_category,))[0][0]
    reference = analysis_line.getReference()
    # several lines with the same reference will turn the parameter into a list
    if reference in parameter_dict:
      if not isinstance(parameter_dict[reference], list):
        parameter_dict[reference] = [parameter_dict[reference]]
      parameter_dict[reference].append(parameter)
    else:
      parameter_dict[reference] = parameter

script_id = operation.getScriptId()
out = getattr(operation_analysis_line, script_id)(**parameter_dict)
if out == 1:
  context.activate(serialization_tag=str(context.getUid())).DataAnalysis_executeDataOperation()
else:
  # only stop batch ingestions
  if use == "big_data/ingestion/batch":
    context.stop()
  # stop refresh
  if context.getRefreshState() == "refresh_started":
    context.stopRefresh()
return out
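A minimal sketch, for illustration only, of the structure the loop above produces when several Data Analysis Lines share a reference (the reference names and values below are hypothetical, not taken from the commit):

# Hypothetical example: two Data Analysis Lines with reference 'out_array'
# and one with reference 'out_stream'.  In the real script the values are
# ERP5 documents (Data Array, Data Stream, ...); strings are used here only
# to keep the sketch self-contained.
parameter_dict = {
  'out_stream': {'Data Stream': 'data_stream_1'},   # single line: plain dict
  'out_array': [{'Data Array': 'data_array_1'},     # repeated reference:
                {'Data Array': 'data_array_2'}],    # collapsed into a list
}
# The dict is then expanded into keyword arguments:
#   getattr(operation_analysis_line, script_id)(**parameter_dict)
# so an operation script handling 'out_array' must accept both a plain dict
# (one line) and a list of dicts (several lines).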
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>DataAnalysis_executeDataOperation</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
import numpy as np
from Products.ZSQLCatalog.SQLCatalog import AndQuery, Query

# Do nothing for reference image, just change state
if 'file_system_image/reference_image' in context.getPublicationSectionList():
  context.processFile()
  return

for publication_section in context.getPublicationSectionList():
  if 'distribution' in publication_section:
    current_node_distribution = publication_section
...
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="PythonScript" module="Products.PythonScripts.PythonScript"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>Script_magic</string> </key>
<value> <int>3</int> </value>
</item>
<item>
<key> <string>_bind_names</string> </key>
<value>
<object>
<klass>
<global name="NameAssignments" module="Shared.DC.Scripts.Bindings"/>
</klass>
<tuple/>
<state>
<dictionary>
<item>
<key> <string>_asgns</string> </key>
<value>
<dictionary>
<item>
<key> <string>name_container</string> </key>
<value> <string>container</string> </value>
</item>
<item>
<key> <string>name_context</string> </key>
<value> <string>context</string> </value>
</item>
<item>
<key> <string>name_m_self</string> </key>
<value> <string>script</string> </value>
</item>
<item>
<key> <string>name_subpath</string> </key>
<value> <string>traverse_subpath</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</state>
</object>
</value>
</item>
<item>
<key> <string>_params</string> </key>
<value> <string>include_delivered=False</string> </value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>ERP5Site_createDataAnalysisList</string> </value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
...@@ -373,3 +373,54 @@ tail.0:2021-10-15 15:11:02.230745474 +0200 CEST[fluentbit_end]\n'
    )
    self.assertEqual(len(data_array_list), 1)
    self.assertEqual(data_array_list[0].getCausalityValue(), self.data_product)

  def test_multi_data_array(self):
    request = self.portal.REQUEST
    request_dict = self._create_request_dict()
    for reference in request_dict:
      request.environ["REQUEST_METHOD"] = 'POST'
      request.set('reference', reference)
      request.set('data_chunk', request_dict[reference])
      self.portal.portal_ingestion_policies.metadata_upload.ingest()
      self.tic()

    data_stream_list = self.portal.portal_catalog(portal_type='Data Stream')
    self.assertEqual(len(data_stream_list), 3)

    self.portal.portal_alarms.wendelin_handle_analysis.activeSense()
    self.tic()
    self.portal.portal_alarms.wendelin_handle_analysis.activeSense()
    self.tic()

    # 2 references, 1 node
    data_array_list = self.portal.portal_catalog(portal_type='Data Array')
    self.assertEqual(len(data_array_list), 3)

    # 1 is created
    self.portal.portal_alarms.slapos_check_node_status.activeSense()
    self.tic()
    self.portal.portal_alarms.slapos_check_node_status.activeSense()
    self.tic()

    data_array_list = self.portal.portal_catalog(portal_type='Data Array')
    self.assertEqual(len(data_array_list), 4)
    for data_array in data_array_list:
      self.assertEqual(data_array.getSimulationState(), 'processed')

    request = self.portal.REQUEST
    request_dict = self._create_request_dict()
    for reference in request_dict:
      request.environ["REQUEST_METHOD"] = 'POST'
      request.set('reference', reference)
      request.set('data_chunk', request_dict[reference])
      self.portal.portal_ingestion_policies.metadata_upload.ingest()
      self.tic()

    self.portal.portal_alarms.wendelin_handle_analysis.activeSense()
    self.tic()
    self.portal.portal_alarms.wendelin_handle_analysis.activeSense()
    self.tic()

    new_data_array_list = self.portal.portal_catalog(portal_type='Data Array')
    new_data_array_list = [x for x in new_data_array_list
                           if x.getSimulationState() == 'converted']
    self.assertEqual(len(new_data_array_list), 3)