Commit b04e1a04 authored by Eteri

erp5_wendelin : fix ERP5Site_createDataAnalysisList for new functionality

parent 9b8f5496
@@ -4,7 +4,6 @@ from Products.ERP5Type.Errors import UnsupportedWorkflowMethod
 portal = context.getPortalObject()
 portal_catalog = portal.portal_catalog
 now = DateTime()
 if not include_delivered:
@@ -49,6 +48,7 @@ for movement in portal_catalog(query):
     validation_state = "validated",
     resource_relative_url = movement.getResource()))
   for transformation in transformation_list:
     is_shared_data_analysis = False
     # Check if analysis already exists
     data_analysis = portal_catalog.getResultValue(
@@ -85,6 +85,7 @@ for movement in portal_catalog(query):
       destination = delivery.getDestination(),
       destination_section = delivery.getDestinationSection(),
       destination_project = delivery.getDestinationProject())
     data_analysis.checkConsistency(fixit=True)
     # create input and output lines
     for transformation_line in transformation.objectValues(
@@ -99,6 +100,10 @@ for movement in portal_catalog(query):
       if is_shared_data_analysis and quantity > -1:
         continue
+      aggregate_set = set()
+      # manually add device to every line
+      aggregate_set.add(movement.getAggregateDevice())
       # If it is batch processing we additionally get items from the other
       # batch movements and deliver the other batch movements
       if transformation_line.getUse() == "big_data/ingestion/batch" and \
@@ -116,6 +121,41 @@ for movement in portal_catalog(query):
           #aggregate_set.update(related_movement.getAggregateSet())
           related_movement.getParentValue().deliver()
+      # create new item based on item_type if it is not already aggregated
+      aggregate_type_set = set(
+        [portal.restrictedTraverse(a).getPortalType() for a in aggregate_set])
+      for item_type in transformation_line.getAggregatedPortalTypeList():
+        # create item if it does not exist yet.
+        # Except if it is a Data Array Line, then it is currently created by
+        # data operation itself (probably this exception is inconsistent)
+        if item_type not in aggregate_type_set and item_type != "Data Array Line":
+          item = portal.portal_catalog.getResultValue(
+            portal_type=item_type,
+            validation_state="validated",
+            item_variation_text=transformation_line.getVariationText(),
+            item_device_relative_url=movement.getAggregateDevice(),
+            item_project_relative_url=data_analysis.getDestinationProject(),
+            item_resource_uid=resource.getUid(),
+            item_source_relative_url=data_analysis.getSource())
+          #if transformation_line.getRelativeUrl() == "data_transformation_module/woelfel_r0331_statistic_raw":
+          # raise TypeError("JUST STOP")
+          if item is None:
+            module = portal.getDefaultModule(item_type)
+            item = module.newContent(portal_type = item_type,
+                                     title = transformation.getTitle(),
+                                     reference = "%s-%s" %(transformation.getTitle(),
+                                                           delivery.getReference()),
+                                     version = '001')
+            try:
+              item.validate()
+            except AttributeError:
+              pass
+          aggregate_set.add(item.getRelativeUrl())
       data_analysis_line = data_analysis.newContent(
         portal_type = "Data Analysis Line",
         title = transformation_line.getTitle(),
@@ -126,14 +166,13 @@ for movement in portal_catalog(query):
         quantity = quantity,
         quantity_unit = transformation_line.getQuantityUnit(),
         use = transformation_line.getUse(),
-        aggregate = movement.getAggregateDevice())
+        aggregate_set = aggregate_set)
       # for input lines of first level analysis set causality and specialise
       if quantity < 0 and delivery.getPortalType() == "Data Ingestion":
         data_analysis_line.edit(
           causality_value = delivery,
           specialise_value_list = data_supply_list)
     data_analysis.checkConsistency(fixit=True)
     try:
       data_analysis.start()
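
The core of the change is that each Data Analysis Line now aggregates a set of items (the ingestion device plus one item per aggregated portal type declared on the transformation line) instead of only the device. The stand-alone sketch below mirrors that flow; all class and function names (StubItem, build_aggregate_set, find_validated_item, new_item) are invented for illustration and are not part of the commit or of the ERP5 API.

# Illustrative sketch only; every name below is a stand-in, not ERP5 API.

class StubItem(object):
    """Minimal stand-in for an aggregated ERP5 item (e.g. a Data Array)."""

    def __init__(self, relative_url, portal_type):
        self._relative_url = relative_url
        self._portal_type = portal_type
        self.validated = False

    def getRelativeUrl(self):
        return self._relative_url

    def getPortalType(self):
        return self._portal_type

    def validate(self):
        self.validated = True


def build_aggregate_set(device, aggregated_portal_type_list,
                        find_validated_item, new_item):
    """Mirror of the new per-line aggregation logic.

    Start from the ingestion device, then make sure one item exists for every
    portal type the transformation line wants aggregated, except Data Array
    Lines, which the data operation creates itself.  Return the relative URLs
    that the script passes as aggregate_set to the new Data Analysis Line.
    """
    aggregate_set = set([device.getRelativeUrl()])
    aggregate_type_set = set([device.getPortalType()])
    for item_type in aggregated_portal_type_list:
        if item_type not in aggregate_type_set and item_type != "Data Array Line":
            item = find_validated_item(item_type)   # catalog lookup stand-in
            if item is None:
                item = new_item(item_type)          # newContent stand-in
                item.validate()
            aggregate_set.add(item.getRelativeUrl())
    return aggregate_set


if __name__ == "__main__":
    existing = {"Data Array": StubItem("data_array_module/1", "Data Array")}

    def find_validated_item(item_type):
        # Pretend catalog: return an already validated item if we have one.
        return existing.get(item_type)

    def new_item(item_type):
        # Pretend module.newContent: build a fresh item of the requested type.
        return StubItem("%s_module/new" % item_type.lower().replace(" ", "_"),
                        item_type)

    device = StubItem("data_acquisition_unit_module/sensor_1",
                      "Data Acquisition Unit")
    print(sorted(build_aggregate_set(
        device,
        ["Data Array", "Data Stream", "Data Array Line"],
        find_validated_item,
        new_item)))
    # The existing Data Array is reused, a Data Stream is created and
    # validated, the Data Array Line is skipped, and the device is kept.

In the actual script the lookup and creation go through portal_catalog.getResultValue and portal.getDefaultModule(item_type).newContent, and item.validate() is wrapped in try/except AttributeError because not every portal type has a validation workflow.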