Commit e52fc349 authored by Martin Manchev's avatar Martin Manchev

Revert "Changes in 'erp5_wendelin' ..."

This reverts commit 4d702d2f.
parent b29292d2
......@@ -142,7 +142,7 @@ class DataArray(BigFile):
self.getArray().dtype.names = names
security.declareProtected(Permissions.View, 'index_html')
def index_html(self, REQUEST, RESPONSE, fmt=_MARKER, inline=_MARKER, **kw):
def index_html(self, REQUEST, RESPONSE, format=_MARKER, inline=_MARKER, **kw):
"""
Support streaming
"""
......@@ -174,12 +174,12 @@ class DataArray(BigFile):
RESPONSE.write(self.getArray()[tuple(slice_index_list)].tobytes())
return True
range = REQUEST.get_header('Range', None)
request_range = REQUEST.get_header('Request-Range', None)
if request_range is not None:
# Netscape 2 through 4 and MSIE 3 implement a draft version
# Later on, we need to serve a different mime-type as well.
# header_range = request_range
pass
range = request_range
if_range = REQUEST.get_header('If-Range', None)
if range is not None:
ranges = HTTPRangeSupport.parseRange(range)
......@@ -200,8 +200,7 @@ class DataArray(BigFile):
# Date
date = if_range.split( ';')[0]
try: mod_since=long(DateTime(date).timeTime())
except Exception:
mod_since=None
except: mod_since=None
if mod_since is not None:
last_mod = self._data_mtime()
if last_mod is None:
......
......@@ -73,7 +73,7 @@ class DataArrayView(DataArray):
Data Array like view on one or multiple Data Arrays
"""
def initArray(self, shape, dtype):
  """Disabled: a Data Array View has no storage of its own.

  The diff rendering left two consecutive `def` lines here (pre- and
  post-revert signatures); only the post-revert signature
  (`shape`, `dtype`) is kept — the stale duplicate would be a syntax
  error in plain Python.

  A DataArrayView is a read-only view over one or more Data Arrays,
  so initialising an underlying array is intentionally a no-op.
  Returns None.
  """
  # NOTE(review): deliberately does nothing rather than raising, to
  # preserve the original stub's behavior for existing callers.
  return None
......
......@@ -126,7 +126,7 @@ class DataBucketStream(Document):
PropertySheet.SortIndex
)
def __init__(self, id, **kw):
  """Create a Data Bucket Stream.

  The diff rendering left two consecutive `def` lines here (pre- and
  post-revert signatures); only the post-revert signature (`id`) is
  kept — the stale duplicate would be a syntax error in plain Python.
  `id` shadows the builtin but is the Zope/ERP5 constructor convention
  and is part of the public interface, so it stays.

  Parameters:
    id: Zope object id, forwarded to Document.__init__.
    **kw: remaining keyword arguments, forwarded to Document.__init__.
  """
  # Set up the two internal btree structures before delegating to the
  # base Document constructor.
  self.initBucketTree()
  self.initIndexTree()
  Document.__init__(self, id, **kw)
......@@ -192,7 +192,7 @@ class DataBucketStream(Document):
except ValueError:
return None
def _getOb(self, identifier, *args, **kw):
def _getOb(self, id, *args, **kw):
return None
def getBucketByKey(self, key=None):
......
if consuming_analysis_list is None:
consuming_analysis_list=[]
portal = context.getPortalObject()
operation = None
use_list = []
......
"""
This script will return all Data streams for Data set
"""
if limit is None:
limit=[]
catalog_kw = {'portal_type': 'Data Ingestion Line',
'aggregate_uid': context.getUid(),
'limit': limit,
......
......@@ -10,6 +10,7 @@
thus correction needed.
"""
import json
chunk_text = ''.join(chunk_list)
#context.log('%s %s %s' %(start, end, len(chunk_text)))
......@@ -30,6 +31,9 @@ for line in line_list:
if line.count('{') > 1:
# multiple concatenated dictionaries in one line, bad format ignore for now
pass
else:
d = json.loads(line)
# xxx: save this value as a Data Array identified by data_array_reference
# start and enf offsets may not match existing record structure in stream
# thus corrections in start and end offsets is needed thus we
......
from DateTime import DateTime
from erp5.component.module.DateUtils import addToDate
from Products.ZSQLCatalog.SQLCatalog import Query
from Products.ZSQLCatalog.SQLCatalog import Query, SimpleQuery
portal_catalog = context.getPortalObject().portal_catalog
......
obj = state_change['object']
obj.Base_checkConsistency()
object = state_change['object']
object.Base_checkConsistency()
obj = state_change['object']
obj.Base_checkConsistency()
object = state_change['object']
object.Base_checkConsistency()
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.