Commit ae7db4ae authored by Bryton Lacquement

BigFile: fix streaming by returning stream iterators instead of writing to RESPONSE (WSGI/Medusa compatibility)

parent 2d94df67
@@ -28,6 +28,7 @@ from cStringIO import StringIO
from ZPublisher.HTTPRequest import HTTPRequest
from ZPublisher.HTTPResponse import HTTPResponse
from ZPublisher.Iterators import IUnboundStreamIterator
from Products.ERP5Type.tests.ERP5TypeTestCase import ERP5TypeTestCase
from Products.ERP5Type.BTreeData import BTreeData
@@ -102,6 +103,13 @@ class TestBigFile(ERP5TypeTestCase):
# check that object (document) method corresponding to request returns
# result, with expected response body, status and headers
def checkRequest(self, document, request, kw, result, body, status, header_dict):
assert type(result) is str
assert type(body) is str
# - result corresponds to the content returned as a string;
# - body corresponds to the content returned inside a stream iterator.
# We can't have both at the same time.
assert not (result and body)
# request -> method to call
method_name = request['REQUEST_METHOD']
if method_name == 'GET':
@@ -112,18 +120,18 @@ class TestBigFile(ERP5TypeTestCase):
# like in ZPublisher - returned RESPONSE means empty
if ret is request.RESPONSE:
ret = ''
self.assertEqual(ret, result)
self.assertEqual(status, request.RESPONSE.getStatus())
elif IUnboundStreamIterator.providedBy(ret):
ret = ''.join(ret)
self.assertEqual(status, request.RESPONSE.getStatus())
for h,v in header_dict.items():
rv = request.RESPONSE.getHeader(h)
self.assertEqual(v, rv, '%s: %r != %r' % (h, v, rv))
# force response flush to its stdout
request.RESPONSE.write('')
# body and headers are delimited by empty line (RFC 2616, 4.1)
response_body = request.RESPONSE.stdout.getvalue().split('\r\n\r\n', 1)[1]
self.assertEqual(body, response_body)
if result:
self.assertEqual(ret, result)
elif body:
self.assertEqual(ret, body)
else:
self.assertEqual(ret, '')
# basic tests for working with BigFile via its public interface
def testBigFile_01_Basic(self):
@@ -356,4 +364,3 @@ class TestBigFile(ERP5TypeTestCase):
# TODO write big data to file and ensure it still works
# TODO test streaming works in chunks
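For illustration, the result/body contract that checkRequest now enforces boils down to the sketch below: a plain string result is compared directly, while a streamed body is joined from the iterator's chunks first. The materialize helper and the sample values are assumptions for this note (the real helper checks IUnboundStreamIterator.providedBy(ret) rather than isinstance).

# Hypothetical sketch of the two return shapes the updated helper accepts.
def materialize(ret):
  if isinstance(ret, str):
    return ret                 # direct string result
  return ''.join(ret)          # stream iterator: concatenate its chunks

assert materialize('aaaa') == 'aaaa'
assert materialize(iter(['aa', 'aa'])) == 'aaaa'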
@@ -18,6 +18,7 @@ from cStringIO import StringIO
from AccessControl import ClassSecurityInfo
from Products.ERP5Type import Permissions, PropertySheet
from Products.ERP5Type.Base import removeIContentishInterface
from Products.ERP5Type.Utils import IterableAsStreamIterator
from Products.ERP5.Document.File import File, _MARKER
from Products.ERP5Type.BTreeData import BTreeData
from ZPublisher.HTTPRequest import FileUpload
@@ -214,7 +215,7 @@ class BigFile(File):
RESPONSE.setHeader('Content-Type', self.content_type)
RESPONSE.setHeader('Content-Length', self.getSize())
RESPONSE.setStatus(416)
return True
return ''
ranges = HTTPRangeSupport.expandRanges(ranges, self.getSize())
@@ -233,11 +234,8 @@ class BigFile(File):
# NOTE data cannot be None here (if it is - ranges are not satisfiable)
if isinstance(data, str):
RESPONSE.write(data[start:end])
return True
for chunk in data.iterate(start, end-start):
RESPONSE.write(chunk)
return True
return data[start:end]
return IterableAsStreamIterator(data.iterate(start, size), size)
else:
boundary = choose_boundary()
@@ -266,33 +264,35 @@ class BigFile(File):
draftprefix, boundary))
RESPONSE.setStatus(206) # Partial content
for start, end in ranges:
RESPONSE.write('\r\n--%s\r\n' % boundary)
RESPONSE.write('Content-Type: %s\r\n' %
self.content_type)
RESPONSE.write(
'Content-Range: bytes %d-%d/%d\r\n\r\n' % (
start, end - 1, self.getSize()))
# NOTE data cannot be None here (if it is - ranges are not satisfiable)
if isinstance(data, str):
RESPONSE.write(data[start:end])
else:
for chunk in data.iterate(start, end-start):
RESPONSE.write(chunk)
RESPONSE.write('\r\n--%s--\r\n' % boundary)
return True
self_content_type = self.content_type
self_getSize = self.getSize()
def generator():
for start, end in ranges:
yield '\r\n--%s\r\n' % boundary
yield 'Content-Type: %s\r\n' % self_content_type
yield 'Content-Range: bytes %d-%d/%d\r\n\r\n' % (
start, end - 1, self_getSize)
# NOTE data cannot be None here (if it is - ranges are not satisfiable)
if isinstance(data, str):
yield data[start:end]
else:
for chunk in data.iterate(start, end - start):
# BBB: Python 3.3+ yield from
yield chunk
yield '\r\n--%s--\r\n' % boundary
return IterableAsStreamIterator(generator(), size)
security.declareProtected(Permissions.View, 'index_html')
def index_html(self, REQUEST, RESPONSE, format=_MARKER, inline=_MARKER, **kw):
"""
Support streaming
"""
if self._range_request_handler(REQUEST, RESPONSE):
# we served a chunk of content in response to a range request.
return ''
response_iterable = self._range_request_handler(REQUEST, RESPONSE)
if response_iterable is not None:
# we serve a chunk of content in response to a range request.
return response_iterable
web_cache_kw = kw.copy()
if format is not _MARKER:
@@ -327,9 +327,7 @@ class BigFile(File):
if isinstance(data, str):
RESPONSE.setBase(None)
return data
for chunk in data.iterate():
RESPONSE.write(chunk)
return ''
return IterableAsStreamIterator(data.iterate(), len(data))
security.declareProtected(Permissions.ModifyPortalContent,'PUT')
def PUT(self, REQUEST, RESPONSE):
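As a reference for the multi-range branch above, generator() frames each requested range in a multipart/byteranges body. The standalone sketch below reproduces that framing with plain-string data and an illustrative boundary; the real code pulls chunks from BTreeData.iterate() and obtains the boundary from choose_boundary().

# Self-contained sketch of the multipart/byteranges framing emitted by
# generator(); data, ranges, boundary and content type are illustrative.
data = 'abcdefgh'
ranges = [(0, 2), (6, 8)]        # half-open [start, end) byte ranges
boundary = 'BOUNDARY'            # the real code calls choose_boundary()
content_type = 'application/octet-stream'

def multipart_byteranges():
  for start, end in ranges:
    yield '\r\n--%s\r\n' % boundary
    yield 'Content-Type: %s\r\n' % content_type
    yield 'Content-Range: bytes %d-%d/%d\r\n\r\n' % (start, end - 1, len(data))
    yield data[start:end]
  yield '\r\n--%s--\r\n' % boundary

# Joining the parts gives the exact byte stream the client receives; in the
# commit the parts are instead streamed lazily via IterableAsStreamIterator.
body = ''.join(multipart_byteranges())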
@@ -58,6 +58,9 @@ from Products.PageTemplates.Expressions import getEngine
from Products.PageTemplates.Expressions import SecureModuleImporter
from Products.ZCatalog.Lazy import LazyMap
from zope.interface import implementer
from ZPublisher.Iterators import IStreamIterator
try:
import chardet
except ImportError:
@@ -1795,3 +1798,24 @@ def formatRFC822Headers(headers):
vallines = linesplit.split(str(value))
munged.append('%s: %s' % (key, '\r\n '.join(vallines)))
return '\r\n'.join(munged)
#####################################################
# WSGI/Medusa compatibility
#####################################################
@implementer(IStreamIterator)
class IterableAsStreamIterator:
def __init__(self, iterable, content_length):
self.iterable = iterable
self.content_length = content_length
def __iter__(self):
return self
def __len__(self):
return self.content_length
def next(self):
for chunk in self.iterable:
return chunk
raise StopIteration
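A brief usage sketch of the new wrapper (chunk values and length are illustrative): it adapts a generator to ZPublisher's stream-iterator protocol while reporting the total length up front so the publisher can set Content-Length. The for-loop based next() above is written for single-pass iterables such as generators; a plain list would be re-iterated from its first element on every call.

# Usage sketch of IterableAsStreamIterator; values are illustrative.
def chunks():
  yield 'foo'
  yield 'bar'
  yield 'baz'

streamer = IterableAsStreamIterator(chunks(), content_length=9)
assert len(streamer) == 9                 # used for the Content-Length header
assert ''.join(streamer) == 'foobarbaz'   # chunks are pulled lazily via next()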