Commit 7365979b authored by Kirill Smelkov


bigarray/tests: Factor out a way to specify on which BigFile/BigFileH an array is tested into a fixture parameter

Currently we have only one BigFile and its BigFileH handle. However, in the
next patch, for RAMArray, we'll be adding handles for opened RAM files, and
it would be good to test the whole of BigArray functionality on data served
by those handles too.

Prepare for this and first factor out, into the testbig fixture, the way to
open such handles.
parent 318efce0
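The change below relies on pytest's parametrized, module-scoped fixture mechanism: every test that accepts testbig runs once per entry in params, so the planned RAMArray handle provider could later be hooked in by appending one more provider class to that list. Here is a minimal, self-contained sketch of that pattern; ProviderA/ProviderB are purely illustrative names standing in for the real BigFile-backed providers and are not part of this patch:

    from pytest import fixture

    class ProviderA:
        def fopen(self):
            return "handle-A"

    class ProviderB:
        def fopen(self):
            return "handle-B"

    # module-scoped parametrized fixture: each test requesting testbig
    # is collected and run once per provider class listed in params
    @fixture(scope="module", params=[ProviderA, ProviderB])
    def testbig(request):
        yield request.param()

    def test_fopen(testbig):
        # the test body only talks to the provider's .fopen() interface,
        # so it is exercised against every provider
        assert testbig.fopen().startswith("handle-")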
@@ -28,7 +28,7 @@ from numpy import ndarray, dtype, int64, int32, uint32, int16, uint8, all, zeros
 from numpy.lib.stride_tricks import as_strided
 import numpy
-from pytest import raises
+from pytest import raises, fixture

 # Synthetic bigfile that just loads zeros, and ignores writes (= a-la /dev/zero)
@@ -59,19 +59,35 @@ class BigFile_Data(BigFile):
         memcpy(self.datab[self.blksize * blk : self.blksize * (blk+1)], buf)

-# synthetic bigfile that only loads data from numpy array
-class BigFile_Data_RO(BigFile_Data):
-    def storeblk(self, blk, buf):
-        raise RuntimeError('tests should not try to change test data')
+PS = 2*1024*1024    # FIXME hardcoded, TODO -> ram.pagesize
+
+# tBigFile provides .fopen() to fileh_open a big file handle via ^^^ BigFile_*
+class tBigFile:
+    def fopen(self, data=None, readonly=False):
+        if data is None:
+            bigf = BigFile_Zero(PS)
+        else:
+            bigf = BigFile_Data(data, PS)
+        if readonly:
+            def _(self, blk, buf):
+                raise RuntimeError('tests should not try to change test data')
+            bigf.storeblk = _
+        return bigf.fileh_open()
+
+# testbig is fixture that provides .fopen(...) to open a big file handle from
+# ^^^ BigFile_*.
+@fixture(scope="module", params=[tBigFile])
+def testbig(request):
+    cls = request.param
+    yield cls()

-PS = 2*1024*1024    # FIXME hardcoded, TODO -> ram.pagesize

 # make sure we don't let dtype with object to be used with BigArray
-def test_bigarray_noobject():
-    Z = BigFile_Zero(PS)
-    Zh = Z.fileh_open()
+def test_bigarray_noobject(testbig):
+    Zh = testbig.fopen()

     # NOTE str & unicode are fixed-size types - if size is not explicitly given
     # it will become S0 or U0
@@ -81,9 +97,8 @@ def test_bigarray_noobject():

 # basic ndarray-compatibility attributes of BigArray
-def test_bigarray_basic():
-    Z = BigFile_Zero(PS)
-    Zh = Z.fileh_open()
+def test_bigarray_basic(testbig):
+    Zh = testbig.fopen()

     A = BigArray((10,3), int32, Zh)
@@ -143,9 +158,8 @@ class DoubleCheck(DoubleGet):

 # getitem/setitem (1d case)
-def test_bigarray_indexing_1d():
-    Z = BigFile_Zero(PS)
-    Zh = Z.fileh_open()
+def test_bigarray_indexing_1d(testbig):
+    Zh = testbig.fopen()

     A = BigArray((10*PS,), uint8, Zh)
@@ -303,12 +317,11 @@ def test_bigarray_indexing_1d():

 # indexing where accessed element overlaps edge between pages
-def test_bigarray_indexing_pageedge():
+def test_bigarray_indexing_pageedge(testbig):
     shape = (10, PS-1)
     data = arange(mul(shape), dtype=uint32).view(uint8)   # NOTE 4 times bigger than uint8
-    f = BigFile_Data_RO(data, PS)
-    fh = f.fileh_open()
+    fh = testbig.fopen(data, readonly=True)

     A = BigArray(shape, uint8, fh)          # bigarray with test data and shape
     A_ = data[:mul(shape)].reshape(shape)   # ndarray ----//----
@@ -325,8 +338,7 @@ def test_bigarray_indexing_pageedge():

     shape = (10, PS+1)
-    f = BigFile_Data_RO(data, PS)
-    fh = f.fileh_open()
+    fh = testbig.fopen(data, readonly=True)

     A = BigArray(shape, uint8, fh)
     A_ = data[:mul(shape)].reshape(shape)
@@ -371,7 +383,7 @@ def idx_to_test(shape, idx_prefix=()):

 # getitem/setitem (Nd case)
-def test_bigarray_indexing_Nd():
+def test_bigarray_indexing_Nd(testbig):
     # shape of tested array - all primes, total size for uint32 ~ 7 2M pages
     # XXX even less dimensions (to speed up tests)?
     shape = tuple(reversed( (17,23,101,103) ))
@@ -381,8 +393,7 @@ def test_bigarray_indexing_Nd():

     # (else data slice will be smaller than buf)
     data = arange(mul(shape) + PS, dtype=uint32)
-    f = BigFile_Data_RO(data, PS)
-    fh = f.fileh_open()
+    fh = testbig.fopen(data, readonly=True)

     for order in ('C', 'F'):
         A = BigArray(shape, uint32, fh, order=order)   # bigarray with test data and shape
@@ -440,10 +451,9 @@ def test_bigarray_indexing_Nd():
 """

-def test_bigarray_resize():
+def test_bigarray_resize(testbig):
     data = zeros(8*PS, dtype=uint32)
-    f = BigFile_Data(data, PS)
-    fh = f.fileh_open()
+    fh = testbig.fopen(data)

     # set first part & ensure it is set correctly
     A = BigArray((10,3), uint32, fh)
@@ -507,11 +517,10 @@ def arange32_f(start, stop, dtype=None):
     return arange(start*3*2, stop*3*2, dtype=dtype).reshape(2,3,(stop-start), order='F')
     #return arange(start*3*2, stop*3*2, dtype=dtype).reshape(2,3,(stop-start))

-def test_bigarray_append():
+def test_bigarray_append(testbig):
     for order in ('C', 'F'):
         data = zeros(8*PS, dtype=uint32)
-        f = BigFile_Data(data, PS)
-        fh = f.fileh_open()
+        fh = testbig.fopen(data)

         arange32 = {'C': arange32_c, 'F': arange32_f} [order]
@@ -552,9 +561,8 @@ def test_bigarray_append():

-def test_bigarray_list():
-    Z = BigFile_Zero(PS)
-    Zh = Z.fileh_open()
+def test_bigarray_list(testbig):
+    Zh = testbig.fopen()

     A = BigArray((10,), uint8, Zh)
     # the IndexError for out-of-bound scalar access should allow, though
@@ -564,9 +572,8 @@ def test_bigarray_list():
     assert l == [0]*10

-def test_bigarray_to_ndarray():
-    Z = BigFile_Zero(PS)
-    Zh = Z.fileh_open()
+def test_bigarray_to_ndarray(testbig):
+    Zh = testbig.fopen()

     A = BigArray((10,), uint8, Zh)
     # without IndexError on out-of-bound scalar access, the following
@@ -594,7 +601,7 @@ def test_bigarray_to_ndarray():

-def test_arrayref():
+def test_arrayref(testbig):
     # test data - all items are unique - so we can check array by content
     data = zeros(PS, dtype=uint8)
     data32 = data.view(uint32)
@@ -683,8 +690,7 @@ def test_arrayref():

     # data_ is the same as data but shifted to exercise vma and vma->broot offsets calculation.
     data_ = zeros(8*PS, dtype=uint8)
     data_[2*PS-1:][:PS] = data
-    f = BigFile_Data_RO(data_, PS)
-    fh = f.fileh_open()
+    fh = testbig.fopen(data_, readonly=True)

     A = BigArray(data_.shape, data_.dtype, fh)
     assert array_equal(A[2*PS-1:][:PS], data)