|
| 1 | +############################################################################## |
| 2 | +# Copyright by The HDF Group. # |
| 3 | +# All rights reserved. # |
| 4 | +# # |
| 5 | +# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # |
| 6 | +# Utilities. The full HDF5 REST Server copyright notice, including # |
| 7 | +# terms governing use, modification, and redistribution, is contained in # |
| 8 | +# the file COPYING, which can be found at the root of the source code # |
| 9 | +# distribution tree. If you do not have access to this file, you may # |
| 10 | +# request a copy from [email protected]. # |
| 11 | +############################################################################## |
| 12 | + |
| 13 | +import numpy as np |
| 14 | +import math |
| 15 | +import logging |
| 16 | +import config |
| 17 | + |
| 18 | +if config.get("use_h5py"): |
| 19 | + import h5py |
| 20 | +else: |
| 21 | + import h5pyd as h5py |
| 22 | + |
| 23 | +from common import ut, TestCase |
| 24 | + |
| 25 | + |
class TestScalarCompound(TestCase):
    """Tests for scalar (0-dimensional) datasets with compound dtypes.

    setUp creates a file holding a single scalar compound dataset 'x';
    the remaining tests check shape/size semantics, indexing behavior,
    and round-tripping of multi-field compound data.
    """

    def setUp(self):
        """Open a fresh file and create the scalar compound dataset 'x'."""
        filename = self.getFileName("scalar_compound_dset")
        print("filename:", filename)
        self.f = h5py.File(filename, "w")
        self.data = np.array((42.5, -118, "Hello"),
                             dtype=[('a', 'f'), ('b', 'i'), ('c', '|S10')])
        self.dset = self.f.create_dataset('x', data=self.data)

    def tearDown(self):
        # setUp opens a file handle that was previously never released;
        # close it so handles are not leaked across tests.  super() is
        # called in case the shared TestCase base defines its own cleanup.
        if self.f:
            self.f.close()
        super().tearDown()

    def test_ndim(self):
        """ A scalar dataset reports zero dimensions """
        self.assertEqual(self.dset.ndim, 0)

    def test_shape(self):
        """ A scalar dataset reports an empty shape tuple """
        self.assertEqual(self.dset.shape, tuple())

    def test_size(self):
        """ A scalar dataset holds exactly one element """
        self.assertEqual(self.dset.size, 1)

    def test_ellipsis(self):
        """ Ellipsis -> scalar ndarray """
        out = self.dset[...]
        # assertArrayEqual doesn't work with compounds; do manually
        self.assertIsInstance(out, np.ndarray)
        self.assertEqual(out.shape, self.data.shape)
        self.assertEqual(out.dtype, self.data.dtype)

    def test_tuple(self):
        """ () -> np.void instance """
        out = self.dset[()]
        self.assertIsInstance(out, np.void)
        self.assertEqual(out.dtype, self.data.dtype)

    def test_slice(self):
        """ slice -> ValueError """
        with self.assertRaises(ValueError):
            self.dset[0:4]

    def test_index(self):
        """ index -> ValueError """
        with self.assertRaises(ValueError):
            self.dset[0]

    def test_rt(self):
        """ Compound types are read back in correct order (h5py issue 236)"""

        dt = np.dtype([('weight', np.float64),
                       ('cputime', np.float64),
                       ('walltime', np.float64),
                       ('parents_offset', np.uint32),
                       ('n_parents', np.uint32),
                       ('status', np.uint8),
                       ('endpoint_type', np.uint8),])

        # np.zeros instead of the raw np.ndarray constructor: the latter
        # returns uninitialized memory, which is hazardous if any field
        # is ever left unassigned.
        testdata = np.zeros((16,), dtype=dt)
        for key in dt.fields:
            # values are truncated/cast to each field's dtype on assignment
            testdata[key] = np.random.random((16,)) * 100

        self.f['test'] = testdata
        outdata = self.f['test'][...]
        self.assertTrue(np.all(outdata == testdata))
        self.assertEqual(outdata.dtype, testdata.dtype)

    def test_assign(self):
        """ Per-field writes to a compound dataset round-trip correctly """
        dt = np.dtype([('weight', np.float64),
                       ('endpoint_type', np.uint8),])

        testdata = np.zeros((16,), dtype=dt)
        for key in dt.fields:
            testdata[key] = np.random.random(size=testdata[key].shape) * 100

        ds = self.f.create_dataset('test', (16,), dtype=dt)
        # write one field at a time rather than the whole compound
        for key in dt.fields:
            ds[key] = testdata[key]

        outdata = self.f['test'][...]

        self.assertTrue(np.all(outdata == testdata))
        self.assertEqual(outdata.dtype, testdata.dtype)

    def test_read(self):
        """ Per-field reads from a compound dataset return correct data """
        dt = np.dtype([('weight', np.float64),
                       ('endpoint_type', np.uint8),])

        testdata = np.zeros((16,), dtype=dt)
        for key in dt.fields:
            testdata[key] = np.random.random(size=testdata[key].shape) * 100

        ds = self.f.create_dataset('test', (16,), dtype=dt)

        # Write to all fields
        ds[...] = testdata

        # read back one field at a time and compare against the source
        for key in dt.fields:
            outdata = self.f['test'][key]
            np.testing.assert_array_equal(outdata, testdata[key])
            self.assertEqual(outdata.dtype, testdata[key].dtype)

    """
    TBD
    def test_nested_compound_vlen(self):
        dt_inner = np.dtype([('a', h5py.vlen_dtype(np.int32)),
                             ('b', h5py.vlen_dtype(np.int32))])

        dt = np.dtype([('f1', h5py.vlen_dtype(dt_inner)),
                       ('f2', np.int64)])

        inner1 = (np.array(range(1, 3), dtype=np.int32),
                  np.array(range(6, 9), dtype=np.int32))

        inner2 = (np.array(range(10, 14), dtype=np.int32),
                  np.array(range(16, 21), dtype=np.int32))

        data = np.array([(np.array([inner1, inner2], dtype=dt_inner), 2),
                         (np.array([inner1], dtype=dt_inner), 3)],
                        dtype=dt)

        self.f["ds"] = data
        out = self.f["ds"]

        # Specifying check_alignment=False because vlen fields have 8 bytes of padding
        # because the vlen datatype in hdf5 occupies 16 bytes
        self.assertArrayEqual(out, data, check_alignment=False)
    """
| 152 | + |
| 153 | + |
if __name__ == '__main__':
    # Keep console output quiet: only ERROR-level records are emitted.
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.ERROR)
    ut.main()
0 commit comments