path: root/test/dsets.c
author Quincey Koziol <koziol@hdfgroup.org> 2016-04-17 07:26:51 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org> 2016-04-17 07:26:51 (GMT)
commit 36062736a5ca5729b5720982859398dfe7862394 (patch)
tree ed9dc140502861a3f248c50e4e2d1d51f1145c1d /test/dsets.c
parent ce8905cc906d6448b79eded0324a1e86b6e09bac (diff)
[svn-r29722] Description:
Bring over more dataset tests from the revise_chunks branch.

Tested on: MacOSX/64 10.11.4 (amazon) w/serial, parallel & production
(h5committest forthcoming)
Diffstat (limited to 'test/dsets.c')
-rw-r--r-- test/dsets.c 1691
1 file changed, 1585 insertions(+), 106 deletions(-)
diff --git a/test/dsets.c b/test/dsets.c
index b8a66e7..4f124f2 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -51,7 +51,8 @@ const char *FILENAME[] = {
"partial_chunks", /* 14 */
"layout_extend", /* 15 */
"zero_chunk", /* 16 */
- "storage_size", /* 17 */
+ "dls_01_strings", /* 17 */
+ "storage_size", /* 18 */
NULL
};
#define FILENAME_BUF_SIZE 1024
@@ -174,9 +175,11 @@ const char *FILENAME[] = {
/* Names for zero-dim test */
#define ZERODIM_DATASET "zerodim"
+#define ZERODIM_DATASET2 "zerodim2"
/* Parameters for zero-dim test */
#define MISSING_CHUNK_DATASET "missing_chunk"
+#define MISSING_CHUNK_DATASET2 "missing_chunk2"
#define MISSING_CHUNK_DIM 100
/* Names for random chunks test */
@@ -201,10 +204,21 @@ const char *FILENAME[] = {
/* Parameters for testing bypassing chunk cache */
#define BYPASS_DATASET1 "Dset1"
#define BYPASS_DATASET2 "Dset2"
+
+#define T_BYPASS_DATASET1 "T_Dset1"
+#define T_BYPASS_DATASET2 "T_Dset2"
+
#define BYPASS_DIM 1000
#define BYPASS_CHUNK_DIM 500
#define BYPASS_FILL_VALUE 7
+/* Parameters for testing extensible array chunk indices */
+#define EARRAY_MAX_RANK 3
+#define EARRAY_DSET_DIM 15
+#define EARRAY_CHUNK_DIM 3
+#define EARRAY_EXTEND_INCR 15
+#define EARRAY_MAX_EXTEND 75
+
/* Parameters for datasets in query storage size tests */
#define STORAGE_SIZE_DIM1 12
#define STORAGE_SIZE_DIM2 6
@@ -221,6 +235,15 @@ double points_dbl[DSET_DIM1][DSET_DIM2], check_dbl[DSET_DIM1][DSET_DIM2];
size_t count_nbytes_read = 0;
size_t count_nbytes_written = 0;
+/* Declarations for test_idx_compatible() */
+#define DSET "dset"
+#define DSET_FILTER "dset_filter"
+const char *OLD_FILENAME[] = { /* Files created under 1.6 branch and 1.8 branch */
+ "btree_idx_1_6.h5", /* 1.6 HDF5 file */
+ "btree_idx_1_8.h5" /* 1.8 HDF5 file */
+};
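/*
 * Editor's sketch (not part of this patch): a plausible shape for the
 * test_idx_compatible() check these declarations support, built only from
 * calls that appear elsewhere in this file. Files written by the 1.6/1.8
 * branches must keep the v1 B-tree chunk index when reopened:
 */
static herr_t
idx_compat_sketch(void)
{
    hid_t fid = -1, did = -1;       /* File & dataset IDs */
    H5D_chunk_index_t idx_type;     /* Dataset chunk index type */

    /* Open a file created by the 1.6 branch, read-only */
    if((fid = H5Fopen(OLD_FILENAME[0], H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) TEST_ERROR;
    if((did = H5Dopen2(fid, DSET, H5P_DEFAULT)) < 0) TEST_ERROR;

    /* Old-format files must not have been silently upgraded */
    if(H5D__layout_idx_type_test(did, &idx_type) < 0) TEST_ERROR;
    if(idx_type != H5D_CHUNK_IDX_BTREE)
        FAIL_PUTS_ERROR("should be using v1 B-tree as index")

    if(H5Dclose(did) < 0) TEST_ERROR;
    if(H5Fclose(fid) < 0) TEST_ERROR;
    return 0;

error:
    H5E_BEGIN_TRY { H5Dclose(did); H5Fclose(fid); } H5E_END_TRY;
    return -1;
}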
+
+
/* Local prototypes for filter functions */
static size_t filter_bogus(unsigned int flags, size_t cd_nelmts,
const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf);
@@ -6478,7 +6501,11 @@ static herr_t
test_zero_dims(hid_t file)
{
hid_t s = -1, d = -1, dcpl = -1;
+ hid_t s2 = -1, d2 = -1, dcpl2 = -1;
hsize_t dzero = 0, dmax = H5S_UNLIMITED, csize = 5;
+ hsize_t dzero2[2] = {0, 0};
+ hsize_t dmax2[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
+ hsize_t csize2[2] = {5, 5};
hid_t fapl; /* File access property list */
H5D_chunk_index_t idx_type; /* Dataset chunk index type */
H5F_libver_t low; /* File format low bound */
@@ -6549,6 +6576,56 @@ test_zero_dims(hid_t file)
if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
if(H5Sclose(s) < 0) FAIL_STACK_ERROR
+ /*
+ * Two-dimensional dataset
+ */
+ if((s2 = H5Screate_simple(2, dzero2, dmax2)) < 0) FAIL_STACK_ERROR
+
+ /* Try creating chunked dataset with undefined chunk dimensions */
+ if((dcpl2 = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+ if(H5Pset_layout(dcpl2, H5D_CHUNKED) < 0) FAIL_STACK_ERROR
+
+ H5E_BEGIN_TRY {
+ d2 = H5Dcreate2(file, ZERODIM_DATASET2, H5T_NATIVE_INT, s2, H5P_DEFAULT, dcpl2, H5P_DEFAULT);
+ } H5E_END_TRY;
+ if(d2 > 0) {
+ H5Dclose(d2);
+ FAIL_PUTS_ERROR("created dataset with undefined chunk dimensions")
+ } /* end if */
+
+ /* Try creating chunked dataset with zero-sized chunk dimensions */
+ H5E_BEGIN_TRY {
+ ret = H5Pset_chunk(dcpl2, 2, dzero2);
+ } H5E_END_TRY;
+ if(ret >= 0)
+ FAIL_PUTS_ERROR("set zero-sized chunk dimensions")
+
+ if(H5Pclose(dcpl2) < 0) FAIL_STACK_ERROR
+
+ /* Write to the zero-sized extendible dataset */
+ if((dcpl2 = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+ if(H5Pset_chunk(dcpl2, 2, csize2) < 0) FAIL_STACK_ERROR
+
+ /* Create the dataset */
+ if((d2 = H5Dcreate2(file, ZERODIM_DATASET2, H5T_NATIVE_INT, s2, H5P_DEFAULT, dcpl2, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(d2, &idx_type) < 0) FAIL_STACK_ERROR
+
+ /* Verify index type */
+ if(low == H5F_LIBVER_LATEST) {
+ if(idx_type != H5D_CHUNK_IDX_BT2)
+ FAIL_PUTS_ERROR("should be using v2 B-tree as index");
+ } else if(idx_type != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+
+ /* Just a no-op */
+ if(H5Dwrite(d2, H5T_NATIVE_INT, s2, s2, H5P_DEFAULT, (void*)911) < 0) FAIL_STACK_ERROR
+
+ if(H5Dclose(d2) < 0) FAIL_STACK_ERROR
+ if(H5Pclose(dcpl2) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(s2) < 0) FAIL_STACK_ERROR
+
PASSED();
return 0;
@@ -6557,6 +6634,10 @@ error:
H5Pclose(dcpl);
H5Dclose(d);
H5Sclose(s);
+
+ H5Pclose(dcpl2);
+ H5Dclose(d2);
+ H5Sclose(s2);
} H5E_END_TRY;
return -1;
} /* end test_zero_dims() */
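/*
 * Editor's sketch (not part of this patch): the idx_type checks above, and
 * in the tests below, hinge on the file's format bounds. A caller opts into
 * the new chunk indexes by requesting the latest format on the file access
 * property list before creating the file ("example.h5" is an illustrative
 * name); otherwise chunked datasets keep the v1 B-tree index:
 */
static hid_t
create_latest_format_file(void)
{
    hid_t fapl = -1, fid = -1;

    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        return -1;
    /* Latest low bound: new chunk indexes become available */
    if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) {
        H5Pclose(fapl);
        return -1;
    }
    fid = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    H5Pclose(fapl);
    /* In this file: 1 unlimited dim -> Extensible Array, >1 unlimited dims
     * -> v2 B-tree, all dims fixed -> Fixed Array */
    return fid;
}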
@@ -6580,39 +6661,89 @@ error:
static herr_t
test_missing_chunk(hid_t file)
{
- hid_t s = -1, d = -1, dcpl = -1;
- hsize_t hs_start[1];
- hsize_t hs_stride[1],
- hs_count[1],
- hs_block[1];
+ hid_t d = -1, did2 = -1; /* Dataset IDs */
+ hid_t dcpl = -1, dcpl2 = -1; /* Dataset creation property list IDs */
+ hid_t s = -1, sid2 = -1; /* Dataspace IDs */
+ hsize_t hs_start[1], hs_stride[1], hs_count[1], hs_block[1]; /* Hyperslab settings */
+ hsize_t hs_start2[2], hs_stride2[2], hs_count2[2], hs_block2[2];/* Hyperslab settings */
+
+ /* Buffers for reading/writing dataset */
int wdata[MISSING_CHUNK_DIM],
rdata[MISSING_CHUNK_DIM];
+ int wdata2[MISSING_CHUNK_DIM][MISSING_CHUNK_DIM],
+ rdata2[MISSING_CHUNK_DIM][MISSING_CHUNK_DIM];
+
+ /* Settings for 1-D dataset */
hsize_t dsize=100, dmax=H5S_UNLIMITED;
hsize_t csize=5;
- size_t u;
+
+ /* Settings for 2-D dataset */
+ hsize_t dsize2[2] = {100, 100}, dmax2[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
+ hsize_t csize2[2] = {5, 5};
+ size_t u, i, j; /* Local index variables */
+
+ hid_t fapl; /* File access property list */
+ H5F_libver_t low; /* File format low bound */
+ H5D_chunk_index_t idx_type, idx_type2; /* Dataset chunk index types */
TESTING("Read dataset with unwritten chunk & undefined fill value");
+ /* Get the file's file access property list */
+ if((fapl = H5Fget_access_plist(file)) < 0) TEST_ERROR;
+
+ /* Get library format */
+ if(H5Pget_libver_bounds(fapl, &low, NULL) < 0) TEST_ERROR;
+
/* Initialize data for 1-D dataset */
for(u = 0; u < MISSING_CHUNK_DIM; u++) {
wdata[u] = (int)u;
rdata[u] = 911;
} /* end for */
+ /* Initialize data for 2-D dataset */
+ for(i = 0; i < MISSING_CHUNK_DIM; i++) {
+ for(j = 0; j < MISSING_CHUNK_DIM; j++) {
+ wdata2[i][j] = (int)j + (i * MISSING_CHUNK_DIM);
+ rdata2[i][j] = 911;
+ }
+ } /* end for */
+
/* Create dataspace */
if((s = H5Screate_simple(1, &dsize, &dmax)) < 0) TEST_ERROR;
+ if((sid2 = H5Screate_simple(2, dsize2, dmax2)) < 0) TEST_ERROR;
/* Create dataset creation property list */
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;
+ if((dcpl2 = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;
/* Set to chunked */
if(H5Pset_chunk(dcpl, 1, &csize) < 0) TEST_ERROR;
+ if(H5Pset_chunk(dcpl2, 2, csize2) < 0) TEST_ERROR;
/* Undefine fill value */
if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, NULL) < 0) TEST_ERROR;
+ if(H5Pset_fill_value(dcpl2, H5T_NATIVE_INT, NULL) < 0) TEST_ERROR;
- /* Create dataset */
+ /* Create the 1-D & 2-D datasets */
if((d = H5Dcreate2(file, MISSING_CHUNK_DATASET, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;
+ if((did2 = H5Dcreate2(file, MISSING_CHUNK_DATASET2, H5T_NATIVE_INT, sid2, H5P_DEFAULT, dcpl2, H5P_DEFAULT)) < 0) TEST_ERROR;
+
+ /* Get the chunk index types */
+ if(H5D__layout_idx_type_test(d, &idx_type) < 0) TEST_ERROR;
+ if(H5D__layout_idx_type_test(did2, &idx_type2) < 0) TEST_ERROR;
+
+ /* Verify index type */
+ if(low == H5F_LIBVER_LATEST) {
+ if(idx_type != H5D_CHUNK_IDX_EARRAY)
+ FAIL_PUTS_ERROR("should be using Extensible Array as index");
+ if(idx_type2 != H5D_CHUNK_IDX_BT2)
+ FAIL_PUTS_ERROR("should be using v2 B-tree as index");
+ } else {
+ if(idx_type != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ if(idx_type2 != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ }
/* Select elements in every other chunk for 1-D dataset */
hs_start[0]=0;
@@ -6622,13 +6753,23 @@ test_missing_chunk(hid_t file)
if(H5Sselect_hyperslab(s, H5S_SELECT_SET, hs_start, hs_stride, hs_count,
hs_block) < 0) TEST_ERROR;
- /* Write selected data */
+ /* Select elements in every other chunk for 2-D dataset */
+ hs_start2[0] = hs_start2[1] = 0;
+ hs_stride2[0] = hs_stride2[1] = 10;
+ hs_count2[0] = hs_count2[1] = 10;
+ hs_block2[0] = hs_block2[1] = 5;
+ if(H5Sselect_hyperslab(sid2, H5S_SELECT_SET, hs_start2, hs_stride2, hs_count2,
+ hs_block2) < 0) TEST_ERROR;
+
+ /* Write selected data to the datasets */
if(H5Dwrite(d, H5T_NATIVE_INT, s, s, H5P_DEFAULT, wdata) < 0) TEST_ERROR;
+ if(H5Dwrite(did2, H5T_NATIVE_INT, sid2, sid2, H5P_DEFAULT, wdata2) < 0) TEST_ERROR;
- /* Read all data */
+ /* Read all data from the datasets */
if(H5Dread(d, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata) < 0) TEST_ERROR;
+ if(H5Dread(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata2) < 0) TEST_ERROR;
- /* Validata values read */
+ /* Validate values read for the 1-D dataset */
for(u=0; u<MISSING_CHUNK_DIM; u++) {
if((u%10)>=5) {
if(rdata[u]!=911) {
@@ -6644,10 +6785,34 @@ test_missing_chunk(hid_t file)
} /* end else */
} /* end for */
+ /* Validate values read for the 2-D dataset */
+ for(i = 0; i < MISSING_CHUNK_DIM; i++) {
+ for(j = 0; j < MISSING_CHUNK_DIM; j++) {
+
+ if((i % 10) >= 5 || (j % 10) >= 5) {
+ if(rdata2[i][j] != 911) {
+ printf(" Line %d: Incorrect value, rdata2[%u][%u] = %d\n",
+ __LINE__,(unsigned)i, (unsigned)j, rdata2[i][j]);
+ TEST_ERROR;
+ } /* end if */
+ } /* end if */
+ else {
+ if(rdata2[i][j] != wdata2[i][j]) {
+ printf(" Line %d: Incorrect value, wdata2[%u][%u] = %d, rdata2[%u][%u] = %d\n",
+ __LINE__,(unsigned)i, (unsigned)j, wdata2[i][j],(unsigned)i, (unsigned)j, rdata2[i][j]);
+ TEST_ERROR;
+ } /* end if */
+ } /* end else */
+ } /* end for */
+ } /* end for */
+
/* Close everything */
if(H5Pclose(dcpl) < 0) TEST_ERROR;
+ if(H5Pclose(dcpl2) < 0) TEST_ERROR;
if(H5Sclose(s) < 0) TEST_ERROR;
+ if(H5Sclose(sid2) < 0) TEST_ERROR;
if(H5Dclose(d) < 0) TEST_ERROR;
+ if(H5Dclose(did2) < 0) TEST_ERROR;
PASSED();
return 0;
@@ -6655,19 +6820,20 @@ test_missing_chunk(hid_t file)
error:
H5E_BEGIN_TRY {
H5Pclose(dcpl);
+ H5Pclose(dcpl2);
H5Dclose(d);
+ H5Dclose(did2);
H5Sclose(s);
+ H5Sclose(sid2);
} H5E_END_TRY;
return -1;
} /* end test_missing_chunk() */
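/*
 * Editor's note (not part of this patch): a worked example of the
 * every-other-chunk hyperslab used above. With 5-element chunks over 100
 * elements, element u lands in the selection iff (u % 10) < 5, so only the
 * even-numbered chunks are written; the odd-numbered chunks are never
 * allocated and, because the fill value was undefined with
 * H5Pset_fill_value(dcpl, H5T_NATIVE_INT, NULL), reads from them leave the
 * 911 sentinel in the buffer untouched:
 */
static herr_t
select_every_other_chunk(hid_t space_id)    /* assumed: 1-D, 100 elements */
{
    hsize_t start = 0, stride = 10, count = 10, block = 5;

    /* Selects elements 0-4, 10-14, ..., 90-94 */
    return H5Sselect_hyperslab(space_id, H5S_SELECT_SET, &start, &stride,
            &count, &block);
}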
/*-------------------------------------------------------------------------
- * Function: test_random_chunks
+ * Function: test_random_chunks_real
*
- * Purpose: Tests that write/read on randomly selected chunks in 2 datasets.
- * One dataset has fixed dimensions, and the other has unlimited
- * dimensions which are extended before write/read operations.
+ * Purpose: Tests write/read on randomly selected chunks
*
*
* Return: Success: 0
@@ -6679,7 +6845,7 @@ error:
*-------------------------------------------------------------------------
*/
static herr_t
-test_random_chunks(hid_t fapl)
+test_random_chunks_real(const char *testname, hbool_t early_alloc, hid_t fapl)
{
char filename[FILENAME_BUF_SIZE];
hid_t s=-1, m=-1, d=-1, dcpl=-1, file=-1;
@@ -6688,18 +6854,23 @@ test_random_chunks(hid_t fapl)
check2[20][20];
hsize_t coord[NPOINTS][2];
hsize_t dsize[2]={100,100}, dmax[2]={H5S_UNLIMITED, H5S_UNLIMITED}, csize[2]={10,10}, nsize[2]={200,200};
+ hsize_t fixed_dmax[2] = {1000, 1000};
hsize_t msize[1]={NPOINTS};
const char dname[]="dataset";
int chunk_row, chunk_col;
size_t i, j;
+ H5D_chunk_index_t idx_type; /* Dataset chunk index type */
+ H5F_libver_t low; /* File format low bound */
- TESTING("Write/read on randomly selected chunks");
+ TESTING(testname);
assert(NPOINTS < 100);
h5_fixname(FILENAME[6], fapl, filename, sizeof filename);
+ if(H5Pget_libver_bounds(fapl, &low, NULL) < 0) TEST_ERROR;
+
/* Create file for first test */
if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR;
@@ -6712,8 +6883,9 @@ test_random_chunks(hid_t fapl)
/* Set chunked layout */
if(H5Pset_chunk(dcpl, 2, csize) < 0) TEST_ERROR;
- /* Set early allocation time */
- if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR;
+ /* Set early allocation time for one dataset; the other dataset is using default alloc time */
+ if(early_alloc)
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR;
/* Create dataset */
if((d = H5Dcreate2(file, dname, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;
@@ -6751,12 +6923,22 @@ test_random_chunks(hid_t fapl)
if(H5Dclose(d) < 0) TEST_ERROR;
if(H5Fclose(file) < 0) TEST_ERROR;
- /* Open file again */
+ /* Open first file again */
if((file = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) TEST_ERROR;
/* Open dataset */
if((d = H5Dopen2(file, dname, H5P_DEFAULT)) < 0) TEST_ERROR;
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(d, &idx_type) < 0) TEST_ERROR;
+
+ /* Verify index type */
+ if(low == H5F_LIBVER_LATEST) {
+ if(idx_type != H5D_CHUNK_IDX_FARRAY)
+ FAIL_PUTS_ERROR("should be using Fixed Array as index");
+ } else if(idx_type != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+
/* Get dataset dataspace */
if((s = H5Dget_space(d)) < 0) TEST_ERROR;
@@ -6788,7 +6970,10 @@ test_random_chunks(hid_t fapl)
if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR;
/* Create dataspace with unlimited maximum dimensions */
- if((s = H5Screate_simple(2, dsize, dmax)) < 0) TEST_ERROR;
+ if(early_alloc) {
+ if((s = H5Screate_simple(2, dsize, fixed_dmax)) < 0) TEST_ERROR;
+ } else
+ if((s = H5Screate_simple(2, dsize, dmax)) < 0) TEST_ERROR;
/* Create dataset creation property list */
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;
@@ -6796,12 +6981,26 @@ test_random_chunks(hid_t fapl)
/* Set chunked layout */
if(H5Pset_chunk(dcpl, 2, csize) < 0) TEST_ERROR;
- /* Set allocation time to early */
- if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR;
+ /* Set early allocation time for one dataset; the other dataset is using default alloc time */
+ if(early_alloc)
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR;
/* Create dataset */
if((d = H5Dcreate2(file, dname, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(d, &idx_type) < 0) TEST_ERROR;
+
+ /* Verify index type */
+ if(low == H5F_LIBVER_LATEST) {
+ if(early_alloc) {
+ if(idx_type != H5D_CHUNK_IDX_FARRAY)
+ FAIL_PUTS_ERROR("should be using implicit indexing");
+ } else if(idx_type != H5D_CHUNK_IDX_BT2)
+ FAIL_PUTS_ERROR("should be using v2 B-tree as index");
+ } else if(idx_type != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+
/* Extend both dimensions of the dataset */
if(H5Dset_extent(d, nsize) < 0) TEST_ERROR;
@@ -6841,7 +7040,107 @@ test_random_chunks(hid_t fapl)
if(H5Dclose(d) < 0) TEST_ERROR;
if(H5Fclose(file) < 0) TEST_ERROR;
- /* Open file again */
+ /* Open second file again */
+ if((file = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) TEST_ERROR;
+
+ /* Open dataset */
+ if((d = H5Dopen2(file, dname, H5P_DEFAULT)) < 0) TEST_ERROR;
+
+ /* Get dataset dataspace */
+ if((s = H5Dget_space(d)) < 0) TEST_ERROR;
+
+ /* Create dataspace for read buffer */
+ if((m = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
+
+ /* Select the random points for reading */
+ if(H5Sselect_elements (s, H5S_SELECT_SET, (size_t)NPOINTS, (const hsize_t *)coord) < 0) TEST_ERROR;
+
+ /* Read from dataset */
+ if(H5Dread(d, H5T_NATIVE_INT, m, s, H5P_DEFAULT, rbuf) < 0) TEST_ERROR;
+
+ /* Verify that written and read data are the same */
+ for(i = 0; i < NPOINTS; i++)
+ if(rbuf[i] != wbuf[i]){
+ printf(" Line %d: Incorrect value, wbuf[%u]=%d, rbuf[%u]=%d\n",__LINE__,(unsigned)i,wbuf[i],(unsigned)i,rbuf[i]);
+ TEST_ERROR;
+ } /* end if */
+
+ /* Close resources */
+ if(H5Sclose(s) < 0) TEST_ERROR;
+ if(H5Sclose(m) < 0) TEST_ERROR;
+ if(H5Dclose(d) < 0) TEST_ERROR;
+ if(H5Fclose(file) < 0) TEST_ERROR;
+
+
+ /* Create third file */
+ if((file = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR;
+
+ /* Create dataspace with fixed maximum dimensions */
+ if((s = H5Screate_simple(2, dsize, fixed_dmax)) < 0) TEST_ERROR;
+
+ /* Create dataset creation property list */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;
+
+ /* Set chunked layout */
+ if(H5Pset_chunk(dcpl, 2, csize) < 0) TEST_ERROR;
+
+ /* Set early allocation time for one dataset; the other dataset is using default alloc time */
+ if(early_alloc)
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) TEST_ERROR;
+
+ /* Create dataset */
+ if((d = H5Dcreate2(file, dname, H5T_NATIVE_INT, s, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR;
+
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(d, &idx_type) < 0) TEST_ERROR;
+
+ /* Verify index type */
+ if(low == H5F_LIBVER_LATEST) {
+ if(idx_type != H5D_CHUNK_IDX_FARRAY)
+ FAIL_PUTS_ERROR("should be using Fixed Array as index");
+ } else if(idx_type != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+
+ /* Extend both dimensions of the dataset */
+ if(H5Dset_extent(d, nsize) < 0) TEST_ERROR;
+
+ /* Reset the dataset dataspace to new dimensions */
+ if(H5Sset_extent_simple(s, 2, nsize, dmax) < 0) TEST_ERROR;
+
+ /* Initialize check buffer for repeated coordinates */
+ for(i = 0; i < nsize[0]/csize[0]; i++)
+ for(j = 0; j < nsize[1] / csize[1]; j++)
+ check2[i][j] = 0;
+
+ /* Generate random point coordinates. Only one point is selected per chunk */
+ for(i = 0; i < NPOINTS; i++){
+ do {
+ chunk_row = (int)HDrandom() % (int)(nsize[0] / csize[0]);
+ chunk_col = (int)HDrandom() % (int)(nsize[1] / csize[1]);
+ } while (check2[chunk_row][chunk_col]);
+
+ wbuf[i] = check2[chunk_row][chunk_col] = chunk_row + chunk_col + 1;
+ coord[i][0] = (hsize_t)chunk_row * csize[0];
+ coord[i][1] = (hsize_t)chunk_col * csize[1];
+ }
+
+ /* Create dataspace for write buffer */
+ if((m = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
+
+ /* Select the random points for writing */
+ if(H5Sselect_elements(s, H5S_SELECT_SET, (size_t)NPOINTS, (const hsize_t *)coord) < 0) TEST_ERROR;
+
+ /* Write into dataset */
+ if(H5Dwrite(d, H5T_NATIVE_INT, m, s, H5P_DEFAULT, wbuf) < 0) TEST_ERROR;
+
+ /* Close resources */
+ if(H5Sclose(s) < 0) TEST_ERROR;
+ if(H5Sclose(m) < 0) TEST_ERROR;
+ if(H5Pclose(dcpl) < 0) TEST_ERROR;
+ if(H5Dclose(d) < 0) TEST_ERROR;
+ if(H5Fclose(file) < 0) TEST_ERROR;
+
+ /* Open third file again */
if((file = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) TEST_ERROR;
/* Open dataset */
@@ -6884,6 +7183,43 @@ error:
H5Fclose(file);
} H5E_END_TRY;
return -1;
+} /* end test_random_chunks_real() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: test_random_chunks
+ *
+ * Purpose: Tests write/read on randomly selected chunks
+ * First file:
+ * One dataset has fixed dimensions without max. dims & H5D_ALLOC_TIME_EARLY
+ * One dataset has fixed dimensions without max. dims & default alloc time
+ * Second file:
+ * One extendible dataset with unlimited max. dims & H5D_ALLOC_TIME_EARLY
+ * One extendible dataset with unlimited max. dims & default alloc time
+ * Third file:
+ * One extendible dataset with fixed max. dims & H5D_ALLOC_TIME_EARLY
+ * One extendible dataset with fixed max. dims & default alloc time
+ *
+ * All the datasets in the second & third files are extended before write/read operations.
+ *
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Christian Chilan
+ * Monday, March 26, 2007
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_random_chunks(hid_t fapl)
+{
+ int nerrors = 0; /* Errors in sub-tests */
+
+ nerrors += test_random_chunks_real("Write/read on randomly selected chunks w/non-implicit index", FALSE, fapl);
+ nerrors += test_random_chunks_real("Write/read on randomly selected chunks w/implicit index", TRUE, fapl);
+
+ return nerrors;
} /* end test_random_chunks() */
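/*
 * Editor's sketch (not part of this patch): the point selections above use
 * H5Sselect_elements(), whose coord argument is a flattened
 * [num_elements][rank] array. Selecting at most one point per chunk, as
 * test_random_chunks_real() does, guarantees every write touches a
 * distinct chunk:
 */
static herr_t
select_two_points(hid_t space_id)   /* assumed: 2-D dataspace, >= 20x20 */
{
    hsize_t coord[2][2] = { {0, 0}, {10, 10} };     /* two points, rank 2 */

    /* coord is passed as 2*2 contiguous hsize_t values */
    return H5Sselect_elements(space_id, H5S_SELECT_SET, (size_t)2,
            (const hsize_t *)coord);
}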
#ifndef H5_NO_DEPRECATED_SYMBOLS
@@ -7443,23 +7779,36 @@ static herr_t
test_big_chunks_bypass_cache(hid_t fapl)
{
char filename[FILENAME_BUF_SIZE];
- hid_t fid = -1; /* File ID */
- hid_t fapl_local = -1; /* File access property list ID */
- hid_t dcpl = -1; /* Dataset creation property list ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dsid = -1; /* Dataset ID */
- hsize_t dim, chunk_dim; /* Dataset and chunk dimensions */
- size_t rdcc_nelmts, rdcc_nbytes;
- int fvalue = BYPASS_FILL_VALUE;
- hsize_t count, stride, offset, block;
+ hid_t fid = -1; /* File ID */
+ hid_t fapl_local = -1; /* File access property list ID */
+ hid_t dcpl = -1, t_dcpl = -1; /* Dataset creation property list ID */
+ hid_t sid = -1, t_sid = -1; /* Dataspace ID */
+ hid_t mid = -1; /* Memory space ID */
+ hid_t dsid = -1, t_dsid = -1; /* Dataset ID */
+ hsize_t dim, chunk_dim; /* Dataset and chunk dimensions */
+ hsize_t t_dim[2], t_max[2], t_chunk_dim[2]; /* Dataset and chunk dimensions */
+ size_t rdcc_nelmts, rdcc_nbytes; /* Chunk cache parameters */
+ int fvalue = BYPASS_FILL_VALUE; /* Fill value */
+ hsize_t count, stride, offset, block; /* Setting for hyperslab (1-D) */
+ hsize_t t_count[2], t_stride[2], t_offset[2], t_block[2]; /* Setting for hyperslab (2-D) */
+ /* Buffer for reading and writing data (1-D) */
static int wdata[BYPASS_CHUNK_DIM/2], rdata1[BYPASS_DIM],
- rdata2[BYPASS_CHUNK_DIM/2];
- int i, j;
+ rdata2[BYPASS_CHUNK_DIM/2];
+ /* Buffer for reading and writing data (2-D) */
+ static int t_wdata[BYPASS_CHUNK_DIM/2][BYPASS_CHUNK_DIM/2], t_rdata1[BYPASS_DIM][BYPASS_DIM],
+ t_rdata2[BYPASS_CHUNK_DIM/2][BYPASS_CHUNK_DIM/2];
+ int i, j; /* Local index variables */
+ H5F_libver_t low; /* File format low bound */
+ H5D_chunk_index_t idx_type, t_idx_type; /* Dataset chunk index types */
+
TESTING("big chunks bypassing the cache");
h5_fixname(FILENAME[9], fapl, filename, sizeof filename);
+ /* Check if we are using the latest version of the format */
+ if(H5Pget_libver_bounds(fapl, &low, NULL) < 0) FAIL_STACK_ERROR
+
/* Copy fapl passed to this function (as we will be modifying it) */
if((fapl_local = H5Pcopy(fapl)) < 0) FAIL_STACK_ERROR
@@ -7471,52 +7820,101 @@ test_big_chunks_bypass_cache(hid_t fapl)
/* Create file */
if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_local)) < 0) FAIL_STACK_ERROR
- /* Create 1-D dataspace */
- dim = BYPASS_DIM;
+ /* Create 1-D & 2-D dataspace */
+ dim = t_dim[0] = t_dim[1] = BYPASS_DIM;
+ t_max[0] = t_max[1] = H5S_UNLIMITED;
if((sid = H5Screate_simple(1, &dim, NULL)) < 0) FAIL_STACK_ERROR
+ if((t_sid = H5Screate_simple(2, t_dim, t_max)) < 0) FAIL_STACK_ERROR
- /* Create dataset creation property list */
+ /* Create 1-D & 2-D dataset creation property list */
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+ if((t_dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
- /* Define chunk size. There will be only 2 chunks in the dataset. */
- chunk_dim = BYPASS_CHUNK_DIM;
+ /* Define chunk size. */
+ /* There will be 2 chunks in 1-D dataset & 4 chunks in the 2-D dataset */
+ chunk_dim = t_chunk_dim[0] = t_chunk_dim[1] = BYPASS_CHUNK_DIM;
if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR
+ if(H5Pset_chunk(t_dcpl, 2, t_chunk_dim) < 0) FAIL_STACK_ERROR
/* Define fill value, fill time, and chunk allocation time */
if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fvalue) < 0) FAIL_STACK_ERROR
+ if(H5Pset_fill_value(t_dcpl, H5T_NATIVE_INT, &fvalue) < 0) FAIL_STACK_ERROR
+
if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_IFSET) < 0) FAIL_STACK_ERROR
+ if(H5Pset_fill_time(t_dcpl, H5D_FILL_TIME_IFSET) < 0) FAIL_STACK_ERROR
+
if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_INCR) < 0) FAIL_STACK_ERROR
+ if(H5Pset_alloc_time(t_dcpl, H5D_ALLOC_TIME_INCR) < 0) FAIL_STACK_ERROR
/* Create the first 1-D dataset */
if((dsid = H5Dcreate2(fid, BYPASS_DATASET1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
FAIL_STACK_ERROR
+ /* Create the first 2-D dataset */
+ if((t_dsid = H5Dcreate2(fid, T_BYPASS_DATASET1, H5T_NATIVE_INT, t_sid, H5P_DEFAULT, t_dcpl, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the chunk index types for 1-D and 2-D datasets */
+ if(H5D__layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
+ if(H5D__layout_idx_type_test(t_dsid, &t_idx_type) < 0) FAIL_STACK_ERROR
+
+ /* Chunk index type expected depends on whether we are using the latest version of the format */
+ if(low == H5F_LIBVER_LATEST) {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_FARRAY) FAIL_PUTS_ERROR("should be using Fixed Array as index");
+ if(t_idx_type != H5D_CHUNK_IDX_BT2) FAIL_PUTS_ERROR("should be using v2 B-tree as index");
+ } else {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ if(t_idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ } /* end else */
+
/* Select first chunk to write the data */
- offset = 0;
- count = 1;
- stride = 1;
- block = BYPASS_CHUNK_DIM / 2;
+ offset = t_offset[0] = t_offset[1] = 0;
+ count = t_count[0] = t_count[1] = 1;
+ stride = t_stride[0] = t_stride[1] = 1;
+ block = t_block[0] = t_block[1] = BYPASS_CHUNK_DIM / 2;
if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &offset, &stride, &count, &block) < 0)
FAIL_STACK_ERROR
- /* Initialize data to write */
+ if(H5Sselect_hyperslab(t_sid, H5S_SELECT_SET, t_offset, t_stride, t_count, t_block) < 0)
+ FAIL_STACK_ERROR
+
+ /* Initialize data to write for 1-D dataset */
for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
wdata[i] = i;
+ /* Initialize data to write for 2-D dataset */
+ for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
+ for(j = 0; j < BYPASS_CHUNK_DIM / 2; j++)
+ t_wdata[i][j] = j;
+
+ /* Set up memory space for the 2-D dataset */
+ if((mid = H5Screate_simple(2, t_block, NULL)) < 0) FAIL_STACK_ERROR
+
+ /* Write to the first 1-D & 2-D datasets */
/* This write should go through the cache because fill value is used. */
if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, wdata) < 0)
FAIL_STACK_ERROR
+ if(H5Dwrite(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_wdata) < 0)
+ FAIL_STACK_ERROR
+ /* Close the first 1-D & 2-D datasets */
if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Dclose(t_dsid) < 0) FAIL_STACK_ERROR
- /* Reopen the dataset */
+ /* Reopen the first 1-D & 2-D datasets */
if((dsid = H5Dopen2(fid, BYPASS_DATASET1, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+ if((t_dsid = H5Dopen2(fid, T_BYPASS_DATASET1, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
/* Reads both 2 chunks. Reading the second chunk should bypass the cache because the
* chunk is bigger than the cache size and it isn't allocated on disk. */
if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata1) < 0)
FAIL_STACK_ERROR
+ if(H5Dread(t_dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, t_rdata1) < 0)
+ FAIL_STACK_ERROR
+ /* Verify data for the first 1-D dataset */
for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
if(rdata1[i] != i) {
printf(" Read different values than written in the 1st chunk.\n");
@@ -7531,30 +7929,64 @@ test_big_chunks_bypass_cache(hid_t fapl)
TEST_ERROR
} /* end if */
- /* Close the first dataset */
+ /* Verify data for the first 2-D dataset */
+ for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
+ for(j = 0; j < BYPASS_CHUNK_DIM / 2; j++)
+ if(t_rdata1[i][j] != j) {
+ printf(" Read different values than written in the 1st chunk.\n");
+ printf(" At line %d and index (%d, %d), t_rdata1 = %d. It should be %d.\n",
+ __LINE__, i, j, t_rdata1[i][j], j);
+ TEST_ERROR
+ } /* end if */
+
+ for(i = BYPASS_CHUNK_DIM / 2; i < BYPASS_DIM; i++)
+ for(j = BYPASS_CHUNK_DIM / 2; j < BYPASS_DIM; j++)
+ if(t_rdata1[i][j] != fvalue) {
+ printf(" Read different values than written in the 2nd chunk.\n");
+ printf(" At line %d and index (%d, %d), t_rdata1 = %d. It should be %d.\n",
+ __LINE__, i, j, t_rdata1[i][j], fvalue);
+ TEST_ERROR
+ } /* end if */
+
+ /* Close the first 1-D & 2-D datasets */
if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Dclose(t_dsid) < 0) FAIL_STACK_ERROR
/* Create a second dataset without fill value. This time, both write
* and read should bypass the cache because the chunk is bigger than the
* cache size and it's not allocated on disk. */
if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_NEVER) < 0) FAIL_STACK_ERROR
+ if(H5Pset_fill_time(t_dcpl, H5D_FILL_TIME_NEVER) < 0) FAIL_STACK_ERROR
+ /* Create a second 1-D & 2-D dataset */
if((dsid = H5Dcreate2(fid, BYPASS_DATASET2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
FAIL_STACK_ERROR
+ if((t_dsid = H5Dcreate2(fid, T_BYPASS_DATASET2, H5T_NATIVE_INT, t_sid, H5P_DEFAULT, t_dcpl, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ /* Write to the second 1-D & 2-D dataset */
if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, wdata) < 0)
FAIL_STACK_ERROR
+ if(H5Dwrite(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_wdata) < 0)
+ FAIL_STACK_ERROR
+ /* Close the second 1-D & 2-D dataset */
if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Dclose(t_dsid) < 0) FAIL_STACK_ERROR
- /* Reopen the dataset */
+ /* Reopen the second 1-D & 2-D datasets */
if((dsid = H5Dopen2(fid, BYPASS_DATASET2, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+ if((t_dsid = H5Dopen2(fid, T_BYPASS_DATASET2, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
/* Read back only the part that was written to the file. Reading the
* half chunk should bypass the cache because the chunk is bigger than
* the cache size. */
if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, rdata2) < 0)
+ FAIL_STACK_ERROR
+ if(H5Dread(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_rdata2) < 0)
+ FAIL_STACK_ERROR
+ /* Verify data for the second 1-D dataset */
for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
if(rdata2[i] != i) {
printf(" Read different values than written in the chunk.\n");
@@ -7562,10 +7994,23 @@ test_big_chunks_bypass_cache(hid_t fapl)
TEST_ERROR
} /* end if */
+ /* Verify data for the second 2-D dataset */
+ for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
+ for(j = 0; j < BYPASS_CHUNK_DIM / 2; j++)
+ if(t_rdata2[i][j] != j) {
+ printf(" Read different values than written in the chunk.\n");
+ printf(" At line %d and index (%d, %d), t_rdata2 = %d. It should be %d.\n",
+ __LINE__, i, j, t_rdata2[i][j], j);
+ TEST_ERROR
+ } /* end if */
+
/* Close IDs */
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(t_sid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(mid) < 0) FAIL_STACK_ERROR
if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Dclose(t_dsid) < 0) FAIL_STACK_ERROR
if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
+ if(H5Pclose(t_dcpl) < 0) FAIL_STACK_ERROR
if(H5Pclose(fapl_local) < 0) FAIL_STACK_ERROR
if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
@@ -7575,14 +8020,584 @@ test_big_chunks_bypass_cache(hid_t fapl)
error:
H5E_BEGIN_TRY {
H5Pclose(dcpl);
+ H5Pclose(t_dcpl);
H5Pclose(fapl_local);
H5Dclose(dsid);
+ H5Dclose(t_dsid);
H5Sclose(sid);
+ H5Sclose(t_sid);
+ H5Sclose(mid);
H5Fclose(fid);
} H5E_END_TRY;
return -1;
} /* end test_big_chunks_bypass_cache() */
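/*
 * Editor's sketch (not part of this patch): the "bypass" behavior tested
 * above comes from sizing the raw-data chunk cache below a single chunk;
 * the fapl setup that does this is elided from the hunk. A minimal version
 * of that setup (the exact cache numbers here are an assumption):
 */
static hid_t
make_small_cache_fapl(hid_t fapl)
{
    hid_t fapl_local = -1;              /* Copy, since we modify it */
    int mdc_nelmts;                     /* Metadata cache size (passed through) */
    size_t rdcc_nelmts, rdcc_nbytes;    /* Chunk cache slots & bytes */
    double rdcc_w0;                     /* Chunk cache write ratio */

    if((fapl_local = H5Pcopy(fapl)) < 0)
        return -1;
    if(H5Pget_cache(fapl_local, &mdc_nelmts, &rdcc_nelmts, &rdcc_nbytes, &rdcc_w0) < 0) {
        H5Pclose(fapl_local);
        return -1;
    }
    /* Any chunk larger than rdcc_nbytes transfers directly, skipping the cache */
    rdcc_nbytes = (BYPASS_CHUNK_DIM / 2) * sizeof(int);
    if(H5Pset_cache(fapl_local, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0) < 0) {
        H5Pclose(fapl_local);
        return -1;
    }
    return fapl_local;
}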
+
+/*-------------------------------------------------------------------------
+ * Function: test_chunk_fast
+ *
+ * Purpose: Tests support for extensible arrays as chunk index.
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Tuesday, February 3, 2009
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_chunk_fast(const char *env_h5_driver, hid_t fapl)
+{
+ char filename[FILENAME_BUF_SIZE];
+ hid_t fid = -1; /* File ID */
+ hid_t my_fapl = -1; /* File access property list ID */
+ hid_t dcpl = -1; /* Dataset creation property list ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t scalar_sid = -1;/* Scalar dataspace ID */
+ hid_t dsid = -1; /* Dataset ID */
+ hsize_t fill; /* Temporary value, for filling arrays */
+ hsize_t hs_size[EARRAY_MAX_RANK]; /* Hyperslab size */
+ hsize_t chunk_dim[EARRAY_MAX_RANK]; /* Chunk dimensions */
+ H5F_libver_t low; /* File format low bound */
+
+ TESTING("datasets w/extensible array as chunk index");
+
+ h5_fixname(FILENAME[10], fapl, filename, sizeof filename);
+
+ /* Copy the file access property list */
+ if((my_fapl = H5Pcopy(fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Turn on the chunk cache again */
+ {
+ int mdc_nelmts; /* # of elements in metadata cache */
+ size_t rdcc_nelmts; /* # of chunks in chunk cache */
+ size_t rdcc_nbytes; /* # of bytes in chunk cache */
+ double rdcc_w0; /* write-ratio for chunk cache */
+
+ if(H5Pget_cache(my_fapl, &mdc_nelmts, &rdcc_nelmts, &rdcc_nbytes, &rdcc_w0) < 0) FAIL_STACK_ERROR
+ rdcc_nbytes = 1048576;
+ if(H5Pset_cache(my_fapl, mdc_nelmts, rdcc_nelmts, rdcc_nbytes, rdcc_w0) < 0) FAIL_STACK_ERROR
+ } /* end block */
+
+ /* Check if we are using the latest version of the format */
+ if(H5Pget_libver_bounds(my_fapl, &low, NULL) < 0) FAIL_STACK_ERROR
+
+ /* Create scalar dataspace */
+ if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR
+
+ /* Initialize chunk dimensions */
+ fill = EARRAY_CHUNK_DIM;
+ H5VM_array_fill(chunk_dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
+
+ /* Initialize hyperslab size */
+ fill = 1;
+ H5VM_array_fill(hs_size, &fill, sizeof(fill), EARRAY_MAX_RANK);
+
+ {
+ int compress; /* Whether chunks should be compressed */
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+ /* Loop over compressing chunks */
+ for(compress = 0; compress <= 1; compress++)
+#else
+ /* Loop over without compression */
+ for(compress = 0; compress <= 0; compress++)
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ {
+ H5D_alloc_time_t alloc_time; /* Storage allocation time */
+
+ /* Loop over storage allocation time */
+ for(alloc_time = H5D_ALLOC_TIME_EARLY; alloc_time <= H5D_ALLOC_TIME_INCR; alloc_time++) {
+ unsigned ndims; /* Current # of dims to test */
+
+ /* Loop over dataspace ranks to test */
+ for(ndims = 1; ndims < (EARRAY_MAX_RANK + 1); ndims++) {
+ unsigned unlim_dim;
+
+ /* Create dataset creation property list */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+
+ /* Set chunking & chunk dims */
+ if(H5Pset_chunk(dcpl, (int)ndims, chunk_dim) < 0) FAIL_STACK_ERROR
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+ /* Check if we should compress the chunks */
+ if(compress)
+ if(H5Pset_deflate(dcpl, 9) < 0) FAIL_STACK_ERROR
+#endif /* H5_HAVE_FILTER_DEFLATE */
+
+ /* Set fill time */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) FAIL_STACK_ERROR
+
+ /* Set allocation time */
+ if(H5Pset_alloc_time(dcpl, alloc_time) < 0) FAIL_STACK_ERROR
+
+ /* Loop over which dimension is unlimited */
+ for(unlim_dim = 0; unlim_dim < ndims; unlim_dim++) {
+ H5D_chunk_index_t idx_type; /* Dataset chunk index type */
+ hsize_t dim[EARRAY_MAX_RANK], max_dim[EARRAY_MAX_RANK]; /* Dataset dimensions */
+ hsize_t swizzled_dim[EARRAY_MAX_RANK]; /* Dimensions, with unlimited dimension moved to rank 0 */
+ hsize_t down[EARRAY_MAX_RANK]; /* 'down' sizes, for computing array index */
+ hsize_t hs_offset[EARRAY_MAX_RANK]; /* Hyperslab offset */
+ hssize_t snpoints; /* # of points in dataspace extent (signed) */
+ hsize_t npoints; /* # of points in dataspace extent */
+ unsigned write_elem, read_elem; /* Element written/read */
+ hsize_t u; /* Local index variable */
+
+ /* Create file */
+ if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, my_fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Create n-D dataspace */
+ fill = EARRAY_DSET_DIM;
+ H5VM_array_fill(dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
+ fill = EARRAY_DSET_DIM;
+ H5VM_array_fill(max_dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
+ max_dim[unlim_dim] = H5S_UNLIMITED;
+ fill = EARRAY_DSET_DIM;
+ H5VM_array_fill(swizzled_dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
+ if((sid = H5Screate_simple((int)ndims, dim, max_dim)) < 0) FAIL_STACK_ERROR
+
+ /* Get the number of points in the dataspace */
+ if((snpoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
+ npoints = (hsize_t)snpoints;
+
+ /* Compute the "down" dimension values */
+ if(H5VM_array_down(ndims, dim, down) < 0) FAIL_STACK_ERROR
+
+ /* Create chunked dataset */
+ if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
+
+ /* Chunk index type expected depends on whether we are using the latest version of the format */
+ if(low == H5F_LIBVER_LATEST) {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_EARRAY) FAIL_PUTS_ERROR("should be using extensible array as index");
+ } /* end if */
+ else {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ } /* end else */
+
+ /* Fill existing elements */
+ for(u = 0; u < npoints; u++) {
+ /* Compute the coordinate from the linear offset */
+ if(H5VM_array_calc_pre(u, ndims, down, hs_offset) < 0) FAIL_STACK_ERROR
+
+ /* Un-swizzle hyperslab offset in same way as swizzled dimensions */
+ H5VM_unswizzle_coords(hsize_t, hs_offset, unlim_dim);
+
+ /* Select a single element in the dataset */
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) FAIL_STACK_ERROR
+
+ /* Read (unwritten) element from dataset */
+ read_elem = 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify unwritten element is fill value (0) */
+ if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
+
+ /* Write element to dataset */
+ write_elem = (unsigned)u;
+ if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
+
+ /* Read element from dataset */
+ read_elem = write_elem + 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify written element is read in */
+ if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
+ } /* end for */
+
+ /* Incrementally extend dataset and verify write/reads */
+ while(dim[unlim_dim] < EARRAY_MAX_EXTEND) {
+ hssize_t snew_npoints; /* # of points in dataspace extent (signed) */
+ hsize_t new_npoints; /* # of points in dataspace extent */
+
+ /* Extend dataset */
+ dim[unlim_dim] += EARRAY_EXTEND_INCR;
+ swizzled_dim[0] += EARRAY_EXTEND_INCR;
+ if(H5Dset_extent(dsid, dim) < 0) FAIL_STACK_ERROR
+
+ /* Close old dataspace */
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+
+ /* Get dataspace for dataset now */
+ if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR
+
+ /* Get the new number of points in the dataspace */
+ if((snew_npoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
+ new_npoints = (hsize_t)snew_npoints;
+
+ /* Fill new elements */
+ for(u = npoints; u < new_npoints; u++) {
+ /* Compute the coordinate from the linear offset */
+ if(H5VM_array_calc(u, ndims, swizzled_dim, hs_offset) < 0) FAIL_STACK_ERROR
+
+ /* Un-swizzle hyperslab offset in same way as swizzled dimensions */
+ H5VM_unswizzle_coords(hsize_t, hs_offset, unlim_dim);
+
+ /* Select a single element in the dataset */
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) FAIL_STACK_ERROR
+
+ /* Read (unwritten) element from dataset */
+ read_elem = 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify unwritten element is fill value (0) */
+ if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
+
+ /* Write element to dataset */
+ write_elem = (unsigned)u;
+ if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
+
+ /* Read element from dataset */
+ read_elem = write_elem + 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify written element is read in */
+ if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
+ } /* end for */
+
+ /* Update the number of points in the dataspace */
+ npoints = new_npoints;
+ } /* end while */
+
+ /* Close everything */
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+
+
+ /* Re-open file & dataset */
+ if((fid = H5Fopen(filename, H5F_ACC_RDONLY, my_fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Open dataset */
+ if((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
+
+ /* Chunk index type expected depends on whether we are using the latest version of the format */
+ if(low == H5F_LIBVER_LATEST) {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_EARRAY) FAIL_PUTS_ERROR("should be using extensible array as index");
+ } /* end if */
+ else {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ } /* end else */
+
+ /* Get dataspace for dataset now */
+ if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR
+
+ /* Get the number of points in the dataspace */
+ if((snpoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
+ npoints = (hsize_t)snpoints;
+
+ /* Get the current dimensions into swizzled_dim array */
+ if(H5Sget_simple_extent_dims(sid, swizzled_dim, NULL) < 0) FAIL_STACK_ERROR
+
+ /* Generate the swizzled dimensions */
+ H5VM_swizzle_coords(hsize_t, swizzled_dim, unlim_dim);
+
+ /* Compute the "down" dimension values */
+ if(H5VM_array_down(ndims, swizzled_dim, down) < 0) FAIL_STACK_ERROR
+
+ /* Read elements */
+ for(u = 0; u < npoints; u++) {
+ /* Compute the coordinate from the linear offset */
+ if(H5VM_array_calc_pre(u, ndims, down, hs_offset) < 0) FAIL_STACK_ERROR
+
+ /* Unswizzle hyperslab offset in same way as swizzled dimensions */
+ H5VM_unswizzle_coords(hsize_t, hs_offset, unlim_dim);
+
+ /* Select a single element in the dataset */
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) FAIL_STACK_ERROR
+
+ /* Read written element from dataset */
+ read_elem = (unsigned)(u + 1);
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify written element is correct */
+ if(read_elem != u) FAIL_PUTS_ERROR("invalid element read");
+ } /* end for */
+
+ /* Close everything */
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+
+
+ /* Re-open file */
+ if((fid = H5Fopen(filename, H5F_ACC_RDWR, my_fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Delete dataset */
+ if(H5Ldelete(fid, "dset", H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+
+ /* Close everything */
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+ } /* end for */
+
+ /* Close everything */
+ if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
+ } /* end for */
+ } /* end for */
+ } /* end for */
+ } /* end for */
+
+ /* Close everything */
+ if(H5Sclose(scalar_sid) < 0) FAIL_STACK_ERROR
+ if(H5Pclose(my_fapl) < 0) FAIL_STACK_ERROR
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dsid);
+ H5Sclose(sid);
+ H5Sclose(scalar_sid);
+ H5Fclose(fid);
+ H5Pclose(my_fapl);
+ } H5E_END_TRY;
+ return -1;
+} /* end test_chunk_fast() */
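/*
 * Editor's sketch (not part of this patch): illustrative re-implementations
 * of the coordinate bookkeeping test_chunk_fast() borrows from H5VM; these
 * are not the library's H5VM_swizzle_coords/H5VM_array_calc_pre. The
 * extensible array keys chunks on the unlimited dimension, so coordinates
 * are "swizzled" to move that dimension to rank 0:
 */
static void
swizzle_sketch(hsize_t *coords, unsigned unlim_dim)
{
    if(unlim_dim != 0) {
        hsize_t tmp = coords[unlim_dim];
        unsigned u;

        /* Shift dims 0 .. unlim_dim-1 up one rank, then put the unlimited
         * dimension's coordinate at rank 0 */
        for(u = unlim_dim; u > 0; u--)
            coords[u] = coords[u - 1];
        coords[0] = tmp;
    }
}

static void
linear_to_coords_sketch(hsize_t offset, unsigned ndims, const hsize_t *down,
    hsize_t *coords)
{
    unsigned u;

    /* "down" sizes: down[k] = product of the dimensions below rank k,
     * with down[ndims-1] = 1 */
    for(u = 0; u < ndims; u++) {
        coords[u] = offset / down[u];   /* index within this rank */
        offset %= down[u];              /* remainder indexes lower ranks */
    }
}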
+
+
+/*-------------------------------------------------------------------------
+ * Function: test_reopen_chunk_fast
+ *
+ * Purpose: To verify the fix for a bug in extensible arrays as chunk index.
+ * When the dataset is closed in H5D_close(), the pointer
+ * to the extensible array struct in the layout message
+ * is copied via H5D_flush_real() before H5D_chunk_dest().
+ * This causes an abort from "Assertion `ea->hdr' failed."
+ * later when the dataset is re-opened and read.
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Vailin Choi
+ * April 13, 2009
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_reopen_chunk_fast(hid_t fapl)
+{
+ char filename[FILENAME_BUF_SIZE];
+ hid_t fid = -1; /* File ID */
+ hid_t dcpl = -1; /* Dataset creation property list ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t scalar_sid = -1;/* Scalar dataspace ID */
+ hid_t dsid = -1; /* Dataset ID */
+ hsize_t dim, max_dim, chunk_dim; /* Dataset and chunk dimensions */
+ hsize_t hs_offset; /* Hyperslab offset */
+ hsize_t hs_size; /* Hyperslab size */
+ H5D_alloc_time_t alloc_time; /* Storage allocation time */
+ unsigned write_elem, read_elem; /* Element written/read */
+ unsigned u; /* Local index variable */
+
+ TESTING("datasets w/extensible array open/reopen with read/write");
+
+ h5_fixname(FILENAME[10], fapl, filename, sizeof filename);
+
+ /* Loop over storage allocation time */
+ for(alloc_time = H5D_ALLOC_TIME_EARLY; alloc_time <= H5D_ALLOC_TIME_INCR; alloc_time++) {
+ /* Create file */
+ if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Create dataset creation property list */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+
+ /* Set chunking */
+ chunk_dim = 10;
+ if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR
+
+ /* Set fill time */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) FAIL_STACK_ERROR
+
+ /* Set allocation time */
+ if(H5Pset_alloc_time(dcpl, alloc_time) < 0) FAIL_STACK_ERROR
+
+ /* Create scalar dataspace */
+ if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR
+
+ /* Create 1-D dataspace */
+ dim = 100;
+ max_dim = H5S_UNLIMITED;
+ if((sid = H5Screate_simple(1, &dim, &max_dim)) < 0) FAIL_STACK_ERROR
+
+ /* Create chunked dataset */
+ if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Fill existing elements */
+ hs_size = 1;
+ for(u = 0; u < 100; u++) {
+ /* Select a single element in the dataset */
+ hs_offset = u;
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0)
+ FAIL_STACK_ERROR
+ /* Write element to dataset */
+ write_elem = u;
+ if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0)
+ FAIL_STACK_ERROR
+ } /* end for */
+
+ /* Close everything */
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+
+ /* Reopen the dataset */
+ if((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+ hs_size = 1;
+
+ /* Read from dataset */
+ for(u = 0; u < 100; u++) {
+ /* Select a single element in the dataset */
+ hs_offset = u;
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0)
+ FAIL_STACK_ERROR
+
+ /* Read element from dataset */
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0)
+ FAIL_STACK_ERROR
+ } /* end for */
+
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(scalar_sid) < 0) FAIL_STACK_ERROR
+ if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+
+ } /* end for */
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dsid);
+ H5Sclose(sid);
+ H5Sclose(scalar_sid);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return -1;
+} /* end test_reopen_chunk_fast() */
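/*
 * Editor's sketch (not part of this patch): the element-at-a-time I/O
 * pattern this test shares with test_chunk_fast(). A scalar dataspace
 * describes the one-element memory buffer, and a 1x1 hyperslab in the file
 * dataspace picks the target element:
 */
static herr_t
write_one_element(hid_t dsid, hid_t file_space, hsize_t offset, unsigned val)
{
    hid_t scalar_sid = -1;  /* Memory dataspace: exactly one element */
    hsize_t one = 1;
    herr_t ret = -1;

    if((scalar_sid = H5Screate(H5S_SCALAR)) < 0)
        return -1;
    if(H5Sselect_hyperslab(file_space, H5S_SELECT_SET, &offset, NULL, &one, NULL) >= 0)
        ret = H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, file_space,
                H5P_DEFAULT, &val);
    H5Sclose(scalar_sid);
    return ret;
}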
+
+
+/*-------------------------------------------------------------------------
+ * Function: test_chunk_fast_bug1
+ *
+ * Purpose: Test extensible arrays where the first dimension in the
+ * chunk size is the same as the second dimension in the
+ * dataset size. This helps to confirm that all dimensions
+ * are being "swizzled" correctly in the earray chunk index
+ * code.
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * March 22, 2012
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_chunk_fast_bug1(hid_t fapl)
+{
+ char filename[FILENAME_BUF_SIZE];
+ hid_t fid = -1; /* File ID */
+ hid_t dcpl = -1; /* Dataset creation property list ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dsid = -1; /* Dataset ID */
+ hsize_t dim[2], max_dim[2], chunk_dim[2]; /* Dataset and chunk dimensions */
+ H5D_alloc_time_t alloc_time; /* Storage allocation time */
+ static unsigned wbuf[40][20], rbuf[40][20]; /* Elements written/read */
+ unsigned i, j; /* Local index variables */
+
+ TESTING("datasets w/extensible array chunk indexing bug");
+
+ h5_fixname(FILENAME[10], fapl, filename, sizeof filename);
+
+ /* Initialize write buffer */
+ for(i=0; i<40; i++)
+ for(j=0; j<20; j++)
+ wbuf[i][j] = (i * 20) + j;
+
+ /* Create 2-D dataspace */
+ dim[0] = 40;
+ dim[1] = 20;
+ max_dim[0] = 40;
+ max_dim[1] = H5S_UNLIMITED;
+ if((sid = H5Screate_simple(2, dim, max_dim)) < 0) FAIL_STACK_ERROR
+
+ /* Loop over storage allocation time */
+ for(alloc_time = H5D_ALLOC_TIME_EARLY; alloc_time <= H5D_ALLOC_TIME_INCR; alloc_time++) {
+ /* Create file */
+ if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Create dataset creation property list */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+
+ /* Set chunking */
+ chunk_dim[0] = 20;
+ chunk_dim[1] = 10;
+ if(H5Pset_chunk(dcpl, 2, chunk_dim) < 0) FAIL_STACK_ERROR
+
+ /* Set allocation time */
+ if(H5Pset_alloc_time(dcpl, alloc_time) < 0) FAIL_STACK_ERROR
+
+ /* Create chunked dataset */
+ if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Write buffer to dataset */
+ if(H5Dwrite(dsid, H5T_NATIVE_UINT, sid, sid, H5P_DEFAULT, &wbuf) < 0)
+ FAIL_STACK_ERROR
+
+ /* Close everything */
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+
+ /* Reopen the dataset */
+ if((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+
+ /* Read from dataset */
+ if(H5Dread(dsid, H5T_NATIVE_UINT, sid, sid, H5P_DEFAULT, &rbuf) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify read data */
+ for(i=0; i<40; i++)
+ for(j=0; j<20; j++)
+ if(wbuf[i][j] != rbuf[i][j])
+ FAIL_PUTS_ERROR("invalid element read");
+
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+
+ } /* end for */
+
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dsid);
+ H5Sclose(sid);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return -1;
+} /* end test_chunk_fast_bug1() */
+
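/*
 * Editor's sketch (not part of this patch): the H5Z_EXPAND class below is
 * backed by a callback with the standard H5Z_func_t shape, matching the
 * filter_bogus() prototype declared earlier in this file. A filter may grow
 * a chunk by reallocating *buf and updating *buf_size; returning 0 reports
 * failure. A do-nothing variant for reference:
 */
static size_t
filter_passthrough_sketch(unsigned int flags, size_t cd_nelmts,
    const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf)
{
    /* Unused in a pass-through filter */
    (void)flags; (void)cd_nelmts; (void)cd_values; (void)buf_size; (void)buf;

    return nbytes;  /* output size == input size: chunk left unchanged */
}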
/* This message derives from H5Z */
const H5Z_class2_t H5Z_EXPAND[1] = {{
H5Z_CLASS_T_VERS, /* H5Z_class_t version */
@@ -7665,28 +8680,33 @@ static herr_t
test_chunk_expand(hid_t fapl)
{
char filename[FILENAME_BUF_SIZE];
- hid_t fid = -1; /* File ID */
- hid_t dcpl = -1; /* Dataset creation property list ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t scalar_sid = -1;/* Scalar dataspace ID */
- hid_t dsid = -1; /* Dataset ID */
- hsize_t dim, max_dim, chunk_dim; /* Dataset and chunk dimensions */
- hsize_t hs_offset; /* Hyperslab offset */
- hsize_t hs_size; /* Hyperslab size */
- H5D_alloc_time_t alloc_time; /* Storage allocation time */
- unsigned write_elem, read_elem; /* Element written/read */
- unsigned u; /* Local index variable */
- size_t size; /* Size of type */
- herr_t status; /* Generic return value */
+ hid_t fid = -1; /* File ID */
+ hid_t dcpl = -1, dcpl2 = -1; /* Dataset creation property list ID */
+ hid_t sid = -1, sid2 = -1; /* Dataspace ID */
+ hid_t scalar_sid = -1; /* Scalar dataspace ID */
+ hid_t dsid = -1, dsid2 = -1; /* Dataset ID */
+ hsize_t dim, max_dim, chunk_dim; /* Dataset and chunk dimensions */
+ hsize_t dim2[2], max_dim2[2], chunk_dim2[2]; /* Dataset and chunk dimensions */
+ H5D_chunk_index_t idx_type, idx_type2; /* Dataset chunk index type */
+ H5F_libver_t low, high; /* File format bounds */
+ hsize_t hs_offset, hs_offset2[2]; /* Hyperslab offset */
+ hsize_t hs_size, hs_size2[2]; /* Hyperslab size */
+ H5D_alloc_time_t alloc_time; /* Storage allocation time */
+ unsigned write_elem, read_elem; /* Element written/read */
+ unsigned write_elem2, read_elem2; /* Element written/read */
+ unsigned u; /* Local index variable */
+ herr_t status; /* Generic return value */
TESTING("filter expanding chunks too much");
- h5_fixname(FILENAME[10], fapl, filename, sizeof filename);
+ h5_fixname(FILENAME[11], fapl, filename, sizeof filename);
- size = sizeof(size_t);
- if(size <= 4) {
+ /* Check if we are using the latest version of the format */
+ if(H5Pget_libver_bounds(fapl, &low, &high) < 0) FAIL_STACK_ERROR
+
+ if(sizeof(size_t) <= 4 && low != H5F_LIBVER_LATEST) {
SKIPPED();
- puts(" Current machine can't test for error");
+ puts(" Current machine can't test for error w/old file format");
} /* end if */
else {
/* Register "expansion" filter */
@@ -7697,34 +8717,41 @@ test_chunk_expand(hid_t fapl)
/* Loop over storage allocation time */
for(alloc_time = H5D_ALLOC_TIME_EARLY; alloc_time <= H5D_ALLOC_TIME_INCR; H5_INC_ENUM(H5D_alloc_time_t, alloc_time)) {
+
/* Create file */
if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
/* Create dataset creation property list */
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+ if((dcpl2 = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
/* Set chunking */
- chunk_dim = 10;
+ chunk_dim = chunk_dim2[0] = chunk_dim2[1] = 10;
if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR
+ if(H5Pset_chunk(dcpl2, 2, chunk_dim2) < 0) FAIL_STACK_ERROR
/* Set fill time */
if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) FAIL_STACK_ERROR
+ if(H5Pset_fill_time(dcpl2, H5D_FILL_TIME_ALLOC) < 0) FAIL_STACK_ERROR
/* Set allocation time */
if(H5Pset_alloc_time(dcpl, alloc_time) < 0) FAIL_STACK_ERROR
+ if(H5Pset_alloc_time(dcpl2, alloc_time) < 0) FAIL_STACK_ERROR
/* Set "expand" filter */
if(H5Pset_filter(dcpl, H5Z_FILTER_EXPAND, 0, (size_t)0, NULL) < 0) FAIL_STACK_ERROR
+ if(H5Pset_filter(dcpl2, H5Z_FILTER_EXPAND, 0, (size_t)0, NULL) < 0) FAIL_STACK_ERROR
/* Create scalar dataspace */
if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR
- /* Create 1-D dataspace */
- dim = 100;
- max_dim = H5S_UNLIMITED;
+ /* Create 1-D and 2-D dataspace */
+ dim = dim2[0] = dim2[1] = 100;
+ max_dim = max_dim2[0] = max_dim2[1] = H5S_UNLIMITED;
if((sid = H5Screate_simple(1, &dim, &max_dim)) < 0) FAIL_STACK_ERROR
+ if((sid2 = H5Screate_simple(2, dim2, max_dim2)) < 0) FAIL_STACK_ERROR
- /* Create chunked dataset */
+ /* Create 1-D & 2-D chunked datasets */
if(H5D_ALLOC_TIME_EARLY == alloc_time) {
/* Make the expansion factor large enough to cause failure right away */
filter_expand_factor_g = 8;
@@ -7733,176 +8760,272 @@ test_chunk_expand(hid_t fapl)
dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
} H5E_END_TRY;
if(dsid >= 0) FAIL_PUTS_ERROR("should fail to create dataset when allocation time is early");
+
+ H5E_BEGIN_TRY {
+ dsid2 = H5Dcreate2(fid, "dset2", H5T_NATIVE_UINT, sid2, H5P_DEFAULT, dcpl2, H5P_DEFAULT);
+ } H5E_END_TRY;
+ if(dsid2 >= 0) FAIL_PUTS_ERROR("should fail to create dataset when allocation time is early");
+
} /* end if */
else {
if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
FAIL_STACK_ERROR
+ if((dsid2 = H5Dcreate2(fid, "dset2", H5T_NATIVE_UINT, sid2, H5P_DEFAULT, dcpl2, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
+ if(H5D__layout_idx_type_test(dsid2, &idx_type2) < 0) FAIL_STACK_ERROR
+
+ /* Chunk index type expected depends on whether we are using the latest version of the format */
+ if(low == H5F_LIBVER_LATEST) {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_EARRAY) FAIL_PUTS_ERROR("should be using extensible array as index");
+ if(idx_type2 != H5D_CHUNK_IDX_BT2) FAIL_PUTS_ERROR("should be using v2 B-tree as index");
+ } /* end if */
+ else {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ if(idx_type2 != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ } /* end else */
+
/* Fill elements */
- hs_size = 1;
+ hs_size = hs_size2[0] = hs_size2[1] = 1;
for(u = 0; u < 100; u++) {
- /* Select a single element in the dataset */
- hs_offset = u;
+
+ hs_offset = hs_offset2[0] = hs_offset2[1] = u;
+
+ /* Select a single element in the 1-D dataset */
if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0) FAIL_STACK_ERROR
+ /* Select a single element in the 2-D dataset; NOT every element is selected */
+ if(H5Sselect_hyperslab(sid2, H5S_SELECT_SET, hs_offset2, NULL, hs_size2, NULL) < 0) FAIL_STACK_ERROR
+
/* Read (unwritten) element from dataset */
- read_elem = 1;
+ read_elem = read_elem2 = 1;
if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dread(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &read_elem2) < 0) FAIL_STACK_ERROR
/* Verify unwritten element is fill value (0) */
if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
+ if(read_elem2 != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
/* Don't expand chunks yet */
filter_expand_factor_g = 0;
- /* Write element to dataset */
- write_elem = u;
+ /* Write element to the datasets */
+ write_elem = write_elem2 = u;
if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dwrite(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &write_elem2) < 0) FAIL_STACK_ERROR
- /* Read element from dataset */
+ /* Read element from the datasets */
read_elem = write_elem + 1;
+ read_elem2 = write_elem2 + 1;
if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dread(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &read_elem2) < 0) FAIL_STACK_ERROR
/* Verify written element is read in */
if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
+ if(read_elem2 != write_elem2) FAIL_PUTS_ERROR("invalid written element read");
/* Expand chunks now */
filter_expand_factor_g = 8;
- /* Write element to dataset */
- write_elem = u;
+ /* Write element to the datasets */
+ write_elem = write_elem2 = u;
H5E_BEGIN_TRY {
status = H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem);
} H5E_END_TRY;
if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
+
+ H5E_BEGIN_TRY {
+ status = H5Dwrite(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &write_elem2);
+ } H5E_END_TRY;
+ if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
} /* end for */
/* Incrementally extend dataset and verify write/reads */
while(dim < 1000) {
- /* Extend dataset */
+ /* Extend the datasets */
dim += 100;
+ dim2[0] += 100;
+ dim2[1] += 100;
if(H5Dset_extent(dsid, &dim) < 0) FAIL_STACK_ERROR
+ if(H5Dset_extent(dsid2, dim2) < 0) FAIL_STACK_ERROR
/* Close old dataspace */
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid2) < 0) FAIL_STACK_ERROR
- /* Get dataspace for dataset now */
+ /* Get dataspace for the datasets now */
if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR
+ if((sid2 = H5Dget_space(dsid2)) < 0) FAIL_STACK_ERROR
/* Fill new elements */
- hs_size = 1;
+ hs_size = hs_size2[0] = hs_size2[1] = 1;
for(u = 0; u < 100; u++) {
- /* Select a single element in the dataset */
+ /* Select a single element in the datasets */
hs_offset = (dim + u) - 100;
+ hs_offset2[0] = (dim2[0] + u) - 100;
+ hs_offset2[1] = (dim2[1] + u) - 100;
if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0) FAIL_STACK_ERROR
+ if(H5Sselect_hyperslab(sid2, H5S_SELECT_SET, hs_offset2, NULL, hs_size2, NULL) < 0) FAIL_STACK_ERROR
- /* Read (unwritten) element from dataset */
- read_elem = 1;
+ /* Read (unwritten) element from the datasets */
+ read_elem = read_elem2 = 1;
if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dread(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &read_elem2) < 0) FAIL_STACK_ERROR
/* Verify unwritten element is fill value (0) */
if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
+ if(read_elem2 != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
/* Don't expand chunks yet */
filter_expand_factor_g = 0;
- /* Write element to dataset */
- write_elem = u;
+ /* Write element to the datasets */
+ write_elem = write_elem2 = u;
if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dwrite(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &write_elem2) < 0) FAIL_STACK_ERROR
- /* Read element from dataset */
+ /* Read element from the datasets */
read_elem = write_elem + 1;
+ read_elem2 = write_elem2 + 1;
if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dread(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &read_elem2) < 0) FAIL_STACK_ERROR
/* Verify written element is read in */
if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
+ if(read_elem2 != write_elem2) FAIL_PUTS_ERROR("invalid written element read");
/* Expand chunks now */
filter_expand_factor_g = 8;
- /* Write element to dataset */
- write_elem = u;
+ /* Write element to the datasets */
+ write_elem = write_elem2 = u;
H5E_BEGIN_TRY {
status = H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem);
} H5E_END_TRY;
if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
+
+ H5E_BEGIN_TRY {
+ status = H5Dwrite(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &write_elem2);
+ } H5E_END_TRY;
+ if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
} /* end for */
} /* end while */
- /* Close dataset */
+ /* Close the datasets */
if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Dclose(dsid2) < 0) FAIL_STACK_ERROR
} /* end else */
/* Close everything */
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid2) < 0) FAIL_STACK_ERROR
if(H5Sclose(scalar_sid) < 0) FAIL_STACK_ERROR
if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
+ if(H5Pclose(dcpl2) < 0) FAIL_STACK_ERROR
if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
/* If the dataset was created, do some extra testing */
if(H5D_ALLOC_TIME_EARLY != alloc_time) {
- /* Re-open file & dataset */
+ /* Re-open file & datasets */
if((fid = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) FAIL_STACK_ERROR
- /* Open dataset */
+ /* Open the datasets */
if((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+ if((dsid2 = H5Dopen2(fid, "dset2", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+
+ /* Get the chunk index type for the two datasets */
+ if(H5D__layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
+ if(H5D__layout_idx_type_test(dsid2, &idx_type2) < 0) FAIL_STACK_ERROR
+
+ /* The expected chunk index type depends on whether the latest version of the file format is in use */
+ if(low == H5F_LIBVER_LATEST) {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_EARRAY) FAIL_PUTS_ERROR("should be using extensible array as index");
+ if(idx_type2 != H5D_CHUNK_IDX_BT2) FAIL_PUTS_ERROR("should be using v2 B-tree as index");
+ } /* end if */
+ else {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ if(idx_type2 != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ } /* end else */
/* Create scalar dataspace */
if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR
- /* Get dataspace for dataset now */
+ /* Get dataspace for the datasets now */
if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR
+ if((sid2 = H5Dget_space(dsid2)) < 0) FAIL_STACK_ERROR
/* Read elements */
- hs_size = 1;
+ hs_size = hs_size2[0] = hs_size2[1] = 1;
for(u = 0; u < 1000; u++) {
- /* Select a single element in the dataset */
- hs_offset = u;
+ /* Select a single element in the datasets */
+ hs_offset = hs_offset2[0] = hs_offset2[1] = u;
if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0) FAIL_STACK_ERROR
+ if(H5Sselect_hyperslab(sid2, H5S_SELECT_SET, hs_offset2, NULL, hs_size2, NULL) < 0) FAIL_STACK_ERROR
- /* Read element from dataset */
- read_elem = u + 1;
+ /* Read element from the datasets */
+ read_elem = read_elem2 = u + 1;
if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dread(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &read_elem2) < 0) FAIL_STACK_ERROR
/* Verify previously written element has the expected value */
if(read_elem != (u % 100)) FAIL_PUTS_ERROR("invalid element read");
+ if(read_elem2 != (u % 100)) FAIL_PUTS_ERROR("invalid element read");
/* Don't expand chunks yet */
filter_expand_factor_g = 0;
- /* Write element to dataset */
- write_elem = u % 100;
+ /* Write element to the datasets */
+ write_elem = write_elem2 = u % 100;
if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dwrite(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &write_elem2) < 0) FAIL_STACK_ERROR
- /* Read element from dataset */
+ /* Read element from the datasets */
read_elem = write_elem + 1;
+ read_elem2 = write_elem2 + 1;
if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+ if(H5Dread(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &read_elem2) < 0) FAIL_STACK_ERROR
/* Verify written element is read in */
if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
+ if(read_elem2 != write_elem2) FAIL_PUTS_ERROR("invalid written element read");
/* Expand chunks now */
filter_expand_factor_g = 8;
- /* Write element to dataset */
- write_elem = u % 100;
+ /* Write element to the datasets */
+ write_elem = write_elem2 = u % 100;
H5E_BEGIN_TRY {
status = H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem);
} H5E_END_TRY;
if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
+
+ H5E_BEGIN_TRY {
+ status = H5Dwrite(dsid2, H5T_NATIVE_UINT, scalar_sid, sid2, H5P_DEFAULT, &write_elem2);
+ } H5E_END_TRY;
+ if(status >= 0) FAIL_PUTS_ERROR("should fail to write to dataset when allocation time is not early");
} /* end for */
/* Close everything */
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid2) < 0) FAIL_STACK_ERROR
if(H5Sclose(scalar_sid) < 0) FAIL_STACK_ERROR
if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Dclose(dsid2) < 0) FAIL_STACK_ERROR
if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
/* Re-open file */
if((fid = H5Fopen(filename, H5F_ACC_RDWR, fapl)) < 0) FAIL_STACK_ERROR
- /* Delete dataset */
+ /* Delete the datasets */
if(H5Ldelete(fid, "dset", H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+ if(H5Ldelete(fid, "dset2", H5P_DEFAULT) < 0) FAIL_STACK_ERROR
/* Close everything */
if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
@@ -7923,8 +9046,11 @@ test_chunk_expand(hid_t fapl)
error:
H5E_BEGIN_TRY {
H5Pclose(dcpl);
+ H5Pclose(dcpl2);
H5Dclose(dsid);
+ H5Dclose(dsid2);
H5Sclose(sid);
+ H5Sclose(sid2);
H5Sclose(scalar_sid);
H5Fclose(fid);
} H5E_END_TRY;
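
For reference, a minimal standalone sketch of the format-bound behavior the index checks above assert: with H5F_LIBVER_LATEST bounds, a chunked dataset with one unlimited dimension is expected to use an extensible-array index and one with two unlimited dimensions a v2 B-tree, while older bounds fall back to the v1 B-tree for both. This sketch is not part of the committed change; the file name is illustrative, and it relies on the same h5test.h macros and internal H5D__layout_idx_type_test() hook used throughout this file, so it only links inside the test suite.

static herr_t
sketch_chunk_idx_types(void)
{
    hid_t fapl = -1, fid = -1, dcpl = -1;
    hid_t sid1 = -1, sid2 = -1, did1 = -1, did2 = -1;
    hsize_t dim1 = 100, max1 = H5S_UNLIMITED, chunk1 = 10;
    hsize_t dim2[2] = {100, 100}, max2[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t chunk2[2] = {10, 10};
    H5D_chunk_index_t idx;

    /* Request the latest file format; with default bounds both datasets
     * below would use the v1 B-tree chunk index instead */
    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) TEST_ERROR
    if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) TEST_ERROR
    if((fid = H5Fcreate("idx_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) TEST_ERROR

    /* 1-D dataset, one unlimited dimension */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR
    if(H5Pset_chunk(dcpl, 1, &chunk1) < 0) TEST_ERROR
    if((sid1 = H5Screate_simple(1, &dim1, &max1)) < 0) TEST_ERROR
    if((did1 = H5Dcreate2(fid, "d1", H5T_NATIVE_UINT, sid1, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR
    if(H5D__layout_idx_type_test(did1, &idx) < 0) TEST_ERROR
    if(idx != H5D_CHUNK_IDX_EARRAY) TEST_ERROR      /* extensible array expected */

    /* 2-D dataset, two unlimited dimensions */
    if(H5Pset_chunk(dcpl, 2, chunk2) < 0) TEST_ERROR
    if((sid2 = H5Screate_simple(2, dim2, max2)) < 0) TEST_ERROR
    if((did2 = H5Dcreate2(fid, "d2", H5T_NATIVE_UINT, sid2, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR
    if(H5D__layout_idx_type_test(did2, &idx) < 0) TEST_ERROR
    if(idx != H5D_CHUNK_IDX_BT2) TEST_ERROR         /* v2 B-tree expected */

    /* Close everything */
    if(H5Dclose(did1) < 0 || H5Dclose(did2) < 0) TEST_ERROR
    if(H5Sclose(sid1) < 0 || H5Sclose(sid2) < 0) TEST_ERROR
    if(H5Pclose(dcpl) < 0 || H5Pclose(fapl) < 0) TEST_ERROR
    if(H5Fclose(fid) < 0) TEST_ERROR

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(did1); H5Dclose(did2);
        H5Sclose(sid1); H5Sclose(sid2);
        H5Pclose(dcpl); H5Pclose(fapl);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
} /* sketch_chunk_idx_types() */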
@@ -8329,9 +9455,83 @@ error:
} H5E_END_TRY;
return -1;
} /* end test_fixed_array() */
+
+
/*-------------------------------------------------------------------------
*
+ * test_idx_compatible():
+ * Verify that the library can read datasets created with the
+ * 1.6/1.8 libraries that use the v1 B-tree chunk indexing method.
+ *
+ * Programmer: Vailin Choi; 26th August, 2009
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_idx_compatible(void)
+{
+ hid_t fid = -1; /* File id */
+ hid_t did = -1; /* Dataset id */
+ const char *filename = NULL; /* old test file name */
+ unsigned j; /* Local index variable */
+ H5D_chunk_index_t idx_type; /* Chunked dataset index type */
+
+ /* Output message about test being performed */
+ TESTING("compatibility for 1.6/1.8 datasets that use B-tree indexing");
+
+ for(j = 0; j < NELMTS(OLD_FILENAME); j++) {
+
+ /* Generate correct name for test file by prepending the source path */
+ filename = H5_get_srcdir_filename(OLD_FILENAME[j]);
+
+ /* Open the file */
+ if((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /* Should be able to read the dataset without a filter that was created under 1.6/1.8 */
+ if((did = H5Dopen2(fid, DSET, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(did, &idx_type) < 0) FAIL_STACK_ERROR
+
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index")
+
+ if(H5Dclose(did) < 0) FAIL_STACK_ERROR
+
+ /* Should be able to read the dataset with a filter that was created under 1.6/1.8 */
+ if((did = H5Dopen2(fid, DSET_FILTER, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /* Get the chunk index type */
+ if(H5D__layout_idx_type_test(did, &idx_type) < 0) FAIL_STACK_ERROR
+
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index")
+
+ if(H5Dclose(did) < 0) FAIL_STACK_ERROR
+
+ /* Close the file */
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+ }
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return -1;
+} /* test_idx_compatible */
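
The legacy files above were generated with the actual 1.6 and 1.8 libraries. As a hedged aside: because the v1 B-tree remains the default chunk index whenever the latest format is not requested, a structurally similar file can still be produced with the current library. The sketch below (illustrative file name, default format bounds, same h5test.h macros) is one way to do so; it is not how the checked-in files were made.

static herr_t
sketch_make_v1_btree_file(void)
{
    hid_t fid = -1, sid = -1, dcpl = -1, did = -1;
    hsize_t dim = 100, max_dim = H5S_UNLIMITED, chunk_dim = 10;

    /* Default FAPL => earliest-compatible format => v1 B-tree chunk index */
    if((fid = H5Fcreate("btree_idx_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR
    if((sid = H5Screate_simple(1, &dim, &max_dim)) < 0) TEST_ERROR
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR
    if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) TEST_ERROR

    /* "dset" matches the unfiltered dataset name this test opens */
    if((did = H5Dcreate2(fid, "dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) TEST_ERROR

    /* Close everything */
    if(H5Dclose(did) < 0) TEST_ERROR
    if(H5Pclose(dcpl) < 0) TEST_ERROR
    if(H5Sclose(sid) < 0) TEST_ERROR
    if(H5Fclose(fid) < 0) TEST_ERROR

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Dclose(did);
        H5Pclose(dcpl);
        H5Sclose(sid);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
} /* sketch_make_v1_btree_file() */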
+
+/*-------------------------------------------------------------------------
+ *
* test_unfiltered_edge_chunks():
* Tests that partial edge chunks aren't filtered when the
* H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS option is set.
@@ -8690,7 +9890,7 @@ test_storage_size(hid_t fapl)
TESTING("querying storage size");
- h5_fixname(FILENAME[16], fapl, filename, sizeof filename);
+ h5_fixname(FILENAME[18], fapl, filename, sizeof filename);
/* Create file */
if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
@@ -10075,6 +11275,278 @@ error:
return -1;
} /* end test_gather_error() */
+/*-------------------------------------------------------------------------
+ * DLS bug -- HDFFV-9672
+ *
+ * The following functions replicate the test code provided by DLS to
+ * expose bug hdffv-9672. All functions associated with this test
+ * have the prefix DLS_01_
+ *
+ * The note documenting the bug is reproduced below:
+ *
+ * ------------------------------------------------------
+ *
+ * Hi,
+ * We've found an issue regarding fixed length strings.
+ *
+ * If we create a chunked dataset of large fixed length strings
+ * (up to 1kb per string) with small chunk sizes (~8 elements per
+ * chunk) then the resulting dataset may not be read later.
+ * This only happens if the file is created with LIBVER_LATEST
+ * for the version bounds.
+ *
+ * Calling H5Oget_info(...) on the dataset results in the following:
+ *
+ * H5Dearray.c:250: H5D__earray_crt_context: Assertion
+ * `udata->chunk_size > 0' failed.
+ *
+ * Example:
+ * void create_data(...)
+ * {
+ * ...
+ *
+ * hsize_t chunks[1] = {8} ;
+ *
+ * err = H5Tset_size( tid, 256 );
+ *
+ * err = H5Pset_chunk( dcpl, 1, chunks );
+ *
+ * H5Dcreate2( fid, "data", tid, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT );
+ *
+ * // write data
+ * }
+ *
+ * void read_data(...)
+ * {
+ * ...
+ *
+ * H5O_info_t info; status = H5Oget_info( did, &info ); // crash
+ * ...
+ * }
+ *
+ * If the size of the chunk is increased (usually matching the
+ * string length) then this problem disappears.
+ *
+ * A full program that produces such a file (and crashes trying to
+ * read it) is attached.
+ *
+ * Tested with 1.10.0-alpha1.
+ *
+ * Regards,
+ *
+ * Charles Mita
+ * Software Engineer
+ * Diamond Light Source Ltd.
+ * +44 1235 778029
+ *
+ * ------------------------------------------------------
+ *
+ * The bug in question turned out to be caused by a failure to update
+ * the enc_bytes_per_dim field in the layout if the size of the
+ * underlying type required more bytes to encode than any of the
+ * chunk dimensions.
+ *
+ * At least in debug builds, the following test code exposes the
+ * failure via an assertion failure.
+ *
+ * Note that the test code makes no attempt to run with different
+ * file drivers, as the bug is in the actual on-disk encoding of
+ * the chunk layout.
+ *
+ * JRM -- 2/5/16
+ *
+ *-------------------------------------------------------------------------
+ */
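
To make the sizing problem concrete, here is a rough worked example under the interpretation given above (the encoder must size its fields from the largest value stored, and the chunk's total byte size is encoded alongside its dimensions). The helper is hypothetical; it only mirrors the sizing decision, not the library's internal encoder.

/* Hypothetical helper: minimum number of bytes needed to encode a value */
static unsigned
sketch_bytes_needed(unsigned long long v)
{
    unsigned n = 1;

    while(v >>= 8)
        n++;

    return n;
} /* sketch_bytes_needed() */

/* For the failing configuration quoted above (8-element chunks of
 * 256-byte strings):
 *
 *   sketch_bytes_needed(8)        == 1   <- chunk dimension
 *   sketch_bytes_needed(8 * 256)  == 2   <- chunk size in bytes (2048)
 *
 * Sizing enc_bytes_per_dim from the chunk dimensions alone (1 byte) is
 * therefore too small for the chunk's byte size; the truncated encoding
 * can decode as a chunk size of 0, tripping the assertion in
 * H5D__earray_crt_context(). */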
+
+#define DLS_01_DATASET "data"
+#define DLS_01_STR_SIZE 256
+#define DLS_01_CHUNK_SIZE 8
+#define DLS_01_DIMS 4
+
+static herr_t dls_01_setup_file( hid_t fid );
+static herr_t dls_01_write_data( hid_t fid, char* buffer );
+static herr_t dls_01_read_stuff( hid_t fid );
+static herr_t dls_01_main( void );
+
+static herr_t
+dls_01_setup_file( hid_t fid ) {
+
+ int status = 0;
+ hid_t sid = 0, did = 0, tid = 0, dcpl = 0;
+ int ndims = 1;
+ hsize_t max_shape[1] = {H5S_UNLIMITED};
+ hsize_t initial_shape[1] = {0};
+ hsize_t chunks[1] = {DLS_01_CHUNK_SIZE};
+
+ sid = H5Screate_simple( ndims, initial_shape, max_shape );
+ if ( sid <= 0 ) TEST_ERROR
+
+ tid = H5Tcopy( H5T_C_S1 );
+ if ( tid <= 0 ) TEST_ERROR
+
+ status = H5Tset_size( tid, DLS_01_STR_SIZE );
+ if ( status != 0 ) TEST_ERROR
+
+ dcpl = H5Pcreate( H5P_DATASET_CREATE );
+ if ( dcpl <= 0 ) TEST_ERROR
+
+ status = H5Pset_chunk( dcpl, ndims, chunks );
+ if ( status != 0 ) TEST_ERROR
+
+ did = H5Dcreate2( fid, DLS_01_DATASET, tid, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT );
+ if ( did <= 0 ) TEST_ERROR
+
+ status = H5Dclose( did );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Pclose( dcpl );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Tclose( tid );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Sclose( sid );
+ if ( status != 0 ) TEST_ERROR
+
+ return 0;
+
+error:
+
+ return -1;
+
+} /* dls_01_setup_file */
+
+static herr_t
+dls_01_write_data( hid_t fid, char* buffer ) {
+
+ int status = 0;
+ hid_t did = 0, tid = 0;
+ hsize_t extent[1] = {DLS_01_DIMS};
+
+ did = H5Dopen2( fid, DLS_01_DATASET, H5P_DEFAULT );
+ if ( did <= 0 ) TEST_ERROR
+
+ tid = H5Dget_type( did );
+ if ( tid <= 0 ) TEST_ERROR
+
+ status = H5Dset_extent( did, extent );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Dwrite( did, tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, buffer );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Fflush( fid, H5F_SCOPE_LOCAL );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Tclose( tid );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Dclose( did );
+ if ( status != 0 ) TEST_ERROR
+
+ return 0;
+
+error:
+
+ return -1;
+
+} /* dls_01_write_data */
+
+static herr_t
+dls_01_read_stuff( hid_t fid ) {
+
+ int status = 0;
+ hid_t did = 0;
+ H5O_info_t info;
+
+ did = H5Dopen2( fid, DLS_01_DATASET, H5P_DEFAULT );
+ if ( did <= 0 ) TEST_ERROR
+
+ status = H5Oget_info( did, &info );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Dclose( did );
+ if ( status != 0 ) TEST_ERROR
+
+ return 0;
+
+error:
+
+ return -1;
+
+} /* dls_01_read_stuff() */
+
+static herr_t
+dls_01_main( void ) {
+
+ char filename[512];
+ int status = 0;
+ hid_t fapl = 0, fid = 0;
+ const char* strings[DLS_01_DIMS] =
+ { "String 1", "Test string 2", "Another string", "Final String" };
+ char* buffer = NULL;
+
+ TESTING("DLS bugfix 1");
+
+ if ( NULL == h5_fixname(FILENAME[17], H5P_DEFAULT, filename,
+ sizeof(filename)) )
+ TEST_ERROR
+
+ buffer = (char *)HDcalloc( DLS_01_DIMS, DLS_01_STR_SIZE );
+ if ( NULL == buffer )
+ TEST_ERROR
+
+ HDstrcpy( buffer, strings[0] );
+ HDstrcpy( buffer + DLS_01_STR_SIZE, strings[1] );
+ HDstrcpy( buffer + DLS_01_STR_SIZE * 2, strings[2] );
+ HDstrcpy( buffer + DLS_01_STR_SIZE * 3, strings[3] );
+
+ fapl = H5Pcreate( H5P_FILE_ACCESS );
+ if ( fapl <= 0 ) TEST_ERROR
+
+ status = H5Pset_libver_bounds( fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST );
+ if ( status != 0 ) TEST_ERROR
+
+ fid = H5Fcreate( filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl );
+ if ( fid <= 0 ) TEST_ERROR
+
+ if ( 0 != dls_01_setup_file( fid ) )
+ goto error;
+
+ if ( 0 != dls_01_write_data( fid, buffer ) )
+ goto error;
+
+ status = H5Fclose( fid );
+ if ( status != 0 ) TEST_ERROR
+
+ fid = H5Fopen( filename, H5F_ACC_RDONLY, fapl );
+ if ( fid <= 0 ) TEST_ERROR
+
+ if ( 0 != dls_01_read_stuff( fid ) )
+ goto error;
+
+ status = H5Fclose( fid );
+ if ( status != 0 ) TEST_ERROR
+
+ status = H5Pclose( fapl );
+ if ( status != 0 ) TEST_ERROR
+
+ HDfree(buffer);
+
+ PASSED();
+
+ return 0;
+
+error:
+
+ if ( buffer ) HDfree(buffer);
+
+ return -1;
+
+} /* dls_01_main() */
+
+
+
/*-------------------------------------------------------------------------
* Function: main
@@ -10200,9 +11672,13 @@ main(void)
nerrors += (test_huge_chunks(my_fapl) < 0 ? 1 : 0);
nerrors += (test_chunk_cache(my_fapl) < 0 ? 1 : 0);
nerrors += (test_big_chunks_bypass_cache(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_chunk_fast(envval, my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_reopen_chunk_fast(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_chunk_fast_bug1(my_fapl) < 0 ? 1 : 0);
nerrors += (test_chunk_expand(my_fapl) < 0 ? 1 : 0);
nerrors += (test_layout_extend(my_fapl) < 0 ? 1 : 0);
nerrors += (test_fixed_array(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_idx_compatible() < 0 ? 1 : 0);
nerrors += (test_unfiltered_edge_chunks(my_fapl) < 0 ? 1 : 0);
nerrors += (test_large_chunk_shrink(my_fapl) < 0 ? 1 : 0);
nerrors += (test_zero_dim_dset(my_fapl) < 0 ? 1 : 0);
@@ -10221,6 +11697,9 @@ main(void)
nerrors += (test_scatter_error() < 0 ? 1 : 0);
nerrors += (test_gather_error() < 0 ? 1 : 0);
+ /* Run misc tests */
+ nerrors += (dls_01_main() < 0 ? 1 : 0);
+
/* Verify symbol table messages are cached */
nerrors += (h5_verify_cached_stabs(FILENAME, fapl) < 0 ? 1 : 0);