summaryrefslogtreecommitdiffstats
path: root/test
diff options
context:
space:
mode:
authorQuincey Koziol <koziol@hdfgroup.org>2009-07-25 03:30:19 (GMT)
committerQuincey Koziol <koziol@hdfgroup.org>2009-07-25 03:30:19 (GMT)
commit21f8fa203a153bb9d3d26f73c9e56d22fd9671f2 (patch)
tree2076af8e57763501f48faaa361ba666b3444fdad /test
parentacd5a4278a1e2323fede8289d9eaa567de545eac (diff)
downloadhdf5-21f8fa203a153bb9d3d26f73c9e56d22fd9671f2.zip
hdf5-21f8fa203a153bb9d3d26f73c9e56d22fd9671f2.tar.gz
hdf5-21f8fa203a153bb9d3d26f73c9e56d22fd9671f2.tar.bz2
[svn-r17236] Description:
Add full support for any [single] dimension being unlimited when using extensible arrays as a chunk index. Tested on: FreeBSD/32 6.3 (duty) in debug mode FreeBSD/64 6.3 (liberty) w/C++ & FORTRAN, in debug mode Linux/32 2.6 (jam) w/PGI compilers, w/C++ & FORTRAN, w/threadsafe, in debug mode Linux/64-amd64 2.6 (smirom) w/Intel compilers w/default API=1.6.x, w/C++ & FORTRAN, in production mode Solaris/32 2.10 (linew) w/deprecated symbols disabled, w/C++ & FORTRAN, w/szip filter, in production mode Linux/64-ia64 2.6 (cobalt) w/Intel compilers, w/C++ & FORTRAN, in production mode Linux/64-ia64 2.4 (tg-login3) w/parallel, w/FORTRAN, in debug mode Linux/64-amd64 2.6 (abe) w/parallel, w/FORTRAN, in production mode Mac OS X/32 10.5.7 (amazon) in debug mode Mac OS X/32 10.5.7 (amazon) w/C++ & FORTRAN, w/threadsafe, in production mode
Diffstat (limited to 'test')
-rw-r--r--test/Makefile.am3
-rw-r--r--test/Makefile.in2
-rw-r--r--test/dsets.c340
-rw-r--r--test/swmr_common.c4
-rw-r--r--test/swmr_reader.c2
5 files changed, 188 insertions, 163 deletions
diff --git a/test/Makefile.am b/test/Makefile.am
index ec6df85..81c7362 100644
--- a/test/Makefile.am
+++ b/test/Makefile.am
@@ -124,7 +124,8 @@ CHECK_CLEANFILES+=cmpd_dset.h5 compact_dataset.h5 dataset.h5 dset_offset.h5 \
new_move_[ab].h5 ntypes.h5 dangle.h5 error_test.h5 err_compat.h5 \
dtransform.h5 test_filters.h5 get_file_name.h5 tstint[1-2].h5 \
unlink_chunked.h5 btree2.h5 objcopy_src.h5 objcopy_dst.h5 \
- objcopy_ext.dat trefer1.h5 trefer2.h5 app_ref.h5 earray.h5
+ objcopy_ext.dat trefer1.h5 trefer2.h5 app_ref.h5 earray.h5 \
+ swmr_data.h5
# Sources for testhdf5 executable
testhdf5_SOURCES=testhdf5.c tarray.c tattr.c tchecksum.c tconfig.c tfile.c \
diff --git a/test/Makefile.in b/test/Makefile.in
index 130cd44..3d1468d 100644
--- a/test/Makefile.in
+++ b/test/Makefile.in
@@ -668,7 +668,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog cmpd_dset.h5 \
err_compat.h5 dtransform.h5 test_filters.h5 get_file_name.h5 \
tstint[1-2].h5 unlink_chunked.h5 btree2.h5 objcopy_src.h5 \
objcopy_dst.h5 objcopy_ext.dat trefer1.h5 trefer2.h5 \
- app_ref.h5 earray.h5
+ app_ref.h5 earray.h5 swmr_data.h5
INCLUDES = -I$(top_srcdir)/src -I$(top_builddir)/src
# Test script for error_test and err_compat
diff --git a/test/dsets.c b/test/dsets.c
index 454d4a2..c45b2c1 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -197,10 +197,10 @@ const char *FILENAME[] = {
/* Parameters for testing extensible array chunk indices */
#define EARRAY_MAX_RANK 3
-#define EARRAY_DSET_DIM 30
+#define EARRAY_DSET_DIM 15
#define EARRAY_CHUNK_DIM 3
-#define EARRAY_EXTEND_INCR 30
-#define EARRAY_MAX_EXTEND 150
+#define EARRAY_EXTEND_INCR 15
+#define EARRAY_MAX_EXTEND 75
/* Shared global arrays */
#define DSET_DIM1 100
@@ -6944,6 +6944,9 @@ test_chunk_fast(hid_t fapl)
hid_t sid = -1; /* Dataspace ID */
hid_t scalar_sid = -1;/* Scalar dataspace ID */
hid_t dsid = -1; /* Dataset ID */
+ hsize_t fill; /* Temporary value, for filling arrays */
+ hsize_t hs_size[EARRAY_MAX_RANK]; /* Hyperslab size */
+ hsize_t chunk_dim[EARRAY_MAX_RANK]; /* Chunk dimensions */
H5F_libver_t low; /* File format low bound */
hbool_t swmr; /* Whether file should be written with SWMR access enabled */
@@ -6972,6 +6975,14 @@ test_chunk_fast(hid_t fapl)
/* Create scalar dataspace */
if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR
+ /* Initialize chunk dimensions */
+ fill = EARRAY_CHUNK_DIM;
+ H5V_array_fill(chunk_dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
+
+ /* Initialize hyperslab size */
+ fill = 1;
+ H5V_array_fill(hs_size, &fill, sizeof(fill), EARRAY_MAX_RANK);
+
/* Loop over using SWMR access to write */
for(swmr = FALSE; swmr <= TRUE; swmr++) {
#ifdef H5_HAVE_FILTER_DEFLATE
@@ -6988,27 +6999,12 @@ test_chunk_fast(hid_t fapl)
/* Loop over dataspace ranks to test */
for(ndims = 1; ndims < (EARRAY_MAX_RANK + 1); ndims++) {
- H5D_chunk_index_t idx_type; /* Dataset chunk index type */
- hsize_t fill; /* Temporary value, for filling arrays */
- hsize_t chunk_dim[EARRAY_MAX_RANK]; /* Chunk dimensions */
- hsize_t dim[EARRAY_MAX_RANK], max_dim[EARRAY_MAX_RANK]; /* Dataset dimensions */
- hsize_t down[EARRAY_MAX_RANK]; /* 'down' sizes, for computing array index */
- hsize_t hs_offset[EARRAY_MAX_RANK]; /* Hyperslab offset */
- hsize_t hs_size[EARRAY_MAX_RANK]; /* Hyperslab size */
- hssize_t snpoints; /* # of points in dataspace extent (signed) */
- hsize_t npoints; /* # of points in dataspace extent */
- unsigned write_elem, read_elem; /* Element written/read */
- hsize_t u; /* Local index variable */
-
- /* Create file */
- if((fid = H5Fcreate(filename, H5F_ACC_TRUNC | (swmr ? H5F_ACC_SWMR_WRITE : 0), H5P_DEFAULT, my_fapl)) < 0) FAIL_STACK_ERROR
+ unsigned unlim_dim;
/* Create dataset creation property list */
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
/* Set chunking & chunk dims */
- fill = EARRAY_CHUNK_DIM;
- H5V_array_fill(chunk_dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
if(H5Pset_chunk(dcpl, (int)ndims, chunk_dim) < 0) FAIL_STACK_ERROR
#ifdef H5_HAVE_FILTER_DEFLATE
@@ -7023,90 +7019,62 @@ test_chunk_fast(hid_t fapl)
/* Set allocation time */
if(H5Pset_alloc_time(dcpl, alloc_time) < 0) FAIL_STACK_ERROR
- /* Create n-D dataspace */
- fill = EARRAY_DSET_DIM;
- H5V_array_fill(dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
- fill = EARRAY_DSET_DIM;
- H5V_array_fill(max_dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
- max_dim[0] = H5S_UNLIMITED;
- if((sid = H5Screate_simple((int)ndims, dim, max_dim)) < 0) FAIL_STACK_ERROR
-
- /* Get the number of points in the dataspace */
- if((snpoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
- npoints = (hsize_t)snpoints;
-
- /* Compute the "down" dimension values */
- if(H5V_array_down(ndims, dim, down) < 0) FAIL_STACK_ERROR
-
- /* Create chunked dataset */
- if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR
-
- /* Get the chunk index type */
- if(H5D_layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
-
- /* Chunk index type expected depends on whether we are using the latest version of the format */
- if(low == H5F_LIBVER_LATEST) {
- /* Verify index type */
- if(idx_type != H5D_CHUNK_IDX_EARRAY) FAIL_PUTS_ERROR("should be using extensible array as index");
- } /* end if */
- else {
- /* Verify index type */
- if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
- } /* end else */
-
- /* Fill existing elements */
- fill = 1;
- H5V_array_fill(hs_size, &fill, sizeof(fill), EARRAY_MAX_RANK);
- for(u = 0; u < npoints; u++) {
- /* Compute the coordinate from the linear offset */
- if(H5V_array_calc_pre(u, ndims, dim, down, hs_offset) < 0) FAIL_STACK_ERROR
-
- /* Select a single element in the dataset */
- if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) FAIL_STACK_ERROR
-
- /* Read (unwritten) element from dataset */
- read_elem = 1;
- if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
-
- /* Verify unwritten element is fill value (0) */
- if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
-
- /* Write element to dataset */
- write_elem = (unsigned)u;
- if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
-
- /* Read element from dataset */
- read_elem = write_elem + 1;
- if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
-
- /* Verify written element is read in */
- if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
- } /* end for */
-
- /* Incrementally extend dataset and verify write/reads */
- while(dim[0] < EARRAY_MAX_EXTEND) {
- hssize_t snew_npoints; /* # of points in dataspace extent (signed) */
- hsize_t new_npoints; /* # of points in dataspace extent */
-
- /* Extend dataset */
- dim[0] += EARRAY_EXTEND_INCR;
- if(H5Dset_extent(dsid, dim) < 0) FAIL_STACK_ERROR
-
- /* Close old dataspace */
- if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
-
- /* Get dataspace for dataset now */
- if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR
-
- /* Get the new number of points in the dataspace */
- if((snew_npoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
- new_npoints = (hsize_t)snew_npoints;
-
- /* Fill new elements */
- for(u = npoints; u < new_npoints; u++) {
+ /* Loop over which dimension is unlimited */
+ for(unlim_dim = 0; unlim_dim < ndims; unlim_dim++) {
+ H5D_chunk_index_t idx_type; /* Dataset chunk index type */
+ hsize_t dim[EARRAY_MAX_RANK], max_dim[EARRAY_MAX_RANK]; /* Dataset dimensions */
+ hsize_t swizzled_dim[EARRAY_MAX_RANK]; /* Dimensions, with unlimited dimension moved to rank 0 */
+ hsize_t down[EARRAY_MAX_RANK]; /* 'down' sizes, for computing array index */
+ hsize_t hs_offset[EARRAY_MAX_RANK]; /* Hyperslab offset */
+ hssize_t snpoints; /* # of points in dataspace extent (signed) */
+ hsize_t npoints; /* # of points in dataspace extent */
+ unsigned write_elem, read_elem; /* Element written/read */
+ hsize_t u; /* Local index variable */
+
+ /* Create file */
+ if((fid = H5Fcreate(filename, H5F_ACC_TRUNC | (swmr ? H5F_ACC_SWMR_WRITE : 0), H5P_DEFAULT, my_fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Create n-D dataspace */
+ fill = EARRAY_DSET_DIM;
+ H5V_array_fill(dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
+ fill = EARRAY_DSET_DIM;
+ H5V_array_fill(max_dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
+ max_dim[unlim_dim] = H5S_UNLIMITED;
+ fill = EARRAY_DSET_DIM;
+ H5V_array_fill(swizzled_dim, &fill, sizeof(fill), EARRAY_MAX_RANK);
+ if((sid = H5Screate_simple((int)ndims, dim, max_dim)) < 0) FAIL_STACK_ERROR
+
+ /* Get the number of points in the dataspace */
+ if((snpoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
+ npoints = (hsize_t)snpoints;
+
+ /* Compute the "down" dimension values */
+ if(H5V_array_down(ndims, dim, down) < 0) FAIL_STACK_ERROR
+
+ /* Create chunked dataset */
+ if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the chunk index type */
+ if(H5D_layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
+
+ /* Chunk index type expected depends on whether we are using the latest version of the format */
+ if(low == H5F_LIBVER_LATEST) {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_EARRAY) FAIL_PUTS_ERROR("should be using extensible array as index");
+ } /* end if */
+ else {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ } /* end else */
+
+ /* Fill existing elements */
+ for(u = 0; u < npoints; u++) {
/* Compute the coordinate from the linear offset */
- if(H5V_array_calc(u, ndims, dim, hs_offset) < 0) FAIL_STACK_ERROR
+ if(H5V_array_calc_pre(u, ndims, dim, down, hs_offset) < 0) FAIL_STACK_ERROR
+
+ /* Un-swizzle hyperslab offset in same way as swizzled dimensions */
+ H5V_unswizzle_coords(hs_offset, unlim_dim);
/* Select a single element in the dataset */
if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) FAIL_STACK_ERROR
@@ -7130,83 +7098,139 @@ test_chunk_fast(hid_t fapl)
if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
} /* end for */
- /* Update the number of points in the dataspace */
- npoints = new_npoints;
- } /* end while */
+ /* Incrementally extend dataset and verify write/reads */
+ while(dim[unlim_dim] < EARRAY_MAX_EXTEND) {
+ hssize_t snew_npoints; /* # of points in dataspace extent (signed) */
+ hsize_t new_npoints; /* # of points in dataspace extent */
- /* Close everything */
- if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
- if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
- if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+ /* Extend dataset */
+ dim[unlim_dim] += EARRAY_EXTEND_INCR;
+ swizzled_dim[0] += EARRAY_EXTEND_INCR;
+ if(H5Dset_extent(dsid, dim) < 0) FAIL_STACK_ERROR
+ /* Close old dataspace */
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
- /* Re-open file & dataset */
- if((fid = H5Fopen(filename, H5F_ACC_RDONLY | (swmr ? H5F_ACC_SWMR_READ : 0), my_fapl)) < 0) FAIL_STACK_ERROR
+ /* Get dataspace for dataset now */
+ if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR
- /* Open dataset */
- if((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+ /* Get the new number of points in the dataspace */
+ if((snew_npoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
+ new_npoints = (hsize_t)snew_npoints;
- /* Get the chunk index type */
- if(H5D_layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
+ /* Fill new elements */
+ for(u = npoints; u < new_npoints; u++) {
+ /* Compute the coordinate from the linear offset */
+ if(H5V_array_calc(u, ndims, swizzled_dim, hs_offset) < 0) FAIL_STACK_ERROR
- /* Chunk index type expected depends on whether we are using the latest version of the format */
- if(low == H5F_LIBVER_LATEST) {
- /* Verify index type */
- if(idx_type != H5D_CHUNK_IDX_EARRAY) FAIL_PUTS_ERROR("should be using extensible array as index");
- } /* end if */
- else {
- /* Verify index type */
- if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
- } /* end else */
+ /* Un-swizzle hyperslab offset in same way as swizzled dimensions */
+ H5V_unswizzle_coords(hs_offset, unlim_dim);
- /* Get dataspace for dataset now */
- if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR
+ /* Select a single element in the dataset */
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) FAIL_STACK_ERROR
- /* Get the number of points in the dataspace */
- if((snpoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
- npoints = (hsize_t)snpoints;
+ /* Read (unwritten) element from dataset */
+ read_elem = 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
- /* Get the current dimensions */
- if(H5Sget_simple_extent_dims(sid, dim, NULL) < 0) FAIL_STACK_ERROR
+ /* Verify unwritten element is fill value (0) */
+ if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
- /* Compute the "down" dimension values */
- if(H5V_array_down(ndims, dim, down) < 0) FAIL_STACK_ERROR
+ /* Write element to dataset */
+ write_elem = (unsigned)u;
+ if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
- /* Read elements */
- for(u = 0; u < npoints; u++) {
- /* Compute the coordinate from the linear offset */
- if(H5V_array_calc_pre(u, ndims, dim, down, hs_offset) < 0) FAIL_STACK_ERROR
+ /* Read element from dataset */
+ read_elem = write_elem + 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
- /* Select a single element in the dataset */
- if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) FAIL_STACK_ERROR
+ /* Verify written element is read in */
+ if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
+ } /* end for */
- /* Read written element from dataset */
- read_elem = (unsigned)(u + 1);
- if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+ /* Update the number of points in the dataspace */
+ npoints = new_npoints;
+ } /* end while */
- /* Verify written element is correct */
- if(read_elem != u) FAIL_PUTS_ERROR("invalid element read");
- } /* end for */
+ /* Close everything */
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
- /* Close everything */
- if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
- if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
- if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+ /* Re-open file & dataset */
+ if((fid = H5Fopen(filename, H5F_ACC_RDONLY | (swmr ? H5F_ACC_SWMR_READ : 0), my_fapl)) < 0) FAIL_STACK_ERROR
- /* Re-open file */
- if((fid = H5Fopen(filename, H5F_ACC_RDWR, my_fapl)) < 0) FAIL_STACK_ERROR
+ /* Open dataset */
+ if((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
- /* Delete dataset */
- if(H5Ldelete(fid, "dset", H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+ /* Get the chunk index type */
+ if(H5D_layout_idx_type_test(dsid, &idx_type) < 0) FAIL_STACK_ERROR
- /* Close everything */
- if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+ /* Chunk index type expected depends on whether we are using the latest version of the format */
+ if(low == H5F_LIBVER_LATEST) {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_EARRAY) FAIL_PUTS_ERROR("should be using extensible array as index");
+ } /* end if */
+ else {
+ /* Verify index type */
+ if(idx_type != H5D_CHUNK_IDX_BTREE) FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ } /* end else */
- } /* end for */
+ /* Get dataspace for dataset now */
+ if((sid = H5Dget_space(dsid)) < 0) FAIL_STACK_ERROR
- /* Close everything */
- if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
+ /* Get the number of points in the dataspace */
+ if((snpoints = H5Sget_simple_extent_npoints(sid)) < 0) FAIL_STACK_ERROR
+ npoints = (hsize_t)snpoints;
+
+ /* Get the current dimensions into swizzled_dim array */
+ if(H5Sget_simple_extent_dims(sid, swizzled_dim, NULL) < 0) FAIL_STACK_ERROR
+
+ /* Generate the swizzled dimensions */
+ H5V_swizzle_coords(swizzled_dim, unlim_dim);
+
+ /* Compute the "down" dimension values */
+ if(H5V_array_down(ndims, swizzled_dim, down) < 0) FAIL_STACK_ERROR
+
+ /* Read elements */
+ for(u = 0; u < npoints; u++) {
+ /* Compute the coordinate from the linear offset */
+ if(H5V_array_calc_pre(u, ndims, swizzled_dim, down, hs_offset) < 0) FAIL_STACK_ERROR
+
+ /* Unswizzle hyperslab offset in same way as swizzled dimensions */
+ H5V_unswizzle_coords(hs_offset, unlim_dim);
+
+ /* Select a single element in the dataset */
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0) FAIL_STACK_ERROR
+
+ /* Read written element from dataset */
+ read_elem = (unsigned)(u + 1);
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify written element is correct */
+ if(read_elem != u) FAIL_PUTS_ERROR("invalid element read");
+ } /* end for */
+
+ /* Close everything */
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+
+
+ /* Re-open file */
+ if((fid = H5Fopen(filename, H5F_ACC_RDWR, my_fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Delete dataset */
+ if(H5Ldelete(fid, "dset", H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+
+ /* Close everything */
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+ } /* end for */
+
+ /* Close everything */
+ if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
+ } /* end for */
} /* end for */
#ifdef H5_HAVE_FILTER_DEFLATE
} /* end for */
diff --git a/test/swmr_common.c b/test/swmr_common.c
index bb75c0d..41a24ac 100644
--- a/test/swmr_common.c
+++ b/test/swmr_common.c
@@ -61,12 +61,12 @@ generate_symbols(void)
unsigned u, v; /* Local index variables */
for(u = 0; u < NLEVELS; u++) {
- symbol_info[u] = malloc(symbol_count[u] * sizeof(symbol_info_t));
+ symbol_info[u] = (symbol_info_t *)malloc(symbol_count[u] * sizeof(symbol_info_t));
for(v = 0; v < symbol_count[u]; v++) {
char name_buf[64];
generate_name(name_buf, u, v);
- symbol_info[u][v].name = malloc(strlen(name_buf) + 1);
+ symbol_info[u][v].name = (char *)malloc(strlen(name_buf) + 1);
strcpy(symbol_info[u][v].name, name_buf);
symbol_info[u][v].dsid = -1;
symbol_info[u][v].nrecords = 0;
diff --git a/test/swmr_reader.c b/test/swmr_reader.c
index 7045954..9c9cd1e 100644
--- a/test/swmr_reader.c
+++ b/test/swmr_reader.c
@@ -13,7 +13,7 @@ check_dataset(hid_t fid, unsigned verbose, const char *sym_name, symbol_t *recor
hsize_t start, count = 1; /* Hyperslab selection values */
/* Open dataset for symbol */
- if((dsid = H5Dopen(fid, sym_name, H5P_DEFAULT)) < 0)
+ if((dsid = H5Dopen2(fid, sym_name, H5P_DEFAULT)) < 0)
return(-1);
/* Get the dataset's dataspace */