author    Raymond Lu <songyulu@hdfgroup.org>  2011-04-11 15:59:41 (GMT)
committer Raymond Lu <songyulu@hdfgroup.org>  2011-04-11 15:59:41 (GMT)
commit    a67dbbaf810f44b7b05a72fa38fa1da621651896 (patch)
tree      b73ce8a66ddf048d9d14247525e860ea77e9ff59 /test
parent    338beff4e4744011d703c4ffecb58f2c2a34b292 (diff)
[svn-r20469] Bug 1386 - allow a dimension size to be zero even though it is not unlimited. This is a follow-up check-in for r20440:
1. Added a test case that extends a dataset of zero dimension size and then shrinks it back to zero dimension size.
2. Updated the Makefiles to include the new data file in the cleanup list.
Tested on jam - relatively simple.
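For context, a minimal sketch of the pattern the new test exercises: a dataspace whose current dimension size is zero while its maximum size is finite (not H5S_UNLIMITED), extended to full size, written, and shrunk back to zero. The file name, dataset name, and sizes below are hypothetical, and error checking is omitted; the actual test is test_h5s_zero_dim() in test/th5s.c. A chunked layout is used here so H5Dset_extent can change the extent.

    #include "hdf5.h"

    int main(void)
    {
        hsize_t dims[1]    = {0};   /* current size: zero elements */
        hsize_t maxdims[1] = {4};   /* finite maximum -- NOT H5S_UNLIMITED */
        hsize_t chunk[1]   = {4};
        int     wdata[4]   = {1, 2, 3, 4};

        hid_t file = H5Fcreate("zero_dim_sketch.h5", H5F_ACC_TRUNC,
                               H5P_DEFAULT, H5P_DEFAULT);

        /* Before the r20440 fix, a zero current size was only accepted
         * for unlimited dimensions */
        hid_t space = H5Screate_simple(1, dims, maxdims);

        /* Chunked layout so the extent can be changed later */
        hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
        H5Pset_chunk(dcpl, 1, chunk);

        hid_t dset = H5Dcreate2(file, "zero_dim", H5T_NATIVE_INT, space,
                                H5P_DEFAULT, dcpl, H5P_DEFAULT);

        /* Extend from zero to the maximum size and write data ... */
        dims[0] = 4;
        H5Dset_extent(dset, dims);
        H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);

        /* ... then shrink back to zero; the dataset holds no elements again */
        dims[0] = 0;
        H5Dset_extent(dset, dims);

        H5Pclose(dcpl);
        H5Dclose(dset);
        H5Sclose(space);
        H5Fclose(file);
        return 0;
    }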
Diffstat (limited to 'test')
-rw-r--r--  test/Makefile.am |  2
-rw-r--r--  test/Makefile.in |  2
-rw-r--r--  test/th5s.c      | 69
3 files changed, 65 insertions(+), 8 deletions(-)
diff --git a/test/Makefile.am b/test/Makefile.am
index 6abd20b..718092e 100644
--- a/test/Makefile.am
+++ b/test/Makefile.am
@@ -108,7 +108,7 @@ CHECK_CLEANFILES+=accum.h5 cmpd_dset.h5 compact_dataset.h5 dataset.h5 dset_offse
max_compact_dataset.h5 simple.h5 set_local.h5 random_chunks.h5 \
huge_chunks.h5 chunk_cache.h5 big_chunk.h5 chunk_expand.h5 \
copy_dcpl_newfile.h5 extend.h5 istore.h5 extlinks*.h5 frspace.h5 links*.h5 \
- sys_file1 tfile[1-5].h5 th5s[1-3].h5 lheap.h5 fheap.h5 ohdr.h5 \
+ sys_file1 tfile[1-5].h5 th5s[1-4].h5 lheap.h5 fheap.h5 ohdr.h5 \
stab.h5 extern_[1-3].h5 extern_[1-4][ab].raw gheap[0-4].h5 \
dt_arith[1-2] links.h5 links[0-6]*.h5 extlinks[0-15].h5 tmp \
big.data big[0-9][0-9][0-9][0-9][0-9].h5 \
diff --git a/test/Makefile.in b/test/Makefile.in
index 98fa754..3c2a57a 100644
--- a/test/Makefile.in
+++ b/test/Makefile.in
@@ -714,7 +714,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog accum.h5 cmpd_dset.h5 \
max_compact_dataset.h5 simple.h5 set_local.h5 random_chunks.h5 \
huge_chunks.h5 chunk_cache.h5 big_chunk.h5 chunk_expand.h5 \
copy_dcpl_newfile.h5 extend.h5 istore.h5 extlinks*.h5 \
- frspace.h5 links*.h5 sys_file1 tfile[1-5].h5 th5s[1-3].h5 \
+ frspace.h5 links*.h5 sys_file1 tfile[1-5].h5 th5s[1-4].h5 \
lheap.h5 fheap.h5 ohdr.h5 stab.h5 extern_[1-3].h5 \
extern_[1-4][ab].raw gheap[0-4].h5 dt_arith[1-2] links.h5 \
links[0-6]*.h5 extlinks[0-15].h5 tmp big.data \
diff --git a/test/th5s.c b/test/th5s.c
index 2aa4a92..2435fc9 100644
--- a/test/th5s.c
+++ b/test/th5s.c
@@ -528,6 +528,7 @@ test_h5s_zero_dim(void)
hid_t attr; /* Attribute ID */
int rank; /* Logical rank of dataspace */
hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t extend_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t chunk_dims[] = {SPACE1_DIM1, SPACE1_DIM2/3, SPACE1_DIM3};
hsize_t tdims[SPACE1_RANK]; /* Dimension array to test with */
int wdata[SPACE1_DIM2][SPACE1_DIM3];
@@ -610,6 +611,12 @@ test_h5s_zero_dim(void)
rdata_short[i][j] = 7;
}
+ for(i=0; i<SPACE1_DIM1; i++)
+ for(j=0; j<SPACE1_DIM2; j++)
+ for(k=0; k<SPACE1_DIM3; k++)
+ wdata_real[i][j][k] = i + j + k;
+
+
/* Contiguous dataset */
dset1 = H5Dcreate2(fid1, BASICDATASET, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dset1, FAIL, "H5Dcreate2");
@@ -721,6 +728,54 @@ test_h5s_zero_dim(void)
}
}
+ /* Now extend the dataset and make sure we can write data to it */
+ ret = H5Dset_extent(dset1, extend_dims);
+ CHECK(ret, FAIL, "H5Dset_extent");
+
+ ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata_real);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+ CHECK(ret, FAIL, "H5Fflush");
+
+ ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata_real);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check results */
+ for(i=0; i<SPACE1_DIM1; i++) {
+ for(j=0; j<SPACE1_DIM2; j++) {
+ for(k=0; k<SPACE1_DIM3; k++) {
+ if(rdata_real[i][j][k] != wdata_real[i][j][k]) {
+ H5_FAILED();
+ printf("element [%d][%d][%d] is %d but should have been %d\n",
+ i, j, k, rdata_real[i][j][k], wdata_real[i][j][k]);
+ }
+ }
+ }
+ }
+
+ /* Now shrink the dataset to 0 dimension size and make sure no data is in it */
+ extend_dims[0] = 0;
+ ret = H5Dset_extent(dset1, extend_dims);
+ CHECK(ret, FAIL, "H5Dset_extent");
+
+ ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
+ CHECK(ret, FAIL, "H5Fflush");
+
+ /* Try reading from the dataset (make certain our buffer is unmodified) */
+ ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Check results */
+ for(i=0; i<SPACE1_DIM2; i++)
+ for(j=0; j<SPACE1_DIM3; j++) {
+ if(rdata[i][j] != 7) {
+ H5_FAILED();
+ printf("element [%d][%d] is %d but should have been 7\n",
+ i, j, rdata[i][j]);
+ }
+ }
+
ret = H5Pclose(plist_id);
CHECK(ret, FAIL, "H5Pclose");
@@ -883,12 +938,6 @@ test_h5s_zero_dim(void)
dset1 = H5Dcreate2(fid1, BASICDATASET4, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dset1, FAIL, "H5Dcreate2");
- for(i=0; i<SPACE1_DIM1; i++)
- for(j=0; j<SPACE1_DIM2; j++)
- for(k=0; k<SPACE1_DIM3; k++)
- wdata_real[i][j][k] = i + j + k;
-
-
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata_real);
CHECK(ret, FAIL, "H5Dwrite");
@@ -969,6 +1018,13 @@ test_h5s_zero_dim(void)
nelem = H5Sget_simple_extent_npoints(sid1);
VERIFY(nelem, 0, "H5Sget_simple_extent_npoints");
+ /* Verify the dimension sizes are correct */
+ rank = H5Sget_simple_extent_dims(sid1, tdims, NULL);
+ CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
+ VERIFY(tdims[0], 0, "H5Sget_simple_extent_dims");
+ VERIFY(tdims[1], SPACE1_DIM2, "H5Sget_simple_extent_dims");
+ VERIFY(tdims[2], SPACE1_DIM3, "H5Sget_simple_extent_dims");
+
/* Try reading from the dataset (make certain our buffer is unmodified) */
ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
CHECK(ret, FAIL, "H5Dread");
@@ -2139,4 +2195,5 @@ cleanup_h5s(void)
remove(DATAFILE);
remove(NULLFILE);
remove(BASICFILE);
+ remove(ZEROFILE);
}