author    Quincey Koziol <koziol@hdfgroup.org>    2007-06-19 17:34:39 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2007-06-19 17:34:39 (GMT)
commit    0e2ef3fdb7246ea3b4b5342abf2aeef86511ea1f (patch)
tree      a0ed3504ac3f7104c3a24f14934f53eafbb762f8 /test
parent    673ee09a6299f2a4099da8869534b4aab8b2951e (diff)
[svn-r13882] Description:
    Fixed bugs w/VL-datatype fill values for chunked dataset storage.

Tested on:
    Mac OS X/32 10.4.9 (amazon)
    FreeBSD/32 6.2 (duty)
    FreeBSD/64 6.2 (liberty)
    Linux/32 2.6 (chicago)
    Linux/64 2.6 (chicago2)
Diffstat (limited to 'test')
-rw-r--r--    test/tvltypes.c    756
1 file changed, 332 insertions, 424 deletions
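
For context before the diff: the updated test exercises variable-length (VL) fill values on chunked datasets, making a second pass over deflate-compressed chunks when H5_HAVE_FILTER_DEFLATE is defined. The standalone sketch below shows the basic pattern the test relies on. It is illustrative only: the file name, dataset name, dimensions, and deflate level are made up here, error checking is omitted, and the five-argument H5Dcreate call matches the pre-1.8 API used in tvltypes.c.

/* Minimal sketch: attach a VL string fill value to a chunked dataset.
 * Illustrative only; names and sizes are hypothetical and error
 * checking is omitted for brevity. */
#include "hdf5.h"

int
main(void)
{
    hid_t       file_id, dspace_id, dtype_id, dcpl_id, dset_id;
    hsize_t     dims[1]  = {32};
    hsize_t     chunk[1] = {8};
    const char *fill     = "foobar";    /* VL string fill value */

    /* Variable-length string datatype */
    dtype_id = H5Tcopy(H5T_C_S1);
    H5Tset_size(dtype_id, H5T_VARIABLE);

    /* Chunked layout with the VL fill value; allocate space early so
     * the fill value is actually written into every chunk */
    dcpl_id = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl_id, 1, chunk);
#ifdef H5_HAVE_FILTER_DEFLATE
    H5Pset_deflate(dcpl_id, 3);         /* optional compressed-chunk case */
#endif /* H5_HAVE_FILTER_DEFLATE */
    H5Pset_fill_value(dcpl_id, dtype_id, &fill);
    H5Pset_fill_time(dcpl_id, H5D_FILL_TIME_IFSET);
    H5Pset_alloc_time(dcpl_id, H5D_ALLOC_TIME_EARLY);

    /* Create the file and the chunked dataset (pre-1.8, five-argument
     * H5Dcreate, as used throughout tvltypes.c) */
    file_id   = H5Fcreate("vl_fill_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    dspace_id = H5Screate_simple(1, dims, NULL);
    dset_id   = H5Dcreate(file_id, "dataset-chunked", dtype_id, dspace_id, dcpl_id);

    /* Reading the dataset back at this point should return "foobar" in
     * every element; buffers returned for VL data must later be freed
     * with H5Dvlen_reclaim(), as the test does. */

    H5Dclose(dset_id);
    H5Pclose(dcpl_id);
    H5Sclose(dspace_id);
    H5Tclose(dtype_id);
    H5Fclose(file_id);
    return 0;
}

With H5D_ALLOC_TIME_EARLY plus H5D_FILL_TIME_IFSET, the library writes the fill value into the chunks at creation time, which is the code path the chunked-storage fix in this commit targets.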
diff --git a/test/tvltypes.c b/test/tvltypes.c
index 48ed402..15af456 100644
--- a/test/tvltypes.c
+++ b/test/tvltypes.c
@@ -2538,99 +2538,91 @@ test_vltypes_fill_value(void)
CHECK(file_id, FAIL, "H5Fcreate");
/* Create datasets with different storage layouts */
-#ifdef QAK
-HDfprintf(stderr, "Before creating datasets\n");
-#endif /* QAK */
-HDfprintf(stderr, "Uncomment loop!\n");
-/* for(layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) { */
- for(layout = H5D_COMPACT; layout <= H5D_CONTIGUOUS; layout++) {
- hid_t tmp_dcpl_id; /* Temporary copy of the dataset creation property list */
-
- /* Make a copy of the dataset creation property list */
- tmp_dcpl_id = H5Pcopy(dcpl_id);
- CHECK(tmp_dcpl_id, FAIL, "H5Pcopy");
-
- /* Layout specific actions */
- switch(layout) {
- case H5D_COMPACT:
- HDstrcpy(dset_name1, "dataset1-compact");
- HDstrcpy(dset_name2, "dataset2-compact");
- dset_dspace_id = small_dspace_id;
- ret = H5Pset_layout(tmp_dcpl_id, H5D_COMPACT);
- CHECK(ret, FAIL, "H5Pset_layout");
- break;
-
- case H5D_CONTIGUOUS:
- HDstrcpy(dset_name1, "dataset1-contig");
- HDstrcpy(dset_name2, "dataset2-contig");
- dset_dspace_id = large_dspace_id;
- break;
-
- case H5D_CHUNKED:
- {
- hsize_t chunk_dims[1] = {SPACE4_DIM_LARGE / 4};
-
- HDstrcpy(dset_name1, "dataset1-chunked");
- HDstrcpy(dset_name2, "dataset2-chunked");
+ for(layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) {
+ unsigned compress_loop; /* # of times to run loop, for testing compressed chunked dataset */
+ unsigned test_loop; /* Loop over datasets */
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if(layout == H5D_CHUNKED)
+ compress_loop = 2;
+ else
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ compress_loop = 1;
+
+ /* Loop over dataset operations */
+ for(test_loop = 0; test_loop < compress_loop; test_loop++) {
+ hid_t tmp_dcpl_id; /* Temporary copy of the dataset creation property list */
+
+ /* Make a copy of the dataset creation property list */
+ tmp_dcpl_id = H5Pcopy(dcpl_id);
+ CHECK(tmp_dcpl_id, FAIL, "H5Pcopy");
+
+ /* Layout specific actions */
+ switch(layout) {
+ case H5D_COMPACT:
+ HDstrcpy(dset_name1, "dataset1-compact");
+ HDstrcpy(dset_name2, "dataset2-compact");
+ dset_dspace_id = small_dspace_id;
+ ret = H5Pset_layout(tmp_dcpl_id, H5D_COMPACT);
+ CHECK(ret, FAIL, "H5Pset_layout");
+ break;
+
+ case H5D_CONTIGUOUS:
+ HDstrcpy(dset_name1, "dataset1-contig");
+ HDstrcpy(dset_name2, "dataset2-contig");
dset_dspace_id = large_dspace_id;
- ret = H5Pset_chunk(tmp_dcpl_id, 1, chunk_dims);
- CHECK(ret, FAIL, "H5Pset_chunk");
- }
- break;
- } /* end switch */
-
- /* Create first data set with default setting - no space is allocated */
-#ifdef QAK
-HDfprintf(stderr, "Before creating first dataset: '%s'\n", dset_name1);
-#endif /* QAK */
- dset_id = H5Dcreate(file_id, dset_name1, dtype1_id, dset_dspace_id, tmp_dcpl_id);
-#ifdef QAK
-HDfprintf(stderr, "After creating first dataset\n");
-#endif /* QAK */
- CHECK(dset_id, FAIL, "H5Dcreate");
-
-#ifdef QAK
-HDfprintf(stderr, "Before closing first dataset\n");
-#endif /* QAK */
- ret = H5Dclose(dset_id);
-#ifdef QAK
-HDfprintf(stderr, "After closing first dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dclose");
-
-
-#ifndef QAK2
- /* Create a second data set with space allocated and fill value written */
- ret = H5Pset_fill_time(tmp_dcpl_id, H5D_FILL_TIME_IFSET);
- CHECK(ret, FAIL, "H5Pset_fill_time");
-
- ret = H5Pset_alloc_time(tmp_dcpl_id, H5D_ALLOC_TIME_EARLY);
- CHECK(ret, FAIL, "H5Pset_alloc_time");
-
-#ifdef QAK
-HDfprintf(stderr, "Before creating second dataset: '%s'\n", dset_name2);
-#endif /* QAK */
- dset_id = H5Dcreate(file_id, dset_name2, dtype1_id, dset_dspace_id, tmp_dcpl_id);
-#ifdef QAK
-HDfprintf(stderr, "After creating second dataset\n");
-#endif /* QAK */
- CHECK(dset_id, FAIL, "H5Dcreate");
-
-#ifdef QAK
-HDfprintf(stderr, "Before closing second dataset\n");
-#endif /* QAK */
- ret = H5Dclose(dset_id);
-#ifdef QAK
-HDfprintf(stderr, "After closing second dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dclose");
-#else /* QAK2 */
-HDfprintf(stderr, "Uncomment test for second dataset!\n");
-#endif /* QAK2 */
-
- /* Close temporary DCPL */
- ret = H5Pclose(tmp_dcpl_id);
- CHECK(ret, FAIL, "H5Pclose");
+ break;
+
+ case H5D_CHUNKED:
+ {
+ hsize_t chunk_dims[1] = {SPACE4_DIM_LARGE / 4};
+
+ dset_dspace_id = large_dspace_id;
+ ret = H5Pset_chunk(tmp_dcpl_id, 1, chunk_dims);
+ CHECK(ret, FAIL, "H5Pset_chunk");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if(test_loop == 1) {
+ HDstrcpy(dset_name1, "dataset1-chunked-compressed");
+ HDstrcpy(dset_name2, "dataset2-chunked-compressed");
+ ret = H5Pset_deflate(tmp_dcpl_id, 3);
+ CHECK(ret, FAIL, "H5Pset_deflate");
+ } /* end if */
+ else {
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ HDstrcpy(dset_name1, "dataset1-chunked");
+ HDstrcpy(dset_name2, "dataset2-chunked");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ } /* end else */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ }
+ break;
+ } /* end switch */
+
+ /* Create first data set with default setting - no space is allocated */
+ dset_id = H5Dcreate(file_id, dset_name1, dtype1_id, dset_dspace_id, tmp_dcpl_id);
+ CHECK(dset_id, FAIL, "H5Dcreate");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+
+ /* Create a second data set with space allocated and fill value written */
+ ret = H5Pset_fill_time(tmp_dcpl_id, H5D_FILL_TIME_IFSET);
+ CHECK(ret, FAIL, "H5Pset_fill_time");
+
+ ret = H5Pset_alloc_time(tmp_dcpl_id, H5D_ALLOC_TIME_EARLY);
+ CHECK(ret, FAIL, "H5Pset_alloc_time");
+
+ dset_id = H5Dcreate(file_id, dset_name2, dtype1_id, dset_dspace_id, tmp_dcpl_id);
+ CHECK(dset_id, FAIL, "H5Dcreate");
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Close temporary DCPL */
+ ret = H5Pclose(tmp_dcpl_id);
+ CHECK(ret, FAIL, "H5Pclose");
+ } /* end for */
} /* end for */
ret = H5Fclose(file_id);
@@ -2652,358 +2644,278 @@ HDfprintf(stderr, "Uncomment test for second dataset!\n");
CHECK(file_id, FAIL, "H5Fopen");
/* Read datasets with different storage layouts */
-#ifdef QAK
-HDfprintf(stderr, "Before testing empty reads\n");
-#endif /* QAK */
-HDfprintf(stderr, "Uncomment loop!\n");
-/* for(layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) { */
- for(layout = H5D_COMPACT; layout <= H5D_CONTIGUOUS; layout++) {
-
- /* Layout specific actions */
- switch(layout) {
- case H5D_COMPACT:
- HDstrcpy(dset_name1, "dataset1-compact");
- HDstrcpy(dset_name2, "dataset2-compact");
- dset_dspace_id = small_dspace_id;
- dset_elmts = SPACE4_DIM_SMALL;
- break;
-
- case H5D_CONTIGUOUS:
- HDstrcpy(dset_name1, "dataset1-contig");
- HDstrcpy(dset_name2, "dataset2-contig");
- dset_dspace_id = large_dspace_id;
- dset_elmts = SPACE4_DIM_LARGE;
- break;
-
- case H5D_CHUNKED:
- HDstrcpy(dset_name1, "dataset1-chunked");
- HDstrcpy(dset_name2, "dataset2-chunked");
- dset_dspace_id = large_dspace_id;
- dset_elmts = SPACE4_DIM_LARGE;
- break;
- } /* end switch */
-
- /* Open first data set */
-#ifdef QAK
-HDfprintf(stderr, "Before opening first dataset: '%s'\n", dset_name1);
-#endif /* QAK */
- dset_id = H5Dopen(file_id, dset_name1);
-#ifdef QAK
-HDfprintf(stderr, "After opening first dataset\n");
-#endif /* QAK */
- CHECK(dset_id, FAIL, "H5Dopen");
-
- /* Read in the data of fill value */
-#ifdef QAK
-HDfprintf(stderr, "Before reading from first dataset\n");
-#endif /* QAK */
- ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
-#ifdef QAK
-HDfprintf(stderr, "After reading from first dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dread");
-
- /* Compare data read in */
- for(i = 0; i < dset_elmts; i++) {
- if(HDstrcmp(rbuf[i].str_id, "foobar")
- || HDstrcmp(rbuf[i].str_name, "")
- || rbuf[i].str_desc
- || HDstrcmp(rbuf[i].str_orig, "\0")
- || HDstrcmp(rbuf[i].str_stat, "dead")
- || HDstrcmp(rbuf[i].str_form, "liquid")
- || HDstrcmp(rbuf[i].str_unit, "meter")) {
- TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
- continue;
- } /* end if */
- } /* end for */
-
-#ifdef QAK
-HDfprintf(stderr, "Before closing first dataset\n");
-#endif /* QAK */
- ret = H5Dclose(dset_id);
-#ifdef QAK
-HDfprintf(stderr, "After closing first dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dclose");
-
- /* Release the space */
-#ifdef QAK
-HDfprintf(stderr, "Before reclaiming space\n");
-#endif /* QAK */
- ret = H5Dvlen_reclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
-#ifdef QAK
-HDfprintf(stderr, "After reclaiming space\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dvlen_reclaim");
-
-
-#ifndef QAK2
- /* Open the second data set to check the value of data */
-#ifdef QAK
-HDfprintf(stderr, "Before opening second dataset: '%s'\n", dset_name2);
-#endif /* QAK */
- dset_id = H5Dopen(file_id, dset_name2);
-#ifdef QAK
-HDfprintf(stderr, "After opening second dataset\n");
-#endif /* QAK */
- CHECK(dset_id, FAIL, "H5Dopen");
-
-#ifdef QAK
-HDfprintf(stderr, "Before reading from second dataset\n");
-#endif /* QAK */
- ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
-#ifdef QAK
-HDfprintf(stderr, "After reading from second dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dread");
-
- /* Compare data read in */
- for(i = 0; i < dset_elmts; i++) {
- if(HDstrcmp(rbuf[i].str_id, "foobar")
- || HDstrcmp(rbuf[i].str_name, "")
- || rbuf[i].str_desc
- || HDstrcmp(rbuf[i].str_orig, "\0")
- || HDstrcmp(rbuf[i].str_stat, "dead")
- || HDstrcmp(rbuf[i].str_form, "liquid")
- || HDstrcmp(rbuf[i].str_unit, "meter")) {
- TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
- continue;
- } /* end if */
- } /* end for */
-
-#ifdef QAK
-HDfprintf(stderr, "Before closing second dataset\n");
-#endif /* QAK */
- ret = H5Dclose(dset_id);
-#ifdef QAK
-HDfprintf(stderr, "After closing second dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dclose");
-
- /* Release the space */
-#ifdef QAK
-HDfprintf(stderr, "Before reclaiming space\n");
-#endif /* QAK */
- ret = H5Dvlen_reclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
-#ifdef QAK
-HDfprintf(stderr, "After reclaiming space\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dvlen_reclaim");
-#else /* QAK2 */
-HDfprintf(stderr, "Uncomment test for second dataset!\n");
-#endif /* QAK2 */
- } /* end for */
-
- ret = H5Fclose(file_id);
- CHECK(ret, FAIL, "H5Fclose");
+ for(layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) {
+ unsigned compress_loop; /* # of times to run loop, for testing compressed chunked dataset */
+ unsigned test_loop; /* Loop over datasets */
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if(layout == H5D_CHUNKED)
+ compress_loop = 2;
+ else
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ compress_loop = 1;
+
+ /* Loop over dataset operations */
+ for(test_loop = 0; test_loop < compress_loop; test_loop++) {
+
+ /* Layout specific actions */
+ switch(layout) {
+ case H5D_COMPACT:
+ HDstrcpy(dset_name1, "dataset1-compact");
+ HDstrcpy(dset_name2, "dataset2-compact");
+ dset_dspace_id = small_dspace_id;
+ dset_elmts = SPACE4_DIM_SMALL;
+ break;
+
+ case H5D_CONTIGUOUS:
+ HDstrcpy(dset_name1, "dataset1-contig");
+ HDstrcpy(dset_name2, "dataset2-contig");
+ dset_dspace_id = large_dspace_id;
+ dset_elmts = SPACE4_DIM_LARGE;
+ break;
+
+ case H5D_CHUNKED:
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if(test_loop == 1) {
+ HDstrcpy(dset_name1, "dataset1-chunked-compressed");
+ HDstrcpy(dset_name2, "dataset2-chunked-compressed");
+ } /* end if */
+ else {
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ HDstrcpy(dset_name1, "dataset1-chunked");
+ HDstrcpy(dset_name2, "dataset2-chunked");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ } /* end else */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ dset_dspace_id = large_dspace_id;
+ dset_elmts = SPACE4_DIM_LARGE;
+ break;
+ } /* end switch */
+ /* Open first data set */
+ dset_id = H5Dopen(file_id, dset_name1);
+ CHECK(dset_id, FAIL, "H5Dopen");
- /* Open the file to check data set value */
- file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
- CHECK(file_id, FAIL, "H5Fopen");
+ /* Read in the data of fill value */
+ ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
- /* Write one element & fill values to datasets with different storage layouts */
-#ifdef QAK
-HDfprintf(stderr, "Before testing single element writes\n");
-#endif /* QAK */
-HDfprintf(stderr, "Uncomment loop!\n");
-/* for(layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) { */
- for(layout = H5D_COMPACT; layout <= H5D_CONTIGUOUS; layout++) {
-
- /* Layout specific actions */
- switch(layout) {
- case H5D_COMPACT:
- HDstrcpy(dset_name1, "dataset1-compact");
- HDstrcpy(dset_name2, "dataset2-compact");
- dset_dspace_id = small_dspace_id;
- dset_elmts = SPACE4_DIM_SMALL;
- break;
-
- case H5D_CONTIGUOUS:
- HDstrcpy(dset_name1, "dataset1-contig");
- HDstrcpy(dset_name2, "dataset2-contig");
- dset_dspace_id = large_dspace_id;
- dset_elmts = SPACE4_DIM_LARGE;
- break;
-
- case H5D_CHUNKED:
- HDstrcpy(dset_name1, "dataset1-chunked");
- HDstrcpy(dset_name2, "dataset2-chunked");
- dset_dspace_id = large_dspace_id;
- dset_elmts = SPACE4_DIM_LARGE;
- break;
- } /* end switch */
-
- /* Copy the dataset's dataspace */
- single_dspace_id = H5Scopy(dset_dspace_id);
- CHECK(single_dspace_id, FAIL, "H5Scopy");
-
- /* Set a single element in the dataspace */
- ret = H5Sselect_hyperslab(single_dspace_id, H5S_SELECT_SET, single_offset,
- NULL, single_block, NULL);
- CHECK(ret, FAIL, "H5Sselect_hyperslab");
-
- /* Open first data set */
-#ifdef QAK
-HDfprintf(stderr, "Before opening first dataset: '%s'\n", dset_name1);
-#endif /* QAK */
- dset_id = H5Dopen(file_id, dset_name1);
-#ifdef QAK
-HDfprintf(stderr, "After opening first dataset\n");
-#endif /* QAK */
- CHECK(dset_id, FAIL, "H5Dopen");
-
- /* Write one element in the dataset */
-#ifdef QAK
-HDfprintf(stderr, "Before writing to first dataset\n");
-#endif /* QAK */
- ret = H5Dwrite(dset_id, dtype1_id, scalar_dspace_id, single_dspace_id, xfer_pid, &wdata);
-#ifdef QAK
-HDfprintf(stderr, "After writing to first dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dwrite");
-
-#ifdef QAK
-HDfprintf(stderr, "Before reading from first dataset\n");
-#endif /* QAK */
- ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
-#ifdef QAK
-HDfprintf(stderr, "After reading from first dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dread");
-
- /* Compare data read in */
- for(i = 0; i < dset_elmts; i++) {
- if(i == single_offset[0]) {
- if(HDstrcmp(rbuf[i].str_id, wdata.str_id)
- || rbuf[i].str_name
- || HDstrcmp(rbuf[i].str_desc, wdata.str_desc)
- || HDstrcmp(rbuf[i].str_orig, wdata.str_orig)
- || HDstrcmp(rbuf[i].str_stat, wdata.str_stat)
- || HDstrcmp(rbuf[i].str_form, wdata.str_form)
- || HDstrcmp(rbuf[i].str_unit, wdata.str_unit)) {
- TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
- continue;
- } /* end if */
- } /* end if */
- else {
+ /* Compare data read in */
+ for(i = 0; i < dset_elmts; i++) {
if(HDstrcmp(rbuf[i].str_id, "foobar")
|| HDstrcmp(rbuf[i].str_name, "")
|| rbuf[i].str_desc
- || HDstrcmp(rbuf[i].str_orig,"\0")
+ || HDstrcmp(rbuf[i].str_orig, "\0")
|| HDstrcmp(rbuf[i].str_stat, "dead")
|| HDstrcmp(rbuf[i].str_form, "liquid")
|| HDstrcmp(rbuf[i].str_unit, "meter")) {
- TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
+ TestErrPrintf("%d: VL data doesn't match!, index(i) = %d\n", __LINE__, (int)i);
continue;
} /* end if */
- } /* end if */
- } /* end for */
+ } /* end for */
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
-#ifdef QAK
-HDfprintf(stderr, "Before closing first dataset\n");
-#endif /* QAK */
- ret = H5Dclose(dset_id);
-#ifdef QAK
-HDfprintf(stderr, "After closing first dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dclose");
-
- /* Release the space */
-#ifdef QAK
-HDfprintf(stderr, "Before reclaiming space\n");
-#endif /* QAK */
- ret = H5Dvlen_reclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
-#ifdef QAK
-HDfprintf(stderr, "After reclaiming space\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dvlen_reclaim");
-
-
-#ifndef QAK2
- /* Open the second data set to check the value of data */
-#ifdef QAK
-HDfprintf(stderr, "Before opening second dataset: '%s'\n", dset_name2);
-#endif /* QAK */
- dset_id = H5Dopen(file_id, dset_name2);
-#ifdef QAK
-HDfprintf(stderr, "After opening second dataset\n");
-#endif /* QAK */
- CHECK(dset_id, FAIL, "H5Dopen");
-
- /* Write one element in the dataset */
-#ifdef QAK
-HDfprintf(stderr, "Before writing to second dataset\n");
-#endif /* QAK */
- ret = H5Dwrite(dset_id, dtype1_id, scalar_dspace_id, single_dspace_id, xfer_pid, &wdata);
-#ifdef QAK
-HDfprintf(stderr, "After writing to second dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dwrite");
-
-#ifdef QAK
-HDfprintf(stderr, "Before reading from second dataset\n");
-#endif /* QAK */
- ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
-#ifdef QAK
-HDfprintf(stderr, "After reading from second dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dread");
-
- /* Compare data read in */
- for(i = 0; i < dset_elmts; i++) {
- if(i == single_offset[0]) {
- if(HDstrcmp(rbuf[i].str_id, wdata.str_id)
- || rbuf[i].str_name
- || HDstrcmp(rbuf[i].str_desc, wdata.str_desc)
- || HDstrcmp(rbuf[i].str_orig, wdata.str_orig)
- || HDstrcmp(rbuf[i].str_stat, wdata.str_stat)
- || HDstrcmp(rbuf[i].str_form, wdata.str_form)
- || HDstrcmp(rbuf[i].str_unit, wdata.str_unit)) {
- TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
- continue;
- } /* end if */
- } /* end if */
- else {
+ /* Release the space */
+ ret = H5Dvlen_reclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dvlen_reclaim");
+
+
+ /* Open the second data set to check the value of data */
+ dset_id = H5Dopen(file_id, dset_name2);
+ CHECK(dset_id, FAIL, "H5Dopen");
+
+ ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for(i = 0; i < dset_elmts; i++) {
if(HDstrcmp(rbuf[i].str_id, "foobar")
|| HDstrcmp(rbuf[i].str_name, "")
|| rbuf[i].str_desc
- || HDstrcmp(rbuf[i].str_orig,"\0")
+ || HDstrcmp(rbuf[i].str_orig, "\0")
|| HDstrcmp(rbuf[i].str_stat, "dead")
|| HDstrcmp(rbuf[i].str_form, "liquid")
|| HDstrcmp(rbuf[i].str_unit, "meter")) {
TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
continue;
} /* end if */
- } /* end if */
+ } /* end for */
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Release the space */
+ ret = H5Dvlen_reclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dvlen_reclaim");
} /* end for */
+ } /* end for */
+
+ ret = H5Fclose(file_id);
+ CHECK(ret, FAIL, "H5Fclose");
+
+
+ /* Open the file to check data set value */
+ file_id = H5Fopen(FILENAME, H5F_ACC_RDWR, H5P_DEFAULT);
+ CHECK(file_id, FAIL, "H5Fopen");
+
+ /* Write one element & fill values to datasets with different storage layouts */
+ for(layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) {
+ unsigned compress_loop; /* # of times to run loop, for testing compressed chunked dataset */
+ unsigned test_loop; /* Loop over datasets */
+
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if(layout == H5D_CHUNKED)
+ compress_loop = 2;
+ else
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ compress_loop = 1;
+
+ /* Loop over dataset operations */
+ for(test_loop = 0; test_loop < compress_loop; test_loop++) {
+
+ /* Layout specific actions */
+ switch(layout) {
+ case H5D_COMPACT:
+ HDstrcpy(dset_name1, "dataset1-compact");
+ HDstrcpy(dset_name2, "dataset2-compact");
+ dset_dspace_id = small_dspace_id;
+ dset_elmts = SPACE4_DIM_SMALL;
+ break;
+
+ case H5D_CONTIGUOUS:
+ HDstrcpy(dset_name1, "dataset1-contig");
+ HDstrcpy(dset_name2, "dataset2-contig");
+ dset_dspace_id = large_dspace_id;
+ dset_elmts = SPACE4_DIM_LARGE;
+ break;
+
+ case H5D_CHUNKED:
+#ifdef H5_HAVE_FILTER_DEFLATE
+ if(test_loop == 1) {
+ HDstrcpy(dset_name1, "dataset1-chunked-compressed");
+ HDstrcpy(dset_name2, "dataset2-chunked-compressed");
+ } /* end if */
+ else {
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ HDstrcpy(dset_name1, "dataset1-chunked");
+ HDstrcpy(dset_name2, "dataset2-chunked");
+#ifdef H5_HAVE_FILTER_DEFLATE
+ } /* end else */
+#endif /* H5_HAVE_FILTER_DEFLATE */
+ dset_dspace_id = large_dspace_id;
+ dset_elmts = SPACE4_DIM_LARGE;
+ break;
+ } /* end switch */
+
+ /* Copy the dataset's dataspace */
+ single_dspace_id = H5Scopy(dset_dspace_id);
+ CHECK(single_dspace_id, FAIL, "H5Scopy");
+
+ /* Set a single element in the dataspace */
+ ret = H5Sselect_hyperslab(single_dspace_id, H5S_SELECT_SET, single_offset,
+ NULL, single_block, NULL);
+ CHECK(ret, FAIL, "H5Sselect_hyperslab");
+
+ /* Open first data set */
+ dset_id = H5Dopen(file_id, dset_name1);
+ CHECK(dset_id, FAIL, "H5Dopen");
+
+ /* Write one element in the dataset */
+ ret = H5Dwrite(dset_id, dtype1_id, scalar_dspace_id, single_dspace_id, xfer_pid, &wdata);
+ CHECK(ret, FAIL, "H5Dwrite");
+
+ ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for(i = 0; i < dset_elmts; i++) {
+ if(i == single_offset[0]) {
+ if(HDstrcmp(rbuf[i].str_id, wdata.str_id)
+ || rbuf[i].str_name
+ || HDstrcmp(rbuf[i].str_desc, wdata.str_desc)
+ || HDstrcmp(rbuf[i].str_orig, wdata.str_orig)
+ || HDstrcmp(rbuf[i].str_stat, wdata.str_stat)
+ || HDstrcmp(rbuf[i].str_form, wdata.str_form)
+ || HDstrcmp(rbuf[i].str_unit, wdata.str_unit)) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if(HDstrcmp(rbuf[i].str_id, "foobar")
+ || HDstrcmp(rbuf[i].str_name, "")
+ || rbuf[i].str_desc
+ || HDstrcmp(rbuf[i].str_orig,"\0")
+ || HDstrcmp(rbuf[i].str_stat, "dead")
+ || HDstrcmp(rbuf[i].str_form, "liquid")
+ || HDstrcmp(rbuf[i].str_unit, "meter")) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ } /* end for */
+
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Release the space */
+ ret = H5Dvlen_reclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dvlen_reclaim");
+
+
+ /* Open the second data set to check the value of data */
+ dset_id = H5Dopen(file_id, dset_name2);
+ CHECK(dset_id, FAIL, "H5Dopen");
+
+ /* Write one element in the dataset */
+ ret = H5Dwrite(dset_id, dtype1_id, scalar_dspace_id, single_dspace_id, xfer_pid, &wdata);
+ CHECK(ret, FAIL, "H5Dwrite");
-#ifdef QAK
-HDfprintf(stderr, "Before closing second dataset\n");
-#endif /* QAK */
- ret = H5Dclose(dset_id);
-#ifdef QAK
-HDfprintf(stderr, "After closing second dataset\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dclose");
-
- /* Release the space */
-#ifdef QAK
-HDfprintf(stderr, "Before reclaiming space\n");
-#endif /* QAK */
- ret = H5Dvlen_reclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
-#ifdef QAK
-HDfprintf(stderr, "After reclaiming space\n");
-#endif /* QAK */
- CHECK(ret, FAIL, "H5Dvlen_reclaim");
-#else /* QAK2 */
-HDfprintf(stderr, "Uncomment test for second dataset!\n");
-#endif /* QAK2 */
-
- /* Close the dataspace for the writes */
- ret = H5Sclose(single_dspace_id);
- CHECK(ret, FAIL, "H5Sclose");
+ ret = H5Dread(dset_id, dtype1_id, dset_dspace_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dread");
+
+ /* Compare data read in */
+ for(i = 0; i < dset_elmts; i++) {
+ if(i == single_offset[0]) {
+ if(HDstrcmp(rbuf[i].str_id, wdata.str_id)
+ || rbuf[i].str_name
+ || HDstrcmp(rbuf[i].str_desc, wdata.str_desc)
+ || HDstrcmp(rbuf[i].str_orig, wdata.str_orig)
+ || HDstrcmp(rbuf[i].str_stat, wdata.str_stat)
+ || HDstrcmp(rbuf[i].str_form, wdata.str_form)
+ || HDstrcmp(rbuf[i].str_unit, wdata.str_unit)) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ else {
+ if(HDstrcmp(rbuf[i].str_id, "foobar")
+ || HDstrcmp(rbuf[i].str_name, "")
+ || rbuf[i].str_desc
+ || HDstrcmp(rbuf[i].str_orig,"\0")
+ || HDstrcmp(rbuf[i].str_stat, "dead")
+ || HDstrcmp(rbuf[i].str_form, "liquid")
+ || HDstrcmp(rbuf[i].str_unit, "meter")) {
+ TestErrPrintf("%d: VL data doesn't match!, index(i)=%d\n",__LINE__,(int)i);
+ continue;
+ } /* end if */
+ } /* end if */
+ } /* end for */
+
+ ret = H5Dclose(dset_id);
+ CHECK(ret, FAIL, "H5Dclose");
+
+ /* Release the space */
+ ret = H5Dvlen_reclaim(dtype1_id, dset_dspace_id, xfer_pid, rbuf);
+ CHECK(ret, FAIL, "H5Dvlen_reclaim");
+
+ /* Close the dataspace for the writes */
+ ret = H5Sclose(single_dspace_id);
+ CHECK(ret, FAIL, "H5Sclose");
+ } /* end for */
} /* end for */
ret = H5Fclose(file_id);
@@ -3043,7 +2955,6 @@ test_vltypes(void)
MESSAGE(5, ("Testing Variable-Length Datatypes\n"));
/* These next tests use the same file */
-#ifndef QAK
test_vltypes_dataset_create(); /* Check dataset of VL when fill value
* won't be rewritten to it.*/
test_vltypes_vlen_atomic(); /* Test VL atomic datatypes */
@@ -3057,9 +2968,6 @@ test_vltypes(void)
rewrite_shorter_vltypes_vlen_vlen_atomic(); /*overwrite with VL data of shorted sequence*/
test_vltypes_compound_vlen_vlen(); /* Test compound datatypes with VL atomic components */
test_vltypes_compound_vlstr(); /* Test data rewritten of nested VL data */
-#else /* QAK */
-HDfprintf(stderr, "Uncomment tests!\n");
-#endif /* QAK */
test_vltypes_fill_value(); /* Test fill value for VL data */
} /* test_vltypes() */