author     Neil Fortner <nfortne2@hdfgroup.org>    2011-12-01 18:35:54 (GMT)
committer  Neil Fortner <nfortne2@hdfgroup.org>    2011-12-01 18:35:54 (GMT)
commit     d34c5fdb79946a97bdf81744f425ca7cfb8b9047 (patch)
tree       bae3bd5975ae6f9557d943fe4cd548c090c3e141
parent     738a33556ca3d7f5f9ccc2e22b649d12135f314e (diff)
[svn-r21793] Add RELEASE.txt note for r21789
-rw-r--r--  release_docs/RELEASE.txt  |   2
-rw-r--r--  test/dsets.c              | 115
2 files changed, 117 insertions, 0 deletions
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index b7df894..b0b4f03 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -343,6 +343,8 @@ Bug Fixes since HDF5-1.8.0 release
Library
-------
+ - Fixed a seg fault that could occur when shrinking a dataset with chunks
+ larger than 1 MB. (NAF - 2011/11/30 - HDFFV-7833)
- The library allowed the conversion of strings between ASCII and UTF8
(Issue 7582). We have corrected it to report an error under this
situation. (SLU - 2011/11/8)
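
For context, the scenario the new RELEASE note describes can be exercised with a handful of HDF5 calls: create a 1-D chunked dataset whose chunk is larger than 1 MB, then call H5Dset_extent() to shrink its extent by more than 1 MB. The sketch below is illustrative only; the file name and sizes are chosen here rather than taken from the commit, error checking is omitted for brevity, and the test added in the dsets.c hunk below additionally writes an element before shrinking so the chunk is actually allocated first.

    #include "hdf5.h"

    int
    main(void)
    {
        hsize_t chunk_dim = 2 * 1024 * 1024 / sizeof(unsigned);  /* 2 MB chunk */
        hsize_t dim       = chunk_dim;                           /* one full chunk of elements */
        hsize_t max_dim   = H5S_UNLIMITED;
        hid_t   fid, dcpl, sid, dsid;

        /* Create a file and a 1-D chunked, extendible dataset with a 2 MB chunk */
        fid  = H5Fcreate("shrink_repro.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        dcpl = H5Pcreate(H5P_DATASET_CREATE);
        H5Pset_chunk(dcpl, 1, &chunk_dim);
        sid  = H5Screate_simple(1, &dim, &max_dim);
        dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);

        /* Shrink the dataset by more than 1 MB; before the fix this code path
         * could trigger the segmentation fault described in the note above */
        dim = 512 * 1024 / sizeof(unsigned);
        H5Dset_extent(dsid, &dim);

        H5Dclose(dsid);
        H5Sclose(sid);
        H5Pclose(dcpl);
        H5Fclose(fid);
        return 0;
    }
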
diff --git a/test/dsets.c b/test/dsets.c
index ee341c9..a61a15b 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -8066,6 +8066,121 @@ error:
return -1;
} /* end test_chunk_expand() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_large_chunk_shrink
+ *
+ * Purpose: Tests support for shrinking a dataset whose chunks are larger
+ *          than 1 MB by an amount greater than 1 MB.
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Neil Fortner
+ * Wednesday, November 30, 2011
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_large_chunk_shrink(hid_t fapl)
+{
+ char filename[FILENAME_BUF_SIZE];
+ hid_t fid = -1; /* File ID */
+ hid_t dcpl = -1; /* Dataset creation property list ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t scalar_sid = -1; /* Scalar dataspace ID */
+ hid_t dsid = -1; /* Dataset ID */
+ hsize_t dim, max_dim, chunk_dim; /* Dataset and chunk dimensions */
+ hsize_t hs_offset; /* Hyperslab offset */
+ hsize_t hs_size; /* Hyperslab size */
+ unsigned write_elem, read_elem; /* Element written/read */
+
+ TESTING("shrinking large chunk");
+
+ h5_fixname(FILENAME[10], fapl, filename, sizeof filename);
+
+ /* Create file */
+ if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR
+
+ /* Create dataset creation property list */
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
+
+ /* Set 2 MB chunk size */
+ chunk_dim = 2 * 1024 * 1024 / sizeof(unsigned);
+ if(H5Pset_chunk(dcpl, 1, &chunk_dim) < 0) FAIL_STACK_ERROR
+
+ /* Create scalar dataspace */
+ if((scalar_sid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR
+
+ /* Create 1-D dataspace */
+ dim = 2 * 1024 * 1024 / sizeof(unsigned);
+ max_dim = H5S_UNLIMITED;
+ if((sid = H5Screate_simple(1, &dim, &max_dim)) < 0) FAIL_STACK_ERROR
+
+ /* Create 2 MB chunked dataset */
+ if((dsid = H5Dcreate2(fid, "dset", H5T_NATIVE_UINT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Select last element in the dataset */
+ hs_offset = dim - 1;
+ hs_size = 1;
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, &hs_offset, NULL, &hs_size, NULL) < 0) FAIL_STACK_ERROR
+
+ /* Read (unwritten) element from dataset */
+ read_elem = 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify unwritten element is fill value (0) */
+ if(read_elem != 0) FAIL_PUTS_ERROR("invalid unwritten element read");
+
+ /* Write element to dataset */
+ write_elem = 2;
+ if(H5Dwrite(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &write_elem) < 0) FAIL_STACK_ERROR
+
+ /* Read element from dataset */
+ read_elem = write_elem + 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify written element is read in */
+ if(read_elem != write_elem) FAIL_PUTS_ERROR("invalid written element read");
+
+ /* Shrink dataset to 512 KB */
+ dim = 512 * 1024 / sizeof(unsigned);
+ if(H5Dset_extent(dsid, &dim) < 0) FAIL_STACK_ERROR
+
+ /* Expand dataset back to 2 MB */
+ dim = 2 * 1024 * 1024 / sizeof(unsigned);
+ if(H5Dset_extent(dsid, &dim) < 0) FAIL_STACK_ERROR
+
+ /* Read element from dataset */
+ read_elem = 1;
+ if(H5Dread(dsid, H5T_NATIVE_UINT, scalar_sid, sid, H5P_DEFAULT, &read_elem) < 0) FAIL_STACK_ERROR
+
+ /* Verify element was reset to the fill value (0), since the shrink deallocated its chunk */
+ if(read_elem != 0) FAIL_PUTS_ERROR("invalid element read");
+
+ /* Close everything */
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(scalar_sid) < 0) FAIL_STACK_ERROR
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
+ if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Dclose(dsid);
+ H5Sclose(sid);
+ H5Sclose(scalar_sid);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return -1;
+} /* end test_large_chunk_shrink() */
+
/*-------------------------------------------------------------------------
* Function: test_large_chunk_shrink