summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
author    Quincey Koziol <koziol@hdfgroup.org>  2001-01-12 19:57:33 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>  2001-01-12 19:57:33 (GMT)
commit  7bdd4ed41f1fac07c853be292b2f5fb73a5b2ccb (patch)
tree    8bdd9110528f309ec88a56c226847c635c4a12c6
parent  093c2fea18fa2266e4707ba7a21c7160516b5533 (diff)
download  hdf5-7bdd4ed41f1fac07c853be292b2f5fb73a5b2ccb.zip
          hdf5-7bdd4ed41f1fac07c853be292b2f5fb73a5b2ccb.tar.gz
          hdf5-7bdd4ed41f1fac07c853be292b2f5fb73a5b2ccb.tar.bz2
[svn-r3281] Purpose:
Bug fix.
Description: Datasets were allowed to be created with chunks larger than the maximum dimension for each dimension.
Solution: Wrote test to verify new error checking is working correctly.
Platforms tested: FreeBSD 4.2 (hawkwind)
-rw-r--r--  test/dsets.c  16
1 file changed, 16 insertions, 0 deletions
diff --git a/test/dsets.c b/test/dsets.c
index 6b60991..824e000 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -114,6 +114,22 @@ test_create(hid_t file)
*/
create_parms = H5Pcreate(H5P_DATASET_CREATE);
assert(create_parms >= 0);
+
+ /* Attempt to create a dataset with invalid chunk sizes */
+ csize[0] = dims[0]*2;
+ csize[1] = dims[1]*2;
+ status = H5Pset_chunk(create_parms, 2, csize);
+ assert(status >= 0);
+ H5E_BEGIN_TRY {
+ dataset = H5Dcreate(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space,
+ create_parms);
+ } H5E_END_TRY;
+ if (dataset >= 0) {
+ FAILED();
+ puts(" Opened a dataset with incorrect chunking parameters.");
+ goto error;
+ }
+
csize[0] = 5;
csize[1] = 100;
status = H5Pset_chunk(create_parms, 2, csize);