author     Dana Robinson <derobins@hdfgroup.org>   2020-05-20 15:57:20 (GMT)
committer  Dana Robinson <derobins@hdfgroup.org>   2020-05-20 15:57:20 (GMT)
commit     78931a0d83bc6447d123413604f76cc374aaeca0 (patch)
tree       55563a74e915b7127ec65f9e11aaadf99925c779
parent     d701855cc128336ef3a18b930634639b7133f24f (diff)
Brought the dsets test in line with develop. Brings over the fix
for 0-size dataset metadata allocation.
-rw-r--r--  src/H5Dint.c     |    2
-rw-r--r--  src/H5Dlayout.c  |   44
-rw-r--r--  src/H5Oprivate.h |   12
-rw-r--r--  test/dsets.c     | 1072
4 files changed, 845 insertions, 285 deletions
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 696c7d0..8f2178d 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -2270,7 +2270,7 @@ H5D__alloc_storage(const H5D_io_info_t *io_info, H5D_time_alloc_t time_alloc,
* We assume that external storage is already
* allocated by the caller, or at least will be before I/O is performed.
*/
- if(!(H5S_NULL == H5S_GET_EXTENT_TYPE(dset->shared->space) || dset->shared->dcpl_cache.efl.nused > 0)) {
+ if(!(0 == H5S_GET_EXTENT_NPOINTS(dset->shared->space) || dset->shared->dcpl_cache.efl.nused > 0)) {
/* Get a pointer to the dataset's layout information */
layout = &(dset->shared->layout);
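The old predicate only skipped allocation for NULL dataspaces, but a simple dataspace with a zero-sized dimension has extent type H5S_SIMPLE while still holding zero elements, so storage was allocated for it anyway; testing the element count covers both cases. A minimal sketch of the kind of dataset this targets, using the public dataspace API (illustrative only; `file` stands for any open file ID):

    /* A 0-element dataset whose dataspace is *not* H5S_NULL */
    hsize_t dims[1] = {0};                  /* zero elements */
    hid_t   space   = H5Screate_simple(1, dims, NULL);

    /* H5Sget_simple_extent_type(space)    == H5S_SIMPLE  (old check misses it)  */
    /* H5Sget_simple_extent_npoints(space) == 0           (new check catches it) */
    hid_t dset = H5Dcreate2(file, "empty", H5T_NATIVE_INT, space,
                            H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

The internal H5S_GET_EXTENT_TYPE / H5S_GET_EXTENT_NPOINTS macros used in this hunk correspond to those public queries.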
diff --git a/src/H5Dlayout.c b/src/H5Dlayout.c
index c8afb8e..2447fb6 100644
--- a/src/H5Dlayout.c
+++ b/src/H5Dlayout.c
@@ -25,7 +25,6 @@
#include "H5Dpkg.h" /* Datasets */
#include "H5Eprivate.h" /* Error handling */
#include "H5HLprivate.h" /* Local heaps */
-#include "H5MMprivate.h" /* Memory management */
/****************/
@@ -82,7 +81,7 @@ const unsigned H5O_layout_ver_bounds[] = {
herr_t
H5D__layout_set_io_ops(const H5D_t *dataset)
{
- herr_t ret_value = SUCCEED; /* Return value */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_PACKAGE
@@ -335,7 +334,7 @@ done:
*-------------------------------------------------------------------------
*/
herr_t
-H5D__layout_set_latest_indexing(H5O_layout_t *layout, const H5S_t *space,
+H5D__layout_set_latest_indexing(H5O_layout_t *layout, const H5S_t *space,
const H5D_dcpl_cache_t *dcpl_cache)
{
herr_t ret_value = SUCCEED; /* Return value */
@@ -361,9 +360,9 @@ H5D__layout_set_latest_indexing(H5O_layout_t *layout, const H5S_t *space,
if(ndims > 0) {
hsize_t max_dims[H5O_LAYOUT_NDIMS]; /* Maximum dimension sizes */
hsize_t cur_dims[H5O_LAYOUT_NDIMS]; /* Current dimension sizes */
- unsigned unlim_count = 0; /* Count of unlimited max. dimensions */
+ unsigned unlim_count = 0; /* Count of unlimited max. dimensions */
hbool_t single = TRUE; /* Fulfill single chunk indexing */
- unsigned u; /* Local index variable */
+ unsigned u; /* Local index variable */
/* Query the dataspace's dimensions */
if(H5S_get_simple_extent_dims(space, cur_dims, max_dims) < 0)
@@ -417,7 +416,7 @@ H5D__layout_set_latest_indexing(H5O_layout_t *layout, const H5S_t *space,
layout->storage.u.chunk.idx_type = H5D_CHUNK_IDX_SINGLE;
layout->storage.u.chunk.ops = H5D_COPS_SINGLE;
} /* end if */
- else if(!dcpl_cache->pline.nused &&
+ else if(!dcpl_cache->pline.nused &&
dcpl_cache->fill.alloc_time == H5D_ALLOC_TIME_EARLY) {
/* Set the chunk index type to "none" Index */
@@ -506,7 +505,7 @@ H5D__layout_oh_create(H5F_t *file, H5O_t *oh, H5D_t *dset, hid_t dapl_id)
if(H5D__alloc_storage(&io_info, H5D_ALLOC_CREATE, FALSE, NULL) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize storage")
- } /* end if */
+ }
/* Update external storage message, if it's used */
if(dset->shared->dcpl_cache.efl.nused > 0) {
@@ -557,10 +556,13 @@ H5D__layout_oh_create(H5F_t *file, H5O_t *oh, H5D_t *dset, hid_t dapl_id)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update external file list message")
} /* end if */
- /* (Don't make layout message constant unless allocation time is early and non-filtered, since space may not be allocated) */
+ /* Create layout message */
+ /* (Don't make layout message constant unless allocation time is early and
+ * non-filtered and has >0 elements, since space may not be allocated -QAK) */
/* (Note: this is relying on H5D__alloc_storage not calling H5O_msg_write during dataset creation) */
if(fill_prop->alloc_time == H5D_ALLOC_TIME_EARLY && H5D_COMPACT != layout->type
- && !dset->shared->dcpl_cache.pline.nused)
+ && !dset->shared->dcpl_cache.pline.nused
+ && (0 != H5S_GET_EXTENT_NPOINTS(dset->shared->space)))
layout_mesg_flags = H5O_MSG_FLAG_CONSTANT;
else
layout_mesg_flags = 0;
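With the H5Dint.c change above, a dataset whose dataspace currently holds zero elements gets no storage at create time even under early allocation, so its layout message may still have to be rewritten once space is actually allocated; the extra `0 != H5S_GET_EXTENT_NPOINTS(...)` clause keeps such a message writable instead of flagging it constant. One scenario this appears to guard, sketched with public API calls (handle names and sizes are illustrative):

    /* Chunked, early-allocation, unfiltered dataset with zero current elements */
    hid_t   dcpl     = H5Pcreate(H5P_DATASET_CREATE);
    hsize_t cdims[1] = {16};
    H5Pset_chunk(dcpl, 1, cdims);
    H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);

    hsize_t dims[1] = {0}, maxdims[1] = {H5S_UNLIMITED};
    hid_t   space   = H5Screate_simple(1, dims, maxdims);
    hid_t   dset    = H5Dcreate2(file, "empty_chunked", H5T_NATIVE_INT, space,
                                 H5P_DEFAULT, dcpl, H5P_DEFAULT);

    /* No elements -> nothing allocated yet; extending the dataset later must
     * still be able to update the layout message, so it cannot be constant. */
    hsize_t new_dims[1] = {64};
    H5Dset_extent(dset, new_dims);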
@@ -587,15 +589,15 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5D__layout_oh_read
+ * Function: H5D__layout_oh_read
*
- * Purpose: Read layout/pline/efl information for dataset
+ * Purpose: Read layout/pline/efl information for dataset
*
- * Return: Success: SUCCEED
- * Failure: FAIL
+ * Return: Success: SUCCEED
+ * Failure: FAIL
*
- * Programmer: Quincey Koziol
- * Monday, July 27, 2009
+ * Programmer: Quincey Koziol
+ * Monday, July 27, 2009
*
*-------------------------------------------------------------------------
*/
@@ -681,15 +683,15 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5D__layout_oh_write
+ * Function: H5D__layout_oh_write
*
- * Purpose: Write layout information for dataset
+ * Purpose: Write layout information for dataset
*
- * Return: Success: SUCCEED
- * Failure: FAIL
+ * Return: Success: SUCCEED
+ * Failure: FAIL
*
- * Programmer: Quincey Koziol
- * Monday, July 27, 2009
+ * Programmer: Quincey Koziol
+ * Monday, July 27, 2009
*
*-------------------------------------------------------------------------
*/
diff --git a/src/H5Oprivate.h b/src/H5Oprivate.h
index 08589c0..32df33f 100644
--- a/src/H5Oprivate.h
+++ b/src/H5Oprivate.h
@@ -120,7 +120,7 @@ typedef struct H5O_mesg_t H5O_mesg_t;
/* If the module using this macro is allowed access to the private variables, access them directly */
#ifdef H5O_MODULE
#define H5O_OH_GET_ADDR(O) ((O)->chunk[0].addr)
-#define H5O_OH_GET_VERSION ((O)->version)
+#define H5O_OH_GET_VERSION(O) ((O)->version)
#define H5O_OH_GET_FLAGS(O) ((O)->flags)
#define H5O_OH_GET_MTIME(O) ((O)->mtime)
#else /* H5O_MODULE */
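The old definition omitted the parameter list, so its body referenced an `O` that is never declared; under `H5O_MODULE`, any use would fail to compile. With `(O)` added, the macro behaves like its H5O_OH_GET_ADDR/FLAGS/MTIME neighbors. A tiny illustration of the expansion (the `oh` handle is hypothetical):

    /* Old:  #define H5O_OH_GET_VERSION ((O)->version)
     *       H5O_OH_GET_VERSION(oh)  ->  ((O)->version)(oh)   -- no 'O' in scope
     * New:  #define H5O_OH_GET_VERSION(O) ((O)->version)
     *       H5O_OH_GET_VERSION(oh)  ->  ((oh)->version)      -- as intended    */
    unsigned vers = H5O_OH_GET_VERSION(oh);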
@@ -222,7 +222,7 @@ typedef struct H5O_copy_t {
#define H5O_MDCI_MSG_ID 0x0018 /* Metadata Cache Image Message */
#define H5O_UNKNOWN_ID 0x0019 /* Placeholder message ID for unknown message. */
/* (this should never exist in a file) */
-/*
+/*
* Note: Must increment H5O_MSG_TYPES in H5Opkg.h and update H5O_msg_class_g
* in H5O.c when creating a new message type. Also bump the value of
* H5O_BOGUS_INVALID_ID, below, to be one greater than the value of
@@ -481,7 +481,7 @@ typedef struct H5O_storage_chunk_t {
const struct H5D_chunk_ops_t *ops; /* Pointer to chunked storage operations */
union {
H5O_storage_chunk_btree_t btree; /* Information for v1 B-tree index */
- H5O_storage_chunk_bt2_t btree2; /* Information for v2 B-tree index */
+ H5O_storage_chunk_bt2_t btree2; /* Information for v2 B-tree index */
H5O_storage_chunk_earray_t earray; /* Information for extensible array index */
H5O_storage_chunk_farray_t farray; /* Information for fixed array index */
H5O_storage_chunk_single_filt_t single; /* Information for single chunk w/ filters index */
@@ -582,8 +582,8 @@ typedef struct H5O_storage_t {
typedef struct H5O_layout_chunk_farray_t {
/* Creation parameters for fixed array data structure */
struct {
- uint8_t max_dblk_page_nelmts_bits; /* Log2(Max. # of elements in a data block page) -
- i.e. # of bits needed to store max. # of elements
+ uint8_t max_dblk_page_nelmts_bits; /* Log2(Max. # of elements in a data block page) -
+ i.e. # of bits needed to store max. # of elements
in a data block page */
} cparam;
} H5O_layout_chunk_farray_t;
@@ -869,7 +869,6 @@ typedef struct {
} u;
} H5O_mesg_operator_t;
-
/* Typedef for abstract object creation */
typedef struct {
H5O_type_t obj_type; /* Type of object to create */
@@ -877,6 +876,7 @@ typedef struct {
void *new_obj; /* Pointer to new object created */
} H5O_obj_create_t;
+
/* Forward declarations for prototype arguments */
struct H5P_genplist_t;
diff --git a/test/dsets.c b/test/dsets.c
index 6c91ba9..f7e90a5 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -27,7 +27,6 @@
#include "testhdf5.h"
#include "H5srcdir.h"
-#include "H5Bprivate.h"
#include "H5CXprivate.h" /* API Contexts */
#include "H5Iprivate.h"
#include "H5Pprivate.h"
@@ -84,6 +83,7 @@ const char *FILENAME[] = {
"dls_01_strings", /* 23 */
"power2up", /* 24 */
"version_bounds", /* 25 */
+ "alloc_0sized", /* 26 */
NULL
};
@@ -148,10 +148,12 @@ const char *FILENAME[] = {
#define DSET_COPY_DCPL_NAME_1 "copy_dcpl_1"
#define DSET_COPY_DCPL_NAME_2 "copy_dcpl_2"
#define COPY_DCPL_EXTFILE_NAME "ext_file"
+#ifndef H5_NO_DEPRECATED_SYMBOLS
#define DSET_DEPREC_NAME "deprecated"
#define DSET_DEPREC_NAME_CHUNKED "deprecated_chunked"
#define DSET_DEPREC_NAME_COMPACT "deprecated_compact"
#define DSET_DEPREC_NAME_FILTER "deprecated_filter"
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
/* Dataset names for testing Fixed Array Indexing */
#define DSET_FIXED_MAX "DSET_FIXED_MAX"
@@ -177,7 +179,9 @@ const char *FILENAME[] = {
#define H5Z_FILTER_CORRUPT 306
#define H5Z_FILTER_CAN_APPLY_TEST 307
#define H5Z_FILTER_SET_LOCAL_TEST 308
+#ifndef H5_NO_DEPRECATED_SYMBOLS
#define H5Z_FILTER_DEPREC 309
+#endif /* H5_NO_DEPRECATED_SYMBOLS */
#define H5Z_FILTER_EXPAND 310
#define H5Z_FILTER_CAN_APPLY_TEST2 311
#define H5Z_FILTER_COUNT 312
@@ -280,10 +284,16 @@ const char *FILENAME[] = {
/* Shared global arrays */
#define DSET_DIM1 100
#define DSET_DIM2 200
-int points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
-double points_dbl[DSET_DIM1][DSET_DIM2], check_dbl[DSET_DIM1][DSET_DIM2];
-size_t count_nbytes_read = 0;
-size_t count_nbytes_written = 0;
+int **points = NULL;
+int *points_data = NULL;
+double **points_dbl = NULL;
+double *points_dbl_data = NULL;
+int **check = NULL;
+int *check_data = NULL;
+double **check_dbl = NULL;
+double *check_dbl_data = NULL;
+size_t count_nbytes_read = 0;
+size_t count_nbytes_written = 0;
/* Temporary buffer dimensions */
#define DSET_TMP_DIM1 50
@@ -519,7 +529,8 @@ test_simple_io(const char *env_h5_drvr, hid_t fapl)
void *tconv_buf = NULL;
int f = -1;
haddr_t offset;
- int rdata[DSET_DIM1][DSET_DIM2];
+ int **rdata = NULL;
+ int *rdata_bytes = NULL;
TESTING("simple I/O");
@@ -527,6 +538,14 @@ test_simple_io(const char *env_h5_drvr, hid_t fapl)
if(HDstrcmp(env_h5_drvr, "split") && HDstrcmp(env_h5_drvr, "multi") && HDstrcmp(env_h5_drvr, "family")) {
h5_fixname(FILENAME[4], fapl, filename, sizeof filename);
+ /* Set up data array */
+ if(NULL == (rdata_bytes = (int *)HDcalloc(DSET_DIM1 * DSET_DIM2, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (rdata = (int **)HDcalloc(DSET_DIM1, sizeof(rdata_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < DSET_DIM1; i++)
+ rdata[i] = rdata_bytes + (i * DSET_DIM2);
+
/* Initialize the dataset */
for(i = n = 0; i < DSET_DIM1; i++)
for(j = 0; j < DSET_DIM2; j++)
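This heap-allocation pattern recurs throughout the updated tests: each large static 2-D array becomes one contiguous block (the `*_bytes` / `*_data` pointer) plus an array of row pointers, so `rdata[i][j]`-style indexing keeps working while the flat pointer is what gets passed to H5Dread/H5Dwrite and HDread, and both allocations are freed on every exit path. A generic sketch of the idiom with plain calloc/free (sizes are placeholders, not the test's):

    #include <stdlib.h>

    enum { ROWS = 40, COLS = 20 };

    int  *flat = calloc((size_t)ROWS * COLS, sizeof(int)); /* contiguous data */
    int **rows = calloc(ROWS, sizeof(int *));              /* row pointers    */
    for (size_t i = 0; i < ROWS; i++)
        rows[i] = flat + i * COLS;

    rows[3][5] = 42;          /* 2-D style access through the row pointers    */
    /* ... pass 'flat' (not 'rows') as the I/O buffer to H5Dwrite/H5Dread ... */

    free(rows);               /* free both allocations                        */
    free(flat);

Moving these buffers off the stack avoids very large test stack frames (a DSET_DIM1 x DSET_DIM2 int array is roughly 80 KB), and because free(NULL) is a no-op the error paths can free the pointers unconditionally, as the updated error blocks do.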
@@ -556,7 +575,7 @@ test_simple_io(const char *env_h5_drvr, hid_t fapl)
if(H5Dget_offset(dataset) != HADDR_UNDEF) goto error;
/* Write the data to the dataset */
- if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer, points) < 0)
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer, points_data) < 0)
goto error;
/* Test dataset address in file. Open the same file as a C file, seek
@@ -565,7 +584,7 @@ test_simple_io(const char *env_h5_drvr, hid_t fapl)
if((offset=H5Dget_offset(dataset))==HADDR_UNDEF) goto error;
/* Read the dataset back */
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, xfer, check_data) < 0)
goto error;
/* Check that the values read are the same as the values written */
@@ -589,7 +608,7 @@ test_simple_io(const char *env_h5_drvr, hid_t fapl)
f = HDopen(filename, O_RDONLY);
HDlseek(f, (off_t)offset, SEEK_SET);
- if(HDread(f, rdata, sizeof(int)*DSET_DIM1*DSET_DIM2) < 0)
+ if(HDread(f, rdata_bytes, sizeof(int)*DSET_DIM1*DSET_DIM2) < 0)
goto error;
/* Check that the values read are the same as the values written */
@@ -608,6 +627,9 @@ test_simple_io(const char *env_h5_drvr, hid_t fapl)
f = -1;
HDfree(tconv_buf);
+ HDfree(rdata_bytes);
+ HDfree(rdata);
+
PASSED();
} /* end if */
else {
@@ -628,8 +650,11 @@ error:
if(H5Fclose(file) < 0) TEST_ERROR
if(f > 0)
HDclose(f);
- if(tconv_buf)
- HDfree(tconv_buf);
+
+ HDfree(tconv_buf);
+ HDfree(rdata_bytes);
+ HDfree(rdata);
+
return FAIL;
} /* end test_simple_io() */
@@ -652,7 +677,8 @@ test_userblock_offset(const char *env_h5_drvr, hid_t fapl, hbool_t new_format)
hsize_t dims[2];
int f = -1;
haddr_t offset;
- int rdata[DSET_DIM1][DSET_DIM2];
+ int **rdata = NULL;
+ int *rdata_bytes = NULL;
TESTING("dataset offset with user block");
@@ -660,6 +686,14 @@ test_userblock_offset(const char *env_h5_drvr, hid_t fapl, hbool_t new_format)
if(HDstrcmp(env_h5_drvr, "split") && HDstrcmp(env_h5_drvr, "multi") && HDstrcmp(env_h5_drvr, "family")) {
h5_fixname(FILENAME[2], fapl, filename, sizeof filename);
+ /* Set up data array */
+ if(NULL == (rdata_bytes = (int *)HDcalloc(DSET_DIM1 * DSET_DIM2, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (rdata = (int **)HDcalloc(DSET_DIM1, sizeof(rdata_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < DSET_DIM1; i++)
+ rdata[i] = rdata_bytes + (i * DSET_DIM2);
+
if((fcpl=H5Pcreate(H5P_FILE_CREATE)) < 0) goto error;
if(H5Pset_userblock(fcpl, (hsize_t)USER_BLOCK) < 0) goto error;
if(new_format)
@@ -683,7 +717,7 @@ test_userblock_offset(const char *env_h5_drvr, hid_t fapl, hbool_t new_format)
space = -1;
/* Write the data to the dataset */
- if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0)
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points_data) < 0)
goto error;
/* Test dataset address in file. Open the same file as a C file, seek
@@ -698,7 +732,7 @@ test_userblock_offset(const char *env_h5_drvr, hid_t fapl, hbool_t new_format)
f = HDopen(filename, O_RDONLY);
HDlseek(f, (off_t)offset, SEEK_SET);
- if(HDread(f, rdata, sizeof(int)*DSET_DIM1*DSET_DIM2) < 0)
+ if(HDread(f, rdata_bytes, sizeof(int)*DSET_DIM1*DSET_DIM2) < 0)
goto error;
/* Check that the values read are the same as the values written */
@@ -716,6 +750,9 @@ test_userblock_offset(const char *env_h5_drvr, hid_t fapl, hbool_t new_format)
HDclose(f);
f = -1;
+ HDfree(rdata_bytes);
+ HDfree(rdata);
+
PASSED();
} /* end if */
else {
@@ -736,6 +773,10 @@ error:
if(H5Fclose(file) < 0) TEST_ERROR
if(f > 0)
HDclose(f);
+
+ HDfree(rdata_bytes);
+ HDfree(rdata);
+
return FAIL;
} /* end test_userblock_offset() */
@@ -836,7 +877,7 @@ test_compact_io(hid_t fapl)
**************************************/
/* Create a copy of file access property list */
- if((new_fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) TEST_ERROR
+ if((new_fapl = h5_fileaccess()) < 0) TEST_ERROR
/* Loop through all the combinations of low/high library format bounds,
skipping invalid combinations.
@@ -1794,7 +1835,7 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
*/
TESTING(" filters (uninitialized read)");
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check_data) < 0)
TEST_ERROR;
for(i=0; i<(size_t)size[0]; i++) {
@@ -1823,7 +1864,7 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
}
}
- if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, points) < 0)
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, points_data) < 0)
TEST_ERROR;
if((*dset_size=H5Dget_storage_size(dataset))==0) TEST_ERROR;
@@ -1841,25 +1882,25 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
/* Default behavior is failure when data is corrupted. */
/* (Use the "write" DXPL in order to make certain corruption is seen) */
H5E_BEGIN_TRY {
- status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
+ status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check_data);
} H5E_END_TRY;
if(status>=0) TEST_ERROR;
/* Callback decides to continue inspite data is corrupted. */
if(H5Pset_filter_callback(dxpl, filter_cb_cont, NULL) < 0) TEST_ERROR;
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check_data) < 0)
TEST_ERROR;
/* Callback decides to fail when data is corrupted. */
if(H5Pset_filter_callback(write_dxpl, filter_cb_fail, NULL) < 0) TEST_ERROR;
/* (Use the "write" DXPL in order to make certain corruption is seen) */
H5E_BEGIN_TRY {
- status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
+ status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check_data);
} H5E_END_TRY;
if(status>=0) TEST_ERROR;
}
else {
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check_data) < 0)
TEST_ERROR;
/* Check that the values read are the same as the values written */
@@ -1893,33 +1934,33 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
points[i][j] = (int)HDrandom ();
}
}
- if(H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, points) < 0)
+ if(H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, points_data) < 0)
TEST_ERROR;
if(corrupted) {
/* Default behavior is failure when data is corrupted. */
/* (Use the "write" DXPL in order to make certain corruption is seen) */
H5E_BEGIN_TRY {
- status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
+ status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check_data);
} H5E_END_TRY;
if(status>=0) TEST_ERROR;
/* Callback decides to continue inspite data is corrupted. */
if(H5Pset_filter_callback(dxpl, filter_cb_cont, NULL) < 0) TEST_ERROR;
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check_data) < 0)
TEST_ERROR;
/* Callback decides to fail when data is corrupted. */
if(H5Pset_filter_callback(write_dxpl, filter_cb_fail, NULL) < 0) TEST_ERROR;
/* (Use the "write" DXPL in order to make certain corruption is seen) */
H5E_BEGIN_TRY {
- status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
+ status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check_data);
} H5E_END_TRY;
if(status>=0) TEST_ERROR;
}
else {
/* Read the dataset back and check it */
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check_data) < 0)
TEST_ERROR;
/* Check that the values read are the same as the values written */
@@ -1954,13 +1995,13 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
/* Default behavior is failure when data is corrupted. */
/* (Use the "write" DXPL in order to make certain corruption is seen) */
H5E_BEGIN_TRY {
- status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
+ status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check_data);
} H5E_END_TRY;
if(status >= 0) TEST_ERROR;
/* Callback decides to continue inspite data is corrupted. */
if(H5Pset_filter_callback(dxpl, filter_cb_cont, NULL) < 0) TEST_ERROR;
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check_data) < 0)
TEST_ERROR;
/* Callback decides to fail when data is corrupted. */
@@ -1968,12 +2009,12 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
/* (Use the "write" DXPL in order to make certain corruption is seen) */
H5E_BEGIN_TRY {
- status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
+ status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check_data);
} H5E_END_TRY;
if(status >= 0) TEST_ERROR;
} /* end if */
else {
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check_data) < 0)
TEST_ERROR;
/* Check that the values read are the same as the values written */
@@ -2007,32 +2048,32 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size,
NULL) < 0) TEST_ERROR;
/* (Use the "read" DXPL because partial I/O on corrupted data test needs to ignore errors during writing) */
- if(H5Dwrite (dataset, H5T_NATIVE_INT, sid, sid, dxpl, points) < 0)
+ if(H5Dwrite (dataset, H5T_NATIVE_INT, sid, sid, dxpl, points_data) < 0)
TEST_ERROR;
if(corrupted) {
/* Default behavior is failure when data is corrupted. */
/* (Use the "write" DXPL in order to make certain corruption is seen) */
H5E_BEGIN_TRY {
- status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
+ status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check_data);
} H5E_END_TRY;
if(status>=0) TEST_ERROR;
/* Callback decides to continue inspite data is corrupted. */
if(H5Pset_filter_callback(dxpl, filter_cb_cont, NULL) < 0) TEST_ERROR;
- if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check_data) < 0)
TEST_ERROR;
/* Callback decides to fail when data is corrupted. */
if(H5Pset_filter_callback(write_dxpl, filter_cb_fail, NULL) < 0) TEST_ERROR;
/* (Use the "write" DXPL in order to make certain corruption is seen) */
H5E_BEGIN_TRY {
- status=H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check);
+ status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, check_data);
} H5E_END_TRY;
if(status>=0) TEST_ERROR;
}
else {
- if(H5Dread (dataset, H5T_NATIVE_INT, sid, sid, dxpl, check) < 0)
+ if(H5Dread(dataset, H5T_NATIVE_INT, sid, sid, dxpl, check_data) < 0)
TEST_ERROR;
/* Check that the values read are the same as the values written */
@@ -2298,9 +2339,9 @@ H5_ATTR_UNUSED
hsize_t shuffle_size; /* Size of dataset with shuffle filter */
-#if(defined H5_HAVE_FILTER_DEFLATE | defined H5_HAVE_FILTER_SZIP)
+#if defined(H5_HAVE_FILTER_DEFLATE) || defined(H5_HAVE_FILTER_SZIP)
hsize_t combo_size; /* Size of dataset with multiple filters */
-#endif /* defined H5_HAVE_FILTER_DEFLATE | defined H5_HAVE_FILTER_SZIP */
+#endif /* defined(H5_HAVE_FILTER_DEFLATE) || defined(H5_HAVE_FILTER_SZIP) */
/* test the H5Zget_filter_info function */
if(test_get_filter_info() < 0) goto error;
@@ -2637,7 +2678,7 @@ test_missing_filter(hid_t file)
} /* end if */
/* Write data */
- if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0) {
+ if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error writing dataset data\n",__LINE__);
goto error;
@@ -2666,7 +2707,7 @@ test_missing_filter(hid_t file)
} /* end if */
/* Read data */
- if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check) < 0) {
+ if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error reading dataset data\n",__LINE__);
goto error;
@@ -2727,7 +2768,7 @@ test_missing_filter(hid_t file)
/* Read data (should fail, since deflate filter is missing) */
H5E_BEGIN_TRY {
- ret = H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check);
+ ret = H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check_data);
} H5E_END_TRY;
if(ret>=0) {
H5_FAILED();
@@ -3081,7 +3122,7 @@ test_nbit_float(hid_t file)
*/
for(i = 0; i < (size_t)size[0]; i++) {
for(j = 0; j < (size_t)size[1]; j++) {
- if(!(orig_data[i][j] == orig_data[i][j]))
+ if(HDisnan(orig_data[i][j]))
continue; /* skip if value is NaN */
if(!H5_FLT_ABS_EQUAL(new_data[i][j], orig_data[i][j])) {
H5_FAILED();
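Under IEEE 754 the only value that compares unequal to itself is NaN, so `!(x == x)` and `HDisnan(x)` skip the same elements; the replacement simply states the intent directly instead of relying on self-comparison. A two-line illustration in plain C (HDisnan is assumed to map to the standard isnan):

    #include <assert.h>
    #include <math.h>

    float v = NAN;
    assert(!(v == v));   /* self-comparison is false only for NaN */
    assert(isnan(v));    /* equivalent test, stated directly      */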
@@ -3213,7 +3254,7 @@ test_nbit_double(hid_t file)
*/
for(i = 0; i < (size_t)size[0]; i++) {
for(j = 0; j < (size_t)size[1]; j++) {
- if(!(orig_data[i][j] == orig_data[i][j]))
+ if(HDisnan(orig_data[i][j]))
continue; /* skip if value is NaN */
if(!H5_DBL_ABS_EQUAL(new_data[i][j], orig_data[i][j])) {
H5_FAILED();
@@ -3548,7 +3589,7 @@ test_nbit_compound(hid_t file)
if(((unsigned)new_data[i][j].i & i_mask) != ((unsigned)orig_data[i][j].i & i_mask) ||
((unsigned)new_data[i][j].c & c_mask) != ((unsigned)orig_data[i][j].c & c_mask) ||
((unsigned)new_data[i][j].s & s_mask) != ((unsigned)orig_data[i][j].s & s_mask) ||
- (orig_data[i][j].f == orig_data[i][j].f && !H5_FLT_ABS_EQUAL(new_data[i][j].f, orig_data[i][j].f)))
+ (!HDisnan(orig_data[i][j].f) && !H5_FLT_ABS_EQUAL(new_data[i][j].f, orig_data[i][j].f)))
{
H5_FAILED();
HDprintf(" Read different values than written.\n");
@@ -3870,7 +3911,7 @@ test_nbit_compound_2(hid_t file)
if(((unsigned)new_data[i][j].d[m][n].i & i_mask) != ((unsigned)orig_data[i][j].d[m][n].i & i_mask)||
((unsigned)new_data[i][j].d[m][n].c & c_mask) != ((unsigned)orig_data[i][j].d[m][n].c & c_mask)||
((unsigned)new_data[i][j].d[m][n].s & s_mask) != ((unsigned)orig_data[i][j].d[m][n].s & s_mask)||
- (new_data[i][j].d[m][n].f == new_data[i][j].d[m][n].f && !H5_FLT_ABS_EQUAL(new_data[i][j].d[m][n].f, new_data[i][j].d[m][n].f))) {
+ (!HDisnan(new_data[i][j].d[m][n].f) && !H5_FLT_ABS_EQUAL(new_data[i][j].d[m][n].f, new_data[i][j].d[m][n].f))) {
d_failed = 1;
goto out;
}
@@ -3879,7 +3920,7 @@ out:
if(((unsigned)new_data[i][j].a.i & i_mask) != ((unsigned)orig_data[i][j].a.i & i_mask)||
((unsigned)new_data[i][j].a.c & c_mask) != ((unsigned)orig_data[i][j].a.c & c_mask)||
((unsigned)new_data[i][j].a.s & s_mask) != ((unsigned)orig_data[i][j].a.s & s_mask)||
- (new_data[i][j].a.f == new_data[i][j].a.f && !H5_FLT_ABS_EQUAL(new_data[i][j].a.f, new_data[i][j].a.f)) ||
+ (!HDisnan(new_data[i][j].a.f) && !H5_FLT_ABS_EQUAL(new_data[i][j].a.f, new_data[i][j].a.f)) ||
new_data[i][j].v != orig_data[i][j].v || b_failed || d_failed) {
H5_FAILED();
HDprintf(" Read different values than written.\n");
@@ -4135,149 +4176,164 @@ test_nbit_int_size(hid_t file)
hid_t dataspace, dataset, datatype, mem_datatype, dset_create_props;
hsize_t dims[2], chunk_size[2];
hsize_t dset_size = 0;
- int orig_data[DSET_DIM1][DSET_DIM2];
+ int **orig = NULL;
+ int *orig_data = NULL;
double power;
int i, j;
size_t precision, offset;
TESTING(" nbit integer dataset size");
- /* Define dataset datatype (integer), and set precision, offset */
- if((datatype = H5Tcopy(H5T_NATIVE_INT)) < 0) {
- H5_FAILED();
- HDprintf(" line %d: H5Tcopy failed\n",__LINE__);
- goto error;
- } /* end if */
+ /* Set up data array */
+ if(NULL == (orig_data = (int *)HDcalloc(DSET_DIM1 * DSET_DIM2, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (orig = (int **)HDcalloc(DSET_DIM1, sizeof(orig_data))))
+ TEST_ERROR;
+ for (i = 0; i < DSET_DIM1; i++)
+ orig[i] = orig_data + (i * DSET_DIM2);
- precision = 16; /* precision includes sign bit */
- if(H5Tset_precision(datatype,precision)<0) {
- H5_FAILED();
- HDprintf(" line %d: H5Pset_precision failed\n",__LINE__);
- goto error;
- } /* end if */
+ /* Define dataset datatype (integer), and set precision, offset */
+ if((datatype = H5Tcopy(H5T_NATIVE_INT)) < 0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Tcopy failed\n",__LINE__);
+ goto error;
+ }
- offset = 8;
- if(H5Tset_offset(datatype,offset)<0) {
- H5_FAILED();
- HDprintf(" line %d: H5Tset_offset failed\n",__LINE__);
- goto error;
- } /* end if */
+ precision = 16; /* precision includes sign bit */
+ if(H5Tset_precision(datatype,precision)<0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Pset_precision failed\n",__LINE__);
+ goto error;
+ }
- /* Copy to memory datatype */
- if((mem_datatype = H5Tcopy(datatype)) < 0) {
- H5_FAILED();
- HDprintf(" line %d: H5Tcopy failed\n",__LINE__);
- goto error;
- } /* end if */
+ offset = 8;
+ if(H5Tset_offset(datatype,offset)<0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Tset_offset failed\n",__LINE__);
+ goto error;
+ }
- /* Set order of dataset datatype */
- if(H5Tset_order(datatype, H5T_ORDER_BE)<0) {
+ /* Copy to memory datatype */
+ if((mem_datatype = H5Tcopy(datatype)) < 0) {
H5_FAILED();
- HDprintf(" line %d: H5Pset_order failed\n",__LINE__);
- goto error;
- } /* end if */
+ HDprintf(" line %d: H5Tcopy failed\n",__LINE__);
+ goto error;
+ }
- if(H5Tset_size(datatype, 4)<0) {
- H5_FAILED();
- HDprintf(" line %d: H5Pset_size failed\n",__LINE__);
- goto error;
- } /* end if */
+ /* Set order of dataset datatype */
+ if(H5Tset_order(datatype, H5T_ORDER_BE)<0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Pset_order failed\n",__LINE__);
+ goto error;
+ }
- /* Initiliaze data buffer with random data within correct range
- * corresponding to the memory datatype's precision and offset.
- */
- for (i=0; i < DSET_DIM1; i++)
- for (j=0; j < DSET_DIM2; j++) {
- power = HDpow(2.0F, (double)(precision-1));
- orig_data[i][j] = HDrandom() % (int)power << offset;
- } /* end for */
+ if(H5Tset_size(datatype, 4)<0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Pset_size failed\n",__LINE__);
+ goto error;
+ }
+ /* Initiliaze data buffer with random data within correct range
+ * corresponding to the memory datatype's precision and offset.
+ */
+ for (i=0; i < DSET_DIM1; i++)
+ for (j=0; j < DSET_DIM2; j++) {
+ power = HDpow(2.0F, (double)(precision-1));
+ orig[i][j] = HDrandom() % (int)power << offset;
+ }
- /* Describe the dataspace. */
- dims[0] = DSET_DIM1;
- dims[1] = DSET_DIM2;
- if((dataspace = H5Screate_simple (2, dims, NULL))<0) {
- H5_FAILED();
- HDprintf(" line %d: H5Pcreate failed\n",__LINE__);
- goto error;
- } /* end if */
- /*
- * Set the dataset creation property list to specify the chunks
- */
- chunk_size[0] = DSET_DIM1/10;
- chunk_size[1] = DSET_DIM2/10;
- if((dset_create_props = H5Pcreate (H5P_DATASET_CREATE))<0) {
- H5_FAILED();
- HDprintf(" line %d: H5Pcreate failed\n",__LINE__);
- goto error;
- } /* end if */
+ /* Describe the dataspace. */
+ dims[0] = DSET_DIM1;
+ dims[1] = DSET_DIM2;
+ if((dataspace = H5Screate_simple(2, dims, NULL))<0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Pcreate failed\n",__LINE__);
+ goto error;
+ }
- if(H5Pset_chunk (dset_create_props, 2, chunk_size)<0) {
- H5_FAILED();
- HDprintf(" line %d: H5Pset_chunk failed\n",__LINE__);
- goto error;
- } /* end if */
+ /*
+ * Set the dataset creation property list to specify the chunks
+ */
+ chunk_size[0] = DSET_DIM1/10;
+ chunk_size[1] = DSET_DIM2/10;
+ if((dset_create_props = H5Pcreate(H5P_DATASET_CREATE))<0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Pcreate failed\n",__LINE__);
+ goto error;
+ }
- /*
- * Set for n-bit compression
- */
- if(H5Pset_nbit (dset_create_props)<0) {
- H5_FAILED();
- HDprintf(" line %d: H5Pset_nbit failed\n",__LINE__);
- goto error;
- } /* end if */
+ if(H5Pset_chunk(dset_create_props, 2, chunk_size)<0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Pset_chunk failed\n",__LINE__);
+ goto error;
+ }
- /*
- * Create a new dataset within the file.
- */
- if((dataset = H5Dcreate2 (file, DSET_NBIT_INT_SIZE_NAME, datatype,
+ /*
+ * Set for n-bit compression
+ */
+ if(H5Pset_nbit(dset_create_props)<0) {
+ H5_FAILED();
+ HDprintf(" line %d: H5Pset_nbit failed\n",__LINE__);
+ goto error;
+ }
+
+ /*
+ * Create a new dataset within the file.
+ */
+ if((dataset = H5Dcreate2(file, DSET_NBIT_INT_SIZE_NAME, datatype,
dataspace, H5P_DEFAULT,
dset_create_props, H5P_DEFAULT))<0) {
- H5_FAILED();
- HDprintf(" line %d: H5dwrite failed\n",__LINE__);
- goto error;
- } /* end if */
+ H5_FAILED();
+ HDprintf(" line %d: H5dwrite failed\n",__LINE__);
+ goto error;
+ }
- /*
- * Write the array to the file.
- */
- if(H5Dwrite (dataset, mem_datatype, H5S_ALL, H5S_ALL,
- H5P_DEFAULT, orig_data)<0) {
- H5_FAILED();
- HDprintf(" Line %d: H5Dwrite failed\n",__LINE__);
- goto error;
- } /* end if */
+ /*
+ * Write the array to the file.
+ */
+ if(H5Dwrite(dataset, mem_datatype, H5S_ALL, H5S_ALL, H5P_DEFAULT, orig_data)<0) {
+ H5_FAILED();
+ HDprintf(" Line %d: H5Dwrite failed\n",__LINE__);
+ goto error;
+ }
- /*
- * Get the precision of the data type
- */
- if((precision = H5Tget_precision(datatype)) == 0) {
- H5_FAILED();
- HDprintf(" Line %d: wrong precision size: %zu\n",__LINE__, precision);
- goto error;
- } /* end if */
+ /*
+ * Get the precision of the data type
+ */
+ if((precision = H5Tget_precision(datatype)) == 0) {
+ H5_FAILED();
+ HDprintf(" Line %d: wrong precision size: %zu\n",__LINE__, precision);
+ goto error;
+ }
- /*
- * The size of the dataset after compression should around 2 * DSET_DIM1 * DSET_DIM2
- */
- if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
- dset_size > DSET_DIM1*DSET_DIM2*(precision/8) + 1*KB) {
- H5_FAILED();
- HDfprintf(stdout, " Line %d: wrong dataset size: %Hu\n",__LINE__, dset_size);
- goto error;
- } /* end if */
+ /*
+ * The size of the dataset after compression should around 2 * DSET_DIM1 * DSET_DIM2
+ */
+ if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
+ dset_size > DSET_DIM1*DSET_DIM2*(precision/8) + 1*KB) {
+ H5_FAILED();
+ HDfprintf(stdout, " Line %d: wrong dataset size: %Hu\n",__LINE__, dset_size);
+ goto error;
+ }
- H5Tclose (datatype);
- H5Tclose (mem_datatype);
- H5Dclose (dataset);
- H5Sclose (dataspace);
- H5Pclose (dset_create_props);
+ H5Tclose(datatype);
+ H5Tclose(mem_datatype);
+ H5Dclose(dataset);
+ H5Sclose(dataspace);
+ H5Pclose(dset_create_props);
+
+ HDfree(orig);
+ HDfree(orig_data);
PASSED();
- return SUCCEED;
+ return SUCCEED;
+
error:
+ HDfree(orig);
+ HDfree(orig_data);
+
return FAIL;
} /* end test_nbit_int_size() */
@@ -4303,13 +4359,22 @@ test_nbit_flt_size(hid_t file)
hid_t dataspace, dataset, datatype, dset_create_props;
hsize_t dims[2], chunk_size[2];
hsize_t dset_size = 0;
- float orig_data[DSET_DIM1][DSET_DIM2];
+ float **orig = NULL;
+ float *orig_data = NULL;
int i, j;
size_t precision, offset;
size_t spos, epos, esize, mpos, msize;
TESTING(" nbit floating-number dataset size");
+ /* Set up data array */
+ if(NULL == (orig_data = (float *)HDcalloc(DSET_DIM1 * DSET_DIM2, sizeof(float))))
+ TEST_ERROR;
+ if(NULL == (orig = (float **)HDcalloc(DSET_DIM1, sizeof(orig_data))))
+ TEST_ERROR;
+ for (i = 0; i < DSET_DIM1; i++)
+ orig[i] = orig_data + (i * DSET_DIM2);
+
/* Define floating-point type for dataset
*-------------------------------------------------------------------
* size=4 byte, precision=16 bits, offset=8 bits,
@@ -4388,7 +4453,7 @@ test_nbit_flt_size(hid_t file)
*/
for (i=0; i < DSET_DIM1; i++)
for (j=0; j < DSET_DIM2; j++)
- orig_data[i][j] = (float)(HDrandom() % 1234567) / 2;
+ orig[i][j] = (float)(HDrandom() % 1234567) / 2;
/* Describe the dataspace. */
@@ -5301,7 +5366,7 @@ test_types(hid_t file)
(space=H5Screate_simple(1, &nelmts, NULL)) < 0 ||
(dset=H5Dcreate2(grp, "bitfield_1", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
goto error;
- for(i=0; i<sizeof buf; i++) buf[i] = (unsigned char)(0xff ^ i);
+ for(i=0; i<sizeof buf; i++) buf[i] = (unsigned char)(0xff ^ i);
if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
goto error;
@@ -5315,7 +5380,7 @@ test_types(hid_t file)
(space=H5Screate_simple(1, &nelmts, NULL)) < 0 ||
(dset=H5Dcreate2(grp, "bitfield_2", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
goto error;
- for(i=0; i<sizeof buf; i++) buf[i] = (unsigned char)(0xff ^ i);
+ for(i=0; i<sizeof buf; i++) buf[i] = (unsigned char)(0xff ^ i);
if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
goto error;
if(H5Sclose(space) < 0) goto error;
@@ -5468,7 +5533,7 @@ test_can_apply(hid_t file)
} /* end if */
/* Write data */
- if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0) {
+ if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error writing dataset data\n",__LINE__);
goto error;
@@ -5496,7 +5561,7 @@ test_can_apply(hid_t file)
} /* end if */
/* Read data */
- if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check) < 0) {
+ if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error reading dataset data\n",__LINE__);
goto error;
@@ -5627,7 +5692,7 @@ test_can_apply2(hid_t file)
} /* end if */
/* Write data */
- if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0) {
+ if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error writing dataset data\n",__LINE__);
goto error;
@@ -5655,7 +5720,7 @@ test_can_apply2(hid_t file)
} /* end if */
/* Read data */
- if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check) < 0) {
+ if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error reading dataset data\n",__LINE__);
goto error;
@@ -5996,7 +6061,7 @@ test_set_local(hid_t fapl)
} /* end if */
/* Write data */
- if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0) {
+ if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error writing dataset data\n",__LINE__);
goto error;
@@ -6018,7 +6083,7 @@ test_set_local(hid_t fapl)
} /* end if */
/* Write data */
- if(H5Dwrite(dsid, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, points_dbl) < 0) {
+ if(H5Dwrite(dsid, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, points_dbl_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error writing dataset data\n",__LINE__);
goto error;
@@ -6081,7 +6146,7 @@ test_set_local(hid_t fapl)
} /* end if */
/* Read data */
- if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check) < 0) {
+ if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, check_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error reading dataset data\n", __LINE__);
goto error;
@@ -6131,7 +6196,7 @@ test_set_local(hid_t fapl)
} /* end if */
/* Read data */
- if(H5Dread(dsid, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, check_dbl) < 0) {
+ if(H5Dread(dsid, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, check_dbl_data) < 0) {
H5_FAILED();
HDprintf(" Line %d: Error reading dataset data\n", __LINE__);
goto error;
@@ -6864,10 +6929,12 @@ test_missing_chunk(hid_t file)
hsize_t hs_start2[2], hs_stride2[2], hs_count2[2], hs_block2[2];/* Hyperslab setting */
/* Buffers for reading/writing dataset */
- int wdata[MISSING_CHUNK_DIM],
- rdata[MISSING_CHUNK_DIM];
- int wdata2[MISSING_CHUNK_DIM][MISSING_CHUNK_DIM],
- rdata2[MISSING_CHUNK_DIM][MISSING_CHUNK_DIM];
+ int *wdata = NULL;
+ int *rdata = NULL;
+ int **wdata2 = NULL;
+ int **rdata2 = NULL;
+ int *wdata2_bytes = NULL;
+ int *rdata2_bytes = NULL;
/* Setting for 1-D dataset */
hsize_t dsize=100, dmax=H5S_UNLIMITED;
@@ -6884,6 +6951,26 @@ test_missing_chunk(hid_t file)
TESTING("Read dataset with unwritten chunk & undefined fill value");
+ /* Set up data arrays */
+ if(NULL == (wdata = (int *)HDcalloc(MISSING_CHUNK_DIM, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (rdata = (int *)HDcalloc(MISSING_CHUNK_DIM, sizeof(int))))
+ TEST_ERROR;
+
+ if(NULL == (wdata2_bytes = (int *)HDcalloc(MISSING_CHUNK_DIM * MISSING_CHUNK_DIM, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (wdata2 = (int **)HDcalloc(MISSING_CHUNK_DIM, sizeof(wdata2_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < MISSING_CHUNK_DIM; i++)
+ wdata2[i] = wdata2_bytes + (i * MISSING_CHUNK_DIM);
+
+ if(NULL == (rdata2_bytes = (int *)HDcalloc(MISSING_CHUNK_DIM * MISSING_CHUNK_DIM, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (rdata2 = (int **)HDcalloc(MISSING_CHUNK_DIM, sizeof(rdata2_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < MISSING_CHUNK_DIM; i++)
+ rdata2[i] = rdata2_bytes + (i * MISSING_CHUNK_DIM);
+
/* Get the file's file access property list */
if((fapl = H5Fget_access_plist(file)) < 0) TEST_ERROR;
@@ -6901,10 +6988,10 @@ test_missing_chunk(hid_t file)
/* Initialize data for 2-D dataset */
for(i = 0; i < MISSING_CHUNK_DIM; i++) {
- for(j = 0; j < MISSING_CHUNK_DIM; j++) {
- wdata2[i][j] = (int)(j + (i * MISSING_CHUNK_DIM));
- rdata2[i][j] = 911;
- }
+ for(j = 0; j < MISSING_CHUNK_DIM; j++) {
+ wdata2[i][j] = (int)(j + (i * MISSING_CHUNK_DIM));
+ rdata2[i][j] = 911;
+ }
} /* end for */
/* Create dataspace */
@@ -6962,11 +7049,11 @@ test_missing_chunk(hid_t file)
/* Write selected data to the datasets */
if(H5Dwrite(d, H5T_NATIVE_INT, s, s, H5P_DEFAULT, wdata) < 0) TEST_ERROR;
- if(H5Dwrite(did2, H5T_NATIVE_INT, sid2, sid2, H5P_DEFAULT, wdata2) < 0) TEST_ERROR;
+ if(H5Dwrite(did2, H5T_NATIVE_INT, sid2, sid2, H5P_DEFAULT, wdata2_bytes) < 0) TEST_ERROR;
/* Read all data from the datasets */
if(H5Dread(d, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata) < 0) TEST_ERROR;
- if(H5Dread(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata2) < 0) TEST_ERROR;
+ if(H5Dread(did2, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata2_bytes) < 0) TEST_ERROR;
/* Validata values read for the 1-D dataset */
for(u=0; u<MISSING_CHUNK_DIM; u++) {
@@ -7013,6 +7100,13 @@ test_missing_chunk(hid_t file)
if(H5Dclose(d) < 0) TEST_ERROR;
if(H5Dclose(did2) < 0) TEST_ERROR;
+ HDfree(rdata);
+ HDfree(wdata);
+ HDfree(rdata2);
+ HDfree(wdata2);
+ HDfree(rdata2_bytes);
+ HDfree(wdata2_bytes);
+
PASSED();
return SUCCEED;
@@ -7027,9 +7121,82 @@ error:
H5Sclose(s);
H5Sclose(sid2);
} H5E_END_TRY;
+
+ HDfree(rdata);
+ HDfree(wdata);
+ HDfree(rdata2);
+ HDfree(wdata2);
+ HDfree(rdata2_bytes);
+ HDfree(wdata2_bytes);
+
return FAIL;
} /* end test_missing_chunk() */
+/* Using Euclid's algorithm, find the greatest common divisor (GCD) of
+ * the two arguments and return it.
+ *
+ * The GCD is negative if the arguments have opposite sign. Otherwise,
+ * it is positive.
+ *
+ * If either argument is zero, then the result is undefined.
+ */
+static H5_ATTR_CONST long
+gcd(long l0, long r0)
+{
+ long magnitude, remainder;
+ bool negative = ((l0 < 0) != (r0 < 0));
+ long l = HDlabs(l0), r = HDlabs(r0);
+
+ do {
+ if (l < r) {
+ r = r % l;
+ remainder = r;
+ } else /* r <= l */ {
+ l = l % r;
+ remainder = l;
+ }
+ } while (remainder != 0);
+
+ magnitude = (l == 0) ? r : l;
+ return negative ? -magnitude : magnitude;
+}
+
+/* Choose a random offset into an array `nelts` elements long, and store
+ * it at `offsetp`. The offset will be in the range [0, nelts - 1].
+ * Also choose a random increment, `inc`, that "generates" all
+ * indices in [0, nelts - 1] when it is added to itself repeatedly.
+ * That is, the range of the discrete function `f(i) = (i * inc)
+ * mod nelts` on the domain [0, nelts - 1] is [0, nelts - 1]. Store
+ * `inc` at `incp`.
+ *
+ * If `nelts <= 0`, results are undefined.
+ */
+static void
+make_random_offset_and_increment(long nelts, long *offsetp, long *incp)
+{
+ long inc;
+ long maxinc;
+
+ HDassert(0 < nelts);
+
+ *offsetp = HDrandom() % nelts;
+
+ /* `maxinc` is chosen so that for any `x` in [0, nelts - 1],
+ * `x + maxinc` does not overflow a long.
+ */
+ maxinc = MIN(nelts - 1, LONG_MAX - nelts);
+
+ /* Choose a random number in [1, nelts - 1]. If its greatest divisor
+ * in common with `nelts` is 1, then it will "generate" the additive ring
+ * [0, nelts - 1], so let it be our increment. Otherwise, choose a new
+ * number.
+ */
+ do {
+ inc = 1 + HDrandom() % maxinc;
+ } while (gcd(inc, nelts) != 1);
+
+ *incp = inc;
+}
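Because the chosen increment is coprime with `nelts`, repeatedly adding it modulo `nelts` visits every index in [0, nelts - 1] exactly once before the sequence repeats; that is what lets the callers below pick one distinct chunk per iteration without the old rejection-sampling retry loop. A small standalone check of the full-cycle property with illustrative values (not part of the patch):

    #include <assert.h>

    /* nelts = 10, inc = 3 (gcd(3, 10) == 1), starting offset 7 yields
     *   7, 0, 3, 6, 9, 2, 5, 8, 1, 4  -- each index appears exactly once. */
    long nelts = 10, inc = 3, ofs = 7;
    int  seen[10] = {0};
    for (long i = 0; i < nelts; i++) {
        seen[ofs]++;
        ofs = (ofs + inc) % nelts;
    }
    for (long i = 0; i < nelts; i++)
        assert(seen[i] == 1);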
/*-------------------------------------------------------------------------
* Function: test_random_chunks_real
@@ -7054,7 +7221,7 @@ test_random_chunks_real(const char *testname, hbool_t early_alloc, hid_t fapl)
rbuf[NPOINTS],
check2[20][20];
hsize_t coord[NPOINTS][2];
- hsize_t dsize[2]={100,100}, dmax[2]={H5S_UNLIMITED, H5S_UNLIMITED}, csize[2]={10,10}, nsize[2]={200,200};
+ const hsize_t dsize[2]={100,100}, dmax[2]={H5S_UNLIMITED, H5S_UNLIMITED}, csize[2]={10,10}, nsize[2]={200,200};
hsize_t fixed_dmax[2] = {1000, 1000};
hsize_t msize[1]={NPOINTS};
const char dname[]="dataset";
@@ -7062,7 +7229,9 @@ test_random_chunks_real(const char *testname, hbool_t early_alloc, hid_t fapl)
size_t i, j;
H5D_chunk_index_t idx_type; /* Dataset chunk index type */
H5F_libver_t low; /* File format low bound */
-
+ long ofs, inc;
+ long rows;
+ long cols;
TESTING(testname);
@@ -7096,12 +7265,16 @@ test_random_chunks_real(const char *testname, hbool_t early_alloc, hid_t fapl)
for(j=0; j<dsize[1]/csize[1]; j++)
check2[i][j] = 0;
+ rows = (long)(dsize[0]/csize[0]);
+ cols = (long)(dsize[1]/csize[1]);
+ make_random_offset_and_increment(rows * cols, &ofs, &inc);
+
/* Generate random point coordinates. Only one point is selected per chunk */
for(i=0; i<NPOINTS; i++){
- do {
- chunk_row = (int)HDrandom () % (int)(dsize[0]/csize[0]);
- chunk_col = (int)HDrandom () % (int)(dsize[1]/csize[1]);
- } while (check2[chunk_row][chunk_col]);
+ H5_CHECKED_ASSIGN(chunk_row, int, ofs / cols, long);
+ H5_CHECKED_ASSIGN(chunk_col, int, ofs % cols, long);
+ ofs = (ofs + inc) % (rows * cols);
+ HDassert(!check2[chunk_row][chunk_col]);
wbuf[i] = check2[chunk_row][chunk_col] = chunk_row+chunk_col+1;
coord[i][0] = (hsize_t)chunk_row * csize[0];
@@ -7219,12 +7392,16 @@ test_random_chunks_real(const char *testname, hbool_t early_alloc, hid_t fapl)
for(j = 0; j < nsize[1] / csize[1]; j++)
check2[i][j] = 0;
+ H5_CHECKED_ASSIGN(rows, int, nsize[0] / csize[0], long);
+ H5_CHECKED_ASSIGN(cols, int, nsize[1] / csize[1], long);
+ make_random_offset_and_increment(rows * cols, &ofs, &inc);
+
/* Generate random point coordinates. Only one point is selected per chunk */
for(i = 0; i < NPOINTS; i++){
- do {
- chunk_row = (int)HDrandom() % (int)(nsize[0] / csize[0]);
- chunk_col = (int)HDrandom() % (int)(nsize[1] / csize[1]);
- } while (check2[chunk_row][chunk_col]);
+ H5_CHECKED_ASSIGN(chunk_row, int, ofs / cols, long);
+ H5_CHECKED_ASSIGN(chunk_col, int, ofs % cols, long);
+ ofs = (ofs + inc) % (rows * cols);
+ HDassert(!check2[chunk_row][chunk_col]);
wbuf[i] = check2[chunk_row][chunk_col] = chunk_row + chunk_col + 1;
coord[i][0] = (hsize_t)chunk_row * csize[0];
@@ -7325,12 +7502,16 @@ test_random_chunks_real(const char *testname, hbool_t early_alloc, hid_t fapl)
for(j = 0; j < nsize[1] / csize[1]; j++)
check2[i][j] = 0;
+ rows = (long)(nsize[0] / csize[0]);
+ cols = (long)(nsize[1] / csize[1]);
+ make_random_offset_and_increment(rows * cols, &ofs, &inc);
+
/* Generate random point coordinates. Only one point is selected per chunk */
for(i = 0; i < NPOINTS; i++){
- do {
- chunk_row = (int)HDrandom() % (int)(nsize[0] / csize[0]);
- chunk_col = (int)HDrandom() % (int)(nsize[1] / csize[1]);
- } while (check2[chunk_row][chunk_col]);
+ H5_CHECKED_ASSIGN(chunk_row, int, ofs / cols, long);
+ H5_CHECKED_ASSIGN(chunk_col, int, ofs % cols, long);
+ ofs = (ofs + inc) % (rows * cols);
+ HDassert(!check2[chunk_row][chunk_col]);
wbuf[i] = check2[chunk_row][chunk_col] = chunk_row + chunk_col + 1;
coord[i][0] = (hsize_t)chunk_row * csize[0];
@@ -7476,10 +7657,10 @@ static herr_t
test_deprec(hid_t file)
{
hid_t dataset, space, small_space, create_parms, dcpl;
- hsize_t dims[2], small_dims[2];
- hsize_t deprec_size;
- herr_t status;
- hsize_t csize[2];
+ hsize_t dims[2], small_dims[2];
+ hsize_t deprec_size;
+ herr_t H5_ATTR_NDEBUG_UNUSED status;
+ hsize_t csize[2];
TESTING("deprecated API routines");
@@ -8006,9 +8187,13 @@ test_big_chunks_bypass_cache(hid_t fapl)
hsize_t t_count[2], t_stride[2], t_offset[2], t_block[2]; /* Setting for hyperslab (2-D) */
/* Buffers for reading and writing data (1-D) */
int *wdata = NULL, *rdata1 = NULL, *rdata2 = NULL;
- /* Buffer for reading and writing data (2-D) */
- static int t_wdata[BYPASS_CHUNK_DIM/2][BYPASS_CHUNK_DIM/2], t_rdata1[BYPASS_DIM][BYPASS_DIM],
- t_rdata2[BYPASS_CHUNK_DIM/2][BYPASS_CHUNK_DIM/2];
+ /* Buffers for reading and writing data (2-D) */
+ int **t_wdata = NULL;
+ int **t_rdata1 = NULL;
+ int **t_rdata2 = NULL;
+ int *t_wdata_bytes = NULL;
+ int *t_rdata1_bytes = NULL;
+ int *t_rdata2_bytes = NULL;
int i, j; /* Local index variables */
H5F_libver_t low; /* File format low bound */
H5D_chunk_index_t idx_type, t_idx_type; /* Dataset chunk index types */
@@ -8018,6 +8203,29 @@ test_big_chunks_bypass_cache(hid_t fapl)
h5_fixname(FILENAME[9], fapl, filename, sizeof filename);
+ /* Set up data arrays */
+ if(NULL == (t_wdata_bytes = (int *)HDcalloc((BYPASS_CHUNK_DIM/2) * (BYPASS_CHUNK_DIM/2), sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (t_wdata = (int **)HDcalloc((BYPASS_CHUNK_DIM/2), sizeof(t_wdata_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < (BYPASS_CHUNK_DIM/2); i++)
+ t_wdata[i] = t_wdata_bytes + (i * (BYPASS_CHUNK_DIM/2));
+
+ if(NULL == (t_rdata1_bytes = (int *)HDcalloc(BYPASS_DIM * BYPASS_DIM, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (t_rdata1 = (int **)HDcalloc(BYPASS_DIM, sizeof(t_rdata1_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < BYPASS_DIM; i++)
+ t_rdata1[i] = t_rdata1_bytes + (i * BYPASS_DIM);
+
+ if(NULL == (t_rdata2_bytes = (int *)HDcalloc((BYPASS_CHUNK_DIM/2) * (BYPASS_CHUNK_DIM/2), sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (t_rdata2 = (int **)HDcalloc((BYPASS_CHUNK_DIM/2), sizeof(t_rdata2_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < (BYPASS_CHUNK_DIM/2); i++)
+ t_rdata2[i] = t_rdata2_bytes + (i * (BYPASS_CHUNK_DIM/2));
+
+
/* Check if we are using the latest version of the format */
if(H5Pget_libver_bounds(fapl, &low, NULL) < 0) FAIL_STACK_ERROR
@@ -8106,8 +8314,8 @@ test_big_chunks_bypass_cache(hid_t fapl)
/* Initialize data to write for 2-D dataset */
for(i = 0; i < BYPASS_CHUNK_DIM / 2; i++)
- for(j = 0; j < BYPASS_CHUNK_DIM / 2; j++)
- t_wdata[i][j] = j;
+ for(j = 0; j < BYPASS_CHUNK_DIM / 2; j++)
+ t_wdata[i][j] = j;
/* Set up memory space for the 2-D dataset */
mid = H5Screate_simple(2, t_block, NULL);
@@ -8116,7 +8324,7 @@ test_big_chunks_bypass_cache(hid_t fapl)
/* This write should go through the cache because fill value is used. */
if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, wdata) < 0)
FAIL_STACK_ERROR
- if(H5Dwrite(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_wdata) < 0)
+ if(H5Dwrite(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_wdata_bytes) < 0)
FAIL_STACK_ERROR
/* Close the first 1-D & 2-D datasets */
@@ -8131,7 +8339,7 @@ test_big_chunks_bypass_cache(hid_t fapl)
* chunk is bigger than the cache size and it isn't allocated on disk. */
if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata1) < 0)
FAIL_STACK_ERROR
- if(H5Dread(t_dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, t_rdata1) < 0)
+ if(H5Dread(t_dsid, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, t_rdata1_bytes) < 0)
FAIL_STACK_ERROR
/* Verify data for the first 1-D dataset */
@@ -8162,8 +8370,8 @@ test_big_chunks_bypass_cache(hid_t fapl)
for(i = BYPASS_CHUNK_DIM / 2; i < BYPASS_DIM; i++)
for(j = BYPASS_CHUNK_DIM / 2; j < BYPASS_DIM; j++)
if(t_rdata1[i][j] != fvalue) {
- HDprintf(" Read different values than written in the 2nd chunk.\n");
- HDprintf(" At line %d and index (%d, %d), t_rdata1 = %d. It should be %d.\n",
+ HDprintf(" Read different values than written in the 2nd chunk.\n");
+ HDprintf(" At line %d and index (%d, %d), t_rdata1 = %d. It should be %d.\n",
__LINE__, i, j, t_rdata1[i][j], fvalue);
TEST_ERROR
} /* end if */
@@ -8187,7 +8395,7 @@ test_big_chunks_bypass_cache(hid_t fapl)
/* Write to the second 1-D & 2-D dataset */
if(H5Dwrite(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, wdata) < 0)
FAIL_STACK_ERROR
- if(H5Dwrite(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_wdata) < 0)
+ if(H5Dwrite(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_wdata_bytes) < 0)
FAIL_STACK_ERROR
/* Close the second 1-D & 2-D dataset */
@@ -8203,7 +8411,7 @@ test_big_chunks_bypass_cache(hid_t fapl)
* the cache size. */
if(H5Dread(dsid, H5T_NATIVE_INT, H5S_ALL, sid, H5P_DEFAULT, rdata2) < 0)
FAIL_STACK_ERROR
- if(H5Dread(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_rdata2) < 0)
+ if(H5Dread(t_dsid, H5T_NATIVE_INT, mid, t_sid, H5P_DEFAULT, t_rdata2_bytes) < 0)
FAIL_STACK_ERROR
/* Verify data for the second 1-D dataset */
@@ -8238,6 +8446,12 @@ test_big_chunks_bypass_cache(hid_t fapl)
HDfree(wdata);
HDfree(rdata1);
HDfree(rdata2);
+ HDfree(t_wdata);
+ HDfree(t_rdata1);
+ HDfree(t_rdata2);
+ HDfree(t_wdata_bytes);
+ HDfree(t_rdata1_bytes);
+ HDfree(t_rdata2_bytes);
PASSED();
return SUCCEED;
@@ -8253,12 +8467,17 @@ error:
H5Sclose(t_sid);
H5Fclose(fid);
} H5E_END_TRY;
- if(wdata)
- HDfree(wdata);
- if(rdata1)
- HDfree(rdata1);
- if(rdata2)
- HDfree(rdata2);
+
+ HDfree(wdata);
+ HDfree(rdata1);
+ HDfree(rdata2);
+ HDfree(t_wdata);
+ HDfree(t_rdata1);
+ HDfree(t_rdata2);
+ HDfree(t_wdata_bytes);
+ HDfree(t_rdata1_bytes);
+ HDfree(t_rdata2_bytes);
+
return FAIL;
} /* end test_big_chunks_bypass_cache() */
@@ -8760,13 +8979,33 @@ test_chunk_fast_bug1(hid_t fapl)
hid_t dsid = -1; /* Dataset ID */
hsize_t dim[2], max_dim[2], chunk_dim[2]; /* Dataset and chunk dimensions */
H5D_alloc_time_t alloc_time; /* Storage allocation time */
- static unsigned wbuf[40][20], rbuf[40][20]; /* Element written/read */
+
+ unsigned **wbuf = NULL;
+ unsigned **rbuf = NULL;
+ unsigned *wbuf_bytes = NULL;
+ unsigned *rbuf_bytes = NULL;
+
unsigned i, j; /* Local index variables */
TESTING("datasets w/extensible array chunk indexing bug");
h5_fixname(FILENAME[10], fapl, filename, sizeof filename);
+ /* Set up data array */
+ if(NULL == (wbuf_bytes = (unsigned *)HDcalloc(40 * 20, sizeof(unsigned))))
+ TEST_ERROR;
+ if(NULL == (wbuf = (unsigned **)HDcalloc(40, sizeof(wbuf_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < 40; i++)
+ wbuf[i] = wbuf_bytes + (i * 20);
+
+ if(NULL == (rbuf_bytes = (unsigned *)HDcalloc(40 * 20, sizeof(unsigned))))
+ TEST_ERROR;
+ if(NULL == (rbuf = (unsigned **)HDcalloc(40, sizeof(rbuf_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < 40; i++)
+ rbuf[i] = rbuf_bytes + (i * 20);
+
/* Initialize write buffer */
for(i=0; i<40; i++)
for(j=0; j<20; j++)
@@ -8800,7 +9039,7 @@ test_chunk_fast_bug1(hid_t fapl)
FAIL_STACK_ERROR
/* Write buffer to dataset */
- if(H5Dwrite(dsid, H5T_NATIVE_UINT, sid, sid, H5P_DEFAULT, &wbuf) < 0)
+ if(H5Dwrite(dsid, H5T_NATIVE_UINT, sid, sid, H5P_DEFAULT, wbuf_bytes) < 0)
FAIL_STACK_ERROR
/* Close everything */
@@ -8810,7 +9049,7 @@ test_chunk_fast_bug1(hid_t fapl)
if((dsid = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
/* Read from dataset */
- if(H5Dread(dsid, H5T_NATIVE_UINT, sid, sid, H5P_DEFAULT, &rbuf) < 0)
+ if(H5Dread(dsid, H5T_NATIVE_UINT, sid, sid, H5P_DEFAULT, rbuf_bytes) < 0)
FAIL_STACK_ERROR
/* Verify read data */
@@ -8827,6 +9066,11 @@ test_chunk_fast_bug1(hid_t fapl)
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ HDfree(wbuf);
+ HDfree(rbuf);
+ HDfree(wbuf_bytes);
+ HDfree(rbuf_bytes);
+
PASSED();
return SUCCEED;
@@ -8837,6 +9081,12 @@ error:
H5Sclose(sid);
H5Fclose(fid);
} H5E_END_TRY;
+
+ HDfree(wbuf);
+ HDfree(rbuf);
+ HDfree(wbuf_bytes);
+ HDfree(rbuf_bytes);
+
return FAIL;
} /* end test_chunk_fast_bug1() */
@@ -9346,7 +9596,7 @@ test_fixed_array(hid_t fapl)
hid_t dsid_max = -1; /* Dataset ID for dataset with maximum dimensions set */
hsize_t dim2[2] = {48, 18}; /* Dataset dimensions */
- hsize_t dim2_big[2] = {500, 60}; /* Big dataset dimensions */
+ const hsize_t dim2_big[2] = {500, 60}; /* Big dataset dimensions */
hsize_t dim2_max[2] = {120, 50}; /* Maximum dataset dimensions */
hid_t mem_id; /* Memory space ID */
@@ -9360,14 +9610,20 @@ test_fixed_array(hid_t fapl)
int rbuf[POINTS]; /* read buffer */
int *rbuf_big = NULL; /* read buffer for big dataset */
- hsize_t chunk_dim2[2] = {4, 3}; /* Chunk dimensions */
- int chunks[12][6]; /* # of chunks for dataset dimensions */
- int chunks_big[125][20]; /* # of chunks for big dataset dimensions */
+ const hsize_t chunk_dim2[2] = {4, 3}; /* Chunk dimensions */
+
+ int **chunks = NULL; /* # of chunks for dataset dimensions */
+ int **chunks_big = NULL; /* # of chunks for big dataset dimensions */
+ int *chunks_bytes = NULL;
+ int *chunks_big_bytes = NULL;
+
int chunk_row; /* chunk row index */
int chunk_col; /* chunk column index */
- hsize_t coord[POINTS][2]; /* datdaset coordinates */
- hsize_t coord_big[POINTS_BIG][2]; /* big dataset coordinates */
+ hsize_t **coord = NULL; /* datdaset coordinates */
+ hsize_t **coord_big = NULL; /* big datdaset coordinates */
+ hsize_t *coord_bytes = NULL;
+ hsize_t *coord_big_bytes = NULL;
H5D_chunk_index_t idx_type; /* Dataset chunk index type */
H5F_libver_t low, high; /* File format bounds */
@@ -9382,11 +9638,43 @@ test_fixed_array(hid_t fapl)
size_t i, j; /* local index variables */
herr_t ret; /* Generic return value */
+ long ofs, inc;
+ long rows;
+ long cols;
TESTING("datasets w/fixed array as chunk index");
h5_fixname(FILENAME[12], fapl, filename, sizeof filename);
+ /* Set up 2D data arrays */
+ if(NULL == (chunks_bytes = (int *)HDcalloc(12 * 6, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (chunks = (int **)HDcalloc(12, sizeof(chunks_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < 12; i++)
+ chunks[i] = chunks_bytes + (i * 6);
+
+ if(NULL == (chunks_big_bytes = (int *)HDcalloc(125 * 20, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (chunks_big = (int **)HDcalloc(125, sizeof(chunks_big_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < 125; i++)
+ chunks_big[i] = chunks_big_bytes + (i * 20);
+
+ if(NULL == (coord_bytes = (hsize_t *)HDcalloc(POINTS * 2, sizeof(hsize_t))))
+ TEST_ERROR;
+ if(NULL == (coord = (hsize_t **)HDcalloc(POINTS, sizeof(coord_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < POINTS; i++)
+ coord[i] = coord_bytes + (i * 2);
+
+ if(NULL == (coord_big_bytes = (hsize_t *)HDcalloc(POINTS_BIG * 2, sizeof(hsize_t))))
+ TEST_ERROR;
+ if(NULL == (coord_big = (hsize_t **)HDcalloc(POINTS_BIG, sizeof(coord_big_bytes))))
+ TEST_ERROR;
+ for (i = 0; i < POINTS_BIG; i++)
+ coord_big[i] = coord_big_bytes + (i * 2);
+
/* Check if we are using the latest version of the format */
if(H5Pget_libver_bounds(fapl, &low, &high) < 0) FAIL_STACK_ERROR
@@ -9440,16 +9728,20 @@ test_fixed_array(hid_t fapl)
for(j = 0; j < dim2[1]/chunk_dim2[1]; j++)
chunks[i][j] = 0;
+ rows = (long)(dim2[0]/chunk_dim2[0]);
+ cols = (long)(dim2[1]/chunk_dim2[1]);
+ make_random_offset_and_increment(rows * cols, &ofs, &inc);
+
/* Generate random point coordinates. Only one point is selected per chunk */
for(i = 0; i < POINTS; i++){
- do {
- chunk_row = (int)HDrandom () % (int)(dim2[0]/chunk_dim2[0]);
- chunk_col = (int)HDrandom () % (int)(dim2[1]/chunk_dim2[1]);
- } while (chunks[chunk_row][chunk_col]);
-
- wbuf[i] = chunks[chunk_row][chunk_col] = chunk_row+chunk_col+1;
- coord[i][0] = (hsize_t)chunk_row * chunk_dim2[0];
- coord[i][1] = (hsize_t)chunk_col * chunk_dim2[1];
+ H5_CHECKED_ASSIGN(chunk_row, int, ofs / cols, long);
+ H5_CHECKED_ASSIGN(chunk_col, int, ofs % cols, long);
+ ofs = (ofs + inc) % (rows * cols);
+ HDassert(!chunks[chunk_row][chunk_col]);
+
+ wbuf[i] = chunks[chunk_row][chunk_col] = chunk_row+chunk_col+1;
+ coord[i][0] = (hsize_t)chunk_row * chunk_dim2[0];
+ coord[i][1] = (hsize_t)chunk_col * chunk_dim2[1];
} /* end for */
/* Create first dataset with cur and max dimensions */
@@ -9482,7 +9774,7 @@ test_fixed_array(hid_t fapl)
if((mem_id = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
/* Select the random points for writing */
- if(H5Sselect_elements(sid_max, H5S_SELECT_SET, POINTS, (const hsize_t *)coord) < 0)
+ if(H5Sselect_elements(sid_max, H5S_SELECT_SET, POINTS, (const hsize_t *)coord_bytes) < 0)
TEST_ERROR;
/* Write into dataset */
@@ -9523,7 +9815,7 @@ test_fixed_array(hid_t fapl)
if((mem_id = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
/* Select the random points for writing */
- if(H5Sselect_elements(sid, H5S_SELECT_SET, POINTS, (const hsize_t *)coord) < 0)
+ if(H5Sselect_elements(sid, H5S_SELECT_SET, POINTS, (const hsize_t *)coord_bytes) < 0)
TEST_ERROR;
/* Write into dataset */
@@ -9565,23 +9857,27 @@ test_fixed_array(hid_t fapl)
for(j = 0; j < dim2_big[1]/chunk_dim2[1]; j++)
chunks_big[i][j] = 0;
+ rows = (long)(dim2_big[0]/chunk_dim2[0]);
+ cols = (long)(dim2_big[1]/chunk_dim2[1]);
+ make_random_offset_and_increment(rows * cols, &ofs, &inc);
+
/* Generate random point coordinates. Only one point is selected per chunk */
for(i = 0; i < POINTS_BIG; i++){
- do {
- chunk_row = (int)HDrandom () % (int)(dim2_big[0]/chunk_dim2[0]);
- chunk_col = (int)HDrandom () % (int)(dim2_big[1]/chunk_dim2[1]);
- } while (chunks_big[chunk_row][chunk_col]);
-
- wbuf_big[i] = chunks_big[chunk_row][chunk_col] = chunk_row+chunk_col+1;
- coord_big[i][0] = (hsize_t)chunk_row * chunk_dim2[0];
- coord_big[i][1] = (hsize_t)chunk_col * chunk_dim2[1];
+ H5_CHECKED_ASSIGN(chunk_row, int, ofs / cols, long);
+ H5_CHECKED_ASSIGN(chunk_col, int, ofs % cols, long);
+ ofs = (ofs + inc) % (rows * cols);
+ HDassert(!chunks_big[chunk_row][chunk_col]);
+
+ wbuf_big[i] = chunks_big[chunk_row][chunk_col] = chunk_row+chunk_col+1;
+ coord_big[i][0] = (hsize_t)chunk_row * chunk_dim2[0];
+ coord_big[i][1] = (hsize_t)chunk_col * chunk_dim2[1];
} /* end for */
/* Create dataspace for write buffer */
if((big_mem_id = H5Screate_simple(1, msize_big, NULL)) < 0) TEST_ERROR;
/* Select the random points for writing */
- if(H5Sselect_elements(sid_big, H5S_SELECT_SET, POINTS_BIG, (const hsize_t *)coord_big) < 0)
+ if(H5Sselect_elements(sid_big, H5S_SELECT_SET, POINTS_BIG, (const hsize_t *)coord_big_bytes) < 0)
TEST_ERROR;
/* Write into dataset */
@@ -9603,7 +9899,7 @@ test_fixed_array(hid_t fapl)
if((mem_id = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
/* Select the random points for reading */
- if(H5Sselect_elements (sid, H5S_SELECT_SET, POINTS, (const hsize_t *)coord) < 0) TEST_ERROR;
+ if(H5Sselect_elements (sid, H5S_SELECT_SET, POINTS, (const hsize_t *)coord_bytes) < 0) TEST_ERROR;
/* Read from dataset */
if(H5Dread(dsid, H5T_NATIVE_INT, mem_id, sid, H5P_DEFAULT, rbuf) < 0) TEST_ERROR;
@@ -9631,7 +9927,7 @@ test_fixed_array(hid_t fapl)
if((mem_id = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
/* Select the random points for reading */
- if(H5Sselect_elements (sid, H5S_SELECT_SET, POINTS, (const hsize_t *)coord) < 0) TEST_ERROR;
+ if(H5Sselect_elements (sid, H5S_SELECT_SET, POINTS, (const hsize_t *)coord_bytes) < 0) TEST_ERROR;
/* Read from dataset */
if(H5Dread(dsid, H5T_NATIVE_INT, mem_id, sid, H5P_DEFAULT, rbuf) < 0) TEST_ERROR;
@@ -9658,7 +9954,7 @@ test_fixed_array(hid_t fapl)
if((big_mem_id = H5Screate_simple(1, msize_big, NULL)) < 0) TEST_ERROR;
/* Select the random points for reading */
- if(H5Sselect_elements (sid_big, H5S_SELECT_SET, POINTS_BIG, (const hsize_t *)coord_big) < 0) TEST_ERROR;
+ if(H5Sselect_elements (sid_big, H5S_SELECT_SET, POINTS_BIG, (const hsize_t *)coord_big_bytes) < 0) TEST_ERROR;
/* Read from dataset */
if(H5Dread(dsid_big, H5T_NATIVE_INT, big_mem_id, sid_big, H5P_DEFAULT, rbuf_big) < 0) TEST_ERROR;
@@ -9700,6 +9996,15 @@ test_fixed_array(hid_t fapl)
HDfree(wbuf_big);
HDfree(rbuf_big);
+ HDfree(chunks);
+ HDfree(chunks_big);
+ HDfree(coord);
+ HDfree(coord_big);
+ HDfree(chunks_bytes);
+ HDfree(chunks_big_bytes);
+ HDfree(coord_bytes);
+ HDfree(coord_big_bytes);
+
PASSED();
return SUCCEED;
@@ -9711,10 +10016,18 @@ error:
H5Sclose(mem_id);
H5Fclose(fid);
} H5E_END_TRY;
- if(wbuf_big)
- HDfree(wbuf_big);
- if(rbuf_big)
- HDfree(rbuf_big);
+
+ HDfree(wbuf_big);
+ HDfree(rbuf_big);
+ HDfree(chunks);
+ HDfree(chunks_big);
+ HDfree(coord);
+ HDfree(coord_big);
+ HDfree(chunks_bytes);
+ HDfree(chunks_big_bytes);
+ HDfree(coord_bytes);
+ HDfree(coord_big_bytes);
+
return FAIL;
} /* end test_fixed_array() */
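
The replacement of the do/while rejection loops above with make_random_offset_and_increment() relies on a full-cycle walk over the chunks: starting from a random offset and repeatedly adding an increment that is coprime to rows * cols visits every chunk exactly once before repeating, which is why the loop can assert that chunks[chunk_row][chunk_col] is still unused instead of retrying. The helper itself is not shown in this hunk; below is a sketch of one way such a helper could work. The sketch_random_offset_and_increment and gcd_long names are illustrative, and the real make_random_offset_and_increment() in test/dsets.c may differ in details such as range limits and overflow handling.

/* Illustrative sketch only: pick a random starting offset and an increment
 * coprime to nelts, so that ofs, ofs+inc, ofs+2*inc, ... (mod nelts) visits
 * every value in [0, nelts) exactly once per cycle. */
#include <assert.h>
#include <stdlib.h>

static long
gcd_long(long a, long b)
{
    while (b != 0) {
        long t = a % b;
        a = b;
        b = t;
    }
    return a;
}

static void
sketch_random_offset_and_increment(long nelts, long *offsetp, long *incp)
{
    long inc;

    assert(nelts > 1);

    *offsetp = rand() % nelts;

    /* Any increment in [1, nelts) with gcd(inc, nelts) == 1 generates the
     * whole additive group of integers modulo nelts. */
    inc = 1 + rand() % (nelts - 1);
    while (gcd_long(inc, nelts) != 1)
        inc = 1 + rand() % (nelts - 1);

    *incp = inc;
}

int main(void)
{
    long ofs, inc, i, n = 12 * 6;   /* e.g. rows * cols for the small dataset */

    sketch_random_offset_and_increment(n, &ofs, &inc);
    for (i = 0; i < n; i++) {
        /* use ofs as the linear chunk index: row = ofs / cols, col = ofs % cols;
         * each of the n slots is visited exactly once */
        ofs = (ofs + inc) % n;
    }
    return 0;
}
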
@@ -12930,7 +13243,6 @@ test_versionbounds(void)
hid_t vspace = -1; /* Virtual dset dataspaces */
hid_t srcdset = -1; /* Source dataset */
hid_t vdset = -1; /* Virtual dataset */
- hid_t null_dspace = -1; /* Data space of H5S_NULL */
hsize_t dims[1] = {3}; /* Data space current size */
char srcfilename[FILENAME_BUF_SIZE];
char vfilename1[FILENAME_BUF_SIZE];
@@ -13067,7 +13379,7 @@ test_versionbounds(void)
*-----------------------------------------------------------------------------
*/
static herr_t
-test_object_header_minimization_dcpl(hid_t fapl_id)
+test_object_header_minimization_dcpl(void)
{
hid_t dcpl_id = -1;
hid_t file_id = -1;
@@ -13081,10 +13393,10 @@ test_object_header_minimization_dcpl(hid_t fapl_id)
/* SETUP */
/*********/
- if(NULL == h5_fixname(OHMIN_FILENAME_A, fapl_id, filename, sizeof(filename)))
+ if(NULL == h5_fixname(OHMIN_FILENAME_A, H5P_DEFAULT, filename, sizeof(filename)))
TEST_ERROR
- file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
+ file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
if (file_id == H5I_INVALID_HID)
TEST_ERROR
@@ -13169,6 +13481,197 @@ error:
} /* end test_object_header_minimization_dcpl() */
+/*-----------------------------------------------------------------------------
+ * Function: test_0sized_dset_metadata_alloc
+ *
+ * Purpose: Tests the metadata allocation for 0-sized datasets.
+ *
+ * Return: Success/pass: 0
+ * Failure/error: -1
+ *
+ * Programmer: Quincey Koziol
+ * 2020 April 30
+ *
+ *-----------------------------------------------------------------------------
+ */
+static herr_t
+test_0sized_dset_metadata_alloc(hid_t fapl_id)
+{
+ char filename[FILENAME_BUF_SIZE] = "";
+ hid_t file_id = H5I_INVALID_HID;
+ hid_t fapl_id_copy = H5I_INVALID_HID;
+ hid_t dset_id = H5I_INVALID_HID;
+ hid_t dcpl_id = H5I_INVALID_HID;
+ hid_t dcpl_id_copy = H5I_INVALID_HID;
+ hid_t dset_space_id = H5I_INVALID_HID;
+ hid_t buf_space_id = H5I_INVALID_HID;
+ unsigned new_format; /* Whether to use latest file format */
+
+ TESTING("allocation of metadata for 0-sized datasets");
+
+ /*********/
+ /* SETUP */
+ /*********/
+
+ if(NULL == h5_fixname(FILENAME[26], fapl_id, filename, sizeof(filename)))
+ FAIL_STACK_ERROR
+
+ /* Create DCPL for the dataset */
+ if((dcpl_id = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ FAIL_STACK_ERROR
+
+
+ /*************/
+ /* RUN TESTS */
+ /*************/
+
+ /* Iterate over file format versions */
+ for(new_format = FALSE; new_format <= TRUE; new_format++) {
+ H5D_layout_t layout; /* Dataset layout type */
+ H5D_alloc_time_t alloc_time; /* Storage allocation time */
+
+ /* Copy the file access property list */
+ if((fapl_id_copy = H5Pcopy(fapl_id)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Set the "use the latest version of the format" bounds for creating objects in the file */
+ if(new_format)
+ if(H5Pset_libver_bounds(fapl_id_copy, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
+ FAIL_STACK_ERROR
+
+ /* Create test file */
+ if((file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id_copy)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Close the copy of the FAPL */
+ if(H5Pclose(fapl_id_copy) < 0)
+ FAIL_STACK_ERROR
+
+ /* Iterate over combinations of testing parameters */
+ for(layout = H5D_COMPACT; layout <= H5D_CHUNKED; layout++) {
+ for(alloc_time = H5D_ALLOC_TIME_EARLY; alloc_time <= H5D_ALLOC_TIME_INCR; alloc_time++) {
+ const hsize_t dims[1] = {0}; /* Dataset dimensions */
+ const hsize_t max_dims[1] = {H5S_UNLIMITED}; /* Maximum dataset dimensions */
+ const hsize_t chunk_dims[1] = {100}; /* Chunk dimensions */
+ char dset_name[32]; /* Dataset name */
+ H5O_info_t oinfo; /* Information about the dataset */
+
+ /* Compact storage must have early allocation */
+ if(H5D_COMPACT == layout && H5D_ALLOC_TIME_EARLY != alloc_time)
+ continue;
+
+ /* Compose dataset name */
+ HDsnprintf(dset_name, sizeof(dset_name), "/Dataset-%u-%u", (unsigned)alloc_time, (unsigned)layout);
+
+ /* Set up DCPL */
+ if((dcpl_id_copy = H5Pcopy(dcpl_id)) < 0)
+ FAIL_STACK_ERROR
+ if(H5Pset_alloc_time(dcpl_id_copy, alloc_time) < 0)
+ FAIL_STACK_ERROR
+ if(H5Pset_layout(dcpl_id_copy, layout) < 0)
+ FAIL_STACK_ERROR
+ if(H5D_CHUNKED == layout)
+ if(H5Pset_chunk(dcpl_id_copy, 1, chunk_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Create the dataspace for the dataset */
+ if((dset_space_id = H5Screate_simple(1, dims, (H5D_CHUNKED == layout ? max_dims : NULL))) < 0)
+ FAIL_STACK_ERROR
+
+ /* Create the dataset with the appropriate parameters */
+ if((dset_id = H5Dcreate2(file_id, dset_name, H5T_NATIVE_INT, dset_space_id, H5P_DEFAULT, dcpl_id_copy, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Close objects used to create dataset */
+ if(H5Pclose(dcpl_id_copy) < 0)
+ FAIL_STACK_ERROR
+ if(H5Sclose(dset_space_id) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve & verify the dataset's index info */
+ HDmemset(&oinfo, 0, sizeof(oinfo));
+ if(H5Oget_info2(dset_id, &oinfo, H5O_INFO_META_SIZE) < 0)
+ FAIL_STACK_ERROR
+ if(0 != oinfo.meta_size.obj.index_size)
+ FAIL_PUTS_ERROR("dataset index allocation size is non-zero")
+
+ /* If chunked, try extending and verify that the index is allocated */
+ if(H5D_CHUNKED == layout) {
+ const hsize_t new_dims[1] = {1500}; /* New dataset dimensions */
+ const hsize_t mem_dims[1] = {1}; /* Memory buffer dataset dimensions */
+ const hsize_t coord = 0; /* Dataset selection coordinate */
+ int val = 0; /* Data value */
+
+ /* Extend dataset */
+ if(H5Dset_extent(dset_id, new_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataspace for the dataset & set single point selection */
+ if((dset_space_id = H5Dget_space(dset_id)) < 0)
+ FAIL_STACK_ERROR
+ if(H5Sselect_elements(dset_space_id, H5S_SELECT_SET, (size_t)1, (const hsize_t *)&coord) < 0)
+ FAIL_STACK_ERROR
+
+ /* Create memory dataspace, with only one element */
+ if((buf_space_id = H5Screate_simple(1, mem_dims, NULL)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Write the data to the dataset */
+ if(H5Dwrite(dset_id, H5T_NATIVE_INT, buf_space_id, dset_space_id, H5P_DEFAULT, &val) < 0)
+ FAIL_STACK_ERROR
+
+ /* Close objects used to perform I/O */
+ if(H5Sclose(dset_space_id) < 0)
+ FAIL_STACK_ERROR
+ if(H5Sclose(buf_space_id) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve & verify the dataset's index info */
+ HDmemset(&oinfo, 0, sizeof(oinfo));
+ if(H5Oget_info2(dset_id, &oinfo, H5O_INFO_META_SIZE) < 0)
+ FAIL_STACK_ERROR
+ if(0 == oinfo.meta_size.obj.index_size)
+ FAIL_PUTS_ERROR("dataset index allocation size is zero")
+ } /* end if */
+
+ /* Close dataset */
+ if(H5Dclose(dset_id) < 0)
+ FAIL_STACK_ERROR
+ } /* end for */
+ } /* end for */
+
+ /* Close test file */
+ if(H5Fclose(file_id) < 0)
+ FAIL_STACK_ERROR
+ } /* end for */
+
+
+ /************/
+ /* TEARDOWN */
+ /************/
+
+ if(H5Pclose(dcpl_id) < 0)
+ FAIL_STACK_ERROR
+
+ PASSED();
+
+ return SUCCEED;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Sclose(dset_space_id);
+ H5Sclose(buf_space_id);
+ H5Pclose(fapl_id_copy);
+ H5Pclose(dcpl_id_copy);
+ H5Pclose(dcpl_id);
+ H5Dclose(dset_id);
+ H5Fclose(file_id);
+ } H5E_END_TRY;
+
+ return FAIL;
+} /* end test_0sized_dset_metadata_alloc() */
+
+
/*-------------------------------------------------------------------------
* Function: main
*
@@ -13197,6 +13700,7 @@ main(void)
int nerrors = 0;
const char *envval;
hbool_t contig_addr_vfd; /* Whether VFD used has a contiguous address space */
+ int i;
/* Don't run this test using certain file drivers */
envval = HDgetenv("HDF5_DRIVER");
@@ -13209,6 +13713,39 @@ main(void)
/* Set the random # seed */
HDsrandom((unsigned)HDtime(NULL));
+ /* Initialize global arrays */
+ /* points */
+ if(NULL == (points_data = (int *)HDcalloc(DSET_DIM1 * DSET_DIM2, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (points = (int **)HDcalloc(DSET_DIM1, sizeof(points_data))))
+ TEST_ERROR;
+ for (i = 0; i < DSET_DIM1; i++)
+ points[i] = points_data + (i * DSET_DIM2);
+
+ /* check */
+ if(NULL == (check_data = (int *)HDcalloc(DSET_DIM1 * DSET_DIM2, sizeof(int))))
+ TEST_ERROR;
+ if(NULL == (check = (int **)HDcalloc(DSET_DIM1, sizeof(check_data))))
+ TEST_ERROR;
+ for (i = 0; i < DSET_DIM1; i++)
+ check[i] = check_data + (i * DSET_DIM2);
+
+ /* points_dbl */
+ if(NULL == (points_dbl_data = (double *)HDcalloc(DSET_DIM1 * DSET_DIM2, sizeof(double))))
+ TEST_ERROR;
+ if(NULL == (points_dbl = (double **)HDcalloc(DSET_DIM1, sizeof(points_dbl_data))))
+ TEST_ERROR;
+ for (i = 0; i < DSET_DIM1; i++)
+ points_dbl[i] = points_dbl_data + (i * DSET_DIM2);
+
+ /* check_dbl */
+ if(NULL == (check_dbl_data = (double *)HDcalloc(DSET_DIM1 * DSET_DIM2, sizeof(double))))
+ TEST_ERROR;
+ if(NULL == (check_dbl = (double **)HDcalloc(DSET_DIM1, sizeof(check_dbl_data))))
+ TEST_ERROR;
+ for (i = 0; i < DSET_DIM1; i++)
+ check_dbl[i] = check_dbl_data + (i * DSET_DIM2);
+
/* Testing setup */
h5_reset();
fapl = h5_fileaccess();
@@ -13377,10 +13914,11 @@ main(void)
/* Tests version bounds using its own file */
nerrors += (test_versionbounds() < 0 ? 1 : 0);
- nerrors += (test_object_header_minimization_dcpl(fapl) < 0 ? 1 : 0);
+ nerrors += (test_object_header_minimization_dcpl() < 0 ? 1 : 0);
/* Run misc tests */
- nerrors += dls_01_main();
+ nerrors += (dls_01_main() < 0 ? 1 : 0);
+ nerrors += (test_0sized_dset_metadata_alloc(fapl) < 0 ? 1 : 0);
/* Verify symbol table messages are cached */
nerrors += (h5_verify_cached_stabs(FILENAME, fapl) < 0 ? 1 : 0);
@@ -13394,9 +13932,29 @@ main(void)
#endif /* H5_HAVE_FILTER_SZIP */
h5_cleanup(FILENAME, fapl);
+ HDfree(points);
+ HDfree(check);
+ HDfree(points_dbl);
+ HDfree(check_dbl);
+
+ HDfree(points_data);
+ HDfree(check_data);
+ HDfree(points_dbl_data);
+ HDfree(check_dbl_data);
+
HDexit(EXIT_SUCCESS);
error:
+ HDfree(points);
+ HDfree(check);
+ HDfree(points_dbl);
+ HDfree(check_dbl);
+
+ HDfree(points_data);
+ HDfree(check_data);
+ HDfree(points_dbl_data);
+ HDfree(check_dbl_data);
+
nerrors = MAX(1, nerrors);
HDprintf("***** %d DATASET TEST%s FAILED! *****\n",
nerrors, 1 == nerrors ? "" : "S");