path: root/test/dsets.c
author    Dana Robinson <derobins@hdfgroup.org>    2020-10-24 00:13:05 (GMT)
committer Dana Robinson <derobins@hdfgroup.org>    2020-10-24 00:13:05 (GMT)
commit    ca3659a01427cd58e5fddb57349b7030e5cb2dcd (patch)
tree      5633c0b4bb7b5174518d27b33db43228c1c538d0 /test/dsets.c
parent    f9679de85c9de96603d448950da5e1f2c18b4494 (diff)
HD prefix updates in src/ and test/
Adds missing HD prefixes to API calls in src and test. Adds some extra processing to bin/checkposix to keep the noise levels down when running the script (not comprehensive).
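For reference, the HD-prefixed names appearing in this diff (HDassert, HDstrcmp, HDprintf, HDmalloc) are thin wrappers over the corresponding C library calls, defined in src/H5private.h. The sketch below illustrates the convention only; it is simplified and not the verbatim upstream header, which guards each definition per platform and covers many more calls:

/* Illustrative sketch of the HD wrapper convention
 * (simplified; not the exact contents of src/H5private.h). */
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#ifndef HDassert
#define HDassert(X)      assert(X)
#endif
#ifndef HDstrcmp
#define HDstrcmp(X, Y)   strcmp(X, Y)
#endif
#ifndef HDprintf
#define HDprintf(...)    printf(__VA_ARGS__)
#endif
#ifndef HDmalloc
#define HDmalloc(Z)      malloc(Z)
#endif

Funneling every C/POSIX call through a single prefix is what lets a checker such as bin/checkposix flag unprefixed calls with little noise, which is the motivation for the changes below.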
Diffstat (limited to 'test/dsets.c')
-rw-r--r--    test/dsets.c    42
1 file changed, 21 insertions(+), 21 deletions(-)
diff --git a/test/dsets.c b/test/dsets.c
index 8a4332d..a7b67ef 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -375,13 +375,13 @@ test_create(hid_t file)
dims[0] = 256;
dims[1] = 512;
space = H5Screate_simple(2, dims, NULL);
- assert(space >= 0);
+ HDassert(space >= 0);
/* Create a small data space for compact dataset */
small_dims[0] = 16;
small_dims[1] = 8;
small_space = H5Screate_simple(2, small_dims, NULL);
- assert(space >= 0);
+ HDassert(space >= 0);
/*
* Create a dataset using the default dataset creation properties. We're
@@ -447,13 +447,13 @@ test_create(hid_t file)
* layout.
*/
create_parms = H5Pcreate(H5P_DATASET_CREATE);
- assert(create_parms >= 0);
+ HDassert(create_parms >= 0);
/* Attempt to create a dataset with invalid chunk sizes */
csize[0] = dims[0] * 2;
csize[1] = dims[1] * 2;
status = H5Pset_chunk(create_parms, 2, csize);
- assert(status >= 0);
+ HDassert(status >= 0);
H5E_BEGIN_TRY
{
dataset = H5Dcreate2(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space, H5P_DEFAULT, create_parms,
@@ -469,7 +469,7 @@ test_create(hid_t file)
csize[0] = 5;
csize[1] = 100;
status = H5Pset_chunk(create_parms, 2, csize);
- assert(status >= 0);
+ HDassert(status >= 0);
dataset =
H5Dcreate2(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space, H5P_DEFAULT, create_parms, H5P_DEFAULT);
@@ -491,11 +491,11 @@ test_create(hid_t file)
* Create a compact dataset, then close it.
*/
create_parms = H5Pcreate(H5P_DATASET_CREATE);
- assert(create_parms >= 0);
+ HDassert(create_parms >= 0);
status = H5Pset_layout(create_parms, H5D_COMPACT);
- assert(status >= 0);
+ HDassert(status >= 0);
status = H5Pset_alloc_time(create_parms, H5D_ALLOC_TIME_EARLY);
- assert(status >= 0);
+ HDassert(status >= 0);
dataset = H5Dcreate2(file, DSET_COMPACT_NAME, H5T_NATIVE_DOUBLE, small_space, H5P_DEFAULT, create_parms,
H5P_DEFAULT);
@@ -567,7 +567,7 @@ test_simple_io(const char *env_h5_drvr, hid_t fapl)
/* Create a small conversion buffer to test strip mining */
tconv_buf = HDmalloc((size_t)1000);
xfer = H5Pcreate(H5P_DATASET_XFER);
- assert(xfer >= 0);
+ HDassert(xfer >= 0);
if (H5Pset_buffer(xfer, (size_t)1000, tconv_buf, NULL) < 0)
goto error;
@@ -4336,9 +4336,9 @@ test_nbit_compound_3(hid_t file)
/* Check that the values read are the same as the values written */
for (i = 0; i < (size_t)size[0]; i++) {
- if (new_data[i].i != orig_data[i].i || strcmp(new_data[i].str, orig_data[i].str) != 0 ||
- strcmp(new_data[i].vl_str, orig_data[i].vl_str) != 0 || new_data[i].v.len != orig_data[i].v.len ||
- new_data[i].r != orig_data[i].r) {
+ if (new_data[i].i != orig_data[i].i || HDstrcmp(new_data[i].str, orig_data[i].str) != 0 ||
+ HDstrcmp(new_data[i].vl_str, orig_data[i].vl_str) != 0 ||
+ new_data[i].v.len != orig_data[i].v.len || new_data[i].r != orig_data[i].r) {
H5_FAILED();
HDprintf(" Read different values than written.\n");
HDprintf(" At index %lu\n", (unsigned long)i);
@@ -7832,7 +7832,7 @@ test_random_chunks_real(const char *testname, hbool_t early_alloc, hid_t fapl)
TESTING(testname);
- assert(NPOINTS < 100);
+ HDassert(NPOINTS < 100);
h5_fixname(FILENAME[6], fapl, filename, sizeof filename);
@@ -8354,13 +8354,13 @@ test_deprec(hid_t file)
dims[0] = 256;
dims[1] = 512;
space = H5Screate_simple(2, dims, NULL);
- assert(space >= 0);
+ HDassert(space >= 0);
/* Create a small data space for compact dataset */
small_dims[0] = 16;
small_dims[1] = 8;
small_space = H5Screate_simple(2, small_dims, NULL);
- assert(space >= 0);
+ HDassert(space >= 0);
/*
* Create a dataset using the default dataset creation properties. We're
@@ -8413,7 +8413,7 @@ test_deprec(hid_t file)
* layout.
*/
create_parms = H5Pcreate(H5P_DATASET_CREATE);
- assert(create_parms >= 0);
+ HDassert(create_parms >= 0);
/* Add the deflate filter, if available */
#if defined H5_HAVE_FILTER_DEFLATE
@@ -8449,7 +8449,7 @@ test_deprec(hid_t file)
csize[0] = dims[0] * 2;
csize[1] = dims[1] * 2;
status = H5Pset_chunk(create_parms, 2, csize);
- assert(status >= 0);
+ HDassert(status >= 0);
H5E_BEGIN_TRY
{
dataset = H5Dcreate1(file, DSET_DEPREC_NAME_CHUNKED, H5T_NATIVE_DOUBLE, space, create_parms);
@@ -8464,7 +8464,7 @@ test_deprec(hid_t file)
csize[0] = 5;
csize[1] = 100;
status = H5Pset_chunk(create_parms, 2, csize);
- assert(status >= 0);
+ HDassert(status >= 0);
if ((dataset = H5Dcreate1(file, DSET_DEPREC_NAME_CHUNKED, H5T_NATIVE_DOUBLE, space, create_parms)) < 0)
goto error;
@@ -8489,11 +8489,11 @@ test_deprec(hid_t file)
* Create a compact dataset, then close it.
*/
create_parms = H5Pcreate(H5P_DATASET_CREATE);
- assert(create_parms >= 0);
+ HDassert(create_parms >= 0);
status = H5Pset_layout(create_parms, H5D_COMPACT);
- assert(status >= 0);
+ HDassert(status >= 0);
status = H5Pset_alloc_time(create_parms, H5D_ALLOC_TIME_EARLY);
- assert(status >= 0);
+ HDassert(status >= 0);
if ((dataset = H5Dcreate1(file, DSET_DEPREC_NAME_COMPACT, H5T_NATIVE_DOUBLE, small_space, create_parms)) <
0)