author     Binh-Minh Ribler <bmribler@hdfgroup.org>  2019-01-31 02:05:04 (GMT)
committer  Binh-Minh Ribler <bmribler@hdfgroup.org>  2019-01-31 02:05:04 (GMT)
commit     38c89fa5c005cee7f422d7002154ad8b93654109 (patch)
tree       ee0bb3dd288fbbdf5942177423be4aa8174fcaad /test
parent     02d03b4624122955ee3de635699a4e3880fea377 (diff)
parent     d99c9670e576471bc0a822c25f268703cf715869 (diff)
Merge branch 'hdf5_1_10' of https://bitbucket.hdfgroup.org/scm/~bmribler/hdf5_1_10_bmr into hdf5_1_10
Diffstat (limited to 'test')
-rw-r--r--  test/cork.c          |  18
-rw-r--r--  test/direct_chunk.c  | 182
-rw-r--r--  test/dt_arith.c      |  81
-rw-r--r--  test/th5o.c          |  88
4 files changed, 216 insertions, 153 deletions
diff --git a/test/cork.c b/test/cork.c
index dceaf5a..240be77 100644
--- a/test/cork.c
+++ b/test/cork.c
@@ -286,19 +286,19 @@ static unsigned
verify_obj_dset_cork(hbool_t swmr)
{
/* Variable Declarations */
- hid_t fid = -1; /* File ID */
+ hid_t fid = -1; /* File ID */
hid_t fapl = -1; /* File access property list */
hid_t aid = -1; /* Attribute ID */
hid_t sid = -1, sid2 = -1; /* Dataspace IDs */
hid_t did = -1, did2 = -1; /* Dataset IDs */
hid_t oid = -1; /* Object ID */
- hid_t dcpl2; /* Dataset creation property list */
+ hid_t dcpl2; /* Dataset creation property list */
int i = 0; /* Local index variable */
- hsize_t dim[1] = {100}; /* Dataset dimension size */
+ hsize_t dim[1] = {100}; /* Dataset dimension size */
hsize_t chunk_dim[1] = {7}; /* Dataset chunk dimension size */
H5O_info_t oinfo, oinfo2; /* Object metadata information */
char attrname[500]; /* Name of attribute */
- unsigned flags; /* File access flags */
+ unsigned flags; /* File access flags */
if(swmr) {
TESTING("cork status for dataset objects with attributes (SWMR)");
@@ -341,7 +341,7 @@ verify_obj_dset_cork(hbool_t swmr)
TEST_ERROR
/* Attach and write to an attribute to the dataset: DSET */
- if((aid = H5Acreate2(did, ATTR, H5T_NATIVE_UINT, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ if((aid = H5Acreate2(did, ATTR, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0)
TEST_ERROR
/* Verify cork status of the dataset: DSET */
@@ -380,11 +380,11 @@ verify_obj_dset_cork(hbool_t swmr)
TEST_ERROR
/* Attach 8 attributes to the dataset */
- for(i = 0;i < 8; i++) {
- sprintf(attrname, "attr %d", i);
- if((aid = H5Acreate2(did2, attrname, H5T_NATIVE_UINT, sid2, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ for(i = 0; i < 8; i++) {
+ HDsprintf(attrname, "attr %d", i);
+ if((aid = H5Acreate2(did2, attrname, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0)
TEST_ERROR
- if(H5Awrite(aid, H5T_NATIVE_UINT, &i) < 0)
+ if(H5Awrite(aid, H5T_NATIVE_INT, &i) < 0)
TEST_ERROR
if(H5Aclose(aid) < 0)
TEST_ERROR
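
For reference, a minimal standalone sketch of the attribute pattern the cork.c hunks above converge on: the datatype handed to H5Acreate2 and H5Awrite is H5T_NATIVE_INT, matching the int loop counter that is written. This is not part of the patch; the helper name attach_int_attrs and the attribute names are illustrative, and did is assumed to be a valid dataset identifier (the test suite itself uses the HDsprintf wrapper instead of plain snprintf).

/* Illustrative sketch, not from the patch: attach 'count' int attributes
 * to an existing dataset, using a memory type that matches the int value. */
#include <stdio.h>
#include "hdf5.h"

static herr_t attach_int_attrs(hid_t did, int count)
{
    hsize_t dim[1] = {1};                              /* one element per attribute  */
    hid_t   sid    = H5Screate_simple(1, dim, NULL);   /* dataspace shared by all    */
    char    attrname[64];
    int     i;

    if(sid < 0)
        return -1;

    for(i = 0; i < count; i++) {
        hid_t aid;

        snprintf(attrname, sizeof(attrname), "attr %d", i);
        /* File and memory types are both native int, matching 'i' */
        if((aid = H5Acreate2(did, attrname, H5T_NATIVE_INT, sid,
                             H5P_DEFAULT, H5P_DEFAULT)) < 0)
            return -1;
        if(H5Awrite(aid, H5T_NATIVE_INT, &i) < 0)
            return -1;
        if(H5Aclose(aid) < 0)
            return -1;
    }

    return H5Sclose(sid);
}
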
diff --git a/test/direct_chunk.c b/test/direct_chunk.c
index 2ef38ea..1c5862d 100644
--- a/test/direct_chunk.c
+++ b/test/direct_chunk.c
@@ -13,7 +13,7 @@
#include "h5test.h"
-#if defined(H5_HAVE_ZLIB_H) && !defined(H5_ZLIB_HEADER)
+#if defined(H5_HAVE_ZLIB_H) && !defined(H5_ZLIB_HEADER)
# define H5_ZLIB_HEADER "zlib.h"
#endif
#if defined(H5_ZLIB_HEADER)
@@ -46,10 +46,10 @@
#define DEFLATE_SIZE_ADJUST(s) (HDceil(((double)(s))*H5_DOUBLE(1.001))+H5_DOUBLE(12.0))
/* Temporary filter IDs used for testing */
-#define H5Z_FILTER_BOGUS1 305
-#define H5Z_FILTER_BOGUS2 306
-#define ADD_ON 7
-#define FACTOR 3
+#define H5Z_FILTER_BOGUS1 305
+#define H5Z_FILTER_BOGUS2 306
+#define ADD_ON 7
+#define FACTOR 3
/* Constants for the overwrite test */
#define OVERWRITE_NDIMS 3
@@ -75,33 +75,33 @@ static size_t filter_bogus2(unsigned int flags, size_t cd_nelmts,
/* This message derives from H5Z */
const H5Z_class2_t H5Z_BOGUS1[1] = {{
H5Z_CLASS_T_VERS, /* H5Z_class_t version */
- H5Z_FILTER_BOGUS1, /* Filter id number */
+ H5Z_FILTER_BOGUS1, /* Filter id number */
1, 1, /* Encoding and decoding enabled */
- "bogus1", /* Filter name for debugging */
+ "bogus1", /* Filter name for debugging */
NULL, /* The "can apply" callback */
NULL, /* The "set local" callback */
- filter_bogus1, /* The actual filter function */
+ filter_bogus1, /* The actual filter function */
}};
const H5Z_class2_t H5Z_BOGUS2[1] = {{
H5Z_CLASS_T_VERS, /* H5Z_class_t version */
- H5Z_FILTER_BOGUS2, /* Filter id number */
+ H5Z_FILTER_BOGUS2, /* Filter id number */
1, 1, /* Encoding and decoding enabled */
- "bogus2", /* Filter name for debugging */
+ "bogus2", /* Filter name for debugging */
NULL, /* The "can apply" callback */
NULL, /* The "set local" callback */
- filter_bogus2, /* The actual filter function */
+ filter_bogus2, /* The actual filter function */
}};
/*-------------------------------------------------------------------------
- * Function: test_direct_chunk_write
+ * Function: test_direct_chunk_write
*
- * Purpose: Test the basic functionality of H5Dwrite_chunk
+ * Purpose: Test the basic functionality of H5Dwrite_chunk
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 30 November 2012
*
*-------------------------------------------------------------------------
@@ -113,7 +113,7 @@ test_direct_chunk_write (hid_t file)
hid_t dataspace = -1, dataset = -1;
hid_t mem_space = -1;
hid_t cparms = -1, dxpl = -1;
- hsize_t dims[2] = {NX, NY};
+ hsize_t dims[2] = {NX, NY};
hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
hsize_t chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
herr_t status;
@@ -128,11 +128,11 @@ test_direct_chunk_write (hid_t file)
size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int);
const Bytef *z_src = (const Bytef*)(direct_buf);
- Bytef *z_dst = NULL; /*destination buffer */
- uLongf z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(buf_size);
- uLong z_src_nbytes = (uLong)buf_size;
+ Bytef *z_dst = NULL; /*destination buffer */
+ uLongf z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(buf_size);
+ uLong z_src_nbytes = (uLong)buf_size;
int aggression = 9; /* Compression aggression setting */
- void *outbuf = NULL; /* Pointer to new buffer */
+ void *outbuf = NULL; /* Pointer to new buffer */
hsize_t start[2]; /* Start of hyperslab */
hsize_t stride[2]; /* Stride of hyperslab */
@@ -167,29 +167,29 @@ test_direct_chunk_write (hid_t file)
* creation properties.
*/
if((dataset = H5Dcreate2(file, DATASETNAME1, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
- cparms, H5P_DEFAULT)) < 0)
+ cparms, H5P_DEFAULT)) < 0)
goto error;
/* Initialize the dataset */
for(i = n = 0; i < NX; i++)
for(j = 0; j < NY; j++)
- data[i][j] = n++;
+ data[i][j] = n++;
if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
goto error;
/*
* Write the data for the dataset. It should stay in the chunk cache.
- * It will be evicted from the cache by the H5Dwrite_chunk calls.
+ * It will be evicted from the cache by the H5Dwrite_chunk calls.
*/
if((status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
- dxpl, data)) < 0)
+ dxpl, data)) < 0)
goto error;
/* Initialize data for one chunk */
for(i = n = 0; i < CHUNK_NX; i++)
for(j = 0; j < CHUNK_NY; j++)
- direct_buf[i][j] = n++;
+ direct_buf[i][j] = n++;
/* Allocate output (compressed) buffer */
outbuf = HDmalloc(z_dst_nbytes);
@@ -210,8 +210,8 @@ test_direct_chunk_write (hid_t file)
goto error;
}
- /* Write the compressed chunk data repeatedly to cover all the chunks in the
- * dataset, using the direct writing function. */
+ /* Write the compressed chunk data repeatedly to cover all the chunks in the
+ * dataset, using the direct writing function. */
for(i=0; i<NX/CHUNK_NX; i++) {
for(j=0; j<NY/CHUNK_NY; j++) {
status = H5Dwrite_chunk(dataset, dxpl, filter_mask, offset, z_dst_nbytes, outbuf);
@@ -224,7 +224,7 @@ test_direct_chunk_write (hid_t file)
if(outbuf)
HDfree(outbuf);
- if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
+ if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
goto error;
if(H5Dclose(dataset) < 0)
@@ -253,7 +253,7 @@ test_direct_chunk_write (hid_t file)
if(direct_buf[i][j] != check_chunk[i][j]) {
HDprintf(" 1. Read different values than written.");
HDprintf(" At index %d,%d\n", i, j);
- HDprintf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]);
+ HDprintf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]);
goto error;
}
}
@@ -262,7 +262,7 @@ test_direct_chunk_write (hid_t file)
/* Reinitialize different data for one chunk */
for(i = 0; i < CHUNK_NX; i++)
for(j = 0; j < CHUNK_NY; j++)
- direct_buf[i][j] = i + j;
+ direct_buf[i][j] = i + j;
/* Allocate output (compressed) buffer */
outbuf = HDmalloc(z_dst_nbytes);
@@ -283,8 +283,8 @@ test_direct_chunk_write (hid_t file)
goto error;
}
- /* Rewrite the compressed chunk data repeatedly to cover all the chunks in the
- * dataset, using the direct writing function. */
+ /* Rewrite the compressed chunk data repeatedly to cover all the chunks in the
+ * dataset, using the direct writing function. */
offset[0] = offset[1] = 0;
for(i=0; i<NX/CHUNK_NX; i++) {
for(j=0; j<NY/CHUNK_NY; j++) {
@@ -298,7 +298,7 @@ test_direct_chunk_write (hid_t file)
if(outbuf)
HDfree(outbuf);
- if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
+ if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
goto error;
if(H5Dclose(dataset) < 0)
@@ -317,7 +317,7 @@ test_direct_chunk_write (hid_t file)
if(direct_buf[i][j] != check_chunk[i][j]) {
HDprintf(" 2. Read different values than written.");
HDprintf(" At index %d,%d\n", i, j);
- HDprintf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]);
+ HDprintf(" direct_buf=%d, check_chunk=%d\n", direct_buf[i][j], check_chunk[i][j]);
goto error;
}
}
@@ -331,7 +331,7 @@ test_direct_chunk_write (hid_t file)
H5Sclose(dataspace);
H5Pclose(cparms);
H5Pclose(dxpl);
-
+
PASSED();
return 0;
@@ -459,15 +459,15 @@ error:
} /* end test_direct_chunk_overwrite_data() */
/*-------------------------------------------------------------------------
- * Function: test_skip_compress_write1
+ * Function: test_skip_compress_write1
*
- * Purpose: Test skipping compression filter when it is the only filter
+ * Purpose: Test skipping compression filter when it is the only filter
* for the dataset
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 30 November 2012
*
*-------------------------------------------------------------------------
@@ -528,7 +528,7 @@ test_skip_compress_write1(hid_t file)
* creation properties.
*/
if((dataset = H5Dcreate2(file, DATASETNAME2, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
- cparms, H5P_DEFAULT)) < 0)
+ cparms, H5P_DEFAULT)) < 0)
goto error;
if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
@@ -636,13 +636,13 @@ error:
} /* test_skip_compress_write1() */
/*-------------------------------------------------------------------------
- * Function: filter_bogus1
+ * Function: filter_bogus1
*
- * Purpose: A bogus filter that adds ADD_ON to the original value
+ * Purpose: A bogus filter that adds ADD_ON to the original value
*
- * Return: Success: Data chunk size
+ * Return: Success: Data chunk size
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 30 November 2012
*
*-------------------------------------------------------------------------
@@ -674,13 +674,13 @@ filter_bogus1(unsigned int flags, size_t H5_ATTR_UNUSED cd_nelmts,
} /* filter_bogus1() */
/*-------------------------------------------------------------------------
- * Function: filter_bogus2
+ * Function: filter_bogus2
*
- * Purpose: A bogus filter that multiplies the original value by FACTOR.
+ * Purpose: A bogus filter that multiplies the original value by FACTOR.
*
- * Return: Success: Data chunk size
+ * Return: Success: Data chunk size
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 30 November 2012
*-------------------------------------------------------------------------
*/
@@ -711,15 +711,15 @@ filter_bogus2(unsigned int flags, size_t H5_ATTR_UNUSED cd_nelmts,
} /* filter_bogus2() */
/*-------------------------------------------------------------------------
- * Function: test_skip_compress_write2
+ * Function: test_skip_compress_write2
*
- * Purpose: Test skipping compression filter when there are three filters
+ * Purpose: Test skipping compression filter when there are three filters
* for the dataset
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 30 November 2012
*
*-------------------------------------------------------------------------
@@ -797,7 +797,7 @@ test_skip_compress_write2(hid_t file)
* creation properties.
*/
if((dataset = H5Dcreate2(file, DATASETNAME3, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
- cparms, H5P_DEFAULT)) < 0)
+ cparms, H5P_DEFAULT)) < 0)
goto error;
if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
@@ -907,14 +907,14 @@ error:
} /* test_skip_compress_write2() */
/*-------------------------------------------------------------------------
- * Function: test_data_conv
+ * Function: test_data_conv
*
- * Purpose: Test data conversion
+ * Purpose: Test data conversion
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 30 November 2012
*
*-------------------------------------------------------------------------
@@ -932,13 +932,13 @@ test_data_conv(hid_t file)
hid_t dataspace = -1, dataset = -1;
hid_t mem_space = -1;
hid_t cparms = -1, dxpl = -1;
- hsize_t dims[2] = {NX, NY};
+ hsize_t dims[2] = {NX, NY};
hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
hsize_t chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
herr_t status;
int i, j, n;
- const hsize_t four = 4;
- hid_t st=-1, dt=-1;
+ const hsize_t four = 4;
+ hid_t st=-1, dt=-1;
hid_t array_dt;
unsigned filter_mask = 0;
@@ -1002,7 +1002,7 @@ test_data_conv(hid_t file)
* creation properties.
*/
if((dataset = H5Dcreate2(file, DATASETNAME4, st, dataspace, H5P_DEFAULT,
- cparms, H5P_DEFAULT)) < 0)
+ cparms, H5P_DEFAULT)) < 0)
goto error;
if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
@@ -1022,7 +1022,7 @@ test_data_conv(hid_t file)
}
}
- /* write the chunk data to dataset, using the direct writing function.
+ /* write the chunk data to dataset, using the direct writing function.
* There should be no data conversion involved. */
offset[0] = CHUNK_NX;
offset[1] = CHUNK_NY;
@@ -1030,7 +1030,7 @@ test_data_conv(hid_t file)
if((status = H5Dwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) < 0)
goto error;
- if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
+ if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
goto error;
if(H5Dclose(dataset) < 0)
@@ -1057,10 +1057,10 @@ test_data_conv(hid_t file)
HDprintf(" 1. Read different values than written.");
HDprintf(" At index %d,%d\n", i, j);
HDprintf(" src={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d\n",
- (direct_buf[i][j]).a, (direct_buf[i][j]).b, (direct_buf[i][j]).c[0], (direct_buf[i][j]).c[1],
+ (direct_buf[i][j]).a, (direct_buf[i][j]).b, (direct_buf[i][j]).c[0], (direct_buf[i][j]).c[1],
(direct_buf[i][j]).c[2], (direct_buf[i][j]).c[3], (direct_buf[i][j]).d, (direct_buf[i][j]).e);
HDprintf(" dst={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d\n",
- (read_chunk[i][j]).a, (read_chunk[i][j]).b, (read_chunk[i][j]).c[0], (read_chunk[i][j]).c[1],
+ (read_chunk[i][j]).a, (read_chunk[i][j]).b, (read_chunk[i][j]).c[0], (read_chunk[i][j]).c[1],
(read_chunk[i][j]).c[2], (read_chunk[i][j]).c[3], (read_chunk[i][j]).d, (read_chunk[i][j]).e);
goto error;
@@ -1094,10 +1094,10 @@ test_data_conv(hid_t file)
HDprintf(" 1. Read different values than written.");
HDprintf(" At index %d,%d\n", i, j);
HDprintf(" src={a=%d, b=%d, c=[%d,%d,%d,%d], d=%d, e=%d\n",
- (direct_buf[i][j]).a, (direct_buf[i][j]).b, (direct_buf[i][j]).c[0], (direct_buf[i][j]).c[1],
+ (direct_buf[i][j]).a, (direct_buf[i][j]).b, (direct_buf[i][j]).c[0], (direct_buf[i][j]).c[1],
(direct_buf[i][j]).c[2], (direct_buf[i][j]).c[3], (direct_buf[i][j]).d, (direct_buf[i][j]).e);
HDprintf(" dst={a=%d, c=[%d,%d,%d,%d], e=%d\n",
- (check_chunk[i][j]).a, (check_chunk[i][j]).c[0], (check_chunk[i][j]).c[1], (check_chunk[i][j]).c[2],
+ (check_chunk[i][j]).a, (check_chunk[i][j]).c[0], (check_chunk[i][j]).c[1], (check_chunk[i][j]).c[2],
(check_chunk[i][j]).c[3], (check_chunk[i][j]).e);
goto error;
@@ -1135,14 +1135,14 @@ error:
} /* test_data_conv() */
/*-------------------------------------------------------------------------
- * Function: test_invalid_parameters
+ * Function: test_invalid_parameters
*
* Purpose: Test invalid parameters for H5Dwrite_chunk and H5Dread_chunk
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 30 November 2012
*
*-------------------------------------------------------------------------
@@ -1153,7 +1153,7 @@ test_invalid_parameters(hid_t file)
hid_t dataspace = -1, dataset = -1;
hid_t mem_space = -1;
hid_t cparms = -1, dxpl = -1;
- hsize_t dims[2] = {NX, NY};
+ hsize_t dims[2] = {NX, NY};
hsize_t chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
herr_t status;
int i, j, n;
@@ -1187,7 +1187,7 @@ test_invalid_parameters(hid_t file)
* Create a new contiguous dataset to verify H5Dwrite_chunk/H5Dread_chunk doesn't work
*/
if((dataset = H5Dcreate2(file, DATASETNAME5, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
- cparms, H5P_DEFAULT)) < 0)
+ cparms, H5P_DEFAULT)) < 0)
goto error;
if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
@@ -1236,10 +1236,10 @@ test_invalid_parameters(hid_t file)
* creation properties.
*/
if((dataset = H5Dcreate2(file, DATASETNAME6, H5T_NATIVE_INT, dataspace, H5P_DEFAULT,
- cparms, H5P_DEFAULT)) < 0)
+ cparms, H5P_DEFAULT)) < 0)
goto error;
- /* Check invalid dataset ID for H5Dwrite_chunk and H5Dread_chunk */
+ /* Check invalid dataset ID for H5Dwrite_chunk and H5Dread_chunk */
H5E_BEGIN_TRY {
if((status = H5Dwrite_chunk((hid_t)-1, dxpl, filter_mask, offset, buf_size, direct_buf)) != FAIL)
goto error;
@@ -1301,7 +1301,7 @@ test_invalid_parameters(hid_t file)
/* Check invalid buffer size for H5Dwrite_chunk only */
offset[0] = CHUNK_NX;
offset[1] = CHUNK_NY;
- buf_size = 0;
+ buf_size = 0;
H5E_BEGIN_TRY {
if((status = H5Dwrite_chunk(dataset, dxpl, filter_mask, offset, buf_size, direct_buf)) != FAIL)
goto error;
@@ -1329,7 +1329,7 @@ test_invalid_parameters(hid_t file)
H5Sclose(dataspace);
H5Pclose(cparms);
H5Pclose(dxpl);
-
+
PASSED();
return 0;
@@ -1635,7 +1635,7 @@ test_direct_chunk_read_cache (hid_t file, hbool_t flush)
goto error;
if(read_buf_size == 0)
goto error;
-
+
/* Read the compressed chunk back using the direct read function. */
if((status = H5Dread_chunk(dataset, dxpl, offset, &filter_mask, outbuf)) < 0)
goto error;
@@ -1910,10 +1910,10 @@ test_read_unallocated_chunk (hid_t file)
goto error;
/* Write a single chunk to intialize the chunk storage */
- HDmemset(&chunk_dims, 0, sizeof(chunk_dims));
+ HDmemset(direct_buf, 0, CHUNK_NX * CHUNK_NY * sizeof(int));
offset[0] = 0; offset[1] = 0;
- if(H5Dwrite_chunk(dataset, dxpl, filter_mask, offset, chunk_nbytes, &chunk_dims) < 0)
+ if(H5Dwrite_chunk(dataset, dxpl, filter_mask, offset, chunk_nbytes, direct_buf) < 0)
goto error;
/* Attempt to read each chunk in the dataset. Chunks are not allocated,
@@ -1986,7 +1986,7 @@ error:
* this jira issue:
* Create a file with the latest format and a chunked dataset
* with one single chunk. The library will use single chunk
- * index for the dataset.
+ * index for the dataset.
* Verify that the data read is the same as the written data.
*
* Return: Success: 0
@@ -2106,15 +2106,15 @@ error:
} /* test_single_chunk_latest() */
/*-------------------------------------------------------------------------
- * Function: Main function
+ * Function: Main function
*
- * Purpose: Test direct chunk write function H5Dwrite_chunk and
+ * Purpose: Test direct chunk write function H5Dwrite_chunk and
* chunk direct read function H5Dread_chunk
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 30 November 2012
*
*-------------------------------------------------------------------------
@@ -2134,7 +2134,7 @@ int main( void )
#ifdef H5_HAVE_FILTER_DEFLATE
nerrors += test_direct_chunk_write(file_id);
#endif /* H5_HAVE_FILTER_DEFLATE */
- nerrors += test_direct_chunk_overwrite_data(file_id);
+ nerrors += test_direct_chunk_overwrite_data(file_id);
nerrors += test_skip_compress_write1(file_id);
nerrors += test_skip_compress_write2(file_id);
nerrors += test_data_conv(file_id);
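
For reference, a minimal sketch of the direct chunk I/O pair that direct_chunk.c exercises: H5Dwrite_chunk stores a caller-prepared chunk at a logical offset, bypassing the normal datatype-conversion path, and H5Dread_chunk reads it back together with its filter mask. This is not part of the patch; roundtrip_one_chunk is an illustrative helper, dset is assumed to be an already-created chunked int dataset whose chunk dimensions are CHUNK_NX by CHUNK_NY, and no filter is applied, so the filter mask is 0.

/* Illustrative sketch, not from the patch: write one raw chunk and read it back. */
#include <stdint.h>
#include "hdf5.h"

#define CHUNK_NX 4
#define CHUNK_NY 8

static herr_t roundtrip_one_chunk(hid_t dset)
{
    int      wbuf[CHUNK_NX][CHUNK_NY];
    int      rbuf[CHUNK_NX][CHUNK_NY];
    hsize_t  offset[2] = {0, 0};      /* logical offset of the chunk, chunk-aligned */
    uint32_t filters   = 0;           /* no filters applied to the supplied buffer  */
    size_t   nbytes    = sizeof(wbuf);
    int      i, j;

    for(i = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++)
            wbuf[i][j] = i * CHUNK_NY + j;

    /* Write the raw chunk directly; 'nbytes' is the size of the buffer as stored */
    if(H5Dwrite_chunk(dset, H5P_DEFAULT, filters, offset, nbytes, wbuf) < 0)
        return -1;

    /* Read the same chunk back; the filter mask applied on disk is returned */
    if(H5Dread_chunk(dset, H5P_DEFAULT, offset, &filters, rbuf) < 0)
        return -1;

    for(i = 0; i < CHUNK_NX; i++)
        for(j = 0; j < CHUNK_NY; j++)
            if(wbuf[i][j] != rbuf[i][j])
                return -1;

    return 0;
}
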
diff --git a/test/dt_arith.c b/test/dt_arith.c
index c7f2986..2729ba1 100644
--- a/test/dt_arith.c
+++ b/test/dt_arith.c
@@ -4883,7 +4883,24 @@ run_fp_tests(const char *name)
#if H5_SIZEOF_LONG_DOUBLE!=H5_SIZEOF_DOUBLE && H5_SIZEOF_LONG_DOUBLE!=0
nerrors += test_conv_flt_1(name, TEST_DENORM, H5T_NATIVE_FLOAT, H5T_NATIVE_LDOUBLE);
nerrors += test_conv_flt_1(name, TEST_DENORM, H5T_NATIVE_DOUBLE, H5T_NATIVE_LDOUBLE);
+#ifndef H5_DISABLE_SOME_LDOUBLE_CONV
nerrors += test_conv_flt_1(name, TEST_DENORM, H5T_NATIVE_LDOUBLE, H5T_NATIVE_FLOAT);
+#else
+ {
+ char str[256]; /*string */
+
+ HDsnprintf(str, sizeof(str), "Testing %s denormalized %s -> %s conversions",
+ name, "long double", "float");
+ printf("%-70s", str);
+ SKIPPED();
+#if H5_SIZEOF_LONG_DOUBLE!=0
+ HDputs(" Test skipped due to the conversion problem on IBM ppc64le cpu.");
+#else
+ HDputs(" Test skipped due to disabled long double.");
+#endif
+ }
+#endif
+
nerrors += test_conv_flt_1(name, TEST_DENORM, H5T_NATIVE_LDOUBLE, H5T_NATIVE_DOUBLE);
#endif
@@ -4893,8 +4910,24 @@ run_fp_tests(const char *name)
#if H5_SIZEOF_LONG_DOUBLE!=H5_SIZEOF_DOUBLE && H5_SIZEOF_LONG_DOUBLE!=0
nerrors += test_conv_flt_1(name, TEST_SPECIAL, H5T_NATIVE_FLOAT, H5T_NATIVE_LDOUBLE);
nerrors += test_conv_flt_1(name, TEST_SPECIAL, H5T_NATIVE_DOUBLE, H5T_NATIVE_LDOUBLE);
+#ifndef H5_DISABLE_SOME_LDOUBLE_CONV
nerrors += test_conv_flt_1(name, TEST_SPECIAL, H5T_NATIVE_LDOUBLE, H5T_NATIVE_FLOAT);
nerrors += test_conv_flt_1(name, TEST_SPECIAL, H5T_NATIVE_LDOUBLE, H5T_NATIVE_DOUBLE);
+#else
+ {
+ char str[256]; /*string */
+
+ HDsnprintf(str, sizeof(str), "Testing %s special %s -> %s conversions",
+ name, "long double", "float or double");
+ printf("%-70s", str);
+ SKIPPED();
+#if H5_SIZEOF_LONG_DOUBLE!=0
+ HDputs(" Test skipped due to the conversion problem on IBM ppc64le cpu.");
+#else
+ HDputs(" Test skipped due to disabled long double.");
+#endif
+ }
+#endif
#endif
done:
@@ -4963,7 +4996,7 @@ run_int_fp_conv(const char *name)
nerrors += test_conv_int_fp(name, TEST_NORMAL, H5T_NATIVE_INT, H5T_NATIVE_LDOUBLE);
nerrors += test_conv_int_fp(name, TEST_NORMAL, H5T_NATIVE_UINT, H5T_NATIVE_LDOUBLE);
#if H5_SIZEOF_LONG!=H5_SIZEOF_INT
-#ifndef H5_LONG_TO_LDOUBLE_SPECIAL
+#if !defined(H5_LONG_TO_LDOUBLE_SPECIAL) && !defined(H5_DISABLE_SOME_LDOUBLE_CONV)
nerrors += test_conv_int_fp(name, TEST_NORMAL, H5T_NATIVE_LONG, H5T_NATIVE_LDOUBLE);
nerrors += test_conv_int_fp(name, TEST_NORMAL, H5T_NATIVE_ULONG, H5T_NATIVE_LDOUBLE);
#else
@@ -5077,16 +5110,46 @@ run_fp_int_conv(const char *name)
#endif
#if H5_SIZEOF_LONG_DOUBLE!=H5_SIZEOF_DOUBLE
- nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_SCHAR);
- nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_UCHAR);
- nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_SHORT);
- nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_USHORT);
- nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_INT);
- nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_UINT);
+ if(test_values != TEST_SPECIAL) {
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_SCHAR);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_UCHAR);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_SHORT);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_USHORT);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_INT);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_UINT);
+ } else {
+#ifndef H5_DISABLE_SOME_LDOUBLE_CONV
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_SCHAR);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_UCHAR);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_SHORT);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_USHORT);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_INT);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_UINT);
+#else
+ char str[256]; /*string */
+
+ HDsnprintf(str, sizeof(str), "Testing %s special %s -> %s conversions",
+ name, "long double", "signed and unsigned char, short, int, long");
+ printf("%-70s", str);
+ SKIPPED();
+#if H5_SIZEOF_LONG_DOUBLE!=0
+ HDputs(" Test skipped due to the conversion problem on IBM ppc64le cpu.");
+#else
+ HDputs(" Test skipped due to disabled long double.");
+#endif
+#endif
+ }
#if H5_SIZEOF_LONG!=H5_SIZEOF_INT && H5_SIZEOF_LONG_DOUBLE!=0
#ifndef H5_LDOUBLE_TO_LONG_SPECIAL
- nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_LONG);
- nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_ULONG);
+ if(test_values != TEST_SPECIAL && test_values != TEST_NORMAL) {
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_LONG);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_ULONG);
+ } else {
+#ifndef H5_DISABLE_SOME_LDOUBLE_CONV
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_LONG);
+ nerrors += test_conv_int_fp(name, test_values, H5T_NATIVE_LDOUBLE, H5T_NATIVE_ULONG);
+#endif
+ }
#else
{
char str[256]; /*string */
diff --git a/test/th5o.c b/test/th5o.c
index 63fee5f..2949c4e 100644
--- a/test/th5o.c
+++ b/test/th5o.c
@@ -13,7 +13,7 @@
/***********************************************************
*
-* Test program: th5o
+* Test program: th5o
*
* Test public H5O functions for accessing
*
@@ -30,7 +30,7 @@
#define TEST6_DIM1 100
#define TEST6_DIM2 100
-
+
/****************************************************************
**
** test_h5o_open(): Test H5Oopen function.
@@ -140,7 +140,7 @@ test_h5o_open(void)
} /* test_h5o_open() */
-
+
/****************************************************************
**
** test_h5o_close(): Test H5Oclose function.
@@ -230,7 +230,7 @@ test_h5o_close(void)
CHECK(ret, FAIL, "H5Fclose");
}
-
+
/****************************************************************
**
** test_h5o_open_by_addr(): Test H5Oopen_by_addr function.
@@ -367,7 +367,7 @@ test_h5o_open_by_addr(void)
VERIFY(dtype, FAIL, "H5Oopen_by_addr");
} /* test_h5o_open_by_addr() */
-
+
/****************************************************************
**
** test_h5o_refcount(): Test H5O refcounting functions.
@@ -378,7 +378,7 @@ test_h5o_refcount(void)
{
hid_t fid; /* HDF5 File ID */
hid_t grp, dset, dtype, dspace; /* Object identifiers */
- H5O_info_t oinfo; /* Object info struct */
+ H5O_info_t oinfo; /* Object info struct */
hsize_t dims[RANK];
herr_t ret; /* Value returned from API calls */
@@ -562,7 +562,7 @@ test_h5o_refcount(void)
CHECK(ret, FAIL, "H5Fclose");
} /* test_h5o_refcount() */
-
+
/****************************************************************
**
** test_h5o_plist(): Test object creation properties
@@ -756,7 +756,7 @@ test_h5o_plist(void)
CHECK(ret, FAIL, "H5Pclose");
} /* test_h5o_plist() */
-
+
/****************************************************************
**
** test_h5o_link(): Test creating link to object
@@ -850,7 +850,7 @@ test_h5o_link(void)
/* Verify the data */
for(i = 0; i < (TEST6_DIM1 * TEST6_DIM2); i++)
VERIFY(wdata[i], rdata[i], "H5Dread");
-
+
/* Create a group with no name*/
group_id = H5Gcreate_anon(file_id, H5P_DEFAULT, H5P_DEFAULT);
CHECK(group_id, FAIL, "H5Gcreate_anon");
@@ -910,7 +910,7 @@ test_h5o_link(void)
HDfree(rdata);
} /* end test_h5o_link() */
-
+
/****************************************************************
**
** test_h5o_comment(): Test H5Oset(get)_comment functions.
@@ -1083,7 +1083,7 @@ test_h5o_comment(void)
} /* test_h5o_comment() */
-
+
/****************************************************************
**
** test_h5o_comment_by_name(): Test H5Oset(get)_comment_by_name functions.
@@ -1242,7 +1242,7 @@ test_h5o_comment_by_name(void)
} /* test_h5o_comment_by_name() */
-
+
/****************************************************************
**
** test_h5o_getinfo_same_file(): Test that querying the object info for
@@ -1254,7 +1254,7 @@ test_h5o_getinfo_same_file(void)
{
hid_t fid1, fid2; /* HDF5 File ID */
hid_t gid1, gid2; /* Group IDs */
- H5O_info_t oinfo1, oinfo2; /* Object info structs */
+ H5O_info_t oinfo1, oinfo2; /* Object info structs */
herr_t ret; /* Value returned from API calls */
/* Create a new HDF5 file */
@@ -1353,7 +1353,7 @@ test_h5o_getinfo_same_file(void)
/****************************************************************
**
** visit_obj_cb():
-** This is the callback function invoked by H5Ovisit1() in
+** This is the callback function invoked by H5Ovisit() in
** test_h5o_getinfo_visit():
** --Verify that the object info returned to the callback
** function is the same as H5Oget_info2().
@@ -1368,27 +1368,27 @@ visit_obj_cb(hid_t group_id, const char *name, const H5O_info_t *oinfo1,
/* Verify the object info for "group1", "group2" and the root group */
if(!(HDstrcmp(name, "group1"))) {
H5Oget_info_by_name2(group_id, name, &oinfo2, H5O_INFO_NUM_ATTRS, H5P_DEFAULT);
- VERIFY(oinfo1->num_attrs, oinfo2.num_attrs, "obj info from H5Ovisit1");
+ VERIFY(oinfo1->num_attrs, oinfo2.num_attrs, "obj info from H5Ovisit");
} else if(!(HDstrcmp(name, "group2"))) {
H5Oget_info_by_name2(group_id, name, &oinfo2, H5O_INFO_HDR, H5P_DEFAULT);
- VERIFY(oinfo1->hdr.nmesgs, oinfo2.hdr.nmesgs, "obj info from H5Ovisit1/H5Oget_info2");
- VERIFY(oinfo1->hdr.nchunks, oinfo2.hdr.nchunks, "obj info from H5Ovisit1/H5Oget_info2");
+ VERIFY(oinfo1->hdr.nmesgs, oinfo2.hdr.nmesgs, "obj info from H5Ovisit/H5Oget_info2");
+ VERIFY(oinfo1->hdr.nchunks, oinfo2.hdr.nchunks, "obj info from H5Ovisit/H5Oget_info2");
} else if(!(HDstrcmp(name, "."))) {
H5Oget_info_by_name2(group_id, name, &oinfo2, H5O_INFO_META_SIZE, H5P_DEFAULT);
- VERIFY(oinfo1->meta_size.obj.index_size, oinfo2.meta_size.obj.index_size, "obj info from H5Ovisit1/H5Oget_info2");
- VERIFY(oinfo1->meta_size.obj.heap_size, oinfo2.meta_size.obj.heap_size, "obj info from H5Ovisit1/H5Oget_info2");
+ VERIFY(oinfo1->meta_size.obj.index_size, oinfo2.meta_size.obj.index_size, "obj info from H5Ovisit/H5Oget_info2");
+ VERIFY(oinfo1->meta_size.obj.heap_size, oinfo2.meta_size.obj.heap_size, "obj info from H5Ovisit/H5Oget_info2");
}
return(H5_ITER_CONT);
} /* end visit_obj_cb() */
-
+
/****************************************************************
**
-** test_h5o_getinfo_visit():
-** Verify that the object info returned via H5Oget_info1()
+** test_h5o_getinfo_visit():
+** Verify that the object info returned via H5Oget_info()
** and H5Oget_info2() are the same.
-** Verify that the object info retrieved via H5Ovisit1() is
+** Verify that the object info retrieved via H5Ovisit() is
** the same as H5Oget_info2().
**
****************************************************************/
@@ -1436,31 +1436,31 @@ test_h5o_getinfo_visit(void)
HDmemset(&oinfo1, 0, sizeof(oinfo1));
HDmemset(&oinfo2, 0, sizeof(oinfo2));
- /* Query the object info for "group1" via H5Oget_info1 and H5Oget_info2 */
- ret = H5Oget_info1(gid1, &oinfo1);
+ /* Query the object info for "group1" via H5Oget_info and H5Oget_info2 */
+ ret = H5Oget_info(gid1, &oinfo1);
CHECK(ret, FAIL, "H5Oget_info");
ret = H5Oget_info2(gid1, &oinfo2, H5O_INFO_BASIC|H5O_INFO_NUM_ATTRS);
CHECK(ret, FAIL, "H5Oget_info");
/* Verify the object info for "group1" is correct */
- VERIFY(oinfo1.fileno, oinfo2.fileno, "obj info from H5Oget_info1/2");
- VERIFY(oinfo1.num_attrs, oinfo2.num_attrs, "obj info from H5Oget_info1/2");
+ VERIFY(oinfo1.fileno, oinfo2.fileno, "obj info from H5Oget_info/2");
+ VERIFY(oinfo1.num_attrs, oinfo2.num_attrs, "obj info from H5Oget_info/2");
/* Reset object info */
HDmemset(&oinfo1, 0, sizeof(oinfo1));
HDmemset(&oinfo2, 0, sizeof(oinfo2));
- /* Query the object info for "group2" via H5Oget_info1 and H5Oget_info2 */
- ret = H5Oget_info_by_name1(fid, "group2", &oinfo1, H5P_DEFAULT);
+ /* Query the object info for "group2" via H5Oget_info_by_name and H5Oget_info_by_name2 */
+ ret = H5Oget_info_by_name(fid, "group2", &oinfo1, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name");
ret = H5Oget_info_by_name2(fid, "group2", &oinfo2, H5O_INFO_HDR|H5O_INFO_META_SIZE, H5P_DEFAULT);
CHECK(ret, FAIL, "H5Oget_info_by_name");
/* Verify the object info for "group2" is correct */
- VERIFY(oinfo1.hdr.nmesgs, oinfo2.hdr.nmesgs, "obj info from H5Oget_info1/2");
- VERIFY(oinfo1.hdr.nchunks, oinfo2.hdr.nchunks, "obj info from H5Oget_info1/2");
- VERIFY(oinfo1.meta_size.obj.index_size, oinfo2.meta_size.obj.index_size, "obj info from H5Oget_info1/2");
- VERIFY(oinfo1.meta_size.obj.heap_size, oinfo2.meta_size.obj.heap_size, "obj info from H5Oget_info1/2");
+ VERIFY(oinfo1.hdr.nmesgs, oinfo2.hdr.nmesgs, "obj info from H5Oget_info_by_name/2");
+ VERIFY(oinfo1.hdr.nchunks, oinfo2.hdr.nchunks, "obj info from H5Oget_info_by_name/2");
+ VERIFY(oinfo1.meta_size.obj.index_size, oinfo2.meta_size.obj.index_size, "obj info from H5Oget_info_by_name/2");
+ VERIFY(oinfo1.meta_size.obj.heap_size, oinfo2.meta_size.obj.heap_size, "obj info from H5Oget_info_by_name/2");
/* Close everything */
ret = H5Gclose(gid1);
@@ -1469,7 +1469,7 @@ test_h5o_getinfo_visit(void)
CHECK(ret, FAIL, "H5Gclose");
/* Verify the object info returned to the callback function is correct */
- ret = H5Ovisit1(fid, H5_INDEX_NAME, H5_ITER_INC, visit_obj_cb, NULL);
+ ret = H5Ovisit(fid, H5_INDEX_NAME, H5_ITER_INC, visit_obj_cb, NULL);
/* Close the file */
ret = H5Fclose(fid);
@@ -1479,7 +1479,7 @@ test_h5o_getinfo_visit(void)
#endif /* H5_NO_DEPRECATED_SYMBOLS */
-
+
/****************************************************************
**
** test_h5o(): Main H5O (generic object) testing routine.
@@ -1491,9 +1491,9 @@ test_h5o(void)
/* Output message about test being performed */
MESSAGE(5, ("Testing Objects\n"));
- test_h5o_open(); /* Test generic open function */
- test_h5o_open_by_addr(); /* Test opening objects by address */
- test_h5o_close(); /* Test generic close function */
+ test_h5o_open(); /* Test generic open function */
+ test_h5o_open_by_addr(); /* Test opening objects by address */
+ test_h5o_close(); /* Test generic close function */
test_h5o_refcount(); /* Test incrementing and decrementing reference count */
test_h5o_plist(); /* Test object creation properties */
test_h5o_link(); /* Test object link routine */
@@ -1501,19 +1501,19 @@ test_h5o(void)
test_h5o_comment_by_name(); /* Test routines for comment by name */
test_h5o_getinfo_same_file(); /* Test info for objects in the same file */
#ifndef H5_NO_DEPRECATED_SYMBOLS
- test_h5o_getinfo_visit(); /* Test object info for H5Oget_info1/2 and H5Ovisit1 */
+ test_h5o_getinfo_visit(); /* Test object info for H5Oget_info/2 and H5Ovisit */
#endif
} /* test_h5o() */
-
+
/*-------------------------------------------------------------------------
- * Function: cleanup_h5o
+ * Function: cleanup_h5o
*
- * Purpose: Cleanup temporary test files
+ * Purpose: Cleanup temporary test files
*
- * Return: none
+ * Return: none
*
- * Programmer: James Laird
+ * Programmer: James Laird
* June 3, 2006
*
*-------------------------------------------------------------------------
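
For reference, a minimal sketch of the H5Ovisit / H5Oget_info_by_name2 cross-check that the th5o.c hunks above switch to the unversioned names for: visit every object reachable from a file and, for each one, confirm that the info handed to the callback agrees with a direct query. This is not part of the patch; check_obj_cb and check_file_objects are illustrative names, and only the attribute count (H5O_INFO_NUM_ATTRS) is compared here.

/* Illustrative sketch, not from the patch: compare visit-callback info
 * against a direct per-object query. */
#include "hdf5.h"

static herr_t check_obj_cb(hid_t loc_id, const char *name,
                           const H5O_info_t *info, void *op_data)
{
    H5O_info_t direct;

    (void)op_data;   /* unused */

    /* Query the same object directly, restricted to the attribute count */
    if(H5Oget_info_by_name2(loc_id, name, &direct, H5O_INFO_NUM_ATTRS,
                            H5P_DEFAULT) < 0)
        return H5_ITER_ERROR;

    /* The visit callback and the direct query should agree */
    if(info->num_attrs != direct.num_attrs)
        return H5_ITER_ERROR;

    return H5_ITER_CONT;
}

static herr_t check_file_objects(hid_t fid)
{
    /* Iterate over all objects reachable from the file, by name, ascending */
    return H5Ovisit(fid, H5_INDEX_NAME, H5_ITER_INC, check_obj_cb, NULL);
}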