author      Dana Robinson <derobins@hdfgroup.org>    2019-07-10 17:12:07 (GMT)
committer   Dana Robinson <derobins@hdfgroup.org>    2019-07-10 17:12:07 (GMT)
commit      59f94cd8f84f4911b9157b101bb01b1d53a04781 (patch)
tree        1239b8ab6b1b9894662a6396fe8b421ecd448a58
parent      52f0622bfed19a56d2f57b25b07b5a7996915507 (diff)
parent      3e33d61c4ee74298273d1ca18ea942e1ceb5f26d (diff)
Merge branch 'develop' into gnu-flags-work
-rw-r--r--   c++/src/CMakeLists.txt               |   2
-rw-r--r--   config/cmake_ext_mod/FindSZIP.cmake  |   2
-rw-r--r--   hl/test/test_dset_append.c           | 996
-rw-r--r--   release_docs/RELEASE.txt             |  19
-rw-r--r--   src/H5PLpath.c                       |   2
5 files changed, 513 insertions, 508 deletions
diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt
index 8e7f8be..b141a5e 100644
--- a/c++/src/CMakeLists.txt
+++ b/c++/src/CMakeLists.txt
@@ -139,7 +139,7 @@ if (HDF5_EXPORTED_TARGETS)
INSTALL_TARGET_PDB (${HDF5_CPP_LIBSH_TARGET} ${HDF5_INSTALL_BIN_DIR} cpplibraries)
endif ()
if (NOT ONLY_SHARED_LIBS)
- INSTALL_TARGET_PDB (${HDF5_CPP_LIB_TARGET} ${HDF5_INSTALL_BIN_DIR} cpplibraries)
+ INSTALL_TARGET_PDB (${HDF5_CPP_LIB_TARGET} ${HDF5_INSTALL_LIB_DIR} cpplibraries)
endif ()
install (
diff --git a/config/cmake_ext_mod/FindSZIP.cmake b/config/cmake_ext_mod/FindSZIP.cmake
index 152f8ac..b84d768 100644
--- a/config/cmake_ext_mod/FindSZIP.cmake
+++ b/config/cmake_ext_mod/FindSZIP.cmake
@@ -57,7 +57,7 @@ endforeach()
if(NOT SZIP_LIBRARY)
find_library(SZIP_LIBRARY_RELEASE NAMES ${szip_names})
find_library(SZIP_LIBRARY_DEBUG NAMES ${szip_names_debug})
- include(${CMAKE_CURRENT_LIST_DIR}/SelectLibraryConfigurations.cmake)
+ include(SelectLibraryConfigurations)
select_library_configurations(SZIP)
mark_as_advanced(SZIP_LIBRARY_RELEASE SZIP_LIBRARY_DEBUG)
endif()
diff --git a/hl/test/test_dset_append.c b/hl/test/test_dset_append.c
index d890481..9d466b5 100644
--- a/hl/test/test_dset_append.c
+++ b/hl/test/test_dset_append.c
@@ -11,38 +11,39 @@
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+#include <math.h>
#include <stdlib.h>
#include <string.h>
+
#include "h5hltest.h"
#include "H5DOpublic.h"
-#include <math.h>
#if defined(H5_HAVE_ZLIB_H) && !defined(H5_ZLIB_HEADER)
# define H5_ZLIB_HEADER "zlib.h"
#endif
#if defined(H5_ZLIB_HEADER)
-# include H5_ZLIB_HEADER /* "zlib.h" */
+#include H5_ZLIB_HEADER /* "zlib.h" */
#endif
-#define FILE "test_append.h5"
-#define DNAME_NOTSET "dataset_notset"
-#define DNAME_UNLIM "dataset_unlim"
-#define DNAME_LESS "dataset_less"
-#define DNAME_VARY "dataset_vary"
-#define DNAME_ROW "dataset_row"
-#define DNAME_COLUMN "dataset_column"
-#define DBUGNAME1 "dataset_bug1"
-#define DBUGNAME2 "dataset_bug2"
+#define FILENAME "test_append.h5"
+#define DNAME_NOTSET "dataset_notset"
+#define DNAME_UNLIM "dataset_unlim"
+#define DNAME_LESS "dataset_less"
+#define DNAME_VARY "dataset_vary"
+#define DNAME_ROW "dataset_row"
+#define DNAME_COLUMN "dataset_column"
+#define DBUGNAME1 "dataset_bug1"
+#define DBUGNAME2 "dataset_bug2"
/*-------------------------------------------------------------------------
- * Function: test_dataset_append_notset
+ * Function: test_dataset_append_notset
*
- * Purpose: Verify that H5DOappend works properly with default dapl.
- * That is, H5Pset_append_flush() is not used to set boundary
- * and callback in dapl.
+ * Purpose: Verify that H5DOappend works properly with default dapl.
+ * That is, H5Pset_append_flush() is not used to set boundary
+ * and callback in dapl.
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
* Programmer: Vailin Choi; Aug 2016
*
@@ -51,68 +52,68 @@
static int
test_dataset_append_notset(hid_t fid)
{
- hid_t did = -1; /* Dataset ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dcpl = -1; /* A copy of dataset creation property */
- hid_t ffapl = -1; /* The file's file access property list */
-
- hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
- hsize_t maxdims[2] = {H5S_UNLIMITED, 20}; /* Maximum dimension sizes */
- hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
- int lbuf[10]; /* The data buffers */
- int i, j; /* Local index variables */
- h5_stat_t sb1, sb2; /* File info */
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, 20}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10]; /* The data buffers */
+ int i, j; /* Local index variables */
+ h5_stat_t sb1, sb2; /* File info */
TESTING("Append flush with H5DOappend()--append rows with default dapl");
/* Get the file's file access property list */
if((ffapl = H5Fget_access_plist(fid)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to create a chunked dataset with extendible dimensions */
if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the dataset */
if((did = H5Dcreate2(fid, DNAME_NOTSET, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Append 6 rows to the dataset */
for(i = 0; i < 6; i++) {
- for(j = 0; j < 10; j++)
- lbuf[j] = (i * 10) + (j + 1);
- /* Append without boundary, callback and flush */
- if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
- FAIL_STACK_ERROR;
+ for(j = 0; j < 10; j++)
+ lbuf[j] = (i * 10) + (j + 1);
+ /* Append without boundary, callback and flush */
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ FAIL_STACK_ERROR;
} /* end for */
/* File size when not flushed */
- if(HDstat(FILE, &sb1) < 0)
- TEST_ERROR;
+ if(HDstat(FILENAME, &sb1) < 0)
+ TEST_ERROR;
/* Close the dataset */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* File size after flushing */
- if(HDstat(FILE, &sb2) < 0)
- TEST_ERROR;
+ if(HDstat(FILENAME, &sb2) < 0)
+ TEST_ERROR;
/* File size before flushing should be less */
if(sb1.st_size > sb2.st_size)
- TEST_ERROR;
+ TEST_ERROR;
/* Closing */
if(H5Sclose(sid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dcpl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(ffapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
PASSED();
@@ -120,10 +121,10 @@ test_dataset_append_notset(hid_t fid)
error:
H5E_BEGIN_TRY {
- H5Pclose(dcpl);
- H5Pclose(sid);
- H5Dclose(did);
- H5Pclose(ffapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
} H5E_END_TRY;
return 1;
@@ -148,14 +149,14 @@ append_func(hid_t H5_ATTR_UNUSED dset_id, hsize_t H5_ATTR_UNUSED *cur_dims, void
}
/*-------------------------------------------------------------------------
- * Function: test_dataset_append_rows_columns
+ * Function: test_dataset_append_rows_columns
*
- * Purpose: Verify that the object flush property and the append flush property
- * are working properly when appending rows and columns to a dataset
- * with 2 extendible dimensions.
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending rows and columns to a dataset
+ * with 2 extendible dimensions.
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
* Programmer: Vailin Choi; Jan 2014
*
@@ -164,91 +165,91 @@ append_func(hid_t H5_ATTR_UNUSED dset_id, hsize_t H5_ATTR_UNUSED *cur_dims, void
static int
test_dataset_append_rows_columns(hid_t fid)
{
- hid_t did = -1; /* Dataset ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dcpl = -1; /* A copy of dataset creation property */
- hid_t dapl = -1; /* A copy of dataset access property */
- hid_t ffapl = -1; /* The file's file access property list */
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t dapl = -1; /* A copy of dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
- hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
- hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes */
- hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
- int lbuf[10], cbuf[6]; /* The data buffers */
- int buf[6][13], rbuf[6][13]; /* The data buffers */
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10], cbuf[6]; /* The data buffers */
+ int buf[6][13], rbuf[6][13]; /* The data buffers */
- hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
- unsigned append_ct = 0; /* The # of appends */
- unsigned *flush_ptr; /* Points to the flush counter */
+ hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
- int i, j; /* Local index variables */
+ int i, j; /* Local index variables */
TESTING("Append flush with H5DOappend()--append rows & columns");
/* Get the file's file access property list */
if((ffapl = H5Fget_access_plist(fid)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to create a chunked dataset with 2 extendible dimensions */
if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set append flush property */
if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the dataset */
if((did = H5Dcreate2(fid, DNAME_UNLIM, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 6 rows to the dataset */
for(i = 0; i < 6; i++) {
- for(j = 0; j < 10; j++)
- lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
- if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
- TEST_ERROR;
+ for(j = 0; j < 10; j++)
+ lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
} /* end for */
/* Verify the # of appends */
if(append_ct != 6)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 6)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 3 columns to the dataset */
for(i = 0; i < 3; i++) {
- for(j = 0; j < 6; j++)
- cbuf[j] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
- if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
- TEST_ERROR;
+ for(j = 0; j < 6; j++)
+ cbuf[j] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
+ if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
+ TEST_ERROR;
} /* end for */
/* Verify the # of appends */
if(append_ct != 9)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 9)
- TEST_ERROR;
+ TEST_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
@@ -257,33 +258,33 @@ test_dataset_append_rows_columns(hid_t fid)
/* Close the dataset */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Open the dataset again */
if((did = H5Dopen2(fid, DNAME_UNLIM, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
/* Closing */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Sclose(sid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dcpl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(ffapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
PASSED();
@@ -291,25 +292,25 @@ test_dataset_append_rows_columns(hid_t fid)
error:
H5E_BEGIN_TRY {
- H5Pclose(dapl);
- H5Pclose(dcpl);
- H5Pclose(sid);
- H5Dclose(did);
- H5Pclose(ffapl);
+ H5Pclose(dapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
} H5E_END_TRY;
return 1;
} /* test_dataset_append_rows_columns() */
/*-------------------------------------------------------------------------
- * Function: test_dataset_append_rows
+ * Function: test_dataset_append_rows
*
- * Purpose: Verify that the object flush property and the append flush property
- * are working properly when appending rows to a dataset with
- * one extendible dimension (row).
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending rows to a dataset with
+ * one extendible dimension (row).
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
* Programmer: Vailin Choi; Jan 2014
*
@@ -318,71 +319,71 @@ error:
static int
test_dataset_append_rows(hid_t fid)
{
- hid_t did = -1; /* Dataset ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dcpl = -1; /* A copy of dataset creation property */
- hid_t dapl = -1; /* A copy of dataset access property */
- hid_t ffapl = -1; /* The file's file access property list */
-
- hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
- hsize_t maxdims[2] = {H5S_UNLIMITED, 10}; /* Maximum dimension sizes */
- hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
- int lbuf[10]; /* The data buffer */
- int buf[6][10], rbuf[6][10]; /* The data buffers */
- int i, j; /* Local index variables */
-
- hsize_t boundary[2] = {1, 0}; /* Boundary sizes */
- unsigned append_ct = 0; /* The # of appends */
- unsigned *flush_ptr; /* Points to the flush counter */
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t dapl = -1; /* A copy of dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, 10}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10]; /* The data buffer */
+ int buf[6][10], rbuf[6][10]; /* The data buffers */
+ int i, j; /* Local index variables */
+
+ hsize_t boundary[2] = {1, 0}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
TESTING("Append flush with H5DOappend()--append rows");
/* Get the file's file access property list */
if((ffapl = H5Fget_access_plist(fid)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to create a chunked dataset with 1 extendible dimension */
if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set append flush property */
if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the dataset */
if((did = H5Dcreate2(fid, DNAME_ROW, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 6 rows to the dataset */
for(i = 0; i < 6; i++) {
- for(j = 0; j < 10; j++)
- lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
- if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
- TEST_ERROR;
+ for(j = 0; j < 10; j++)
+ lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
} /* end for */
/* Verify the # of appends */
if(append_ct != 6)
- TEST_ERROR;
+ TEST_ERROR;
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 6)
- TEST_ERROR;
+ TEST_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 10; j++)
+ for(j = 0; j < 10; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
@@ -391,33 +392,33 @@ test_dataset_append_rows(hid_t fid)
/* Close the dataset */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Open the dataset again */
if((did = H5Dopen2(fid, DNAME_ROW, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 10; j++)
+ for(j = 0; j < 10; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
/* Closing */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Sclose(sid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dcpl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(ffapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
PASSED();
@@ -425,25 +426,25 @@ test_dataset_append_rows(hid_t fid)
error:
H5E_BEGIN_TRY {
- H5Pclose(dapl);
- H5Pclose(dcpl);
- H5Pclose(sid);
- H5Dclose(did);
- H5Pclose(ffapl);
+ H5Pclose(dapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
} H5E_END_TRY;
return 1;
} /* test_dataset_append_rows() */
/*-------------------------------------------------------------------------
- * Function: test_dataset_append_columns
+ * Function: test_dataset_append_columns
*
- * Purpose: Verify that the object flush property and the append flush property
- * are working properly when appending columns to a dataset
- * with one extendible dimension (column).
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending columns to a dataset
+ * with one extendible dimension (column).
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
* Programmer: Vailin Choi; Jan 2014
*
@@ -452,72 +453,72 @@ error:
static int
test_dataset_append_columns(hid_t fid)
{
- hid_t did = -1; /* Dataset ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dcpl = -1; /* A copy of dataset creation property */
- hid_t dapl = -1; /* A copy of dataset access property */
- hid_t ffapl = -1; /* The file's file access property list */
-
- hsize_t dims[2] = {6, 0}; /* Current dimension sizes */
- hsize_t maxdims[2] = {6, H5S_UNLIMITED}; /* Maximum dimension sizes */
- hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
- int cbuf[6]; /* The data buffer */
- int buf[6][3], rbuf[6][3]; /* The data buffers */
- int i, j; /* Local index variable */
-
- hsize_t boundary[2] = {0, 1}; /* Boundary sizes */
- unsigned append_ct = 0; /* The # of appends */
- unsigned *flush_ptr; /* Points to the flush counter */
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t dapl = -1; /* A copy of dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {6, 0}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {6, H5S_UNLIMITED}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int cbuf[6]; /* The data buffer */
+ int buf[6][3], rbuf[6][3]; /* The data buffers */
+ int i, j; /* Local index variable */
+
+ hsize_t boundary[2] = {0, 1}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
TESTING("Append flush with H5DOappend()--append columns");
/* Get the file's file access property list */
if((ffapl = H5Fget_access_plist(fid)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to create a chunked dataset with 1 extendible dimension */
if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set append flush property */
if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the dataset */
if((did = H5Dcreate2(fid, DNAME_COLUMN, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 3 columns to the dataset */
for(i = 0; i < 3; i++) {
- for(j = 0; j < 6; j++)
- cbuf[j] = buf[j][i] = ((i * 6) + (j + 1)) * -1;
- if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
- TEST_ERROR;
+ for(j = 0; j < 6; j++)
+ cbuf[j] = buf[j][i] = ((i * 6) + (j + 1)) * -1;
+ if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
+ TEST_ERROR;
} /* end for */
/* Verify the # of appends */
if(append_ct != 3)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 3)
- TEST_ERROR;
+ TEST_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 3; j++)
+ for(j = 0; j < 3; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
@@ -526,33 +527,33 @@ test_dataset_append_columns(hid_t fid)
/* Close the dataset */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Open the dataset again */
if((did = H5Dopen2(fid, DNAME_COLUMN, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 3; j++)
+ for(j = 0; j < 3; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
/* Closing */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Sclose(sid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dcpl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(ffapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
PASSED();
@@ -560,33 +561,34 @@ test_dataset_append_columns(hid_t fid)
error:
H5E_BEGIN_TRY {
- H5Pclose(dapl);
- H5Pclose(dcpl);
- H5Pclose(sid);
- H5Dclose(did);
- H5Pclose(ffapl);
+ H5Pclose(dapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
} H5E_END_TRY;
return 1;
} /* test_dataset_append_columns() */
/*-------------------------------------------------------------------------
- * Function: test_dataset_append_BUG1
+ * Function: test_dataset_append_BUG1
*
- * Purpose: Verify that the object flush property and the append flush property
+ * Purpose: Verify that the object flush property and the append flush property
* are working properly when appending rows and columns to an
* extendible dataset.
- * A BUG occurs:
- * when the extendible dataset is set up as follows:
- * hsize_t dims[2] = {0, 10};
- * hsize_t maxdims[2] = {H5S_UNLIMITED, 50};
- * when append 6 rows and 3 columns to the dataset;
- * The data is correct when the dataset is read at this point;
- * The data is incorrect when the dataset is closed, opened again, and read at this point;
- * NOTE: the problem does not occur when H5Dflush() is not performed for each row/column.
*
- * Return: Success: 0
- * Failure: 1
+ * A BUG occurs:
+ * when the extendible dataset is set up as follows:
+ * hsize_t dims[2] = {0, 10};
+ * hsize_t maxdims[2] = {H5S_UNLIMITED, 50};
+ * when appending 6 rows and 3 columns to the dataset;
+ * The data is correct when the dataset is read at this point;
+ * The data is incorrect when the dataset is closed, opened again, and read at this point;
+ * NOTE: the problem does not occur when H5Dflush() is not performed for each row/column.
+ *
+ * Return: Success: 0
+ * Failure: 1
*
* Programmer: Vailin Choi; Jan 2014
*
@@ -595,126 +597,124 @@ error:
static int
test_dataset_append_BUG1(hid_t fid)
{
- hid_t did = -1; /* Dataset ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dcpl = -1; /* Dataset creation property */
- hid_t dapl = -1; /* Dataset access property */
- hid_t ffapl = -1; /* The file's file access property list */
-
- hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
- hsize_t maxdims[2] = {H5S_UNLIMITED, 50}; /* Maximum dimension sizes */
- hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
- int lbuf[10], cbuf[6]; /* The data buffers */
- int buf[6][13], rbuf[6][13]; /* The data buffers */
- int i, j; /* Local index variables */
-
- hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
- unsigned append_ct = 0; /* The # of appends */
- unsigned *flush_ptr; /* Points to the flush counter */
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* Dataset creation property */
+ hid_t dapl = -1; /* Dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, 50}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10], cbuf[6]; /* The data buffers */
+ int buf[6][13], rbuf[6][13]; /* The data buffers */
+ int i, j; /* Local index variables */
+
+ hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
TESTING("Append flush with H5DOappend()--append rows & columns--BUG1");
/* Get the file's file access property list */
if((ffapl = H5Fget_access_plist(fid)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to create a chunked dataset with 2 extendible dimensions */
if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set append flush property */
if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the dataset */
if((did = H5Dcreate2(fid, DBUGNAME1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 6 rows to the dataset */
for(i = 0; i < 6; i++) {
- for(j = 0; j < 10; j++)
- lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
- if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
- TEST_ERROR;
+ for(j = 0; j < 10; j++)
+ lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
} /* end for */
/* Verify the # of appends */
if(append_ct != 6)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 6)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 3 columns to the dataset */
for(i = 0; i < 3; i++) {
- for(j = 0; j < 6; j++)
- cbuf[j] = buf[j][i+10] = ((i * 6) + (j + 1)) * -1;
- if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
- TEST_ERROR;
+ for(j = 0; j < 6; j++)
+ cbuf[j] = buf[j][i+10] = ((i * 6) + (j + 1)) * -1;
+ if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
+ TEST_ERROR;
} /* end for */
/* Verify the # of appends */
if(append_ct != 9)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 9)
- TEST_ERROR;
+ TEST_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
-#ifdef BUG1
HDmemset(rbuf, 0, sizeof(rbuf));
/* Close the dataset */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Open the dataset again */
if((did = H5Dopen(fid, DBUGNAME1, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
-#endif
/* Closing */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Sclose(sid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dcpl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(ffapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
PASSED();
@@ -722,33 +722,34 @@ test_dataset_append_BUG1(hid_t fid)
error:
H5E_BEGIN_TRY {
- H5Pclose(dcpl);
- H5Pclose(dapl);
- H5Pclose(sid);
- H5Dclose(did);
- H5Pclose(ffapl);
+ H5Pclose(dcpl);
+ H5Pclose(dapl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
} H5E_END_TRY;
return 1;
} /* test_dataset_append_BUG1() */
/*-------------------------------------------------------------------------
- * Function: test_dataset_append_BUG2
+ * Function: test_dataset_append_BUG2
*
- * Purpose: Verify that the object flush property and the append flush property
+ * Purpose: Verify that the object flush property and the append flush property
* are working properly when appending rows and columns to an
* extendible dataset.
- * A BUG occurs:
- * when the extendible dataset is set up as follows:
- * hsize_t dims[2] = {0, 10};
- * hsize_t maxdims[2] = {50, H5S_UNLIMITED};
- * when append 6 rows and 3 columns to the dataset;
- * The data is correct when the dataset is read at this point;
- * The data is incorrect when the dataset is closed, opened again, and read at this point;
- * NOTE: the problem does not occur when H5Dflush() is not performed for each row/column.
*
- * Return: Success: 0
- * Failure: 1
+ * A BUG occurs:
+ * when the extendible dataset is set up as follows:
+ * hsize_t dims[2] = {0, 10};
+ * hsize_t maxdims[2] = {50, H5S_UNLIMITED};
+ * when appending 6 rows and 3 columns to the dataset;
+ * The data is correct when the dataset is read at this point;
+ * The data is incorrect when the dataset is closed, opened again, and read at this point;
+ * NOTE: the problem does not occur when H5Dflush() is not performed for each row/column.
+ *
+ * Return: Success: 0
+ * Failure: 1
*
* Programmer: Vailin Choi; Jan 2014
*
@@ -757,127 +758,125 @@ error:
static int
test_dataset_append_BUG2(hid_t fid)
{
- hid_t did = -1; /* Dataset ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dcpl = -1; /* Dataset creation property */
- hid_t dapl = -1; /* Dataset access property */
- hid_t ffapl = -1; /* The file's file access property list */
-
- hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
- hsize_t maxdims[2] = {50, H5S_UNLIMITED}; /* Maximum dimension sizes */
- hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
- int lbuf[10], cbuf[6]; /* Data buffers */
- int buf[6][13], rbuf[6][13]; /* Data buffers */
- int i, j; /* Local index variables */
-
- hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
- unsigned append_ct = 0; /* The # of appends */
- unsigned *flush_ptr; /* Points to the flush counter */
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* Dataset creation property */
+ hid_t dapl = -1; /* Dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {50, H5S_UNLIMITED}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10], cbuf[6]; /* Data buffers */
+ int buf[6][13], rbuf[6][13]; /* Data buffers */
+ int i, j; /* Local index variables */
+
+ hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
TESTING("Append flush with H5DOappend()--append rows & columns--BUG2");
/* Get the file's file access property list */
if((ffapl = H5Fget_access_plist(fid)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to create a chunked dataset with 2 extendible dimensions */
if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set append flush property */
if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the dataset */
if((did = H5Dcreate2(fid, DBUGNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 6 rows to the dataset */
for(i = 0; i < 6; i++) {
- for(j = 0; j < 10; j++)
- lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
- if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
- TEST_ERROR;
+ for(j = 0; j < 10; j++)
+ lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
} /* end for */
/* Verify the # of appends */
if(append_ct != 6)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 6)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 3 columns to the dataset */
for(i = 0; i < 3; i++) {
- for(j = 0; j < 6; j++)
- cbuf[j] = buf[j][i+10] = ((i * 6) + (j + 1)) * -1;
- if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
- TEST_ERROR;
+ for(j = 0; j < 6; j++)
+ cbuf[j] = buf[j][i+10] = ((i * 6) + (j + 1)) * -1;
+ if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
+ TEST_ERROR;
} /* end for */
/* Verify the # of appends */
if(append_ct != 9)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 9)
- TEST_ERROR;
+ TEST_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
-#ifdef BUG2
HDmemset(rbuf, 0, sizeof(rbuf));
/* Close the dataset */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Open the dataset again */
if((did = H5Dopen(fid, DBUGNAME2, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
-#endif
/* Closing */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Sclose(sid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dcpl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(ffapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
PASSED();
@@ -885,11 +884,11 @@ test_dataset_append_BUG2(hid_t fid)
error:
H5E_BEGIN_TRY {
- H5Pclose(dcpl);
- H5Pclose(dapl);
- H5Pclose(sid);
- H5Dclose(did);
- H5Pclose(ffapl);
+ H5Pclose(dcpl);
+ H5Pclose(dapl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
} H5E_END_TRY;
return 1;
@@ -897,15 +896,15 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_dataset_append_less
+ * Function: test_dataset_append_less
*
- * Purpose: Verify that the object flush property and the append flush property
- * are working properly when appending rows and columns to an
- * extendible dataset where the append size is less than the boundary
- * size.
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending rows and columns to an
+ * extendible dataset where the append size is less than the boundary
+ * size.
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
* Programmer: Vailin Choi; Jan 2014
*
@@ -914,93 +913,93 @@ error:
static int
test_dataset_append_less(hid_t fid)
{
- hid_t did = -1; /* Dataset ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dcpl = -1; /* A copy of dataset creation property */
- hid_t dapl = -1; /* A copy of dataset access property */
- hid_t ffapl = -1; /* The file's file access property list */
-
- hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
- hsize_t maxdims[2] = {100, 100}; /* Maximum dimension sizes */
- hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
- int lbuf[20], cbuf[6][3]; /* Data buffers */
- int buf[6][13], rbuf[6][13]; /* Data buffers */
- int i, j, k; /* Local index variables */
-
- hsize_t boundary[2] = {3, 3}; /* Boundary sizes */
- unsigned append_ct = 0; /* The # of appends */
- unsigned *flush_ptr; /* Points to the flush counter */
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t dapl = -1; /* A copy of dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {100, 100}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[20], cbuf[6][3]; /* Data buffers */
+ int buf[6][13], rbuf[6][13]; /* Data buffers */
+ int i, j, k; /* Local index variables */
+
+ hsize_t boundary[2] = {3, 3}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
TESTING("Append flush with H5DOappend()--append size < boundary size");
/* Get the file's file access property list */
if((ffapl = H5Fget_access_plist(fid)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to create a chunked dataset with 2 extendible dimensions */
if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set append flush property */
if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the dataset */
if((did = H5Dcreate2(fid, DNAME_LESS, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Append to the dataset 2 rows at a time for 3 times */
for(i = 0, k = 0; i < 6; i++) {
- for(j = 0; j < 10; j++, k++)
- buf[i][j] = lbuf[k] = (i * 10) + (j + 1);
-
- if((i + 1) % 2 == 0) {
- if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)2, H5T_NATIVE_INT, lbuf) < 0)
- TEST_ERROR;
- k = 0;
- } /* end if */
+ for(j = 0; j < 10; j++, k++)
+ buf[i][j] = lbuf[k] = (i * 10) + (j + 1);
+
+ if((i + 1) % 2 == 0) {
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)2, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
+ k = 0;
+ } /* end if */
} /* end for */
/* Verify the # of appends */
if(append_ct != 2)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 2)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 3 columns to the dataset, once */
for(i = 0; i < 3; i++)
- for(j = 0; j < 6; j++, k++)
- cbuf[j][i] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
+ for(j = 0; j < 6; j++, k++)
+ cbuf[j][i] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)3, H5T_NATIVE_INT, cbuf) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Verify the # of appends */
if(append_ct != 3)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 3)
- TEST_ERROR;
+ TEST_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
@@ -1009,33 +1008,33 @@ test_dataset_append_less(hid_t fid)
/* Close the dataset */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Open the dataset again */
if((did = H5Dopen2(fid, DNAME_LESS, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
/* Closing */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Sclose(sid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dcpl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(ffapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
PASSED();
@@ -1043,29 +1042,29 @@ test_dataset_append_less(hid_t fid)
error:
H5E_BEGIN_TRY {
- H5Pclose(dapl);
- H5Pclose(dcpl);
- H5Pclose(sid);
- H5Dclose(did);
- H5Pclose(ffapl);
+ H5Pclose(dapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
} H5E_END_TRY;
return 1;
} /* test_dataset_append_less() */
/*-------------------------------------------------------------------------
- * Function: test_dataset_append_vary
+ * Function: test_dataset_append_vary
*
- * Purpose: Verify that the object flush property and the append flush property
- * are working properly when appending rows and columns to an
- * extendible dataset where
- * row: the append size is 3 times of the boundary size
- * the append callback/flush is performed on the 1st boundary hit
- * column: the boundary is greater than the append size
- * the boundary is not hit at all
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending rows and columns to an
+ * extendible dataset where
+ * row: the append size is 3 times the boundary size
+ * the append callback/flush is performed on the 1st boundary hit
+ * column: the boundary is greater than the append size
+ * the boundary is not hit at all
*
- * Return: Success: 0
- * Failure: 1
+ * Return: Success: 0
+ * Failure: 1
*
* Programmer: Vailin Choi; Jan 2014
*
@@ -1074,88 +1073,88 @@ error:
static int
test_dataset_append_vary(hid_t fid)
{
- hid_t did = -1; /* Dataset ID */
- hid_t sid = -1; /* Dataspace ID */
- hid_t dcpl = -1; /* A copy of dataset creation property */
- hid_t dapl = -1; /* A copy of dataset access property */
- hid_t ffapl = -1; /* The file's file access property list */
-
- hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
- hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes */
- hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
- int lbuf[60], cbuf[6][3]; /* Data buffers */
- int buf[6][13], rbuf[6][13]; /* Data buffers */
- int i, j, k; /* Local index variables */
-
- hsize_t boundary[2] = {3, 7}; /* Boundary sizes */
- unsigned append_ct = 0; /* The # of appends */
- unsigned *flush_ptr; /* Points to the flush counter */
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t dapl = -1; /* A copy of dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[60], cbuf[6][3]; /* Data buffers */
+ int buf[6][13], rbuf[6][13]; /* Data buffers */
+ int i, j, k; /* Local index variables */
+
+ hsize_t boundary[2] = {3, 7}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
TESTING("Append flush with H5DOappend()--append & boundary size vary");
/* Get the file's file access property list */
if((ffapl = H5Fget_access_plist(fid)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to create a chunked dataset with 2 extendible dimensions */
if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set append flush property */
if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the dataset */
if((did = H5Dcreate2(fid, DNAME_VARY, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 6 rows to the dataset, once */
for(i = 0, k = 0; i < 6; i++)
- for(j = 0; j < 10; j++, k++)
- buf[i][j] = lbuf[k] = (i * 10) + (j + 1);
+ for(j = 0; j < 10; j++, k++)
+ buf[i][j] = lbuf[k] = (i * 10) + (j + 1);
if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)6, H5T_NATIVE_INT, lbuf) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Verify the # of appends */
if(append_ct != 1)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 1)
- TEST_ERROR;
+ TEST_ERROR;
/* Append 3 columns to the dataset, once */
for(i = 0; i < 3; i++)
- for(j = 0; j < 6; j++, k++)
- cbuf[j][i] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
+ for(j = 0; j < 6; j++, k++)
+ cbuf[j][i] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)3, H5T_NATIVE_INT, cbuf) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Verify the # of appends */
if(append_ct != 1)
- TEST_ERROR;
+ TEST_ERROR;
/* Retrieve and verify object flush counts */
if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(*flush_ptr != 1)
- TEST_ERROR;
+ TEST_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
@@ -1164,33 +1163,33 @@ test_dataset_append_vary(hid_t fid)
/* Close the dataset */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Open the dataset again */
if((did = H5Dopen2(fid, DNAME_VARY, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Read the dataset */
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Verify the data */
for(i = 0; i < 6; i++)
- for(j = 0; j < 13; j++)
+ for(j = 0; j < 13; j++)
if(buf[i][j] != rbuf[i][j])
TEST_ERROR;
/* Closing */
if(H5Dclose(did) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Sclose(sid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(dcpl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Pclose(ffapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
PASSED();
@@ -1198,24 +1197,23 @@ test_dataset_append_vary(hid_t fid)
error:
H5E_BEGIN_TRY {
- H5Pclose(dapl);
- H5Pclose(dcpl);
- H5Pclose(sid);
- H5Dclose(did);
- H5Pclose(ffapl);
+ H5Pclose(dapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
} H5E_END_TRY;
return 1;
} /* test_dataset_append_vary() */
/*-------------------------------------------------------------------------
- * Function: Main function
+ * Function: Main function
*
- * Purpose: Test H5Pset/get_object_flush_cb() and H5Pset/get_append_flush()
- * along with H5DOappend().
+ * Purpose: Test H5Pset/get_object_flush_cb() and H5Pset/get_append_flush()
+ * along with H5DOappend().
*
- * Return: Success: 0
- * Failure: 1
+ * Return: EXIT_SUCCESS/EXIT_FAILURE
*
* Programmer: Vailin Choi; Jan 2014
*
@@ -1223,69 +1221,67 @@ error:
*/
int main(void)
{
- hid_t fid = -1; /* File ID */
- hid_t fapl = -1; /* File access property list */
- unsigned flush_ct = 0; /* The # of flushes */
- int nerrors = 0; /* The # of errors encountered */
+ hid_t fid = -1; /* File ID */
+ hid_t fapl = -1; /* File access property list */
+ unsigned flush_ct = 0; /* The # of flushes */
+ int nerrors = 0; /* The # of errors encountered */
/* Get a copy of file access property list */
if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set to use the latest library format */
if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Set object flush property */
if(H5Pset_object_flush_cb(fapl, flush_func, &flush_ct) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Create the test file */
- if((fid = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
- FAIL_STACK_ERROR;
+ if((fid = H5Fcreate(FILENAME, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
+ FAIL_STACK_ERROR;
nerrors += test_dataset_append_notset(fid);
nerrors += test_dataset_append_rows(fid);
- flush_ct = 0; /* Reset flush counter */
+ flush_ct = 0; /* Reset flush counter */
nerrors += test_dataset_append_columns(fid);
- flush_ct = 0; /* Reset flush counter */
+ flush_ct = 0; /* Reset flush counter */
nerrors += test_dataset_append_rows_columns(fid);
-#ifdef BUG1_BUG2
/*
* The following tests illustrate the scenarios when H5DOappend does not work with extensible array indexing:
- * - when the the dataset has 1 unlimited dimension and the other dimension is fixed but extendible
- * - the dataset expands along 1 dimension and then expands along the other dimension
+ - when the dataset has 1 unlimited dimension and the other dimension is fixed but extendible
+ * - the dataset expands along 1 dimension and then expands along the other dimension
*/
- flush_ct = 0; /* Reset flush counter */
- nerrors += test_dataset_append_BUG1(fid);
+ flush_ct = 0; /* Reset flush counter */
+ nerrors += test_dataset_append_BUG1(fid);
- flush_ct = 0; /* Reset flush counter */
- nerrors += test_dataset_append_BUG2(fid);
-#endif
+ flush_ct = 0; /* Reset flush counter */
+ nerrors += test_dataset_append_BUG2(fid);
- flush_ct = 0; /* Reset flush counter */
+ flush_ct = 0; /* Reset flush counter */
nerrors += test_dataset_append_less(fid);
- flush_ct = 0; /* Reset flush counter */
+ flush_ct = 0; /* Reset flush counter */
nerrors += test_dataset_append_vary(fid);
/* Closing */
if(H5Pclose(fapl) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
if(H5Fclose(fid) < 0)
- FAIL_STACK_ERROR;
+ FAIL_STACK_ERROR;
/* Check for errors */
if(nerrors)
goto error;
- return 0;
+ return EXIT_SUCCESS;
error:
- return 1;
+ return EXIT_FAILURE;
}
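
The assertions throughout this test lean on two callback hooks: H5Pset_object_flush_cb() on the file access property list registers an H5F_flush_cb_t, and H5Pset_append_flush() on the dataset access property list registers an H5D_append_cb_t. The bodies of the test's flush_func and append_func fall outside the hunks shown above, but the checks on flush_ct and append_ct imply they simply bump a counter through the user-data pointer. A minimal sketch of such callbacks, with illustrative names rather than the file's verbatim code:

    #include "hdf5.h"

    /* Illustrative only -- not the verbatim flush_func/append_func from this file */

    static herr_t
    example_flush_cb(hid_t obj_id, void *udata)        /* H5F_flush_cb_t shape */
    {
        (void)obj_id;                   /* object being flushed; unused here */
        ++(*(unsigned *)udata);         /* count one object flush */
        return 0;
    }

    static herr_t
    example_append_cb(hid_t dset_id, hsize_t *cur_dims, void *udata)   /* H5D_append_cb_t shape */
    {
        (void)dset_id;                  /* dataset being appended to; unused here */
        (void)cur_dims;                 /* current dimension sizes; unused here */
        ++(*(unsigned *)udata);         /* count one boundary crossing */
        return 0;
    }

    /* Registered the same way the test does it:
     *   H5Pset_object_flush_cb(fapl, example_flush_cb, &flush_ct);
     *   H5Pset_append_flush(dapl, 2, boundary, example_append_cb, &append_ct);
     */
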
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 7c9bbfa..01d8658 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -48,13 +48,22 @@ New Features
Configuration:
-------------
+ - Windows PDB files are installed incorrectly
+
+ For static builds, the PDB files for Windows should be installed next
+ to the static libraries in the lib folder. Also the debug versions of
+ libraries and PDB files are now correctly built using the default
+ CMAKE_DEBUG_POSTFIX setting.
+
+ (ADB - 2019/07/09, HDFFV-10581)
+
- Add option to build only shared libs
A request was made to prevent building static libraries and only build
shared. A new option was added to CMake, ONLY_SHARED_LIBS, which will
skip building static libraries. Certain utility functions will build with
static libs but are not published. Tests are adjusted to use the correct
- libraries.
+ libraries depending on SHARED/STATIC settings.
(ADB - 2019/06/12, HDFFV-10805)
@@ -73,8 +82,8 @@ New Features
- Change tools test that test the error stack
There are some use cases which can cause the error stack of tools to be
- different then the expected. These tests now use grepTest.cmake, this was
- changed to allow the error file to be searched for an expected string.
+ different than the expected output. These tests now use grepTest.cmake,
+ which was changed to allow the error file to be searched for an expected string.
(ADB - 2019/04/15, HDFFV-10741)
@@ -286,13 +295,13 @@ Bug Fixes since HDF5-1.10.3 release
together with library high bound setting to H5F_LIBVER_V18.
When setting non-default file space info in fcpl via
- H5Pset_file_space_strategy() and then creating a file with
+ H5Pset_file_space_strategy() and then creating a file with
both high and low library bounds set to
H5F_LIBVER_V18 in fapl, the library succeeds in creating the file.
File creation should fail because the feature of setting non-default
file space info does not exist in library release 1.8 or earlier.
- This was fixed by setting and checking the proper version in the
+ This was fixed by setting and checking the proper version in the
file space info message based on the library low and high bounds
when creating and opening the HDF5 file.
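
The file-space entry above can be pictured with a short sketch: create an fcpl with a non-default file space strategy, pin both library bounds to H5F_LIBVER_V18 on the fapl, and attempt H5Fcreate(). The file name, the particular strategy/persist/threshold values, and the error-silencing call are illustrative choices rather than code from the library's test suite; with the fix in place, the creation is expected to fail.

    #include "hdf5.h"
    #include <stdio.h>

    int
    main(void)
    {
        hid_t fcpl = H5Pcreate(H5P_FILE_CREATE);
        hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
        hid_t fid;

        /* Non-default file space info: paged aggregation with persistent
         * free-space tracking (not representable in 1.8-format files) */
        H5Pset_file_space_strategy(fcpl, H5F_FSPACE_STRATEGY_PAGE, 1, (hsize_t)1);

        /* Restrict both low and high library bounds to the 1.8 file format */
        H5Pset_libver_bounds(fapl, H5F_LIBVER_V18, H5F_LIBVER_V18);

        /* Silence the expected error stack output */
        H5Eset_auto2(H5E_DEFAULT, NULL, NULL);

        /* With the fix described above, this creation should now fail */
        fid = H5Fcreate("fspace_v18_demo.h5", H5F_ACC_TRUNC, fcpl, fapl);
        if (fid < 0)
            printf("file creation rejected, as expected with V18 bounds\n");
        else
            H5Fclose(fid);

        H5Pclose(fcpl);
        H5Pclose(fapl);
        return 0;
    }
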
diff --git a/src/H5PLpath.c b/src/H5PLpath.c
index 44d9cb9..e270c73 100644
--- a/src/H5PLpath.c
+++ b/src/H5PLpath.c
@@ -678,7 +678,7 @@ H5PL__find_plugin_in_path(const H5PL_search_params_t *search_params, hbool_t *fo
if (NULL == (path = (char *)H5MM_calloc(len)))
HGOTO_ERROR(H5E_PLUGIN, H5E_CANTALLOC, FAIL, "can't allocate memory for path")
- HDsnprintf(path, len, "%s%s%s", dir, H5PL_PATH_SEPARATOR, dp->d_name);
+ HDsnprintf(path, len, "%s/%s", dir, dp->d_name);
/* Get info for directory entry */
if (HDstat(path, &my_stat) == -1)
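
The H5PLpath.c hunk above replaces the three-part format string (directory, H5PL_PATH_SEPARATOR, entry name) with a literal "/" join. Stated as a standalone pattern with plain libc calls and an invented helper name (the real code sizes the buffer with H5MM_calloc and uses the HDsnprintf/HDstat wrappers), the idea is:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Join a plugin search directory and a directory-entry name with '/'.
     * Returns a freshly allocated string, or NULL on allocation failure;
     * the caller frees the result. */
    static char *
    join_plugin_path(const char *dir, const char *name)
    {
        size_t len  = strlen(dir) + strlen(name) + 2;   /* dir + '/' + name + NUL */
        char  *path = (char *)calloc(1, len);

        if (path != NULL)
            snprintf(path, len, "%s/%s", dir, name);
        return path;
    }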