author    Allen Byrne <byrn@hdfgroup.org>    2017-11-16 17:29:01 (GMT)
committer Allen Byrne <byrn@hdfgroup.org>    2017-11-16 17:29:01 (GMT)
commit    1d8776ea30454dde9acae84c5f2093c62c233365 (patch)
tree      35c6b9bffbc6ae136d501d91581db8412c6e96ad
parent    dc81497bd0840c8adab0cea51ca814359b80305d (diff)
parent    191f4c1e6d4060541958d960c7f96ec014e8194f (diff)
download  hdf5-1d8776ea30454dde9acae84c5f2093c62c233365.zip
          hdf5-1d8776ea30454dde9acae84c5f2093c62c233365.tar.gz
          hdf5-1d8776ea30454dde9acae84c5f2093c62c233365.tar.bz2
Merge pull request #785 in HDFFV/hdf5 from ~BYRN/hdf5_adb:hdf5_1_8 to hdf5_1_8
* commit '191f4c1e6d4060541958d960c7f96ec014e8194f':
  Another fix for error handling
  HDFFV-10328 fix whitespace
  HDFFV-10328 set external reference before copy
  Fix windows link error
  HDFFV-10328 Updated notes
  HDFFV-10328 - move declaration to top of function
  HDFFV-10328 - fix typo
  HDFFV-10328 -Allocate H5Zclass structure from dlopened plugin.
  HDFFV-10329 - add new jira issue.
  Fix linking of libs
-rw-r--r--  CMakeFilters.cmake            16
-rw-r--r--  CMakeLists.txt                 3
-rw-r--r--  fortran/src/CMakeLists.txt    10
-rw-r--r--  release_docs/RELEASE.txt      31
-rw-r--r--  src/CMakeLists.txt             4
-rw-r--r--  src/H5PL.c                    43
-rw-r--r--  src/H5Z.c                   1374
-rw-r--r--  test/CMakeLists.txt            4
-rw-r--r--  test/dsets.c                1232
-rw-r--r--  test/tattr.c                 740
10 files changed, 1710 insertions, 1747 deletions
diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake
index 804b16b..71dabb1 100644
--- a/CMakeFilters.cmake
+++ b/CMakeFilters.cmake
@@ -45,8 +45,8 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT)
if (NOT ZLIB_FOUND)
find_package (ZLIB) # Legacy find
if (ZLIB_FOUND)
- set (LINK_LIBS ${LINK_LIBS} ${ZLIB_LIBRARIES})
- set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${ZLIB_LIBRARIES})
+ set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_LIBRARIES})
+ set (LINK_COMP_SHARED_LIBS ${LINK_COMP_SHARED_LIBS} ${ZLIB_LIBRARIES})
endif ()
endif ()
endif ()
@@ -78,9 +78,9 @@ if (HDF5_ENABLE_Z_LIB_SUPPORT)
set (EXTERNAL_FILTERS "${EXTERNAL_FILTERS} DEFLATE")
endif ()
if (BUILD_SHARED_LIBS)
- set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${ZLIB_SHARED_LIBRARY})
+ set (LINK_COMP_SHARED_LIBS ${LINK_COMP_SHARED_LIBS} ${ZLIB_SHARED_LIBRARY})
endif ()
- set (LINK_LIBS ${LINK_LIBS} ${ZLIB_STATIC_LIBRARY})
+ set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${ZLIB_STATIC_LIBRARY})
INCLUDE_DIRECTORIES (${ZLIB_INCLUDE_DIRS})
message (STATUS "Filter ZLIB is ON")
endif ()
@@ -96,8 +96,8 @@ if (HDF5_ENABLE_SZIP_SUPPORT)
if (NOT SZIP_FOUND)
find_package (SZIP) # Legacy find
if (SZIP_FOUND)
- set (LINK_LIBS ${LINK_LIBS} ${SZIP_LIBRARIES})
- set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${SZIP_LIBRARIES})
+ set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES})
+ set (LINK_COMP_SHARED_LIBS ${LINK_COMP_SHARED_LIBS} ${SZIP_LIBRARIES})
endif ()
endif ()
endif ()
@@ -119,9 +119,9 @@ if (HDF5_ENABLE_SZIP_SUPPORT)
endif ()
endif ()
if (BUILD_SHARED_LIBS)
- set (LINK_SHARED_LIBS ${LINK_SHARED_LIBS} ${SZIP_SHARED_LIBRARY})
+ set (LINK_COMP_SHARED_LIBS ${LINK_COMP_SHARED_LIBS} ${SZIP_SHARED_LIBRARY})
endif ()
- set (LINK_LIBS ${LINK_LIBS} ${SZIP_STATIC_LIBRARY})
+ set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY})
INCLUDE_DIRECTORIES (${SZIP_INCLUDE_DIRS})
message (STATUS "Filter SZIP is ON")
if (H5_HAVE_FILTER_SZIP)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index d45c0b6..02bb516 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -401,7 +401,6 @@ option (BUILD_SHARED_LIBS "Build Shared Libraries" ON)
set (H5_ENABLE_SHARED_LIB NO)
if (BUILD_SHARED_LIBS)
set (H5_ENABLE_SHARED_LIB YES)
- set (LINK_SHARED_LIBS ${LINK_LIBS})
endif ()
set (H5_ENABLE_STATIC_LIB YES)
set (CMAKE_POSITION_INDEPENDENT_CODE ON)
@@ -576,6 +575,8 @@ include (UserMacros.cmake)
#-----------------------------------------------------------------------------
# Include filter (zlib, szip, etc.) macros
#-----------------------------------------------------------------------------
+set (LINK_COMP_LIBS)
+set (LINK_COMP_SHARED_LIBS)
include (CMakeFilters.cmake)
#-----------------------------------------------------------------------------
diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt
index 9527bc0..89fc1a5 100644
--- a/fortran/src/CMakeLists.txt
+++ b/fortran/src/CMakeLists.txt
@@ -184,7 +184,7 @@ set (f90CStub_C_HDRS
add_library (${HDF5_F90_C_LIB_TARGET} STATIC ${f90CStub_C_SRCS} ${f90CStub_C_HDRS})
TARGET_C_PROPERTIES (${HDF5_F90_C_LIB_TARGET} STATIC " " " ")
-target_link_libraries (${HDF5_F90_C_LIB_TARGET} ${HDF5_LIB_TARGET} ${LINK_LIBS})
+target_link_libraries (${HDF5_F90_C_LIB_TARGET} PUBLIC ${HDF5_LIB_TARGET})
set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_F90_C_LIB_TARGET}")
H5_SET_LIB_OPTIONS (${HDF5_F90_C_LIB_TARGET} ${HDF5_F90_C_LIB_NAME} STATIC)
set_target_properties (${HDF5_F90_C_LIB_TARGET} PROPERTIES
@@ -197,7 +197,7 @@ set (install_targets ${HDF5_F90_C_LIB_TARGET})
if (BUILD_SHARED_LIBS AND NOT SKIP_HDF5_FORTRAN_SHARED)
add_library (${HDF5_F90_C_LIBSH_TARGET} SHARED ${f90CStub_C_SRCS} ${f90CStub_C_HDRS})
TARGET_C_PROPERTIES (${HDF5_F90_C_LIBSH_TARGET} SHARED " " " ")
- target_link_libraries (${HDF5_F90_C_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} ${LINK_SHARED_LIBS})
+ target_link_libraries (${HDF5_F90_C_LIBSH_TARGET} PUBLIC ${HDF5_LIBSH_TARGET})
set_global_variable (HDF5_LIBRARIES_TO_EXPORT "${HDF5_LIBRARIES_TO_EXPORT};${HDF5_F90_C_LIBSH_TARGET}")
H5_SET_LIB_OPTIONS (${HDF5_F90_C_LIBSH_TARGET} ${HDF5_F90_C_LIB_NAME} SHARED ${HDF5_F_PACKAGE_SOVERSION})
set_target_properties (${HDF5_F90_C_LIBSH_TARGET} PROPERTIES
@@ -291,7 +291,8 @@ set_source_files_properties (${f90_F_SRCS} PROPERTIES LANGUAGE Fortran)
add_library (${HDF5_F90_LIB_TARGET} STATIC ${f90_F_SRCS})
set (SHARED_LINK_FLAGS " ")
TARGET_FORTRAN_PROPERTIES (${HDF5_F90_LIB_TARGET} STATIC " " " ")
-target_link_libraries (${HDF5_F90_LIB_TARGET} ${HDF5_F90_C_LIB_TARGET} ${HDF5_LIB_TARGET} ${LINK_Fortran_LIBS})
+target_link_libraries (${HDF5_F90_LIB_TARGET} PUBLIC ${HDF5_F90_C_LIB_TARGET})
+target_link_libraries (${HDF5_F90_LIB_TARGET} PRIVATE ${LINK_Fortran_LIBS})
if (H5_HAVE_PARALLEL AND MPI_Fortran_FOUND)
target_include_directories (${HDF5_F90_LIB_TARGET} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
endif ()
@@ -317,7 +318,8 @@ if (BUILD_SHARED_LIBS AND NOT SKIP_HDF5_FORTRAN_SHARED)
set (SHARED_LINK_FLAGS "/DLL /DEF:${HDF5_F90_SRC_BINARY_DIR}/hdf5_fortrandll.def")
endif ()
TARGET_FORTRAN_PROPERTIES (${HDF5_F90_LIBSH_TARGET} SHARED " " ${SHARED_LINK_FLAGS})
- target_link_libraries (${HDF5_F90_LIBSH_TARGET} ${HDF5_F90_C_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} ${LINK_Fortran_LIBS})
+ target_link_libraries (${HDF5_F90_LIBSH_TARGET} PUBLIC ${HDF5_F90_C_LIBSH_TARGET})
+ target_link_libraries (${HDF5_F90_LIBSH_TARGET} PRIVATE ${LINK_Fortran_LIBS})
if (H5_HAVE_PARALLEL AND MPI_Fortran_FOUND)
target_include_directories (${HDF5_F90_LIBSH_TARGET} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
endif ()
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index c332dc3..48e2f98 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -156,6 +156,18 @@ Bug Fixes since HDF5-1.8.19
Configuration
-------------
+ - cmake
+
+ The hdf5 library used shared szip and zlib, which needlessly required
+ applications to link with the same szip and zlib libraries.
+
+ Changed the target_link_libraries commands to use the static libs.
+ Removed improper link duplication of szip and zlib.
+ Adjusted the link dependencies and the link interface values of
+ the target_link_libraries commands.
+
+ (ADB - 2017/11/14, HDFFV-10329)
+
- cmake MPI
CMake implementation for MPI was problematic and would create incorrect
@@ -180,6 +192,25 @@ Bug Fixes since HDF5-1.8.19
Library
-------
+ - filter plugin handling in H5PL.c and H5Z.c
+
+ It was discovered that the dynamic loading process used by
+ filter plugins had issues with memory allocation and library
+ dependencies.
+
+ The CMake build process was changed to use LINK INTERFACE keywords,
+ which allowed the HDF5 C library to make its dependent libraries
+ private. The filter plugin libraries no longer require dependent
+ libraries (such as szip or zlib) to be available.
+ (ADB - 2017/11/16, HDFFV-10328)
+
+ The H5Z_class2_t structure returned by the filter plugins is
+ now dynamically allocated in the H5Zregister function. Access
+ to the functions in the filter library now always uses the
+ dynamic library API.
+
+ (ADB - 2017/11/16, HDFFV-10329)
+
- Fix rare object header corruption bug
In certain cases, such as when converting large attributes to dense
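[Editor's sketch] The Library note in the RELEASE.txt hunk above states that the H5Z_class2_t returned by a plugin's H5PLget_plugin_info() is now copied into library-owned storage instead of being used in place. For context, here is a minimal sketch of the plugin side of that contract; the filter ID (32768), the filter name, and the pass-through filter body are hypothetical placeholders rather than anything from this patch, H5PLextern.h stands for the header plugin examples normally include, and export decorations are omitted.

/* Hypothetical filter plugin sketch: per the note above, HDF5 now
 * deep-copies the H5Z_class2_t (including the name string) returned
 * from H5PLget_plugin_info(), so this static data does not have to
 * remain reachable through the dlopen'd module after the call.
 * Filter ID 32768 and the pass-through body are placeholders. */
#include "H5PLextern.h"   /* H5Z_class2_t, H5PL_TYPE_FILTER, H5Z_CLASS_T_VERS */

#define H5Z_FILTER_EXAMPLE 32768   /* hypothetical user filter ID */

/* Pass-through filter callback: returns the data unchanged. */
static size_t
example_filter(unsigned int flags, size_t cd_nelmts,
               const unsigned int cd_values[], size_t nbytes,
               size_t *buf_size, void **buf)
{
    (void)flags; (void)cd_nelmts; (void)cd_values;
    (void)buf_size; (void)buf;
    return nbytes;                 /* 0 would indicate failure */
}

static const H5Z_class2_t EXAMPLE_FILTER_CLASS = {
    H5Z_CLASS_T_VERS,              /* version of this struct        */
    H5Z_FILTER_EXAMPLE,            /* filter identifier             */
    1,                             /* encoder present               */
    1,                             /* decoder present               */
    "example pass-through",        /* name (duplicated by the library) */
    NULL,                          /* can_apply callback            */
    NULL,                          /* set_local callback            */
    example_filter                 /* actual filter function        */
};

/* Entry points the HDF5 plugin loader resolves via the dynamic library API. */
H5PL_type_t H5PLget_plugin_type(void) { return H5PL_TYPE_FILTER; }
const void *H5PLget_plugin_info(void) { return &EXAMPLE_FILTER_CLASS; }

Because H5Zregister() and the plugin loader now duplicate this structure, the static data above only needs to be valid for the duration of the H5PLget_plugin_info() call.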
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index a6f7ddc..a825958 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -694,7 +694,7 @@ set_source_files_properties (${HDF5_BINARY_DIR}/H5overflow.h GENERATED)
add_library (${HDF5_LIB_TARGET} STATIC ${common_SRCS} ${H5_PUBLIC_HEADERS} ${H5_PRIVATE_HEADERS})
TARGET_C_PROPERTIES (${HDF5_LIB_TARGET} STATIC " " " ")
-target_link_libraries (${HDF5_LIB_TARGET} PUBLIC ${LINK_LIBS})
+target_link_libraries (${HDF5_LIB_TARGET} PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS})
if (NOT WIN32)
target_link_libraries (${HDF5_LIB_TARGET} PUBLIC dl)
endif ()
@@ -720,7 +720,7 @@ set (install_targets ${HDF5_LIB_TARGET})
if (BUILD_SHARED_LIBS)
add_library (${HDF5_LIBSH_TARGET} SHARED ${common_SRCS} ${H5_PUBLIC_HEADERS} ${H5_PRIVATE_HEADERS})
TARGET_C_PROPERTIES (${HDF5_LIBSH_TARGET} SHARED " " " ")
- target_link_libraries (${HDF5_LIBSH_TARGET} PUBLIC ${LINK_SHARED_LIBS})
+ target_link_libraries (${HDF5_LIBSH_TARGET} PRIVATE ${LINK_LIBS} ${LINK_COMP_LIBS})
if (NOT WIN32)
target_link_libraries (${HDF5_LIBSH_TARGET} PUBLIC dl)
endif ()
diff --git a/src/H5PL.c b/src/H5PL.c
index 5d750fa..c829bf9 100644
--- a/src/H5PL.c
+++ b/src/H5PL.c
@@ -881,6 +881,7 @@ H5PL__open(H5PL_type_t pl_type, char *libname, int pl_id, const void **pl_info)
} /* end if */
else {
const H5Z_class2_t *plugin_info;
+ H5Z_class2_t *plugin_copy = NULL;
/* Invoke H5PLget_plugin_info to verify this is the right library we are looking for.
* Move on if it isn't.
@@ -910,8 +911,22 @@ H5PL__open(H5PL_type_t pl_type, char *libname, int pl_id, const void **pl_info)
(H5PL_table_g[H5PL_table_used_g]).pl_id = plugin_info->id;
H5PL_table_used_g++;
+ /* allocate local copy of plugin info */
+ if (NULL == (plugin_copy = (H5Z_class2_t *)H5MM_calloc(sizeof(H5Z_class2_t))))
+ HGOTO_ERROR(H5E_PLUGIN, H5E_CANTALLOC, FAIL, "can't allocate memory for plugin info")
/* Set the plugin info to return */
- *pl_info = (const void *)plugin_info;
+ *pl_info = (const void *)plugin_copy;
+
+ plugin_copy->version = plugin_info->version;
+ plugin_copy->id = plugin_info->id;
+ plugin_copy->encoder_present = plugin_info->encoder_present;
+ plugin_copy->decoder_present = plugin_info->decoder_present;
+ plugin_copy->can_apply = plugin_info->can_apply;
+ plugin_copy->set_local = plugin_info->set_local;
+ plugin_copy->filter = plugin_info->filter;
+ /* copy the user's string into the property */
+ if(NULL == (plugin_copy->name = (char *)H5MM_xstrdup(plugin_info->name)))
+ HGOTO_ERROR(H5E_PLUGIN, H5E_NOSPACE, FAIL, "can't allocate memory for plugin info name")
/* Indicate success */
ret_value = TRUE;
@@ -923,6 +938,10 @@ H5PL__open(H5PL_type_t pl_type, char *libname, int pl_id, const void **pl_info)
} /* end else */
done:
+ /* unallocate local copy of plugin info on failure */
+ if (FAIL == ret_value && *pl_info) {
+ *pl_info = (H5Z_class2_t *)H5MM_xfree(*pl_info);
+ }
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5PL__open() */
@@ -957,6 +976,7 @@ H5PL__search_table(H5PL_type_t plugin_type, int type_id, const void **info)
if((plugin_type == (H5PL_table_g[i]).pl_type) && (type_id == (H5PL_table_g[i]).pl_id)) {
H5PL_get_plugin_info_t get_plugin_info;
const H5Z_class2_t *plugin_info;
+ H5Z_class2_t *plugin_copy = NULL;
if(NULL == (get_plugin_info = (H5PL_get_plugin_info_t)H5PL_GET_LIB_FUNC((H5PL_table_g[i]).handle, "H5PLget_plugin_info")))
HGOTO_ERROR(H5E_DATATYPE, H5E_CANTGET, FAIL, "can't get function for H5PLget_plugin_info")
@@ -964,13 +984,32 @@ H5PL__search_table(H5PL_type_t plugin_type, int type_id, const void **info)
if(NULL == (plugin_info = (const H5Z_class2_t *)(*get_plugin_info)()))
HGOTO_ERROR(H5E_DATATYPE, H5E_CANTGET, FAIL, "can't get plugin info")
- *info = plugin_info;
+ if (NULL == (plugin_copy = (H5Z_class2_t *)H5MM_calloc(sizeof(H5Z_class2_t))))
+ HGOTO_ERROR(H5E_PLUGIN, H5E_CANTALLOC, FAIL, "can't allocate memory for plugin info")
+ /* Set the plugin info to return */
+ *info = (const void *)plugin_copy;
+
+ plugin_copy->version = plugin_info->version;
+ plugin_copy->id = plugin_info->id;
+ plugin_copy->encoder_present = plugin_info->encoder_present;
+ plugin_copy->decoder_present = plugin_info->decoder_present;
+ plugin_copy->can_apply = plugin_info->can_apply;
+ plugin_copy->set_local = plugin_info->set_local;
+ plugin_copy->filter = plugin_info->filter;
+ /* copy the user's string into the property */
+ if(NULL == (plugin_copy->name = (char *)H5MM_xstrdup(plugin_info->name)))
+ HGOTO_ERROR(H5E_PLUGIN, H5E_NOSPACE, FAIL, "can't allocate memory for plugin info name")
+
HGOTO_DONE(TRUE)
} /* end if */
} /* end for */
} /* end if */
done:
+ /* unallocate local copy of plugin info on failure */
+ if (FAIL == ret_value && *info) {
+ *info = (H5Z_class2_t *)H5MM_xfree(*info);
+ }
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5PL__search_table() */
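[Editor's sketch] The H5PL.c hunks above implement a common ownership pattern for data obtained from a dynamically loaded module: resolve the entry point through the dynamic-library API, deep-copy the returned structure (including its name string) into caller-owned memory, and free the copy in the done: block if a later step fails. A simplified stand-in using only POSIX dlfcn and libc calls is sketched below; the plugin_class_t subset, the load_plugin_class() helper, and the path handling are illustrative assumptions, not HDF5's internal H5PL/H5MM API.

/* Simplified stand-in for the copy-out-of-the-plugin pattern used in
 * H5PL__open()/H5PL__search_table(): the caller ends up owning an
 * independent copy of the plugin's class description, so nothing
 * keeps pointing into the dlopen'd module's static data.
 * POSIX dlfcn/libc only; not the HDF5 internal API. */
#include <dlfcn.h>
#include <stdlib.h>
#include <string.h>

typedef struct {                  /* illustrative subset of H5Z_class2_t */
    int   version;
    int   id;
    char *name;                   /* owned by the copy */
} plugin_class_t;

typedef const plugin_class_t *(*get_plugin_info_t)(void);

/* Returns a heap-allocated copy of the plugin's class info, or NULL. */
static plugin_class_t *
load_plugin_class(const char *libpath)
{
    void *handle = dlopen(libpath, RTLD_LAZY | RTLD_LOCAL);
    plugin_class_t *copy = NULL;

    if (!handle)
        return NULL;

    /* Resolve the entry point through the dynamic library API. */
    get_plugin_info_t get_info =
        (get_plugin_info_t)dlsym(handle, "H5PLget_plugin_info");
    const plugin_class_t *info = get_info ? get_info() : NULL;

    if (info && (copy = calloc(1, sizeof(*copy))) != NULL) {
        copy->version = info->version;
        copy->id      = info->id;
        copy->name    = info->name ? strdup(info->name) : NULL;
        if (info->name && !copy->name) {   /* undo the copy on failure */
            free(copy);
            copy = NULL;
        }
    }

    if (!copy)
        dlclose(handle);          /* a real loader keeps the handle on success */
    return copy;
}

In the actual patch the same steps use H5MM_calloc() and H5MM_xstrdup(), and the dlopen handle is retained in the plugin table so the filter callback itself remains callable after the copy is made.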
diff --git a/src/H5Z.c b/src/H5Z.c
index 0579c62..0c03f6f 100644
--- a/src/H5Z.c
+++ b/src/H5Z.c
@@ -11,23 +11,23 @@
* help@hdfgroup.org. *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-#define H5Z_PACKAGE /*suppress error about including H5Zpkg */
+#define H5Z_PACKAGE /* suppress error about including H5Zpkg */
/* Interface initialization */
-#define H5_INTERFACE_INIT_FUNC H5Z_init_interface
+#define H5_INTERFACE_INIT_FUNC H5Z_init_interface
-#include "H5private.h" /* Generic Functions */
-#include "H5Dprivate.h" /* Dataset functions */
-#include "H5Eprivate.h" /* Error handling */
-#include "H5Fprivate.h" /* File */
-#include "H5Iprivate.h" /* IDs */
-#include "H5MMprivate.h" /* Memory management */
-#include "H5Oprivate.h" /* Object headers */
-#include "H5Pprivate.h" /* Property lists */
-#include "H5PLprivate.h" /* Plugins */
-#include "H5Sprivate.h" /* Dataspace functions */
-#include "H5Zpkg.h" /* Data filters */
+#include "H5private.h" /* Generic Functions */
+#include "H5Dprivate.h" /* Dataset functions */
+#include "H5Eprivate.h" /* Error handling */
+#include "H5Fprivate.h" /* File */
+#include "H5Iprivate.h" /* IDs */
+#include "H5MMprivate.h" /* Memory management */
+#include "H5Oprivate.h" /* Object headers */
+#include "H5Pprivate.h" /* Property lists */
+#include "H5PLprivate.h" /* Plugins */
+#include "H5Sprivate.h" /* Dataspace functions */
+#include "H5Zpkg.h" /* Data filters */
#ifdef H5_HAVE_SZLIB_H
# include "szlib.h"
@@ -37,15 +37,15 @@
#ifdef H5Z_DEBUG
typedef struct H5Z_stats_t {
struct {
- hsize_t total; /*total number of bytes processed */
- hsize_t errors; /*bytes of total attributable to errors */
- H5_timer_t timer; /*execution time including errors */
- } stats[2]; /*0=output, 1=input */
+ hsize_t total; /* total number of bytes processed */
+ hsize_t errors; /* bytes of total attributable to errors */
+ H5_timer_t timer; /* execution time including errors */
+ } stats[2]; /* 0=output, 1=input */
} H5Z_stats_t;
#endif /* H5Z_DEBUG */
typedef struct H5Z_object_t {
- H5Z_filter_t filter_id; /* ID of the filter we're looking for */
+ H5Z_filter_t filter_id; /* ID of the filter we're looking for */
htri_t found; /* Whether we find an object using the filter */
} H5Z_object_t;
@@ -56,11 +56,11 @@ typedef enum {
} H5Z_prelude_type_t;
/* Local variables */
-static size_t H5Z_table_alloc_g = 0;
-static size_t H5Z_table_used_g = 0;
-static H5Z_class2_t *H5Z_table_g = NULL;
+static size_t H5Z_table_alloc_g = 0;
+static size_t H5Z_table_used_g = 0;
+static H5Z_class2_t *H5Z_table_g = NULL;
#ifdef H5Z_DEBUG
-static H5Z_stats_t *H5Z_stat_table_g = NULL;
+static H5Z_stats_t *H5Z_stat_table_g = NULL;
#endif /* H5Z_DEBUG */
/* Local functions */
@@ -71,44 +71,38 @@ static int H5Z__flush_file_cb(void *obj_ptr, hid_t obj_id, void *key);
/*-------------------------------------------------------------------------
- * Function: H5Z_init_interface
+ * Function: H5Z_init_interface
*
- * Purpose: Initializes the data filter layer.
- *
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Robb Matzke
- * Thursday, April 16, 1998
- *
- * Modifications:
+ * Purpose: Initializes the data filter layer.
*
+ * Return: Non-negative on success/Negative on failure
*-------------------------------------------------------------------------
*/
static herr_t
H5Z_init_interface (void)
{
- herr_t ret_value=SUCCEED; /* Return value */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_NOAPI_NOINIT
/* Internal filters */
- if (H5Z_register (H5Z_SHUFFLE)<0)
+ if (H5Z_register (H5Z_SHUFFLE) < 0)
HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register shuffle filter")
- if (H5Z_register (H5Z_FLETCHER32)<0)
+ if (H5Z_register (H5Z_FLETCHER32) < 0)
HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register fletcher32 filter")
- if (H5Z_register (H5Z_NBIT)<0)
+ if (H5Z_register (H5Z_NBIT) < 0)
HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register nbit filter")
- if (H5Z_register (H5Z_SCALEOFFSET)<0)
+ if (H5Z_register (H5Z_SCALEOFFSET) < 0)
HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register scaleoffset filter")
/* External filters */
#ifdef H5_HAVE_FILTER_DEFLATE
- if (H5Z_register (H5Z_DEFLATE)<0)
+ if (H5Z_register (H5Z_DEFLATE) < 0)
HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register deflate filter")
#endif /* H5_HAVE_FILTER_DEFLATE */
#ifdef H5_HAVE_FILTER_SZIP
H5Z_SZIP->encoder_present = SZ_encoder_enabled();
- if (H5Z_register (H5Z_SZIP)<0)
+ if (H5Z_register (H5Z_SZIP) < 0)
HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register szip filter")
#endif /* H5_HAVE_FILTER_SZIP */
@@ -118,84 +112,85 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_term_interface
- *
- * Purpose: Terminate the H5Z layer.
+ * Function: H5Z_term_interface
*
- * Return: void
- *
- * Programmer: Robb Matzke
- * Thursday, April 16, 1998
- *
- * Modifications:
+ * Purpose: Terminate the H5Z layer.
*
+ * Return: void
*-------------------------------------------------------------------------
*/
int
H5Z_term_interface(void)
{
+ size_t i;
#ifdef H5Z_DEBUG
- size_t i;
- int dir, nprint=0;
- char comment[16], bandwidth[32];
+ int dir;
+ int nprint = 0;
+ char comment[16];
+ char bandwidth[32];
#endif
- if(H5_interface_initialize_g) {
+ if (H5_interface_initialize_g) {
#ifdef H5Z_DEBUG
- if (H5DEBUG(Z)) {
- for (i=0; i<H5Z_table_used_g; i++) {
- for (dir=0; dir<2; dir++) {
- if (0==H5Z_stat_table_g[i].stats[dir].total) continue;
-
- if (0==nprint++) {
- /* Print column headers */
- HDfprintf (H5DEBUG(Z), "H5Z: filter statistics "
- "accumulated over life of library:\n");
- HDfprintf (H5DEBUG(Z),
- " %-16s %10s %10s %8s %8s %8s %10s\n",
- "Filter", "Total", "Errors", "User",
- "System", "Elapsed", "Bandwidth");
- HDfprintf (H5DEBUG(Z),
- " %-16s %10s %10s %8s %8s %8s %10s\n",
- "------", "-----", "------", "----",
- "------", "-------", "---------");
- }
-
- /* Truncate the comment to fit in the field */
- HDstrncpy(comment, H5Z_table_g[i].name,
- sizeof comment);
- comment[sizeof(comment)-1] = '\0';
-
- /*
- * Format bandwidth to have four significant digits and
- * units of `B/s', `kB/s', `MB/s', `GB/s', or `TB/s' or
- * the word `Inf' if the elapsed time is zero.
- */
- H5_bandwidth(bandwidth,
- (double)(H5Z_stat_table_g[i].stats[dir].total),
- H5Z_stat_table_g[i].stats[dir].timer.etime);
-
- /* Print the statistics */
- HDfprintf (H5DEBUG(Z),
- " %s%-15s %10Hd %10Hd %8.2f %8.2f %8.2f "
- "%10s\n", dir?"<":">", comment,
- H5Z_stat_table_g[i].stats[dir].total,
- H5Z_stat_table_g[i].stats[dir].errors,
- H5Z_stat_table_g[i].stats[dir].timer.utime,
- H5Z_stat_table_g[i].stats[dir].timer.stime,
- H5Z_stat_table_g[i].stats[dir].timer.etime,
- bandwidth);
- }
- }
- }
+ if (H5DEBUG(Z)) {
+ for (i = 0; i < H5Z_table_used_g; i++) {
+ for (dir = 0; dir < 2; dir++) {
+ if (0 == H5Z_stat_table_g[i].stats[dir].total) continue;
+
+ if (0 == nprint++) {
+ /* Print column headers */
+ HDfprintf (H5DEBUG(Z), "H5Z: filter statistics "
+ "accumulated over life of library:\n");
+ HDfprintf (H5DEBUG(Z),
+ " %-16s %10s %10s %8s %8s %8s %10s\n",
+ "Filter", "Total", "Errors", "User",
+ "System", "Elapsed", "Bandwidth");
+ HDfprintf (H5DEBUG(Z),
+ " %-16s %10s %10s %8s %8s %8s %10s\n",
+ "------", "-----", "------", "----",
+ "------", "-------", "---------");
+ }
+
+ /* Truncate the comment to fit in the field */
+ HDstrncpy (comment, H5Z_table_g[i].name, sizeof comment);
+ comment[sizeof(comment)-1] = '\0';
+
+ /*
+ * Format bandwidth to have four significant digits and
+ * units of `B/s', `kB/s', `MB/s', `GB/s', or `TB/s' or
+ * the word `Inf' if the elapsed time is zero.
+ */
+ H5_bandwidth (bandwidth,
+ (double)(H5Z_stat_table_g[i].stats[dir].total),
+ H5Z_stat_table_g[i].stats[dir].timer.etime);
+
+ /* Print the statistics */
+ HDfprintf (H5DEBUG(Z),
+ " %s%-15s %10Hd %10Hd %8.2f %8.2f %8.2f "
+ "%10s\n", dir?"<":">", comment,
+ H5Z_stat_table_g[i].stats[dir].total,
+ H5Z_stat_table_g[i].stats[dir].errors,
+ H5Z_stat_table_g[i].stats[dir].timer.utime,
+ H5Z_stat_table_g[i].stats[dir].timer.stime,
+ H5Z_stat_table_g[i].stats[dir].timer.etime,
+ bandwidth);
+ }
+ }
+ }
#endif /* H5Z_DEBUG */
- /* Free the table of filters */
- H5Z_table_g = (H5Z_class2_t *)H5MM_xfree(H5Z_table_g);
+ for (i = 0; i < H5Z_table_used_g; i++) {
+ H5Z_class2_t *cls = (H5Z_class2_t *)(H5Z_table_g+i);
+ /* deallocate plugin info name */
+ if (cls->name)
+ cls->name = (char *)H5MM_xfree(cls->name);
+ }
+ /* Free the table of filters */
+ H5Z_table_g = (H5Z_class2_t *)H5MM_xfree(H5Z_table_g);
#ifdef H5Z_DEBUG
- H5Z_stat_table_g = (H5Z_stats_t *)H5MM_xfree(H5Z_stat_table_g);
+ H5Z_stat_table_g = (H5Z_stats_t *)H5MM_xfree(H5Z_stat_table_g);
#endif /* H5Z_DEBUG */
- H5Z_table_used_g = H5Z_table_alloc_g = 0;
- H5_interface_initialize_g = 0;
+ H5Z_table_used_g = H5Z_table_alloc_g = 0;
+ H5_interface_initialize_g = 0;
} /* end if */
return(0);
@@ -203,34 +198,26 @@ H5Z_term_interface(void)
/*-------------------------------------------------------------------------
- * Function: H5Zregister
- *
- * Purpose: This function registers new filter.
- *
- * Return: Non-negative on success/Negative on failure
+ * Function: H5Zregister
*
- * Programmer: Robb Matzke
- * Thursday, April 16, 1998
- *
- * Modifications:
- * Changed to pass in H5Z_class_t struct
- * Quincey Koziol, April 5, 2003
+ * Purpose: This function registers new filter.
*
+ * Return: Non-negative on success/Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
H5Zregister(const void *cls)
{
const H5Z_class2_t *cls_real = (const H5Z_class2_t *) cls; /* "Real" class pointer */
- H5Z_class2_t cls_new; /* Translated class struct */
- herr_t ret_value=SUCCEED; /* Return value */
+ H5Z_class2_t cls_new; /* Translated class struct */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_API(FAIL)
H5TRACE1("e", "*x", cls);
/* Check args */
if (cls_real==NULL)
- HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "invalid filter class")
+ HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "invalid filter class")
/* Check H5Z_class_t version number; this is where a function to convert
* from an outdated version should be called.
@@ -242,7 +229,7 @@ H5Zregister(const void *cls)
* at least 256, there should be no overlap and the version of the struct
* can be determined by the value of the first field.
*/
- if(cls_real->version != H5Z_CLASS_T_VERS) {
+ if (cls_real->version != H5Z_CLASS_T_VERS) {
#ifndef H5_NO_DEPRECATED_SYMBOLS
/* Assume it is an old "H5Z_class1_t" instead */
const H5Z_class1_t *cls_old = (const H5Z_class1_t *) cls;
@@ -262,20 +249,20 @@ H5Zregister(const void *cls)
#else /* H5_NO_DEPRECATED_SYMBOLS */
/* Deprecated symbols not allowed, throw an error */
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid H5Z_class_t version number");
+ HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "invalid H5Z_class_t version number");
#endif /* H5_NO_DEPRECATED_SYMBOLS */
} /* end if */
- if (cls_real->id<0 || cls_real->id>H5Z_FILTER_MAX)
- HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "invalid filter identification number")
- if (cls_real->id<H5Z_FILTER_RESERVED)
- HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "unable to modify predefined filters")
- if (cls_real->filter==NULL)
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no filter function specified")
+ if (cls_real->id < 0 || cls_real->id > H5Z_FILTER_MAX)
+ HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "invalid filter identification number")
+ if (cls_real->id < H5Z_FILTER_RESERVED)
+ HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "unable to modify predefined filters")
+ if (cls_real->filter == NULL)
+ HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "no filter function specified")
/* Do it */
- if (H5Z_register (cls_real)<0)
- HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register filter")
+ if (H5Z_register (cls_real) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register filter")
done:
FUNC_LEAVE_API(ret_value)
@@ -283,66 +270,67 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_register
- *
- * Purpose: Same as the public version except this one allows filters
- * to be set for predefined method numbers <H5Z_FILTER_RESERVED
+ * Function: H5Z_register
*
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Robb Matzke
- * Thursday, April 16, 1998
- *
- * Modifications:
+ * Purpose: Same as the public version except this one allows filters
+ * to be set for predefined method numbers <H5Z_FILTER_RESERVED
*
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
H5Z_register (const H5Z_class2_t *cls)
{
- size_t i;
- herr_t ret_value = SUCCEED; /* Return value */
+ size_t i;
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_NOAPI(FAIL)
- HDassert(cls);
- HDassert(cls->id >= 0 && cls->id <= H5Z_FILTER_MAX);
+ HDassert (cls);
+ HDassert (cls->id >= 0 && cls->id <= H5Z_FILTER_MAX);
/* Is the filter already registered? */
- for(i = 0; i < H5Z_table_used_g; i++)
- if(H5Z_table_g[i].id == cls->id)
+ for (i = 0; i < H5Z_table_used_g; i++)
+ if (H5Z_table_g[i].id == cls->id)
break;
/* Filter not already registered */
- if(i >= H5Z_table_used_g) {
- if(H5Z_table_used_g >= H5Z_table_alloc_g) {
- size_t n = MAX(H5Z_MAX_NFILTERS, 2*H5Z_table_alloc_g);
- H5Z_class2_t *table = (H5Z_class2_t *)H5MM_realloc(H5Z_table_g, n * sizeof(H5Z_class2_t));
+ if (i >= H5Z_table_used_g) {
+ if (H5Z_table_used_g >= H5Z_table_alloc_g) {
+ size_t n = MAX(H5Z_MAX_NFILTERS, 2 * H5Z_table_alloc_g);
+ H5Z_class2_t *table = (H5Z_class2_t *)H5MM_realloc(H5Z_table_g, n * sizeof(H5Z_class2_t));
#ifdef H5Z_DEBUG
- H5Z_stats_t *stat_table = (H5Z_stats_t *)H5MM_realloc(H5Z_stat_table_g, n * sizeof(H5Z_stats_t));
+ H5Z_stats_t *stat_table = (H5Z_stats_t *)H5MM_realloc(H5Z_stat_table_g, n * sizeof(H5Z_stats_t));
#endif /* H5Z_DEBUG */
- if(!table)
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "unable to extend filter table")
- H5Z_table_g = table;
+ if (!table)
+ HGOTO_ERROR (H5E_RESOURCE, H5E_NOSPACE, FAIL, "unable to extend filter table")
+ H5Z_table_g = table;
#ifdef H5Z_DEBUG
- if(!stat_table)
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "unable to extend filter statistics table")
- H5Z_stat_table_g = stat_table;
+ if (!stat_table)
+ HGOTO_ERROR (H5E_RESOURCE, H5E_NOSPACE, FAIL, "unable to extend filter statistics table")
+ H5Z_stat_table_g = stat_table;
#endif /* H5Z_DEBUG */
- H5Z_table_alloc_g = n;
- } /* end if */
+ H5Z_table_alloc_g = n;
+ } /* end if */
- /* Initialize */
- i = H5Z_table_used_g++;
- HDmemcpy(H5Z_table_g+i, cls, sizeof(H5Z_class2_t));
+ /* Initialize */
+ i = H5Z_table_used_g++;
+ HDmemcpy (H5Z_table_g+i, cls, sizeof(H5Z_class2_t));
+ H5Z_table_g[i].name = (char *)H5MM_xstrdup(cls->name);
#ifdef H5Z_DEBUG
- HDmemset(H5Z_stat_table_g+i, 0, sizeof(H5Z_stats_t));
+ HDmemset (H5Z_stat_table_g+i, 0, sizeof(H5Z_stats_t));
#endif /* H5Z_DEBUG */
} /* end if */
/* Filter already registered */
else {
- /* Replace old contents */
- HDmemcpy(H5Z_table_g+i, cls, sizeof(H5Z_class2_t));
+ H5Z_class2_t *old_cls = (H5Z_class2_t *)(H5Z_table_g+i);
+ /* deallocate plugin info name */
+ if (old_cls->name)
+ old_cls->name = (char *)H5MM_xfree(old_cls->name);
+ /* Replace old contents */
+ HDmemcpy (old_cls, cls, sizeof(H5Z_class2_t));
+ old_cls->name = (char *)H5MM_xstrdup(cls->name);
} /* end else */
done:
@@ -351,34 +339,31 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Zunregister
+ * Function: H5Zunregister
*
- * Purpose: This function unregisters a filter.
- *
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Quincey Koziol
- * Thursday, November 14, 2002
+ * Purpose: This function unregisters a filter.
*
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
H5Zunregister(H5Z_filter_t id)
{
- herr_t ret_value=SUCCEED; /* Return value */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_API(FAIL)
H5TRACE1("e", "Zf", id);
/* Check args */
- if(id < 0 || id > H5Z_FILTER_MAX)
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid filter identification number")
- if(id < H5Z_FILTER_RESERVED)
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "unable to modify predefined filters")
+ if (id < 0 || id > H5Z_FILTER_MAX)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid filter identification number")
+ if (id < H5Z_FILTER_RESERVED)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "unable to modify predefined filters")
/* Do it */
- if(H5Z_unregister(id) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTINIT, FAIL, "unable to unregister filter")
+ if (H5Z_unregister(id) < 0)
+ HGOTO_ERROR(H5E_PLINE, H5E_CANTINIT, FAIL, "unable to unregister filter")
done:
FUNC_LEAVE_API(ret_value)
@@ -386,36 +371,34 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_unregister
+ * Function: H5Z_unregister
*
- * Purpose: Same as the public version except this one allows filters
- * to be unset for predefined method numbers <H5Z_FILTER_RESERVED
- *
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Quincey Koziol
- * Thursday, November 14, 2002
+ * Purpose: Same as the public version except this one allows filters
+ * to be unset for predefined method numbers <H5Z_FILTER_RESERVED
*
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
H5Z_unregister(H5Z_filter_t filter_id)
{
- size_t filter_index; /* Local index variable for filter */
+ size_t filter_index; /* Local index variable for filter */
H5Z_object_t object;
- herr_t ret_value=SUCCEED; /* Return value */
+ herr_t ret_value = SUCCEED; /* Return value */
+ H5Z_class2_t *old_cls = NULL;
FUNC_ENTER_NOAPI(FAIL)
HDassert(filter_id>=0 && filter_id<=H5Z_FILTER_MAX);
/* Is the filter already registered? */
- for (filter_index=0; filter_index<H5Z_table_used_g; filter_index++)
- if (H5Z_table_g[filter_index].id==filter_id)
+ for (filter_index = 0; filter_index < H5Z_table_used_g; filter_index++)
+ if (H5Z_table_g[filter_index].id == filter_id)
break;
/* Fail if filter not found */
- if (filter_index>=H5Z_table_used_g)
+ if (filter_index >= H5Z_table_used_g)
HGOTO_ERROR(H5E_PLINE, H5E_NOTFOUND, FAIL, "filter is not registered")
/* Initialize the structure object for iteration */
@@ -423,28 +406,32 @@ H5Z_unregister(H5Z_filter_t filter_id)
object.found = FALSE;
/* Iterate through all opened datasets, returns a failure if any of them uses the filter */
- if(H5I_iterate(H5I_DATASET, H5Z__check_unregister_dset_cb, &object, FALSE) < 0)
+ if (H5I_iterate(H5I_DATASET, H5Z__check_unregister_dset_cb, &object, FALSE) < 0)
HGOTO_ERROR(H5E_FILE, H5E_BADITER, FAIL, "iteration failed")
- if(object.found)
+ if (object.found)
HGOTO_ERROR(H5E_PLINE, H5E_CANTRELEASE, FAIL, "can't unregister filter because a dataset is still using it")
/* Iterate through all opened groups, returns a failure if any of them uses the filter */
- if(H5I_iterate(H5I_GROUP, H5Z__check_unregister_group_cb, &object, FALSE) < 0)
+ if (H5I_iterate(H5I_GROUP, H5Z__check_unregister_group_cb, &object, FALSE) < 0)
HGOTO_ERROR(H5E_FILE, H5E_BADITER, FAIL, "iteration failed")
- if(object.found)
+ if (object.found)
HGOTO_ERROR(H5E_PLINE, H5E_CANTRELEASE, FAIL, "can't unregister filter because a group is still using it")
/* Iterate through all opened files and flush them */
- if(H5I_iterate(H5I_FILE, H5Z__flush_file_cb, NULL, FALSE) < 0)
+ if (H5I_iterate(H5I_FILE, H5Z__flush_file_cb, NULL, FALSE) < 0)
HGOTO_ERROR(H5E_FILE, H5E_BADITER, FAIL, "iteration failed")
+ /* deallocate plugin info name */
+ old_cls = (H5Z_class2_t *)(H5Z_table_g+filter_index);
+ if (old_cls->name)
+ old_cls->name = (char *)H5MM_xfree(old_cls->name);
/* Remove filter from table */
/* Don't worry about shrinking table size (for now) */
- HDmemmove(&H5Z_table_g[filter_index],&H5Z_table_g[filter_index+1],sizeof(H5Z_class2_t)*((H5Z_table_used_g-1)-filter_index));
+ HDmemmove (&H5Z_table_g[filter_index], &H5Z_table_g[filter_index+1], sizeof(H5Z_class2_t)*((H5Z_table_used_g-1)-filter_index));
#ifdef H5Z_DEBUG
- HDmemmove(&H5Z_stat_table_g[filter_index],&H5Z_stat_table_g[filter_index+1],sizeof(H5Z_stats_t)*((H5Z_table_used_g-1)-filter_index));
+ HDmemmove (&H5Z_stat_table_g[filter_index], &H5Z_stat_table_g[filter_index+1], sizeof(H5Z_stats_t)*((H5Z_table_used_g-1)-filter_index));
#endif /* H5Z_DEBUG */
H5Z_table_used_g--;
@@ -454,16 +441,12 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z__check_unregister
- *
- * Purpose: Check if an object uses the filter to be unregistered.
- *
- * Return: TRUE if the object uses the filter.
- * FALSE if not, NEGATIVE on error.
+ * Function: H5Z__check_unregister
*
- * Programmer: Quincey Koziol
- * 11 May 2013
+ * Purpose: Check if an object uses the filter to be unregistered.
*
+ * Return: TRUE if the object uses the filter.
+ * FALSE if not, NEGATIVE on error.
*-------------------------------------------------------------------------
*/
static htri_t
@@ -475,12 +458,12 @@ H5Z__check_unregister(hid_t ocpl_id, H5Z_filter_t filter_id)
FUNC_ENTER_STATIC
/* Get the plist structure of object creation */
- if(NULL == (plist = H5P_object_verify(ocpl_id, H5P_OBJECT_CREATE)))
- HGOTO_ERROR(H5E_PLINE, H5E_BADATOM, FAIL, "can't find object for ID")
+ if (NULL == (plist = H5P_object_verify(ocpl_id, H5P_OBJECT_CREATE)))
+ HGOTO_ERROR (H5E_PLINE, H5E_BADATOM, FAIL, "can't find object for ID")
/* Check if the object creation property list uses the filter */
- if((ret_value = H5P_filter_in_pline(plist, filter_id)) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTGET, FAIL, "can't check filter in pipeline")
+ if ((ret_value = H5P_filter_in_pline(plist, filter_id)) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTGET, FAIL, "can't check filter in pipeline")
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -488,19 +471,15 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z__check_unregister_group_cb
+ * Function: H5Z__check_unregister_group_cb
*
- * Purpose: The callback function for H5Z_unregister. It iterates
- * through all opened objects. If the object is a dataset
- * or a group and it uses the filter to be unregistered, the
- * function returns TRUE.
- *
- * Return: TRUE if the object uses the filter.
- * FALSE otherwise.
- *
- * Programmer: Raymond Lu
- * 6 May 2013
+ * Purpose: The callback function for H5Z_unregister. It iterates
+ * through all opened objects. If the object is a dataset
+ * or a group and it uses the filter to be unregistered, the
+ * function returns TRUE.
*
+ * Return: TRUE if the object uses the filter.
+ * FALSE otherwise.
*-------------------------------------------------------------------------
*/
static int
@@ -516,44 +495,40 @@ H5Z__check_unregister_group_cb(void *obj_ptr, hid_t H5_ATTR_UNUSED obj_id, void
HDassert(obj_ptr);
/* Get the group creation property */
- if((ocpl_id = H5G_get_create_plist((H5G_t *)obj_ptr)) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTGET, FAIL, "can't get group creation property list")
+ if ((ocpl_id = H5G_get_create_plist((H5G_t *)obj_ptr)) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTGET, FAIL, "can't get group creation property list")
/* Check if the filter is in the group creation property list */
- if((filter_in_pline = H5Z__check_unregister(ocpl_id, object->filter_id)) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTGET, FAIL, "can't check filter in pipeline")
+ if ((filter_in_pline = H5Z__check_unregister(ocpl_id, object->filter_id)) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTGET, FAIL, "can't check filter in pipeline")
/* H5I_iterate expects TRUE to stop the loop over objects. Stop the loop and
* let H5Z_unregister return failure.
- */
- if(filter_in_pline) {
+ */
+ if (filter_in_pline) {
object->found = TRUE;
ret_value = TRUE;
} /* end if */
done:
- if(ocpl_id > 0)
- if(H5I_dec_app_ref(ocpl_id) < 0)
- HDONE_ERROR(H5E_PLINE, H5E_CANTDEC, FAIL, "can't release plist")
+ if (ocpl_id > 0)
+ if (H5I_dec_app_ref(ocpl_id) < 0)
+ HDONE_ERROR (H5E_PLINE, H5E_CANTDEC, FAIL, "can't release plist")
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5Z__check_unregister_group_cb() */
/*-------------------------------------------------------------------------
- * Function: H5Z__check_unregister_dset_cb
+ * Function: H5Z__check_unregister_dset_cb
*
- * Purpose: The callback function for H5Z_unregister. It iterates
- * through all opened objects. If the object is a dataset
- * or a group and it uses the filter to be unregistered, the
- * function returns TRUE.
- *
- * Return: TRUE if the object uses the filter.
- * FALSE otherwise.
- *
- * Programmer: Raymond Lu
- * 6 May 2013
+ * Purpose: The callback function for H5Z_unregister. It iterates
+ * through all opened objects. If the object is a dataset
+ * or a group and it uses the filter to be unregistered, the
+ * function returns TRUE.
*
+ * Return: TRUE if the object uses the filter.
+ * FALSE otherwise.
*-------------------------------------------------------------------------
*/
static int
@@ -566,61 +541,57 @@ H5Z__check_unregister_dset_cb(void *obj_ptr, hid_t H5_ATTR_UNUSED obj_id, void *
FUNC_ENTER_STATIC
- HDassert(obj_ptr);
+ HDassert (obj_ptr);
/* Get the dataset creation property */
- if((ocpl_id = H5D_get_create_plist((H5D_t *)obj_ptr)) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTGET, FAIL, "can't get dataset creation property list")
+ if ((ocpl_id = H5D_get_create_plist((H5D_t *)obj_ptr)) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTGET, FAIL, "can't get dataset creation property list")
/* Check if the filter is in the dataset creation property list */
- if((filter_in_pline = H5Z__check_unregister(ocpl_id, object->filter_id)) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTGET, FAIL, "can't check filter in pipeline")
+ if ((filter_in_pline = H5Z__check_unregister(ocpl_id, object->filter_id)) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTGET, FAIL, "can't check filter in pipeline")
/* H5I_iterate expects TRUE to stop the loop over objects. Stop the loop and
* let H5Z_unregister return failure.
- */
- if(filter_in_pline) {
+ */
+ if (filter_in_pline) {
object->found = TRUE;
ret_value = TRUE;
} /* end if */
done:
- if(ocpl_id > 0)
- if(H5I_dec_app_ref(ocpl_id) < 0)
- HDONE_ERROR(H5E_PLINE, H5E_CANTDEC, FAIL, "can't release plist")
+ if (ocpl_id > 0)
+ if (H5I_dec_app_ref(ocpl_id) < 0)
+ HDONE_ERROR (H5E_PLINE, H5E_CANTDEC, FAIL, "can't release plist")
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5Z__check_unregister_dset_cb() */
/*-------------------------------------------------------------------------
- * Function: H5Z__flush_file_cb
+ * Function: H5Z__flush_file_cb
*
- * Purpose: The callback function for H5Z_unregister. It iterates
- * through all opened files and flush them.
- *
- * Return: FALSE if finishes flushing and moves on
- * FAIL if there is an error
- *
- * Programmer: Raymond Lu
- * 6 May 2013
+ * Purpose: The callback function for H5Z_unregister. It iterates
+ * through all opened files and flush them.
*
+ * Return: FALSE if finishes flushing and moves on
+ * FAIL if there is an error
*-------------------------------------------------------------------------
*/
static int
H5Z__flush_file_cb(void *obj_ptr, hid_t H5_ATTR_UNUSED obj_id, void H5_ATTR_UNUSED *key)
{
- int ret_value = FALSE; /* Return value */
+ int ret_value = FALSE; /* Return value */
FUNC_ENTER_STATIC
- HDassert(obj_ptr);
+ HDassert (obj_ptr);
- /* Call the flush routine for mounted file hierarchies. Do a global flush
+ /* Call the flush routine for mounted file hierarchies. Do a global flush
* if the file is opened for write */
- if(H5F_ACC_RDWR & H5F_INTENT((H5F_t *)obj_ptr)) {
- if(H5F_flush_mounts((H5F_t *)obj_ptr, H5AC_dxpl_id) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTFLUSH, FAIL, "unable to flush file hierarchy")
+ if (H5F_ACC_RDWR & H5F_INTENT((H5F_t *)obj_ptr)) {
+ if (H5F_flush_mounts((H5F_t *)obj_ptr, H5AC_dxpl_id) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTFLUSH, FAIL, "unable to flush file hierarchy")
} /* end if */
done:
@@ -629,33 +600,27 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Zfilter_avail
- *
- * Purpose: Check if a filter is available
- *
- * Return: Non-negative (TRUE/FALSE) on success/Negative on failure
- *
- * Programmer: Quincey Koziol
- * Thursday, November 14, 2002
+ * Function: H5Zfilter_avail
*
- * Modifications:
+ * Purpose: Check if a filter is available
*
+ * Return: Non-negative (TRUE/FALSE) on success/Negative on failure
*-------------------------------------------------------------------------
*/
htri_t
H5Zfilter_avail(H5Z_filter_t id)
{
- htri_t ret_value=FALSE; /* Return value */
+ htri_t ret_value = FALSE; /* Return value */
FUNC_ENTER_API(FAIL)
H5TRACE1("t", "Zf", id);
/* Check args */
- if(id<0 || id>H5Z_FILTER_MAX)
+ if (id < 0 || id > H5Z_FILTER_MAX)
HGOTO_ERROR (H5E_ARGS, H5E_BADVALUE, FAIL, "invalid filter identification number")
- if((ret_value = H5Z_filter_avail(id)) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_NOTFOUND, FAIL, "unable to check the availability of the filter")
+ if ((ret_value = H5Z_filter_avail(id)) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_NOTFOUND, FAIL, "unable to check the availability of the filter")
done:
FUNC_LEAVE_API(ret_value)
@@ -663,35 +628,35 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_filter_avail
+ * Function: H5Z_filter_avail
*
- * Purpose: Private function to check if a filter is available
- *
- * Return: Non-negative (TRUE/FALSE) on success/Negative on failure
- *
- * Programmer: Raymond Lu
- * 13 February 2013
+ * Purpose: Private function to check if a filter is available
*
+ * Return: Non-negative (TRUE/FALSE) on success/Negative on failure
*-------------------------------------------------------------------------
*/
htri_t
H5Z_filter_avail(H5Z_filter_t id)
{
- size_t i; /* Local index variable */
- const H5Z_class2_t *filter_info;
- htri_t ret_value = FALSE; /* Return value */
+ size_t i; /* Local index variable */
+ H5Z_class2_t *filter_info;
+ htri_t ret_value = FALSE; /* Return value */
FUNC_ENTER_NOAPI(FAIL)
/* Is the filter already registered? */
- for(i = 0; i < H5Z_table_used_g; i++)
- if(H5Z_table_g[i].id == id)
- HGOTO_DONE(TRUE)
-
- if(NULL != (filter_info = (const H5Z_class2_t *)H5PL_load(H5PL_TYPE_FILTER, (int)id))) {
- if(H5Z_register(filter_info) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register loaded filter")
- HGOTO_DONE(TRUE)
+ for (i = 0; i < H5Z_table_used_g; i++)
+ if (H5Z_table_g[i].id == id)
+ HGOTO_DONE (TRUE)
+
+ if (NULL != (filter_info = (const H5Z_class2_t *)H5PL_load(H5PL_TYPE_FILTER, (int)id))) {
+ herr_t status = H5Z_register(filter_info);
+ if (filter_info->name)
+ filter_info->name = (char *)H5MM_xfree(filter_info->name);
+ filter_info = (H5Z_class2_t *)H5MM_xfree(filter_info);
+ if (status < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register loaded filter")
+ HGOTO_DONE (TRUE)
}
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -699,21 +664,16 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_prelude_callback
+ * Function: H5Z_prelude_callback
*
- * Purpose: Makes a dataset creation "prelude" callback for the "can_apply"
- * or "set_local" routines.
+ * Purpose: Makes a dataset creation "prelude" callback for the "can_apply"
+ * or "set_local" routines.
*
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Quincey Koziol
- * Friday, April 4, 2003
- *
- * Notes:
- * The chunk dimensions are used to create a dataspace, instead
- * of passing in the dataset's dataspace, since the chunk
- * dimensions are what the I/O filter will actually see
+ * Return: Non-negative on success/Negative on failure
*
+ * Notes: The chunk dimensions are used to create a dataspace, instead
+ * of passing in the dataset's dataspace, since the chunk
+ * dimensions are what the I/O filter will actually see
*-------------------------------------------------------------------------
*/
static herr_t
@@ -722,59 +682,59 @@ H5Z_prelude_callback(const H5O_pline_t *pline, hid_t dcpl_id, hid_t type_id,
{
H5Z_class2_t *fclass; /* Individual filter information */
size_t u; /* Local index variable */
- htri_t ret_value = TRUE; /* Return value */
+ htri_t ret_value = TRUE; /* Return value */
FUNC_ENTER_NOAPI_NOINIT
HDassert(pline->nused > 0);
/* Iterate over filters */
- for(u = 0; u < pline->nused; u++) {
+ for (u = 0; u < pline->nused; u++) {
/* Get filter information */
- if(NULL == (fclass = H5Z_find(pline->filter[u].id))) {
+ if (NULL == (fclass = H5Z_find(pline->filter[u].id))) {
/* Ignore errors from optional filters */
- if(pline->filter[u].flags & H5Z_FLAG_OPTIONAL)
- H5E_clear_stack(NULL);
+ if (pline->filter[u].flags & H5Z_FLAG_OPTIONAL)
+ H5E_clear_stack (NULL);
else
- HGOTO_ERROR(H5E_PLINE, H5E_NOTFOUND, FAIL, "required filter was not located")
+ HGOTO_ERROR (H5E_PLINE, H5E_NOTFOUND, FAIL, "required filter was not located")
} /* end if */
else {
/* Make correct callback */
- switch(prelude_type) {
+ switch (prelude_type) {
case H5Z_PRELUDE_CAN_APPLY:
/* Check if filter is configured to be able to encode */
- if(!fclass->encoder_present)
- HGOTO_ERROR(H5E_PLINE, H5E_NOENCODER, FAIL, "Filter present but encoding is disabled.");
+ if (!fclass->encoder_present)
+ HGOTO_ERROR (H5E_PLINE, H5E_NOENCODER, FAIL, "Filter present but encoding is disabled.");
/* Check if there is a "can apply" callback */
- if(fclass->can_apply) {
+ if (fclass->can_apply) {
/* Make callback to filter's "can apply" function */
htri_t status = (fclass->can_apply)(dcpl_id, type_id, space_id);
/* Indicate error during filter callback */
- if(status < 0)
+ if (status < 0)
HGOTO_ERROR(H5E_PLINE, H5E_CANAPPLY, FAIL, "error during user callback")
- /* Indicate filter can't apply to this combination of parameters.
+ /* Indicate filter can't apply to this combination of parameters.
* If the filter is NOT optional, returns failure. */
- if(status == FALSE && !(pline->filter[u].flags & H5Z_FLAG_OPTIONAL))
+ if (status == FALSE && !(pline->filter[u].flags & H5Z_FLAG_OPTIONAL))
HGOTO_ERROR(H5E_PLINE, H5E_CANAPPLY, FAIL, "filter parameters not appropriate")
} /* end if */
break;
case H5Z_PRELUDE_SET_LOCAL:
/* Check if there is a "set local" callback */
- if(fclass->set_local) {
+ if (fclass->set_local) {
/* Make callback to filter's "set local" function */
- if((fclass->set_local)(dcpl_id, type_id, space_id) < 0)
+ if ((fclass->set_local)(dcpl_id, type_id, space_id) < 0)
/* Indicate error during filter callback */
- HGOTO_ERROR(H5E_PLINE, H5E_SETLOCAL, FAIL, "error during user callback")
+ HGOTO_ERROR (H5E_PLINE, H5E_SETLOCAL, FAIL, "error during user callback")
} /* end if */
break;
default:
- HDassert("invalid prelude type" && 0);
+ HDassert ("invalid prelude type" && 0);
} /* end switch */
} /* end else */
} /* end for */
@@ -786,82 +746,77 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_prepare_prelude_callback_dcpl
- *
- * Purpose: Prepares to make a dataset creation "prelude" callback
- * for the "can_apply" or "set_local" routines.
- *
- * Return: Non-negative on success/Negative on failure
+ * Function: H5Z_prepare_prelude_callback_dcpl
*
- * Programmer: Quincey Koziol
- * Friday, April 4, 2003
+ * Purpose: Prepares to make a dataset creation "prelude" callback
+ * for the "can_apply" or "set_local" routines.
*
- * Notes:
- * The chunk dimensions are used to create a dataspace, instead
- * of passing in the dataset's dataspace, since the chunk
- * dimensions are what the I/O filter will actually see
+ * Return: Non-negative on success/Negative on failure
*
+ * Notes: The chunk dimensions are used to create a dataspace, instead
+ * of passing in the dataset's dataspace, since the chunk
+ * dimensions are what the I/O filter will actually see
*-------------------------------------------------------------------------
*/
static herr_t
H5Z_prepare_prelude_callback_dcpl(hid_t dcpl_id, hid_t type_id, H5Z_prelude_type_t prelude_type)
{
- hid_t space_id = -1; /* ID for dataspace describing chunk */
- herr_t ret_value = SUCCEED; /* Return value */
+ hid_t space_id = -1; /* ID for dataspace describing chunk */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_NOAPI_NOINIT
- HDassert(H5I_GENPROP_LST == H5I_get_type(dcpl_id));
- HDassert(H5I_DATATYPE == H5I_get_type(type_id));
+ HDassert (H5I_GENPROP_LST == H5I_get_type(dcpl_id));
+ HDassert (H5I_DATATYPE == H5I_get_type(type_id));
/* Check if the property list is non-default */
- if(dcpl_id != H5P_DATASET_CREATE_DEFAULT) {
- H5P_genplist_t *dc_plist; /* Dataset creation property list object */
- H5O_layout_t dcpl_layout; /* Dataset's layout information */
+ if (dcpl_id != H5P_DATASET_CREATE_DEFAULT) {
+ H5P_genplist_t *dc_plist; /* Dataset creation property list object */
+ H5O_layout_t dcpl_layout; /* Dataset's layout information */
/* Get dataset creation property list object */
- if(NULL == (dc_plist = (H5P_genplist_t *)H5I_object(dcpl_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get dataset creation property list")
+ if (NULL == (dc_plist = (H5P_genplist_t *)H5I_object(dcpl_id)))
+ HGOTO_ERROR (H5E_ARGS, H5E_BADTYPE, FAIL, "can't get dataset creation property list")
/* Get layout information */
- if(H5P_get(dc_plist, H5D_CRT_LAYOUT_NAME, &dcpl_layout) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve layout")
+ if (H5P_get(dc_plist, H5D_CRT_LAYOUT_NAME, &dcpl_layout) < 0)
+ HGOTO_ERROR (H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve layout")
/* Check if the dataset is chunked */
- if(H5D_CHUNKED == dcpl_layout.type) {
+ if (H5D_CHUNKED == dcpl_layout.type) {
H5O_pline_t dcpl_pline; /* Object's I/O pipeline information */
/* Get I/O pipeline information */
- if(H5P_get(dc_plist, H5O_CRT_PIPELINE_NAME, &dcpl_pline) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve pipeline filter")
+ if (H5P_get(dc_plist, H5O_CRT_PIPELINE_NAME, &dcpl_pline) < 0)
+ HGOTO_ERROR (H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve pipeline filter")
/* Check if the chunks have filters */
- if(dcpl_pline.nused > 0) {
+ if (dcpl_pline.nused > 0) {
hsize_t chunk_dims[H5O_LAYOUT_NDIMS]; /* Size of chunk dimensions */
- H5S_t *space; /* Dataspace describing chunk */
- size_t u; /* Local index variable */
+ H5S_t *space; /* Dataspace describing chunk */
+ size_t u; /* Local index variable */
/* Create a dataspace for a chunk & set the extent */
- for(u = 0; u < dcpl_layout.u.chunk.ndims; u++)
+ for (u = 0; u < dcpl_layout.u.chunk.ndims; u++)
chunk_dims[u] = dcpl_layout.u.chunk.dim[u];
- if(NULL == (space = H5S_create_simple(dcpl_layout.u.chunk.ndims, chunk_dims, NULL)))
- HGOTO_ERROR(H5E_DATASPACE, H5E_CANTCREATE, FAIL, "can't create simple dataspace")
+ if (NULL == (space = H5S_create_simple(dcpl_layout.u.chunk.ndims, chunk_dims, NULL)))
+ HGOTO_ERROR (H5E_DATASPACE, H5E_CANTCREATE, FAIL, "can't create simple dataspace")
/* Get ID for dataspace to pass to filter routines */
- if((space_id = H5I_register(H5I_DATASPACE, space, FALSE)) < 0) {
- (void)H5S_close(space);
- HGOTO_ERROR(H5E_ATOM, H5E_CANTREGISTER, FAIL, "unable to register dataspace ID")
+ if ((space_id = H5I_register(H5I_DATASPACE, space, FALSE)) < 0) {
+ (void)H5S_close (space);
+ HGOTO_ERROR (H5E_ATOM, H5E_CANTREGISTER, FAIL, "unable to register dataspace ID")
} /* end if */
/* Make the callbacks */
- if(H5Z_prelude_callback(&dcpl_pline, dcpl_id, type_id, space_id, prelude_type) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANAPPLY, FAIL, "unable to apply filter")
+ if (H5Z_prelude_callback(&dcpl_pline, dcpl_id, type_id, space_id, prelude_type) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANAPPLY, FAIL, "unable to apply filter")
} /* end if */
} /* end if */
} /* end if */
done:
- if(space_id > 0 && H5I_dec_ref(space_id) < 0)
+ if (space_id > 0 && H5I_dec_ref(space_id) < 0)
HDONE_ERROR(H5E_PLINE, H5E_CANTRELEASE, FAIL, "unable to close dataspace")
FUNC_LEAVE_NOAPI(ret_value)
@@ -869,22 +824,18 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_can_apply
+ * Function: H5Z_can_apply
*
- * Purpose: Checks if all the filters defined in the dataset creation
- * property list can be applied to a particular combination of
- * datatype and dataspace for a dataset.
+ * Purpose: Checks if all the filters defined in the dataset creation
+ * property list can be applied to a particular combination of
+ * datatype and dataspace for a dataset.
*
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Quincey Koziol
- * Thursday, April 3, 2003
- *
- * Notes:
- * The chunk dimensions are used to create a dataspace, instead
- * of passing in the dataset's dataspace, since the chunk
- * dimensions are what the I/O filter will actually see
+ * Return: Non-negative on success
+ * Negative on failure
*
+ * Notes: The chunk dimensions are used to create a dataspace, instead
+ * of passing in the dataset's dataspace, since the chunk
+ * dimensions are what the I/O filter will actually see
*-------------------------------------------------------------------------
*/
herr_t
@@ -895,8 +846,8 @@ H5Z_can_apply(hid_t dcpl_id, hid_t type_id)
FUNC_ENTER_NOAPI(FAIL)
/* Make "can apply" callbacks for filters in pipeline */
- if(H5Z_prepare_prelude_callback_dcpl(dcpl_id, type_id, H5Z_PRELUDE_CAN_APPLY) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANAPPLY, FAIL, "unable to apply filter")
+ if (H5Z_prepare_prelude_callback_dcpl(dcpl_id, type_id, H5Z_PRELUDE_CAN_APPLY) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANAPPLY, FAIL, "unable to apply filter")
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -904,22 +855,18 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_set_local
- *
- * Purpose: Makes callbacks to modify dataset creation list property
- * settings for filters on a new dataset, based on the datatype
- * and dataspace of that dataset (chunk).
+ * Function: H5Z_set_local
*
- * Return: Non-negative on success/Negative on failure
+ * Purpose: Makes callbacks to modify dataset creation list property
+ * settings for filters on a new dataset, based on the datatype
+ * and dataspace of that dataset (chunk).
*
- * Programmer: Quincey Koziol
- * Friday, April 4, 2003
- *
- * Notes:
- * The chunk dimensions are used to create a dataspace, instead
- * of passing in the dataset's dataspace, since the chunk
- * dimensions are what the I/O filter will actually see
+ * Return: Non-negative on success
+ * Negative on failure
*
+ * Notes: The chunk dimensions are used to create a dataspace, instead
+ * of passing in the dataset's dataspace, since the chunk
+ * dimensions are what the I/O filter will actually see
*-------------------------------------------------------------------------
*/
herr_t
@@ -930,8 +877,8 @@ H5Z_set_local(hid_t dcpl_id, hid_t type_id)
FUNC_ENTER_NOAPI(FAIL)
/* Make "set local" callbacks for filters in pipeline */
- if(H5Z_prepare_prelude_callback_dcpl(dcpl_id, type_id, H5Z_PRELUDE_SET_LOCAL) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_SETLOCAL, FAIL, "local filter parameters not set")
+ if (H5Z_prepare_prelude_callback_dcpl(dcpl_id, type_id, H5Z_PRELUDE_SET_LOCAL) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_SETLOCAL, FAIL, "local filter parameters not set")
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -939,17 +886,14 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_can_apply_direct
- *
- * Purpose: Checks if all the filters defined in the pipeline can be
- * applied to an opaque byte stream (currently only a group).
- * The pipeline is assumed to have at least one filter.
+ * Function: H5Z_can_apply_direct
*
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Neil Fortner
- * Tuesday, September 22, 2009
+ * Purpose: Checks if all the filters defined in the pipeline can be
+ * applied to an opaque byte stream (currently only a group).
+ * The pipeline is assumed to have at least one filter.
*
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
@@ -959,11 +903,11 @@ H5Z_can_apply_direct(const H5O_pline_t *pline)
FUNC_ENTER_NOAPI(FAIL)
- HDassert(pline->nused > 0);
+ HDassert (pline->nused > 0);
/* Make "can apply" callbacks for filters in pipeline */
- if(H5Z_prelude_callback(pline, (hid_t)-1, (hid_t)-1, (hid_t)-1, H5Z_PRELUDE_CAN_APPLY) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANAPPLY, FAIL, "unable to apply filter")
+ if (H5Z_prelude_callback(pline, (hid_t)-1, (hid_t)-1, (hid_t)-1, H5Z_PRELUDE_CAN_APPLY) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANAPPLY, FAIL, "unable to apply filter")
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -971,22 +915,18 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_set_local_direct
+ * Function: H5Z_set_local_direct
*
- * Purpose: Makes callbacks to modify local settings for filters on a
- * new opaque object. The pipeline is assumed to have at
- * least one filter.
+ * Purpose: Makes callbacks to modify local settings for filters on a
+ * new opaque object. The pipeline is assumed to have at
+ * least one filter.
*
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Neil Fortner
- * Tuesday, September 22, 2009
- *
- * Notes:
- * This callback will almost certainly not do anything
- * useful, other than to make certain that the filter will
- * accept opque data.
+ * Return: Non-negative on success
+ * Negative on failure
*
+ * Notes: This callback will almost certainly not do anything
+ * useful, other than to make certain that the filter will
+ * accept opaque data.
*-------------------------------------------------------------------------
*/
herr_t
@@ -996,11 +936,11 @@ H5Z_set_local_direct(const H5O_pline_t *pline)
FUNC_ENTER_NOAPI(FAIL)
- HDassert(pline->nused > 0);
+ HDassert (pline->nused > 0);
/* Make "set local" callbacks for filters in pipeline */
- if(H5Z_prelude_callback(pline, (hid_t)-1, (hid_t)-1, (hid_t)-1, H5Z_PRELUDE_SET_LOCAL) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_SETLOCAL, FAIL, "local filter parameters not set")
+ if (H5Z_prelude_callback(pline, (hid_t)-1, (hid_t)-1, (hid_t)-1, H5Z_PRELUDE_SET_LOCAL) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_SETLOCAL, FAIL, "local filter parameters not set")
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -1008,66 +948,61 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_modify
+ * Function: H5Z_modify
*
- * Purpose: Modify filter parameters for specified pipeline.
- *
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Quincey Koziol
- * Friday, April 5, 2003
- *
- * Modifications:
+ * Purpose: Modify filter parameters for specified pipeline.
*
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
H5Z_modify(const H5O_pline_t *pline, H5Z_filter_t filter, unsigned flags,
- size_t cd_nelmts, const unsigned int cd_values[/*cd_nelmts*/])
+ size_t cd_nelmts, const unsigned int cd_values[/*cd_nelmts*/])
{
- size_t idx; /* Index of filter in pipeline */
- herr_t ret_value = SUCCEED; /* Return value */
+ size_t idx; /* Index of filter in pipeline */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_NOAPI(FAIL)
- HDassert(pline);
- HDassert(filter >= 0 && filter <= H5Z_FILTER_MAX);
- HDassert(0 == (flags & ~((unsigned)H5Z_FLAG_DEFMASK)));
- HDassert(0 == cd_nelmts || cd_values);
+ HDassert (pline);
+ HDassert (filter >= 0 && filter <= H5Z_FILTER_MAX);
+ HDassert (0 == (flags & ~((unsigned)H5Z_FLAG_DEFMASK)));
+ HDassert (0 == cd_nelmts || cd_values);
/* Locate the filter in the pipeline */
- for(idx = 0; idx < pline->nused; idx++)
- if(pline->filter[idx].id == filter)
+ for (idx = 0; idx < pline->nused; idx++)
+ if (pline->filter[idx].id == filter)
break;
/* Check if the filter was not already in the pipeline */
- if(idx > pline->nused)
- HGOTO_ERROR(H5E_PLINE, H5E_NOTFOUND, FAIL, "filter not in pipeline")
+ if (idx > pline->nused)
+ HGOTO_ERROR (H5E_PLINE, H5E_NOTFOUND, FAIL, "filter not in pipeline")
/* Change parameters for filter */
pline->filter[idx].flags = flags;
pline->filter[idx].cd_nelmts = cd_nelmts;
/* Free any existing parameters */
- if(pline->filter[idx].cd_values != NULL && pline->filter[idx].cd_values != pline->filter[idx]._cd_values)
- H5MM_xfree(pline->filter[idx].cd_values);
+ if (pline->filter[idx].cd_values != NULL && pline->filter[idx].cd_values != pline->filter[idx]._cd_values)
+ H5MM_xfree(pline->filter[idx].cd_values);
/* Set parameters */
- if(cd_nelmts > 0) {
- size_t i; /* Local index variable */
+ if (cd_nelmts > 0) {
+ size_t i; /* Local index variable */
/* Allocate memory or point at internal buffer */
- if(cd_nelmts > H5Z_COMMON_CD_VALUES) {
+ if (cd_nelmts > H5Z_COMMON_CD_VALUES) {
pline->filter[idx].cd_values = (unsigned *)H5MM_malloc(cd_nelmts * sizeof(unsigned));
- if(NULL == pline->filter[idx].cd_values)
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for filter parameters")
+ if (NULL == pline->filter[idx].cd_values)
+ HGOTO_ERROR (H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for filter parameters")
} /* end if */
else
pline->filter[idx].cd_values = pline->filter[idx]._cd_values;
/* Copy client data values */
- for(i = 0; i < cd_nelmts; i++)
- pline->filter[idx].cd_values[i] = cd_values[i];
+ for (i = 0; i < cd_nelmts; i++)
+ pline->filter[idx].cd_values[i] = cd_values[i];
} /* end if */
else
pline->filter[idx].cd_values = NULL;
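H5Z_modify is internal; applications reach it through H5Pmodify_filter() on a dataset creation property list. A minimal sketch that raises the deflate level already stored in the pipeline (error checking omitted):

    #include "hdf5.h"

    int main(void)
    {
        hid_t    dcpl     = H5Pcreate(H5P_DATASET_CREATE);
        hsize_t  chunk[2] = {10, 20};
        unsigned level    = 9;

        H5Pset_chunk(dcpl, 2, chunk);
        H5Pset_deflate(dcpl, 6);                        /* adds H5Z_FILTER_DEFLATE, level 6 */
        H5Pmodify_filter(dcpl, H5Z_FILTER_DEFLATE,
                         H5Z_FLAG_OPTIONAL, 1, &level); /* change the level to 9 */
        H5Pclose(dcpl);
        return 0;
    }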
@@ -1078,47 +1013,42 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_append
- *
- * Purpose: Append another filter to the specified pipeline.
+ * Function: H5Z_append
*
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Robb Matzke
- * Tuesday, August 4, 1998
- *
- * Modifications:
+ * Purpose: Append another filter to the specified pipeline.
*
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
H5Z_append(H5O_pline_t *pline, H5Z_filter_t filter, unsigned flags,
- size_t cd_nelmts, const unsigned int cd_values[/*cd_nelmts*/])
+ size_t cd_nelmts, const unsigned int cd_values[/*cd_nelmts*/])
{
- size_t idx;
- herr_t ret_value = SUCCEED; /* Return value */
+ size_t idx;
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_NOAPI(FAIL)
- HDassert(pline);
- HDassert(filter >= 0 && filter <= H5Z_FILTER_MAX);
- HDassert(0 == (flags & ~((unsigned)H5Z_FLAG_DEFMASK)));
- HDassert(0 == cd_nelmts || cd_values);
+ HDassert (pline);
+ HDassert (filter >= 0 && filter <= H5Z_FILTER_MAX);
+ HDassert (0 == (flags & ~((unsigned)H5Z_FLAG_DEFMASK)));
+ HDassert (0 == cd_nelmts || cd_values);
/*
* Check filter limit. We do it here for early warnings although we may
* decide to relax this restriction in the future.
*/
- if(pline->nused >= H5Z_MAX_NFILTERS)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTINIT, FAIL, "too many filters in pipeline")
+ if (pline->nused >= H5Z_MAX_NFILTERS)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "too many filters in pipeline")
/* Check for freshly allocated filter pipeline */
- if(pline->version == 0)
+ if (pline->version == 0)
pline->version = H5O_PLINE_VERSION_1;
/* Allocate additional space in the pipeline if it's full */
- if(pline->nused >= pline->nalloc) {
- H5O_pline_t x;
+ if (pline->nused >= pline->nalloc) {
+ H5O_pline_t x;
size_t n;
/* Each filter's data may be stored internally or may be
@@ -1128,14 +1058,14 @@ H5Z_append(H5O_pline_t *pline, H5Z_filter_t filter, unsigned flags,
* filter struct is reallocated. Set these pointers to ~NULL
* so that we can reset them after reallocating the filters array.
*/
- for(n = 0; n < pline->nalloc; ++n)
- if(pline->filter[n].cd_values == pline->filter[n]._cd_values)
+ for (n = 0; n < pline->nalloc; ++n)
+ if (pline->filter[n].cd_values == pline->filter[n]._cd_values)
pline->filter[n].cd_values = (unsigned *)((void *) ~((size_t)NULL));
- x.nalloc = MAX(H5Z_MAX_NFILTERS, 2 * pline->nalloc);
- x.filter = (H5Z_filter_info_t *)H5MM_realloc(pline->filter, x.nalloc * sizeof(x.filter[0]));
- if(NULL == x.filter)
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for filter pipeline")
+ x.nalloc = MAX(H5Z_MAX_NFILTERS, 2 * pline->nalloc);
+ x.filter = (H5Z_filter_info_t *)H5MM_realloc(pline->filter, x.nalloc * sizeof(x.filter[0]));
+ if (NULL == x.filter)
+ HGOTO_ERROR (H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for filter pipeline")
/* Fix pointers in previous filters that need to point to their own
* internal data.
@@ -1145,8 +1075,8 @@ H5Z_append(H5O_pline_t *pline, H5Z_filter_t filter, unsigned flags,
x.filter[n].cd_values = x.filter[n]._cd_values;
/* Point to newly allocated buffer */
- pline->nalloc = x.nalloc;
- pline->filter = x.filter;
+ pline->nalloc = x.nalloc;
+ pline->filter = x.filter;
} /* end if */
/* Add the new filter to the pipeline */
@@ -1155,21 +1085,21 @@ H5Z_append(H5O_pline_t *pline, H5Z_filter_t filter, unsigned flags,
pline->filter[idx].flags = flags;
pline->filter[idx].name = NULL; /*we'll pick it up later*/
pline->filter[idx].cd_nelmts = cd_nelmts;
- if(cd_nelmts > 0) {
- size_t i; /* Local index variable */
+ if (cd_nelmts > 0) {
+ size_t i; /* Local index variable */
/* Allocate memory or point at internal buffer */
- if(cd_nelmts > H5Z_COMMON_CD_VALUES) {
+ if (cd_nelmts > H5Z_COMMON_CD_VALUES) {
pline->filter[idx].cd_values = (unsigned *)H5MM_malloc(cd_nelmts * sizeof(unsigned));
- if(NULL == pline->filter[idx].cd_values)
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for filter")
+ if (NULL == pline->filter[idx].cd_values)
+ HGOTO_ERROR (H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed for filter")
} /* end if */
else
pline->filter[idx].cd_values = pline->filter[idx]._cd_values;
/* Copy client data values */
- for(i = 0; i < cd_nelmts; i++)
- pline->filter[idx].cd_values[i] = cd_values[i];
+ for (i = 0; i < cd_nelmts; i++)
+ pline->filter[idx].cd_values[i] = cd_values[i];
} /* end if */
else
pline->filter[idx].cd_values = NULL;
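Each of the convenience calls in the sketch below appends one entry through H5Z_append, so the pipeline order is simply the call order (error checking omitted):

    #include "hdf5.h"
    #include <stdio.h>

    int main(void)
    {
        hid_t   dcpl     = H5Pcreate(H5P_DATASET_CREATE);
        hsize_t chunk[2] = {10, 20};

        H5Pset_chunk(dcpl, 2, chunk);
        H5Pset_shuffle(dcpl);                 /* filter 0 */
        H5Pset_deflate(dcpl, 6);              /* filter 1 */
        H5Pset_fletcher32(dcpl);              /* filter 2 */
        printf("filters in pipeline: %d\n", H5Pget_nfilters(dcpl));
        H5Pclose(dcpl);
        return 0;
    }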
@@ -1182,32 +1112,26 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_find_idx
+ * Function: H5Z_find_idx
*
- * Purpose: Given a filter ID return the offset in the global array
- * that holds all the registered filters.
- *
- * Return: Success: Non-negative index of entry in global filter table.
- * Failure: Negative
- *
- * Programmer: Quincey Koziol
- * Friday, April 5, 2003
- *
- * Modifications:
+ * Purpose: Given a filter ID return the offset in the global array
+ * that holds all the registered filters.
*
+ * Return: Success: Non-negative index of entry in global filter table.
+ * Failure: Negative
*-------------------------------------------------------------------------
*/
static int
H5Z_find_idx(H5Z_filter_t id)
{
size_t i; /* Local index variable */
- int ret_value=FAIL; /* Return value */
+ int ret_value = FAIL; /* Return value */
FUNC_ENTER_NOAPI_NOINIT_NOERR
- for (i=0; i<H5Z_table_used_g; i++)
- if (H5Z_table_g[i].id == id)
- HGOTO_DONE((int)i)
+ for (i = 0; i < H5Z_table_used_g; i++)
+ if (H5Z_table_g[i].id == id)
+ HGOTO_DONE ((int)i)
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -1215,37 +1139,29 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_find
- *
- * Purpose: Given a filter ID return a pointer to a global struct that
- * defines the filter.
+ * Function: H5Z_find
*
- * Return: Success: Ptr to entry in global filter table.
- * Failure: NULL
- *
- * Programmer: Robb Matzke
- * Wednesday, August 5, 1998
- *
- * Modifications:
- * Use H5Z_find_idx now
- * Quincey Koziol, April 5, 2003
+ * Purpose: Given a filter ID return a pointer to a global struct that
+ * defines the filter.
*
+ * Return: Success: Ptr to entry in global filter table.
+ * Failure: NULL
*-------------------------------------------------------------------------
*/
H5Z_class2_t *
H5Z_find(H5Z_filter_t id)
{
- int idx; /* Filter index in global table */
- H5Z_class2_t *ret_value=NULL; /* Return value */
+ int idx; /* Filter index in global table */
+ H5Z_class2_t *ret_value = NULL; /* Return value */
FUNC_ENTER_NOAPI(NULL)
/* Get the index in the global table */
- if((idx=H5Z_find_idx(id))<0)
- HGOTO_ERROR(H5E_PLINE, H5E_NOTFOUND, NULL, "required filter is not registered")
+ if ((idx = H5Z_find_idx(id)) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_NOTFOUND, NULL, "required filter %d is not registered", id)
/* Set return value */
- ret_value=H5Z_table_g+idx;
+ ret_value = H5Z_table_g+idx;
done:
FUNC_LEAVE_NOAPI(ret_value)
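The registration table searched by H5Z_find_idx and H5Z_find is the same one the public H5Zfilter_avail() consults. A minimal sketch:

    #include "hdf5.h"
    #include <stdio.h>

    int main(void)
    {
        if (H5Zfilter_avail(H5Z_FILTER_DEFLATE) > 0)
            printf("deflate is registered\n");
        else
            printf("deflate is not registered\n");
        return 0;
    }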
@@ -1253,171 +1169,173 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_pipeline
- *
- * Purpose: Process data through the filter pipeline. The FLAGS argument
- * is the filter invocation flags (definition flags come from
- * the PLINE->filter[].flags). The filters are processed in
- * definition order unless the H5Z_FLAG_REVERSE is set. The
- * FILTER_MASK is a bit-mask to indicate which filters to skip
- * and on exit will indicate which filters failed. Each
- * filter has an index number in the pipeline and that index
- * number is the filter's bit in the FILTER_MASK. NBYTES is the
- * number of bytes of data to filter and on exit should be the
- * number of resulting bytes while BUF_SIZE holds the total
- * allocated size of the buffer, which is pointed to BUF.
- *
- * If the buffer must grow during processing of the pipeline
- * then the pipeline function should free the original buffer
- * and return a fresh buffer, adjusting BUF_SIZE accordingly.
- *
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Robb Matzke
- * Tuesday, August 4, 1998
- *
- * Modifications:
- *
+ * Function: H5Z_pipeline
+ *
+ * Purpose: Process data through the filter pipeline. The FLAGS argument
+ * is the filter invocation flags (definition flags come from
+ * the PLINE->filter[].flags). The filters are processed in
+ * definition order unless the H5Z_FLAG_REVERSE is set. The
+ * FILTER_MASK is a bit-mask to indicate which filters to skip
+ * and on exit will indicate which filters failed. Each
+ * filter has an index number in the pipeline and that index
+ * number is the filter's bit in the FILTER_MASK. NBYTES is the
+ * number of bytes of data to filter and on exit should be the
+ * number of resulting bytes while BUF_SIZE holds the total
+ *              allocated size of the buffer, which is pointed to by BUF.
+ *
+ * If the buffer must grow during processing of the pipeline
+ * then the pipeline function should free the original buffer
+ * and return a fresh buffer, adjusting BUF_SIZE accordingly.
+ *
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
H5Z_pipeline(const H5O_pline_t *pline, unsigned flags,
- unsigned *filter_mask/*in,out*/, H5Z_EDC_t edc_read,
- H5Z_cb_t cb_struct, size_t *nbytes/*in,out*/,
- size_t *buf_size/*in,out*/, void **buf/*in,out*/)
+ unsigned *filter_mask/*in,out*/, H5Z_EDC_t edc_read,
+ H5Z_cb_t cb_struct, size_t *nbytes/*in,out*/,
+ size_t *buf_size/*in,out*/, void **buf/*in,out*/)
{
- size_t i, idx, new_nbytes;
- int fclass_idx; /* Index of filter class in global table */
- H5Z_class2_t *fclass=NULL; /* Filter class pointer */
+ size_t i, idx, new_nbytes;
+ int fclass_idx; /* Index of filter class in global table */
+ H5Z_class2_t *fclass=NULL; /* Filter class pointer */
#ifdef H5Z_DEBUG
- H5Z_stats_t *fstats=NULL; /* Filter stats pointer */
- H5_timer_t timer;
+ H5Z_stats_t *fstats=NULL; /* Filter stats pointer */
+ H5_timer_t timer;
#endif
- unsigned failed = 0;
- unsigned tmp_flags;
- herr_t ret_value=SUCCEED; /* Return value */
+ unsigned failed = 0;
+ unsigned tmp_flags;
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_NOAPI(FAIL)
- HDassert(0==(flags & ~((unsigned)H5Z_FLAG_INVMASK)));
- HDassert(filter_mask);
- HDassert(nbytes && *nbytes>0);
- HDassert(buf_size && *buf_size>0);
- HDassert(buf && *buf);
- HDassert(!pline || pline->nused<H5Z_MAX_NFILTERS);
+ HDassert (0 == (flags & ~((unsigned)H5Z_FLAG_INVMASK)));
+ HDassert (filter_mask);
+ HDassert (nbytes && *nbytes>0);
+ HDassert (buf_size && *buf_size>0);
+ HDassert (buf && *buf);
+ HDassert (!pline || pline->nused<H5Z_MAX_NFILTERS);
if (pline && (flags & H5Z_FLAG_REVERSE)) { /* Read */
- for (i=pline->nused; i>0; --i) {
- idx = i-1;
+ for (i = pline->nused; i > 0; --i) {
+ idx = i-1;
- if (*filter_mask & ((unsigned)1<<idx)) {
- failed |= (unsigned)1 << idx;
- continue;/*filter excluded*/
- }
+ if (*filter_mask & ((unsigned)1 << idx)) {
+ failed |= (unsigned)1 << idx;
+ continue; /* filter excluded */
+ }
- /* If the filter isn't registered and the application doesn't
- * indicate no plugin through HDF5_PRELOAD_PLUG (using the symbol "::"),
+ /* If the filter isn't registered and the application doesn't
+ * indicate no plugin through HDF5_PRELOAD_PLUG (using the symbol "::"),
* try to load it dynamically and register it. Otherwise, return failure
*/
- if((fclass_idx = H5Z_find_idx(pline->filter[idx].id)) < 0) {
+ if ((fclass_idx = H5Z_find_idx(pline->filter[idx].id)) < 0) {
hbool_t issue_error = FALSE;
-
- const H5Z_class2_t *filter_info;
-
- /* Try loading the filter */
- if(NULL != (filter_info = (const H5Z_class2_t *)H5PL_load(H5PL_TYPE_FILTER, (int)(pline->filter[idx].id)))) {
- /* Register the filter we loaded */
- if(H5Z_register(filter_info) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register filter")
-
- /* Search in the table of registered filters again to find the dynamic filter just loaded and registered */
- if((fclass_idx = H5Z_find_idx(pline->filter[idx].id)) < 0)
- issue_error = TRUE;
- } /* end if */
- else
+ H5Z_class2_t *filter_info;
+
+ /* Try loading the filter */
+ if (NULL != (filter_info = (const H5Z_class2_t *)H5PL_load(H5PL_TYPE_FILTER, (int)(pline->filter[idx].id)))) {
+ /* Register the filter we loaded */
+ herr_t status = H5Z_register(filter_info);
+ if (filter_info->name)
+ filter_info->name = (char *)H5MM_xfree(filter_info->name);
+ filter_info = (H5Z_class2_t *)H5MM_xfree(filter_info);
+ if (status < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTINIT, FAIL, "unable to register filter")
+
+ /* Search in the table of registered filters again to find the dynamic filter just loaded and registered */
+ if ((fclass_idx = H5Z_find_idx(pline->filter[idx].id)) < 0)
issue_error = TRUE;
+ } /* end if */
+ else
+ issue_error = TRUE;
/* Check for error */
if(issue_error) {
- /* Print out the filter name to give more info. But the name is optional for
+ /* Print out the filter name to give more info. But the name is optional for
* the filter */
- if(pline->filter[idx].name)
+ if (pline->filter[idx].name)
HGOTO_ERROR(H5E_PLINE, H5E_READERROR, FAIL, "required filter '%s' is not registered", pline->filter[idx].name)
else
HGOTO_ERROR(H5E_PLINE, H5E_READERROR, FAIL, "required filter (name unavailable) is not registered")
} /* end if */
} /* end if */
- fclass=&H5Z_table_g[fclass_idx];
+ fclass = &H5Z_table_g[fclass_idx];
#ifdef H5Z_DEBUG
- fstats=&H5Z_stat_table_g[fclass_idx];
- H5_timer_begin(&timer);
+ fstats = &H5Z_stat_table_g[fclass_idx];
+ H5_timer_begin (&timer);
#endif
- tmp_flags=flags|(pline->filter[idx].flags);
- tmp_flags|=(edc_read== H5Z_DISABLE_EDC) ? H5Z_FLAG_SKIP_EDC : 0;
- new_nbytes = (fclass->filter)(tmp_flags, pline->filter[idx].cd_nelmts,
+ tmp_flags = flags | (pline->filter[idx].flags);
+            tmp_flags |= (edc_read == H5Z_DISABLE_EDC) ? H5Z_FLAG_SKIP_EDC : 0;
+ new_nbytes = (fclass->filter)(tmp_flags, pline->filter[idx].cd_nelmts,
pline->filter[idx].cd_values, *nbytes, buf_size, buf);
#ifdef H5Z_DEBUG
- H5_timer_end(&(fstats->stats[1].timer), &timer);
- fstats->stats[1].total += MAX(*nbytes, new_nbytes);
- if (0==new_nbytes) fstats->stats[1].errors += *nbytes;
+ H5_timer_end (&(fstats->stats[1].timer), &timer);
+ fstats->stats[1].total += MAX(*nbytes, new_nbytes);
+ if (0 == new_nbytes) fstats->stats[1].errors += *nbytes;
#endif
- if(0==new_nbytes) {
- if((cb_struct.func && (H5Z_CB_FAIL==cb_struct.func(pline->filter[idx].id, *buf, *buf_size, cb_struct.op_data)))
- || !cb_struct.func)
- HGOTO_ERROR(H5E_PLINE, H5E_READERROR, FAIL, "filter returned failure during read")
+ if (0 == new_nbytes) {
+ if ((cb_struct.func &&
+ (H5Z_CB_FAIL == cb_struct.func (pline->filter[idx].id, *buf, *buf_size, cb_struct.op_data))) ||
+ !cb_struct.func)
+ HGOTO_ERROR (H5E_PLINE, H5E_READERROR, FAIL, "filter returned failure during read")
*nbytes = *buf_size;
failed |= (unsigned)1 << idx;
- H5E_clear_stack(NULL);
- } else {
+ H5E_clear_stack (NULL);
+ }
+ else {
*nbytes = new_nbytes;
}
- }
- } else if (pline) { /* Write */
- for (idx=0; idx<pline->nused; idx++) {
- if (*filter_mask & ((unsigned)1<<idx)) {
- failed |= (unsigned)1 << idx;
- continue; /*filter excluded*/
- }
- if ((fclass_idx=H5Z_find_idx(pline->filter[idx].id))<0) {
+ }
+ }
+ else if (pline) { /* Write */
+ for (idx = 0; idx < pline->nused; idx++) {
+ if (*filter_mask & ((unsigned)1 << idx)) {
+ failed |= (unsigned)1 << idx;
+ continue; /*filter excluded*/
+ }
+ if ((fclass_idx = H5Z_find_idx(pline->filter[idx].id)) < 0) {
/* Check if filter is optional -- If it isn't, then error */
- if ((pline->filter[idx].flags & H5Z_FLAG_OPTIONAL) == 0)
- HGOTO_ERROR(H5E_PLINE, H5E_WRITEERROR, FAIL, "required filter is not registered")
-
- failed |= (unsigned)1 << idx;
- H5E_clear_stack(NULL);
- continue; /*filter excluded*/
- }
- fclass=&H5Z_table_g[fclass_idx];
+ if ((pline->filter[idx].flags & H5Z_FLAG_OPTIONAL) == 0)
+ HGOTO_ERROR(H5E_PLINE, H5E_WRITEERROR, FAIL, "required filter is not registered")
+
+ failed |= (unsigned)1 << idx;
+ H5E_clear_stack (NULL);
+ continue; /*filter excluded*/
+ }
+ fclass = &H5Z_table_g[fclass_idx];
#ifdef H5Z_DEBUG
- fstats=&H5Z_stat_table_g[fclass_idx];
- H5_timer_begin(&timer);
+ fstats = &H5Z_stat_table_g[fclass_idx];
+ H5_timer_begin (&timer);
#endif
- new_nbytes = (fclass->filter)(flags|(pline->filter[idx].flags), pline->filter[idx].cd_nelmts,
- pline->filter[idx].cd_values, *nbytes, buf_size, buf);
+ new_nbytes = (fclass->filter)(flags | (pline->filter[idx].flags), pline->filter[idx].cd_nelmts,
+ pline->filter[idx].cd_values, *nbytes, buf_size, buf);
#ifdef H5Z_DEBUG
- H5_timer_end(&(fstats->stats[0].timer), &timer);
- fstats->stats[0].total += MAX(*nbytes, new_nbytes);
- if (0==new_nbytes) fstats->stats[0].errors += *nbytes;
+ H5_timer_end (&(fstats->stats[0].timer), &timer);
+ fstats->stats[0].total += MAX(*nbytes, new_nbytes);
+ if (0 == new_nbytes) fstats->stats[0].errors += *nbytes;
#endif
- if(0==new_nbytes) {
- if (0==(pline->filter[idx].flags & H5Z_FLAG_OPTIONAL)) {
- if((cb_struct.func && (H5Z_CB_FAIL==cb_struct.func(pline->filter[idx].id, *buf, *nbytes, cb_struct.op_data)))
- || !cb_struct.func)
- HGOTO_ERROR(H5E_PLINE, H5E_WRITEERROR, FAIL, "filter returned failure")
+ if (0 == new_nbytes) {
+ if (0 == (pline->filter[idx].flags & H5Z_FLAG_OPTIONAL)) {
+ if ((cb_struct.func &&
+ (H5Z_CB_FAIL == cb_struct.func(pline->filter[idx].id, *buf, *nbytes, cb_struct.op_data))) ||
+ !cb_struct.func)
+ HGOTO_ERROR (H5E_PLINE, H5E_WRITEERROR, FAIL, "filter returned failure")
*nbytes = *buf_size;
}
failed |= (unsigned)1 << idx;
- H5E_clear_stack(NULL);
- } else {
- *nbytes = new_nbytes;
+ H5E_clear_stack (NULL);
}
- }
+ else
+ *nbytes = new_nbytes;
+ }
}
*filter_mask = failed;
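The cb_struct.func consulted above when a filter reports failure is supplied by the application through H5Pset_filter_callback() on a dataset transfer property list. A minimal sketch of such a callback; the policy shown (tolerate fletcher32 checksum failures, fail on anything else) is only illustrative:

    #include "hdf5.h"

    static H5Z_cb_return_t
    on_filter_failure(H5Z_filter_t filter, void *buf, size_t buf_size, void *op_data)
    {
        (void)buf; (void)buf_size; (void)op_data;
        return (filter == H5Z_FILTER_FLETCHER32) ? H5Z_CB_CONT : H5Z_CB_FAIL;
    }

    /* Typical use:
     *     hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
     *     H5Pset_filter_callback(dxpl, on_filter_failure, NULL);
     * then pass dxpl to H5Dread/H5Dwrite. */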
@@ -1428,41 +1346,36 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_filter_info
- *
- * Purpose: Get pointer to filter info for pipeline
+ * Function: H5Z_filter_info
*
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Quincey Koziol
- * Friday, April 5, 2003
- *
- * Modifications:
+ * Purpose: Get pointer to filter info for pipeline
*
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
H5Z_filter_info_t *
H5Z_filter_info(const H5O_pline_t *pline, H5Z_filter_t filter)
{
- size_t idx; /* Index of filter in pipeline */
- H5Z_filter_info_t *ret_value; /* Return value */
+ size_t idx; /* Index of filter in pipeline */
+ H5Z_filter_info_t *ret_value = NULL; /* Return value */
FUNC_ENTER_NOAPI(NULL)
- HDassert(pline);
- HDassert(filter>=0 && filter<=H5Z_FILTER_MAX);
+ HDassert (pline);
+ HDassert (filter >= 0 && filter <= H5Z_FILTER_MAX);
/* Locate the filter in the pipeline */
- for(idx=0; idx<pline->nused; idx++)
- if(pline->filter[idx].id==filter)
+ for (idx = 0; idx < pline->nused; idx++)
+ if (pline->filter[idx].id == filter)
break;
/* Check if the filter was not already in the pipeline */
- if(idx>=pline->nused)
- HGOTO_ERROR(H5E_PLINE, H5E_NOTFOUND, NULL, "filter not in pipeline")
+ if (idx >= pline->nused)
+ HGOTO_ERROR (H5E_PLINE, H5E_NOTFOUND, NULL, "filter not in pipeline")
/* Set return value */
- ret_value=&pline->filter[idx];
+ ret_value = &pline->filter[idx];
done:
FUNC_LEAVE_NOAPI(ret_value)
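The public counterpart of H5Z_filter_info is H5Pget_filter_by_id2(), which looks a filter up in a property list by id. A minimal sketch (error checking omitted):

    #include "hdf5.h"
    #include <stdio.h>

    int main(void)
    {
        hid_t    dcpl      = H5Pcreate(H5P_DATASET_CREATE);
        hsize_t  chunk[1]  = {64};
        unsigned flags, cd_values[1];
        size_t   cd_nelmts = 1;
        char     name[32];

        H5Pset_chunk(dcpl, 1, chunk);
        H5Pset_deflate(dcpl, 7);

        if (H5Pget_filter_by_id2(dcpl, H5Z_FILTER_DEFLATE, &flags, &cd_nelmts,
                                 cd_values, sizeof(name), name, NULL) >= 0)
            printf("%s: level %u\n", name, cd_values[0]);

        H5Pclose(dcpl);
        return 0;
    }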
@@ -1470,41 +1383,35 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_filter_in_pline
- *
- * Purpose: Check wheter a filter is in the filter pipeline using the
- * filter ID. This function is very similar to H5Z_filter_info
+ * Function: H5Z_filter_in_pline
*
- * Return: TRUE - found filter
- * FALSE - not found
- * FAIL - error
- *
- * Programmer: Raymond Lu
- * 26 April 2013
- *
- * Modifications:
+ * Purpose:     Check whether a filter is in the filter pipeline using the
+ * filter ID. This function is very similar to H5Z_filter_info
*
+ * Return: TRUE - found filter
+ * FALSE - not found
+ * FAIL - error
*-------------------------------------------------------------------------
*/
htri_t
H5Z_filter_in_pline(const H5O_pline_t *pline, H5Z_filter_t filter)
{
- size_t idx; /* Index of filter in pipeline */
- htri_t ret_value = TRUE; /* Return value */
+ size_t idx; /* Index of filter in pipeline */
+ htri_t ret_value = TRUE; /* Return value */
FUNC_ENTER_NOAPI(FAIL)
- HDassert(pline);
- HDassert(filter>=0 && filter<=H5Z_FILTER_MAX);
+ HDassert (pline);
+ HDassert (filter >= 0 && filter <= H5Z_FILTER_MAX);
/* Locate the filter in the pipeline */
- for(idx=0; idx<pline->nused; idx++)
- if(pline->filter[idx].id==filter)
+ for (idx = 0; idx < pline->nused; idx++)
+ if (pline->filter[idx].id == filter)
break;
/* Check if the filter was not already in the pipeline */
- if(idx>=pline->nused)
- ret_value = FALSE;
+ if (idx >= pline->nused)
+ ret_value = FALSE;
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -1513,42 +1420,36 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5Z_all_filters_avail
- *
- * Purpose: Verify that all the filters in a pipeline are currently
- * available (i.e. registered)
- *
- * Return: Non-negative (TRUE/FALSE) on success/Negative on failure
+ * Function: H5Z_all_filters_avail
*
- * Programmer: Quincey Koziol
- * Tuesday, April 8, 2003
- *
- * Modifications:
+ * Purpose: Verify that all the filters in a pipeline are currently
+ * available (i.e. registered)
*
+ * Return: Non-negative (TRUE/FALSE) on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
htri_t
H5Z_all_filters_avail(const H5O_pline_t *pline)
{
- size_t i,j; /* Local index variable */
- htri_t ret_value=TRUE; /* Return value */
+ size_t i, j; /* Local index variable */
+ htri_t ret_value = TRUE; /* Return value */
FUNC_ENTER_NOAPI(FAIL)
/* Check args */
- HDassert(pline);
+ HDassert (pline);
/* Iterate through all the filters in pipeline */
- for(i=0; i<pline->nused; i++) {
-
+ for (i = 0; i < pline->nused; i++) {
/* Look for each filter in the list of registered filters */
- for(j=0; j<H5Z_table_used_g; j++)
- if(H5Z_table_g[j].id==pline->filter[i].id)
+ for (j = 0; j < H5Z_table_used_g; j++)
+ if (H5Z_table_g[j].id == pline->filter[i].id)
break;
/* Check if we didn't find the filter */
- if(j==H5Z_table_used_g)
- HGOTO_DONE(FALSE)
+ if (j == H5Z_table_used_g)
+ HGOTO_DONE (FALSE)
} /* end for */
done:
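From application code the same availability check is exposed per property list as H5Pall_filters_avail(). A minimal sketch:

    #include "hdf5.h"
    #include <stdio.h>

    int main(void)
    {
        hid_t   dcpl     = H5Pcreate(H5P_DATASET_CREATE);
        hsize_t chunk[1] = {64};

        H5Pset_chunk(dcpl, 1, chunk);
        H5Pset_deflate(dcpl, 6);
        if (H5Pall_filters_avail(dcpl) > 0)
            printf("every filter in this pipeline is registered\n");
        H5Pclose(dcpl);
        return 0;
    }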
@@ -1560,16 +1461,11 @@ done:
/*-------------------------------------------------------------------------
* Function: H5Z_delete
*
- * Purpose: Delete filter FILTER from pipeline PLINE;
- * deletes all filters if FILTER is H5Z_FILTER_NONE
- *
- * Return: Non-negative on success/Negative on failure
- *
- * Programmer: Pedro Vicente
- * Monday, January 26, 2004
- *
- * Modifications:
+ * Purpose: Delete filter FILTER from pipeline PLINE;
+ * deletes all filters if FILTER is H5Z_FILTER_NONE
*
+ * Return: Non-negative on success
+ * Negative on failure
*-------------------------------------------------------------------------
*/
herr_t
@@ -1580,52 +1476,52 @@ H5Z_delete(H5O_pline_t *pline, H5Z_filter_t filter)
FUNC_ENTER_NOAPI(FAIL)
/* Check args */
- HDassert(pline);
- HDassert(filter >= 0 && filter <= H5Z_FILTER_MAX);
+ HDassert (pline);
+ HDassert (filter >= 0 && filter <= H5Z_FILTER_MAX);
/* if the pipeline has no filters, just return */
- if(pline->nused==0)
- HGOTO_DONE(SUCCEED)
+ if (pline->nused == 0)
+ HGOTO_DONE (SUCCEED)
/* Delete all filters */
- if(H5Z_FILTER_ALL == filter) {
- if(H5O_msg_reset(H5O_PLINE_ID, pline) < 0)
+ if (H5Z_FILTER_ALL == filter) {
+ if (H5O_msg_reset(H5O_PLINE_ID, pline) < 0)
HGOTO_ERROR(H5E_PLINE, H5E_CANTFREE, FAIL, "can't release pipeline info")
} /* end if */
/* Delete filter */
else {
- size_t idx; /* Index of filter in pipeline */
- hbool_t found = FALSE; /* Indicate filter was found in pipeline */
+ size_t idx; /* Index of filter in pipeline */
+ hbool_t found = FALSE; /* Indicate filter was found in pipeline */
/* Locate the filter in the pipeline */
- for(idx = 0; idx < pline->nused; idx++)
- if(pline->filter[idx].id == filter) {
+ for (idx = 0; idx < pline->nused; idx++)
+ if (pline->filter[idx].id == filter) {
found = TRUE;
break;
} /* end if */
/* filter was not found in the pipeline */
- if(!found)
- HGOTO_ERROR(H5E_PLINE, H5E_NOTFOUND, FAIL, "filter not in pipeline")
+ if (!found)
+ HGOTO_ERROR (H5E_PLINE, H5E_NOTFOUND, FAIL, "filter not in pipeline")
/* Free information for deleted filter */
- if(pline->filter[idx].name && pline->filter[idx].name != pline->filter[idx]._name)
- HDassert((HDstrlen(pline->filter[idx].name) + 1) > H5Z_COMMON_NAME_LEN);
- if(pline->filter[idx].name != pline->filter[idx]._name)
+ if (pline->filter[idx].name && pline->filter[idx].name != pline->filter[idx]._name)
+ HDassert ((HDstrlen(pline->filter[idx].name) + 1) > H5Z_COMMON_NAME_LEN);
+ if (pline->filter[idx].name != pline->filter[idx]._name)
pline->filter[idx].name = (char *)H5MM_xfree(pline->filter[idx].name);
- if(pline->filter[idx].cd_values && pline->filter[idx].cd_values != pline->filter[idx]._cd_values)
- HDassert(pline->filter[idx].cd_nelmts > H5Z_COMMON_CD_VALUES);
- if(pline->filter[idx].cd_values != pline->filter[idx]._cd_values)
+ if (pline->filter[idx].cd_values && pline->filter[idx].cd_values != pline->filter[idx]._cd_values)
+ HDassert (pline->filter[idx].cd_nelmts > H5Z_COMMON_CD_VALUES);
+ if (pline->filter[idx].cd_values != pline->filter[idx]._cd_values)
pline->filter[idx].cd_values = (unsigned *)H5MM_xfree(pline->filter[idx].cd_values);
/* Remove filter from pipeline array */
- if((idx + 1) < pline->nused) {
+ if ((idx + 1) < pline->nused) {
/* Copy filters down & fix up any client data value arrays using internal storage */
- for(; (idx + 1) < pline->nused; idx++) {
+ for (; (idx + 1) < pline->nused; idx++) {
pline->filter[idx] = pline->filter[idx + 1];
- if(pline->filter[idx].name && (HDstrlen(pline->filter[idx].name) + 1) <= H5Z_COMMON_NAME_LEN)
+ if (pline->filter[idx].name && (HDstrlen(pline->filter[idx].name) + 1) <= H5Z_COMMON_NAME_LEN)
pline->filter[idx].name = pline->filter[idx]._name;
- if(pline->filter[idx].cd_nelmts <= H5Z_COMMON_CD_VALUES)
+ if (pline->filter[idx].cd_nelmts <= H5Z_COMMON_CD_VALUES)
pline->filter[idx].cd_values = pline->filter[idx]._cd_values;
} /* end for */
} /* end if */
@@ -1634,7 +1530,7 @@ H5Z_delete(H5O_pline_t *pline, H5Z_filter_t filter)
pline->nused--;
/* Reset information for previous last filter in pipeline */
- HDmemset(&pline->filter[pline->nused], 0, sizeof(H5Z_filter_info_t));
+ HDmemset (&pline->filter[pline->nused], 0, sizeof(H5Z_filter_info_t));
} /* end else */
done:
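Applications reach H5Z_delete through H5Premove_filter(); passing H5Z_FILTER_ALL clears the whole pipeline, exactly as in the branch above. A minimal sketch (error checking omitted):

    #include "hdf5.h"

    int main(void)
    {
        hid_t   dcpl     = H5Pcreate(H5P_DATASET_CREATE);
        hsize_t chunk[1] = {64};

        H5Pset_chunk(dcpl, 1, chunk);
        H5Pset_shuffle(dcpl);
        H5Pset_deflate(dcpl, 6);
        H5Premove_filter(dcpl, H5Z_FILTER_SHUFFLE);  /* drop one filter */
        H5Premove_filter(dcpl, H5Z_FILTER_ALL);      /* or clear the pipeline */
        H5Pclose(dcpl);
        return 0;
    }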
@@ -1645,14 +1541,11 @@ done:
/*-------------------------------------------------------------------------
* Function: H5Zget_filter_info
*
- * Purpose: Gets information about a pipeline data filter and stores it
- * in filter_config_flags.
- *
- * Return: zero on success / negative on failure
- *
- * Programmer: James Laird and Nat Furrer
- * Monday, June 7, 2004
+ * Purpose: Gets information about a pipeline data filter and stores it
+ * in filter_config_flags.
*
+ * Return: zero on success
+ * negative on failure
*-------------------------------------------------------------------------
*/
herr_t
@@ -1664,8 +1557,8 @@ H5Zget_filter_info(H5Z_filter_t filter, unsigned int *filter_config_flags)
H5TRACE2("e", "Zf*Iu", filter, filter_config_flags);
/* Get the filter info */
- if(H5Z_get_filter_info(filter, filter_config_flags) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANTGET, FAIL, "Filter info not retrieved")
+ if (H5Z_get_filter_info(filter, filter_config_flags) < 0)
+ HGOTO_ERROR (H5E_PLINE, H5E_CANTGET, FAIL, "Filter info not retrieved")
done:
FUNC_LEAVE_API(ret_value)
@@ -1675,35 +1568,32 @@ done:
/*-------------------------------------------------------------------------
* Function: H5Z_get_filter_info
*
- * Purpose: Gets information about a pipeline data filter and stores it
- * in filter_config_flags.
- *
- * Return: zero on success / negative on failure
- *
- * Programmer: Quincey Koziol
- * Saturday, May 11, 2013
+ * Purpose: Gets information about a pipeline data filter and stores it
+ * in filter_config_flags.
*
+ * Return: zero on success
+ * negative on failure
*-------------------------------------------------------------------------
*/
herr_t
H5Z_get_filter_info(H5Z_filter_t filter, unsigned int *filter_config_flags)
{
H5Z_class2_t *fclass;
- herr_t ret_value = SUCCEED;
+ herr_t ret_value = SUCCEED;
FUNC_ENTER_NOAPI(FAIL)
/* Look up the filter class info */
- if(NULL == (fclass = H5Z_find(filter)))
- HGOTO_ERROR(H5E_PLINE, H5E_BADVALUE, FAIL, "Filter not defined")
+ if (NULL == (fclass = H5Z_find(filter)))
+ HGOTO_ERROR (H5E_PLINE, H5E_BADVALUE, FAIL, "Filter not defined")
/* Set the filter config flags for the application */
- if(filter_config_flags != NULL) {
+ if (filter_config_flags != NULL) {
*filter_config_flags = 0;
- if(fclass->encoder_present)
+ if (fclass->encoder_present)
*filter_config_flags |= H5Z_FILTER_CONFIG_ENCODE_ENABLED;
- if(fclass->decoder_present)
+ if (fclass->decoder_present)
*filter_config_flags |= H5Z_FILTER_CONFIG_DECODE_ENABLED;
} /* end if */
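Typical application use of the public wrapper, checking whether the szip codec was built with both encode and decode support (the flags are the same ones set above):

    #include "hdf5.h"
    #include <stdio.h>

    int main(void)
    {
        unsigned int config = 0;

        if (H5Zget_filter_info(H5Z_FILTER_SZIP, &config) >= 0) {
            if (config & H5Z_FILTER_CONFIG_ENCODE_ENABLED)
                printf("szip encoder available\n");
            if (config & H5Z_FILTER_CONFIG_DECODE_ENABLED)
                printf("szip decoder available\n");
        }
        return 0;
    }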
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index 612d13d..8d889e4 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -34,7 +34,7 @@ endif ()
if (MINGW)
target_link_libraries (${HDF5_TEST_LIB_TARGET} "wsock32.lib")
endif ()
-target_link_libraries (${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET})
+target_link_libraries (${HDF5_TEST_LIB_TARGET} ${HDF5_LIB_TARGET} ${LINK_LIBS})
H5_SET_LIB_OPTIONS (${HDF5_TEST_LIB_TARGET} ${HDF5_TEST_LIB_NAME} STATIC)
set_target_properties (${HDF5_TEST_LIB_TARGET} PROPERTIES
FOLDER libraries/test
@@ -50,7 +50,7 @@ if (BUILD_SHARED_LIBS)
if (MINGW)
target_link_libraries (${HDF5_TEST_LIBSH_TARGET} "wsock32.lib")
endif ()
- target_link_libraries (${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET})
+ target_link_libraries (${HDF5_TEST_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} ${LINK_LIBS})
H5_SET_LIB_OPTIONS (${HDF5_TEST_LIBSH_TARGET} ${HDF5_TEST_LIB_NAME} SHARED ${HDF5_PACKAGE_SOVERSION})
set_target_properties (${HDF5_TEST_LIBSH_TARGET} PROPERTIES
FOLDER libraries/test
diff --git a/test/dsets.c b/test/dsets.c
index c936e3c..865948a 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -12,10 +12,10 @@
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/*
- * Programmer: Robb Matzke <matzke@llnl.gov>
- * Tuesday, December 9, 1997
+ * Programmer: Robb Matzke <matzke@llnl.gov>
+ * Tuesday, December 9, 1997
*
- * Purpose: Tests the dataset interface (H5D)
+ * Purpose: Tests the dataset interface (H5D)
*/
#include <stdlib.h>
@@ -57,39 +57,39 @@ const char *FILENAME[] = {
#define FILE_DEFLATE_NAME "deflate.h5"
/* Dataset names for testing filters */
-#define DSET_DEFAULT_NAME "default"
-#define DSET_CHUNKED_NAME "chunked"
+#define DSET_DEFAULT_NAME "default"
+#define DSET_CHUNKED_NAME "chunked"
#define DSET_COMPACT_NAME "compact"
-#define DSET_SIMPLE_IO_NAME "simple_io"
-#define DSET_USERBLOCK_IO_NAME "userblock_io"
+#define DSET_SIMPLE_IO_NAME "simple_io"
+#define DSET_USERBLOCK_IO_NAME "userblock_io"
#define DSET_COMPACT_IO_NAME "compact_io"
#define DSET_COMPACT_MAX_NAME "max_compact"
#define DSET_COMPACT_MAX2_NAME "max_compact_2"
-#define DSET_CONV_BUF_NAME "conv_buf"
-#define DSET_TCONV_NAME "tconv"
-#define DSET_DEFLATE_NAME "deflate"
-#define DSET_SHUFFLE_NAME "shuffle"
-#define DSET_FLETCHER32_NAME "fletcher32"
-#define DSET_FLETCHER32_NAME_2 "fletcher32_2"
-#define DSET_FLETCHER32_NAME_3 "fletcher32_3"
-#define DSET_SHUF_DEF_FLET_NAME "shuffle+deflate+fletcher32"
-#define DSET_SHUF_DEF_FLET_NAME_2 "shuffle+deflate+fletcher32_2"
+#define DSET_CONV_BUF_NAME "conv_buf"
+#define DSET_TCONV_NAME "tconv"
+#define DSET_DEFLATE_NAME "deflate"
+#define DSET_SHUFFLE_NAME "shuffle"
+#define DSET_FLETCHER32_NAME "fletcher32"
+#define DSET_FLETCHER32_NAME_2 "fletcher32_2"
+#define DSET_FLETCHER32_NAME_3 "fletcher32_3"
+#define DSET_SHUF_DEF_FLET_NAME "shuffle+deflate+fletcher32"
+#define DSET_SHUF_DEF_FLET_NAME_2 "shuffle+deflate+fletcher32_2"
#ifdef H5_HAVE_FILTER_SZIP
#define DSET_SZIP_NAME "szip"
-#define DSET_SHUF_SZIP_FLET_NAME "shuffle+szip+fletcher32"
-#define DSET_SHUF_SZIP_FLET_NAME_2 "shuffle+szip+fletcher32_2"
+#define DSET_SHUF_SZIP_FLET_NAME "shuffle+szip+fletcher32"
+#define DSET_SHUF_SZIP_FLET_NAME_2 "shuffle+szip+fletcher32_2"
#endif /* H5_HAVE_FILTER_SZIP */
-#define DSET_BOGUS_NAME "bogus"
-#define DSET_MISSING_NAME "missing"
-#define DSET_CAN_APPLY_NAME "can_apply"
-#define DSET_CAN_APPLY_NAME2 "can_apply2"
+#define DSET_BOGUS_NAME "bogus"
+#define DSET_MISSING_NAME "missing"
+#define DSET_CAN_APPLY_NAME "can_apply"
+#define DSET_CAN_APPLY_NAME2 "can_apply2"
#ifdef H5_HAVE_FILTER_SZIP
-#define DSET_CAN_APPLY_SZIP_NAME "can_apply_szip"
+#define DSET_CAN_APPLY_SZIP_NAME "can_apply_szip"
#endif /* H5_HAVE_FILTER_SZIP */
-#define DSET_SET_LOCAL_NAME "set_local"
-#define DSET_SET_LOCAL_NAME_2 "set_local_2"
-#define DSET_ONEBYTE_SHUF_NAME "onebyte_shuffle"
+#define DSET_SET_LOCAL_NAME "set_local"
+#define DSET_SET_LOCAL_NAME_2 "set_local_2"
+#define DSET_ONEBYTE_SHUF_NAME "onebyte_shuffle"
#define DSET_NBIT_INT_NAME "nbit_int"
#define DSET_NBIT_FLOAT_NAME "nbit_float"
#define DSET_NBIT_DOUBLE_NAME "nbit_double"
@@ -105,27 +105,27 @@ const char *FILENAME[] = {
#define DSET_SCALEOFFSET_FLOAT_NAME_2 "scaleoffset_float_2"
#define DSET_SCALEOFFSET_DOUBLE_NAME "scaleoffset_double"
#define DSET_SCALEOFFSET_DOUBLE_NAME_2 "scaleoffset_double_2"
-#define DSET_COMPARE_DCPL_NAME "compare_dcpl"
-#define DSET_COMPARE_DCPL_NAME_2 "compare_dcpl_2"
-#define DSET_COPY_DCPL_NAME_1 "copy_dcpl_1"
-#define DSET_COPY_DCPL_NAME_2 "copy_dcpl_2"
+#define DSET_COMPARE_DCPL_NAME "compare_dcpl"
+#define DSET_COMPARE_DCPL_NAME_2 "compare_dcpl_2"
+#define DSET_COPY_DCPL_NAME_1 "copy_dcpl_1"
+#define DSET_COPY_DCPL_NAME_2 "copy_dcpl_2"
#define COPY_DCPL_EXTFILE_NAME "ext_file"
-#define DSET_DEPREC_NAME "deprecated"
-#define DSET_DEPREC_NAME_CHUNKED "deprecated_chunked"
-#define DSET_DEPREC_NAME_COMPACT "deprecated_compact"
+#define DSET_DEPREC_NAME "deprecated"
+#define DSET_DEPREC_NAME_CHUNKED "deprecated_chunked"
+#define DSET_DEPREC_NAME_COMPACT "deprecated_compact"
#define DSET_DEPREC_NAME_FILTER "deprecated_filter"
#define USER_BLOCK 1024
#define SIXTY_FOUR_KB 65536
/* Temporary filter IDs used for testing */
-#define H5Z_FILTER_BOGUS 305
-#define H5Z_FILTER_CORRUPT 306
-#define H5Z_FILTER_CAN_APPLY_TEST 307
-#define H5Z_FILTER_SET_LOCAL_TEST 308
+#define H5Z_FILTER_BOGUS 305
+#define H5Z_FILTER_CORRUPT 306
+#define H5Z_FILTER_CAN_APPLY_TEST 307
+#define H5Z_FILTER_SET_LOCAL_TEST 308
#define H5Z_FILTER_DEPREC 309
-#define H5Z_FILTER_EXPAND 310
-#define H5Z_FILTER_CAN_APPLY_TEST2 311
+#define H5Z_FILTER_EXPAND 310
+#define H5Z_FILTER_CAN_APPLY_TEST2 311
/* Flags for testing filters */
#define DISABLE_FLETCHER32 0
@@ -195,15 +195,15 @@ const char *FILENAME[] = {
#define BYPASS_FILL_VALUE 7
/* Declarations for test_idx_compatible() */
-#define FIXED_IDX_FILE "fixed_idx.h5"
+#define FIXED_IDX_FILE "fixed_idx.h5"
#define DSET "dset"
#define DSET_FILTER "dset_filter"
/* Shared global arrays */
#define DSET_DIM1 100
#define DSET_DIM2 200
-int points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
-double points_dbl[DSET_DIM1][DSET_DIM2], check_dbl[DSET_DIM1][DSET_DIM2];
+int points[DSET_DIM1][DSET_DIM2], check[DSET_DIM1][DSET_DIM2];
+double points_dbl[DSET_DIM1][DSET_DIM2], check_dbl[DSET_DIM1][DSET_DIM2];
/* Local prototypes for filter functions */
static size_t filter_bogus(unsigned int flags, size_t cd_nelmts,
@@ -221,26 +221,26 @@ static size_t filter_expand(unsigned int flags, size_t cd_nelmts,
/*-------------------------------------------------------------------------
- * Function: test_create
+ * Function: test_create
*
- * Purpose: Attempts to create a dataset.
+ * Purpose: Attempts to create a dataset.
*
- * Return: Success: 0
+ * Return: Success: 0
*
- * Failure: -1
+ * Failure: -1
*
- * Programmer: Robb Matzke
- * Tuesday, December 9, 1997
+ * Programmer: Robb Matzke
+ * Tuesday, December 9, 1997
*
*-------------------------------------------------------------------------
*/
static herr_t
test_create(hid_t file)
{
- hid_t dataset, space, small_space, create_parms;
- hsize_t dims[2], small_dims[2];
- herr_t status;
- hsize_t csize[2];
+ hid_t dataset, space, small_space, create_parms;
+ hsize_t dims[2], small_dims[2];
+ herr_t status;
+ hsize_t csize[2];
TESTING("create, open, close");
@@ -257,11 +257,11 @@ test_create(hid_t file)
assert(space>=0);
/*
- * Create a dataset using the default dataset creation properties. We're
+ * Create a dataset using the default dataset creation properties. We're
* not sure what they are, so we won't check.
*/
dataset = H5Dcreate2(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
- H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
if(dataset < 0) goto error;
/* Close the dataset */
@@ -277,13 +277,13 @@ test_create(hid_t file)
* reporting.
*/
H5E_BEGIN_TRY {
- dataset = H5Dcreate2(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
- H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ dataset = H5Dcreate2(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
} H5E_END_TRY;
if(dataset >= 0) {
- H5_FAILED();
- puts(" Library allowed overwrite of existing dataset.");
- goto error;
+ H5_FAILED();
+ puts(" Library allowed overwrite of existing dataset.");
+ goto error;
}
/*
@@ -300,12 +300,12 @@ test_create(hid_t file)
* reporting.
*/
H5E_BEGIN_TRY {
- dataset = H5Dopen2(file, "does_not_exist", H5P_DEFAULT);
+ dataset = H5Dopen2(file, "does_not_exist", H5P_DEFAULT);
} H5E_END_TRY;
if(dataset >= 0) {
- H5_FAILED();
- puts(" Opened a non-existent dataset.");
- goto error;
+ H5_FAILED();
+ puts(" Opened a non-existent dataset.");
+ goto error;
}
/*
@@ -322,12 +322,12 @@ test_create(hid_t file)
assert(status >= 0);
H5E_BEGIN_TRY {
dataset = H5Dcreate2(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space,
- H5P_DEFAULT, create_parms, H5P_DEFAULT);
+ H5P_DEFAULT, create_parms, H5P_DEFAULT);
} H5E_END_TRY;
if(dataset >= 0) {
- H5_FAILED();
- puts(" Opened a dataset with incorrect chunking parameters.");
- goto error;
+ H5_FAILED();
+ puts(" Opened a dataset with incorrect chunking parameters.");
+ goto error;
}
csize[0] = 5;
@@ -336,7 +336,7 @@ test_create(hid_t file)
assert(status >= 0);
dataset = H5Dcreate2(file, DSET_CHUNKED_NAME, H5T_NATIVE_DOUBLE, space,
- H5P_DEFAULT, create_parms, H5P_DEFAULT);
+ H5P_DEFAULT, create_parms, H5P_DEFAULT);
if(dataset < 0) goto error;
H5Pclose(create_parms);
@@ -373,18 +373,18 @@ test_create(hid_t file)
/*-------------------------------------------------------------------------
- * Function: test_simple_io
+ * Function: test_simple_io
*
- * Purpose: Tests simple I/O. That is, reading and writing a complete
- * multi-dimensional array without data type or data space
- * conversions, without compression, and stored contiguously.
+ * Purpose: Tests simple I/O. That is, reading and writing a complete
+ * multi-dimensional array without data type or data space
+ * conversions, without compression, and stored contiguously.
*
- * Return: Success: 0
+ * Return: Success: 0
*
- * Failure: -1
+ * Failure: -1
*
- * Programmer: Robb Matzke
- * Wednesday, December 10, 1997
+ * Programmer: Robb Matzke
+ * Wednesday, December 10, 1997
*
*-------------------------------------------------------------------------
*/
@@ -392,10 +392,10 @@ static herr_t
test_simple_io(const char *env_h5_drvr, hid_t fapl)
{
char filename[FILENAME_BUF_SIZE];
- hid_t file, dataset, space, xfer;
- int i, j, n;
- hsize_t dims[2];
- void *tconv_buf = NULL;
+ hid_t file, dataset, space, xfer;
+ int i, j, n;
+ hsize_t dims[2];
+ void *tconv_buf = NULL;
int f;
haddr_t offset;
int rdata[DSET_DIM1][DSET_DIM2];
@@ -483,8 +483,8 @@ test_simple_io(const char *env_h5_drvr, hid_t fapl)
PASSED();
} /* end if */
else {
- SKIPPED();
- puts(" Current VFD doesn't support continuous address space");
+ SKIPPED();
+ puts(" Current VFD doesn't support continuous address space");
} /* end else */
return 0;
@@ -495,16 +495,16 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_userblock_offset
+ * Function: test_userblock_offset
*
- * Purpose: Tests H5Dget_offset when user block exists.
+ * Purpose: Tests H5Dget_offset when user block exists.
*
- * Return: Success: 0
+ * Return: Success: 0
*
- * Failure: -1
+ * Failure: -1
*
- * Programmer: Raymond Lu
- * Wednesday, November 27, 2002
+ * Programmer: Raymond Lu
+ * Wednesday, November 27, 2002
*
*-------------------------------------------------------------------------
*/
@@ -512,9 +512,9 @@ static herr_t
test_userblock_offset(const char *env_h5_drvr, hid_t fapl)
{
char filename[FILENAME_BUF_SIZE];
- hid_t file, fcpl, dataset, space;
- int i, j;
- hsize_t dims[2];
+ hid_t file, fcpl, dataset, space;
+ int i, j;
+ hsize_t dims[2];
int f;
haddr_t offset;
int rdata[DSET_DIM1][DSET_DIM2];
@@ -573,8 +573,8 @@ test_userblock_offset(const char *env_h5_drvr, hid_t fapl)
PASSED();
} /* end if */
else {
- SKIPPED();
- puts(" Current VFD doesn't support continuous address space");
+ SKIPPED();
+ puts(" Current VFD doesn't support continuous address space");
} /* end else */
return 0;
@@ -607,7 +607,7 @@ test_compact_io(hid_t fapl)
hid_t file, dataset, space, plist;
hsize_t dims[2];
int wbuf[16][8], rbuf[16][8];
- char filename[FILENAME_BUF_SIZE];
+ char filename[FILENAME_BUF_SIZE];
int i, j, n;
TESTING("compact dataset I/O");
@@ -709,7 +709,7 @@ test_max_compact(hid_t fapl)
size_t compact_size;
int *wbuf = NULL;
int *rbuf = NULL;
- char filename[FILENAME_BUF_SIZE];
+ char filename[FILENAME_BUF_SIZE];
int n;
size_t u;
@@ -846,11 +846,11 @@ error:
* Function: test_layout_extend
*
* Purpose: Verify that the creation of extendible dataset with dataspace:
- * cur_dims < max_dims (max_dims can be fixed size or H5S_UNLIMITED)
- * will behave as follows:
- * H5D_COMPACT layout: fail
- * H5D_CONTIGUOUS layout: fail
- * H5D_CHUNKED layout: succeed
+ * cur_dims < max_dims (max_dims can be fixed size or H5S_UNLIMITED)
+ * will behave as follows:
+ * H5D_COMPACT layout: fail
+ * H5D_CONTIGUOUS layout: fail
+ * H5D_CHUNKED layout: succeed
*
* Return: Success: 0
* Failure: -1
@@ -862,15 +862,15 @@ error:
static herr_t
test_layout_extend(hid_t fapl)
{
- char filename[FILENAME_BUF_SIZE]; /* File name */
- hid_t fid = -1; /* File id */
- hid_t sid_fix = -1, sid_unlim = -1; /* Dataspace id */
+ char filename[FILENAME_BUF_SIZE]; /* File name */
+ hid_t fid = -1; /* File id */
+ hid_t sid_fix = -1, sid_unlim = -1; /* Dataspace id */
hid_t dcpl_compact = -1, dcpl_contig = -1, dcpl_chunked = -1; /* Dataset creation property list id */
- hid_t did_fixed = -1, did_unlim = -1; /* Dataset id */
- hsize_t cur_size[1] = {10}; /* Current size of dataspace */
- hsize_t max_unlim[1] = {H5S_UNLIMITED}; /* Maximum size of dataspace (unlimited) */
- hsize_t max_fix[1] = {100}; /* Maximum size of dataspace (fixed) */
- hsize_t chunk_dim[1] = {10}; /* Chunk size */
+ hid_t did_fixed = -1, did_unlim = -1; /* Dataset id */
+ hsize_t cur_size[1] = {10}; /* Current size of dataspace */
+ hsize_t max_unlim[1] = {H5S_UNLIMITED}; /* Maximum size of dataspace (unlimited) */
+ hsize_t max_fix[1] = {100}; /* Maximum size of dataspace (fixed) */
+ hsize_t chunk_dim[1] = {10}; /* Chunk size */
TESTING("extendible dataset with various layout");
@@ -893,14 +893,14 @@ test_layout_extend(hid_t fapl)
/* Create dataset with extendible dataspace (fixed max_dims) should fail */
H5E_BEGIN_TRY {
- if(H5Dcreate2(fid, "compact", H5T_NATIVE_INT, sid_fix, H5P_DEFAULT, dcpl_compact, H5P_DEFAULT) != FAIL)
- TEST_ERROR
+ if(H5Dcreate2(fid, "compact", H5T_NATIVE_INT, sid_fix, H5P_DEFAULT, dcpl_compact, H5P_DEFAULT) != FAIL)
+ TEST_ERROR
} H5E_END_TRY;
/* Create dataset with extendible dataspace (unlimited max_dims) should fail */
H5E_BEGIN_TRY {
- if(H5Dcreate2(fid, "compact", H5T_NATIVE_INT, sid_unlim, H5P_DEFAULT, dcpl_compact, H5P_DEFAULT) != FAIL)
- TEST_ERROR
+ if(H5Dcreate2(fid, "compact", H5T_NATIVE_INT, sid_unlim, H5P_DEFAULT, dcpl_compact, H5P_DEFAULT) != FAIL)
+ TEST_ERROR
} H5E_END_TRY;
/* Create property list for contiguous dataset creation */
@@ -911,14 +911,14 @@ test_layout_extend(hid_t fapl)
/* Create dataset with extendible dataspace (fixed max_dims) should fail */
H5E_BEGIN_TRY {
- if(H5Dcreate2(fid, "contig", H5T_NATIVE_INT, sid_fix, H5P_DEFAULT, dcpl_contig, H5P_DEFAULT) != FAIL)
- TEST_ERROR
+ if(H5Dcreate2(fid, "contig", H5T_NATIVE_INT, sid_fix, H5P_DEFAULT, dcpl_contig, H5P_DEFAULT) != FAIL)
+ TEST_ERROR
} H5E_END_TRY;
/* Create dataset with extendible dataspace (unlimited max_dims) should fail*/
H5E_BEGIN_TRY {
- if(H5Dcreate2(fid, "contig", H5T_NATIVE_INT, sid_unlim, H5P_DEFAULT, dcpl_contig, H5P_DEFAULT) != FAIL)
- TEST_ERROR
+ if(H5Dcreate2(fid, "contig", H5T_NATIVE_INT, sid_unlim, H5P_DEFAULT, dcpl_contig, H5P_DEFAULT) != FAIL)
+ TEST_ERROR
} H5E_END_TRY;
/* Create property list for chunked dataset creation */
@@ -930,11 +930,11 @@ test_layout_extend(hid_t fapl)
/* Create dataset with extendible dataspace (fixed max_dims) should succeed */
if((did_fixed = H5Dcreate2(fid, "chunked_fixed", H5T_NATIVE_INT, sid_fix, H5P_DEFAULT, dcpl_chunked, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR
+ FAIL_STACK_ERROR
/* Create dataset with extendible dataspace (unlimited max_dims) should succeed */
if((did_unlim = H5Dcreate2(fid, "chunked_unlim", H5T_NATIVE_INT, sid_unlim, H5P_DEFAULT, dcpl_chunked, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR
+ FAIL_STACK_ERROR
/* Closing */
if(H5Sclose(sid_fix) < 0) FAIL_STACK_ERROR
@@ -969,16 +969,16 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_conv_buffer
+ * Function: test_conv_buffer
*
- * Purpose: Test size of data type conversion buffer.
+ * Purpose: Test size of data type conversion buffer.
*
- * Return: Success: 0
+ * Return: Success: 0
*
- * Failure: -1
+ * Failure: -1
*
- * Programmer: Raymond Lu
- * Monday, May 12, 2003
+ * Programmer: Raymond Lu
+ * Monday, May 12, 2003
*
*-------------------------------------------------------------------------
*/
@@ -1020,15 +1020,15 @@ test_conv_buffer(hid_t fid)
/* Populate the data members */
for(j = 0; j < DIM1; j++)
- for(k = 0; k < DIM2; k++)
- for(l = 0; l < DIM3; l++)
- cf->a[j][k][l] = 10*(j+1) + l + k;
+ for(k = 0; k < DIM2; k++)
+ for(l = 0; l < DIM3; l++)
+ cf->a[j][k][l] = 10*(j+1) + l + k;
for(j = 0; j < DIM2; j++)
- cf->b[j] = (float)(100.0f*(j+1) + 0.01f*j);
+ cf->b[j] = (float)(100.0f*(j+1) + 0.01f*j);
for(j = 0; j < DIM3; j++)
- cf->c[j] = 100.0f*(j+1) + 0.02f*j;
+ cf->c[j] = 100.0f*(j+1) + 0.02f*j;
/* Create data space */
@@ -1113,26 +1113,26 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_tconv
+ * Function: test_tconv
*
- * Purpose: Test some simple data type conversion stuff.
+ * Purpose: Test some simple data type conversion stuff.
*
- * Return: Success: 0
+ * Return: Success: 0
*
- * Failure: -1
+ * Failure: -1
*
- * Programmer: Robb Matzke
- * Wednesday, January 14, 1998
+ * Programmer: Robb Matzke
+ * Wednesday, January 14, 1998
*
*-------------------------------------------------------------------------
*/
static herr_t
test_tconv(hid_t file)
{
- char *out = NULL, *in = NULL;
- hsize_t dims[1];
- hid_t space = -1, dataset = -1;
- int i;
+ char *out = NULL, *in = NULL;
+ hsize_t dims[1];
+ hid_t space = -1, dataset = -1;
+ int i;
if ((out = (char *)HDmalloc((size_t)(4 * 1000 * 1000))) == NULL)
goto error;
@@ -1155,7 +1155,7 @@ test_tconv(hid_t file)
/* Create the data set */
if((dataset = H5Dcreate2(file, DSET_TCONV_NAME, H5T_STD_I32LE, space,
- H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
goto error;
/* Write the data to the dataset */
@@ -1203,25 +1203,25 @@ error:
/* This message derives from H5Z */
const H5Z_class2_t H5Z_BOGUS[1] = {{
H5Z_CLASS_T_VERS, /* H5Z_class_t version */
- H5Z_FILTER_BOGUS, /* Filter id number */
+ H5Z_FILTER_BOGUS, /* Filter id number */
1, 1, /* Encoding and decoding enabled */
- "bogus", /* Filter name for debugging */
+ "bogus", /* Filter name for debugging */
NULL, /* The "can apply" callback */
NULL, /* The "set local" callback */
- filter_bogus, /* The actual filter function */
+ filter_bogus, /* The actual filter function */
}};
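A sketch of how a test-only class like H5Z_BOGUS is made active: register it, then attach it to a chunked dataset creation property list. This fragment assumes the definitions above are compiled into the same program; error checking omitted:

    hid_t   dcpl;
    hsize_t chunk[2] = {10, 20};

    H5Zregister(H5Z_BOGUS);                  /* add the class to the filter table */
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 2, chunk);
    H5Pset_filter(dcpl, H5Z_FILTER_BOGUS, H5Z_FLAG_OPTIONAL, 0, NULL);
    /* any dataset created with dcpl now runs filter_bogus on each chunk */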
/*-------------------------------------------------------------------------
- * Function: can_apply_bogus
+ * Function: can_apply_bogus
*
- * Purpose: A bogus 'can apply' callback that returns 0 for H5T_NATIVE_DOUBLE
+ * Purpose: A bogus 'can apply' callback that returns 0 for H5T_NATIVE_DOUBLE
 datatype, but returns 1 for all other datatypes
*
- * Return: Success: Described above
- * Failure: 0
+ * Return: Success: Described above
+ * Failure: 0
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Friday, April 5, 2003
*
*-------------------------------------------------------------------------
@@ -1239,15 +1239,15 @@ can_apply_bogus(hid_t H5_ATTR_UNUSED dcpl_id, hid_t type_id, hid_t H5_ATTR_UNUSE
/*-------------------------------------------------------------------------
- * Function: filter_bogus
+ * Function: filter_bogus
*
- * Purpose: A bogus compression method that doesn't do anything.
+ * Purpose: A bogus compression method that doesn't do anything.
*
- * Return: Success: Data chunk size
+ * Return: Success: Data chunk size
*
- * Failure: 0
+ * Failure: 0
*
- * Programmer: Robb Matzke
+ * Programmer: Robb Matzke
* Tuesday, April 21, 1998
*
*-------------------------------------------------------------------------
@@ -1262,16 +1262,16 @@ filter_bogus(unsigned int H5_ATTR_UNUSED flags, size_t H5_ATTR_UNUSED cd_nelmts,
/*-------------------------------------------------------------------------
- * Function: set_local_bogus2
+ * Function: set_local_bogus2
*
- * Purpose: A 'set local' callback that stores the size of the datatype
+ * Purpose: A 'set local' callback that stores the size of the datatype
* and adds it to all the H5T_NATIVE_INT values during
* filter operation.
*
- * Return: Success: non-negative
- * Failure: negative
+ * Return: Success: non-negative
+ * Failure: negative
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Friday, April 5, 2003
*
*-------------------------------------------------------------------------
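/* A rough sketch of an H5Z_set_local_func_t along the lines described above:
 * it records the dataset's datatype size as a filter client-data value so the
 * filter function can use it later.  EXAMPLE_FILTER and the function name are
 * hypothetical; hdf5.h is assumed to be included. */
#define EXAMPLE_FILTER ((H5Z_filter_t)306)    /* hypothetical user-defined filter id */

static herr_t
example_set_local(hid_t dcpl_id, hid_t type_id, hid_t space_id)
{
    unsigned cd_values[1];
    size_t   type_size;

    (void)space_id;
    if((type_size = H5Tget_size(type_id)) == 0)
        return -1;
    cd_values[0] = (unsigned)type_size;       /* stash the datatype size for the filter */
    /* update this dataset's copy of the filter parameters */
    return H5Pmodify_filter(dcpl_id, EXAMPLE_FILTER, H5Z_FLAG_MANDATORY, 1, cd_values);
}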
@@ -1312,17 +1312,17 @@ set_local_bogus2(hid_t dcpl_id, hid_t type_id, hid_t H5_ATTR_UNUSED space_id)
/*-------------------------------------------------------------------------
- * Function: filter_bogus2
+ * Function: filter_bogus2
*
- * Purpose: A filter method that adds a value to data values on writing
+ * Purpose: A filter method that adds a value to data values on writing
* (if the parameter is set), but does not modify data values on
* reading (so that correct operation of the filter can be
* checked).
*
- * Return: Success: Data chunk size
- * Failure: 0
+ * Return: Success: Data chunk size
+ * Failure: 0
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Monday, April 7, 2003
*
*-------------------------------------------------------------------------
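/* A sketch of a filter function that, like filter_bogus2 above, alters data only
 * in the forward (write) direction and passes reads through untouched.  It treats
 * the chunk as native ints and adds cd_values[0] to each element; the name and
 * the meaning of cd_values[0] are illustrative.  hdf5.h is assumed. */
static size_t
example_add_on_write(unsigned int flags, size_t cd_nelmts, const unsigned int cd_values[],
                     size_t nbytes, size_t *buf_size, void **buf)
{
    (void)buf_size;
    if(!(flags & H5Z_FLAG_REVERSE) && cd_nelmts > 0) {  /* writing, not reading */
        int    *data   = (int *)*buf;
        size_t  nelmts = nbytes / sizeof(int);
        size_t  i;

        for(i = 0; i < nelmts; i++)
            data[i] += (int)cd_values[0];
    }
    return nbytes;                                      /* chunk size is unchanged */
}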
@@ -1370,15 +1370,15 @@ filter_bogus2(unsigned int flags, size_t cd_nelmts,
/*-------------------------------------------------------------------------
- * Function: filter_bogus3
+ * Function: filter_bogus3
*
- * Purpose: A bogus compression method that returns a failure.
+ * Purpose: A bogus compression method that returns a failure.
*
- * Return: Success: Data chunk size
+ * Return: Success: Data chunk size
*
- * Failure: 0
+ * Failure: 0
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 4 August 2010
*
*-------------------------------------------------------------------------
@@ -1394,12 +1394,12 @@ filter_bogus3(unsigned int H5_ATTR_UNUSED flags, size_t H5_ATTR_UNUSED cd_nelmts
/* This message derives from H5Z */
const H5Z_class2_t H5Z_CORRUPT[1] = {{
H5Z_CLASS_T_VERS, /* H5Z_class_t version */
- H5Z_FILTER_CORRUPT, /* Filter id number */
+ H5Z_FILTER_CORRUPT, /* Filter id number */
1, 1, /* Encoding and decoding enabled */
- "corrupt", /* Filter name for debugging */
+ "corrupt", /* Filter name for debugging */
NULL, /* The "can apply" callback */
NULL, /* The "set local" callback */
- filter_corrupt, /* The actual filter function */
+ filter_corrupt, /* The actual filter function */
}};
@@ -1410,11 +1410,11 @@ const H5Z_class2_t H5Z_CORRUPT[1] = {{
* writing so that when data is read back, the checksum should
* fail.
*
- * Return: Success: Data chunk size
+ * Return: Success: Data chunk size
*
- * Failure: 0
+ * Failure: 0
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* Jan 14, 2003
*
*-------------------------------------------------------------------------
@@ -1439,7 +1439,7 @@ filter_corrupt(unsigned int flags, size_t cd_nelmts,
if(offset > nbytes || (offset + length) > nbytes || length < sizeof(unsigned int))
TEST_ERROR
- if(NULL == (data = HDmalloc((size_t)length)))
+ if(NULL == (data = HDmalloc((size_t)length)))
TEST_ERROR
HDmemset(data, (int)value, (size_t)length);
@@ -1474,7 +1474,7 @@ error:
*
* Return: continue
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* Jan 14, 2003
*
*-------------------------------------------------------------------------
@@ -1497,7 +1497,7 @@ filter_cb_cont(H5Z_filter_t filter, void H5_ATTR_UNUSED *buf, size_t H5_ATTR_UNU
*
* Return: fail
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* Jan 14, 2003
*
*-------------------------------------------------------------------------
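/* The two callbacks above have the H5Z_filter_func_t signature used with
 * H5Pset_filter_callback().  A sketch of wiring one onto a dataset transfer
 * property list so that a failed checksum is logged and the read continues;
 * identifiers are illustrative, hdf5.h is assumed. */
#include <stdio.h>

static H5Z_cb_return_t
log_and_continue(H5Z_filter_t filter, void *buf, size_t buf_size, void *op_data)
{
    (void)buf; (void)buf_size; (void)op_data;
    fprintf(stderr, "filter %d failed; continuing with the raw chunk data\n", (int)filter);
    return H5Z_CB_CONT;                   /* return H5Z_CB_FAIL to make the read fail */
}

static hid_t
make_tolerant_dxpl(void)
{
    hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);

    if(dxpl < 0 || H5Pset_filter_callback(dxpl, log_and_continue, NULL) < 0)
        return -1;
    return dxpl;
}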
@@ -1514,17 +1514,17 @@ filter_cb_fail(H5Z_filter_t filter, void H5_ATTR_UNUSED *buf, size_t H5_ATTR_UNU
/*-------------------------------------------------------------------------
- * Function: test_filter_internal
+ * Function: test_filter_internal
*
- * Purpose: Tests dataset compression. If compression is requested when
- * it hasn't been compiled into the library (such as when
- * updating an existing compressed dataset) then data is sent to
- * the file uncompressed but no errors are returned.
+ * Purpose: Tests dataset compression. If compression is requested when
+ * it hasn't been compiled into the library (such as when
+ * updating an existing compressed dataset) then data is sent to
+ * the file uncompressed but no errors are returned.
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Robb Matzke
+ * Programmer: Robb Matzke
* Wednesday, April 15, 1998
*
*-------------------------------------------------------------------------
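/* A compact sketch of the kind of dataset this routine exercises: a chunked
 * dataset with a compression filter, whose on-disk size can later be compared
 * against the uncompressed size via H5Dget_storage_size().  The dataset name
 * and dimensions are illustrative; hdf5.h is assumed. */
static herr_t
create_deflate_dataset(hid_t file)
{
    const hsize_t dims[2]  = {100, 200};
    const hsize_t chunk[2] = {25, 50};
    hid_t         sid = -1, dcpl = -1, dset = -1;

    if((sid = H5Screate_simple(2, dims, NULL)) < 0) return -1;
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) return -1;
    if(H5Pset_chunk(dcpl, 2, chunk) < 0) return -1;      /* filters require chunked layout */
    if(H5Pset_deflate(dcpl, 6) < 0) return -1;            /* gzip, compression level 6      */
    if((dset = H5Dcreate2(file, "deflate_example", H5T_NATIVE_INT, sid,
                          H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) return -1;
    /* after H5Dwrite, H5Dget_storage_size(dset) reports the compressed size */
    H5Dclose(dset); H5Pclose(dcpl); H5Sclose(sid);
    return 0;
}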
@@ -1533,15 +1533,15 @@ static herr_t
test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
int corrupted, hsize_t *dset_size)
{
- hid_t dataset; /* Dataset ID */
- hid_t dxpl; /* Dataset xfer property list ID */
- hid_t write_dxpl; /* Dataset xfer property list ID for writing */
- hid_t sid; /* Dataspace ID */
- const hsize_t size[2] = {DSET_DIM1, DSET_DIM2}; /* Dataspace dimensions */
- const hsize_t hs_offset[2] = {FILTER_HS_OFFSET1, FILTER_HS_OFFSET2}; /* Hyperslab offset */
- const hsize_t hs_size[2] = {FILTER_HS_SIZE1, FILTER_HS_SIZE2}; /* Hyperslab size */
- void *tconv_buf = NULL; /* Temporary conversion buffer */
- size_t i, j, n; /* Local index variables */
+ hid_t dataset; /* Dataset ID */
+ hid_t dxpl; /* Dataset xfer property list ID */
+ hid_t write_dxpl; /* Dataset xfer property list ID for writing */
+ hid_t sid; /* Dataspace ID */
+ const hsize_t size[2] = {DSET_DIM1, DSET_DIM2}; /* Dataspace dimensions */
+ const hsize_t hs_offset[2] = {FILTER_HS_OFFSET1, FILTER_HS_OFFSET2}; /* Hyperslab offset */
+ const hsize_t hs_size[2] = {FILTER_HS_SIZE1, FILTER_HS_SIZE2}; /* Hyperslab size */
+ void *tconv_buf = NULL; /* Temporary conversion buffer */
+ size_t i, j, n; /* Local index variables */
herr_t status; /* Error status */
/* Create the data space */
@@ -1574,7 +1574,7 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
/* Create the dataset */
if((dataset = H5Dcreate2(fid, name, H5T_NATIVE_INT, sid, H5P_DEFAULT,
- dcpl, H5P_DEFAULT)) < 0) goto error;
+ dcpl, H5P_DEFAULT)) < 0) goto error;
PASSED();
@@ -1585,18 +1585,18 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
TESTING(" filters (uninitialized read)");
if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
- TEST_ERROR;
+ TEST_ERROR;
for(i=0; i<(size_t)size[0]; i++) {
- for(j=0; j<(size_t)size[1]; j++) {
- if(0!=check[i][j]) {
- H5_FAILED();
- printf(" Read a non-zero value.\n");
- printf(" At index %lu,%lu\n",
- (unsigned long)i, (unsigned long)j);
- goto error;
- }
- }
+ for(j=0; j<(size_t)size[1]; j++) {
+ if(0!=check[i][j]) {
+ H5_FAILED();
+ printf(" Read a non-zero value.\n");
+ printf(" At index %lu,%lu\n",
+ (unsigned long)i, (unsigned long)j);
+ goto error;
+ }
+ }
}
PASSED();
@@ -1608,13 +1608,13 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
TESTING(" filters (write)");
for(i=n=0; i<size[0]; i++) {
- for(j=0; j<size[1]; j++) {
- points[i][j] = (int)(n++);
- }
+ for(j=0; j<size[1]; j++) {
+ points[i][j] = (int)(n++);
+ }
}
if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, points) < 0)
- TEST_ERROR;
+ TEST_ERROR;
if((*dset_size=H5Dget_storage_size(dataset))==0) TEST_ERROR;
@@ -1649,20 +1649,20 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
if(status>=0) TEST_ERROR;
} else {
if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Check that the values read are the same as the values written */
for(i=0; i<size[0]; i++) {
- for(j=0; j<size[1]; j++) {
- if(points[i][j] != check[i][j]) {
- H5_FAILED();
- fprintf(stderr," Read different values than written.\n");
- fprintf(stderr," At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
- fprintf(stderr," At original: %d\n", (int)points[i][j]);
- fprintf(stderr," At returned: %d\n", (int)check[i][j]);
- goto error;
- }
- }
+ for(j=0; j<size[1]; j++) {
+ if(points[i][j] != check[i][j]) {
+ H5_FAILED();
+ fprintf(stderr," Read different values than written.\n");
+ fprintf(stderr," At index %lu,%lu\n", (unsigned long)i, (unsigned long)j);
+ fprintf(stderr," At original: %d\n", (int)points[i][j]);
+ fprintf(stderr," At returned: %d\n", (int)check[i][j]);
+ goto error;
+ }
+ }
}
}
@@ -1678,12 +1678,12 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
TESTING(" filters (modify)");
for(i=0; i<size[0]; i++) {
- for(j=0; j<size[1]/2; j++) {
- points[i][j] = (int)HDrandom ();
- }
+ for(j=0; j<size[1]/2; j++) {
+ points[i][j] = (int)HDrandom ();
+ }
}
if(H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, write_dxpl, points) < 0)
- TEST_ERROR;
+ TEST_ERROR;
if(corrupted) {
/* Default behavior is failure when data is corrupted. */
@@ -1708,19 +1708,19 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
} else {
/* Read the dataset back and check it */
if(H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, check) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Check that the values read are the same as the values written */
for(i=0; i<size[0]; i++) {
- for(j=0; j<size[1]; j++) {
- if(points[i][j] != check[i][j]) {
- H5_FAILED();
- printf(" Read different values than written.\n");
- printf(" At index %lu,%lu\n",
- (unsigned long)i, (unsigned long)j);
- goto error;
- }
- }
+ for(j=0; j<size[1]; j++) {
+ if(points[i][j] != check[i][j]) {
+ H5_FAILED();
+ printf(" Read different values than written.\n");
+ printf(" At index %lu,%lu\n",
+ (unsigned long)i, (unsigned long)j);
+ goto error;
+ }
+ }
}
}
@@ -1766,14 +1766,14 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
/* Check that the values read are the same as the values written */
for(i = 0; i < size[0]; i++)
- for(j = 0; j < size[1]; j++)
- if(points[i][j] != check[i][j]) {
- H5_FAILED();
- printf(" Read different values than written.\n");
- printf(" At index %lu,%lu\n",
- (unsigned long)i, (unsigned long)j);
- goto error;
- } /* end if */
+ for(j = 0; j < size[1]; j++)
+ if(points[i][j] != check[i][j]) {
+ H5_FAILED();
+ printf(" Read different values than written.\n");
+ printf(" At index %lu,%lu\n",
+ (unsigned long)i, (unsigned long)j);
+ goto error;
+ } /* end if */
} /* end else */
PASSED();
@@ -1788,15 +1788,15 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
TESTING(" filters (partial I/O)");
for(i=0; i<(size_t)hs_size[0]; i++) {
- for(j=0; j<(size_t)hs_size[1]; j++) {
- points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j] = (int)HDrandom();
- }
+ for(j=0; j<(size_t)hs_size[1]; j++) {
+ points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j] = (int)HDrandom();
+ }
}
if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, hs_offset, NULL, hs_size,
- NULL) < 0) TEST_ERROR;
+ NULL) < 0) TEST_ERROR;
/* (Use the "read" DXPL because partial I/O on corrupted data test needs to ignore errors during writing) */
if(H5Dwrite (dataset, H5T_NATIVE_INT, sid, sid, dxpl, points) < 0)
- TEST_ERROR;
+ TEST_ERROR;
if(corrupted) {
/* Default behavior is failure when data is corrupted. */
@@ -1820,25 +1820,25 @@ test_filter_internal(hid_t fid, const char *name, hid_t dcpl, int if_fletcher32,
if(status>=0) TEST_ERROR;
} else {
if(H5Dread (dataset, H5T_NATIVE_INT, sid, sid, dxpl, check) < 0)
- TEST_ERROR;
+ TEST_ERROR;
/* Check that the values read are the same as the values written */
for(i=0; i<(size_t)hs_size[0]; i++) {
- for(j=0; j<(size_t)hs_size[1]; j++) {
- if(points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j] !=
+ for(j=0; j<(size_t)hs_size[1]; j++) {
+ if(points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j] !=
check[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]) {
- H5_FAILED();
- fprintf(stderr," Read different values than written.\n");
- fprintf(stderr," At index %lu,%lu\n",
- (unsigned long)((size_t)hs_offset[0]+i),
- (unsigned long)((size_t)hs_offset[1]+j));
- fprintf(stderr," At original: %d\n",
- (int)points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]);
- fprintf(stderr," At returned: %d\n",
- (int)check[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]);
- goto error;
- }
- }
+ H5_FAILED();
+ fprintf(stderr," Read different values than written.\n");
+ fprintf(stderr," At index %lu,%lu\n",
+ (unsigned long)((size_t)hs_offset[0]+i),
+ (unsigned long)((size_t)hs_offset[1]+j));
+ fprintf(stderr," At original: %d\n",
+ (int)points[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]);
+ fprintf(stderr," At returned: %d\n",
+ (int)check[(size_t)hs_offset[0]+i][(size_t)hs_offset[1]+j]);
+ goto error;
+ }
+ }
}
}
@@ -1861,16 +1861,16 @@ error:
}
/*-------------------------------------------------------------------------
- * Function: test_filter_noencoder
+ * Function: test_filter_noencoder
*
- * Purpose: Tests filters with no encoder present. Ensures that data
- * can still be decoded correctly and that errors are thrown
- * when the application tries to write.
+ * Purpose: Tests filters with no encoder present. Ensures that data
+ * can still be decoded correctly and that errors are thrown
+ * when the application tries to write.
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Nat Furrer and James Laird
+ * Programmer: Nat Furrer and James Laird
* Monday, June 7, 2004
*
*-------------------------------------------------------------------------
@@ -2030,7 +2030,7 @@ test_get_filter_info(void)
#ifdef H5_HAVE_FILTER_SZIP
if(H5Zget_filter_info(H5Z_FILTER_SZIP, &flags) < 0) TEST_ERROR
- if(SZ_encoder_enabled()) {
+ if(H5Z_SZIP->encoder_present) {
if(((flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
((flags & H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0))
TEST_ERROR
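/* H5Zget_filter_info(), used just above, is the public way to find out whether a
 * filter's encoder and decoder were compiled in.  A hedged sketch of the same
 * check for SZIP; hdf5.h is assumed. */
static int
szip_encoder_available(void)
{
    unsigned cfg = 0;

    if(H5Zfilter_avail(H5Z_FILTER_SZIP) <= 0)          /* filter not present at all */
        return 0;
    if(H5Zget_filter_info(H5Z_FILTER_SZIP, &cfg) < 0)
        return -1;
    return (cfg & H5Z_FILTER_CONFIG_ENCODE_ENABLED) ? 1 : 0;
}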
@@ -2057,14 +2057,14 @@ error:
}
/*-------------------------------------------------------------------------
- * Function: test_filters
+ * Function: test_filters
*
- * Purpose: Tests dataset filter.
+ * Purpose: Tests dataset filter.
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Robb Matzke
+ * Programmer: Robb Matzke
* Wednesday, April 15, 1998
*
*-------------------------------------------------------------------------
@@ -2076,7 +2076,7 @@ H5_ATTR_UNUSED
#endif /* H5_HAVE_FILTER_SZIP */
fapl)
{
- hid_t dc; /* Dataset creation property list ID */
+ hid_t dc; /* Dataset creation property list ID */
const hsize_t chunk_size[2] = {FILTER_CHUNK_DIM1, FILTER_CHUNK_DIM2}; /* Chunk dimensions */
hsize_t null_size; /* Size of dataset with null filter */
@@ -2191,21 +2191,21 @@ H5_ATTR_UNUSED
if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
- puts("");
- if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
- if(test_filter_internal(file,DSET_SZIP_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&szip_size) < 0) goto error;
+ puts("");
+ if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
+ if(test_filter_internal(file,DSET_SZIP_NAME,dc,DISABLE_FLETCHER32,DATA_NOT_CORRUPTED,&szip_size) < 0) goto error;
if(H5Pclose (dc) < 0) goto error;
} else {
- SKIPPED();
+ SKIPPED();
}
TESTING("szip filter (without encoder)");
if( h5_szip_can_encode() != 1) {
- puts("");
- if(test_filter_noencoder(NOENCODER_SZIP_DATASET) < 0) goto error;
+ puts("");
+ if(test_filter_noencoder(NOENCODER_SZIP_DATASET) < 0) goto error;
} else {
- SKIPPED();
+ SKIPPED();
}
#else /* H5_HAVE_FILTER_SZIP */
@@ -2279,22 +2279,22 @@ H5_ATTR_UNUSED
if(H5Pset_fletcher32 (dc) < 0) goto error;
if(H5Pset_shuffle (dc) < 0) goto error;
- /* Make sure encoding is enabled */
+ /* Make sure encoding is enabled */
if( h5_szip_can_encode() == 1) {
- puts("");
- if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
- if(test_filter_internal(file,DSET_SHUF_SZIP_FLET_NAME,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&combo_size) < 0) goto error;
+ puts("");
+ if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
+ if(test_filter_internal(file,DSET_SHUF_SZIP_FLET_NAME,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&combo_size) < 0) goto error;
} else {
- SKIPPED();
+ SKIPPED();
}
TESTING("shuffle+szip+checksum filters(checksum first, without encoder)");
if( h5_szip_can_encode() != 1) {
- puts("");
- if(test_filter_noencoder(NOENCODER_SZIP_SHUFF_FLETCH_DATASET) < 0) goto error;
+ puts("");
+ if(test_filter_noencoder(NOENCODER_SZIP_SHUFF_FLETCH_DATASET) < 0) goto error;
} else {
- SKIPPED();
+ SKIPPED();
}
/* Clean up objects used for this test */
@@ -2304,20 +2304,20 @@ H5_ATTR_UNUSED
/* Make sure encoding is enabled */
if( h5_szip_can_encode() == 1) {
- puts("");
- if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
- if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
- if(H5Pset_shuffle (dc) < 0) goto error;
- if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
- if(H5Pset_fletcher32 (dc) < 0) goto error;
+ puts("");
+ if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
+ if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
+ if(H5Pset_shuffle (dc) < 0) goto error;
+ if(H5Pset_szip(dc, szip_options_mask, szip_pixels_per_block) < 0) goto error;
+ if(H5Pset_fletcher32 (dc) < 0) goto error;
- if(test_filter_internal(file,DSET_SHUF_SZIP_FLET_NAME_2,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&combo_size) < 0) goto error;
+ if(test_filter_internal(file,DSET_SHUF_SZIP_FLET_NAME_2,dc,ENABLE_FLETCHER32,DATA_NOT_CORRUPTED,&combo_size) < 0) goto error;
- /* Clean up objects used for this test */
- if(H5Pclose (dc) < 0) goto error;
+ /* Clean up objects used for this test */
+ if(H5Pclose (dc) < 0) goto error;
} else {
- SKIPPED();
+ SKIPPED();
}
#else /* H5_HAVE_FILTER_SZIP */
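/* A sketch of the filter stacking exercised in this block: shuffle, SZIP and a
 * Fletcher32 checksum on one DCPL.  Filters run in the order they were added on
 * write and in reverse on read.  The chunk dimensions and SZIP parameters are
 * illustrative; hdf5.h is assumed. */
static hid_t
make_shuf_szip_fletcher_dcpl(const hsize_t chunk[2])
{
    hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);

    if(dcpl < 0 || H5Pset_chunk(dcpl, 2, chunk) < 0) return -1;
    if(H5Pset_shuffle(dcpl) < 0) return -1;                           /* byte shuffle first  */
    if(H5Pset_szip(dcpl, H5_SZIP_NN_OPTION_MASK, 32) < 0) return -1;  /* then compress       */
    if(H5Pset_fletcher32(dcpl) < 0) return -1;                        /* checksum last       */
    return dcpl;
}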
@@ -2333,14 +2333,14 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_missing_filter
+ * Function: test_missing_filter
*
- * Purpose: Tests library behavior when filter is missing
+ * Purpose: Tests library behavior when filter is missing
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Thursday, November 14, 2002
*
*-------------------------------------------------------------------------
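/* The behavior tested here depends on the flag used when the filter was added to
 * the pipeline.  A minimal sketch; FAKE_FILTER is a hypothetical, unregistered
 * filter id and hdf5.h is assumed. */
#define FAKE_FILTER ((H5Z_filter_t)312)

static herr_t
add_maybe_missing_filter(hid_t dcpl, int mandatory)
{
    /* H5Z_FLAG_OPTIONAL:  if the filter is unavailable or fails at write time,
     *                     the chunk is stored unfiltered and no error is raised.
     * H5Z_FLAG_MANDATORY: the same situation makes the write fail.
     * Either way, a chunk that really was encoded with a now-missing filter
     * cannot be read back. */
    return H5Pset_filter(dcpl, FAKE_FILTER,
                         mandatory ? H5Z_FLAG_MANDATORY : H5Z_FLAG_OPTIONAL,
                         0, NULL);
}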
@@ -2468,16 +2468,16 @@ test_missing_filter(hid_t file)
/* Compare data */
/* Check that the values read are the same as the values written */
for(i=0; i<(size_t)dims[0]; i++) {
- for(j=0; j<(size_t)dims[1]; j++) {
- if(points[i][j] != check[i][j]) {
- H5_FAILED();
- printf(" Line %d: Read different values than written.\n",__LINE__);
- printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
- printf(" At original: %d\n",points[i][j]);
- printf(" At returned: %d\n",check[i][j]);
- goto error;
- } /* end if */
- } /* end for */
+ for(j=0; j<(size_t)dims[1]; j++) {
+ if(points[i][j] != check[i][j]) {
+ H5_FAILED();
+ printf(" Line %d: Read different values than written.\n",__LINE__);
+ printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
+ printf(" At original: %d\n",points[i][j]);
+ printf(" At returned: %d\n",check[i][j]);
+ goto error;
+ } /* end if */
+ } /* end for */
} /* end for */
/* Close dataset */
@@ -2506,8 +2506,8 @@ test_missing_filter(hid_t file)
/* Compose the name of the file to open, using the srcdir, if appropriate */
if(srcdir && ((HDstrlen(srcdir) + HDstrlen(FILE_DEFLATE_NAME) + 1) < sizeof(testfile))){
- HDstrcpy(testfile, srcdir);
- HDstrcat(testfile, "/");
+ HDstrcpy(testfile, srcdir);
+ HDstrcat(testfile, "/");
}
HDstrcat(testfile, FILE_DEFLATE_NAME);
@@ -2581,17 +2581,17 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_onebyte_shuffle
+ * Function: test_onebyte_shuffle
*
- * Purpose: Tests the 8-bit array with shuffling algorithm.
+ * Purpose: Tests the 8-bit array with shuffling algorithm.
 *              The data read back through the shuffle filter should be
 *              identical to the original array.
*
- * Return: Success: 0
+ * Return: Success: 0
*
- * Failure: -1
+ * Failure: -1
*
- * Programmer: Kent Yang
+ * Programmer: Kent Yang
* Wednesday, Nov. 13th, 2002
*
*-------------------------------------------------------------------------
@@ -2599,12 +2599,12 @@ error:
static herr_t
test_onebyte_shuffle(hid_t file)
{
- hid_t dataset, space,dc;
- const hsize_t size[2] = {10, 20};
+ hid_t dataset, space,dc;
+ const hsize_t size[2] = {10, 20};
const hsize_t chunk_size[2] = {10, 20};
unsigned char orig_data[10][20];
unsigned char new_data[10][20];
- size_t i, j;
+ size_t i, j;
TESTING("8-bit shuffling (setup)");
@@ -2618,11 +2618,11 @@ test_onebyte_shuffle(hid_t file)
/* Create the dataset */
if((dataset = H5Dcreate2(file, DSET_ONEBYTE_SHUF_NAME, H5T_NATIVE_UCHAR,
- space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
+ space, H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
for(i= 0;i< 10; i++)
for(j = 0; j < 20; j++)
- orig_data[i][j] = (unsigned char)HDrandom();
+ orig_data[i][j] = (unsigned char)HDrandom();
PASSED();
@@ -2634,8 +2634,8 @@ test_onebyte_shuffle(hid_t file)
TESTING("8-bit shuffling (write)");
if(H5Dwrite(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- orig_data) < 0)
- goto error;
+ orig_data) < 0)
+ goto error;
PASSED();
@@ -2647,20 +2647,20 @@ test_onebyte_shuffle(hid_t file)
/* Read the dataset back */
if(H5Dread(dataset, H5T_NATIVE_UCHAR, H5S_ALL, H5S_ALL, H5P_DEFAULT,
- new_data) < 0)
- goto error;
+ new_data) < 0)
+ goto error;
/* Check that the values read are the same as the values written */
for(i=0; i<(size_t)size[0]; i++) {
- for(j=0; j<(size_t)size[1]; j++) {
- if(new_data[i][j] != orig_data[i][j]) {
- H5_FAILED();
- printf(" Read different values than written.\n");
- printf(" At index %lu,%lu\n",
- (unsigned long)i, (unsigned long)j);
- goto error;
- }
- }
+ for(j=0; j<(size_t)size[1]; j++) {
+ if(new_data[i][j] != orig_data[i][j]) {
+ H5_FAILED();
+ printf(" Read different values than written.\n");
+ printf(" At index %lu,%lu\n",
+ (unsigned long)i, (unsigned long)j);
+ goto error;
+ }
+ }
}
/*----------------------------------------------------------------------
@@ -2944,12 +2944,12 @@ test_nbit_double(hid_t file)
H5_DOUBLE(1.2677579992621376e-61),
H5_DOUBLE(64568.289448797700),
H5_DOUBLE(-1.0619721778839084e-75)
- },
+ },
{
H5_DOUBLE(2.1499497833454840e+56),
- H5_DOUBLE(6.6562295504670740e-3),
- H5_DOUBLE(-1.5747263393432150),
- H5_DOUBLE(1.0711093225222612),
+ H5_DOUBLE(6.6562295504670740e-3),
+ H5_DOUBLE(-1.5747263393432150),
+ H5_DOUBLE(1.0711093225222612),
H5_DOUBLE(-9.8971679387636870e-1)
}};
double new_data[2][5];
@@ -3198,7 +3198,7 @@ test_nbit_compound(hid_t file)
hid_t dataset, space, dc;
const hsize_t size[2] = {2, 5};
const hsize_t chunk_size[2] = {2, 5};
- const float float_val[2][5] = {{188384.0F, 19.103516F, -1.0831790e9F, -84.242188F, 5.2045898F},
+ const float float_val[2][5] = {{188384.0F, 19.103516F, -1.0831790e9F, -84.242188F, 5.2045898F},
{-49140.0F, 2350.25F, -3.2110596e-1F, 6.4998865e-5F, -0.0F}};
atomic orig_data[2][5];
atomic new_data[2][5];
@@ -3544,7 +3544,7 @@ test_nbit_compound_2(hid_t file)
/* Check that the values read are the same as the values written
* Use mask for checking the significant bits, ignoring the padding bits
*/
- /* The original code
+ /* The original code
* i_mask = ~((unsigned)~0 << (precision[0] + offset[0])) & ((unsigned)~0 << offset[0]);
* left shift a 32-bit integer for 32-bit. The result is undefined by C language. A user
* discovered it using clang compiler with -fcatch-undefined-behavior option (see Issue 7674
@@ -3865,7 +3865,7 @@ test_nbit_int_size(hid_t file)
printf(" line %d: H5Pset_order failed\n",__LINE__);
goto error;
} /* end if */
-
+
if(H5Tset_size(datatype, 4)<0) {
H5_FAILED();
printf(" line %d: H5Pset_size failed\n",__LINE__);
@@ -3919,7 +3919,7 @@ test_nbit_int_size(hid_t file)
* Create a new dataset within the file.
*/
if((dataset = H5Dcreate2 (file, DSET_NBIT_INT_SIZE_NAME, datatype,
- dataspace, H5P_DEFAULT,
+ dataspace, H5P_DEFAULT,
dset_create_props, H5P_DEFAULT))<0) {
H5_FAILED();
printf(" line %d: H5dwrite failed\n",__LINE__);
@@ -3936,7 +3936,7 @@ test_nbit_int_size(hid_t file)
goto error;
} /* end if */
- /*
+ /*
* Get the precision of the data type
*/
if((precision = H5Tget_precision(datatype)) == 0) {
@@ -3945,10 +3945,10 @@ test_nbit_int_size(hid_t file)
goto error;
} /* end if */
- /*
+ /*
     * The size of the dataset after compression should be around 2 * DSET_DIM1 * DSET_DIM2
*/
- if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
+ if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
dset_size > DSET_DIM1*DSET_DIM2*(precision/8) + 1*KB) {
H5_FAILED();
HDfprintf(stdout, " Line %d: wrong dataset size: %Hu\n",__LINE__, dset_size);
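/* A sketch of the datatype/DCPL setup behind the size check above: an integer
 * type narrowed to a smaller precision so the N-bit filter stores roughly
 * precision/8 bytes per element.  The precision and offset values are
 * illustrative; hdf5.h is assumed. */
static hid_t
make_nbit_int_dcpl(hid_t *dtype_out, const hsize_t chunk[2])
{
    hid_t dtype = H5Tcopy(H5T_NATIVE_INT);
    hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);

    if(dtype < 0 || dcpl < 0) return -1;
    if(H5Tset_precision(dtype, 16) < 0) return -1;    /* keep only 16 significant bits */
    if(H5Tset_offset(dtype, 4) < 0) return -1;         /* starting at bit 4             */
    if(H5Pset_chunk(dcpl, 2, chunk) < 0) return -1;
    if(H5Pset_nbit(dcpl) < 0) return -1;               /* enable the N-bit filter       */
    *dtype_out = dtype;
    return dcpl;
}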
@@ -3972,7 +3972,7 @@ error:
/*-------------------------------------------------------------------------
* Function: test_nbit_flt_size
*
- * Purpose: Tests the correct size of the floating-number datatype for
+ * Purpose:     Tests the correct size of the floating-point datatype for
* nbit filter
*
* Return: Success: 0
@@ -4063,15 +4063,15 @@ test_nbit_flt_size(hid_t file)
printf(" line %d: H5Pset_order failed\n",__LINE__);
goto error;
} /* end if */
-
+
if(H5Tset_ebias(datatype, 31)<0) {
H5_FAILED();
printf(" line %d: H5Pset_size failed\n",__LINE__);
goto error;
} /* end if */
- /*
- * Initiliaze data buffer with random data
+ /*
+   * Initialize data buffer with random data
*/
for (i=0; i < DSET_DIM1; i++)
for (j=0; j < DSET_DIM2; j++)
@@ -4117,7 +4117,7 @@ test_nbit_flt_size(hid_t file)
* Create a new dataset within the file.
*/
if((dataset = H5Dcreate2 (file, DSET_NBIT_FLT_SIZE_NAME, datatype,
- dataspace, H5P_DEFAULT,
+ dataspace, H5P_DEFAULT,
dset_create_props, H5P_DEFAULT))<0) {
H5_FAILED();
printf(" line %d: H5dwrite failed\n",__LINE__);
@@ -4134,7 +4134,7 @@ test_nbit_flt_size(hid_t file)
goto error;
} /* end if */
- /*
+ /*
* Get the precision of the data type
*/
if((precision = H5Tget_precision(datatype)) == 0) {
@@ -4143,10 +4143,10 @@ test_nbit_flt_size(hid_t file)
goto error;
} /* end if */
- /*
+ /*
     * The size of the dataset after compression should be around 2 * DSET_DIM1 * DSET_DIM2
*/
- if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
+ if((dset_size = H5Dget_storage_size(dataset)) < DSET_DIM1*DSET_DIM2*(precision/8) ||
dset_size > DSET_DIM1*DSET_DIM2*(precision/8) + 1*KB) {
H5_FAILED();
HDfprintf(stdout, " Line %d: wrong dataset size: %Hu\n",__LINE__, dset_size);
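/* The floating-point variant builds a custom float layout before enabling N-bit;
 * a hedged sketch of such a datatype.  The field positions below describe a
 * 20-bit float (1 sign, 6 exponent, 13 mantissa bits) inside a 4-byte element
 * and are illustrative only, not the values used by this test.  hdf5.h assumed. */
static hid_t
make_small_float_type(void)
{
    hid_t dtype = H5Tcopy(H5T_IEEE_F32LE);

    if(dtype < 0) return -1;
    /* sign bit position, exponent position/size, mantissa position/size */
    if(H5Tset_fields(dtype, 19, 13, 6, 0, 13) < 0) return -1;
    if(H5Tset_offset(dtype, 0) < 0) return -1;
    if(H5Tset_precision(dtype, 20) < 0) return -1;
    if(H5Tset_size(dtype, 4) < 0) return -1;
    if(H5Tset_ebias(dtype, 31) < 0) return -1;      /* bias for a 6-bit exponent */
    return dtype;
}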
@@ -4891,18 +4891,18 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_multiopen
+ * Function: test_multiopen
*
- * Purpose: Tests that a bug no longer exists. If a dataset is opened
- * twice and one of the handles is used to extend the dataset,
- * then the other handle should return the new size when
- * queried.
+ * Purpose: Tests that a bug no longer exists. If a dataset is opened
+ * twice and one of the handles is used to extend the dataset,
+ * then the other handle should return the new size when
+ * queried.
*
- * Return: Success: 0
+ * Return: Success: 0
*
- * Failure: -1
+ * Failure: -1
*
- * Programmer: Robb Matzke
+ * Programmer: Robb Matzke
* Tuesday, June 9, 1998
*
*-------------------------------------------------------------------------
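/* A sketch of the double-open scenario described above: extend the dataset
 * through one handle, then confirm the second handle reports the new extent.
 * "growable" is a hypothetical dataset name; the dataset is assumed to be
 * chunked with an unlimited maximum dimension, and hdf5.h is assumed. */
static herr_t
check_multiopen_extent(hid_t file)
{
    hsize_t new_size[1] = {20}, cur[1];
    hid_t   d1 = H5Dopen2(file, "growable", H5P_DEFAULT);
    hid_t   d2 = H5Dopen2(file, "growable", H5P_DEFAULT);
    hid_t   space = -1;

    if(d1 < 0 || d2 < 0) return -1;
    if(H5Dset_extent(d1, new_size) < 0) return -1;        /* grow via the first handle   */
    if((space = H5Dget_space(d2)) < 0) return -1;          /* query via the second handle */
    if(H5Sget_simple_extent_dims(space, cur, NULL) < 0) return -1;
    H5Sclose(space); H5Dclose(d1); H5Dclose(d2);
    return (cur[0] == new_size[0]) ? 0 : -1;
}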
@@ -4910,10 +4910,10 @@ error:
static herr_t
test_multiopen (hid_t file)
{
- hid_t dcpl = -1, space = -1, dset1 = -1, dset2 = -1;
- hsize_t cur_size[1] = {10};
- static hsize_t max_size[1] = {H5S_UNLIMITED};
- hsize_t tmp_size[1];
+ hid_t dcpl = -1, space = -1, dset1 = -1, dset2 = -1;
+ hsize_t cur_size[1] = {10};
+ static hsize_t max_size[1] = {H5S_UNLIMITED};
+ hsize_t tmp_size[1];
TESTING("multi-open with extending");
@@ -4933,9 +4933,9 @@ test_multiopen (hid_t file)
if((space = H5Dget_space(dset2)) < 0) goto error;
if(H5Sget_simple_extent_dims(space, tmp_size, NULL) < 0) goto error;
if(cur_size[0] != tmp_size[0]) {
- H5_FAILED();
- printf(" Got %d instead of %d!\n", (int)tmp_size[0], (int)cur_size[0]);
- goto error;
+ H5_FAILED();
+ printf(" Got %d instead of %d!\n", (int)tmp_size[0], (int)cur_size[0]);
+ goto error;
} /* end if */
if(H5Dclose(dset1) < 0) goto error;
@@ -4948,25 +4948,25 @@ test_multiopen (hid_t file)
error:
H5E_BEGIN_TRY {
- H5Dclose(dset1);
- H5Dclose(dset2);
- H5Sclose(space);
- H5Pclose(dcpl);
+ H5Dclose(dset1);
+ H5Dclose(dset2);
+ H5Sclose(space);
+ H5Pclose(dcpl);
} H5E_END_TRY;
return -1;
}
/*-------------------------------------------------------------------------
- * Function: test_types
+ * Function: test_types
*
- * Purpose: Make some datasets with various types so we can test h5ls.
+ * Purpose: Make some datasets with various types so we can test h5ls.
*
- * Return: Success: 0
+ * Return: Success: 0
*
- * Failure: -1
+ * Failure: -1
*
- * Programmer: Robb Matzke
+ * Programmer: Robb Matzke
* Monday, June 7, 1999
*
*-------------------------------------------------------------------------
@@ -4974,10 +4974,10 @@ test_multiopen (hid_t file)
static herr_t
test_types(hid_t file)
{
- hid_t grp=-1, type=-1, space=-1, dset=-1;
- size_t i;
- hsize_t nelmts;
- unsigned char buf[32];
+ hid_t grp=-1, type=-1, space=-1, dset=-1;
+ size_t i;
+ hsize_t nelmts;
+ unsigned char buf[32];
TESTING("various datatypes");
if((grp = H5Gcreate2(file, "typetests", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
@@ -4985,12 +4985,12 @@ test_types(hid_t file)
/* bitfield_1 */
nelmts = sizeof(buf);
if((type=H5Tcopy(H5T_STD_B8LE)) < 0 ||
- (space=H5Screate_simple(1, &nelmts, NULL)) < 0 ||
- (dset=H5Dcreate2(grp, "bitfield_1", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
- goto error;
+ (space=H5Screate_simple(1, &nelmts, NULL)) < 0 ||
+ (dset=H5Dcreate2(grp, "bitfield_1", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto error;
for(i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i;
if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
- goto error;
+ goto error;
if(H5Sclose(space) < 0) goto error;
if(H5Tclose(type) < 0) goto error;
@@ -4999,12 +4999,12 @@ test_types(hid_t file)
/* bitfield_2 */
nelmts = sizeof(buf)/2;
if((type=H5Tcopy(H5T_STD_B16LE)) < 0 ||
- (space=H5Screate_simple(1, &nelmts, NULL)) < 0 ||
- (dset=H5Dcreate2(grp, "bitfield_2", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
- goto error;
+ (space=H5Screate_simple(1, &nelmts, NULL)) < 0 ||
+ (dset=H5Dcreate2(grp, "bitfield_2", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto error;
for(i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i;
if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
- goto error;
+ goto error;
if(H5Sclose(space) < 0) goto error;
if(H5Tclose(type) < 0) goto error;
if(H5Dclose(dset) < 0) goto error;
@@ -5015,7 +5015,7 @@ test_types(hid_t file)
H5Tset_tag(type, "testing 1-byte opaque type") < 0 ||
(space = H5Screate_simple(1, &nelmts, NULL)) < 0 ||
(dset = H5Dcreate2(grp, "opaque_1", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
- goto error;
+ goto error;
for(i = 0; i < sizeof buf; i++)
buf[i] = (unsigned char)0xff ^ (unsigned char)i;
if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) goto error;
@@ -5029,7 +5029,7 @@ test_types(hid_t file)
H5Tset_tag(type, "testing 4-byte opaque type") < 0 ||
(space = H5Screate_simple(1, &nelmts, NULL)) < 0 ||
(dset = H5Dcreate2(grp, "opaque_2", type, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
- goto error;
+ goto error;
for(i = 0; i < sizeof buf; i++)
buf[i] = (unsigned char)0xff ^ (unsigned char)i;
if(H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0) goto error;
@@ -5044,10 +5044,10 @@ test_types(hid_t file)
error:
H5E_BEGIN_TRY {
- H5Gclose(grp);
- H5Tclose(type);
- H5Sclose(space);
- H5Dclose(dset);
+ H5Gclose(grp);
+ H5Tclose(type);
+ H5Sclose(space);
+ H5Dclose(dset);
} H5E_END_TRY;
return -1;
}
@@ -5055,27 +5055,27 @@ test_types(hid_t file)
/* This message derives from H5Z */
const H5Z_class2_t H5Z_CAN_APPLY_TEST[1] = {{
H5Z_CLASS_T_VERS,
- H5Z_FILTER_CAN_APPLY_TEST, /* Filter id number */
+ H5Z_FILTER_CAN_APPLY_TEST, /* Filter id number */
1, 1,
- "can_apply_test", /* Filter name for debugging */
+ "can_apply_test", /* Filter name for debugging */
can_apply_bogus, /* The "can apply" callback */
NULL, /* The "set local" callback */
- filter_bogus, /* The actual filter function */
+ filter_bogus, /* The actual filter function */
}};
/*-------------------------------------------------------------------------
- * Function: test_can_apply
+ * Function: test_can_apply
*
- * Purpose: Tests library behavior when filter indicates it can't
+ * Purpose: Tests library behavior when filter indicates it can't
* apply to certain combinations of creation parameters.
 *              The filter is mandatory. If the CAN_APPLY callback function
* indicates wrong datatype, the dataset creation should fail.
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Friday, April 5, 2003
*
*-------------------------------------------------------------------------
@@ -5124,7 +5124,7 @@ test_can_apply(hid_t file)
} /* end if */
/* Create new dataset */
- /* (Should fail because the 'can apply' function should indicate inappropriate
+ /* (Should fail because the 'can apply' function should indicate inappropriate
     * combination. And the filter is mandatory.) */
H5E_BEGIN_TRY {
dsid = H5Dcreate2(file, DSET_CAN_APPLY_NAME, H5T_NATIVE_DOUBLE, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
@@ -5192,16 +5192,16 @@ test_can_apply(hid_t file)
/* Compare data */
/* Check that the values read are the same as the values written */
for(i=0; i<(size_t)dims[0]; i++) {
- for(j=0; j<(size_t)dims[1]; j++) {
- if(points[i][j] != check[i][j]) {
- H5_FAILED();
- printf(" Line %d: Read different values than written.\n",__LINE__);
- printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
- printf(" At original: %d\n",points[i][j]);
- printf(" At returned: %d\n",check[i][j]);
- goto error;
- } /* end if */
- } /* end for */
+ for(j=0; j<(size_t)dims[1]; j++) {
+ if(points[i][j] != check[i][j]) {
+ H5_FAILED();
+ printf(" Line %d: Read different values than written.\n",__LINE__);
+ printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
+ printf(" At original: %d\n",points[i][j]);
+ printf(" At returned: %d\n",check[i][j]);
+ goto error;
+ } /* end if */
+ } /* end for */
} /* end for */
/* Close dataset */
@@ -5236,29 +5236,29 @@ error:
/* This message derives from H5Z */
const H5Z_class2_t H5Z_CAN_APPLY_TEST2[1] = {{
H5Z_CLASS_T_VERS,
- H5Z_FILTER_CAN_APPLY_TEST2, /* Filter id number */
+ H5Z_FILTER_CAN_APPLY_TEST2, /* Filter id number */
1, 1,
- "can_apply_test", /* Filter name for debugging */
+ "can_apply_test", /* Filter name for debugging */
can_apply_bogus, /* The "can apply" callback */
NULL, /* The "set local" callback */
- filter_bogus3, /* The actual filter function */
+ filter_bogus3, /* The actual filter function */
}};
/*-------------------------------------------------------------------------
- * Function: test_can_apply2
+ * Function: test_can_apply2
*
- * Purpose: Tests library behavior when an optional filter indicates
- * it can't apply to certain combinations of creation
+ * Purpose: Tests library behavior when an optional filter indicates
+ * it can't apply to certain combinations of creation
* parameters. The filter function FILTER_BOGUS3 does nothing
- * than returning a failure. Because the filter is optional,
+              but return a failure. Because the filter is optional,
* the library skips the filter even though the CAN_APPLY_BOGUS
- * indicates the datatype DOUBLE can't apply to the dataset.
+ * indicates the datatype DOUBLE can't apply to the dataset.
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 4 August 2010
*
*-------------------------------------------------------------------------
@@ -5351,16 +5351,16 @@ test_can_apply2(hid_t file)
/* Compare data */
/* Check that the values read are the same as the values written */
for(i=0; i<(size_t)dims[0]; i++) {
- for(j=0; j<(size_t)dims[1]; j++) {
- if(points[i][j] != check[i][j]) {
- H5_FAILED();
- printf(" Line %d: Read different values than written.\n",__LINE__);
- printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
- printf(" At original: %d\n",points[i][j]);
- printf(" At returned: %d\n",check[i][j]);
- goto error;
- } /* end if */
- } /* end for */
+ for(j=0; j<(size_t)dims[1]; j++) {
+ if(points[i][j] != check[i][j]) {
+ H5_FAILED();
+ printf(" Line %d: Read different values than written.\n",__LINE__);
+ printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
+ printf(" At original: %d\n",points[i][j]);
+ printf(" At returned: %d\n",check[i][j]);
+ goto error;
+ } /* end if */
+ } /* end for */
} /* end for */
/* Close dataset */
@@ -5395,15 +5395,15 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_can_apply_szip
+ * Function: test_can_apply_szip
*
- * Purpose: Tests library behavior when szip filter indicates it can't
+ * Purpose: Tests library behavior when szip filter indicates it can't
* apply to certain combinations of creation parameters
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Monday, April 7, 2003
*
*-------------------------------------------------------------------------
@@ -5590,24 +5590,24 @@ error:
/* This message derives from H5Z */
const H5Z_class2_t H5Z_SET_LOCAL_TEST[1] = {{
H5Z_CLASS_T_VERS,
- H5Z_FILTER_SET_LOCAL_TEST, /* Filter id number */
+ H5Z_FILTER_SET_LOCAL_TEST, /* Filter id number */
1, 1,
- "set_local_test", /* Filter name for debugging */
+ "set_local_test", /* Filter name for debugging */
NULL, /* The "can apply" callback */
set_local_bogus2, /* The "set local" callback */
- filter_bogus2, /* The actual filter function */
+ filter_bogus2, /* The actual filter function */
}};
/*-------------------------------------------------------------------------
- * Function: test_set_local
+ * Function: test_set_local
*
- * Purpose: Tests library behavior for "set local" filter callback
+ * Purpose: Tests library behavior for "set local" filter callback
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Monday, April 7, 2003
*
*-------------------------------------------------------------------------
@@ -5634,16 +5634,16 @@ test_set_local(hid_t fapl)
/* Initialize the integer & floating-point dataset */
n=1.0F;
for(i = 0; i < DSET_DIM1; i++)
- for(j = 0; j < DSET_DIM2; j++) {
- points[i][j] = (int)n++;
- points_dbl[i][j] = (double)1.5F*n++;
- }
+ for(j = 0; j < DSET_DIM2; j++) {
+ points[i][j] = (int)n++;
+ points_dbl[i][j] = (double)1.5F*n++;
+ }
/* Open file */
if((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) {
H5_FAILED();
printf(" Line %d: Can't open file\n",__LINE__);
- goto error;
+ goto error;
}
/* Create dcpl with special filter */
@@ -5777,16 +5777,16 @@ test_set_local(hid_t fapl)
/* Compare data */
/* Check that the values read are the modified version of what was written */
for(i=0; i<dims[0]; i++) {
- for(j=0; j<dims[1]; j++) {
- if((points[i][j]+(int)sizeof(int)) != check[i][j]) {
- H5_FAILED();
- printf(" Line %d: Read different values than written.\n",__LINE__);
- printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
- printf(" At original: %d\n",points[i][j]);
- printf(" At returned: %d\n",check[i][j]);
- goto error;
- } /* end if */
- } /* end for */
+ for(j=0; j<dims[1]; j++) {
+ if((points[i][j]+(int)sizeof(int)) != check[i][j]) {
+ H5_FAILED();
+ printf(" Line %d: Read different values than written.\n",__LINE__);
+ printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
+ printf(" At original: %d\n",points[i][j]);
+ printf(" At returned: %d\n",check[i][j]);
+ goto error;
+ } /* end if */
+ } /* end for */
} /* end for */
/* Close dataset */
@@ -5827,18 +5827,18 @@ test_set_local(hid_t fapl)
/* Compare data */
/* Check that the values read are the modified version of what was written */
for(i=0; i<dims[0]; i++) {
- for(j=0; j<dims[1]; j++) {
- /* If the difference between two values is greater than 0.001%, they're
+ for(j=0; j<dims[1]; j++) {
+ /* If the difference between two values is greater than 0.001%, they're
* considered not equal. */
if(!DBL_REL_EQUAL(points_dbl[i][j],check_dbl[i][j],0.00001F)) {
- H5_FAILED();
- printf(" Line %d: Read different values than written.\n",__LINE__);
- printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
- printf(" At original: %f\n",points_dbl[i][j]);
- printf(" At returned: %f\n",check_dbl[i][j]);
- goto error;
- } /* end if */
- } /* end for */
+ H5_FAILED();
+ printf(" Line %d: Read different values than written.\n",__LINE__);
+ printf(" At index %lu,%lu\n", (unsigned long)(i), (unsigned long)(j));
+ printf(" At original: %f\n",points_dbl[i][j]);
+ printf(" At returned: %f\n",check_dbl[i][j]);
+ goto error;
+ } /* end if */
+ } /* end for */
} /* end for */
/* Close dataset */
@@ -5865,16 +5865,16 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_compare_dcpl
+ * Function: test_compare_dcpl
*
- * Purpose: Verifies that if the same DCPL was used to create two
+ * Purpose: Verifies that if the same DCPL was used to create two
* datasets, the DCPLs retrieved from each dataset should
* compare equal.
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Wednesday, January 7, 2004
*
*-------------------------------------------------------------------------
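/* The comparison performed here reduces to fetching each dataset's creation
 * property list and asking H5Pequal(); a hedged sketch, hdf5.h assumed. */
static htri_t
dcpls_equal(hid_t dset1, hid_t dset2)
{
    hid_t  p1 = H5Dget_create_plist(dset1);
    hid_t  p2 = H5Dget_create_plist(dset2);
    htri_t same;

    if(p1 < 0 || p2 < 0) return -1;
    same = H5Pequal(p1, p2);          /* >0 equal, 0 not equal, <0 error */
    H5Pclose(p1);
    H5Pclose(p2);
    return same;
}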
@@ -5950,17 +5950,17 @@ error:
/*-------------------------------------------------------------------------
- * Function: test_copy_dcpl
+ * Function: test_copy_dcpl
*
- * Purpose: Verifies whether the copy of dataset creation property
+ * Purpose: Verifies whether the copy of dataset creation property
* list works. It tests the DCPL for chunked layout with
* filter and for contiguous layout with external storage.
* (Please see #1608 in Bugzilla)
*
- * Return: Success: 0
- * Failure: -1
+ * Return: Success: 0
+ * Failure: -1
*
- * Programmer: Raymond Lu
+ * Programmer: Raymond Lu
* 28 January 2010
*
*-------------------------------------------------------------------------
@@ -5976,7 +5976,7 @@ test_copy_dcpl(hid_t file, hid_t fapl)
hid_t dcpl1_copy=(-1),dcpl2_copy=(-1);/* Copies of creation property list IDs */
const hsize_t dims[2] = {500, 4096}; /* Dataspace dimensions */
const hsize_t chunk_dims[2] = {250, 2048}; /* Chunk dimensions */
- char filename[FILENAME_BUF_SIZE];
+ char filename[FILENAME_BUF_SIZE];
hid_t new_file=(-1);
TESTING("copying dataset creation property lists");
@@ -6038,10 +6038,10 @@ test_copy_dcpl(hid_t file, hid_t fapl)
TEST_ERROR
if((new_dsid1 = H5Dcreate2(new_file, DSET_COPY_DCPL_NAME_1, H5T_NATIVE_INT, sid,
- H5P_DEFAULT, dcpl1_copy, H5P_DEFAULT)) < 0) TEST_ERROR
+ H5P_DEFAULT, dcpl1_copy, H5P_DEFAULT)) < 0) TEST_ERROR
if((new_dsid2 = H5Dcreate2(new_file, DSET_COPY_DCPL_NAME_2, H5T_NATIVE_INT, sid,
- H5P_DEFAULT, dcpl2_copy, H5P_DEFAULT)) < 0) TEST_ERROR
+ H5P_DEFAULT, dcpl2_copy, H5P_DEFAULT)) < 0) TEST_ERROR
/* Close dataspace */
if(H5Sclose(sid) < 0) TEST_ERROR
@@ -6384,8 +6384,8 @@ test_zero_dims(hid_t file)
TESTING("I/O on datasets with zero-sized dims");
- /*
- * One-dimensional dataset
+ /*
+ * One-dimensional dataset
*/
if((s = H5Screate_simple(1, &dsize, &dmax)) < 0) FAIL_STACK_ERROR
@@ -6463,14 +6463,14 @@ static herr_t
test_missing_chunk(hid_t file)
{
hid_t s = -1, d = -1, dcpl = -1;
- hsize_t hs_start[1];
- hsize_t hs_stride[1],
+ hsize_t hs_start[1];
+ hsize_t hs_stride[1],
hs_count[1],
hs_block[1];
int wdata[MISSING_CHUNK_DIM],
rdata[MISSING_CHUNK_DIM];
hsize_t dsize=100, dmax=H5S_UNLIMITED;
- hsize_t csize=5;
+ hsize_t csize=5;
size_t u;
TESTING("Read dataset with unwritten chunk & undefined fill value");
@@ -6502,7 +6502,7 @@ test_missing_chunk(hid_t file)
hs_count[0]=10;
hs_block[0]=5;
if(H5Sselect_hyperslab(s, H5S_SELECT_SET, hs_start, hs_stride, hs_count,
- hs_block) < 0) TEST_ERROR;
+ hs_block) < 0) TEST_ERROR;
/* Write selected data */
if(H5Dwrite(d, H5T_NATIVE_INT, s, s, H5P_DEFAULT, wdata) < 0) TEST_ERROR;
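/* The selection above writes only part of the dataset so that some chunks stay
 * unwritten.  A standalone sketch of a strided hyperslab selection of that kind;
 * the start/stride/count/block values here are illustrative, hdf5.h assumed. */
static herr_t
select_every_other_block(hid_t space)
{
    hsize_t start[1]  = {0};
    hsize_t stride[1] = {10};   /* distance between the starts of consecutive blocks */
    hsize_t count[1]  = {10};   /* number of blocks                                   */
    hsize_t block[1]  = {5};    /* elements per block                                 */

    /* selects elements 0-4, 10-14, ..., 90-94 of a 1-D dataspace */
    return H5Sselect_hyperslab(space, H5S_SELECT_SET, start, stride, count, block);
}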
@@ -6784,11 +6784,11 @@ set_local_deprec(hid_t H5_ATTR_UNUSED dcpl_id, hid_t H5_ATTR_UNUSED type_id, hid
/* Old style H5Z_class_t, essentially a copy of the "bogus" filter */
const H5Z_class1_t H5Z_DEPREC[1] = {{
- H5Z_FILTER_DEPREC, /* Filter id number */
- "deprec", /* Filter name for debugging */
+ H5Z_FILTER_DEPREC, /* Filter id number */
+ "deprec", /* Filter name for debugging */
can_apply_deprec, /* The "can apply" callback */
set_local_deprec, /* The "set local" callback */
- filter_bogus, /* The actual filter function */
+ filter_bogus, /* The actual filter function */
}};
@@ -6808,11 +6808,11 @@ const H5Z_class1_t H5Z_DEPREC[1] = {{
static herr_t
test_deprec(hid_t file)
{
- hid_t dataset, space, small_space, create_parms, dcpl;
- hsize_t dims[2], small_dims[2];
+ hid_t dataset, space, small_space, create_parms, dcpl;
+ hsize_t dims[2], small_dims[2];
hsize_t deprec_size;
- herr_t status;
- hsize_t csize[2];
+ herr_t status;
+ hsize_t csize[2];
TESTING("deprecated API routines");
@@ -6829,7 +6829,7 @@ test_deprec(hid_t file)
assert(space>=0);
/*
- * Create a dataset using the default dataset creation properties. We're
+ * Create a dataset using the default dataset creation properties. We're
* not sure what they are, so we won't check.
*/
if((dataset = H5Dcreate1(file, DSET_DEPREC_NAME, H5T_NATIVE_DOUBLE, space, H5P_DEFAULT)) < 0) goto error;
@@ -6843,13 +6843,13 @@ test_deprec(hid_t file)
* reporting.
*/
H5E_BEGIN_TRY {
- dataset = H5Dcreate1(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
- H5P_DEFAULT);
+ dataset = H5Dcreate1(file, DSET_DEFAULT_NAME, H5T_NATIVE_DOUBLE, space,
+ H5P_DEFAULT);
} H5E_END_TRY;
if(dataset >= 0) {
- H5_FAILED();
- puts(" Library allowed overwrite of existing dataset.");
- goto error;
+ H5_FAILED();
+ puts(" Library allowed overwrite of existing dataset.");
+ goto error;
}
/*
@@ -6865,12 +6865,12 @@ test_deprec(hid_t file)
* reporting.
*/
H5E_BEGIN_TRY {
- dataset = H5Dopen1(file, "does_not_exist");
+ dataset = H5Dopen1(file, "does_not_exist");
} H5E_END_TRY;
if(dataset >= 0) {
- H5_FAILED();
- puts(" Opened a non-existent dataset.");
- goto error;
+ H5_FAILED();
+ puts(" Opened a non-existent dataset.");
+ goto error;
}
/*
@@ -6914,12 +6914,12 @@ test_deprec(hid_t file)
assert(status >= 0);
H5E_BEGIN_TRY {
dataset = H5Dcreate1(file, DSET_DEPREC_NAME_CHUNKED, H5T_NATIVE_DOUBLE, space,
- create_parms);
+ create_parms);
} H5E_END_TRY;
if(dataset >= 0) {
- H5_FAILED();
- puts(" Opened a dataset with incorrect chunking parameters.");
- goto error;
+ H5_FAILED();
+ puts(" Opened a dataset with incorrect chunking parameters.");
+ goto error;
}
csize[0] = 5;
@@ -7024,7 +7024,7 @@ test_huge_chunks(hid_t fapl)
ret = H5Pset_chunk(dcpl, 1, &chunk_dim);
} H5E_END_TRY;
if(ret >= 0)
- FAIL_PUTS_ERROR(" Set chunk size with too large of chunk dimensions.")
+ FAIL_PUTS_ERROR(" Set chunk size with too large of chunk dimensions.")
/* Try to set too large of a chunk for n-D dataset (# of elements) */
chunk_dim2[0] = TOO_HUGE_CHUNK_DIM2_0;
@@ -7034,7 +7034,7 @@ test_huge_chunks(hid_t fapl)
ret = H5Pset_chunk(dcpl, 3, chunk_dim2);
} H5E_END_TRY;
if(ret >= 0)
- FAIL_PUTS_ERROR(" Set chunk size with too large of chunk dimensions.")
+ FAIL_PUTS_ERROR(" Set chunk size with too large of chunk dimensions.")
/* Set 1-D chunk size */
chunk_dim = HUGE_CHUNK_DIM;
@@ -7049,7 +7049,7 @@ test_huge_chunks(hid_t fapl)
dsid = H5Dcreate2(fid, HUGE_DATASET, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
} H5E_END_TRY;
if(dsid >= 0)
- FAIL_PUTS_ERROR(" 1-D Dataset with too large of chunk dimensions created.")
+ FAIL_PUTS_ERROR(" 1-D Dataset with too large of chunk dimensions created.")
/* Close 1-D dataspace */
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
@@ -7072,7 +7072,7 @@ test_huge_chunks(hid_t fapl)
dsid = H5Dcreate2(fid, HUGE_DATASET2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
} H5E_END_TRY;
if(dsid >= 0)
- FAIL_PUTS_ERROR(" n-D Dataset with too large of chunk dimensions created.")
+ FAIL_PUTS_ERROR(" n-D Dataset with too large of chunk dimensions created.")
/* Close n-D dataspace */
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
@@ -7468,12 +7468,12 @@ error:
/* This message derives from H5Z */
const H5Z_class2_t H5Z_EXPAND[1] = {{
H5Z_CLASS_T_VERS, /* H5Z_class_t version */
- H5Z_FILTER_EXPAND, /* Filter id number */
+ H5Z_FILTER_EXPAND, /* Filter id number */
1, 1, /* Encoding and decoding enabled */
- "expand", /* Filter name for debugging */
+ "expand", /* Filter name for debugging */
NULL, /* The "can apply" callback */
NULL, /* The "set local" callback */
- filter_expand, /* The actual filter function */
+ filter_expand, /* The actual filter function */
}};
/* Global "expansion factor" for filter_expand() routine */
@@ -7486,15 +7486,15 @@ static size_t filter_expand_factor_g = 0;
* Purpose: For testing library's behavior when a filter expands a chunk
* too much.
*
- * Note: This filter doesn't actually re-allocate the buffer to be
- * larger, it just changes the buffer size to a value that's too
- * large. The library should throw an error before using the
- * incorrect buffer information.
+ * Note: This filter doesn't actually re-allocate the buffer to be
+ * larger, it just changes the buffer size to a value that's too
+ * large. The library should throw an error before using the
+ * incorrect buffer information.
*
- * Return: Success: Data chunk size
- * Failure: 0
+ * Return: Success: Data chunk size
+ * Failure: 0
*
- * Programmer: Quincey Koziol
+ * Programmer: Quincey Koziol
* Mar 31, 2009
*
*-------------------------------------------------------------------------
@@ -7553,8 +7553,8 @@ test_chunk_expand(hid_t fapl)
hid_t scalar_sid = -1;/* Scalar dataspace ID */
hid_t dsid = -1; /* Dataset ID */
hsize_t dim, max_dim, chunk_dim; /* Dataset and chunk dimensions */
- hsize_t hs_offset; /* Hyperslab offset */
- hsize_t hs_size; /* Hyperslab size */
+ hsize_t hs_offset; /* Hyperslab offset */
+ hsize_t hs_size; /* Hyperslab size */
H5D_alloc_time_t alloc_time; /* Storage allocation time */
unsigned write_elem, read_elem; /* Element written/read */
unsigned u; /* Local index variable */
@@ -7567,8 +7567,8 @@ test_chunk_expand(hid_t fapl)
size = sizeof(size_t);
if(size <= 4) {
- SKIPPED();
- puts(" Current machine can't test for error");
+ SKIPPED();
+ puts(" Current machine can't test for error");
} /* end if */
else {
/* Register "expansion" filter */
@@ -7932,17 +7932,17 @@ error:
/*-------------------------------------------------------------------------
*
* test_idx_compatible():
- * Verify that the 1.8 branch cannot read datasets that use
- * Fixed Array indexing method.
+ * Verify that the 1.8 branch cannot read datasets that use
+ *	the Fixed Array indexing method.
*
*-------------------------------------------------------------------------
*/
static herr_t
test_idx_compatible(void)
{
- hid_t fid = -1; /* File id */
- hid_t did = -1; /* Dataset id */
- char *srcdir = HDgetenv("srcdir"); /* where the src code is located */
+ hid_t fid = -1; /* File id */
+ hid_t did = -1; /* Dataset id */
+ char *srcdir = HDgetenv("srcdir"); /* where the src code is located */
char filename[FILENAME_BUF_SIZE] = ""; /* old test file name */
/* Output message about test being performed */
@@ -7950,29 +7950,29 @@ test_idx_compatible(void)
/* Generate correct name for test file by prepending the source path */
if(srcdir && ((HDstrlen(srcdir) + HDstrlen(FIXED_IDX_FILE) + 1) < sizeof(filename))) {
- HDstrcpy(filename, srcdir);
- HDstrcat(filename, "/");
+ HDstrcpy(filename, srcdir);
+ HDstrcat(filename, "/");
}
HDstrcat(filename, FIXED_IDX_FILE);
/* Open the file */
if((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
- FAIL_STACK_ERROR
+ FAIL_STACK_ERROR
     /* Should not be able to read the dataset w/o filter that uses Fixed Array indexing */
H5E_BEGIN_TRY {
- if((did = H5Dopen2(fid, DSET, H5P_DEFAULT)) != FAIL)
- TEST_ERROR
+ if((did = H5Dopen2(fid, DSET, H5P_DEFAULT)) != FAIL)
+ TEST_ERROR
} H5E_END_TRY;
     /* Should not be able to read the dataset w/ filter that uses Fixed Array indexing */
H5E_BEGIN_TRY {
- if((did = H5Dopen2(fid, DSET_FILTER, H5P_DEFAULT)) != FAIL)
- TEST_ERROR
+ if((did = H5Dopen2(fid, DSET_FILTER, H5P_DEFAULT)) != FAIL)
+ TEST_ERROR
} H5E_END_TRY;
if(H5Fclose(fid) < 0)
- FAIL_STACK_ERROR
+ FAIL_STACK_ERROR
PASSED();
return 0;
@@ -7980,7 +7980,7 @@ test_idx_compatible(void)
error:
H5E_BEGIN_TRY {
H5Dclose(did);
- H5Fclose(fid);
+ H5Fclose(fid);
} H5E_END_TRY;
return -1;
} /* test_idx_compatible */
@@ -7989,7 +7989,7 @@ error:
/*-------------------------------------------------------------------------
* Function: test_zero_dim_dset
*
- * Purpose: Tests support for reading a 1D chunled dataset with
+ * Purpose:     Tests support for reading a 1D chunked dataset with
* dimension size = 0.
*
* Return: Success: 0
@@ -9104,7 +9104,7 @@ test_compact_dirty(hid_t fapl)
hid_t dcpl = -1; /* Dataset creation property list */
hsize_t dims[1] = {10}; /* Dimension */
int wbuf[10]; /* Data buffer */
- char filename[FILENAME_BUF_SIZE]; /* Filename */
+ char filename[FILENAME_BUF_SIZE]; /* Filename */
int i; /* Local index variable */
hbool_t dirty; /* The dirty flag */
@@ -9120,15 +9120,15 @@ test_compact_dirty(hid_t fapl)
wbuf[i] = i;
/* Create dataspace */
- if((sid = H5Screate_simple(1, dims, NULL)) < 0)
+ if((sid = H5Screate_simple(1, dims, NULL)) < 0)
TEST_ERROR
/* Set compact layout */
- if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
TEST_ERROR
- if(H5Pset_layout(dcpl, H5D_COMPACT) < 0)
+ if(H5Pset_layout(dcpl, H5D_COMPACT) < 0)
TEST_ERROR
- if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0)
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0)
TEST_ERROR
/* Create a compact dataset */
@@ -9140,7 +9140,7 @@ test_compact_dirty(hid_t fapl)
TEST_ERROR
/* Close the dataset */
- if(H5Dclose(did) < 0)
+ if(H5Dclose(did) < 0)
TEST_ERROR
/* Open the dataset */
@@ -9156,11 +9156,11 @@ test_compact_dirty(hid_t fapl)
TEST_ERROR
/* Close the dataset */
- if(H5Dclose(did) < 0)
+ if(H5Dclose(did) < 0)
TEST_ERROR
/* Close the dataspace */
- if(H5Sclose(sid) < 0)
+ if(H5Sclose(sid) < 0)
TEST_ERROR
/* Close the dataset creation property list */
@@ -9186,30 +9186,30 @@ error:
/*-------------------------------------------------------------------------
- * Function: main
+ * Function: main
*
- * Purpose: Tests the dataset interface (H5D)
+ * Purpose: Tests the dataset interface (H5D)
*
- * Return: Success: exit(0)
+ * Return: Success: exit(0)
*
- * Failure: exit(1)
+ * Failure: exit(1)
*
- * Programmer: Robb Matzke
- * Tuesday, December 9, 1997
+ * Programmer: Robb Matzke
+ * Tuesday, December 9, 1997
*
*-------------------------------------------------------------------------
*/
int
main(void)
{
- char filename[FILENAME_BUF_SIZE];
- hid_t file, grp, fapl, fapl2;
+ char filename[FILENAME_BUF_SIZE];
+ hid_t file, grp, fapl, fapl2;
hbool_t new_format;
int mdc_nelmts;
size_t rdcc_nelmts;
size_t rdcc_nbytes;
double rdcc_w0;
- int nerrors = 0;
+ int nerrors = 0;
const char *envval;
/* Don't run this test using certain file drivers */
@@ -9265,56 +9265,56 @@ main(void)
if(H5Gclose(grp) < 0)
goto error;
- nerrors += (test_create(file) < 0 ? 1 : 0);
- nerrors += (test_simple_io(envval, my_fapl) < 0 ? 1 : 0);
- nerrors += (test_compact_io(my_fapl) < 0 ? 1 : 0);
- nerrors += (test_max_compact(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_create(file) < 0 ? 1 : 0);
+ nerrors += (test_simple_io(envval, my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_compact_io(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_max_compact(my_fapl) < 0 ? 1 : 0);
nerrors += (test_compact_dirty(my_fapl) < 0 ? 1 : 0);
- nerrors += (test_conv_buffer(file) < 0 ? 1 : 0);
- nerrors += (test_tconv(file) < 0 ? 1 : 0);
- nerrors += (test_filters(file, my_fapl) < 0 ? 1 : 0);
- nerrors += (test_onebyte_shuffle(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_int(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_float(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_double(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_array(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_compound(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_compound_2(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_compound_3(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_int_size(file) < 0 ? 1 : 0);
- nerrors += (test_nbit_flt_size(file) < 0 ? 1 : 0);
- nerrors += (test_scaleoffset_int(file) < 0 ? 1 : 0);
- nerrors += (test_scaleoffset_int_2(file) < 0 ? 1 : 0);
- nerrors += (test_scaleoffset_float(file) < 0 ? 1 : 0);
- nerrors += (test_scaleoffset_float_2(file) < 0 ? 1 : 0);
- nerrors += (test_scaleoffset_double(file) < 0 ? 1 : 0);
- nerrors += (test_scaleoffset_double_2(file) < 0 ? 1 : 0);
- nerrors += (test_multiopen (file) < 0 ? 1 : 0);
- nerrors += (test_types(file) < 0 ? 1 : 0);
- nerrors += (test_userblock_offset(envval, my_fapl) < 0 ? 1 : 0);
- nerrors += (test_missing_filter(file) < 0 ? 1 : 0);
- nerrors += (test_can_apply(file) < 0 ? 1 : 0);
- nerrors += (test_can_apply2(file) < 0 ? 1 : 0);
- nerrors += (test_set_local(my_fapl) < 0 ? 1 : 0);
- nerrors += (test_can_apply_szip(file) < 0 ? 1 : 0);
- nerrors += (test_compare_dcpl(file) < 0 ? 1 : 0);
- nerrors += (test_copy_dcpl(file, my_fapl) < 0 ? 1 : 0);
- nerrors += (test_filter_delete(file) < 0 ? 1 : 0);
- nerrors += (test_filters_endianess() < 0 ? 1 : 0);
- nerrors += (test_zero_dims(file) < 0 ? 1 : 0);
- nerrors += (test_missing_chunk(file) < 0 ? 1 : 0);
- nerrors += (test_random_chunks(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_conv_buffer(file) < 0 ? 1 : 0);
+ nerrors += (test_tconv(file) < 0 ? 1 : 0);
+ nerrors += (test_filters(file, my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_onebyte_shuffle(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_int(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_float(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_double(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_array(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_compound(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_compound_2(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_compound_3(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_int_size(file) < 0 ? 1 : 0);
+ nerrors += (test_nbit_flt_size(file) < 0 ? 1 : 0);
+ nerrors += (test_scaleoffset_int(file) < 0 ? 1 : 0);
+ nerrors += (test_scaleoffset_int_2(file) < 0 ? 1 : 0);
+ nerrors += (test_scaleoffset_float(file) < 0 ? 1 : 0);
+ nerrors += (test_scaleoffset_float_2(file) < 0 ? 1 : 0);
+ nerrors += (test_scaleoffset_double(file) < 0 ? 1 : 0);
+ nerrors += (test_scaleoffset_double_2(file) < 0 ? 1 : 0);
+ nerrors += (test_multiopen (file) < 0 ? 1 : 0);
+ nerrors += (test_types(file) < 0 ? 1 : 0);
+ nerrors += (test_userblock_offset(envval, my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_missing_filter(file) < 0 ? 1 : 0);
+ nerrors += (test_can_apply(file) < 0 ? 1 : 0);
+ nerrors += (test_can_apply2(file) < 0 ? 1 : 0);
+ nerrors += (test_set_local(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_can_apply_szip(file) < 0 ? 1 : 0);
+ nerrors += (test_compare_dcpl(file) < 0 ? 1 : 0);
+ nerrors += (test_copy_dcpl(file, my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_filter_delete(file) < 0 ? 1 : 0);
+ nerrors += (test_filters_endianess() < 0 ? 1 : 0);
+ nerrors += (test_zero_dims(file) < 0 ? 1 : 0);
+ nerrors += (test_missing_chunk(file) < 0 ? 1 : 0);
+ nerrors += (test_random_chunks(my_fapl) < 0 ? 1 : 0);
#ifndef H5_NO_DEPRECATED_SYMBOLS
- nerrors += (test_deprec(file) < 0 ? 1 : 0);
+ nerrors += (test_deprec(file) < 0 ? 1 : 0);
#endif /* H5_NO_DEPRECATED_SYMBOLS */
- nerrors += (test_huge_chunks(my_fapl) < 0 ? 1 : 0);
- nerrors += (test_chunk_cache(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_huge_chunks(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_chunk_cache(my_fapl) < 0 ? 1 : 0);
nerrors += (test_big_chunks_bypass_cache(my_fapl) < 0 ? 1 : 0);
- nerrors += (test_chunk_expand(my_fapl) < 0 ? 1 : 0);
- nerrors += (test_idx_compatible() < 0 ? 1 : 0);
- nerrors += (test_layout_extend(my_fapl) < 0 ? 1 : 0);
- nerrors += (test_large_chunk_shrink(my_fapl) < 0 ? 1 : 0);
- nerrors += (test_zero_dim_dset(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_chunk_expand(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_idx_compatible() < 0 ? 1 : 0);
+ nerrors += (test_layout_extend(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_large_chunk_shrink(my_fapl) < 0 ? 1 : 0);
+ nerrors += (test_zero_dim_dset(my_fapl) < 0 ? 1 : 0);
if(H5Fclose(file) < 0)
goto error;
diff --git a/test/tattr.c b/test/tattr.c
index 85e82b0..135b46c 100644
--- a/test/tattr.c
+++ b/test/tattr.c
@@ -13,7 +13,7 @@
/***********************************************************
*
-* Test program: tattr
+* Test program: tattr
*
* Test the attribute functionality
*
@@ -29,7 +29,7 @@
*/
#define H5O_PACKAGE
#define H5O_TESTING
-#include "H5Opkg.h" /* Object headers */
+#include "H5Opkg.h" /* Object headers */
/*
* This file needs to access private information from the H5A package.
@@ -37,7 +37,7 @@
*/
#define H5A_PACKAGE
#define H5A_TESTING
-#include "H5Apkg.h" /* Attributes */
+#include "H5Apkg.h" /* Attributes */
/*
* This file needs to access private information from the H5F package.
@@ -45,7 +45,7 @@
*/
#define H5F_PACKAGE
#define H5F_TESTING
-#include "H5Fpkg.h" /* File access */
+#include "H5Fpkg.h" /* File access */
#define FILENAME "tattr.h5"
#define NAME_BUF_SIZE 1024
@@ -55,10 +55,10 @@
#define CORDER_ITER_STOP 3
/* 3-D dataset with fixed dimensions */
-#define SPACE1_RANK 3
-#define SPACE1_DIM1 3
-#define SPACE1_DIM2 15
-#define SPACE1_DIM3 13
+#define SPACE1_RANK 3
+#define SPACE1_DIM1 3
+#define SPACE1_DIM2 15
+#define SPACE1_DIM3 13
/* Dataset Information */
#define DSET1_NAME "Dataset1"
@@ -76,8 +76,8 @@
/* Attribute Rank & Dimensions */
#define ATTR1_NAME "Attr1"
-#define ATTR1_RANK 1
-#define ATTR1_DIM1 3
+#define ATTR1_RANK 1
+#define ATTR1_DIM1 3
int attr_data1[ATTR1_DIM1]={512,-234,98123}; /* Test data for 1st attribute */
/* rank & dimensions for another attribute */
@@ -85,25 +85,25 @@ int attr_data1[ATTR1_DIM1]={512,-234,98123}; /* Test data for 1st attribute */
int attr_data1a[ATTR1_DIM1]={256,11945,-22107};
#define ATTR2_NAME "Attr2"
-#define ATTR2_RANK 2
-#define ATTR2_DIM1 2
-#define ATTR2_DIM2 2
+#define ATTR2_RANK 2
+#define ATTR2_DIM1 2
+#define ATTR2_DIM2 2
int attr_data2[ATTR2_DIM1][ATTR2_DIM2]={{7614,-416},{197814,-3}}; /* Test data for 2nd attribute */
#define ATTR3_NAME "Attr3"
-#define ATTR3_RANK 3
-#define ATTR3_DIM1 2
-#define ATTR3_DIM2 2
-#define ATTR3_DIM3 2
+#define ATTR3_RANK 3
+#define ATTR3_DIM1 2
+#define ATTR3_DIM2 2
+#define ATTR3_DIM3 2
double attr_data3[ATTR3_DIM1][ATTR3_DIM2][ATTR3_DIM3]={{{2.3F,-26.1F}, {0.123F,-10.0F}},{{973.23F,-0.91827F},{2.0F,23.0F}}}; /* Test data for 3rd attribute */
#define ATTR4_NAME "Attr4"
-#define ATTR4_RANK 2
-#define ATTR4_DIM1 2
-#define ATTR4_DIM2 2
-#define ATTR4_FIELDNAME1 "i"
-#define ATTR4_FIELDNAME2 "d"
-#define ATTR4_FIELDNAME3 "c"
+#define ATTR4_RANK 2
+#define ATTR4_DIM1 2
+#define ATTR4_DIM2 2
+#define ATTR4_FIELDNAME1 "i"
+#define ATTR4_FIELDNAME2 "d"
+#define ATTR4_FIELDNAME3 "c"
size_t attr4_field1_off=0;
size_t attr4_field2_off=0;
size_t attr4_field3_off=0;
@@ -115,13 +115,13 @@ struct attr4_struct {
{{-23,981724.2F,'Q'},{0,2.0F,'\n'}}}; /* Test data for 4th attribute */
#define ATTR5_NAME "Attr5"
-#define ATTR5_RANK 0
+#define ATTR5_RANK 0
float attr_data5=-5.123F; /* Test data for 5th attribute */
-#define ATTR6_RANK 3
-#define ATTR6_DIM1 100
-#define ATTR6_DIM2 100
-#define ATTR6_DIM3 100
+#define ATTR6_RANK 3
+#define ATTR6_DIM1 100
+#define ATTR6_DIM2 100
+#define ATTR6_DIM3 100
#define ATTR7_NAME "attr 1 - 000000"
#define ATTR8_NAME "attr 2"
@@ -163,20 +163,20 @@ static herr_t attr_op1(hid_t loc_id, const char *name, const H5A_info_t *ainfo,
static void
test_attr_basic_write(hid_t fapl)
{
- hid_t fid1; /* HDF5 File IDs */
- hid_t dataset; /* Dataset ID */
- hid_t group; /* Group ID */
- hid_t sid1,sid2; /* Dataspace ID */
- hid_t attr, attr2; /* Attribute ID */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t group; /* Group ID */
+ hid_t sid1,sid2; /* Dataspace ID */
+ hid_t attr, attr2; /* Attribute ID */
hsize_t attr_size; /* storage size for attribute */
ssize_t attr_name_size; /* size of attribute name */
char *attr_name=NULL; /* name of attribute */
- hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
- hsize_t dims2[] = {ATTR1_DIM1};
- hsize_t dims3[] = {ATTR2_DIM1,ATTR2_DIM2};
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR1_DIM1};
+ hsize_t dims3[] = {ATTR2_DIM1,ATTR2_DIM2};
int read_data1[ATTR1_DIM1]={0}; /* Buffer for reading 1st attribute */
int i;
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Scalar Attribute Writing Functions\n"));
@@ -277,7 +277,7 @@ test_attr_basic_write(hid_t fapl)
if(attr_name_size > 0) {
attr_name = (char*)HDcalloc((size_t)(attr_name_size + 1), sizeof(char));
CHECK(attr_name, NULL, "HDcalloc");
-
+
if(attr_name) {
ret = (herr_t)H5Aget_name(attr, (size_t)(attr_name_size + 1), attr_name);
CHECK(ret, FAIL, "H5Aget_name");
@@ -313,7 +313,7 @@ test_attr_basic_write(hid_t fapl)
if(attr_name_size > 0) {
attr_name = (char*)HDcalloc((size_t)(attr_name_size+1), sizeof(char));
CHECK(attr_name, NULL, "HDcalloc");
-
+
if(attr_name) {
ret = (herr_t)H5Aget_name(attr2, (size_t)(attr_name_size + 1), attr_name);
CHECK(ret, FAIL, "H5Aget_name");
@@ -401,15 +401,15 @@ test_attr_basic_write(hid_t fapl)
static void
test_attr_basic_read(hid_t fapl)
{
- hid_t fid1; /* HDF5 File IDs */
- hid_t dataset; /* Dataset ID */
- hid_t group; /* Group ID */
- hid_t attr; /* Attribute ID */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t group; /* Group ID */
+ hid_t attr; /* Attribute ID */
H5O_info_t oinfo; /* Object info */
int read_data1[ATTR1_DIM1] = {0}; /* Buffer for reading 1st attribute */
int read_data2[ATTR2_DIM1][ATTR2_DIM2] = {{0}}; /* Buffer for reading 2nd attribute */
int i, j; /* Local index variables */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Attribute Functions\n"));
@@ -499,7 +499,7 @@ test_attr_flush(hid_t fapl)
set; /* Dataset ID */
double wdata=3.14159F; /* Data to write */
double rdata; /* Data read in */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Attribute Flushing\n"));
@@ -559,15 +559,15 @@ test_attr_flush(hid_t fapl)
static void
test_attr_plist(hid_t fapl)
{
- hid_t fid1; /* HDF5 File IDs */
- hid_t dataset; /* Dataset ID */
- hid_t sid1,sid2; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1,sid2; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
hid_t plist; /* Property list ID */
- hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
- hsize_t dims2[] = {ATTR1_DIM1};
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR1_DIM1};
H5T_cset_t cset; /* Character set for attributes */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Attribute Property Lists\n"));
@@ -673,14 +673,14 @@ test_attr_plist(hid_t fapl)
static void
test_attr_compound_write(hid_t fapl)
{
- hid_t fid1; /* HDF5 File IDs */
- hid_t dataset; /* Dataset ID */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
hid_t tid1; /* Attribute datatype ID */
- hid_t sid1,sid2; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
- hsize_t dims2[] = {ATTR4_DIM1,ATTR4_DIM2};
- herr_t ret; /* Generic return value */
+ hid_t sid1,sid2; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR4_DIM1,ATTR4_DIM2};
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Multiple Attribute Functions\n"));
@@ -760,11 +760,11 @@ test_attr_compound_write(hid_t fapl)
static void
test_attr_compound_read(hid_t fapl)
{
- hid_t fid1; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
+ hid_t fid1; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
hid_t space; /* Attribute dataspace */
hid_t type; /* Attribute datatype */
- hid_t attr; /* Attribute ID */
+ hid_t attr; /* Attribute ID */
char attr_name[ATTR_NAME_LEN]; /* Buffer for attribute names */
int rank; /* Attribute rank */
hsize_t dims[ATTR_MAX_DIMS]; /* Attribute dimensions */
@@ -779,7 +779,7 @@ test_attr_compound_read(hid_t fapl)
size_t name_len; /* Length of attribute name */
H5O_info_t oinfo; /* Object info */
int i, j; /* Local index variables */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Attribute Functions\n"));
@@ -910,12 +910,12 @@ test_attr_compound_read(hid_t fapl)
static void
test_attr_scalar_write(hid_t fapl)
{
- hid_t fid1; /* HDF5 File IDs */
- hid_t dataset; /* Dataset ID */
- hid_t sid1,sid2; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
- herr_t ret; /* Generic return value */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1,sid2; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Attribute Functions\n"));
@@ -975,14 +975,14 @@ test_attr_scalar_write(hid_t fapl)
static void
test_attr_scalar_read(hid_t fapl)
{
- hid_t fid1; /* HDF5 File IDs */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
H5S_class_t stype; /* Dataspace class */
float rdata = 0.0F; /* Buffer for reading 1st attribute */
H5O_info_t oinfo; /* Object info */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Scalar Attribute Reading Functions\n"));
@@ -1010,8 +1010,8 @@ test_attr_scalar_read(hid_t fapl)
/* Verify the floating-point value in this way to avoid compiler warning. */
if(!FLT_ABS_EQUAL(rdata, attr_data5))
- printf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n",
- "H5Aread", attr_data5, rdata, (int)__LINE__, __FILE__);
+ printf("*** UNEXPECTED VALUE from %s should be %f, but is %f at line %4d in %s\n",
+ "H5Aread", attr_data5, rdata, (int)__LINE__, __FILE__);
/* Get the attribute's dataspace */
sid = H5Aget_space(attr);
@@ -1047,15 +1047,15 @@ test_attr_scalar_read(hid_t fapl)
static void
test_attr_mult_write(hid_t fapl)
{
- hid_t fid1; /* HDF5 File IDs */
- hid_t dataset; /* Dataset ID */
- hid_t sid1,sid2; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
- hsize_t dims2[] = {ATTR1_DIM1};
- hsize_t dims3[] = {ATTR2_DIM1,ATTR2_DIM2};
- hsize_t dims4[] = {ATTR3_DIM1,ATTR3_DIM2,ATTR3_DIM3};
- herr_t ret; /* Generic return value */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1,sid2; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR1_DIM1};
+ hsize_t dims3[] = {ATTR2_DIM1,ATTR2_DIM2};
+ hsize_t dims4[] = {ATTR3_DIM1,ATTR3_DIM2,ATTR3_DIM3};
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Multiple Attribute Functions\n"));
@@ -1166,11 +1166,11 @@ test_attr_mult_write(hid_t fapl)
static void
test_attr_mult_read(hid_t fapl)
{
- hid_t fid1; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
+ hid_t fid1; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
hid_t space; /* Attribute dataspace */
hid_t type; /* Attribute datatype */
- hid_t attr; /* Attribute ID */
+ hid_t attr; /* Attribute ID */
char attr_name[ATTR_NAME_LEN]; /* Buffer for attribute names */
char temp_name[ATTR_NAME_LEN]; /* Buffer for mangling attribute names */
int rank; /* Attribute rank */
@@ -1184,7 +1184,7 @@ test_attr_mult_read(hid_t fapl)
size_t name_len; /* Length of attribute name */
H5O_info_t oinfo; /* Object info */
int i, j, k; /* Local index values */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Attribute Functions\n"));
@@ -1428,12 +1428,12 @@ attr_op1(hid_t H5_ATTR_UNUSED loc_id, const char *name, const H5A_info_t H5_ATTR
static void
test_attr_iterate(hid_t fapl)
{
- hid_t file; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
+ hid_t file; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
int count; /* operator data for the iterator */
H5O_info_t oinfo; /* Object info */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Attribute Functions\n"));
@@ -1500,13 +1500,13 @@ test_attr_iterate(hid_t fapl)
static void
test_attr_delete(hid_t fapl)
{
- hid_t fid1; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t attr; /* Attribute ID */
+ hid_t fid1; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t attr; /* Attribute ID */
char attr_name[ATTR_NAME_LEN]; /* Buffer for attribute names */
size_t name_len; /* Length of attribute name */
H5O_info_t oinfo; /* Object info */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Attribute Functions\n"));
@@ -1631,7 +1631,7 @@ test_attr_dtype_shared(hid_t fapl)
H5O_info_t oinfo; /* Object's information */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Shared Datatypes with Attributes\n"));
@@ -1798,17 +1798,17 @@ test_attr_dtype_shared(hid_t fapl)
static void
test_attr_duplicate_ids(hid_t fapl)
{
- hid_t fid1; /* HDF5 File IDs */
- hid_t dataset; /* Dataset ID */
- hid_t gid1, gid2; /* Group ID */
- hid_t sid1,sid2; /* Dataspace ID */
- hid_t attr, attr2; /* Attribute ID */
- hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
- hsize_t dims2[] = {ATTR1_DIM1};
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t gid1, gid2; /* Group ID */
+ hid_t sid1,sid2; /* Dataspace ID */
+ hid_t attr, attr2; /* Attribute ID */
+ hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
+ hsize_t dims2[] = {ATTR1_DIM1};
int read_data1[ATTR1_DIM1]={0}; /* Buffer for reading 1st attribute */
int rewrite_data[ATTR1_DIM1]={1234, -423, 9907256}; /* Test data for rewrite */
int i;
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing operations with two ID handles\n"));
@@ -2043,12 +2043,12 @@ test_attr_duplicate_ids(hid_t fapl)
static int
test_attr_dense_verify(hid_t loc_id, unsigned max_attr)
{
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
- hid_t attr; /* Attribute ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t attr; /* Attribute ID */
unsigned value; /* Attribute value */
unsigned u; /* Local index variable */
int old_nerrs; /* Number of errors when entering this check */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Retrieve the current # of reported errors */
old_nerrs = GetTestNumErrs();
@@ -2113,19 +2113,19 @@ test_attr_dense_verify(hid_t loc_id, unsigned max_attr)
static void
test_attr_dense_create(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_dense; /* Are attributes stored densely? */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Dense Attribute Storage Creation\n"));
@@ -2244,19 +2244,19 @@ test_attr_dense_create(hid_t fcpl, hid_t fapl)
static void
test_attr_dense_open(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_dense; /* Are attributes stored densely? */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Opening Attributes in Dense Storage\n"));
@@ -2383,20 +2383,20 @@ test_attr_dense_open(hid_t fcpl, hid_t fapl)
static void
test_attr_dense_delete(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_dense; /* Are attributes stored densely? */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
H5O_info_t oinfo; /* Object info */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Deleting Attributes in Dense Storage\n"));
@@ -2561,21 +2561,21 @@ test_attr_dense_delete(hid_t fcpl, hid_t fapl)
static void
test_attr_dense_rename(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
- char new_attrname[NAME_BUF_SIZE]; /* New name of attribute */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char new_attrname[NAME_BUF_SIZE]; /* New name of attribute */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_dense; /* Are attributes stored densely? */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
H5O_info_t oinfo; /* Object info */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Renaming Attributes in Dense Storage\n"));
@@ -2720,21 +2720,21 @@ test_attr_dense_rename(hid_t fcpl, hid_t fapl)
static void
test_attr_dense_unlink(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_dense; /* Are attributes stored densely? */
size_t mesg_count; /* # of shared messages */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
H5O_info_t oinfo; /* Object info */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Unlinking Object with Attributes in Dense Storage\n"));
@@ -2850,19 +2850,19 @@ test_attr_dense_unlink(hid_t fcpl, hid_t fapl)
static void
test_attr_dense_limits(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned max_compact, rmax_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense, rmin_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_dense; /* Are attributes stored densely? */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Phase Change Limits For Attributes in Dense Storage\n"));
@@ -3015,23 +3015,23 @@ test_attr_dense_limits(hid_t fcpl, hid_t fapl)
static void
test_attr_dense_dup_ids(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t gid1, gid2; /* Group ID */
- hid_t sid, sid2; /* Dataspace ID */
- hid_t attr, attr2, add_attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
- hsize_t dims[] = {ATTR1_DIM1};
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t gid1, gid2; /* Group ID */
+ hid_t sid, sid2; /* Dataspace ID */
+ hid_t attr, attr2, add_attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hsize_t dims[] = {ATTR1_DIM1};
int read_data1[ATTR1_DIM1]={0}; /* Buffer for reading attribute */
int rewrite_data[ATTR1_DIM1]={1234, -423, 9907256}; /* Test data for rewrite */
unsigned scalar_data = 1317; /* scalar data for attribute */
unsigned read_scalar; /* variable for reading attribute*/
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_dense; /* Are attributes stored densely? */
unsigned u, i; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing operations with two IDs for Dense Storage\n"));
@@ -3538,24 +3538,24 @@ test_attr_dense_dup_ids(hid_t fcpl, hid_t fapl)
static void
test_attr_big(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t big_sid; /* "Big" dataspace ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t big_sid; /* "Big" dataspace ID */
hsize_t dims[ATTR6_RANK] = {ATTR6_DIM1, ATTR6_DIM2, ATTR6_DIM3}; /* Attribute dimensions */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
unsigned nshared_indices; /* # of shared message indices */
H5F_libver_t low, high; /* File format bounds */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Storing 'Big' Attributes in Dense Storage\n"));
@@ -3808,20 +3808,20 @@ test_attr_big(hid_t fcpl, hid_t fapl)
static void
test_attr_null_space(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t null_sid; /* "null" dataspace ID */
- hid_t attr_sid; /* Attribute's dataspace ID */
- hid_t attr; /* Attribute ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t null_sid; /* "null" dataspace ID */
+ hid_t attr_sid; /* Attribute's dataspace ID */
+ hid_t attr; /* Attribute ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned value; /* Attribute value */
htri_t cmp; /* Results of comparison */
hsize_t storage_size; /* Size of storage for attribute */
H5A_info_t ainfo; /* Attribute info */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Storing Attributes with 'null' dataspace\n"));
@@ -4027,11 +4027,11 @@ static void
test_attr_deprec(hid_t fcpl, hid_t fapl)
{
#ifndef H5_NO_DEPRECATED_SYMBOLS
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- herr_t ret; /* Generic return value */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Deprecated Attribute Routines\n"));
@@ -4135,15 +4135,15 @@ test_attr_deprec(hid_t fcpl, hid_t fapl)
static void
test_attr_many(hbool_t new_format, hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t gid; /* Group ID */
- hid_t sid; /* Dataspace ID */
- hid_t aid; /* Attribute ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t fid; /* HDF5 File ID */
+ hid_t gid; /* Group ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t aid; /* Attribute ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned nattr = (new_format ? NATTR_MANY_NEW : NATTR_MANY_OLD); /* Number of attributes */
htri_t exists; /* Whether the attribute exists or not */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Storing Many Attributes\n"));
@@ -4264,14 +4264,14 @@ test_attr_many(hbool_t new_format, hid_t fcpl, hid_t fapl)
static void
test_attr_corder_create_basic(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dataset; /* Dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned crt_order_flags;/* Creation order flags */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
- herr_t ret; /* Generic return value */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Basic Code for Attributes with Creation Order Info\n"));
@@ -4380,21 +4380,21 @@ test_attr_corder_create_basic(hid_t fcpl, hid_t fapl)
static void
test_attr_corder_create_compact(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Compact Storage of Attributes with Creation Order Info\n"));
@@ -4578,23 +4578,23 @@ test_attr_corder_create_compact(hid_t fcpl, hid_t fapl)
static void
test_attr_corder_create_dense(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Dense Storage of Attributes with Creation Order Info\n"));
@@ -4812,7 +4812,7 @@ test_attr_corder_create_reopen(hid_t fcpl, hid_t fapl)
hid_t sid = -1; /* Dataspace ID */
hid_t aid = -1; /* Attribute ID */
int buf; /* Attribute data */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Creating Attributes w/New & Old Format\n"));
@@ -4913,23 +4913,23 @@ test_attr_corder_create_reopen(hid_t fcpl, hid_t fapl)
static void
test_attr_corder_transition(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Storage Transitions of Attributes with Creation Order Info\n"));
@@ -5319,28 +5319,28 @@ test_attr_corder_transition(hid_t fcpl, hid_t fapl)
static void
test_attr_corder_delete(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
hbool_t reopen_file; /* Whether to re-open the file before deleting group */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
#ifdef LATER
h5_stat_size_t empty_size; /* Size of empty file */
h5_stat_size_t file_size; /* Size of file after operating on it */
#endif /* LATER */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Deleting Object w/Dense Attribute Storage and Creation Order Info\n"));
@@ -5517,7 +5517,7 @@ test_attr_corder_delete(hid_t fcpl, hid_t fapl)
* Purpose: Support routine for attr_info_by_idx, to verify the attribute
* info is correct for an attribute
*
- * Note: This routine assumes that the attributes have been added to the
+ * Note: This routine assumes that the attributes have been added to the
* object in alphabetical order.
*
* Return: Success: 0
@@ -5535,7 +5535,7 @@ attr_info_by_idx_check(hid_t obj_id, const char *attrname, hsize_t n,
char tmpname[NAME_BUF_SIZE]; /* Temporary attribute name */
H5A_info_t ainfo; /* Attribute info struct */
int old_nerrs; /* Number of errors when entering this check */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Retrieve the current # of reported errors */
old_nerrs = GetTestNumErrs();
@@ -5667,26 +5667,26 @@ attr_info_by_idx_check(hid_t obj_id, const char *attrname, hsize_t n,
static void
test_attr_info_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
H5A_info_t ainfo; /* Attribute information */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
hbool_t use_index; /* Use index on creation order values */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
char tmpname[NAME_BUF_SIZE]; /* Temporary attribute name */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
@@ -5877,28 +5877,28 @@ test_attr_info_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
static void
test_attr_delete_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
H5A_info_t ainfo; /* Attribute information */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
H5_index_t idx_type; /* Type of index to operate on */
H5_iter_order_t order;   /* Order within the index */
hbool_t use_index; /* Use index on creation order values */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
char tmpname[NAME_BUF_SIZE]; /* Temporary attribute name */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
@@ -6818,16 +6818,16 @@ attr_iterate_check(hid_t fid, const char *dsetname, hid_t obj_id,
static void
test_attr_iterate2(hbool_t new_format, hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
@@ -6838,10 +6838,10 @@ test_attr_iterate2(hbool_t new_format, hid_t fcpl, hid_t fapl)
hsize_t idx; /* Start index for iteration */
hbool_t use_index; /* Use index on creation order values */
const char *dsetname; /* Name of dataset for attributes */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
@@ -7134,7 +7134,7 @@ attr_open_by_idx_check(hid_t obj_id, H5_index_t idx_type, H5_iter_order_t order,
H5A_info_t ainfo; /* Attribute info */
int old_nerrs; /* Number of errors when entering this check */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Retrieve the current # of reported errors */
old_nerrs = GetTestNumErrs();
@@ -7182,26 +7182,26 @@ attr_open_by_idx_check(hid_t obj_id, H5_index_t idx_type, H5_iter_order_t order,
static void
test_attr_open_by_idx(hbool_t new_format, hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
H5_index_t idx_type; /* Type of index to operate on */
H5_iter_order_t order;   /* Order within the index */
hbool_t use_index; /* Use index on creation order values */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
@@ -7453,10 +7453,10 @@ attr_open_check(hid_t fid, const char *dsetname, hid_t obj_id,
{
hid_t attr_id; /* ID of attribute to test */
H5A_info_t ainfo; /* Attribute info */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
int old_nerrs; /* Number of errors when entering this check */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Retrieve the current # of reported errors */
old_nerrs = GetTestNumErrs();
@@ -7529,25 +7529,25 @@ attr_open_check(hid_t fid, const char *dsetname, hid_t obj_id,
static void
test_attr_open_by_name(hbool_t new_format, hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
hbool_t use_index; /* Use index on creation order values */
const char *dsetname; /* Name of dataset for attributes */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
@@ -7783,25 +7783,25 @@ test_attr_open_by_name(hbool_t new_format, hid_t fcpl, hid_t fapl)
static void
test_attr_create_by_name(hbool_t new_format, hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t dset1, dset2, dset3; /* Dataset IDs */
- hid_t my_dataset; /* Current dataset ID */
- hid_t sid; /* Dataspace ID */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
+ hid_t fid; /* HDF5 File ID */
+ hid_t dset1, dset2, dset3; /* Dataset IDs */
+ hid_t my_dataset; /* Current dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
unsigned max_compact; /* Maximum # of links to store in group compactly */
unsigned min_dense; /* Minimum # of links to store in group "densely" */
- htri_t is_empty; /* Are there any attributes? */
- htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_empty; /* Are there any attributes? */
+ htri_t is_dense; /* Are attributes stored densely? */
hsize_t nattrs; /* Number of attributes on object */
hsize_t name_count; /* # of records in name index */
hsize_t corder_count; /* # of records in creation order index */
hbool_t use_index; /* Use index on creation order values */
const char *dsetname; /* Name of dataset for attributes */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned curr_dset; /* Current dataset to work on */
unsigned u; /* Local index variable */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Create dataspace for dataset & attributes */
sid = H5Screate(H5S_SCALAR);
@@ -8006,28 +8006,28 @@ test_attr_create_by_name(hbool_t new_format, hid_t fcpl, hid_t fapl)
static void
test_attr_shared_write(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* File ID */
- hid_t my_fcpl; /* File creation property list ID */
- hid_t dataset, dataset2; /* Dataset IDs */
- hid_t attr_tid; /* Attribute's datatype ID */
- hid_t sid, big_sid; /* Dataspace IDs */
- hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute */
+ hid_t fid; /* File ID */
+ hid_t my_fcpl; /* File creation property list ID */
+ hid_t dataset, dataset2; /* Dataset IDs */
+ hid_t attr_tid; /* Attribute's datatype ID */
+ hid_t sid, big_sid; /* Dataspace IDs */
+ hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
- htri_t is_shared; /* Are attributes shared? */
+ htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_shared; /* Are attributes shared? */
hsize_t shared_refcount; /* Reference count of shared attribute */
unsigned attr_value; /* Attribute value */
- unsigned big_value[SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3]; /* Data for "big" attribute */
+ unsigned big_value[SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3]; /* Data for "big" attribute */
size_t mesg_count; /* # of shared messages */
unsigned test_shared; /* Index over shared component type */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Writing Shared & Unshared Attributes in Compact & Dense Storage\n"));
@@ -8331,29 +8331,29 @@ test_attr_shared_write(hid_t fcpl, hid_t fapl)
static void
test_attr_shared_rename(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* HDF5 File ID */
- hid_t my_fcpl; /* File creation property list ID */
- hid_t dataset, dataset2; /* Dataset IDs */
- hid_t attr_tid; /* Attribute's datatype ID */
- hid_t sid, big_sid; /* Dataspace IDs */
- hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
- char attrname2[NAME_BUF_SIZE]; /* Name of attribute on second dataset */
+ hid_t fid; /* HDF5 File ID */
+ hid_t my_fcpl; /* File creation property list ID */
+ hid_t dataset, dataset2; /* Dataset IDs */
+ hid_t attr_tid; /* Attribute's datatype ID */
+ hid_t sid, big_sid; /* Dataspace IDs */
+ hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
+ char attrname2[NAME_BUF_SIZE]; /* Name of attribute on second dataset */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
- htri_t is_shared; /* Are attributes shared? */
+ htri_t is_dense; /* Are attributes stored densely? */
+ htri_t is_shared; /* Are attributes shared? */
hsize_t shared_refcount; /* Reference count of shared attribute */
unsigned attr_value; /* Attribute value */
- unsigned big_value[SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3]; /* Data for "big" attribute */
+ unsigned big_value[SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3]; /* Data for "big" attribute */
size_t mesg_count; /* # of shared messages */
unsigned test_shared; /* Index over shared component type */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Renaming Shared & Unshared Attributes in Compact & Dense Storage\n"));
@@ -8773,28 +8773,28 @@ test_attr_shared_rename(hid_t fcpl, hid_t fapl)
static void
test_attr_shared_delete(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* File ID */
- hid_t my_fcpl; /* File creation property list ID */
- hid_t dataset, dataset2; /* Dataset IDs */
- hid_t attr_tid; /* Attribute's datatype ID */
- hid_t sid, big_sid; /* Dataspace IDs */
- hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
+ hid_t fid; /* File ID */
+ hid_t my_fcpl; /* File creation property list ID */
+ hid_t dataset, dataset2; /* Dataset IDs */
+ hid_t attr_tid; /* Attribute's datatype ID */
+ hid_t sid, big_sid; /* Dataspace IDs */
+ hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
- htri_t is_shared; /* Is attributes shared? */
+ htri_t is_dense; /* Are attributes stored densely? */
+    htri_t  is_shared;              /* Are attributes shared? */
hsize_t shared_refcount; /* Reference count of shared attribute */
unsigned attr_value; /* Attribute value */
- unsigned big_value[SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3]; /* Data for "big" attribute */
+ unsigned big_value[SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3]; /* Data for "big" attribute */
size_t mesg_count; /* # of shared messages */
unsigned test_shared; /* Index over shared component type */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Deleting Shared & Unshared Attributes in Compact & Dense Storage\n"));
@@ -9137,28 +9137,28 @@ test_attr_shared_delete(hid_t fcpl, hid_t fapl)
static void
test_attr_shared_unlink(hid_t fcpl, hid_t fapl)
{
- hid_t fid; /* File ID */
- hid_t my_fcpl; /* File creation property list ID */
- hid_t dataset, dataset2; /* Dataset IDs */
- hid_t attr_tid; /* Attribute's datatype ID */
- hid_t sid, big_sid; /* Dataspace IDs */
- hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
- hid_t attr; /* Attribute ID */
- hid_t dcpl; /* Dataset creation property list ID */
- char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
+ hid_t fid; /* File ID */
+ hid_t my_fcpl; /* File creation property list ID */
+ hid_t dataset, dataset2; /* Dataset IDs */
+ hid_t attr_tid; /* Attribute's datatype ID */
+ hid_t sid, big_sid; /* Dataspace IDs */
+ hsize_t big_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3}; /* Dimensions for "big" attribute */
+ hid_t attr; /* Attribute ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ char attrname[NAME_BUF_SIZE]; /* Name of attribute on first dataset */
unsigned max_compact; /* Maximum # of attributes to store compactly */
unsigned min_dense; /* Minimum # of attributes to store "densely" */
- htri_t is_dense; /* Are attributes stored densely? */
- htri_t is_shared; /* Is attributes shared? */
+ htri_t is_dense; /* Are attributes stored densely? */
+    htri_t  is_shared;              /* Are attributes shared? */
hsize_t shared_refcount; /* Reference count of shared attribute */
unsigned attr_value; /* Attribute value */
- unsigned big_value[SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3]; /* Data for "big" attribute */
+ unsigned big_value[SPACE1_DIM1 * SPACE1_DIM2 * SPACE1_DIM3]; /* Data for "big" attribute */
size_t mesg_count; /* # of shared messages */
unsigned test_shared; /* Index over shared component type */
unsigned u; /* Local index variable */
h5_stat_size_t empty_filesize; /* Size of empty file */
h5_stat_size_t filesize; /* Size of file after modifications */
- herr_t ret; /* Generic return value */
+ herr_t ret; /* Generic return value */
/* Output message about test being performed */
MESSAGE(5, ("Testing Unlinking Object with Shared Attributes in Compact & Dense Storage\n"));
@@ -10613,7 +10613,7 @@ test_attr_bug9(hid_t fcpl, hid_t fapl)
/* Create enough attributes to cause a change to dense storage */
for(i = 0; i < max_compact + 1; i++) {
/* Create attribute */
- snprintf(aname, sizeof(aname), "%u", i);
+ HDsnprintf(aname, sizeof(aname), "%u", i);
aid = H5Acreate2(gid, aname, H5T_NATIVE_CHAR, sid, H5P_DEFAULT, H5P_DEFAULT);
CHECK(aid, FAIL, "H5Acreate2");
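The snprintf -> HDsnprintf change in the hunk above moves the test onto HDF5's portability wrappers. To the best of my understanding the HD-prefixed names are thin macros over the corresponding C library calls, declared in the private headers roughly as sketched here (a paraphrase, not the exact H5private.h text):

    /* Sketch of the HD-prefix convention (cf. H5private.h). Each definition is
     * guarded so a platform or build can substitute its own implementation. */
    #ifndef HDsnprintf
        #define HDsnprintf snprintf
    #endif

Using the wrapper keeps the test consistent with the rest of the library and gives one central place to patch platforms whose snprintf is missing or nonconforming.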
@@ -10654,8 +10654,8 @@ test_attr_bug9(hid_t fcpl, hid_t fapl)
void
test_attr(void)
{
- hid_t fapl = (-1), fapl2 = (-1); /* File access property lists */
- hid_t fcpl = (-1), fcpl2 = (-1); /* File creation property lists */
+ hid_t fapl = (-1), fapl2 = (-1); /* File access property lists */
+ hid_t fcpl = (-1), fcpl2 = (-1); /* File creation property lists */
hbool_t new_format; /* Whether to use the new format or not */
hbool_t use_shared; /* Whether to use shared attributes or not */
herr_t ret; /* Generic return value */
@@ -10842,13 +10842,13 @@ test_attr(void)
/*-------------------------------------------------------------------------
- * Function: cleanup_attr
+ * Function: cleanup_attr
*
- * Purpose: Cleanup temporary test files
+ * Purpose: Cleanup temporary test files
*
- * Return: none
+ * Return: none
*
- * Programmer: Albert Cheng
+ * Programmer: Albert Cheng
* July 2, 1998
*
* Modifications: