From 2a147be72f29bbe5077bdb7b21b74fc5d24f1b44 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Sat, 11 Jun 2022 23:06:49 -0500 Subject: Hdf5 1 12 sync (#1794) --- .github/workflows/pr-check.yml | 11 +- CMakeInstallation.cmake | 8 +- CMakeLists.txt | 3 + Makefile.am | 4 +- config/cmake/HDF5_Examples.cmake.in | 2 +- config/cmake/HDFCompilerFlags.cmake | 1 + config/cmake/libh5cc.in | 2 +- config/cmake_ext_mod/ConfigureChecks.cmake | 4 +- config/cmake_ext_mod/HDFMacros.cmake | 8 +- config/gnu-flags | 1 + config/gnu-warnings/7 | 1 - config/gnu-warnings/error-7 | 1 + config/ibm-aix | 8 +- configure.ac | 6 +- doxygen/CMakeLists.txt | 2 +- doxygen/Doxyfile.in | 2 +- doxygen/dox/ReferenceManual.dox | 2 +- examples/testh5cc.sh.in | 2 +- fortran/src/h5fc.in | 2 +- hl/test/test_h5do_compat.c | 20 +- hl/test/test_ld.c | 282 +++++++++++----------- java/examples/datasets/JavaDatasetExample.sh.in | 16 +- java/examples/datatypes/JavaDatatypeExample.sh.in | 16 +- java/examples/groups/JavaGroupExample.sh.in | 16 +- java/examples/intro/JavaIntroExample.sh.in | 16 +- java/src/hdf/hdf5lib/HDF5Constants.java | 4 + java/test/junit.sh.in | 13 +- release_docs/README_HDF5_CMake | 22 +- release_docs/RELEASE.txt | 46 +++- test/CMakeTests.cmake | 32 +-- test/h5test.c | 2 +- utils/mirror_vfd/mirror_server_stop.c | 2 +- 32 files changed, 310 insertions(+), 247 deletions(-) create mode 100644 config/gnu-warnings/error-7 diff --git a/.github/workflows/pr-check.yml b/.github/workflows/pr-check.yml index 65bf42b..5dd923f 100644 --- a/.github/workflows/pr-check.yml +++ b/.github/workflows/pr-check.yml @@ -1,10 +1,17 @@ -name: hdf5 dev CI +name: PR hdf5 dev CI # Controls when the action will run. Triggers the workflow on push or pull request on: pull_request: branches: [ develop, hdf5_1_12, hdf5_1_10, hdf5_1_8 ] - + paths-ignore: + - '.github/**' + - 'doc/**' + - 'release_docs/**' + - 'ACKNOWLEDGEMENTS' + - 'COPYING**' + - '**.md' + # A workflow run is made up of one or more jobs that can run sequentially or in parallel jobs: # This workflow contains a single job called "build" diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index cd1f986..1fe516c 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -444,11 +444,11 @@ The HDF5 data model, file format, API, library, and tools are open and distribut if (HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "GIT" OR HDF5_ALLOW_EXTERNAL_SUPPORT MATCHES "TGZ") if (ZLIB_FOUND AND ZLIB_USE_EXTERNAL) if (WIN32) - set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;ALL;/") + set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;ALL;/") else () - set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;libraries;/") - set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;headers;/") - set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};ZLIB;configinstall;/") + set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;libraries;/") + set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;headers;/") + set (CPACK_INSTALL_CMAKE_PROJECTS "${CPACK_INSTALL_CMAKE_PROJECTS};${ZLIB_INCLUDE_DIR_GEN};HDF5_ZLIB;configinstall;/") endif () endif () if (SZIP_FOUND AND SZIP_USE_EXTERNAL) diff --git a/CMakeLists.txt b/CMakeLists.txt index 
18b7b87..6ef148e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -2,10 +2,13 @@ cmake_minimum_required (VERSION 3.12) project (HDF5 C) if (POLICY CMP0074) + # find_package() uses _ROOT variables. cmake_policy (SET CMP0074 NEW) endif () if (POLICY CMP0083) + # To control generation of Position Independent Executable (PIE) or not, + # some flags are required at link time. cmake_policy (SET CMP0083 NEW) endif () diff --git a/Makefile.am b/Makefile.am index 8518114..2a544f4 100644 --- a/Makefile.am +++ b/Makefile.am @@ -88,9 +88,9 @@ else TOOLS_DIR= endif -SUBDIRS = src $(TESTSERIAL_DIR) $(TESTPARALLEL_DIR) bin utils $(TOOLS_DIR) . \ +SUBDIRS = src $(TESTSERIAL_DIR) $(TESTPARALLEL_DIR) bin $(TOOLS_DIR) utils . \ $(CXX_DIR) $(FORTRAN_DIR) $(JAVA_DIR) $(HDF5_HL_DIR) -DIST_SUBDIRS = src test testpar utils tools . c++ fortran hl examples java +DIST_SUBDIRS = src test testpar tools utils . c++ fortran hl examples java # Some files generated during configure that should be cleaned DISTCLEANFILES=config/stamp1 config/stamp2 diff --git a/config/cmake/HDF5_Examples.cmake.in b/config/cmake/HDF5_Examples.cmake.in index 40bc204..4e55786 100644 --- a/config/cmake/HDF5_Examples.cmake.in +++ b/config/cmake/HDF5_Examples.cmake.in @@ -77,7 +77,7 @@ set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DSITE:STRING=${CTEST_SITE} -DBUILDN #TAR_SOURCE - name of tarfile #if(NOT DEFINED TAR_SOURCE) -# set(CTEST_USE_TAR_SOURCE "HDF5Examples-1.12.9-Source") +# set(CTEST_USE_TAR_SOURCE "HDF5Examples-1.12.10-Source") #endif() ############################################################################################################### diff --git a/config/cmake/HDFCompilerFlags.cmake b/config/cmake/HDFCompilerFlags.cmake index eb43b73..61218dc 100644 --- a/config/cmake/HDFCompilerFlags.cmake +++ b/config/cmake/HDFCompilerFlags.cmake @@ -231,6 +231,7 @@ if (CMAKE_C_COMPILER_ID STREQUAL "GNU") # Append more extra warning flags that only gcc 7.x+ knows about if (NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 7.0) ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/7") + ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/error-7") if (HDF5_ENABLE_DEV_WARNINGS) ADD_H5_FLAGS (H5_CFLAGS "${HDF5_SOURCE_DIR}/config/gnu-warnings/developer-7") #else () diff --git a/config/cmake/libh5cc.in b/config/cmake/libh5cc.in index f5d8d4c..6d54088 100644 --- a/config/cmake/libh5cc.in +++ b/config/cmake/libh5cc.in @@ -29,7 +29,7 @@ printf 'dir is %s\n' "$dir" # Show the configuration summary of the library recorded in the -# libhdf5.settings file reside in the lib directory. +# libhdf5.settings file residing in the lib directory. showconfigure() { cat $dir/lib/libhdf5.settings diff --git a/config/cmake_ext_mod/ConfigureChecks.cmake b/config/cmake_ext_mod/ConfigureChecks.cmake index 7b2c4eb..41f53e1 100644 --- a/config/cmake_ext_mod/ConfigureChecks.cmake +++ b/config/cmake_ext_mod/ConfigureChecks.cmake @@ -293,10 +293,10 @@ if (MINGW OR NOT WINDOWS) else () set (TEST_LFS_WORKS "" CACHE INTERNAL ${msg}) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "${msg}... no") + message (VERBOSE "${msg}... 
no") endif () file (APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log - "Test TEST_LFS_WORKS Run failed with the following exit code:\n ${TEST_LFS_WORKS_RUN}\n" + "Test TEST_LFS_WORKS Run failed with the following exit code:\n ${TEST_LFS_WORKS_RUN}\n" ) endif () else () diff --git a/config/cmake_ext_mod/HDFMacros.cmake b/config/cmake_ext_mod/HDFMacros.cmake index 0f9b367..260fd55 100644 --- a/config/cmake_ext_mod/HDFMacros.cmake +++ b/config/cmake_ext_mod/HDFMacros.cmake @@ -267,10 +267,14 @@ macro (HDF_README_PROPERTIES target_fortran) elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "^19.*") if (${CMAKE_C_COMPILER_VERSION} MATCHES "^19.0.*") set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2015") - elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "^19.16.*") + elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "^19.1.*") set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2017") - else () #19.23 + elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "^19.2.*") set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2019") + elseif (${CMAKE_C_COMPILER_VERSION} MATCHES "^19.3.*") + set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO 2022") + else () + set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO ???") endif () else () set (BINARY_PLATFORM "${BINARY_PLATFORM}, using VISUAL STUDIO ${CMAKE_C_COMPILER_VERSION}") diff --git a/config/gnu-flags b/config/gnu-flags index fe6626d..e25f59b 100644 --- a/config/gnu-flags +++ b/config/gnu-flags @@ -254,6 +254,7 @@ if test "X-gcc" = "X-$cc_vendor"; then # gcc >= 7 if test $cc_vers_major -ge 7; then H5_CFLAGS="$H5_CFLAGS $(load_gnu_arguments 7)" + H5_ECFLAGS="$H5_ECFLAGS $(load_gnu_arguments error-7)" DEVELOPER_WARNING_CFLAGS="$DEVELOPER_WARNING_CFLAGS $(load_gnu_arguments developer-7)" fi diff --git a/config/gnu-warnings/7 b/config/gnu-warnings/7 index 9d5b44d..cb8834a 100644 --- a/config/gnu-warnings/7 +++ b/config/gnu-warnings/7 @@ -3,4 +3,3 @@ -Wduplicated-branches -Wformat-overflow=2 -Wformat-truncation=1 --Wrestrict diff --git a/config/gnu-warnings/error-7 b/config/gnu-warnings/error-7 new file mode 100644 index 0000000..8eb9fe4 --- /dev/null +++ b/config/gnu-warnings/error-7 @@ -0,0 +1 @@ +-Werror=restrict diff --git a/config/ibm-aix b/config/ibm-aix index b4a558d..f48f9d8 100644 --- a/config/ibm-aix +++ b/config/ibm-aix @@ -47,10 +47,10 @@ if test "X-" = "X-$FC"; then fi fi -# While we try to avoid setting FCFLAGS directly for use in compilation, in -# this case we need the -k flag present for some configure checks. As such, -# the configure script saves the user's set FCFLAGS before running, and -# restores them when complete. We must then set up both FCFLAGS and H5_FCFLAGS +# While we try to avoid setting FCFLAGS directly for use in compilation, in +# this case we need the -k flag present for some configure checks. As such, +# the configure script saves the user's set FCFLAGS before running, and +# restores them when complete. We must then set up both FCFLAGS and H5_FCFLAGS # to ensure the flag is present for both configure as well as for the build. if test "X-" = "X-$f9x_flags_set"; then F9XSUFFIXFLAG="-qsuffix=f=f90" diff --git a/configure.ac b/configure.ac index b3ba87c..d32fdd5 100644 --- a/configure.ac +++ b/configure.ac @@ -868,7 +868,7 @@ AC_MSG_CHECKING([if the high-level tools are enabled]) AC_ARG_ENABLE([hltools], [AS_HELP_STRING([--enable-hltools], [Enable the high-level tools. 
- [default=yes)] + [default=yes] ])], [HDF5_HL_TOOLS=$enableval]) @@ -1135,8 +1135,8 @@ TEST_EXPRESS_LEVEL_DEFAULT=3 AC_MSG_CHECKING([if HDF5 testing intensity level is set]) AC_ARG_ENABLE([test-express], - [AS_HELP_STRING([--enable-test-express], - [Set HDF5 testing intensity level (0-3) [0 = exhaustive testing; 3 = quicker testing; default=3]])], + [AS_HELP_STRING([--enable-test-express=(0-3)], + [Set HDF5 testing intensity level (0-3) [0 = exhaustive testing; 3 = quicker testing; default=3] Set environment variable HDF5TestExpress to override configured setting])], [TEST_EXPRESS_LEVEL_DEFAULT=$enableval]) case "X-$TEST_EXPRESS_LEVEL_DEFAULT" in diff --git a/doxygen/CMakeLists.txt b/doxygen/CMakeLists.txt index 920fafa..e75c899 100644 --- a/doxygen/CMakeLists.txt +++ b/doxygen/CMakeLists.txt @@ -21,7 +21,7 @@ if (DOXYGEN_FOUND) set (DOXYGEN_HTML_HEADER ${HDF5_DOXYGEN_DIR}/hdf5_header.html) set (DOXYGEN_HTML_FOOTER ${HDF5_DOXYGEN_DIR}/hdf5_footer.html) set (DOXYGEN_HTML_EXTRA_STYLESHEET ${HDF5_DOXYGEN_DIR}/hdf5doxy.css) - set (DOXYGEN_HTML_EXTRA_FILES "${HDF5_DOXYGEN_DIR}/hdf5_navtree_hacks.js ${HDF5_DOXYGEN_DIR}/img/FF-IH_FileGroup.gif ${HDF5_DOXYGEN_DIR}/img/FF-IH_FileObject.gif ${HDF5_DOXYGEN_DIR}/img/FileFormatSpecChunkDiagram.jpg ${HDF5_DOXYGEN_DIR}/img/ftv2node.png ${HDF5_DOXYGEN_DIR}/img/ftv2pnode.png ${HDF5_DOXYGEN_DIR}/img/HDFG-logo.png ${HDF5_DOXYGEN_DIR}/img/IOFlow2.gif ${HDF5_DOXYGEN_DIR}/img/IOFlow3.gif ${HDF5_DOXYGEN_DIR}/img/IOFlow.gif ${HDF5_DOXYGEN_DIR}/img/PaletteExample1.gif ${HDF5_DOXYGEN_DIR}/img/Palettes.fm.anc.gif") + set (DOXYGEN_HTML_EXTRA_FILES "${HDF5_DOXYGEN_DIR}/hdf5_navtree_hacks.js") set (DOXYGEN_TAG_FILE ${HDF5_BINARY_DIR}/hdf5.tag) set (DOXYGEN_SERVER_BASED_SEARCH NO) set (DOXYGEN_EXTERNAL_SEARCH NO) diff --git a/doxygen/Doxyfile.in b/doxygen/Doxyfile.in index 198ebd9..6d82765 100644 --- a/doxygen/Doxyfile.in +++ b/doxygen/Doxyfile.in @@ -942,7 +942,7 @@ EXAMPLE_RECURSIVE = NO # that contain images that are to be included in the documentation (see the # \image command). -IMAGE_PATH = +IMAGE_PATH = @HDF5_DOXYGEN_DIR@/img # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program diff --git a/doxygen/dox/ReferenceManual.dox b/doxygen/dox/ReferenceManual.dox index cc0f99b..c477813 100644 --- a/doxygen/dox/ReferenceManual.dox +++ b/doxygen/dox/ReferenceManual.dox @@ -1,4 +1,4 @@ -/** \page RM Reference Manual +/** \page RM HDF5 Reference Manual The functions provided by the HDF5 C-API are grouped into the following \Emph{modules}: diff --git a/examples/testh5cc.sh.in b/examples/testh5cc.sh.in index e90f94b..1e68ad8 100644 --- a/examples/testh5cc.sh.in +++ b/examples/testh5cc.sh.in @@ -400,7 +400,7 @@ echo "***"Version compatibility tests. # else if H5_USE_16_API_DEFAULT; # then v16main works. # else v18main works and -DH5_USE_16_API_DEFAULT v16main also works. -# as new versions with versioned functions are added, they will work with and +# As new versions with versioned functions are added, they will work with and # should be added to H5_NO_DEPRECATED_SYMBOLS and to the else section, with and # without the -DH5_USE__API_DEFAULT flag. A new H5_USE__API_DEFAULT section # should also be added. 
diff --git a/fortran/src/h5fc.in b/fortran/src/h5fc.in index 661fde5..79401f8 100644 --- a/fortran/src/h5fc.in +++ b/fortran/src/h5fc.in @@ -38,7 +38,7 @@ HL="@HL@" ## $FLINKER $FCFLAGS $H5BLD_FCFLAGS $F9XSUFFIXFLAG $LDFLAGS $LIBS ## ## $fmodules $link_objs $link_args $shared_link ## ## ## -## These settings can be overridden by setting HDF5_FCFLAGS, ## +## These settings can be overridden by setting HDF5_FCFLAGS, ## ## HDF5_LDFLAGS, or HDF5_LIBS in the environment. ## ## ## ############################################################################ diff --git a/hl/test/test_h5do_compat.c b/hl/test/test_h5do_compat.c index 6f19d4b..95ae98c 100644 --- a/hl/test/test_h5do_compat.c +++ b/hl/test/test_h5do_compat.c @@ -125,7 +125,7 @@ test_direct_chunk_write(hid_t did) offset[0] = 0; for (i = 0; i < NX / CHUNK_NX; i++) { if (H5DOwrite_chunk(did, H5P_DEFAULT, filter_mask, offset, data_size, chunk_data) < 0) - TEST_ERROR + TEST_ERROR; offset[0] += CHUNK_NX; } @@ -171,9 +171,9 @@ test_direct_chunk_read(hid_t did) /* Create dataspaces for reading */ if ((mem_sid = H5Screate_simple(1, chunk_dims, NULL)) < 0) - TEST_ERROR + TEST_ERROR; if ((file_sid = H5Screate_simple(1, dims, NULL)) < 0) - TEST_ERROR + TEST_ERROR; /* For each chunk in the dataset, compare the result of H5Dread and H5DOread_chunk. */ for (i = 0; i < NX / CHUNK_NX; i++) { @@ -186,34 +186,34 @@ test_direct_chunk_read(hid_t did) /* Hyperslab selection equals single chunk */ if (H5Sselect_hyperslab(file_sid, H5S_SELECT_SET, start, stride, count, block) < 0) - TEST_ERROR + TEST_ERROR; /* Read the chunk back */ if (H5Dread(did, H5T_NATIVE_INT, mem_sid, file_sid, H5P_DEFAULT, check) < 0) - TEST_ERROR + TEST_ERROR; /* Read the raw chunk back */ HDmemset(chunk_data, 0, CHUNK_NX * sizeof(int)); filter_mask = UINT_MAX; offset[0] = (hsize_t)i * CHUNK_NX; if (H5DOread_chunk(did, H5P_DEFAULT, offset, &filter_mask, chunk_data) < 0) - TEST_ERROR + TEST_ERROR; /* Check filter mask return value */ if (filter_mask != 0) - TEST_ERROR + TEST_ERROR; /* Check that the values are correct */ for (j = 0; j < CHUNK_NX; j++) if (chunk_data[i] != check[i]) - TEST_ERROR + TEST_ERROR; } /* Close */ if (H5Sclose(mem_sid) < 0) - TEST_ERROR + TEST_ERROR; if (H5Sclose(file_sid) < 0) - TEST_ERROR + TEST_ERROR; PASSED(); return 0; diff --git a/hl/test/test_ld.c b/hl/test/test_ld.c index 150d04c..4b01f49 100644 --- a/hl/test/test_ld.c +++ b/hl/test/test_ld.c @@ -69,7 +69,7 @@ int two_tests[TWO_NTESTS][2] = {{2, 2}, {2, -1}, {2, 0}, {-1, 2}, {-1, -1}, {-1, { \ long __x = (long)_x, __y = (long)_y; \ if (__x != __y) \ - TEST_ERROR \ + TEST_ERROR; \ } /* Temporary buffer for reading in the test file */ @@ -247,7 +247,7 @@ test_LD_dims_params(const char *file) /* Open the copied file */ if ((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * 1. Verify failure with negative dataset id @@ -263,7 +263,7 @@ test_LD_dims_params(const char *file) * 2. Verify failure for NULL cur_dims */ if ((did = H5Dopen2(fid, DSET_ALLOC_EARLY, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; H5E_BEGIN_TRY { ret = H5LDget_dset_dims(did, NULL); @@ -271,30 +271,30 @@ test_LD_dims_params(const char *file) H5E_END_TRY; VERIFY_EQUAL(ret, FAIL) if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * 3. 
Verify for nonNULL cur_dims */ if ((did = H5Dopen2(fid, DSET_ALLOC_LATE, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5LDget_dset_dims(did, one_cur_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; VERIFY_EQUAL(one_cur_dims[0], 10) if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * 4. Verify nonNULL cur_dims for a 2-dimensional dataset */ if ((did = H5Dopen2(fid, DSET_CMPD_TWO, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5LDget_dset_dims(did, two_cur_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; VERIFY_EQUAL(two_cur_dims[0], TWO_DIM_1) VERIFY_EQUAL(two_cur_dims[1], TWO_DIM_2) if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * 5. Verify nonNULL cur_dims for dataset with H5S_NULL dataspace @@ -302,13 +302,13 @@ test_LD_dims_params(const char *file) one_cur_dims[0] = 0; if ((did = H5Dopen2(fid, DSET_NULL, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5LDget_dset_dims(did, one_cur_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; VERIFY_EQUAL(one_cur_dims[0], 0) if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * 6. Verify nonNULL cur_dims for dataset with H5S_SCALAR dataspace @@ -316,17 +316,17 @@ test_LD_dims_params(const char *file) one_cur_dims[0] = 0; if ((did = H5Dopen2(fid, DSET_SCALAR, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5LDget_dset_dims(did, one_cur_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; VERIFY_EQUAL(one_cur_dims[0], 0) if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Close the file */ if (H5Fclose(fid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; PASSED(); return 0; @@ -396,21 +396,21 @@ test_LD_dims(const char *file) /* Make a copy of the test file */ if (h5_make_local_copy(file, COPY_FILENAME) < 0) - TEST_ERROR + TEST_ERROR; /* Open the copied file */ if ((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * Testing with one-dimensional dataset: DSET_ONE */ if ((did = H5Dopen2(fid, DSET_ONE, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Retrieve dimension sizes */ if (H5LDget_dset_dims(did, one_prev_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; for (i = 0; i < ONE_NTESTS; i++) { @@ -419,11 +419,11 @@ test_LD_dims(const char *file) /* Change the dimension size */ if (H5Dset_extent(did, one_ext_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Retrieve the dimension size */ if (H5LDget_dset_dims(did, one_cur_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify that the retrieved dimension size is correct as expected */ VERIFY_EQUAL(one_cur_dims[0], one_ext_dims[0]) @@ -431,17 +431,17 @@ test_LD_dims(const char *file) /* Close the dataset */ if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * Testing with two-dimensional dataset: DSET_TWO */ if ((did = H5Dopen2(fid, DSET_TWO, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Retrieve the dimension sizes */ if (H5LDget_dset_dims(did, two_prev_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; for (i = 0; i < TWO_NTESTS; i++) { @@ -451,11 +451,11 @@ test_LD_dims(const char *file) /* Change the dimension sizes */ if (H5Dset_extent(did, two_ext_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Retrieve the dimension sizes */ if (H5LDget_dset_dims(did, two_cur_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify that the retrieved dimension sizes are correct as expected */ VERIFY_EQUAL(two_cur_dims[0], two_ext_dims[0]) @@ -464,11 +464,11 @@ test_LD_dims(const char 
*file) /* Close the dataset */ if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Close the file */ if (H5Fclose(fid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Remove the copied file */ HDremove(COPY_FILENAME); @@ -537,7 +537,7 @@ test_LD_size(const char *file) /* Open the file */ if ((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * Verify failure with an invalid dataset id @@ -556,90 +556,90 @@ test_LD_size(const char *file) /* Open dataset DSET_CMPD */ if ((did = H5Dopen2(fid, DSET_CMPD, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Return size of the whole element */ if ((dsize = H5LDget_dset_type_size(did, NULL)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the dataset's datatype and then its datatype size */ if ((dtid = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize = H5Tget_size(dtid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify case #1 */ VERIFY_EQUAL(dsize, ck_dsize) /* Get datatype id for each member */ if ((memb0_tid = H5Tget_member_type(dtid, 0)) < 0) /* "field1" */ - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((memb1_tid = H5Tget_member_type(dtid, 1)) < 0) /* "field2" */ - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((memb2_tid = H5Tget_member_type(dtid, 2)) < 0) /* "field3" */ - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((memb3_tid = H5Tget_member_type(dtid, 3)) < 0) /* "field4" */ - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Obtain size for VALID_FIELDS1: "field1,field2.a,field3,field4" */ if ((dsize = H5LDget_dset_type_size(did, VALID_FIELDS1)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the datatype size for "field1" */ if ((ck_dsize = H5Tget_size(memb0_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field2.a" */ if ((memb_tid = H5Tget_member_type(memb1_tid, 0)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize += H5Tget_size(memb_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field3" */ if ((ck_dsize += H5Tget_size(memb2_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field4" */ if ((ck_dsize += H5Tget_size(memb3_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify case #2 */ VERIFY_EQUAL(dsize, ck_dsize) /* Obtain datatype size for VALID_FIELDS2: "field2.b.a,field2.c,field4.b" */ if ((dsize = H5LDget_dset_type_size(did, VALID_FIELDS2)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the datatype size for "field2.b.a" */ if ((memb_tid = H5Tget_member_type(memb1_tid, 1)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((memb_tid2 = H5Tget_member_type(memb_tid, 0)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize = H5Tget_size(memb_tid2)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid2) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field2.c" */ if ((memb_tid = H5Tget_member_type(memb1_tid, 2)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize += H5Tget_size(memb_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field4.b" */ if ((memb_tid = H5Tget_member_type(memb3_tid, 1)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize += H5Tget_size(memb_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; 
if (H5Tclose(memb_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify case #3 */ VERIFY_EQUAL(dsize, ck_dsize) @@ -664,17 +664,17 @@ test_LD_size(const char *file) /* Closing */ if (H5Tclose(memb0_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb1_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb2_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb3_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(dtid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * Testing one-dimensional dataset with compound datatype and @@ -684,54 +684,54 @@ test_LD_size(const char *file) /* Open dataset DSET_CMPD_ESC */ if ((did = H5Dopen2(fid, DSET_CMPD_ESC, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Return size of the whole element */ if ((dsize = H5LDget_dset_type_size(did, NULL)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the dataset's datatype and then its datatype size */ if ((dtid = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize = H5Tget_size(dtid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify case #1 */ VERIFY_EQUAL(dsize, ck_dsize) /* Get datatype id for each member */ if ((memb0_tid = H5Tget_member_type(dtid, 0)) < 0) /* "field,1" */ - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((memb1_tid = H5Tget_member_type(dtid, 1)) < 0) /* "field2." */ - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((memb2_tid = H5Tget_member_type(dtid, 2)) < 0) /* "field\3" */ - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((memb3_tid = H5Tget_member_type(dtid, 3)) < 0) /* "field4," */ - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Obtain size for VALID_ESC_FIELDS1: "field\\,1,field2\\..\\.a,field\\\\3,field4\\," */ if ((dsize = H5LDget_dset_type_size(did, VALID_ESC_FIELDS1)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the datatype size for "field\\,1" */ if ((ck_dsize = H5Tget_size(memb0_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field2\\..\\.a" */ if ((memb_tid = H5Tget_member_type(memb1_tid, 0)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize += H5Tget_size(memb_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field\\\\3" */ if ((ck_dsize += H5Tget_size(memb2_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field4\\," */ if ((ck_dsize += H5Tget_size(memb3_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify case #2 */ VERIFY_EQUAL(dsize, ck_dsize) @@ -739,35 +739,35 @@ test_LD_size(const char *file) /* Obtain datatype size for VALID_ESC_FIELDS2: "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\," */ if ((dsize = H5LDget_dset_type_size(did, VALID_ESC_FIELDS2)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the datatype size for "field2\..,b.a" */ if ((memb_tid = H5Tget_member_type(memb1_tid, 1)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((memb_tid2 = H5Tget_member_type(memb_tid, 0)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize = H5Tget_size(memb_tid2)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid2) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field2\..\\c" */ if ((memb_tid = H5Tget_member_type(memb1_tid, 2)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize += H5Tget_size(memb_tid)) 
== 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Add the datatype size for "field4\,.b\," */ if ((memb_tid = H5Tget_member_type(memb3_tid, 1)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if ((ck_dsize += H5Tget_size(memb_tid)) == 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify case #3 */ VERIFY_EQUAL(dsize, ck_dsize) @@ -792,20 +792,20 @@ test_LD_size(const char *file) /* Closing */ if (H5Tclose(memb0_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb1_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb2_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(memb3_tid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Tclose(dtid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Fclose(fid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; PASSED(); return 0; @@ -868,7 +868,7 @@ test_LD_elmts_invalid(const char *file) /* Open the copied file */ if ((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * Testing two-dimensional dataset: DSET_TWO @@ -876,7 +876,7 @@ test_LD_elmts_invalid(const char *file) /* Open dataset: DSET_TWO */ if ((did = H5Dopen2(fid, DSET_TWO, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Verify failure from case #1: cur_dims and prev_dims are NULL */ ret = H5LDget_dset_elmts(did, NULL, NULL, NULL, NULL); @@ -891,11 +891,11 @@ test_LD_elmts_invalid(const char *file) VERIFY_EQUAL(ret, FAIL) if ((sid = H5Dget_space(did)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the # of dimensions and current dimension sizes */ if ((ndims = H5Sget_simple_extent_dims(sid, cur_dims, NULL)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Set up valid cur_dims and prev_dims */ for (i = 0; i < ndims; i++) @@ -917,7 +917,7 @@ test_LD_elmts_invalid(const char *file) /* Close DSET_TWO */ if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* * Testing one-dimensional dataset with compound datatype: @@ -934,11 +934,11 @@ test_LD_elmts_invalid(const char *file) /* Open dataset: DSET_CMPD */ if ((did = H5Dopen2(fid, DSET_CMPD, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Retrieve the current dimension sizes */ if (H5LDget_dset_dims(did, cur_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Set up valid cur_dims, prev_dims */ prev_dims[0] = cur_dims[0] - 1; @@ -949,11 +949,11 @@ test_LD_elmts_invalid(const char *file) /* Close DSET_CMPD */ if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Close the file */ if (H5Fclose(fid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; PASSED(); return 0; @@ -1004,7 +1004,7 @@ test_LD_elmts_one(const char *file, const char *dname, const char *fields) /* Copy the test file */ if (h5_make_local_copy(file, COPY_FILENAME) < 0) - TEST_ERROR + TEST_ERROR; for (i = 0; i < TEST_BUF_SIZE; i++) { cbuf[i].field1 = i; @@ -1021,19 +1021,19 @@ test_LD_elmts_one(const char *file, const char *dname, const char *fields) /* Open the copied file */ if ((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Open the dataset */ if ((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the dataset's data type */ if ((dtype = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get current dimension sizes before 
extending the dataset's dimension sizes */ if (H5LDget_dset_dims(did, prev_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Loop through different variations of extending the dataset */ for (i = 0; i < ONE_NTESTS; i++) { @@ -1046,16 +1046,16 @@ test_LD_elmts_one(const char *file, const char *dname, const char *fields) /* Change the dimension sizes of the dataset */ if (H5Dset_extent(did, ext_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Initialize data */ if (!HDstrcmp(dname, DSET_CMPD) || !HDstrcmp(dname, DSET_CMPD_ESC)) { if (H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, cbuf) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; } /* end if */ else if (!HDstrcmp(dname, DSET_ONE)) { if (H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; } /* end if */ /* There are changes in dimension sizes */ @@ -1065,24 +1065,24 @@ test_LD_elmts_one(const char *file, const char *dname, const char *fields) if (!HDstrcmp(fields, VALID_FIELDS1) || !HDstrcmp(fields, VALID_ESC_FIELDS1)) { /* Retrieve the elmemts in BUF */ if (H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf1) < 0) - TEST_ERROR + TEST_ERROR; for (j = 0; j < one_tests[i]; j++) VERIFY_ELMTS_VALID1(vbuf1[j], cbuf[prev_dims[0] + (hsize_t)j]) } /* end if */ else if (!HDstrcmp(fields, VALID_FIELDS2) || !HDstrcmp(fields, VALID_ESC_FIELDS2)) { /* Retrieve the elmemts in BUF */ if (H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf2) < 0) - TEST_ERROR + TEST_ERROR; for (j = 0; j < one_tests[i]; j++) VERIFY_ELMTS_VALID2(vbuf2[j], cbuf[prev_dims[0] + (hsize_t)j]) } /* end else-if */ else - TEST_ERROR + TEST_ERROR; } /* end if */ else { /* Retrieve the elmemts in BUF */ if (H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, ccbuf) < 0) - TEST_ERROR + TEST_ERROR; for (j = 0; j < one_tests[i]; j++) VERIFY_ELMTS_ALL(ccbuf[j], cbuf[prev_dims[0] + (hsize_t)j]) } /* end else-if */ @@ -1090,7 +1090,7 @@ test_LD_elmts_one(const char *file, const char *dname, const char *fields) else { /* Retrieve the elmemts in BUF */ if (H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf) < 0) - TEST_ERROR + TEST_ERROR; for (j = 0; j < one_tests[i]; j++) VERIFY_EQUAL(iibuf[j], ibuf[prev_dims[0] + (hsize_t)j]) } /* end else */ @@ -1104,11 +1104,11 @@ test_LD_elmts_one(const char *file, const char *dname, const char *fields) /* Closing */ if (H5Tclose(dtype) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Dclose(did) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; if (H5Fclose(fid) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Remove the copied file */ HDremove(COPY_FILENAME); @@ -1215,7 +1215,7 @@ test_LD_elmts_two(const char *file, const char *dname, const char *fields) /* Copy the test file */ if (h5_make_local_copy(file, COPY_FILENAME) < 0) - TEST_ERROR + TEST_ERROR; for (i = 0; i < TEST_BUF_SIZE; i++) { cbuf[i].field1 = i; @@ -1232,19 +1232,19 @@ test_LD_elmts_two(const char *file, const char *dname, const char *fields) /* Open the copied file */ if ((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Open the dataset */ if ((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get the dataset's data type */ if ((dtype = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Get current dimension sizes before extending the dataset's dimension sizes */ if (H5LDget_dset_dims(did, prev_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Loop through different 
variations of extending the dataset */ for (i = 0; i < TWO_NTESTS; i++) { @@ -1258,19 +1258,19 @@ test_LD_elmts_two(const char *file, const char *dname, const char *fields) /* Change the dimension sizes of the dataset */ if (H5Dset_extent(did, ext_dims) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; /* Initialize data */ if (!HDstrcmp(dname, DSET_CMPD_TWO)) { if (H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, cbuf) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; } /* end if */ else if (!HDstrcmp(dname, DSET_TWO)) { if (H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf) < 0) - FAIL_STACK_ERROR + FAIL_STACK_ERROR; } /* end else-if */ else - TEST_ERROR + TEST_ERROR; /* There are changes in dimension sizes */ if (two_tests[i][0] > 0 || two_tests[i][1] > 0) { @@ -1279,34 +1279,34 @@ test_LD_elmts_two(const char *file, const char *dname, const char *fields) if (!HDstrcmp(fields, VALID_FIELDS1) || !HDstrcmp(fields, VALID_ESC_FIELDS1)) { /* Retrieve the elmemts in BUF */ if (H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf1) < 0) - TEST_ERROR + TEST_ERROR; if (verify_elmts_two(TWO_CMPD_VALID1, ext_dims, prev_dims, vbuf1, cbuf) < 0) - TEST_ERROR + TEST_ERROR; } /* end if */ else if (!HDstrcmp(fields, VALID_FIELDS2) || !HDstrcmp(fields, VALID_ESC_FIELDS2)) { /* Retrieve the elmemts in BUF */ if (H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf2) < 0) - TEST_ERROR + TEST_ERROR; if (verify_elmts_two(TWO_CMPD_VALID2, ext_dims, prev_dims, vbuf2, cbuf) < 0) - TEST_ERROR + TEST_ERROR; } /* end else-if */ else - TEST_ERROR + TEST_ERROR; } /* end if */ else { /* Retrieve the elmemts in BUF */ if (H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, ccbuf) < 0) - TEST_ERROR + TEST_ERROR; if (verify_elmts_two(TWO_CMPD_NULL, ext_dims, prev_dims, ccbuf, cbuf) < 0) - TEST_ERROR + TEST_ERROR; } /* end else */ } /* end if */ else { /* DSET_TWO */ /* Retrieve the elmemts in BUF */ if (H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf) < 0) - TEST_ERROR + TEST_ERROR; if (verify_elmts_two(TWO_NONE, ext_dims, prev_dims, iibuf, ibuf) < 0) - TEST_ERROR + TEST_ERROR; } /* end else */ } /* end if */ else { diff --git a/java/examples/datasets/JavaDatasetExample.sh.in b/java/examples/datasets/JavaDatasetExample.sh.in index 9a5ddd8..b299ff2 100644 --- a/java/examples/datasets/JavaDatasetExample.sh.in +++ b/java/examples/datasets/JavaDatasetExample.sh.in @@ -29,6 +29,7 @@ CMP='cmp' DIFF='diff -c' CP='cp' DIRNAME='dirname' +BASENAME='basename' LS='ls' AWK='awk' @@ -103,17 +104,21 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." 
# Comment out this to CREATE expected file exit $EXIT_FAILURE fi + BNAME=`$BASENAME $tstfile` + if [ "$BNAME" = "libhdf5_java.dylib" ]; then + COPIED_LIBHDF5_JAVA=1 + fi fi fi done - if [ "$IS_DARWIN" = "yes" ]; then + if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then (cd $BLDLIBDIR; \ install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \ exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \ @@ -134,7 +139,7 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." @@ -155,10 +160,7 @@ CLEAN_LIBFILES_AND_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - for tstfile in $COPY_JARTESTFILES - do - $RM $BLDLIBDIR/tstfile - done + $RM -rf $BLDLIBDIR fi } diff --git a/java/examples/datatypes/JavaDatatypeExample.sh.in b/java/examples/datatypes/JavaDatatypeExample.sh.in index c6f5dbc..a82d883 100644 --- a/java/examples/datatypes/JavaDatatypeExample.sh.in +++ b/java/examples/datatypes/JavaDatatypeExample.sh.in @@ -26,6 +26,7 @@ CMP='cmp' DIFF='diff -c' CP='cp' DIRNAME='dirname' +BASENAME='basename' LS='ls' AWK='awk' @@ -100,17 +101,21 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." # Comment out this to CREATE expected file exit $EXIT_FAILURE fi + BNAME=`$BASENAME $tstfile` + if [ "$BNAME" = "libhdf5_java.dylib" ]; then + COPIED_LIBHDF5_JAVA=1 + fi fi fi done - if [ "$IS_DARWIN" = "yes" ]; then + if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then (cd $BLDLIBDIR; \ install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \ exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \ @@ -131,7 +136,7 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." @@ -152,10 +157,7 @@ CLEAN_LIBFILES_AND_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - for tstfile in $COPY_JARTESTFILES - do - $RM $BLDLIBDIR/tstfile - done + $RM -rf $BLDLIBDIR fi } diff --git a/java/examples/groups/JavaGroupExample.sh.in b/java/examples/groups/JavaGroupExample.sh.in index 86f90bf..1b84ed3 100644 --- a/java/examples/groups/JavaGroupExample.sh.in +++ b/java/examples/groups/JavaGroupExample.sh.in @@ -26,6 +26,7 @@ CMP='cmp' DIFF='diff -c' CP='cp' DIRNAME='dirname' +BASENAME='basename' LS='ls' AWK='awk' @@ -95,17 +96,21 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? 
-ne 0 ]; then echo "Error: FAILED to copy $tstfile ." # Comment out this to CREATE expected file exit $EXIT_FAILURE fi + BNAME=`$BASENAME $tstfile` + if [ "$BNAME" = "libhdf5_java.dylib" ]; then + COPIED_LIBHDF5_JAVA=1 + fi fi fi done - if [ "$IS_DARWIN" = "yes" ]; then + if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then (cd $BLDLIBDIR; \ install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \ exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \ @@ -126,7 +131,7 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." @@ -147,10 +152,7 @@ CLEAN_LIBFILES_AND_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - for tstfile in $COPY_JARTESTFILES - do - $RM $BLDLIBDIR/tstfile - done + $RM -rf $BLDLIBDIR fi } diff --git a/java/examples/intro/JavaIntroExample.sh.in b/java/examples/intro/JavaIntroExample.sh.in index a095f27..41ed694 100644 --- a/java/examples/intro/JavaIntroExample.sh.in +++ b/java/examples/intro/JavaIntroExample.sh.in @@ -26,6 +26,7 @@ CMP='cmp' DIFF='diff -c' CP='cp' DIRNAME='dirname' +BASENAME='basename' LS='ls' AWK='awk' @@ -89,17 +90,21 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." # Comment out this to CREATE expected file exit $EXIT_FAILURE fi + BNAME=`$BASENAME $tstfile` + if [ "$BNAME" = "libhdf5_java.dylib" ]; then + COPIED_LIBHDF5_JAVA=1 + fi fi fi done - if [ "$IS_DARWIN" = "yes" ]; then + if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then (cd $BLDLIBDIR; \ install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \ exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \ @@ -120,7 +125,7 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." 
@@ -141,10 +146,7 @@ CLEAN_LIBFILES_AND_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - for tstfile in $COPY_JARTESTFILES - do - $RM $BLDLIBDIR/tstfile - done + $RM -rf $BLDLIBDIR fi } diff --git a/java/src/hdf/hdf5lib/HDF5Constants.java b/java/src/hdf/hdf5lib/HDF5Constants.java index 63e5262..1b85c1f 100644 --- a/java/src/hdf/hdf5lib/HDF5Constants.java +++ b/java/src/hdf/hdf5lib/HDF5Constants.java @@ -914,6 +914,8 @@ public class HDF5Constants { /** */ public static final int H5R_OBJ_REF_BUF_SIZE = H5R_OBJ_REF_BUF_SIZE(); /** */ + public static final int H5R_DSET_REG_REF_BUF_SIZE = H5R_DSET_REG_REF_BUF_SIZE(); + /** */ public static final int H5R_OBJECT = H5R_OBJECT(); /** */ public static final int H5R_OBJECT1 = H5R_OBJECT1(); @@ -2395,6 +2397,8 @@ public class HDF5Constants { private static native final int H5R_OBJ_REF_BUF_SIZE(); + private static native final int H5R_DSET_REG_REF_BUF_SIZE(); + private static native final int H5R_OBJECT(); private static native final int H5R_OBJECT1(); diff --git a/java/test/junit.sh.in b/java/test/junit.sh.in index ca2b3a9..4e5152b 100644 --- a/java/test/junit.sh.in +++ b/java/test/junit.sh.in @@ -31,6 +31,7 @@ CMP='cmp' DIFF='diff -c' CP='cp' DIRNAME='dirname' +BASENAME='basename' LS='ls' AWK='awk' @@ -141,17 +142,21 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." # Comment out this to CREATE expected file exit $EXIT_FAILURE fi + BNAME=`$BASENAME $tstfile` + if [ "$BNAME" = "libhdf5_java.dylib" ]; then + COPIED_LIBHDF5_JAVA=1 + fi fi fi done - if [ "$IS_DARWIN" = "yes" ]; then + if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then (cd testlibs; \ install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \ exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \ @@ -172,7 +177,7 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." @@ -195,7 +200,7 @@ COPY_LIBFILES_TO_BLDLIBDIR() INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'` INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'` if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then - $CP -f $tstfile $BLDLIBDIR + $CP -fR $tstfile $BLDLIBDIR if [ $? -ne 0 ]; then echo "Error: FAILED to copy $tstfile ." diff --git a/release_docs/README_HDF5_CMake b/release_docs/README_HDF5_CMake index d5ef7bf..dfc4a8e 100644 --- a/release_docs/README_HDF5_CMake +++ b/release_docs/README_HDF5_CMake @@ -1,23 +1,23 @@ This tar file contains - build-unix.sh script to build HDF5 with CMake on unix machines - build-unix-hpc.sh script to build HDF5 with CMake on unix machines and run - tests with batch scripts (sbatch). + build-unix.sh script to build HDF5 with CMake on unix machines + build-unix-hpc.sh script to build HDF5 with CMake on unix machines and run + tests with batch scripts (sbatch). 
CTestScript.cmake - HDF5config.cmake CMake scripts for building HDF5 + HDF5config.cmake CMake scripts for building HDF5 HDF5options.cmake - hdf5-1.12.2 HDF5 1.12.2 source - LIBAEC.tar.gz source for building SZIP replacement - ZLib.tar.gz source for building ZLIB - hdf5_plugins.tar.gz source for building compression plugins - HDF5Examples-1.12.1-Source.tar.gz source for building examples + hdf5-1.12.3 HDF5 1.12.3 source + LIBAEC.tar.gz source for building SZIP replacement + ZLib.tar.gz source for building ZLIB + hdf5_plugins.tar.gz source for building compression plugins + HDF5Examples-1.12.10-Source.tar.gz source for building examples For more information about building HDF5 with CMake, see USING_HDF5_CMake.txt in -hdf5-1.12.2/release_docs, or +hdf5-1.12.3/release_docs, or https://portal.hdfgroup.org/display/support/Building+HDF5+with+CMake. For more information about building HDF5 with CMake on HPC machines, including -cross compiling on Cray XC40, see README_HPC in hdf5-1.12.2/release_docs. +cross compiling on Cray XC40, see README_HPC in hdf5-1.12.3/release_docs. diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index cebd09f..8fe0df8 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -80,6 +80,26 @@ New Features - + High-Level APIs: + ---------------- + - + + + C Packet Table API: + ------------------- + - + + + Internal header file: + --------------------- + - + + + Documentation: + -------------- + - + + Support for new platforms, languages and compilers ================================================== - @@ -102,6 +122,16 @@ Bug Fixes since HDF5-1.12.1 release - + Tools + ----- + - + + + Performance + ------------- + - + + Fortran API ----------- - @@ -207,7 +237,7 @@ Platforms Tested macOS Apple M1 11.6 Apple clang version 12.0.5 (clang-1205.0.22.11) Darwin 20.6.0 arm64 gfortran GNU Fortran (Homebrew GCC 11.2.0) 11.1.0 - (macmini-m1) Intel icc/icpc/ifort version 2021.3.0 202106092021.3.0 20210609 + (macmini-m1) Intel icc/icpc/ifort version 2021.3.0 20210609 macOS Big Sur 11.3.1 Apple clang version 12.0.5 (clang-1205.0.22.9) Darwin 20.4.0 x86_64 gfortran GNU Fortran (Homebrew GCC 10.2.0_3) 10.2.0 @@ -225,7 +255,6 @@ Platforms Tested 64-bit gfortran GNU Fortran (GCC) 5.2.0 (osx1011test) Intel icc/icpc/ifort version 16.0.2 - Linux 2.6.32-573.22.1.el6 GNU C (gcc), Fortran (gfortran), C++ (g++) #1 SMP x86_64 GNU/Linux compilers: Centos6 Version 4.4.7 20120313 @@ -239,7 +268,8 @@ Platforms Tested Visual Studio 2017 w/ Intel C/C++/Fortran 19 (cmake) Visual Studio 2019 w/ clang 12.0.0 with MSVC-like command-line (C/C++ only - cmake) - Visual Studio 2019 w/ Intel C/C++/Fortran oneAPI 2021 (cmake) + Visual Studio 2019 + w/ Intel C/C++/Fortran oneAPI 2021 (cmake) Visual Studio 2019 w/ MSMPI 10.1 (C only - cmake) @@ -312,10 +342,10 @@ a number of CMake specific files for support of CMake's find_package and support for the HDF5 Examples CMake project. The issues with the gif tool are: -HDFFV-10592 CVE-2018-17433 -HDFFV-10593 CVE-2018-17436 -HDFFV-11048 CVE-2020-10809 + HDFFV-10592 CVE-2018-17433 + HDFFV-10593 CVE-2018-17436 + HDFFV-11048 CVE-2020-10809 These CVE issues have not yet been addressed and can be avoided by not building the gif tool. 
Disable building the High-Level tools with these options: -autotools: --disable-hltools -cmake: HDF5_BUILD_HL_TOOLS=OFF + autotools: --disable-hltools + cmake: HDF5_BUILD_HL_TOOLS=OFF diff --git a/test/CMakeTests.cmake b/test/CMakeTests.cmake index 40b9304..3dafb2e 100644 --- a/test/CMakeTests.cmake +++ b/test/CMakeTests.cmake @@ -664,18 +664,18 @@ set_tests_properties (H5TEST-tcheck_version-release PROPERTIES # flushrefresh ############################################################################## # autotools script tests -# error_test and err_compat are built at the same time as the other tests, but executed by testerror.sh. -# NOT CONVERTED accum_swmr_reader is used by accum.c. -# NOT CONVERTED atomic_writer and atomic_reader are standalone programs. -# links_env is used by testlinks_env.sh -# filenotclosed and del_many_dense_attrs are used by testabort_fail.sh -# NOT CONVERTED flushrefresh is used by testflushrefresh.sh. -# NOT CONVERTED use_append_chunk, use_append_mchunks and use_disable_mdc_flushes are used by test_usecases.sh -# NOT CONVERTED swmr_* files (besides swmr.c) are used by testswmr.sh. -# NOT CONVERTED vds_swmr_* files are used by testvdsswmr.sh -# NOT CONVERTED 'make check' doesn't run them directly, so they are not included in TEST_PROG. -# NOT CONVERTED Also build testmeta, which is used for timings test. It builds quickly, -# NOT CONVERTED and this lets automake keep all its test programs in one place. +# error_test and err_compat are built at the same time as the other tests, but executed by test_error.sh +# NOT CONVERTED accum_swmr_reader is used by accum.c +# NOT CONVERTED atomic_writer and atomic_reader are stand-alone programs +# links_env is used by test_links_env.sh +# filenotclosed and del_many_dense_attrs are used by test_abort_fail.sh +# NOT CONVERTED flushrefresh is used by test_flush_refresh.sh +# NOT CONVERTED use_append_chunk, use_append_mchunks and use_disable_mdc_flushes are used by test_use_cases.sh +# NOT CONVERTED swmr_* files (besides swmr.c) are used by test_swmr.sh +# NOT CONVERTED vds_swmr_* files are used by test_vds_swmr.sh +# 'make check' doesn't run them directly, so they are not included in TEST_PROG. +# Also build testmeta, which is used for timings test. It builds quickly +# and this lets automake keep all its test programs in one place. 
############################################################################## #-- Adding test for filenotclosed @@ -865,10 +865,10 @@ if (ENABLE_EXTENDED_TESTS) ############################################################################## ### S W M R T E S T S ############################################################################## -# testflushrefresh.sh: flushrefresh -# test_usecases.sh: use_append_chunk, use_append_mchunks, use_disable_mdc_flushes -# testswmr.sh: swmr* -# testvdsswmr.sh: vds_swmr* +# test_flush_refresh.sh: flushrefresh +# test_use_cases.sh: use_append_chunk, use_append_mchunks, use_disable_mdc_flushes +# test_swmr.sh: swmr* +# test_vds_swmr.sh: vds_swmr* #-- Adding test for flushrefresh file (MAKE_DIRECTORY "${PROJECT_BINARY_DIR}/H5TEST/flushrefresh_test") diff --git a/test/h5test.c b/test/h5test.c index 0a7f2fd..870a64b 100644 --- a/test/h5test.c +++ b/test/h5test.c @@ -633,7 +633,7 @@ h5_fixname_real(const char *base_name, hid_t fapl, const char *_suffix, char *fu */ if (isppdriver) { #ifdef H5_HAVE_PARALLEL - if (getenv_all(MPI_COMM_WORLD, 0, "HDF5_NOCLEANUP")) + if (getenv_all(MPI_COMM_WORLD, 0, HDF5_NOCLEANUP)) SetTestNoCleanup(); #endif /* H5_HAVE_PARALLEL */ } diff --git a/utils/mirror_vfd/mirror_server_stop.c b/utils/mirror_vfd/mirror_server_stop.c index 024b33a..abc4c1e 100644 --- a/utils/mirror_vfd/mirror_server_stop.c +++ b/utils/mirror_vfd/mirror_server_stop.c @@ -60,7 +60,7 @@ struct mshs_opts { static void usage(void) { - HDprintf("mirror_server_halten_sie [options]\n" + HDprintf("mirror_server_stop [options]\n" "System-independent Mirror Server shutdown program.\n" "Sends shutdown message to Mirror Server at given IP:port\n" "\n" -- cgit v0.12
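A note on the hl/test hunks: the call sites change from TEST_ERROR and FAIL_STACK_ERROR to TEST_ERROR; and FAIL_STACK_ERROR; so each invocation reads as an ordinary C statement. A minimal sketch of the macro shape that motivates this, assuming a do { ... } while (0) style definition (illustrative only; the real macros in test/h5test.h also print the failure location and mark the test failed):

    #include <stdio.h>

    /* Statement-like error macro: the do/while(0) wrapper consumes no
     * semicolon of its own, so every call site must supply one. */
    #define TEST_ERROR                                                    \
        do {                                                              \
            fprintf(stderr, "FAILED at %s:%d\n", __FILE__, __LINE__);     \
            goto error;                                                   \
        } while (0)

    static int
    check_positive(int value)
    {
        if (value <= 0)
            TEST_ERROR; /* now parses correctly even under if/else */

        return 0;

    error:
        return -1;
    }

    int
    main(void)
    {
        return check_positive(42) == 0 ? 0 : 1;
    }

The do/while(0) idiom is what makes the trailing semicolon mandatory: a bare { ... } block followed by ; would break an if/else chain, while do { ... } while (0) expands to a single statement that composes like any function call.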
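Similarly, for the compatibility test touched in hl/test/test_h5do_compat.c, which compares H5Dread against the deprecated H5DOread_chunk wrapper: below is a minimal sketch of the raw-chunk read path, assuming an existing file holding a chunked 1-D int dataset with no filters. The file name "do_compat.h5", dataset name "dset", and chunk extent are hypothetical stand-ins, not names from this patch.

    #include <limits.h>
    #include <stdint.h>
    #include <stdio.h>

    #include "hdf5.h"
    #include "hdf5_hl.h"

    #define CHUNK_NX 4 /* assumed chunk extent for this sketch */

    int
    main(void)
    {
        hid_t    fid = H5I_INVALID_HID;
        hid_t    did = H5I_INVALID_HID;
        int      chunk_data[CHUNK_NX];
        uint32_t filter_mask = UINT_MAX; /* filled in by the read */
        hsize_t  offset[1]   = {0};      /* element offset of the chunk */

        if ((fid = H5Fopen("do_compat.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
            goto error;
        if ((did = H5Dopen2(fid, "dset", H5P_DEFAULT)) < 0)
            goto error;

        /* Read one chunk directly from the file, bypassing dataspace
         * selection; filter_mask reports which pipeline filters were
         * skipped when the chunk was written (0 means none skipped). */
        if (H5DOread_chunk(did, H5P_DEFAULT, offset, &filter_mask, chunk_data) < 0)
            goto error;

        printf("filter mask 0x%x, chunk_data[0] = %d\n",
               (unsigned)filter_mask, chunk_data[0]);

        H5Dclose(did);
        H5Fclose(fid);
        return 0;

    error:
        if (did >= 0)
            H5Dclose(did);
        if (fid >= 0)
            H5Fclose(fid);
        return 1;
    }

H5DOread_chunk is the deprecated high-level wrapper that this compat test deliberately keeps exercising; new code would call H5Dread_chunk in the main library instead.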