From d31a017e6c15fcb5f8ed16806b7259eadb8c350a Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Mon, 19 Jun 2023 00:21:40 -0500 Subject: Various merges from develop (#3135) * Merges from develop/1.14 * Fix doxygen warnings * Fix spelling * Fix doxygen ref * Add braces * Fix format * Remove unused file --- .github/workflows/cmake-ctest.yml | 12 +- .github/workflows/main.yml | 62 ++++++-- .github/workflows/release.yml | 34 ++--- CMakeFilters.cmake | 12 +- config/cmake/ConfigureChecks.cmake | 10 +- config/cmake/FindSZIP.cmake | 129 ---------------- config/cmake/HDF5UseFortran.cmake | 35 +++-- config/cmake/HDFCompilerFlags.cmake | 4 +- config/cmake/HDFLibMacros.cmake | 4 +- config/cmake/HDFTests.c | 4 +- config/cmake/UserMacros/Windows_MT.cmake | 1 + config/cmake/scripts/HDF5options.cmake | 2 +- config/sanitizer/tools.cmake | 135 ++++++++--------- configure.ac | 53 +++---- doc/branches-explained.md | 31 ++-- doxygen/Doxyfile.in | 2 +- doxygen/dox/LearnBasics3.dox | 2 +- doxygen/dox/ReferenceManual.dox | 1 - doxygen/examples/menus/core_menu.md | 4 - doxygen/examples/tables/predefinedDatatypes.dox | 2 +- fortran/src/H5Lff.F90 | 2 +- java/examples/datasets/CMakeLists.txt | 7 +- java/examples/datatypes/CMakeLists.txt | 5 + java/examples/groups/CMakeLists.txt | 7 + java/examples/intro/CMakeLists.txt | 5 + java/src/Makefile.am | 1 - java/src/hdf/hdf5lib/CMakeLists.txt | 1 - java/src/hdf/hdf5lib/H5.java | 2 +- java/src/hdf/hdf5lib/HDF5GroupInfo.java | 188 ------------------------ java/src/hdf/hdf5lib/HDFArray.java | 2 + java/src/jni/h5fImp.c | 4 +- java/src/jni/h5lImp.c | 8 +- java/src/jni/h5pDAPLImp.c | 2 + java/src/jni/h5pFAPLImp.c | 2 + java/src/jni/h5tImp.c | 2 +- m4/aclocal_fc.f90 | 20 +-- release_docs/RELEASE.txt | 20 ++- src/H5Dpublic.h | 40 ++--- src/H5Fpublic.h | 16 +- src/H5Tmodule.h | 2 +- src/H5Tpublic.h | 2 +- 41 files changed, 318 insertions(+), 559 deletions(-) delete mode 100644 config/cmake/FindSZIP.cmake delete mode 100644 java/src/hdf/hdf5lib/HDF5GroupInfo.java diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml index fd10815..5a06919 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -96,7 +96,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Install CMake Dependencies (Linux) - run: sudo apt-get install ninja-build + run: sudo apt-get install ninja-build doxygen graphviz - name: Set file base name (Linux) id: set-file-base @@ -151,6 +151,14 @@ jobs: path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + # Save doxygen files created by ctest script + - name: Save published doxygen (Linux) + uses: actions/upload-artifact@v3 + with: + name: docs-doxygen + path: ${{ runner.workspace }}/hdf5/build/ci-StdShar-GNUC/hdf5lib_docs/html + if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` + build_and_test_mac: # MacOS w/ Clang + CMake # @@ -158,7 +166,7 @@ jobs: runs-on: macos-11 steps: - name: Install Dependencies (MacOS) - run: brew install ninja + run: brew install ninja doxygen - name: Set file base name (MacOS) id: set-file-base diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 258c5ea..226d1ed 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -81,6 +81,7 @@ jobs: cpp: ON fortran: OFF java: ON + docs: OFF libaecfc: ON localaec: OFF zlibfc: ON @@ -88,6 +89,7 @@ 
jobs: parallel: OFF mirror_vfd: OFF direct_vfd: OFF + ros3_vfd: OFF generator: "-G \"Visual Studio 17 2022\" -A x64" run_tests: true @@ -100,6 +102,7 @@ jobs: cpp: ON fortran: ON java: ON + docs: ON libaecfc: ON localaec: OFF zlibfc: ON @@ -107,6 +110,7 @@ jobs: parallel: OFF mirror_vfd: ON direct_vfd: ON + ros3_vfd: OFF toolchain: "config/toolchain/gcc.cmake" generator: "-G Ninja" run_tests: true @@ -120,9 +124,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: enable default_api: v110 szip: yes @@ -141,9 +147,11 @@ jobs: cpp: disable fortran: enable java: disable + docs: disable parallel: enable mirror_vfd: disable direct_vfd: disable + ros3_vfd: disable deprec_sym: enable default_api: v110 szip: yes @@ -157,10 +165,11 @@ jobs: # We could also build with the Autotools via brew installing them, # but that seems unnecessary - name: "MacOS Clang CMake" - os: macos-11 + os: macos-13 cpp: ON fortran: OFF java: ON + docs: OFF libaecfc: ON localaec: OFF zlibfc: ON @@ -168,6 +177,7 @@ jobs: parallel: OFF mirror_vfd: ON direct_vfd: OFF + ros3_vfd: OFF toolchain: "config/toolchain/clang.cmake" generator: "-G Ninja" run_tests: true @@ -186,9 +196,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: enable default_api: v16 szip: yes @@ -209,9 +221,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: enable default_api: v18 szip: yes @@ -232,9 +246,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: enable default_api: v110 szip: yes @@ -255,9 +271,11 @@ jobs: cpp: enable fortran: enable java: enable + docs: disable parallel: disable mirror_vfd: enable direct_vfd: enable + ros3_vfd: enable deprec_sym: disable default_api: default szip: yes @@ -294,17 +312,18 @@ jobs: run: echo '${{ toJSON(matrix) }}' - name: Install CMake Dependencies (Linux) - run: sudo apt-get install ninja-build + run: sudo apt-get install ninja-build doxygen graphviz if: matrix.os == 'ubuntu-latest' - name: Install Autotools Dependencies (Linux, serial) run: | sudo apt update sudo apt install automake autoconf libtool libtool-bin - sudo apt install gcc-11 g++-11 gfortran-11 - echo "CC=gcc-11" >> $GITHUB_ENV - echo "CXX=g++-11" >> $GITHUB_ENV - echo "FC=gfortran-11" >> $GITHUB_ENV + sudo apt install gcc-12 g++-12 gfortran-12 + sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev + echo "CC=gcc-12" >> $GITHUB_ENV + echo "CXX=g++-12" >> $GITHUB_ENV + echo "FC=gfortran-12" >> $GITHUB_ENV sudo apt install libaec0 libaec-dev if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable') @@ -323,8 +342,8 @@ jobs: if: matrix.os == 'windows-latest' - name: Install Dependencies (macOS) - run: brew install ninja - if: matrix.os == 'macos-11' + run: brew install ninja doxygen + if: matrix.os == 'macos-13' - name: Set environment for MSVC (Windows) run: | @@ -346,7 +365,7 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel 
--${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }} + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --${{ matrix.ros3_vfd }}-ros3-vfd --with-szlib=${{ matrix.szip }} shell: bash if: (matrix.generator == 'autogen') && ! (matrix.thread_safety.enabled) @@ -355,7 +374,7 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --with-szlib=${{ matrix.szip }} + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --${{ matrix.ros3_vfd }}-ros3-vfd --with-szlib=${{ matrix.szip }} shell: bash if: (matrix.generator == 'autogen') && (matrix.thread_safety.enabled) @@ -367,7 +386,7 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_BUILD_DOC=${{ matrix.docs }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && ! 
(matrix.thread_safety.enabled) @@ -376,7 +395,7 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=OFF -DHDF5_BUILD_FORTRAN:BOOL=OFF -DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_BUILD_HL_LIB:BOOL=OFF -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=OFF -DHDF5_BUILD_FORTRAN:BOOL=OFF -DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_BUILD_DOC=OFF -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && (matrix.thread_safety.enabled) @@ -406,10 +425,23 @@ jobs: if: (matrix.generator == 'autogen') && (matrix.run_tests) && ! (matrix.thread_safety.enabled) - name: CMake Run Tests - run: ctest --build . --parallel 2 -C ${{ matrix.build_mode.cmake }} -V + run: ctest . --parallel 2 -C ${{ matrix.build_mode.cmake }} -V working-directory: ${{ runner.workspace }}/build - # Skip Debug MSVC while we investigate H5L Java test timeouts - if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! ((matrix.name == 'Windows MSVC CMake') && (matrix.build_mode.cmake == 'Debug')) + if: (matrix.generator != 'autogen') && (matrix.run_tests) && ! (matrix.thread_safety.enabled) + + # THREAD-SAFE + + - name: Autotools Run Thread-Safe Tests + run: | + cd test + ./ttsafe + working-directory: ${{ runner.workspace }}/build + if: (matrix.generator == 'autogen') && (matrix.run_tests) && (matrix.thread_safety.enabled) + + - name: CMake Run Thread-Safe Tests + run: ctest . 
--parallel 2 -C ${{ matrix.build_mode.cmake }} -V -R ttsafe + working-directory: ${{ runner.workspace }}/build + if: (matrix.generator != 'autogen') && (matrix.run_tests) && (matrix.thread_safety.enabled) # # INSTALL (note that this runs even when we don't run the tests) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 721f7b7..4be5edb 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -45,16 +45,26 @@ jobs: - run: | echo "Tag already present: ${{ steps.tag_create.outputs.tag_exists }}" - getfiles: + PreRelease-getfiles: runs-on: ubuntu-latest + needs: create-tag + environment: snapshots_1_10 + permissions: + contents: write steps: - - name: Set file base name - id: set-file-base + - name: Get file base name + id: get-file-base run: | FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT # Get files created by tarball script + - name: Get doxygen (Linux) + uses: actions/download-artifact@v3 + with: + name: docs-doxygen + path: ${{ github.workspace }}/${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen + - name: Get tgz-tarball (Linux) uses: actions/download-artifact@v3 with: @@ -86,27 +96,13 @@ jobs: name: tgz-ubuntu-2204-binary path: ${{ github.workspace }} - # Get files used by release script - - PreRelease: - runs-on: ubuntu-latest - needs: [create-tag, getfiles] - environment: snapshots_1_10 - permissions: - contents: write - steps: - - name: Set file base name - id: get-file-base - run: | - FILE_NAME_BASE=$(echo "${{ inputs.file_base }}") - echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT - - name: PreRelease tag uses: softprops/action-gh-release@v1 with: - tag_name: "snapshot" + tag_name: "snapshot_1_10" prerelease: true files: | + ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}.zip ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake index b81d6e2..200634e 100644 --- a/CMakeFilters.cmake +++ b/CMakeFilters.cmake @@ -164,11 +164,11 @@ if (HDF5_ENABLE_SZIP_SUPPORT) if (NOT SZIP_FOUND) find_package (SZIP) # Legacy find endif () - if (SZIP_FOUND) - set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR}) - set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR}) - set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES}) - endif () + endif () + if (SZIP_FOUND) + set (SZIP_INCLUDE_DIR_GEN ${SZIP_INCLUDE_DIR}) + set (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIRS} ${SZIP_INCLUDE_DIR}) + set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_LIBRARIES}) endif () else () if (BUILD_SZIP_WITH_FETCHCONTENT) @@ -200,8 +200,6 @@ if (HDF5_ENABLE_SZIP_SUPPORT) set (SZIP_PACKAGE_NAME ${SZIP_PACKAGE_NAME}) endif () set (LINK_COMP_LIBS ${LINK_COMP_LIBS} ${SZIP_STATIC_LIBRARY}) - else () - message (FATAL_ERROR "SZIP is Required for SZIP support in HDF5") endif () endif () if (SZIP_FOUND) diff --git a/config/cmake/ConfigureChecks.cmake b/config/cmake/ConfigureChecks.cmake index dbf395b..8900d4c 100644 --- a/config/cmake/ConfigureChecks.cmake +++ b/config/cmake/ConfigureChecks.cmake @@ -272,6 +272,7 @@ if (MINGW OR NOT WINDOWS) set (HDF_EXTRA_C_FLAGS ${HDF_EXTRA_C_FLAGS} -D_GNU_SOURCE) option (HDF_ENABLE_LARGE_FILE "Enable support for large (64-bit) files on Linux." 
ON) + mark_as_advanced (HDF_ENABLE_LARGE_FILE) if (HDF_ENABLE_LARGE_FILE AND NOT DEFINED TEST_LFS_WORKS_RUN) set (msg "Performing TEST_LFS_WORKS") try_run (TEST_LFS_WORKS_RUN TEST_LFS_WORKS_COMPILE @@ -694,11 +695,6 @@ endif () #----------------------------------------------------------------------------- set (${HDF_PREFIX}_CONVERT_DENORMAL_FLOAT 1) -# ---------------------------------------------------------------------- -# Set a special flag when using memory sanity checkers like Valgrind. -# This disables the free lists, as the memory reuse scheme they implement -# can hide memory problems. -#----------------------------------------------------------------------------- if (HDF5_ENABLE_USING_MEMCHECKER) set (${HDF_PREFIX}_USING_MEMCHECKER 1) endif () @@ -707,6 +703,7 @@ endif () # Option for --enable-strict-format-checks #----------------------------------------------------------------------------- option (HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks" OFF) +mark_as_advanced (HDF5_STRICT_FORMAT_CHECKS) if (HDF5_STRICT_FORMAT_CHECKS) set (${HDF_PREFIX}_STRICT_FORMAT_CHECKS 1) endif () @@ -719,6 +716,7 @@ MARK_AS_ADVANCED (HDF5_STRICT_FORMAT_CHECKS) # support denormalized floating values) to maximize speed. #----------------------------------------------------------------------------- option (HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON) +mark_as_advanced (HDF5_WANT_DATA_ACCURACY) if (HDF5_WANT_DATA_ACCURACY) set (${HDF_PREFIX}_WANT_DATA_ACCURACY 1) endif () @@ -731,6 +729,7 @@ MARK_AS_ADVANCED (HDF5_WANT_DATA_ACCURACY) # actually benefit little. #----------------------------------------------------------------------------- option (HDF5_WANT_DCONV_EXCEPTION "exception handling functions is checked during data conversions" ON) +mark_as_advanced (HDF5_WANT_DCONV_EXCEPTION) if (HDF5_WANT_DCONV_EXCEPTION) set (${HDF_PREFIX}_WANT_DCONV_EXCEPTION 1) endif () @@ -740,6 +739,7 @@ MARK_AS_ADVANCED (HDF5_WANT_DCONV_EXCEPTION) # Check if they would like the function stack support compiled in #----------------------------------------------------------------------------- option (HDF5_ENABLE_CODESTACK "Enable the function stack tracing (for developer debugging)." OFF) +mark_as_advanced (HDF5_ENABLE_CODESTACK) if (HDF5_ENABLE_CODESTACK) set (${HDF_PREFIX}_HAVE_CODESTACK 1) endif () diff --git a/config/cmake/FindSZIP.cmake b/config/cmake/FindSZIP.cmake deleted file mode 100644 index 846a3d1..0000000 --- a/config/cmake/FindSZIP.cmake +++ /dev/null @@ -1,129 +0,0 @@ -# -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://www.hdfgroup.org/licenses. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# -######################################################################### - -# - Derived from the FindTiff.cmake and FindJPEG.cmake that is included with cmake -# FindSZIP - -# Find the native SZIP includes and library - -# Imported targets -################## - -# This module defines the following :prop_tgt:`IMPORTED` targets: -# -# SZIP::SZIP -# The SZIP library, if found. -# -# Result variables -################### - -# This module will set the following variables in your project: - -# SZIP_FOUND, true if the SZIP headers and libraries were found. 
-# SZIP_INCLUDE_DIR, the directory containing the SZIP headers. -# SZIP_INCLUDE_DIRS, the directory containing the SZIP headers. -# SZIP_LIBRARIES, libraries to link against to use SZIP. - -# Cache variables -################# - -# The following variables may also be set: - -# SZIP_LIBRARY, where to find the SZIP library. -# SZIP_LIBRARY_DEBUG - Debug version of SZIP library -# SZIP_LIBRARY_RELEASE - Release Version of SZIP library - -# message (STATUS "Finding SZIP library and headers..." ) -######################################################################### - - -find_path(SZIP_INCLUDE_DIR szlib.h) - -set(szip_names ${SZIP_NAMES} sz szip szip-static libsz libszip libszip-static) -foreach(name ${szip_names}) - list (APPEND szip_names_debug "${name}d") -endforeach() - -if(NOT SZIP_LIBRARY) - find_library(SZIP_LIBRARY_RELEASE NAMES ${szip_names}) - find_library(SZIP_LIBRARY_DEBUG NAMES ${szip_names_debug}) - include(SelectLibraryConfigurations) - select_library_configurations(SZIP) - mark_as_advanced(SZIP_LIBRARY_RELEASE SZIP_LIBRARY_DEBUG) -endif() -unset(szip_names) -unset(szip_names_debug) - -if(SZIP_INCLUDE_DIR AND EXISTS "${SZIP_INCLUDE_DIR}/SZconfig.h") - file(STRINGS "${SZIP_INCLUDE_DIR}/SZconfig.h" szip_version_str - REGEX "^#define[\t ]+SZIP_PACKAGE_VERSION[\t ]+.*") - - string(REGEX REPLACE "^#define[\t ]+SZIP_PACKAGE_VERSION[\t ]+([0-9]+).*" - "\\1" SZIP_VERSION "${szip_version_str}") - unset(szip_version_str) -endif() - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(SZIP - REQUIRED_VARS SZIP_LIBRARY SZIP_INCLUDE_DIR - VERSION_VAR SZIP_VERSION) - -if(SZIP_FOUND) - set(SZIP_LIBRARIES ${SZIP_LIBRARY}) - set(SZIP_INCLUDE_DIRS "${SZIP_INCLUDE_DIR}") - - if(NOT TARGET SZIP::SZIP) - add_library(SZIP::SZIP UNKNOWN IMPORTED) - if(SZIP_INCLUDE_DIRS) - set_target_properties(SZIP::SZIP PROPERTIES - INTERFACE_INCLUDE_DIRECTORIES "${SZIP_INCLUDE_DIRS}") - endif() - if(EXISTS "${SZIP_LIBRARY}") - set_target_properties(SZIP::SZIP PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES "C" - IMPORTED_LOCATION "${SZIP_LIBRARY}") - endif() - if(EXISTS "${SZIP_LIBRARY_RELEASE}") - set_property(TARGET SZIP::SZIP APPEND PROPERTY - IMPORTED_CONFIGURATIONS RELEASE) - set_target_properties(SZIP::SZIP PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_RELEASE "C" - IMPORTED_LOCATION_RELEASE "${SZIP_LIBRARY_RELEASE}") - endif() - if(EXISTS "${SZIP_LIBRARY_DEBUG}") - set_property(TARGET SZIP::SZIP APPEND PROPERTY - IMPORTED_CONFIGURATIONS DEBUG) - set_target_properties(SZIP::SZIP PROPERTIES - IMPORTED_LINK_INTERFACE_LANGUAGES_DEBUG "C" - IMPORTED_LOCATION_DEBUG "${SZIP_LIBRARY_DEBUG}") - endif() - endif() -endif() - -mark_as_advanced(SZIP_LIBRARY SZIP_INCLUDE_DIR) - -# Report the results. -if (NOT SZIP_FOUND) - set (SZIP_DIR_MESSAGE - "SZip was not found. Make sure SZIP_LIBRARY and SZIP_INCLUDE_DIR are set or set the SZIP_INSTALL environment variable." 
- ) - if (NOT SZIP_FIND_QUIETLY) - if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSE "${SZIP_DIR_MESSAGE}") - endif () - else () - if (SZIP_FIND_REQUIRED) - message (FATAL_ERROR "SZip was NOT found and is Required by this project") - endif () - endif () -endif () diff --git a/config/cmake/HDF5UseFortran.cmake b/config/cmake/HDF5UseFortran.cmake index 43e006c..fd09c51 100644 --- a/config/cmake/HDF5UseFortran.cmake +++ b/config/cmake/HDF5UseFortran.cmake @@ -17,6 +17,12 @@ enable_language (Fortran) set (HDF_PREFIX "H5") + +# Force lowercase Fortran module file names +if (CMAKE_Fortran_COMPILER_ID STREQUAL "Cray") + set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} -ef") +endif () + include (CheckFortranFunctionExists) if (NOT CMAKE_VERSION VERSION_LESS "3.14.0") @@ -43,11 +49,16 @@ macro (FORTRAN_RUN FUNCTION_NAME SOURCE_CODE RUN_RESULT_VAR1 COMPILE_RESULT_VAR1 ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler1.f90 "${SOURCE_CODE}" ) + if (CMAKE_VERSION VERSION_LESS 3.25) + set (_RUN_OUTPUT_VARIABLE "RUN_OUTPUT_VARIABLE") + else () + set (_RUN_OUTPUT_VARIABLE "RUN_OUTPUT_STDOUT_VARIABLE") + endif() TRY_RUN (RUN_RESULT_VAR COMPILE_RESULT_VAR ${CMAKE_BINARY_DIR} ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/testFortranCompiler1.f90 LINK_LIBRARIES "${HDF5_REQUIRED_LIBRARIES}" - RUN_OUTPUT_VARIABLE OUTPUT_VAR + ${_RUN_OUTPUT_VARIABLE} OUTPUT_VAR ) set (${RETURN_OUTPUT_VAR} ${OUTPUT_VAR}) @@ -187,10 +198,10 @@ foreach (KIND ${VAR}) " PROGRAM main USE ISO_C_BINDING - USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stderr=>ERROR_UNIT + USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stdout=>OUTPUT_UNIT IMPLICIT NONE INTEGER (KIND=${KIND}) a - WRITE(stderr,'(I0)') ${FC_SIZEOF_A} + WRITE(stdout,'(I0)') ${FC_SIZEOF_A} END " ) @@ -230,10 +241,10 @@ foreach (KIND ${VAR} ) " PROGRAM main USE ISO_C_BINDING - USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stderr=>ERROR_UNIT + USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stdout=>OUTPUT_UNIT IMPLICIT NONE REAL (KIND=${KIND}) a - WRITE(stderr,'(I0)') ${FC_SIZEOF_A} + WRITE(stdout,'(I0)') ${FC_SIZEOF_A} END " ) @@ -272,17 +283,17 @@ set (PROG_SRC3 " PROGRAM main USE ISO_C_BINDING - USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stderr=>ERROR_UNIT + USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stdout=>OUTPUT_UNIT IMPLICIT NONE INTEGER a REAL b DOUBLE PRECISION c - WRITE(stderr,*) ${FC_SIZEOF_A} - WRITE(stderr,*) kind(a) - WRITE(stderr,*) ${FC_SIZEOF_B} - WRITE(stderr,*) kind(b) - WRITE(stderr,*) ${FC_SIZEOF_C} - WRITE(stderr,*) kind(c) + WRITE(stdout,*) ${FC_SIZEOF_A} + WRITE(stdout,*) kind(a) + WRITE(stdout,*) ${FC_SIZEOF_B} + WRITE(stdout,*) kind(b) + WRITE(stdout,*) ${FC_SIZEOF_C} + WRITE(stdout,*) kind(c) END " ) diff --git a/config/cmake/HDFCompilerFlags.cmake b/config/cmake/HDFCompilerFlags.cmake index 88670d7..b5c50b3 100644 --- a/config/cmake/HDFCompilerFlags.cmake +++ b/config/cmake/HDFCompilerFlags.cmake @@ -56,7 +56,9 @@ if (CMAKE_COMPILER_IS_GNUCC) set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Og -ftrapv -fno-common") endif () else () - if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0) + if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 5.0 AND + NOT CMAKE_C_CLANG_TIDY) + # `clang-tidy` does not understand -fstdarg-opt set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fstdarg-opt") endif () if (CMAKE_C_COMPILER_ID STREQUAL "GNU" AND NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 10.0) diff --git a/config/cmake/HDFLibMacros.cmake b/config/cmake/HDFLibMacros.cmake index 
6d77ab5..752d817 100644 --- a/config/cmake/HDFLibMacros.cmake +++ b/config/cmake/HDFLibMacros.cmake @@ -17,13 +17,14 @@ macro (ORIGINAL_ZLIB_LIBRARY compress_type) GIT_TAG ${ZLIB_BRANCH} ) elseif (${compress_type} MATCHES "TGZ") + message (VERBOSE "Filter ZLIB file ${ZLIB_URL}") FetchContent_Declare (HDF5_ZLIB URL ${ZLIB_URL} URL_HASH "" ) endif () FetchContent_GetProperties(HDF5_ZLIB) - if(NOT zlib_POPULATED) + if(NOT hdf5_zlib_POPULATED) FetchContent_Populate(HDF5_ZLIB) # Copy an additional/replacement files into the populated source @@ -51,6 +52,7 @@ macro (ORIGINAL_SZIP_LIBRARY compress_type encoding) GIT_TAG ${SZIP_BRANCH} ) elseif (${compress_type} MATCHES "TGZ") + message (VERBOSE "Filter SZIP file ${SZIP_URL}") FetchContent_Declare (SZIP URL ${SZIP_URL} URL_HASH "" diff --git a/config/cmake/HDFTests.c b/config/cmake/HDFTests.c index 2d7e1b4..133540b 100644 --- a/config/cmake/HDFTests.c +++ b/config/cmake/HDFTests.c @@ -188,8 +188,8 @@ int main(void) for (currentArg = llwidthArgs; *currentArg != NULL; currentArg++) { char formatString[64]; - sprintf(formatString, "%%%sd", *currentArg); - sprintf(s, formatString, x); + snprintf(formatString, sizeof(formatString), "%%%sd", *currentArg); + snprintf(s, 128, formatString, x); if (strcmp(s, "1099511627776") == 0) { printf("PRINTF_LL_WIDTH=[%s]\n", *currentArg); diff --git a/config/cmake/UserMacros/Windows_MT.cmake b/config/cmake/UserMacros/Windows_MT.cmake index 15cffba..c8edbe4 100644 --- a/config/cmake/UserMacros/Windows_MT.cmake +++ b/config/cmake/UserMacros/Windows_MT.cmake @@ -47,6 +47,7 @@ endmacro () #----------------------------------------------------------------------------- option (BUILD_STATIC_CRT_LIBS "Build With Static CRT Libraries" OFF) +mark_as_advanced (BUILD_STATIC_CRT_LIBS) if (BUILD_STATIC_CRT_LIBS) TARGET_STATIC_CRT_FLAGS () endif () diff --git a/config/cmake/scripts/HDF5options.cmake b/config/cmake/scripts/HDF5options.cmake index 92bfd37..ba5fc24 100644 --- a/config/cmake/scripts/HDF5options.cmake +++ b/config/cmake/scripts/HDF5options.cmake @@ -70,7 +70,7 @@ set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRIN ### disable using ext zlib #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF") ### disable using ext szip -#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF") +#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=ON") #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF") #### package examples #### diff --git a/config/sanitizer/tools.cmake b/config/sanitizer/tools.cmake index 3a41ad4..88d3baf 100644 --- a/config/sanitizer/tools.cmake +++ b/config/sanitizer/tools.cmake @@ -14,101 +14,90 @@ # the License. option(CLANG_TIDY "Turns on clang-tidy processing if it is found." OFF) -option(IWYU "Turns on include-what-you-use processing if it is found." OFF) -option(CPPCHECK "Turns on cppcheck processing if it is found." 
OFF) +if(CLANG_TIDY) + find_program(CLANG_TIDY_EXE NAMES "clang-tidy") + mark_as_advanced(FORCE CLANG_TIDY_EXE) + if(CLANG_TIDY_EXE) + message(STATUS "clang-tidy found: ${CLANG_TIDY_EXE}") + set(CMAKE_C_CLANG_TIDY ${CLANG_TIDY_EXE} --checks=-*,clang-analyzer-*)#${ARGN}) + set(CMAKE_CXX_CLANG_TIDY ${CLANG_TIDY_EXE} --checks=-*,clang-analyzer-*)#${ARGN}) + else() + message(STATUS "clang-tidy not found!") + set(CMAKE_C_CLANG_TIDY "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_CLANG_TIDY "" CACHE STRING "" FORCE) # delete it + endif() +else() + #message(STATUS "clang-tidy not enabled!") + set(CMAKE_C_CLANG_TIDY + "" + CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_CLANG_TIDY + "" + CACHE STRING "" FORCE) # delete it +endif() # Adds clang-tidy checks to the compilation, with the given arguments being used # as the options set. macro(clang_tidy) if(CLANG_TIDY AND CLANG_TIDY_EXE) + set(CMAKE_C_CLANG_TIDY ${CLANG_TIDY_EXE} ${ARGN}) set(CMAKE_CXX_CLANG_TIDY ${CLANG_TIDY_EXE} ${ARGN}) endif() endmacro() +option(IWYU "Turns on include-what-you-use processing if it is found." OFF) +if(IWYU) + find_program(IWYU_EXE NAMES "include-what-you-use") + mark_as_advanced(FORCE IWYU_EXE) + if(IWYU_EXE) + message(STATUS "include-what-you-use found: ${IWYU_EXE}") + else() + message(SEND_ERROR "Cannot enable include-what-you-use, as executable not found!") + set(CMAKE_C_INCLUDE_WHAT_YOU_USE "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE "" CACHE STRING "" FORCE) # delete it + endif() +else() + #message(STATUS "include-what-you-use NOT ENABLED via 'IWYU' variable!") + set(CMAKE_C_INCLUDE_WHAT_YOU_USE "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE "" CACHE STRING "" FORCE) # delete it +endif() + # Adds include_what_you_use to the compilation, with the given arguments being # used as the options set. macro(include_what_you_use) if(IWYU AND IWYU_EXE) + set(CMAKE_C_INCLUDE_WHAT_YOU_USE ${IWYU_EXE} ${ARGN}) set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE ${IWYU_EXE} ${ARGN}) endif() endmacro() +option(CPPCHECK "Turns on cppcheck processing if it is found." OFF) +if(CPPCHECK) + if(CPPCHECK_EXE) + message(STATUS "cppcheck found: ${CPPCHECK_EXE}") + set(CMAKE_C_CPPCHECK + "${CPPCHECK_EXE};--enable=warning,performance,portability,missingInclude;--template=\"[{severity}][{id}] {message} {callstack} \(On {file}:{line}\)\";--suppress=missingIncludeSystem;--quiet;--verbose;--force" + ) + set(CMAKE_CXX_CPPCHECK + "${CPPCHECK_EXE};--enable=warning,performance,portability,missingInclude;--template=\"[{severity}][{id}] {message} {callstack} \(On {file}:{line}\)\";--suppress=missingIncludeSystem;--quiet;--verbose;--force" + ) + else() + message(SEND_ERROR "Cannot enable cppcheck, as executable not found!") + set(CMAKE_C_CPPCHECK "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_CPPCHECK "" CACHE STRING "" FORCE) # delete it + endif() +else() + # message(SEND_ERROR "cppcheck NOT ENABLED via 'CPPCHECK' variable!") + set(CMAKE_C_CPPCHECK "" CACHE STRING "" FORCE) # delete it + set(CMAKE_CXX_CPPCHECK "" CACHE STRING "" FORCE) # delete it +endif() + # Adds cppcheck to the compilation, with the given arguments being used as the # options set. 
macro(cppcheck) if(CPPCHECK AND CPPCHECK_EXE) + set(CMAKE_C_CPPCHECK ${CPPCHECK_EXE} ${ARGN}) set(CMAKE_CXX_CPPCHECK ${CPPCHECK_EXE} ${ARGN}) endif() endmacro() -find_program(CLANG_TIDY_EXE NAMES "clang-tidy") -mark_as_advanced(FORCE CLANG_TIDY_EXE) -if(CLANG_TIDY_EXE) - message(STATUS "clang-tidy found: ${CLANG_TIDY_EXE}") - if(NOT CLANG_TIDY) - message(STATUS "clang-tidy NOT ENABLED via 'CLANG_TIDY' variable!") - set(CMAKE_CXX_CLANG_TIDY - "" - CACHE STRING "" FORCE) # delete it - endif() -elseif(CLANG_TIDY) - message(SEND_ERROR "Cannot enable clang-tidy, as executable not found!") - set(CMAKE_CXX_CLANG_TIDY - "" - CACHE STRING "" FORCE) # delete it -else() - message(STATUS "clang-tidy not found!") - set(CMAKE_CXX_CLANG_TIDY - "" - CACHE STRING "" FORCE) # delete it -endif() - -find_program(IWYU_EXE NAMES "include-what-you-use") -mark_as_advanced(FORCE IWYU_EXE) -if(IWYU_EXE) - message(STATUS "include-what-you-use found: ${IWYU_EXE}") - if(NOT IWYU) - message(STATUS "include-what-you-use NOT ENABLED via 'IWYU' variable!") - set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE - "" - CACHE STRING "" FORCE) # delete it - endif() -elseif(IWYU) - message( - SEND_ERROR "Cannot enable include-what-you-use, as executable not found!") - set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE - "" - CACHE STRING "" FORCE) # delete it -else() - message(STATUS "include-what-you-use not found!") - set(CMAKE_CXX_INCLUDE_WHAT_YOU_USE - "" - CACHE STRING "" FORCE) # delete it -endif() - -find_program(CPPCHECK_EXE NAMES "cppcheck") -mark_as_advanced(FORCE CPPCHECK_EXE) -if(CPPCHECK_EXE) - message(STATUS "cppcheck found: ${CPPCHECK_EXE}") - if(CPPCHECK) - set(CMAKE_CXX_CPPCHECK - "${CPPCHECK_EXE};--enable=warning,performance,portability,missingInclude;--template=\"[{severity}][{id}] {message} {callstack} \(On {file}:{line}\)\";--suppress=missingIncludeSystem;--quiet;--verbose;--force" - ) - endif() - if(NOT CPPCHECK) - message(STATUS "cppcheck NOT ENABLED via 'CPPCHECK' variable!") - set(CMAKE_CXX_CPPCHECK - "" - CACHE STRING "" FORCE) # delete it - endif() -elseif(CPPCHECK) - message(SEND_ERROR "Cannot enable cppcheck, as executable not found!") - set(CMAKE_CXX_CPPCHECK - "" - CACHE STRING "" FORCE) # delete it -else() - message(STATUS "cppcheck not found!") - set(CMAKE_CXX_CPPCHECK - "" - CACHE STRING "" FORCE) # delete it -endif() diff --git a/configure.ac b/configure.ac index d4ad151..fbc6455 100644 --- a/configure.ac +++ b/configure.ac @@ -535,31 +535,7 @@ AC_CHECK_SIZEOF([double]) AC_CHECK_SIZEOF([long double]) ## ---------------------------------------------------------------------- -## Check for non-standard extension __FLOAT128 -## -HAVE_FLOAT128=0 -HAVE_QUADMATH=0 -FLT128_DIG=0 -LDBL_DIG=0 - -AC_CHECK_SIZEOF([__float128]) -AC_CHECK_SIZEOF([_Quad]) -AC_CHECK_HEADERS([quadmath.h], [HAVE_QUADMATH=1], []) -PAC_FC_LDBL_DIG - -AC_SUBST([PAC_C_MAX_REAL_PRECISION]) - -if test "$ac_cv_sizeof___float128" != 0 && test "$FLT128_DIG" != 0 ; then - AC_DEFINE([HAVE_FLOAT128], [1], [Determine if __float128 is available]) - PAC_C_MAX_REAL_PRECISION=$FLT128_DIG -else - PAC_C_MAX_REAL_PRECISION=$LDBL_DIG -fi -AC_DEFINE_UNQUOTED([PAC_C_MAX_REAL_PRECISION], $PAC_C_MAX_REAL_PRECISION, [Determine the maximum decimal precision in C]) -AC_MSG_RESULT([$PAC_C_MAX_REAL_PRECISION]) - -## ---------------------------------------------------------------------- -## Check if they would like the Fortran interface compiled +## Check if the Fortran interface should be enabled ## ## This needs to be exposed for the library info file even if Fortran is disabled. 
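The configure.ac hunk above removes the top-level __float128 / maximum-real-precision probe, and the hunk that follows reinstates the same checks inside the `if test "X$HDF_FORTRAN" = "Xyes"` branch, so they now run only when the Fortran interface is enabled. As a rough, standalone C sketch of what that probe decides (illustrative only; it assumes a GNU-compatible compiler and takes FLT128_DIG from quadmath.h, whereas configure derives the precision through its own PAC_FC_LDBL_DIG macro):

    /* Illustrative sketch of the configure-time decision: prefer __float128
     * precision when quadmath.h is usable, otherwise fall back to long double. */
    #include <stdio.h>
    #include <float.h>
    #if defined(__GNUC__) && defined(__has_include)
    #  if __has_include(<quadmath.h>)
    #    include <quadmath.h>
    #    define PROBE_HAVE_QUADMATH 1
    #  endif
    #endif

    int main(void)
    {
    #if defined(PROBE_HAVE_QUADMATH) && defined(FLT128_DIG)
        /* __float128 is available: report its size and decimal precision */
        printf("sizeof(__float128)=%zu, max real precision=%d\n",
               sizeof(__float128), (int)FLT128_DIG);
    #else
        /* no usable __float128: fall back to long double precision */
        printf("no __float128, max real precision=%d\n", (int)LDBL_DIG);
    #endif
        return 0;
    }
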
@@ -579,6 +555,30 @@ AC_MSG_RESULT([$HDF_FORTRAN]) if test "X$HDF_FORTRAN" = "Xyes"; then +## ---------------------------------------------------------------------- +## Check for non-standard extension __FLOAT128 +## + HAVE_FLOAT128=0 + HAVE_QUADMATH=0 + FLT128_DIG=0 + LDBL_DIG=0 + + AC_CHECK_SIZEOF([__float128]) + AC_CHECK_SIZEOF([_Quad]) + AC_CHECK_HEADERS([quadmath.h], [HAVE_QUADMATH=1], []) + PAC_FC_LDBL_DIG + + AC_SUBST([PAC_C_MAX_REAL_PRECISION]) + + if test "$ac_cv_sizeof___float128" != 0 && test "$FLT128_DIG" != 0 ; then + AC_DEFINE([HAVE_FLOAT128], [1], [Determine if __float128 is available]) + PAC_C_MAX_REAL_PRECISION=$FLT128_DIG + else + PAC_C_MAX_REAL_PRECISION=$LDBL_DIG + fi + AC_DEFINE_UNQUOTED([PAC_C_MAX_REAL_PRECISION], $PAC_C_MAX_REAL_PRECISION, [Determine the maximum decimal precision in C]) + AC_MSG_RESULT([$PAC_C_MAX_REAL_PRECISION]) + ## We will output an include file for Fortran, H5config_f.inc which ## contains various configure definitions used by the Fortran Library. ## Prepend H5_ to all macro names. This avoids name conflict between HDF5 macro @@ -1136,7 +1136,7 @@ AC_SUBST([HDF5_TOOLS]) ## Default is to build tests and tools HDF5_TOOLS=yes -AC_MSG_CHECKING([if building tools is disabled]) +AC_MSG_CHECKING([if building tools is enabled]) AC_ARG_ENABLE([tools], [AS_HELP_STRING([--enable-tools], @@ -2369,6 +2369,7 @@ case "X-$DEV_WARNINGS" in ;; X-no) H5_CFLAGS="$H5_CFLAGS $NO_DEVELOPER_WARNING_CFLAGS" + H5_FCFLAGS="$H5_FCFLAGS $NO_DEVELOPER_WARNING_FCFLAGS" AC_MSG_RESULT([no]) ;; *) diff --git a/doc/branches-explained.md b/doc/branches-explained.md index 22b9c8f..5b55ec7 100644 --- a/doc/branches-explained.md +++ b/doc/branches-explained.md @@ -8,34 +8,33 @@ We encourage code contributors to check the status of their commits. If you have ## `develop` Develop is the main branch whose source code always reflects a state with the latest delivered development changes for the next major release of HDF5. -This is also considered the integration branch, as **all** new features are integrated into this branch from respective feature branches. +This is also considered the integration branch, as **all** new features are integrated into this branch from respective feature branches. Although +develop is considered an integration branch, it is not an unstable branch. All code merged to develop is expected to pass all GitHub actions and daily tests. ## `Maintenance branches` - -Each currently supported release-line of HDF5 (e.g. 1.8.x, 1.10.x, 1.12.x) has a support branch with the name 1_8, 1_10, 1_12. +Each currently supported release line of HDF5 (e.g. 1.8.x, 1.10.x, 1.12.x) has an associated branch with the name hdf5\_1\_10, etc.. Maintenance branches are similar to the develop branch, except the source code in a maintenance branch always reflects a state with the latest delivered development changes for the next **maintenance** release of that particular supported release-line of HDF5. **Some** new features will be integrated into a release maintenance branch, depending on whether or not those features can be introduced in minor releases. Maintenance branches are removed when a release-line is retired from support. +## `Release branches` +Release branches are used to prepare a new production release. They are primarily used to allow for last minute dotting of i's and crossing of t's +(things like setting the release version, finalizing release notes, and generating Autotools files) and do not include new development. 
+They are created from the maintenance branch at the time of the maintenance release and have +names like hdf5\_1\_10\_N, where N is the minor release number. Once the release is done it is tagged, with a slightly different format: hdf5-1\_\10\_N. +Release branches are deleted after the tag has been created. If we have to create a patch version of a release (which is rare), we create a branch off of the tag. + ## `feature/*` Feature branches are temporary branches used to develop new features in HDF5. Feature branches branch off of develop and exist as long as the feature is under development. When the feature is complete, the branch is merged back into develop, as well as into any support branches in which the change will be included, and then the feature branch is removed. -## `release/*` -Release branches are used to prepare a new production release. They are primarily used to allow for last minute dotting of i's and crossing of t's -(things like setting the release version, finalizing release notes, et cetera) and do not include new development. -They are created from the maintenance branch at the time of the maintenance release and have -names 1_8_N, 1_10_N, 1_12_N, where N is the minor release number. Once the release is done it is tagged. -Patches can be applied to the release branch for patch releases that are treated as "scaled down" maintenance releases as defined by Release coordinator. - -## `1.X/master/*` where X is 8, 10 or 12 -These branches are used to tag 1.X.* maintenance releases. +Ideally, all feature branches should contain a BRANCH.md file in the root directory that explains the purpose of the branch, contact information for the person responsible, and, if possible, some clues about the branch's life cycle (so we have an idea about when it can be deleted, merged, or declared inactive). -## `inactive//*` -These branches are for experimental features that were developed in the past and have not been merged to develop, and are not under active development. The features -can be out of sync with the develop branch. +Minor bug fixes and refactoring work usually takes place on personal forks, not feature branches. -This document was last updated on March 16, 2021 +## `inactive/*` +These branches are for experimental features that were developed in the past, have not been merged to develop, and are not under active development. The exception to this is that some feature branches are labeled inactive and preserved for a short time after merging to develop. Integration branches are usually not kept in sync with the develop branch. +As for feature branches, inactive branches should have a BRANCH.md file as described above. diff --git a/doxygen/Doxyfile.in b/doxygen/Doxyfile.in index 3d4a766..f35966d 100644 --- a/doxygen/Doxyfile.in +++ b/doxygen/Doxyfile.in @@ -625,7 +625,7 @@ WARN_NO_PARAMDOC = NO # a warning is encountered. # The default value is: NO. -WARN_AS_ERROR = NO +WARN_AS_ERROR = FAIL_ON_WARNINGS # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. 
The string should contain the $file, $line, and $text tags, which diff --git a/doxygen/dox/LearnBasics3.dox b/doxygen/dox/LearnBasics3.dox index 06afacd..7487458 100644 --- a/doxygen/dox/LearnBasics3.dox +++ b/doxygen/dox/LearnBasics3.dox @@ -945,7 +945,7 @@ For a more complete script (and to help resolve issues) see the script provided \subsection subsecLBCompilingCMakeExamples HDF5 Examples The installed HDF5 can be verified by compiling the HDF5 Examples project, included with the CMake built HDF5 binaries -in the share folder or you can go to the HDF5 Examples github repository. +in the share folder or you can go to the HDF5 Examples github repository. Go into the share directory and follow the instructions in USING_CMake_examples.txt to build the examples. diff --git a/doxygen/dox/ReferenceManual.dox b/doxygen/dox/ReferenceManual.dox index 5b46a19..eaf3912 100644 --- a/doxygen/dox/ReferenceManual.dox +++ b/doxygen/dox/ReferenceManual.dox @@ -131,7 +131,6 @@ The functions provided by the HDF5 API are grouped into the following \ref predefined_datatypes_tables
Deprecated functions
-Functions with \ref ASYNC
\ref api-compat-macros diff --git a/doxygen/examples/menus/core_menu.md b/doxygen/examples/menus/core_menu.md index 3fd7d11..8c82cc5 100644 --- a/doxygen/examples/menus/core_menu.md +++ b/doxygen/examples/menus/core_menu.md @@ -20,10 +20,6 @@ HDF5 datatypes describe the element type of HDF5 datasets and attributes.
HDF5 library error reporting. -- @ref H5ES "Event Set (H5ES)" -
-HDF5 event set life cycle used with HDF5 VOL connectors that enable the asynchronous feature in HDF5. - - @ref H5F "Files (H5F)"
Manage HDF5 files. diff --git a/doxygen/examples/tables/predefinedDatatypes.dox b/doxygen/examples/tables/predefinedDatatypes.dox index 1deb882..36d6b73 100644 --- a/doxygen/examples/tables/predefinedDatatypes.dox +++ b/doxygen/examples/tables/predefinedDatatypes.dox @@ -469,7 +469,7 @@ C-style double -#H5T_NATIVE_LDOUBLE +H5T_NATIVE_LDOUBLE C-style long double diff --git a/fortran/src/H5Lff.F90 b/fortran/src/H5Lff.F90 index add9b20..89f2dfa 100644 --- a/fortran/src/H5Lff.F90 +++ b/fortran/src/H5Lff.F90 @@ -472,7 +472,7 @@ CONTAINS !! \li H5L_TYPE_SOFT_F - Soft link !! \li H5L_TYPE_EXTERNAL_F - External link !! \li H5L_TYPE_ERROR_ F - Error -!! \param token If the link is a hard link, token specifies the object token that the link points to. +!! \param address If the link is a hard link, address specifies the file address that the link points to. !! \param val_size If the link is a symbolic link, val_size will be the length of the link value, e.g., !! the length of the name of the pointed-to object with a null terminator. !! \param hdferr \fortran_error diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt index f6d733f..7542e8e 100644 --- a/java/examples/datasets/CMakeLists.txt +++ b/java/examples/datasets/CMakeLists.txt @@ -82,6 +82,7 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") + set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) if (example STREQUAL "H5Ex_D_External") add_test ( @@ -97,6 +98,9 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 ) endif () + if (last_test) + set_tests_properties (JAVA_datasets-${example}-clear-objects PROPERTIES DEPENDS ${last_test}) + endif () add_test ( NAME JAVA_datasets-${example}-copy-objects @@ -137,9 +141,10 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) COMMAND ${CMAKE_COMMAND} -E remove ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 ) + endif () set_tests_properties (JAVA_datasets-${example}-clean-objects PROPERTIES DEPENDS JAVA_datasets-${example} ) - endif () + set (last_test "JAVA_datasets-${example}-clean-objects") endforeach () endif () diff --git a/java/examples/datatypes/CMakeLists.txt b/java/examples/datatypes/CMakeLists.txt index 75dab75..c77a715 100644 --- a/java/examples/datatypes/CMakeLists.txt +++ b/java/examples/datatypes/CMakeLists.txt @@ -67,12 +67,16 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") + set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) add_test ( NAME JAVA_datatypes-${example}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 ) + if (last_test) + set_tests_properties (JAVA_datatypes-${example}-clear-objects PROPERTIES DEPENDS ${last_test}) + endif () add_test ( NAME JAVA_datatypes-${example}-copy-objects @@ -107,5 +111,6 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) set_tests_properties (JAVA_datatypes-${example}-clean-objects PROPERTIES DEPENDS JAVA_datatypes-${example} ) + set (last_test "JAVA_datatypes-${example}-clean-objects") endforeach () endif () diff --git a/java/examples/groups/CMakeLists.txt 
b/java/examples/groups/CMakeLists.txt index 5393afd..01da53c 100644 --- a/java/examples/groups/CMakeLists.txt +++ b/java/examples/groups/CMakeLists.txt @@ -66,6 +66,7 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") + set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) if (NOT example STREQUAL "H5Ex_G_Iterate" AND NOT example STREQUAL "H5Ex_G_Visit") if (example STREQUAL "H5Ex_G_Compact") @@ -88,6 +89,9 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) COMMAND ${CMAKE_COMMAND} -E echo "${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 exists" ) endif () + if (last_test) + set_tests_properties (JAVA_groups-${example}-clear-objects PROPERTIES DEPENDS ${last_test}) + endif () add_test ( NAME JAVA_groups-${example}-copy-objects @@ -132,6 +136,9 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) set_tests_properties (JAVA_groups-${example}-clean-objects PROPERTIES DEPENDS JAVA_groups-${example} ) + set (last_test "JAVA_groups-${example}-clean-objects") + else () + set (last_test "JAVA_groups-${example}") endif () endforeach () endif () diff --git a/java/examples/intro/CMakeLists.txt b/java/examples/intro/CMakeLists.txt index b1c35bc..518bbf8 100644 --- a/java/examples/intro/CMakeLists.txt +++ b/java/examples/intro/CMakeLists.txt @@ -56,12 +56,16 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") + set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) add_test ( NAME JAVA_intro-${example}-clear-objects COMMAND ${CMAKE_COMMAND} -E remove ${HDFJAVA_EXAMPLES_BINARY_DIR}/${example}.h5 ) + if (last_test) + set_tests_properties (JAVA_intro-${example}-clear-objects PROPERTIES DEPENDS ${last_test}) + endif () add_test ( NAME JAVA_intro-${example}-copy-objects @@ -97,6 +101,7 @@ if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) set_tests_properties (JAVA_intro-${example}-clean-objects PROPERTIES DEPENDS JAVA_intro-${example} ) + set (last_test "JAVA_intro-${example}-clean-objects") endforeach () endif () diff --git a/java/src/Makefile.am b/java/src/Makefile.am index 1a313e8..8820fd8 100644 --- a/java/src/Makefile.am +++ b/java/src/Makefile.am @@ -106,7 +106,6 @@ hdf5_java_JAVA = \ ${pkgpath}/structs/H5AC_cache_config_t.java \ ${pkgpath}/H5.java \ ${pkgpath}/HDF5Constants.java \ - ${pkgpath}/HDF5GroupInfo.java \ ${pkgpath}/HDFArray.java \ ${pkgpath}/HDFNativeData.java diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt index e6072d9..b881cf7 100644 --- a/java/src/hdf/hdf5lib/CMakeLists.txt +++ b/java/src/hdf/hdf5lib/CMakeLists.txt @@ -99,7 +99,6 @@ set (HDF5_JAVADOC_HDF_HDF5_STRUCTS_SOURCES set (HDF5_JAVA_HDF_HDF5_SOURCES HDFArray.java HDF5Constants.java - HDF5GroupInfo.java HDFNativeData.java H5.java ) diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java index 2da6ffa..601dc3f 100644 --- a/java/src/hdf/hdf5lib/H5.java +++ b/java/src/hdf/hdf5lib/H5.java @@ -13199,7 +13199,7 @@ public class H5 implements java.io.Serializable { /** * @ingroup JH5T * - * H5Tcreate creates a new dataype of the specified class with the specified number of bytes. 
+ * H5Tcreate creates a new datatype of the specified class with the specified number of bytes. * * @param tclass * IN: Class of datatype to create. diff --git a/java/src/hdf/hdf5lib/HDF5GroupInfo.java b/java/src/hdf/hdf5lib/HDF5GroupInfo.java deleted file mode 100644 index 50c7db0..0000000 --- a/java/src/hdf/hdf5lib/HDF5GroupInfo.java +++ /dev/null @@ -1,188 +0,0 @@ -/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * - * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. * - * All rights reserved. * - * * - * This file is part of HDF5. The full HDF5 copyright notice, including * - * terms governing use, modification, and redistribution, is contained in * - * the COPYING file, which can be found at the root of the source code * - * distribution tree, or in https://www.hdfgroup.org/licenses. * - * If you do not have access to either file, you may request a copy from * - * help@hdfgroup.org. * - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ - -package hdf.hdf5lib; - -/** - *

- * This class is a container for the information reported about an HDF5 Object - * from the H5Gget_obj_info() method. - *

- * The fileno and objno fields contain four values which uniquely identify an - * object among those HDF5 files which are open: if all four values are the same - * between two objects, then the two objects are the same (provided both files - * are still open). The nlink field is the number of hard links to the object or - * zero when information is being returned about a symbolic link (symbolic links - * do not have hard links but all other objects always have at least one). The - * type field contains the type of the object, one of H5G_GROUP, H5G_DATASET, or - * H5G_LINK. The mtime field contains the modification time. If information is - * being returned about a symbolic link then linklen will be the length of the - * link value (the name of the pointed-to object with the null terminator); - * otherwise linklen will be zero. Other fields may be added to this structure - * in the future. - * - * @deprecated Not for public use. It is not used by the library. - * This class assumes that an object can contain four values which uniquely identify an - * object among those HDF5 files which are open. This is no longer valid in future - * HDF5 releases. - */ - -@Deprecated -public class HDF5GroupInfo { - long[] fileno; - long[] objno; - int nlink; - int type; - long mtime; - int linklen; - - /** - * Container for the information reported about an HDF5 Object - * from the H5Gget_obj_info() method - */ - public HDF5GroupInfo() - { - fileno = new long[2]; - objno = new long[2]; - nlink = -1; - type = -1; - mtime = 0; - linklen = 0; - } - - /** - * Sets the HDF5 group information. Used by the JHI5. - * - * @param fn - * File id number - * @param on - * Object id number - * @param nl - * Number of links - * @param t - * Type of the object - * @param mt - * Modification time - * @param len - * Length of link - **/ - public void setGroupInfo(long[] fn, long[] on, int nl, int t, long mt, int len) - { - fileno = fn; - objno = on; - nlink = nl; - type = t; - mtime = mt; - linklen = len; - } - - /** Resets all the group information to defaults. */ - public void reset() - { - fileno[0] = 0; - fileno[1] = 0; - objno[0] = 0; - objno[1] = 0; - nlink = -1; - type = -1; - mtime = 0; - linklen = 0; - } - - /** - * fileno accessors - * @return the file number if successful - */ - public long[] getFileno() { return fileno; } - - /** - * accessors - * @return the object number if successful - */ - public long[] getObjno() { return objno; } - - /** - * accessors - * @return type of group if successful - */ - public int getType() { return type; } - - /** - * accessors - * @return the number of links in the group if successful - */ - public int getNlink() { return nlink; } - - /** - * accessors - * @return the modified time value if successful - */ - public long getMtime() { return mtime; } - - /** - * accessors - * @return a length of link name if successful - */ - public int getLinklen() { return linklen; } - - /** - * The fileno and objno fields contain four values which uniquely identify - * an object among those HDF5 files. - */ - @Override - public boolean equals(Object obj) - { - if (!(obj instanceof HDF5GroupInfo)) { - return false; - } - - HDF5GroupInfo target = (HDF5GroupInfo)obj; - if ((fileno[0] == target.fileno[0]) && (fileno[1] == target.fileno[1]) && - (objno[0] == target.objno[0]) && (objno[1] == target.objno[1])) { - return true; - } - else { - return false; - } - } - - /** - * Returns the object id. 
- * - * @return the object id - */ - public long getOID() { return objno[0]; } - - /** - * /** Converts this object to a String representation. - * - * @return a string representation of this object - */ - @Override - public String toString() - { - String fileStr = "fileno=null"; - String objStr = "objno=null"; - - if (fileno != null) { - fileStr = "fileno[0]=" + fileno[0] + ",fileno[1]=" + fileno[1]; - } - - if (objno != null) { - objStr = "objno[0]=" + objno[0] + ",objno[1]=" + objno[1]; - } - - return getClass().getName() + "[" + fileStr + "," + objStr + ",type=" + type + ",nlink=" + nlink + - ",mtime=" + mtime + ",linklen=" + linklen + "]"; - } -} diff --git a/java/src/hdf/hdf5lib/HDFArray.java b/java/src/hdf/hdf5lib/HDFArray.java index 28d5117..637a896 100644 --- a/java/src/hdf/hdf5lib/HDFArray.java +++ b/java/src/hdf/hdf5lib/HDFArray.java @@ -455,6 +455,7 @@ public class HDFArray { new HDF5JavaException("HDFArray: unsupported Object type: " + ArrayDescriptor.NT); throw(ex); } + break; } // end of statement for arrays of boxed objects default: HDF5JavaException ex = @@ -527,6 +528,7 @@ public class HDFArray { new HDF5JavaException("HDFArray: unsupported Object type: " + ArrayDescriptor.NT); throw(ex); } + break; } // end of statement for arrays of boxed numerics } // end of switch statement for arrays of primitives diff --git a/java/src/jni/h5fImp.c b/java/src/jni/h5fImp.c index 57b3cbe..2afccf1 100644 --- a/java/src/jni/h5fImp.c +++ b/java/src/jni/h5fImp.c @@ -683,7 +683,7 @@ Java_hdf_hdf5lib_H5_H5Fset_1dset_1no_1attrs_1hint(JNIEnv *env, jclass clss, jlon done: return; -} +} /* end Java_hdf_hdf5lib_H5_H5Fset_1dset_1no_1attrs_1hint */ /* * Class: hdf_hdf5lib_H5 @@ -706,7 +706,7 @@ Java_hdf_hdf5lib_H5_H5Fget_1dset_1no_1attrs_1hint(JNIEnv *env, jclass clss, jlon done: return bval; -} +} /* end Java_hdf_hdf5lib_H5_H5Fget_1dset_1no_1attrs_1hint */ /* * Class: hdf_hdf5lib_H5 diff --git a/java/src/jni/h5lImp.c b/java/src/jni/h5lImp.c index 872382e..8b32215 100644 --- a/java/src/jni/h5lImp.c +++ b/java/src/jni/h5lImp.c @@ -277,7 +277,7 @@ done: JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1info(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jlong access_id) { - H5L_info_t infobuf; + H5L_info_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, {0}}; const char *linkName = NULL; jvalue args[5]; herr_t status = FAIL; @@ -317,7 +317,7 @@ JNIEXPORT jobject JNICALL Java_hdf_hdf5lib_H5_H5Lget_1info_1by_1idx(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jint index_field, jint order, jlong link_n, jlong access_id) { - H5L_info_t infobuf; + H5L_info_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, {0}}; const char *groupName = NULL; jvalue args[5]; herr_t status = FAIL; @@ -408,7 +408,7 @@ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Lget_1value(JNIEnv *env, jclass clss, jlong loc_id, jstring name, jobjectArray link_value, jlong access_id) { - H5L_info_t infobuf; + H5L_info_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, {0}}; const char *file_name = NULL; const char *obj_name = NULL; const char *linkName = NULL; @@ -498,7 +498,7 @@ Java_hdf_hdf5lib_H5_H5Lget_1value_1by_1idx(JNIEnv *env, jclass clss, jlong loc_i jint index_field, jint order, jlong link_n, jobjectArray link_value, jlong access_id) { - H5L_info_t infobuf; + H5L_info_t infobuf = {H5L_TYPE_ERROR, FALSE, -1, H5T_CSET_ERROR, {0}}; const char *file_name = NULL; const char *obj_name = NULL; const char *grpName = NULL; diff --git a/java/src/jni/h5pDAPLImp.c b/java/src/jni/h5pDAPLImp.c index 
01108b3..666f47d 100644 --- a/java/src/jni/h5pDAPLImp.c +++ b/java/src/jni/h5pDAPLImp.c @@ -80,6 +80,8 @@ Java_hdf_hdf5lib_H5_H5Pget_1chunk_1cache(JNIEnv *env, jclass clss, jlong dapl, j if (NULL != rdcc_nbytes) PIN_LONG_ARRAY(ENVONLY, rdcc_nbytes, nbytesArray, &isCopy, "H5Pget_chunk_cache: nbytesArray array not pinned"); + if (NULL == nbytesArray) + H5_NULL_ARGUMENT_ERROR(ENVONLY, "nbytesArray should not be NULL after pinning"); { /* direct cast (size_t *)variable fails on 32-bit environment */ diff --git a/java/src/jni/h5pFAPLImp.c b/java/src/jni/h5pFAPLImp.c index f094aaf..80ed6c6 100644 --- a/java/src/jni/h5pFAPLImp.c +++ b/java/src/jni/h5pFAPLImp.c @@ -1252,6 +1252,8 @@ Java_hdf_hdf5lib_H5_H5Pget_1cache(JNIEnv *env, jclass clss, jlong plist, jintArr if (NULL != rdcc_nbytes) PIN_LONG_ARRAY(ENVONLY, rdcc_nbytes, nbytesArray, &isCopy, "H5Pget_cache: nbytesArray array not pinned"); + if (NULL == nbytesArray) + H5_NULL_ARGUMENT_ERROR(ENVONLY, "nbytesArray should not be NULL after pinning"); { /* direct cast (size_t *)variable fails on 32-bit environment */ diff --git a/java/src/jni/h5tImp.c b/java/src/jni/h5tImp.c index 692825e..3c4d165 100644 --- a/java/src/jni/h5tImp.c +++ b/java/src/jni/h5tImp.c @@ -1659,7 +1659,7 @@ Java_hdf_hdf5lib_H5_H5Tflush(JNIEnv *env, jclass clss, jlong loc_id) done: return; -} +} /* end Java_hdf_hdf5lib_H5_H5Tflush */ /* * Class: hdf_hdf5lib_H5 diff --git a/m4/aclocal_fc.f90 b/m4/aclocal_fc.f90 index bcefab5..68a8f1b 100644 --- a/m4/aclocal_fc.f90 +++ b/m4/aclocal_fc.f90 @@ -82,7 +82,7 @@ END PROGRAM PROG_FC_C_LONG_DOUBLE_EQ_C_DOUBLE !---- START ----- Determine the available KINDs for REALs and INTEGERs PROGRAM FC_AVAIL_KINDS - USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stderr=>ERROR_UNIT + USE, INTRINSIC :: ISO_FORTRAN_ENV, ONLY : stdout=>OUTPUT_UNIT IMPLICIT NONE INTEGER :: ik, jk, k, kk, max_decimal_prec INTEGER :: prev_rkind, num_rkinds = 1, num_ikinds = 1 @@ -102,11 +102,11 @@ PROGRAM FC_AVAIL_KINDS ENDDO DO k = 1, num_ikinds - WRITE(stderr,'(I0)', ADVANCE='NO') list_ikinds(k) + WRITE(stdout,'(I0)', ADVANCE='NO') list_ikinds(k) IF(k.NE.num_ikinds)THEN - WRITE(stderr,'(A)',ADVANCE='NO') ',' + WRITE(stdout,'(A)',ADVANCE='NO') ',' ELSE - WRITE(stderr,'()') + WRITE(stdout,'()') ENDIF ENDDO @@ -139,17 +139,17 @@ PROGRAM FC_AVAIL_KINDS ENDDO prec DO k = 1, num_rkinds - WRITE(stderr,'(I0)', ADVANCE='NO') list_rkinds(k) + WRITE(stdout,'(I0)', ADVANCE='NO') list_rkinds(k) IF(k.NE.num_rkinds)THEN - WRITE(stderr,'(A)',ADVANCE='NO') ',' + WRITE(stdout,'(A)',ADVANCE='NO') ',' ELSE - WRITE(stderr,'()') + WRITE(stdout,'()') ENDIF ENDDO - WRITE(stderr,'(I0)') max_decimal_prec - WRITE(stderr,'(I0)') num_ikinds - WRITE(stderr,'(I0)') num_rkinds + WRITE(stdout,'(I0)') max_decimal_prec + WRITE(stdout,'(I0)') num_ikinds + WRITE(stdout,'(I0)') num_rkinds END PROGRAM FC_AVAIL_KINDS !---- END ----- Determine the available KINDs for REALs and INTEGERs diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 3034ba5..6c55488 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -89,7 +89,11 @@ New Features Java Library: ------------- - - + - HDF5GroupInfo class has been deprecated. + + This class assumes that an object can contain four values which uniquely identify an + object among those HDF5 files which are open. This is no longer valid in future + HDF5 releases. Tools: @@ -174,7 +178,7 @@ Bug Fixes since HDF5-1.10.10 release checkings will remove the potential invalid read of any of these values that could be triggered by a malformed file. 
- (BMR - 2023/04/12 GH-2603) + Fixes GitHub issue #2603 - Fixed potential buffer overrun issues in some object header decode routines @@ -210,7 +214,7 @@ Bug Fixes since HDF5-1.10.10 release The fix ensures each element is within bounds before reading. - Fix for Jira issue HDFFV-10712, CVE-2019-8396, GitHub issue #2209 + Fixes Jira issue HDFFV-10712, CVE-2019-8396, GitHub issue #2209 - Memory leak @@ -228,7 +232,15 @@ Bug Fixes since HDF5-1.10.10 release Java Library ------------ - - + - Fixed switch case 'L' block missing a break statement. + + The HDF5Array.arrayify method is missing a break statement in the case 'L': section + which causes it to fall through and throw an HDF5JavaException when attempting to + read an Array[Array[Long]]. + + The error was fixed by inserting a break statement at the end of the case 'L': sections. + + Fixes GitHub issue #3056 Configuration diff --git a/src/H5Dpublic.h b/src/H5Dpublic.h index f1167d3..63083f4 100644 --- a/src/H5Dpublic.h +++ b/src/H5Dpublic.h @@ -279,7 +279,7 @@ extern "C" { * * \p loc_id may specify a file, group, dataset, named datatype, * or attribute. If an attribute, dataset, or named datatype is - * specified then the dataset will be created at the location + * specified, then the dataset will be created at the location * where the attribute, dataset, or named datatype is attached. * * \p name may be either an absolute path in the file or a relative @@ -290,7 +290,7 @@ extern "C" { * file location where the dataset will be created, the datatype * is copied and converted to a transient type. * - * The link creation property list, \p lcpl_id, governs creation + * The link creation property list, \p lcpl_id, governs the creation * of the link(s) by which the new dataset is accessed and the * creation of any intermediate groups that may be missing. * @@ -334,12 +334,12 @@ H5_DLL hid_t H5Dcreate2(hid_t loc_id, const char *name, hid_t type_id, hid_t spa * * \p loc_id may specify a file, group, dataset, named datatype, * or attribute. If an attribute, dataset, or named datatype is - * specified then the dataset will be created at the location + * specified, then the dataset will be created at the location * where the attribute, dataset, or named datatype is attached. * * The dataset’s datatype and dataspace are specified by * \p type_id and \p space_id, respectively. These are the - * datatype and dataspace of the dataset as it will exist in + * datatype and dataspace of the dataset as they will exist in * the file, which may differ from the datatype and dataspace * in application memory. * @@ -668,7 +668,7 @@ H5_DLL herr_t H5Dget_chunk_info_by_coord(hid_t dset_id, const hsize_t *offset, u * context \p op_data. * * \par Example - * For each chunk, print the allocated chunk size (0 for un-allocated chunks). + * For each chunk, print the allocated chunk size (0 for unallocated chunks). * \snippet H5D_examples.c H5Dchunk_iter_cb * Iterate over all chunked datasets and chunks in a file. * \snippet H5D_examples.c H5Ovisit_cb @@ -700,22 +700,22 @@ H5_DLL herr_t H5Dchunk_iter(hid_t dset_id, hid_t dxpl_id, H5D_chunk_iter_op_t cb * specified by the index \p index. The chunk belongs to a set of * chunks in the selection specified by \p fspace_id. If the queried * chunk does not exist in the file, the size will be set to 0 and - * address to #HADDR_UNDEF. The value pointed to by filter_mask will + * address to HADDR_UNDEF. The value pointed to by filter_mask will * not be modified. \c NULL can be passed in for any \p out parameters. 
* * \p chk_idx is the chunk index in the selection. The index value * may have a value of 0 up to the number of chunks stored in - * the file that have a nonempty intersection with the file - * dataspace selection + * the file that has a nonempty intersection with the file + * dataspace selection. * * \note As of 1.10.5, the dataspace intersection is not yet - * supported, hence, the index is of all the written chunks. + * supported. Hence, the index is of all the written chunks. * * \p fspace_id specifies the file dataspace selection. It is - * intended to take #H5S_ALL for specifying the current selection. + * intended to take #H5S_ALL to specify the current selection. * * \note Please be aware that this function currently does not - * support non-trivial selections, thus \p fspace_id has no + * support non-trivial selections; thus \p fspace_id has no * effect. Also, the implementation does not handle the #H5S_ALL * macro correctly. As a workaround, an application can get * the dataspace for the dataset using H5Dget_space() and pass that @@ -735,7 +735,7 @@ H5_DLL herr_t H5Dget_chunk_info(hid_t dset_id, hid_t fspace_id, hsize_t chk_idx, * * \dset_id * - * \return Returns the offset in bytes; otherwise, returns #HADDR_UNDEF, + * \return Returns the offset in bytes; otherwise, returns HADDR_UNDEF, * a negative value. * * \details H5Dget_offset() returns the address in the file of @@ -983,7 +983,7 @@ H5_DLL herr_t H5Dwrite(hid_t dset_id, hid_t mem_type_id, hid_t mem_space_id, hid * the file. Only one chunk can be written with this function. * * \p filters is a mask providing a record of which filters are - * used with the the chunk. The default value of the mask is + * used with the chunk. The default value of the mask is * zero (0), indicating that all enabled filters are applied. A * filter is skipped if the bit corresponding to the filter’s * position in the pipeline (0 ≤ position < 32) is turned on. @@ -1102,6 +1102,8 @@ H5_DLL herr_t H5Dread_chunk(hid_t dset_id, hid_t dxpl_id, const hsize_t *offset, * be restarted at the point of exit; a second H5Diterate() * call will always restart at the beginning. * + * \warning Modifying the selection of \p space_id during iteration + * will lead to undefined behavior. * * \since 1.10.2 * @@ -1338,7 +1340,7 @@ H5_DLL herr_t H5Drefresh(hid_t dset_id); * \p op and scatters it to the supplied buffer \p dst_buf in a * manner similar to data being written to a dataset. * - * \p dst_space_id is a dataspace which defines the extent of \p + * \p dst_space_id is a dataspace that defines the extent of \p * dst_buf and the selection within it to scatter the data to. * * \p type_id is the datatype of the data to be scattered in both @@ -1395,7 +1397,7 @@ H5_DLL herr_t H5Dscatter(H5D_scatter_func_t op, void *op_data, hid_t type_id, hi * enough to hold all the data if the callback function \p op is * not provided. * - * \p op is a callback function which handles the gathered data. + * \p op is a callback function that handles the gathered data. * It is optional if \p dst_buf is large enough to hold all of the * gathered data; required otherwise. 
* @@ -1441,10 +1443,12 @@ H5_DLL herr_t H5Dgather(hid_t src_space_id, const void *src_buf, hid_t type_id, */ H5_DLL herr_t H5Dclose(hid_t dset_id); +/// \cond DEV /* Internal API routines */ H5_DLL herr_t H5Ddebug(hid_t dset_id); H5_DLL herr_t H5Dformat_convert(hid_t dset_id); H5_DLL herr_t H5Dget_chunk_index_type(hid_t did, H5D_chunk_index_t *idx_type); +/// \endcond /* Symbols defined for compatibility with previous versions of the HDF5 API. * @@ -1523,10 +1527,10 @@ H5_DLL herr_t H5Dget_chunk_index_type(hid_t did, H5D_chunk_index_t *idx_type); * * H5Dcreate() and H5Dcreate_anon() return a dataset identifier for * success or a negative value for failure. The dataset identifier - * should eventually be closed by calling H5Dclose() to release + * should eventually be closed by calling H5Dclose() to release the * resources it uses. * - * See H5Dcreate_anon() for discussion of the differences between + * See H5Dcreate_anon() for a discussion of the differences between * H5Dcreate() and H5Dcreate_anon(). * * The HDF5 library provides flexible means of specifying a fill value, @@ -1602,7 +1606,7 @@ H5_DLL hid_t H5Dopen1(hid_t loc_id, const char *name); * * This function ensures that the dataset dimensions are of at least * the sizes specified in size. The function H5Dset_extent() must be - * used if the dataset dimension sizes are are to be reduced. + * used if the dataset dimension sizes are to be reduced. * * \version 1.8.0 Function deprecated in this release. Parameter size * syntax changed to \Code{const hsize_t size[]} in this release. diff --git a/src/H5Fpublic.h b/src/H5Fpublic.h index 2b059c7..84f4218 100644 --- a/src/H5Fpublic.h +++ b/src/H5Fpublic.h @@ -379,7 +379,7 @@ H5_DLL hid_t H5Fcreate(const char *filename, unsigned flags, hid_t fcpl_id, hid_ * opened. * * The \p fapl_id parameter specifies the file access property list. - * Use of #H5P_DEFAULT specifies that default I/O access properties + * The use of #H5P_DEFAULT specifies that default I/O access properties * are to be used. * * The \p flags parameter specifies whether the file will be opened in @@ -489,7 +489,7 @@ H5_DLL hid_t H5Freopen(hid_t file_id); * \snippet H5F_examples.c flush * * \attention HDF5 does not possess full control over buffering. H5Fflush() - * flushes the internal HDF5 buffers then asks the operating system + * flushes the internal HDF5 buffers and then asks the operating system * (the OS) to flush the system buffers for the open files. After * that, the OS is responsible for ensuring that the data is * actually flushed to disk. @@ -516,7 +516,7 @@ H5_DLL herr_t H5Fflush(hid_t object_id, H5F_scope_t scope); * \snippet H5F_examples.c minimal * * \note \Bold{Delayed close:} Note the following deviation from the - * above-described behavior. If H5Fclose() is called for a file but one + * above-described behavior. If H5Fclose() is called for a file, but one * or more objects within the file remain open, those objects will remain * accessible until they are individually closed. Thus, if the dataset * \c data_sample is open when H5Fclose() is called for the file @@ -525,7 +525,7 @@ H5_DLL herr_t H5Fflush(hid_t object_id, H5F_scope_t scope); * automatically closed once all objects in the file have been closed.\n * Be warned, however, that there are circumstances where it is not * possible to delay closing a file. For example, an MPI-IO file close is - * a collective call; all of the processes that opened the file must + * a collective call; all of the processes that open the file must * close it collectively. 
The file cannot be closed at some time in the * future by each process in an independent fashion. Another example is * that an application using an AFS token-based file access privilege may @@ -1240,7 +1240,7 @@ H5_DLL herr_t H5Fstart_swmr_write(hid_t file_id); * \snippet this H5F_sect_info_t_snip * * This routine retrieves free-space section information for \p nsects - * sections or at most the maximum number of sections in the specified + * sections or, at most, the maximum number of sections in the specified * free-space manager. If the number of sections is not known, a * preliminary H5Fget_free_sections() call can be made by setting \p * sect_info to NULL and the total number of free-space sections for @@ -1506,7 +1506,7 @@ H5_DLL herr_t H5Fget_page_buffering_stats(hid_t file_id, unsigned accesses[2], u * \brief Obtains information about a cache image if it exists * * \file_id - * \param[out] image_addr Offset of the cache image if it exists, or #HADDR_UNDEF if it does not + * \param[out] image_addr Offset of the cache image if it exists, or HADDR_UNDEF if it does not * \param[out] image_size Length of the cache image if it exists, or 0 if it does not * \returns \herr_t * @@ -1548,7 +1548,7 @@ H5_DLL herr_t H5Fget_mdc_image_info(hid_t file_id, haddr_t *image_addr, hsize_t * file_id. This setting is used to inform the library to create * minimized dataset object headers when \c TRUE. * - * The setting's value is returned in the boolean pointer minimize. + * The setting's value is returned in the boolean pointer minimized. * * \since 1.10.5 * @@ -1747,7 +1747,7 @@ H5_DLL herr_t H5Fget_info1(hid_t obj_id, H5F_info1_t *file_info); /** * \ingroup H5F * - * \brief Sets thelatest version of the library to be used for writing objects + * \brief Sets the latest version of the library to be used for writing objects * * \file_id * \param[in] latest_format Latest format flag diff --git a/src/H5Tmodule.h b/src/H5Tmodule.h index 590a60c..58bbed6 100644 --- a/src/H5Tmodule.h +++ b/src/H5Tmodule.h @@ -734,7 +734,7 @@ * * * - * #H5T_NATIVE_LDOUBLE + * H5T_NATIVE_LDOUBLE * * * long double diff --git a/src/H5Tpublic.h b/src/H5Tpublic.h index 0c6d0c7..b47cfd4 100644 --- a/src/H5Tpublic.h +++ b/src/H5Tpublic.h @@ -2306,7 +2306,7 @@ H5_DLL htri_t H5Tis_variable_str(hid_t type_id); * * \li #H5T_NATIVE_FLOAT * \li #H5T_NATIVE_DOUBLE - * \li #H5T_NATIVE_LDOUBLE + * \li H5T_NATIVE_LDOUBLE * * \li #H5T_NATIVE_B8 * \li #H5T_NATIVE_B16 -- cgit v0.12
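
The Java library fix recorded above for GitHub issue #3056 comes down to switch fall-through: without a break at the end of a case block, control continues into the next label, which is how a supported Array[Array[Long]] reached the unsupported-type branch in HDFArray.arrayify and threw HDF5JavaException. The following minimal, self-contained Java sketch shows that behavior and the one-line fix; the class and method names (FallThroughDemo, describe) are illustrative only and are not part of the HDF5 sources.

    // Minimal illustration of switch fall-through (not the actual HDFArray code):
    // without a break, control continues from the matching case into the next label,
    // so even a supported type code ends up on the "unsupported" path.
    public class FallThroughDemo {
        static String describe(char typeCode, boolean withBreak) {
            StringBuilder result = new StringBuilder();
            switch (typeCode) {
                case 'L': {
                    result.append("handled Long[]");
                    if (withBreak)
                        break; // the one-line fix: leave the switch here
                    // without the break, execution falls through into default
                }
                default:
                    result.append(" -> unsupported Object type");
            }
            return result.toString();
        }

        public static void main(String[] args) {
            System.out.println(describe('L', false)); // prints: handled Long[] -> unsupported Object type
            System.out.println(describe('L', true));  // prints: handled Long[]
        }
    }

Ending every case block with an explicit break (or a comment marking intentional fall-through) keeps later additions, such as new boxed-type cases, from silently changing behavior in the way the release note describes.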