From 830bf561e1236308cdffe0c519c6e779ec0929e3 Mon Sep 17 00:00:00 2001 From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com> Date: Tue, 26 Sep 2023 08:46:15 -0500 Subject: Merge CMake,doxygen changes from develop,1.10 branches (#3578) * Merge CMake,doxygen changes from develop,1.10 branches * revert incorrect option in AT TS build * Use variable for ignore-eol usage. * Add last_test depends logic * Just print status on CMake below 3.14 --- .github/workflows/main.yml | 89 ++- .github/workflows/netcdf.yml | 3 + CMakeFilters.cmake | 2 +- CMakeInstallation.cmake | 8 +- config/cmake/fileCompareTest.cmake | 4 +- config/cmake/hdf5-config.cmake.in | 13 +- config/cmake/patch.xml | 11 - config/cmake/patch.xml.in | 11 + config/cmake/runTest.cmake | 4 + config/cmake/scripts/HDF5options.cmake | 6 +- config/sanitizer/README.md | 2 +- config/toolchain/build32.cmake | 2 +- config/toolchain/clang.cmake | 16 +- config/toolchain/crayle.cmake | 10 +- config/toolchain/gcc.cmake | 12 +- config/toolchain/mingw64.cmake | 2 +- config/toolchain/pgi.cmake | 12 +- doxygen/dox/ExamplesAPI.dox | 1010 ++++++++++++++++++++++++++++++++ doxygen/dox/GettingStarted.dox | 4 +- doxygen/dox/IntroHDF5.dox | 2 +- doxygen/dox/IntroParExamples.dox | 569 ++++++++++++++++++ doxygen/dox/IntroParHDF5.dox | 271 +++++++++ doxygen/dox/LearnBasics1.dox | 2 +- doxygen/img/pchunk_figa.gif | Bin 0 -> 2754 bytes doxygen/img/pchunk_figb.gif | Bin 0 -> 2094 bytes doxygen/img/pchunk_figc.gif | Bin 0 -> 3194 bytes doxygen/img/pchunk_figd.gif | Bin 0 -> 2984 bytes doxygen/img/pcont_hy_figa.gif | Bin 0 -> 3201 bytes doxygen/img/pcont_hy_figb.gif | Bin 0 -> 2450 bytes doxygen/img/pcont_hy_figc.gif | Bin 0 -> 3694 bytes doxygen/img/pcont_hy_figd.gif | Bin 0 -> 2723 bytes doxygen/img/ppatt_figa.gif | Bin 0 -> 2359 bytes doxygen/img/ppatt_figb.gif | Bin 0 -> 2431 bytes doxygen/img/ppatt_figc.gif | Bin 0 -> 2616 bytes doxygen/img/ppatt_figd.gif | Bin 0 -> 2505 bytes doxygen/img/preg_figa.gif | Bin 0 -> 2359 bytes doxygen/img/preg_figb.gif | Bin 0 -> 2033 bytes doxygen/img/preg_figc.gif | Bin 0 -> 3242 bytes doxygen/img/preg_figd.gif | Bin 0 -> 2367 bytes examples/CMakeTests.cmake | 4 +- java/examples/datasets/CMakeLists.txt | 2 +- tools/test/h5diff/CMakeTests.cmake | 4 +- tools/test/h5dump/CMakeTests.cmake | 8 +- tools/test/misc/CMakeTestsClear.cmake | 8 + 44 files changed, 2014 insertions(+), 77 deletions(-) delete mode 100644 config/cmake/patch.xml create mode 100644 config/cmake/patch.xml.in create mode 100644 doxygen/dox/ExamplesAPI.dox create mode 100644 doxygen/dox/IntroParExamples.dox create mode 100644 doxygen/dox/IntroParHDF5.dox create mode 100644 doxygen/img/pchunk_figa.gif create mode 100644 doxygen/img/pchunk_figb.gif create mode 100644 doxygen/img/pchunk_figc.gif create mode 100644 doxygen/img/pchunk_figd.gif create mode 100644 doxygen/img/pcont_hy_figa.gif create mode 100644 doxygen/img/pcont_hy_figb.gif create mode 100644 doxygen/img/pcont_hy_figc.gif create mode 100644 doxygen/img/pcont_hy_figd.gif create mode 100644 doxygen/img/ppatt_figa.gif create mode 100644 doxygen/img/ppatt_figb.gif create mode 100644 doxygen/img/ppatt_figc.gif create mode 100644 doxygen/img/ppatt_figd.gif create mode 100644 doxygen/img/preg_figa.gif create mode 100644 doxygen/img/preg_figb.gif create mode 100644 doxygen/img/preg_figc.gif create mode 100644 doxygen/img/preg_figd.gif diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e87b627..10b3a9c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -336,30 
+336,31 @@ jobs: - name: Dump matrix context run: echo '${{ toJSON(matrix) }}' - - name: Install CMake Dependencies (Linux) - run: sudo apt-get install ninja-build doxygen graphviz - if: matrix.os == 'ubuntu-latest' - - - name: Install Autotools Dependencies (Linux, serial) + # Only CMake need ninja-build, but we just install it unilaterally + # libssl, etc. are needed for the ros3 VFD + - name: Install Linux Dependencies run: | sudo apt update - sudo apt install automake autoconf libtool libtool-bin - sudo apt install gcc-12 g++-12 gfortran-12 + sudo apt-get install ninja-build doxygen graphviz sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev + sudo apt install gcc-12 g++-12 gfortran-12 echo "CC=gcc-12" >> $GITHUB_ENV echo "CXX=g++-12" >> $GITHUB_ENV echo "FC=gfortran-12" >> $GITHUB_ENV + if: matrix.os == 'ubuntu-latest' + + # CMake gets libaec from fetchcontent + - name: Install Autotools Dependencies (Linux) + run: | + sudo apt install automake autoconf libtool libtool-bin sudo apt install libaec0 libaec-dev - if: (matrix.generator == 'autogen') && (matrix.parallel != 'enable') + if: (matrix.generator == 'autogen') - name: Install Autotools Dependencies (Linux, parallel) run: | - sudo apt update - sudo apt install automake autoconf libtool libtool-bin sudo apt install openmpi-bin openmpi-common mpi-default-dev echo "CC=mpicc" >> $GITHUB_ENV echo "FC=mpif90" >> $GITHUB_ENV - sudo apt install libaec0 libaec-dev if: (matrix.generator == 'autogen') && (matrix.parallel == 'enable') - name: Install Dependencies (Windows) @@ -390,7 +391,19 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --${{ matrix.deprec_sym }}-deprecated-symbols --with-default-api-version=${{ matrix.default_api }} --enable-shared --${{ matrix.parallel }}-parallel --${{ matrix.cpp }}-cxx --${{ matrix.fortran }}-fortran --${{ matrix.java }}-java --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --${{ matrix.ros3_vfd }}-ros3-vfd --with-szlib=${{ matrix.szip }} + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure \ + --enable-build-mode=${{ matrix.build_mode.autotools }} \ + --${{ matrix.deprec_sym }}-deprecated-symbols \ + --with-default-api-version=${{ matrix.default_api }} \ + --enable-shared \ + --${{ matrix.parallel }}-parallel \ + --${{ matrix.cpp }}-cxx \ + --${{ matrix.fortran }}-fortran \ + --${{ matrix.java }}-java \ + --${{ matrix.mirror_vfd }}-mirror-vfd \ + --${{ matrix.direct_vfd }}-direct-vfd \ + --${{ matrix.ros3_vfd }}-ros3-vfd \ + --with-szlib=${{ matrix.szip }} shell: bash if: (matrix.generator == 'autogen') && ! 
(matrix.thread_safety.enabled) @@ -399,7 +412,15 @@ jobs: sh ./autogen.sh mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - ${{ matrix.flags }} $GITHUB_WORKSPACE/configure --enable-build-mode=${{ matrix.build_mode.autotools }} --enable-shared --enable-threadsafe --disable-hl --${{ matrix.mirror_vfd }}-mirror-vfd --${{ matrix.direct_vfd }}-direct-vfd --${{ matrix.ros3_vfd }}-ros3-vfd --with-szlib=${{ matrix.szip }} + ${{ matrix.flags }} $GITHUB_WORKSPACE/configure \ + --enable-build-mode=${{ matrix.build_mode.autotools }} \ + --enable-shared \ + --enable-threadsafe \ + --disable-hl \ + --${{ matrix.mirror_vfd }}-mirror-vfd \ + --${{ matrix.direct_vfd }}-direct-vfd \ + --${{ matrix.ros3_vfd }}-ros3-vfd \ + --with-szlib=${{ matrix.szip }} shell: bash if: (matrix.generator == 'autogen') && (matrix.thread_safety.enabled) @@ -411,7 +432,25 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} -DHDF5_BUILD_JAVA=${{ matrix.java }} -DHDF5_BUILD_DOC=${{ matrix.docs }} -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake \ + ${{ matrix.generator }} \ + -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} \ + -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} \ + -DBUILD_SHARED_LIBS=ON \ + -DHDF5_ENABLE_ALL_WARNINGS=ON \ + -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} \ + -DHDF5_BUILD_CPP_LIB:BOOL=${{ matrix.cpp }} \ + -DHDF5_BUILD_FORTRAN=${{ matrix.fortran }} \ + -DHDF5_BUILD_JAVA=${{ matrix.java }} \ + -DHDF5_BUILD_DOC=${{ matrix.docs }} \ + -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} \ + -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} \ + -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} \ + -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} \ + -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} \ + -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} \ + -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} \ + $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && ! 
(matrix.thread_safety.enabled) @@ -420,7 +459,27 @@ jobs: run: | mkdir "${{ runner.workspace }}/build" cd "${{ runner.workspace }}/build" - cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake ${{ matrix.generator }} -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_ALL_WARNINGS=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} -DHDF5_BUILD_CPP_LIB:BOOL=OFF -DHDF5_BUILD_FORTRAN:BOOL=OFF -DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_BUILD_HL_LIB:BOOL=OFF -DHDF5_BUILD_DOC=OFF -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} $GITHUB_WORKSPACE + cmake -C $GITHUB_WORKSPACE/config/cmake/cacheinit.cmake \ + ${{ matrix.generator }} \ + -DCMAKE_BUILD_TYPE=${{ matrix.build_mode.cmake }} \ + -DCMAKE_TOOLCHAIN_FILE=${{ matrix.toolchain }} \ + -DBUILD_SHARED_LIBS=ON \ + -DHDF5_ENABLE_ALL_WARNINGS=ON \ + -DHDF5_ENABLE_THREADSAFE:BOOL=ON \ + -DHDF5_ENABLE_PARALLEL:BOOL=${{ matrix.parallel }} \ + -DHDF5_BUILD_CPP_LIB:BOOL=OFF \ + -DHDF5_BUILD_FORTRAN:BOOL=OFF \ + -DHDF5_BUILD_JAVA:BOOL=OFF \ + -DHDF5_BUILD_HL_LIB:BOOL=OFF \ + -DHDF5_BUILD_DOC=OFF \ + -DBUILD_SZIP_WITH_FETCHCONTENT=${{ matrix.libaecfc }} \ + -DLIBAEC_USE_LOCALCONTENT=${{ matrix.localaec }} \ + -DBUILD_ZLIB_WITH_FETCHCONTENT=${{ matrix.zlibfc }} \ + -DZLIB_USE_LOCALCONTENT=${{ matrix.localzlib }} \ + -DHDF5_ENABLE_MIRROR_VFD:BOOL=${{ matrix.mirror_vfd }} \ + -DHDF5_ENABLE_DIRECT_VFD:BOOL=${{ matrix.direct_vfd }} \ + -DHDF5_ENABLE_ROS3_VFD:BOOL=${{ matrix.ros3_vfd }} \ + $GITHUB_WORKSPACE shell: bash if: (matrix.generator != 'autogen') && (matrix.thread_safety.enabled) diff --git a/.github/workflows/netcdf.yml b/.github/workflows/netcdf.yml index 0ec7541..5b1ebf2 100644 --- a/.github/workflows/netcdf.yml +++ b/.github/workflows/netcdf.yml @@ -14,6 +14,9 @@ on: - 'COPYING**' - '**.md' +permissions: + contents: read + # Using concurrency to cancel any in-progress job or run concurrency: group: ${{ github.workflow }}-${{ github.sha || github.event.pull_request.number }} diff --git a/CMakeFilters.cmake b/CMakeFilters.cmake index 200634e..3a1a0de 100644 --- a/CMakeFilters.cmake +++ b/CMakeFilters.cmake @@ -9,7 +9,7 @@ # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. 
# -option (USE_LIBAEC "Use AEC library as SZip Filter" OFF) +option (USE_LIBAEC "Use AEC library as SZip Filter" ON) option (USE_LIBAEC_STATIC "Use static AEC library " OFF) option (ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0) option (SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 1fb7714..3aa7981 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -390,7 +390,13 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) set(CPACK_WIX_PROPERTY_ARPURLINFOABOUT "${HDF5_PACKAGE_URL}") set(CPACK_WIX_PROPERTY_ARPHELPLINK "${HDF5_PACKAGE_BUGREPORT}") if (BUILD_SHARED_LIBS) - set(CPACK_WIX_PATCH_FILE "${HDF_RESOURCES_DIR}/patch.xml") + if (${HDF_CFG_NAME} MATCHES "Debug" OR ${HDF_CFG_NAME} MATCHES "Developer") + set (WIX_CMP_NAME "${HDF5_LIB_NAME}${CMAKE_DEBUG_POSTFIX}") + else () + set (WIX_CMP_NAME "${HDF5_LIB_NAME}") + endif () + configure_file (${HDF_RESOURCES_DIR}/patch.xml.in ${HDF5_BINARY_DIR}/patch.xml @ONLY) + set(CPACK_WIX_PATCH_FILE "${HDF5_BINARY_DIR}/patch.xml") endif () elseif (APPLE) list (APPEND CPACK_GENERATOR "STGZ") diff --git a/config/cmake/fileCompareTest.cmake b/config/cmake/fileCompareTest.cmake index 4a8dc09..f4c46f6 100644 --- a/config/cmake/fileCompareTest.cmake +++ b/config/cmake/fileCompareTest.cmake @@ -59,7 +59,7 @@ if (TEST_STRINGS STREQUAL "YES") endif () else () if (CMAKE_VERSION VERSION_LESS "3.14.0") - message (FATAL_ERROR "CANNOT get file size, file command SIZE not supported") + message (STATUS "CANNOT get file size, file command SIZE not supported") else () file (SIZE ${TEST_FOLDER}/${TEST_ONEFILE} TEST_ONE_SIZE) file (SIZE ${TEST_FOLDER}/${TEST_TWOFILE} TEST_TWO_SIZE) @@ -74,7 +74,7 @@ else () elseif (TEST_FUNCTION MATCHES "LTEQ") if (TEST_ONE_SIZE LESS_EQUAL TEST_TWO_SIZE) if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.15.0") - message (VERBOSES "Passed: The size of ${TEST_FOLDER}/${TEST_ONEFILE} was less or equal ${TEST_FOLDER}/${TEST_TWOFILE}") + message (VERBOSE "Passed: The size of ${TEST_FOLDER}/${TEST_ONEFILE} was less or equal ${TEST_FOLDER}/${TEST_TWOFILE}") endif () else () message (FATAL_ERROR "The size of ${TEST_FOLDER}/${TEST_ONEFILE} was NOT less or equal ${TEST_FOLDER}/${TEST_TWOFILE}") diff --git a/config/cmake/hdf5-config.cmake.in b/config/cmake/hdf5-config.cmake.in index 699db89..496d260 100644 --- a/config/cmake/hdf5-config.cmake.in +++ b/config/cmake/hdf5-config.cmake.in @@ -44,6 +44,7 @@ set (${HDF5_PACKAGE_NAME}_ENABLE_PLUGIN_SUPPORT @HDF5_ENABLE_PLUGIN_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_Z_LIB_SUPPORT @HDF5_ENABLE_Z_LIB_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_SZIP_SUPPORT @HDF5_ENABLE_SZIP_SUPPORT@) set (${HDF5_PACKAGE_NAME}_ENABLE_SZIP_ENCODING @HDF5_ENABLE_SZIP_ENCODING@) +set (${HDF5_PACKAGE_NAME}_ENABLE_ROS3_VFD @HDF5_ENABLE_ROS3_VFD@) set (${HDF5_PACKAGE_NAME}_BUILD_SHARED_LIBS @H5_ENABLE_SHARED_LIB@) set (${HDF5_PACKAGE_NAME}_BUILD_STATIC_LIBS @H5_ENABLE_STATIC_LIB@) set (${HDF5_PACKAGE_NAME}_PACKAGE_EXTLIBS @HDF5_PACKAGE_EXTLIBS@) @@ -51,7 +52,8 @@ set (${HDF5_PACKAGE_NAME}_EXPORT_LIBRARIES @HDF5_LIBRARIES_TO_EXPORT@) set (${HDF5_PACKAGE_NAME}_ARCHITECTURE "@CMAKE_GENERATOR_ARCHITECTURE@") set (${HDF5_PACKAGE_NAME}_TOOLSET "@CMAKE_GENERATOR_TOOLSET@") set (${HDF5_PACKAGE_NAME}_DEFAULT_API_VERSION "@DEFAULT_API_VERSION@") -set (${HDF5_PACKAGE_NAME}_PARALLEL_FILTERED_WRITES "@PARALLEL_FILTERED_WRITES@") +set (${HDF5_PACKAGE_NAME}_PARALLEL_FILTERED_WRITES @PARALLEL_FILTERED_WRITES@) +set 
(${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN "@HDF5_INSTALL_MOD_FORTRAN@") #----------------------------------------------------------------------------- # Dependencies @@ -67,6 +69,11 @@ if (${HDF5_PACKAGE_NAME}_ENABLE_PARALLEL) find_package(MPI QUIET REQUIRED) endif () +if (${HDF5_PACKAGE_NAME}_ENABLE_THREADSAFE) + set(THREADS_PREFER_PTHREAD_FLAG ON) + find_package(Threads QUIET REQUIRED) +endif () + if (${HDF5_PACKAGE_NAME}_BUILD_JAVA) set (${HDF5_PACKAGE_NAME}_JAVA_INCLUDE_DIRS @PACKAGE_CURRENT_BUILD_DIR@/lib/jarhdf5-@HDF5_VERSION_STRING@.jar @@ -143,14 +150,14 @@ foreach (comp IN LISTS ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS) list (REMOVE_ITEM ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS ${comp}) set (${HDF5_PACKAGE_NAME}_LIB_TYPE ${${HDF5_PACKAGE_NAME}_LIB_TYPE} ${comp}) - if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN) + if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN AND ${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN STREQUAL "SHARED") set (${HDF5_PACKAGE_NAME}_INCLUDE_DIR_FORTRAN "@PACKAGE_INCLUDE_INSTALL_DIR@/shared") endif () elseif (comp STREQUAL "static") list (REMOVE_ITEM ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS ${comp}) set (${HDF5_PACKAGE_NAME}_LIB_TYPE ${${HDF5_PACKAGE_NAME}_LIB_TYPE} ${comp}) - if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN) + if (${HDF5_PACKAGE_NAME}_BUILD_FORTRAN AND ${HDF5_PACKAGE_NAME}_INSTALL_MOD_FORTRAN STREQUAL "STATIC") set (${HDF5_PACKAGE_NAME}_INCLUDE_DIR_FORTRAN "@PACKAGE_INCLUDE_INSTALL_DIR@/static") endif () endif () diff --git a/config/cmake/patch.xml b/config/cmake/patch.xml deleted file mode 100644 index 1bdff3e..0000000 --- a/config/cmake/patch.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - diff --git a/config/cmake/patch.xml.in b/config/cmake/patch.xml.in new file mode 100644 index 0000000..d6843e1 --- /dev/null +++ b/config/cmake/patch.xml.in @@ -0,0 +1,11 @@ + + + + + diff --git a/config/cmake/runTest.cmake b/config/cmake/runTest.cmake index e26b8ea..b8abe92 100644 --- a/config/cmake/runTest.cmake +++ b/config/cmake/runTest.cmake @@ -122,6 +122,10 @@ if (NOT TEST_RESULT EQUAL TEST_EXPECT) file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM) message (STATUS "Output :\n${TEST_STREAM}") endif () + if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}.err") + file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM) + message (STATUS "Error Output :\n${TEST_STREAM}") + endif () endif () message (FATAL_ERROR "Failed: Test program ${TEST_PROGRAM} exited != ${TEST_EXPECT}.\n${TEST_ERROR}") endif () diff --git a/config/cmake/scripts/HDF5options.cmake b/config/cmake/scripts/HDF5options.cmake index 5267212..92bfd37 100644 --- a/config/cmake/scripts/HDF5options.cmake +++ b/config/cmake/scripts/HDF5options.cmake @@ -69,9 +69,9 @@ set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRIN ### disable using ext zlib #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF") -### enable using ext szip -#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=ON") -#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=ON") +### disable using ext szip +#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF") +#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF") #### package examples #### #set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-2.0.3-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}") diff --git a/config/sanitizer/README.md b/config/sanitizer/README.md index 
308f9c3..b33c100 100644 --- a/config/sanitizer/README.md +++ b/config/sanitizer/README.md @@ -304,4 +304,4 @@ file(GLOB_RECURSE CMAKE_FILES ) cmake_format(TARGET_NAME ${CMAKE_FILES}) -``` \ No newline at end of file +``` diff --git a/config/toolchain/build32.cmake b/config/toolchain/build32.cmake index a2566c3..f636ea8 100644 --- a/config/toolchain/build32.cmake +++ b/config/toolchain/build32.cmake @@ -42,7 +42,7 @@ elseif(MINGW) set (CMAKE_CROSSCOMPILING_EMULATOR wine) include_directories(/usr/${TOOLCHAIN_PREFIX}/include) - set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS On CACHE BOOL "Export windows symbols") + set (CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS On CACHE BOOL "Export windows symbols") else () set (CMAKE_SYSTEM_NAME Linux) diff --git a/config/toolchain/clang.cmake b/config/toolchain/clang.cmake index af176aa..2d35641 100644 --- a/config/toolchain/clang.cmake +++ b/config/toolchain/clang.cmake @@ -1,16 +1,16 @@ # Uncomment the following to use cross-compiling -#set(CMAKE_SYSTEM_NAME Linux) +#set (CMAKE_SYSTEM_NAME Linux) -set(CMAKE_COMPILER_VENDOR "clang") +set (CMAKE_COMPILER_VENDOR "clang") if(WIN32) - set(CMAKE_C_COMPILER clang-cl) - set(CMAKE_CXX_COMPILER clang-cl) + set (CMAKE_C_COMPILER clang-cl) + set (CMAKE_CXX_COMPILER clang-cl) else() - set(CMAKE_C_COMPILER clang) - set(CMAKE_CXX_COMPILER clang++) + set (CMAKE_C_COMPILER clang) + set (CMAKE_CXX_COMPILER clang++) endif() -set(CMAKE_EXPORT_COMPILE_COMMANDS ON) +set (CMAKE_EXPORT_COMPILE_COMMANDS ON) # the following is used if cross-compiling -set(CMAKE_CROSSCOMPILING_EMULATOR "") +set (CMAKE_CROSSCOMPILING_EMULATOR "") diff --git a/config/toolchain/crayle.cmake b/config/toolchain/crayle.cmake index bf7cf69..02df8ff 100644 --- a/config/toolchain/crayle.cmake +++ b/config/toolchain/crayle.cmake @@ -1,10 +1,10 @@ # The following line will use cross-compiling -set(CMAKE_SYSTEM_NAME Linux) +set (CMAKE_SYSTEM_NAME Linux) -set(CMAKE_COMPILER_VENDOR "CrayLinuxEnvironment") +set (CMAKE_COMPILER_VENDOR "CrayLinuxEnvironment") -set(CMAKE_C_COMPILER cc) -set(CMAKE_Fortran_COMPILER ftn) +set (CMAKE_C_COMPILER cc) +set (CMAKE_Fortran_COMPILER ftn) # the following is used if cross-compiling -set(CMAKE_CROSSCOMPILING_EMULATOR "") +set (CMAKE_CROSSCOMPILING_EMULATOR "") diff --git a/config/toolchain/gcc.cmake b/config/toolchain/gcc.cmake index c41d0ca..f0771ed 100644 --- a/config/toolchain/gcc.cmake +++ b/config/toolchain/gcc.cmake @@ -1,11 +1,11 @@ # Uncomment the following line and the correct system name to use cross-compiling -#set(CMAKE_SYSTEM_NAME Linux) +#set (CMAKE_SYSTEM_NAME Linux) -set(CMAKE_COMPILER_VENDOR "GCC") +set (CMAKE_COMPILER_VENDOR "GCC") -set(CMAKE_C_COMPILER cc) -set(CMAKE_CXX_COMPILER c++) -set(CMAKE_Fortran_COMPILER gfortran) +set (CMAKE_C_COMPILER cc) +set (CMAKE_CXX_COMPILER c++) +set (CMAKE_Fortran_COMPILER gfortran) # the following is used if cross-compiling -set(CMAKE_CROSSCOMPILING_EMULATOR "") +set (CMAKE_CROSSCOMPILING_EMULATOR "") diff --git a/config/toolchain/mingw64.cmake b/config/toolchain/mingw64.cmake index 1830488..1b13891 100644 --- a/config/toolchain/mingw64.cmake +++ b/config/toolchain/mingw64.cmake @@ -11,4 +11,4 @@ set (CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) set (CMAKE_CROSSCOMPILING_EMULATOR wine64) include_directories(/usr/${TOOLCHAIN_PREFIX}/include) -set(CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS On CACHE BOOL "Export windows symbols") +set (CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS On CACHE BOOL "Export windows symbols") diff --git a/config/toolchain/pgi.cmake b/config/toolchain/pgi.cmake index ec58cbb..ff2f048 100644 --- 
a/config/toolchain/pgi.cmake +++ b/config/toolchain/pgi.cmake @@ -1,11 +1,11 @@ # Uncomment the following to use cross-compiling -#set(CMAKE_SYSTEM_NAME Linux) +#set (CMAKE_SYSTEM_NAME Linux) -set(CMAKE_COMPILER_VENDOR "PGI") +set (CMAKE_COMPILER_VENDOR "PGI") -set(CMAKE_C_COMPILER pgcc) -set(CMAKE_CXX_COMPILER pgc++) -set(CMAKE_Fortran_COMPILER pgf90) +set (CMAKE_C_COMPILER pgcc) +set (CMAKE_CXX_COMPILER pgc++) +set (CMAKE_Fortran_COMPILER pgf90) # the following is used if cross-compiling -set(CMAKE_CROSSCOMPILING_EMULATOR "") +set (CMAKE_CROSSCOMPILING_EMULATOR "") diff --git a/doxygen/dox/ExamplesAPI.dox b/doxygen/dox/ExamplesAPI.dox new file mode 100644 index 0000000..8f88c4e --- /dev/null +++ b/doxygen/dox/ExamplesAPI.dox @@ -0,0 +1,1010 @@ +/** @page ExAPI Examples by API + +Navigate back: \ref index "Main" / \ref GettingStarted +
+ +\section sec_exapi_desc Examples Description +The C, FORTRAN and Java examples below point to the examples in the hdf5-examples github repository. Examples for older versions of HDF5 +are handled by setting the appropriate USE_API_xxx definition. HDF5-1.6 examples are in a "16"-named subdirectory. + +The Java examples are in the HDF5-1.10 source code, and the Java Object package examples are in the HDFView source. +Please note that you must comment out the "package" statement at the top when downloading a Java Object example individually. + +The MATLAB and Python examples were generously provided by a user and are not tested. + +Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), and Python (Low Level APIs). + +\subsection sec_exapi_dsets Datasets + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output | DDL
Set Space Allocation Time for Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_alloc.h5h5ex_d_alloc.tsth5ex_d_alloc.ddl
Read / Write Dataset using Fletcher32 Checksum Filter +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_checksum.h5h5ex_d_checksum.tsth5ex_d_checksum.ddl
Read / Write Chunked Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_chunk.h5h5ex_d_chunk.tsth5ex_d_chunk.ddl
Read / Write Compact Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_compact.h5h5ex_d_compact.tsth5ex_d_compact.ddl
Read / Write to External Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_extern.h5h5ex_d_extern.tsth5ex_d_extern.ddl
Read / Write Dataset w/ Fill Value +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_fillval.h5h5ex_d_fillval.tsth5ex_d_fillval.ddl
Read / Write GZIP Compressed Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_gzip.h5h5ex_d_gzip.tsth5ex_d_gzip.ddl
Read / Write Data by Hyperslabs +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_hyper.h5h5ex_d_hyper.tsth5ex_d_hyper.ddl
Read / Write Dataset with n-bit Filter +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_nbit.h5h5ex_d_nbit.tsth5ex_d_nbit.ddl
Read / Write Integer Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_rdwrc.h5h5ex_d_rdwrc.tsth5ex_d_rdwr.ddl
Read / Write Dataset w/ Shuffle Filter and GZIP Compression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_shuffle.h5h5ex_d_shuffle.tsth5ex_d_shuffle.ddl
Read / Write Dataset using Scale-Offset Filter (float) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_sofloat.h5h5ex_d_sofloat.tsth5ex_d_sofloat.ddl
Read / Write Dataset using Scale-Offset Filter (integer) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_soint.h5h5ex_d_soint.tsth5ex_d_soint.ddl
Read / Write Dataset using SZIP Compression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_szip.h5h5ex_d_szip.tsth5ex_d_szip.ddl
Read / Write Dataset using Data Transform Expression +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_transform.h5h5ex_d_transform.tsth5ex_d_transform.ddl
Read / Write Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimadd.h5h5ex_d_unlimadd.tsth5ex_d_unlimadd.ddl
Read / Write GZIP Compressed Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimgzip.h5h5ex_d_unlimgzip.tsth5ex_d_unlimgzip.ddl
Read / Write / Edit Unlimited Dimension Dataset +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_d_unlimmod.h5h5ex_d_unlimmod.tsth5ex_d_unlimmod.ddl
+ +\subsection sec_exapi_grps Groups + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output | DDL
Create "compact-or-indexed" Format Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_compact.h5h5ex_g_.tsth5ex_g_compact1.ddlh5ex_g_compact2.ddl
Track links in a Group by Creation Order +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_corder.h5h5ex_g_corder.tsth5ex_g_corder.ddl
Create / Open / Close a Group +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_create.h5h5ex_g_create.tsth5ex_g_create.ddl
Create Intermediate Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_intermediate.h5h5ex_g_intermediate.tsth5ex_g_intermediate.ddl
Iterate over Groups w/ H5Literate +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_iterate.h5h5ex_g_iterate.tsth5ex_g_iterate.ddl
Set Conditions to Convert between Compact and Dense Groups +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_phase.h5h5ex_g_phase.tsth5ex_g_phase.ddl
Recursively Traverse a File with H5Literate +C + FORTRAN +Java + JavaObj MATLAB PyHigh PyLow +h5ex_g_traverse.h5h5ex_g_traverse.tsth5ex_g_traverse.ddl
Recursively Traverse a File with H5Ovisit / H5Lvisit +C + FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_g_visit.h5h5ex_g_visit.tsth5ex_g_visit.ddl
+ +\subsection sec_exapi_dtypes Datatypes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output | DDL
Read / Write Array (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_arrayatt.h5h5ex_t_arrayatt.tsth5ex_t_arrayatt.ddl
Read / Write Array (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_array.h5h5ex_t_array.tsth5ex_t_array.ddl
Read / Write Bitfield (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_bitatt.h5h5ex_t_bitatt.tsth5ex_t_bitatt.ddl
Read / Write Bitfield (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_bit.h5h5ex_t_bit.tsth5ex_t_bit.ddl
Read / Write Compound (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_cmpdatt.h5h5ex_t_cmpdatt.tsth5ex_t_cmpdatt.ddl
Read / Write Compound (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_cmpd.h5h5ex_t_cmpd.tsth5ex_t_cmpd.ddl
Commit Named Datatype and Read Back +C + FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_commit.h5h5ex_t_commit.tsth5ex_t_commit.ddl
Convert Between Datatypes in Memory +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_convert.h5h5ex_t_convert.tsth5ex_t_convert.ddl
Read / Write Complex Compound (Attribute) +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_cpxcmpdatt.h5h5ex_t_cpxcmpdatt.tsth5ex_t_cpxcmpdatt.ddl
Read / Write Complex Compound (Dataset) +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_t_cpxcmpd.h5h5ex_t_cpxcmpd.tsth5ex_t_cpxcmpd.ddl
Read / Write Enumerated (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_enumatt.h5h5ex_t_enumatt.tsth5ex_t_enumatt.ddl
Read / Write Enumerated (Dataset) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_enum.h5h5ex_t_enum.tsth5ex_t_enum.ddl
Read / Write Floating Point (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_floatatt.h5h5ex_t_floatatt.tsth5ex_t_floatatt.ddl
Read / Write Floating Point (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_float.h5h5ex_t_float.tsth5ex_t_float.ddl
Read / Write Integer Datatype (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_intatt.h5h5ex_t_intatt.tsth5ex_t_intatt.ddl
Read / Write Integer Datatype (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_int.h5h5ex_t_int.tsth5ex_t_int.ddl
Read / Write Object References (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_objrefatt.h5h5ex_t_objrefatt.tsth5ex_t_objrefatt.ddl
Read / Write Object References (Dataset) +C +FORTRAN +Java + JavaObj + MATLAB PyHigh PyLow +h5ex_t_objref.h5h5ex_t_objref.tsth5ex_t_objref.ddl
Read / Write Opaque (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_opaqueatt.h5h5ex_t_opaqueatt.tsth5ex_t_opaqueatt.ddl
Read / Write Opaque (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_opaque.h5h5ex_t_opaque.tsth5ex_t_opaque.ddl
Read / Write Region References (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_regrefatt.h5h5ex_t_regrefatt.tsth5ex_t_regrefatt.ddl
Read / Write Region References (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_regref.h5h5ex_t_regref.tsth5ex_t_regref.ddl
Read / Write String (Attribute) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_stringatt.h5h5ex_t_stringatt.tsth5ex_t_stringatt.ddl
Read / Write String (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_string.h5h5ex_t_string.tsth5ex_t_string.ddl
Read / Write Variable Length (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlenatt.h5h5ex_t_vlenatt.tsth5ex_t_vlenatt.ddl
Read / Write Variable Length (Dataset) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlen.h5h5ex_t_vlen.tsth5ex_t_vlen.ddl
Read / Write Variable Length String (Attribute) +C +FORTRAN + Java JavaObj MATLAB PyHigh PyLow +h5ex_t_vlstringatt.h5h5ex_t_vlstringatt.tsth5ex_t_vlstringatt.ddl
Read / Write Variable Length String (Dataset) +C +FORTRAN +Java +JavaObj + MATLAB PyHigh PyLow +h5ex_t_vlstring.h5h5ex_t_vlstring.tsth5ex_t_vlstring.ddl
+ +\subsection sec_exapi_filts Filters + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output | DDL
Read / Write Dataset using Blosc Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_blosc.h5h5ex_d_blosc.tsth5ex_d_blosc.ddl
Read / Write Dataset using Bit Shuffle Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_bshuf.h5h5ex_d_bshuf.tsth5ex_d_bshuf.ddl
Read / Write Dataset using BZip2 Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_bzip2.h5h5ex_d_bzip2.tsth5ex_d_bzip2.ddl
Read / Write Dataset using JPEG Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_jpeg.h5h5ex_d_jpeg.tsth5ex_d_jpeg.ddl
Read / Write Dataset using LZ4 Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_lz4.h5h5ex_d_lz4.tsth5ex_d_lz4.ddl
Read / Write Dataset using LZF Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_lzf.h5h5ex_d_lzf.tsth5ex_d_lzf.ddl
Read / Write Dataset using MAFISC Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_mafisc.h5h5ex_d_mafisc.tsth5ex_d_mafisc.ddl
Read / Write Dataset using ZFP Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_zfp.h5h5ex_d_zfp.tsth5ex_d_zfp.ddl
Read / Write Dataset using ZStd Compression +C + FORTRAN Java JavaObj MATLAB PyHigh PyLow +h5ex_d_zstd.h5h5ex_d_zstd.tsth5ex_d_zstd.ddl
+ +\subsection sec_exapi_java Java General + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | Output
Create/Read/Write an Attribute +Java +JavaObj +HDF5AttributeCreate.txt
Create Datasets +Java +JavaObj +HDF5DatasetCreate.txt
Read/Write Datasets +Java +JavaObj +HDF5DatasetRead.txt
Create an Empty File +Java +JavaObj +HDF5FileCreate.txt
Retrieve the File Structure +Java + JavaObj +HDF5FileStructure.txt
Create Groups +Java +JavaObj +HDF5GroupCreate.txt
Select a Subset of a Dataset +Java + JavaObj +HDF5SubsetSelect.txt
Create Two Datasets Within Groups +Java +JavaObj +HDF5GroupDatasetCreate.txt
+ + +\subsection sec_exapi_par Parallel + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Feature | Languages | HDF5 File | Output
Creating and Accessing a File +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Creating and Accessing a Dataset +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Writing and Reading Contiguous Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Writing and Reading Regularly Spaced Data Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Writing and Reading Pattern Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Writing and Reading Chunk Hyperslabs +C +FORTRAN + MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Using the Subfiling VFD to Write a File Striped Across Multiple Subfiles +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Write to Datasets with Filters Applied +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
Collectively Write Datasets with Filters and Not All Ranks have Data +C + FORTRAN MATLAB PyHigh PyLow +ph5_.h5ph5_.tst
+ + +
+Navigate back: \ref index "Main" / \ref GettingStarted + +*/ diff --git a/doxygen/dox/GettingStarted.dox b/doxygen/dox/GettingStarted.dox index 29c5033..87f3566 100644 --- a/doxygen/dox/GettingStarted.dox +++ b/doxygen/dox/GettingStarted.dox @@ -50,10 +50,10 @@ Parallel HDF5, and the HDF5-1.10 VDS and SWMR new features: -Introduction to Parallel HDF5 +\ref IntroParHDF5 -A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @ref LearnBasics topic first. +A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @ref LearnBasics topic first. diff --git a/doxygen/dox/IntroHDF5.dox b/doxygen/dox/IntroHDF5.dox index 3ca7d00..2c25659 100644 --- a/doxygen/dox/IntroHDF5.dox +++ b/doxygen/dox/IntroHDF5.dox @@ -607,7 +607,7 @@ on the HDF-EOS Tools and Information Center pag \section secHDF5Examples Examples \li \ref LBExamples -\li Examples by API +\li \ref ExAPI \li Examples in the Source Code \li Other Examples diff --git a/doxygen/dox/IntroParExamples.dox b/doxygen/dox/IntroParExamples.dox new file mode 100644 index 0000000..3929106 --- /dev/null +++ b/doxygen/dox/IntroParExamples.dox @@ -0,0 +1,569 @@ +/** @page IntroParContHyperslab Writing by Contiguous Hyperslab + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
+ +This example shows how to write a contiguous buffer in memory to a contiguous hyperslab in a file. In this case, +each parallel process writes a contiguous hyperslab to the file. + +In the C example (figure a), each hyperslab in memory consists of an equal number of consecutive rows. In the FORTRAN +90 example (figure b), each hyperslab in memory consists of +an equal number of consecutive columns. This reflects the difference in the storage order for C and FORTRAN 90. + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html pcont_hy_figa.gif + +\image html pcont_hy_figb.gif +
+ +\section secIntroParContHyperslabC Writing a Contiguous Hyperslab in C +In this example, you have a dataset of 8 (rows) x 5 (columns) and each process writes an equal number +of rows to the dataset. The dataset hyperslab is defined as follows: +\code + count [0] = dimsf [0] / number_processes + count [1] = dimsf [1] +\endcode +where, +\code + dimsf [0] is the number of rows in the dataset + dimsf [1] is the number of columns in the dataset +\endcode +The offset for the hyperslab is different for each process: +\code + offset [0] = k * count[0] + offset [1] = 0 +\endcode +where, +\code + "k" is the process id number + count [0] is the number of rows written in each hyperslab + offset [1] = 0 indicates to start at the beginning of the row +\endcode + +The number of processes that you could use would be 1, 2, 4, or 8. The number of rows that would be written by each slab is as follows: + + + + + + + + + + + + + +
Processes | Size of count[0] (\# of rows)
1 | 8
2 | 4
4 | 2
8 | 1
+ +If using 4 processes, then process 1 would look like: + + + + +
+\image html pcont_hy_figc.gif +
+ +The code would look like the following: +\code + 71 /* + 72 * Each process defines dataset in memory and writes it to the hyperslab + 73 * in the file. + 74 */ + 75 count[0] = dimsf[0]/mpi_size; + 76 count[1] = dimsf[1]; + 77 offset[0] = mpi_rank * count[0]; + 78 offset[1] = 0; + 79 memspace = H5Screate_simple(RANK, count, NULL); + 80 + 81 /* + 82 * Select hyperslab in the file. + 83 */ + 84 filespace = H5Dget_space(dset_id); + 85 H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL); +\endcode + +Below is the example program: + + + + +
+hyperslab_by_row.c +
+ +If using this example with 4 processes, then, +\li Process 0 writes "10"s to the file. +\li Process 1 writes "11"s. +\li Process 2 writes "12"s. +\li Process 3 writes "13"s. + +The following is the output from h5dump for the HDF5 file created by this example using 4 processes: +\code +HDF5 "SDS_row.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 5 ) / ( 8, 5 ) } + DATA { + 10, 10, 10, 10, 10, + 10, 10, 10, 10, 10, + 11, 11, 11, 11, 11, + 11, 11, 11, 11, 11, + 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, + 13, 13, 13, 13, 13, + 13, 13, 13, 13, 13 + } + } +} +} +\endcode + + +\section secIntroParContHyperslabFort Writing a Contiguous Hyperslab in Fortran +In this example you have a dataset of 5 (rows) x 8 (columns). Since a contiguous hyperslab in Fortran 90 +consists of consecutive columns, each process will be writing an equal number of columns to the dataset. + +You would define the size of the hyperslab to write to the dataset as follows: +\code + count(1) = dimsf(1) + count(2) = dimsf(2) / number_of_processes +\endcode + +where, +\code + dimsf(1) is the number of rows in the dataset + dimsf(2) is the number of columns +\endcode + +The offset for the hyperslab dimension would be different for each process: +\code + offset (1) = 0 + offset (2) = k * count (2) +\endcode + +where, +\code + offset (1) = 0 indicates to start at the beginning of the column + "k" is the process id number + "count(2) is the number of columns to be written by each hyperslab +\endcode + +The number of processes that could be used in this example are 1, 2, 4, or 8. The number of +columns that could be written by each slab is as follows: + + + + + + + + + + + + + +
Processes | Size of count(2) (\# of columns)
1 | 8
2 | 4
4 | 2
8 | 1
+ +If using 4 processes, the offset and count parameters for Process 1 would look like: + + + + +
+\image html pcont_hy_figd.gif +
+ +The code would look like the following: +\code + 69 ! Each process defines dataset in memory and writes it to the hyperslab + 70 ! in the file. + 71 ! + 72 count(1) = dimsf(1) + 73 count(2) = dimsf(2)/mpi_size + 74 offset(1) = 0 + 75 offset(2) = mpi_rank * count(2) + 76 CALL h5screate_simple_f(rank, count, memspace, error) + 77 ! + 78 ! Select hyperslab in the file. + 79 ! + 80 CALL h5dget_space_f(dset_id, filespace, error) + 81 CALL h5sselect_hyperslab_f (filespace, H5S_SELECT_SET_F, offset, count, error) +\endcode + +Below is the F90 example program which illustrates how to write contiguous hyperslabs by column in Parallel HDF5: + + + + +
+hyperslab_by_col.F90 +
+ +If you run this program with 4 processes and look at the output with h5dump you will notice that the output is +much like the output shown above for the C example. This is because h5dump is written in C. The data would be +displayed in columns if it was printed using Fortran 90 code. + +
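
As a compact recap of the C case on this page, the hedged sketch below shows how the per-process offset and count feed into the hyperslab selection and a collective write. It is not one of the installed examples; dset_id, mpi_rank, mpi_size, and the data buffer are assumed to be set up as in hyperslab_by_row.c.
\code
/* Sketch only: dset_id, mpi_rank, mpi_size, and data are assumed to be
 * initialized as in hyperslab_by_row.c (8 x 5 dataset of native ints). */
hsize_t dimsf[2]  = {8, 5};
hsize_t count[2]  = {dimsf[0] / (hsize_t)mpi_size, dimsf[1]};
hsize_t offset[2] = {(hsize_t)mpi_rank * count[0], 0};

hid_t memspace  = H5Screate_simple(2, count, NULL);
hid_t filespace = H5Dget_space(dset_id);
H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);

/* Collective write of each rank's block of rows */
hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
H5Dwrite(dset_id, H5T_NATIVE_INT, memspace, filespace, dxpl, data);

H5Pclose(dxpl);
H5Sclose(memspace);
H5Sclose(filespace);
\endcode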
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParRegularSpaced Writing by Regularly Spaced Data + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
+ +In this case, each process writes data from a contiguous buffer into disconnected locations in the file, using a regular pattern. + +In C it is done by selecting a hyperslab in a file that consists of regularly spaced columns. In F90, it is done by selecting a +hyperslab in a file that consists of regularly spaced rows. + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html preg_figa.gif + +\image html preg_figb.gif +
+ +\section secIntroParRegularSpacedC Writing Regularly Spaced Columns in C +In this example, you have two processes that write to the same dataset, each writing to +every other column in the dataset. For each process the hyperslab in the file is set up as follows: +\code + 89 count[0] = 1; + 90 count[1] = dimsm[1]; + 91 offset[0] = 0; + 92 offset[1] = mpi_rank; + 93 stride[0] = 1; + 94 stride[1] = 2; + 95 block[0] = dimsf[0]; + 96 block[1] = 1; +\endcode + +The stride is 2 for dimension 1 to indicate that every other position along this +dimension will be written to. A stride of 1 indicates that every position along a dimension will be written to. + +For two processes, the mpi_rank will be either 0 or 1. Therefore: +\li Process 0 writes to even columns (0, 2, 4...) +\li Process 1 writes to odd columns (1, 3, 5...) + +The block size allows each process to write a column of data to every other position in the dataset. + + + + + +
+\image html preg_figc.gif +
+ +Below is an example program for writing hyperslabs by column in Parallel HDF5: + + + + +
+hyperslab_by_col.c +
+ +The following is the output from h5dump for the HDF5 file created by this example: +\code +HDF5 "SDS_col.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 6 ) / ( 8, 6 ) } + DATA { + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200, + 1, 2, 10, 20, 100, 200 + } + } +} +} +\endcode + + +\section secIntroParRegularSpacedFort Writing Regularly Spaced Rows in Fortran +In this example, you have two processes that write to the same dataset, each writing to every +other row in the dataset. For each process the hyperslab in the file is set up as follows: + + +You would define the size of the hyperslab to write to the dataset as follows: +\code + 83 ! Each process defines dataset in memory and writes it to + 84 ! the hyperslab in the file. + 85 ! + 86 count(1) = dimsm(1) + 87 count(2) = 1 + 88 offset(1) = mpi_rank + 89 offset(2) = 0 + 90 stride(1) = 2 + 91 stride(2) = 1 + 92 block(1) = 1 + 93 block(2) = dimsf(2) +\endcode + +The stride is 2 for dimension 1 to indicate that every other position along this dimension will +be written to. A stride of 1 indicates that every position along a dimension will be written to. + +For two process, the mpi_rank will be either 0 or 1. Therefore: +\li Process 0 writes to even rows (0, 2, 4 ...) +\li Process 1 writes to odd rows (1, 3, 5 ...) + +The block size allows each process to write a row of data to every other position in the dataset, +rather than just a point of data. + +The following shows the data written by Process 1 to the file: + + + + +
+\image html preg_figd.gif +
Below is the example program for writing hyperslabs by row in Parallel HDF5:
+hyperslab_by_row.F90 +
+ +The output for h5dump on the file created by this program will look like the output as shown above for the C example. This is +because h5dump is written in C. The data would be displayed in rows if it were printed using Fortran 90 code. + +
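
For quick reference, the C selection from earlier on this page boils down to the hedged sketch below, assuming two ranks and the 8 x 6 dataset shown in the h5dump output (so each rank owns three columns); filespace is assumed to come from H5Dget_space() on the opened dataset, and the collective write itself is done exactly as in hyperslab_by_col.c.
\code
/* Sketch only: two ranks, 8 x 6 dataset; rank 0 writes the even columns,
 * rank 1 the odd columns.  filespace is assumed from H5Dget_space(). */
hsize_t offset[2] = {0, (hsize_t)mpi_rank};
hsize_t stride[2] = {1, 2};   /* hit every other column            */
hsize_t count[2]  = {1, 3};   /* three strided column positions    */
hsize_t block[2]  = {8, 1};   /* each position is one full column  */

H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, stride, count, block);
\endcode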
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParPattern Writing by Pattern + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
+ +This is another example of writing data into disconnected locations in a file. Each process writes data from the contiguous +buffer into regularly scattered locations in the file. + +Each process defines a hyperslab in the file as described below and writes data to it. The C and Fortran 90 examples below +result in the same data layout in the file. + + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html ppatt_figa.gif + +\image html ppatt_figb.gif +
+ +The C and Fortran 90 examples use four processes to write the pattern shown above. Each process defines a hyperslab by: +\li Specifying a stride of 2 for each dimension, which indicates that you wish to write to every other position along a dimension. +\li Specifying a different offset for each process: + + + + + + + + + + + + + + +
C | Process 0 | Process 1 | Process 2 | Process 3
| offset[0] = 0 | offset[0] = 1 | offset[0] = 0 | offset[0] = 1
| offset[1] = 0 | offset[1] = 0 | offset[1] = 1 | offset[1] = 1
Fortran | Process 0 | Process 1 | Process 2 | Process 3
| offset(1) = 0 | offset(1) = 0 | offset(1) = 1 | offset(1) = 1
| offset(2) = 0 | offset(2) = 1 | offset(2) = 0 | offset(2) = 1
+\li Specifying the size of the slab to write. The count is the number of positions along a dimension to write to. If writing a 4 x 2 slab, +then the count would be: + + + + + + + + +
C | Fortran
count[0] = 4 | count(1) = 2
count[1] = 2 | count(2) = 4
+ +For example, the offset, count, and stride parameters for Process 2 would look like: + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html ppatt_figc.gif + +\image html ppatt_figd.gif +
+ +Below are example programs for writing hyperslabs by pattern in Parallel HDF5: + + + + + + + +
+hyperslab_by_pattern.c +
+hyperslab_by_pattern.F90 +
+ +The following is the output from h5dump for the HDF5 file created in this example: +\code +HDF5 "SDS_pat.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 4 ) / ( 8, 4 ) } + DATA { + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4, + 1, 3, 1, 3, + 2, 4, 2, 4 + } + } +} +} +\endcode +The h5dump utility is written in C so the output is in C order. + + +
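
As an illustration, the hedged C sketch below expresses the Process 2 selection described above for the 8 x 4 dataset; filespace is assumed to come from H5Dget_space() on the opened dataset, and the other three processes differ only in the offsets listed in the table above.
\code
/* Sketch only: Process 2 of four, 8 x 4 dataset. */
hsize_t offset[2] = {0, 1};   /* start at row 0, column 1      */
hsize_t stride[2] = {2, 2};   /* every other row and column    */
hsize_t count[2]  = {4, 2};   /* a 4 x 2 pattern of points     */

H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, stride, count, NULL);
\endcode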
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +@page IntroParChunk Writing by Chunk + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 +
+ +In this example each process writes a "chunk" of data to a dataset. The C and Fortran 90 +examples result in the same data layout in the file. + + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html pchunk_figa.gif + +\image html pchunk_figb.gif +
+ +For this example, four processes are used, and a 4 x 2 chunk is written to the dataset by each process. + +To do this, you would: +\li Use the block parameter to specify a chunk of size 4 x 2 (or 2 x 4 for Fortran). +\li Use a different offset (start) for each process, based on the chunk size: + + + + + + + + + + + + + + +
C | Process 0 | Process 1 | Process 2 | Process 3
| offset[0] = 0 | offset[0] = 0 | offset[0] = 4 | offset[0] = 4
| offset[1] = 0 | offset[1] = 2 | offset[1] = 0 | offset[1] = 2
Fortran | Process 0 | Process 1 | Process 2 | Process 3
| offset(1) = 0 | offset(1) = 2 | offset(1) = 0 | offset(1) = 2
| offset(2) = 0 | offset(2) = 0 | offset(2) = 4 | offset(2) = 4
+ +For example, the offset and block parameters for Process 2 would look like: + + + + + + + + +
Figure a: C Example | Figure b: Fortran Example
+\image html pchunk_figc.gif + +\image html pchunk_figd.gif +
Below are example programs for writing hyperslabs by chunk in Parallel HDF5:
+hyperslab_by_chunk.c +
+hyperslab_by_chunk.F90 +
+ +The following is the output from h5dump for the HDF5 file created in this example: +\code +HDF5 "SDS_chnk.h5" { +GROUP "/" { + DATASET "IntArray" { + DATATYPE H5T_STD_I32BE + DATASPACE SIMPLE { ( 8, 4 ) / ( 8, 4 ) } + DATA { + 1, 1, 2, 2, + 1, 1, 2, 2, + 1, 1, 2, 2, + 1, 1, 2, 2, + 3, 3, 4, 4, + 3, 3, 4, 4, + 3, 3, 4, 4, + 3, 3, 4, 4 + } + } +} +} +\endcode +The h5dump utility is written in C so the output is in C order. + +
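
As an illustration, the hedged C sketch below expresses the Process 2 selection described above for the 8 x 4 dataset; filespace is assumed to come from H5Dget_space() on the opened dataset, and the remaining processes use the offsets from the table above.
\code
/* Sketch only: Process 2 of four writes one 4 x 2 chunk of the 8 x 4 dataset. */
hsize_t offset[2] = {4, 0};   /* this rank's chunk starts at row 4, column 0 */
hsize_t count[2]  = {1, 1};   /* one block in each dimension                 */
hsize_t block[2]  = {4, 2};   /* each block is 4 rows by 2 columns           */

H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, block);
\endcode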
+Navigate back: \ref index "Main" / \ref GettingStarted / \ref IntroParHDF5 + +*/ diff --git a/doxygen/dox/IntroParHDF5.dox b/doxygen/dox/IntroParHDF5.dox new file mode 100644 index 0000000..1f04e96 --- /dev/null +++ b/doxygen/dox/IntroParHDF5.dox @@ -0,0 +1,271 @@ +/** @page IntroParHDF5 A Brief Introduction to Parallel HDF5 + +Navigate back: \ref index "Main" / \ref GettingStarted +
+ +If you are new to HDF5 please see the @ref LearnBasics topic first. + +\section sec_pintro_overview Overview of Parallel HDF5 (PHDF5) Design +There were several requirements that we had for Parallel HDF5 (PHDF5). These were: +\li Parallel HDF5 files had to be compatible with serial HDF5 files and sharable +between different serial and parallel platforms. +\li Parallel HDF5 had to be designed to have a single file image to all processes, +rather than having one file per process. Having one file per process can cause expensive +post processing, and the files are not usable by different processes. +\li A standard parallel I/O interface had to be portable to different platforms. + +With these requirements of HDF5 our initial target was to support MPI programming, but not +for shared memory programming. We had done some experimentation with thread-safe support +for Pthreads and for OpenMP, and decided to use these. + +Implementation requirements were to: +\li Not use Threads, since they were not commonly supported in 1998 when we were looking at this. +\li Not have a reserved process, as this might interfere with parallel algorithms. +\li Not spawn any processes, as this is not even commonly supported now. + +The following shows the Parallel HDF5 implementation layers. + + +\subsection subsec_pintro_prog Parallel Programming with HDF5 +This tutorial assumes that you are somewhat familiar with parallel programming with MPI (Message Passing Interface). + +If you are not familiar with parallel programming, here is a tutorial that may be of interest: +Tutorial on HDF5 I/O tuning at NERSC + +Some of the terms that you must understand in this tutorial are: + + +Parallel HDF5 opens a parallel file with a communicator. It returns a file handle to be used for future access to the file. + +All processes are required to participate in the collective Parallel HDF5 API. Different files can be opened using different communicators. + +Examples of what you can do with the Parallel HDF5 collective API: +\li File Operation: Create, open and close a file +\li Object Creation: Create, open, and close a dataset +\li Object Structure: Extend a dataset (increase dimension sizes) +\li Dataset Operations: Write to or read from a dataset +(Array data transfer can be collective or independent.) + +Once a file is opened by the processes of a communicator: +\li All parts of the file are accessible by all processes. +\li All objects in the file are accessible by all processes. +\li Multiple processes write to the same dataset. +\li Each process writes to an individual dataset. + +Please refer to the Supported Configuration Features Summary in the release notes for the current release +of HDF5 for an up-to-date list of the platforms that we support Parallel HDF5 on. + + +\subsection subsec_pintro_create_file Creating and Accessing a File with PHDF5 +The programming model for creating and accessing a file is as follows: +
    +
  1. Set up an access template object to control the file access mechanism.
  2. Open the file.
  3. Close the file.
+ +Each process of the MPI communicator creates an access template and sets it up with MPI parallel +access information. This is done with the #H5Pcreate call to obtain the file access property list +and the #H5Pset_fapl_mpio call to set up parallel I/O access. + +Following is example code for creating an access template in HDF5: +C +\code + 23 MPI_Comm comm = MPI_COMM_WORLD; + 24 MPI_Info info = MPI_INFO_NULL; + 25 + 26 /* + 27 * Initialize MPI + 28 */ + 29 MPI_Init(&argc, &argv); + 30 MPI_Comm_size(comm, &mpi_size); + 31 MPI_Comm_rank(comm, &mpi_rank); + 32 + 33 /* + 34 * Set up file access property list with parallel I/O access + 35 */ + 36 plist_id = H5Pcreate(H5P_FILE_ACCESS); 37 H5Pset_fapl_mpio(plist_id, comm, info); +\endcode + +Fortran +\code + 23 comm = MPI_COMM_WORLD + 24 info = MPI_INFO_NULL + 25 + 26 CALL MPI_INIT(mpierror) + 27 CALL MPI_COMM_SIZE(comm, mpi_size, mpierror) + 28 CALL MPI_COMM_RANK(comm, mpi_rank, mpierror) + 29 ! + 30 ! Initialize FORTRAN interface + 31 ! + 32 CALL h5open_f(error) + 33 + 34 ! + 35 ! Setup file access property list with parallel I/O access. + 36 ! + 37 CALL h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, error) 38 CALL h5pset_fapl_mpio_f(plist_id, comm, info, error) +\endcode + +The following example programs create an HDF5 file using Parallel HDF5: +C: file_create.c +F90: file_create.F90 + + +\subsection subsec_pintro_create_dset Creating and Accessing a Dataset with PHDF5 +The programming model for creating and accessing a dataset is as follows: +
    +
  1. Create or open a dataset in the file with a collective call to #H5Dcreate or #H5Dopen.
  2. Obtain a copy of the file transfer property list and set it to use collective or independent I/O.
+ +The following code demonstrates a collective write using Parallel HDF5: +C +\code + 95 /* + 96 * Create property list for collective dataset write. + 97 */ + 98 plist_id = H5Pcreate (H5P_DATASET_XFER); 99 H5Pset_dxpl_mpio (plist_id, H5FD_MPIO_COLLECTIVE); + 100 + 101 status = H5Dwrite (dset_id, H5T_NATIVE_INT, memspace, filespace, + 102 plist_id, data); +\endcode + +Fortran +\code + 108 ! Create property list for collective dataset write + 109 ! + 110 CALL h5pcreate_f (H5P_DATASET_XFER_F, plist_id, error) 111 CALL h5pset_dxpl_mpio_f (plist_id, H5FD_MPIO_COLLECTIVE_F, error) + 112 + 113 ! + 114 ! Write the dataset collectively. + 115 ! + 116 CALL h5dwrite_f (dset_id, H5T_NATIVE_INTEGER, data, dimsfi, error, & + 117 file_space_id = filespace, mem_space_id = memspace, xfer_prp = plist_id) +\endcode + +The following example programs create an HDF5 dataset using Parallel HDF5: +C: dataset.c +F90: dataset.F90 + + +\subsubsection subsec_pintro_hyperslabs Hyperslabs +The programming model for writing and reading hyperslabs is: +/li Each process defines the memory and file hyperslabs. +/li Each process executes a partial write/read call which is either collective or independent. + +The memory and file hyperslabs in the first step are defined with the #H5Sselect_hyperslab. + +The start (or offset), count, stride, and block parameters define the portion of the dataset +to write to. By changing the values of these parameters you can write hyperslabs with Parallel +HDF5 by contiguous hyperslab, by regularly spaced data in a column/row, by patterns, and by chunks: + + + + + + + + + + + + + + +
+\li @subpage IntroParContHyperslab +
+\li @subpage IntroParRegularSpaced +
+\li @subpage IntroParPattern +
+\li @subpage IntroParChunk +
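+
+As a hedged illustration of the start and count parameters for the contiguous-hyperslab case
+(the identifiers ROWS_PER_PROC, COLS, "hsdset", file_id, mpi_rank, and mpi_size are illustrative
+placeholders analogous to the earlier snippets, and error checking is omitted), each process
+below selects and collectively writes its own block of consecutive rows:
+\code
+#define ROWS_PER_PROC 2
+#define COLS          4
+
+hsize_t dims[2]   = {(hsize_t)mpi_size * ROWS_PER_PROC, COLS};
+hid_t   filespace = H5Screate_simple(2, dims, NULL);
+hid_t   dset_id   = H5Dcreate2(file_id, "hsdset", H5T_NATIVE_INT, filespace,
+                               H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+
+/* Each process defines its file hyperslab: start at its own group of rows and
+ * count ROWS_PER_PROC x COLS elements; stride and block default to 1 (NULL). */
+hsize_t start[2] = {(hsize_t)mpi_rank * ROWS_PER_PROC, 0};
+hsize_t count[2] = {ROWS_PER_PROC, COLS};
+H5Sselect_hyperslab(filespace, H5S_SELECT_SET, start, NULL, count, NULL);
+
+/* Matching memory dataspace and per-process data */
+hid_t memspace = H5Screate_simple(2, count, NULL);
+int   data[ROWS_PER_PROC][COLS];
+for (int i = 0; i < ROWS_PER_PROC; i++)
+    for (int j = 0; j < COLS; j++)
+        data[i][j] = mpi_rank;
+
+/* Collective partial write of this process's hyperslab */
+hid_t xfer_plist = H5Pcreate(H5P_DATASET_XFER);
+H5Pset_dxpl_mpio(xfer_plist, H5FD_MPIO_COLLECTIVE);
+H5Dwrite(dset_id, H5T_NATIVE_INT, memspace, filespace, xfer_plist, data);
+
+H5Pclose(xfer_plist);
+H5Sclose(memspace);
+H5Sclose(filespace);
+H5Dclose(dset_id);
+\endcode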
+ + +
+Navigate back: \ref index "Main" / \ref GettingStarted + +*/ diff --git a/doxygen/dox/LearnBasics1.dox b/doxygen/dox/LearnBasics1.dox index a9b6d0e..53c8e0a 100644 --- a/doxygen/dox/LearnBasics1.dox +++ b/doxygen/dox/LearnBasics1.dox @@ -642,7 +642,7 @@ See the programming example for an illustration of the use of these calls. \subsection subsecLBDsetCreateContent File Contents The contents of the file dset.h5 (dsetf.h5 for FORTRAN) are shown below: - +
-Contents of dset.h5 ( dsetf.h5)
+Contents of dset.h5 (dsetf.h5)
\image html imgLBDsetCreate.gif diff --git a/doxygen/img/pchunk_figa.gif b/doxygen/img/pchunk_figa.gif new file mode 100644 index 0000000..90b49c0 Binary files /dev/null and b/doxygen/img/pchunk_figa.gif differ diff --git a/doxygen/img/pchunk_figb.gif b/doxygen/img/pchunk_figb.gif new file mode 100644 index 0000000..c825fc3 Binary files /dev/null and b/doxygen/img/pchunk_figb.gif differ diff --git a/doxygen/img/pchunk_figc.gif b/doxygen/img/pchunk_figc.gif new file mode 100644 index 0000000..9975a87 Binary files /dev/null and b/doxygen/img/pchunk_figc.gif differ diff --git a/doxygen/img/pchunk_figd.gif b/doxygen/img/pchunk_figd.gif new file mode 100644 index 0000000..45da389 Binary files /dev/null and b/doxygen/img/pchunk_figd.gif differ diff --git a/doxygen/img/pcont_hy_figa.gif b/doxygen/img/pcont_hy_figa.gif new file mode 100644 index 0000000..1417d17 Binary files /dev/null and b/doxygen/img/pcont_hy_figa.gif differ diff --git a/doxygen/img/pcont_hy_figb.gif b/doxygen/img/pcont_hy_figb.gif new file mode 100644 index 0000000..a3b637b Binary files /dev/null and b/doxygen/img/pcont_hy_figb.gif differ diff --git a/doxygen/img/pcont_hy_figc.gif b/doxygen/img/pcont_hy_figc.gif new file mode 100644 index 0000000..91bab7d Binary files /dev/null and b/doxygen/img/pcont_hy_figc.gif differ diff --git a/doxygen/img/pcont_hy_figd.gif b/doxygen/img/pcont_hy_figd.gif new file mode 100644 index 0000000..2836b4f Binary files /dev/null and b/doxygen/img/pcont_hy_figd.gif differ diff --git a/doxygen/img/ppatt_figa.gif b/doxygen/img/ppatt_figa.gif new file mode 100644 index 0000000..5c86c93 Binary files /dev/null and b/doxygen/img/ppatt_figa.gif differ diff --git a/doxygen/img/ppatt_figb.gif b/doxygen/img/ppatt_figb.gif new file mode 100644 index 0000000..fe4e350 Binary files /dev/null and b/doxygen/img/ppatt_figb.gif differ diff --git a/doxygen/img/ppatt_figc.gif b/doxygen/img/ppatt_figc.gif new file mode 100644 index 0000000..aca8ef9 Binary files /dev/null and b/doxygen/img/ppatt_figc.gif differ diff --git a/doxygen/img/ppatt_figd.gif b/doxygen/img/ppatt_figd.gif new file mode 100644 index 0000000..e6c55c0 Binary files /dev/null and b/doxygen/img/ppatt_figd.gif differ diff --git a/doxygen/img/preg_figa.gif b/doxygen/img/preg_figa.gif new file mode 100644 index 0000000..0929bf4 Binary files /dev/null and b/doxygen/img/preg_figa.gif differ diff --git a/doxygen/img/preg_figb.gif b/doxygen/img/preg_figb.gif new file mode 100644 index 0000000..33e57fc Binary files /dev/null and b/doxygen/img/preg_figb.gif differ diff --git a/doxygen/img/preg_figc.gif b/doxygen/img/preg_figc.gif new file mode 100644 index 0000000..a4f98ff Binary files /dev/null and b/doxygen/img/preg_figc.gif differ diff --git a/doxygen/img/preg_figd.gif b/doxygen/img/preg_figd.gif new file mode 100644 index 0000000..fe345fb Binary files /dev/null and b/doxygen/img/preg_figd.gif differ diff --git a/examples/CMakeTests.cmake b/examples/CMakeTests.cmake index 30f73c4..e2cd826 100644 --- a/examples/CMakeTests.cmake +++ b/examples/CMakeTests.cmake @@ -117,8 +117,8 @@ if (H5_HAVE_PARALLEL AND HDF5_TEST_PARALLEL AND NOT WIN32) add_test (NAME MPI_TEST_EXAMPLES-${parallel_example} COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} ${NUMPROCS} ${MPIEXEC_PREFLAGS} $ ${MPIEXEC_POSTFLAGS}) else () add_test (NAME MPI_TEST_EXAMPLES-${parallel_example} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE};${MPIEXEC_NUMPROC_FLAG};${NUMPROCS};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS}" - -D "TEST_ARGS:STRING=" + -D 
"TEST_PROGRAM=${MPIEXEC_EXECUTABLE}" + -D "TEST_ARGS:STRING=${MPIEXEC_NUMPROC_FLAG};${NUMPROCS};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS}" -D "TEST_EXPECT=0" -D "TEST_SKIP_COMPARE=TRUE" -D "TEST_OUTPUT=${parallel_example}.out" diff --git a/java/examples/datasets/CMakeLists.txt b/java/examples/datasets/CMakeLists.txt index 6ed03ca..7542e8e 100644 --- a/java/examples/datasets/CMakeLists.txt +++ b/java/examples/datasets/CMakeLists.txt @@ -80,7 +80,7 @@ endforeach () if (BUILD_TESTING AND HDF5_TEST_EXAMPLES AND HDF5_TEST_SERIAL) get_property (target_name TARGET ${HDF5_JAVA_JNI_LIB_TARGET} PROPERTY OUTPUT_NAME) - set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$:${CMAKE_DEBUG_POSTFIX}>;") + set (CMD_ARGS "-Dhdf.hdf5lib.H5.loadLibraryName=${target_name}$<$,$>:${CMAKE_DEBUG_POSTFIX}>;") set (last_test "") foreach (example ${HDF_JAVA_EXAMPLES}) diff --git a/tools/test/h5diff/CMakeTests.cmake b/tools/test/h5diff/CMakeTests.cmake index 7e437af..ccb9380 100644 --- a/tools/test/h5diff/CMakeTests.cmake +++ b/tools/test/h5diff/CMakeTests.cmake @@ -422,8 +422,8 @@ add_test ( NAME MPI_TEST_H5DIFF-${resultfile} COMMAND "${CMAKE_COMMAND}" - -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE};${MPIEXEC_NUMPROC_FLAG};${MPIEXEC_MAX_NUMPROCS};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS}" - -D "TEST_ARGS:STRING=${ARGN}" + -D "TEST_PROGRAM=${MPIEXEC_EXECUTABLE}" + -D "TEST_ARGS:STRING=${MPIEXEC_NUMPROC_FLAG};${MPIEXEC_MAX_NUMPROCS};${MPIEXEC_PREFLAGS};$;${MPIEXEC_POSTFLAGS};${ARGN}" -D "TEST_FOLDER=${PROJECT_BINARY_DIR}/PAR/testfiles" -D "TEST_OUTPUT=${resultfile}.out" -D "TEST_EXPECT=0" diff --git a/tools/test/h5dump/CMakeTests.cmake b/tools/test/h5dump/CMakeTests.cmake index 0ae7bbd..cdc3be8 100644 --- a/tools/test/h5dump/CMakeTests.cmake +++ b/tools/test/h5dump/CMakeTests.cmake @@ -580,7 +580,7 @@ ) add_test ( NAME H5DUMP-${resultfile}-output-cmp - COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp ) set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES DEPENDS H5DUMP-${resultfile} @@ -645,7 +645,7 @@ ) add_test ( NAME H5DUMP-${resultfile}-output-cmp - COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp ) set_tests_properties (H5DUMP-${resultfile}-output-cmp PROPERTIES DEPENDS H5DUMP-${resultfile} @@ -653,7 +653,7 @@ ) add_test ( NAME H5DUMP-${resultfile}-output-cmp-ddl - COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${ddlfile}.txt ${ddlfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${ddlfile}.txt ${ddlfile}.exp ) set_tests_properties (H5DUMP-${resultfile}-output-cmp-ddl PROPERTIES DEPENDS H5DUMP-${resultfile}-output-cmp @@ -699,7 +699,7 @@ ) add_test ( NAME H5DUMP-output-cmp-${resultfile} - COMMAND ${CMAKE_COMMAND} -E compare_files --ignore-eol ${resultfile}.txt ${resultfile}.exp + COMMAND ${CMAKE_COMMAND} -E compare_files ${CMAKE_IGNORE_EOL} ${resultfile}.txt ${resultfile}.exp ) set_tests_properties (H5DUMP-output-cmp-${resultfile} PROPERTIES DEPENDS H5DUMP-output-${resultfile} diff --git a/tools/test/misc/CMakeTestsClear.cmake b/tools/test/misc/CMakeTestsClear.cmake index 5e307aa..a554972 100644 --- a/tools/test/misc/CMakeTestsClear.cmake +++ b/tools/test/misc/CMakeTestsClear.cmake @@ -99,6 +99,10 @@ -D "TEST_REFERENCE=${resultfile}.ddl" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) + if 
(last_test) + set_tests_properties (H5CLEAR_CMP-${testname} PROPERTIES DEPENDS ${last_test}) + endif () + set (last_test "H5CLEAR_CMP-${testname}") endif () endmacro () @@ -117,6 +121,10 @@ -D "TEST_ERRREF=${resultfile}.err" -P "${HDF_RESOURCES_DIR}/runTest.cmake" ) + if (last_test) + set_tests_properties (H5CLEAR_CMP-${testname} PROPERTIES DEPENDS ${last_test}) + endif () + set (last_test "H5CLEAR_CMP-${testname}") endif () endmacro () -- cgit v0.12