author     John Ravi <jjravi@ncsu.edu>    2020-09-28 18:47:52 (GMT)
committer  John Ravi <jjravi@ncsu.edu>    2020-09-28 18:47:52 (GMT)
commit     d674a5d6444d4c637650b690bc9c083ab23d75d6 (patch)
tree       3b4ce9d5db51b811c86034944cfbbedabef06ac3 /release_docs
parent     6062930447dcf43cef2d7901be6846fe512df95a (diff)
parent     8ee8b7abb55bc8d901e31b3eef9ba35abb352814 (diff)
Merge branch 'develop' into cu_dev    (inactive/cu_dev)
Diffstat (limited to 'release_docs')
-rw-r--r--   release_docs/INSTALL_CMake.txt    131
-rw-r--r--   release_docs/README_HDF5_CMake      1
-rw-r--r--   release_docs/RELEASE.txt          325
3 files changed, 443 insertions(+), 14 deletions(-)
diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt
index 5442f6c..478e2eb 100644
--- a/release_docs/INSTALL_CMake.txt
+++ b/release_docs/INSTALL_CMake.txt
@@ -65,6 +65,9 @@ External compression szip and zlib libraries:
SZip.tar.gz
ZLib.tar.gz
+External compression plugin libraries:
+ hdf5_plugins.tar.gz
+
Examples Source package:
HDF5Examples-1.14.x-Source.tar.gz
@@ -306,9 +309,28 @@ IV. Further considerations
TGZPATH:STRING="some_location"
where "some_location" is the URL or full path to the compressed
file and ext is the type of compression file. Also set CMAKE_BUILD_TYPE
- to the configuration type during configuration
+ to the configuration type during configuration. See the settings in the
+ config/cmake/cacheinit.cmake file HDF uses for testing.
+
+ 3. If you plan to use compression plugins:
+ A. Use source packages from a GIT server by adding the following CMake
+ options:
+ HDF5_ALLOW_EXTERNAL_SUPPORT:STRING="GIT"
+ PLUGIN_GIT_URL:STRING="http://some_location/plugins"
+ where "some_location" is the URL to the GIT repository. Also set
+ CMAKE_BUILD_TYPE to the configuration type.
- 3. If you are building on Apple Darwin platforms, you should add the
+ B. Use source packages from a compressed file by adding the following
+ CMake options:
+ HDF5_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ"
+ PLUGIN_TGZ_NAME:STRING="plugin_src.ext"
+ TGZPATH:STRING="some_location"
+ where "some_location" is the URL or full path to the compressed
+ file and ext is the type of compression file. Also set CMAKE_BUILD_TYPE
+ to the configuration type during configuration. See the settings in the
+ config/cmake/cacheinit.cmake file HDF uses for testing.
+
+ 4. If you are building on Apple Darwin platforms, you should add the
following options:
Compiler choice - use xcode by setting the ENV variables of CC and CXX
Shared fortran is not supported, build static:
@@ -318,11 +340,11 @@ IV. Further considerations
CTEST_USE_LAUNCHERS:BOOL=ON
CMAKE_BUILD_WITH_INSTALL_RPATH:BOOL=OFF
- 4. Windows developers should install NSIS or WiX to create an install image with CPack.
+ 5. Windows developers should install NSIS or WiX to create an install image with CPack.
Visual Studio Express users will not be able to package HDF5 into
an install image executable.
- 5. Developers can copy the config/cmake/cacheinit.cmake file and alter the
+ 6. Developers can copy the config/cmake/cacheinit.cmake file and alter
the settings for the developers' environment. Then the only options needed
on the command line are those options that are different. Example using HDF
default cache file:
@@ -330,7 +352,7 @@ IV. Further considerations
-DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF \
-DCMAKE_BUILD_TYPE:STRING=Release ..
- 6. CMake uses a toolchain of utilities to compile, link libraries and
+ 7. CMake uses a toolchain of utilities to compile, link libraries and
create archives, and other tasks to drive the build. The toolchain
utilities available are determined by the languages enabled. In normal
builds, CMake automatically determines the toolchain for host builds
@@ -481,6 +503,87 @@ These five steps are described in detail below.
set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE)
set (LIBAEC_PACKAGE_NAME "libaec" CACHE STRING "Name of AEC SZIP package" FORCE)
set (SZIP_PACKAGE_NAME "szip" CACHE STRING "Name of SZIP package" FORCE)
+ #######################
+ # filter plugin options
+ #######################
+ set (PLUGIN_TGZ_NAME "hdf5_plugins.tar.gz" CACHE STRING "Use PLUGINS from compressed file" FORCE)
+ set (PLUGIN_PACKAGE_NAME "pl" CACHE STRING "Name of PLUGIN package" FORCE)
+ ############
+ # bitshuffle
+ ###########
+ set (BSHUF_GIT_URL "https://git@bitbucket.hdfgroup.org/scm/test/bitshuffle.git" CACHE STRING "Use BSHUF from HDF repository" FORCE)
+ set (BSHUF_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (BSHUF_TGZ_NAME "bitshuffle.tar.gz" CACHE STRING "Use BSHUF from compressed file" FORCE)
+ set (BSHUF_PACKAGE_NAME "bshuf" CACHE STRING "Name of BSHUF package" FORCE)
+ #######
+ # blosc
+ #######
+ set (BLOSC_GIT_URL "https://github.com/Blosc/c-blosc.git" CACHE STRING "Use BLOSC from Github" FORCE)
+ set (BLOSC_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (BLOSC_TGZ_NAME "c-blosc.tar.gz" CACHE STRING "Use BLOSC from compressed file" FORCE)
+ set (BLOSC_PACKAGE_NAME "blosc" CACHE STRING "Name of BLOSC package" FORCE)
+ set (ZLIB_GIT_URL "https://git@bitbucket.hdfgroup.org/scm/test/zlib.git" CACHE STRING "Use ZLIB from HDF repo" FORCE)
+ set (ZLIB_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (ZLIB_TGZ_NAME "ZLib.tar.gz" CACHE STRING "Use ZLib from compressed file" FORCE)
+ set (ZLIB_PACKAGE_NAME "zlib" CACHE STRING "Name of ZLIB package" FORCE)
+ #######
+ # bzip2
+ ######
+ #
+ set (BZ2_GIT_URL "https://git@bitbucket.hdfgroup.org/scm/test/bzip2.git" CACHE STRING "Use BZ2 from HDF repository" FORCE)
+ set (BZ2_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (BZ2_TGZ_NAME "BZ2.tar.gz" CACHE STRING "Use BZ2 from compressed file" FORCE)
+ set (BZ2_PACKAGE_NAME "bz2" CACHE STRING "Name of BZ2 package" FORCE)
+ #######
+ # fpzip
+ #######
+ set (FPZIP_GIT_URL "https://github.com/LLNL/fpzip" CACHE STRING "Use FPZIP from github repository" FORCE)
+ set (FPZIP_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (FPZIP_TGZ_NAME "fpzip.tar.gz" CACHE STRING "Use FPZIP from compressed file" FORCE)
+ set (FPZIP_PACKAGE_NAME "fpzip" CACHE STRING "Name of FPZIP package" FORCE)
+ ######
+ # jpeg
+ ######
+ set (JPEG_GIT_URL "https://git@bitbucket.hdfgroup.org/scm/test/jpeg.git" CACHE STRING "Use JPEG from HDF repository" FORCE)
+ set (JPEG_GIT_BRANCH "jpeg9c" CACHE STRING "" FORCE)
+ #set (JPEG_TGZ_NAME "JPEG9c.tar.gz" CACHE STRING "Use JPEG from compressed file" FORCE)
+ set (JPEG_TGZ_NAME "JPEG.tar.gz" CACHE STRING "Use JPEG from compressed file" FORCE)
+ set (JPEG_PACKAGE_NAME "jpeg" CACHE STRING "Name of JPEG package" FORCE)
+ ######
+ # lz4
+ ######
+ set (BUILD_LZ4_LIBRARY_SOURCE ON CACHE BOOL "build the lz4 library within the plugin" FORCE)
+ set (LZ4_GIT_URL "https://git@bitbucket.hdfgroup.org/scm/test/lz4.git" CACHE STRING "Use LZ4 from HDF repository" FORCE)
+ set (LZ4_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (LZ4_TGZ_NAME "lz4.tar.gz" CACHE STRING "Use LZ4 from compressed file" FORCE)
+ set (LZ4_PACKAGE_NAME "lz4" CACHE STRING "Name of LZ4 package" FORCE)
+ ######
+ # lzf
+ ######
+ set (LZF_GIT_URL "https://git@bitbucket.hdfgroup.org/scm/test/lzf.git" CACHE STRING "Use LZF from HDF repository" FORCE)
+ set (LZF_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (LZF_TGZ_NAME "lzf.tar.gz" CACHE STRING "Use LZF from compressed file" FORCE)
+ set (LZF_PACKAGE_NAME "lzf" CACHE STRING "Name of LZF package" FORCE)
+ ########
+ # mafisc
+ ########
+ #set (BUILD_MAFISC_LIBRARY_SOURCE OFF CACHE BOOL "build the mafisc library within the plugin" FORCE)
+ #set (MAFISC_PACKAGE_NAME "mafisc" CACHE STRING "Name of MAFISC package" FORCE)
+ ######
+ # sz
+ ######
+ set (SZ_GIT_URL "https://github.com/disheng222/SZ" CACHE STRING "Use SZ from github repository" FORCE)
+ set (SZ_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (SZ_TGZ_NAME "sz.tar.gz" CACHE STRING "Use SZ from compressed file" FORCE)
+ set (SZ_PACKAGE_NAME "sz" CACHE STRING "Name of SZ package" FORCE)
+ ######
+ # zfp
+ ######
+ set (ZFP_GIT_URL "https://github.com/LLNL/zfp.git" CACHE STRING "Use ZFP from Github" FORCE)
+ set (ZFP_GIT_BRANCH "master" CACHE STRING "" FORCE)
+ set (ZFP_TGZ_NAME "zfp.tar.gz" CACHE STRING "Use ZFP from compressed file" FORCE)
+ set (ZFP_PACKAGE_NAME "zfp" CACHE STRING "Name of ZFP package" FORCE)
+
2. Configure the cache settings
@@ -532,7 +635,7 @@ These five steps are described in detail below.
file in your build directory. Be sure to select either Debug or
Release and build the solution.
- 3.2.1 The external libraries (zlib and szip) can be configured
+ 3.2.1 The external libraries (zlib, szip and plugins) can be configured
to allow building the libraries by downloading from a GIT repository.
The option is 'HDF5_ALLOW_EXTERNAL_SUPPORT'; by adding the following
configuration option:
@@ -541,10 +644,11 @@ These five steps are described in detail below.
The options to control the GIT URL (config/cmake/cacheinit.cmake file) are:
ZLIB_GIT_URL:STRING="http://${git_url}/zlib"
SZIP_GIT_URL:STRING="http://${git_url}/szip"
+ PLUGIN_GIT_URL:STRING="http://${git_url}/plugin"
${git_url} should be changed to your location. Also define CMAKE_BUILD_TYPE
to be the configuration type.
- 3.2.2 Or the external libraries (zlib and szip) can be configured
+ 3.2.2 Or the external libraries (zlib, szip and plugins) can be configured
to allow building the libraries by using a compressed file.
The option is 'HDF5_ALLOW_EXTERNAL_SUPPORT' and is enabled by
adding the following configuration option:
@@ -554,7 +658,8 @@ These five steps are described in detail below.
file) are:
ZLIB_TGZ_NAME:STRING="zlib_src.ext"
SZIP_TGZ_NAME:STRING="szip_src.ext"
- LIBAEC_TGZ_NAME:STRING="liaec_src.ext"
+ LIBAEC_TGZ_NAME:STRING="libaec_src.ext"
+ PLUGIN_TGZ_NAME:STRING="plugin_src.ext"
TGZPATH:STRING="some_location"
where "some_location/xxxx_src.ext" is the URL or full path to
the compressed file and where ext is the type of the compression
@@ -624,6 +729,7 @@ The config/cmake/cacheinit.cmake file can override the following values.
---------------- General Build Options ---------------------
BUILD_SHARED_LIBS "Build Shared Libraries" ON
+BUILD_STATIC_LIBS "Build Static Libraries" ON
BUILD_STATIC_EXECS "Build Static Executables" OFF
BUILD_TESTING "Build HDF5 Unit Testing" ON
@@ -636,6 +742,7 @@ HDF5_BUILD_HL_LIB "Build HIGH Level HDF5 Library" ON
HDF5_BUILD_TOOLS "Build HDF5 Tools" ON
---------------- HDF5 Advanced Options ---------------------
+ONLY_SHARED_LIBS "Only Build Shared Libraries" OFF
ALLOW_UNSUPPORTED "Allow unsupported combinations of configure options" OFF
HDF5_EXTERNAL_LIB_PREFIX "Use prefix for custom library naming." ""
HDF5_DISABLE_COMPILER_WARNINGS "Disable compiler warnings" OFF
@@ -676,11 +783,19 @@ if (CMAKE_BUILD_TYPE MATCHES Debug)
HDF5_ENABLE_INSTRUMENT "Instrument The library" OFF
if (HDF5_TEST_VFD)
HDF5_TEST_FHEAP_VFD "Execute fheap test with different VFDs" ON
+if (HDF5_BUILD_FORTRAN)
+ HDF5_INSTALL_MOD_FORTRAN "Copy FORTRAN mod files to include directory (NO SHARED STATIC)" "XX"
+ if (BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED
+ if (BUILD_SHARED_LIBS AND NOT BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED
+ if (NOT BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is STATIC
+ if (NOT BUILD_SHARED_LIBS AND NOT BUILD_STATIC_LIBS) default HDF5_INSTALL_MOD_FORTRAN is SHARED
---------------- External Library Options ---------------------
HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building (NO GIT TGZ)" "NO"
+HDF5_ENABLE_PLUGIN_SUPPORT "Enable PLUGIN Filters" OFF
HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF
HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" OFF
+PLUGIN_USE_EXTERNAL "Use External Library Building for PLUGINS" 0
ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0
SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0
if (HDF5_ENABLE_SZIP_SUPPORT)
diff --git a/release_docs/README_HDF5_CMake b/release_docs/README_HDF5_CMake
index 0584037..cf0ab6f 100644
--- a/release_docs/README_HDF5_CMake
+++ b/release_docs/README_HDF5_CMake
@@ -9,6 +9,7 @@ This tar file contains
hdf5-1.13.0 HDF5 1.13.0 source
LIBAEC.tar.gz source for building SZIP replacement
ZLib.tar.gz source for building ZLIB
+ hdf5_plugins.tar.gz source for building compression plugins
For more information about building HDF5 with CMake, see USING_HDF5_CMake.txt in
hdf5-1.13.0/release_docs, or
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 665a156..1c5d96d 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -48,6 +48,56 @@ New Features
Configuration:
-------------
+ - CMake option to build the HDF filter plugins project as an external project
+
+ The HDF filter plugins project is a collection of registered compression
+ filters that can be dynamically loaded when needed to access data stored
+ in an HDF5 file. This CMake-only option allows the plugins to be built and
+ distributed with the HDF5 library and tools. Like the options for szip and
+ zlib, either a tgz file or a git repository can be specified for the source.
+
+ The necessary options are (see the INSTALL_CMake.txt file):
+ HDF5_ENABLE_PLUGIN_SUPPORT
+ PLUGIN_TGZ_NAME or PLUGIN_GIT_URL
+ Additional options are necessary for the various filters; refer to the
+ plugin project documentation.
+
+ (ADB - 2020/09/27, OESS-98)
+
+ - Added CMake option to format source files
+
+ HDF5_ENABLE_FORMATTERS option will enable creation of targets using the
+ pattern - HDF5_*_SRC_FORMAT - where * corresponds to the source folder
+ or tool folder. All sources can be formatted by executing the format target:
+ make format
+
+ (ADB - 2020/08/24)
+
+ - CMake option to link the generated Fortran MOD files into the include
+ directory.
+
+ The MOD files generated by a Fortran compiler can differ between SHARED
+ and STATIC compiles, and between different compilers and platforms.
+ Note that different versions of Fortran compilers have been found to
+ produce incompatible MOD
+ files. Currently, CMake will locate these MOD files in subfolders of
+ the include directory and add that path to the Fortran library target
+ in the CMake config file, which can be used by the CMake find library
+ process. For other build systems using the binary from a CMake install,
+ a new CMake configuration can be used to copy the pre-chosen version
+ of the Fortran MOD files into the install include directory.
+
+ The default depends on the configuration of BUILD_STATIC_LIBS and
+ BUILD_SHARED_LIBS:
+     BUILD_STATIC_LIBS  BUILD_SHARED_LIBS  Default
+     YES                YES                SHARED
+     YES                NO                 STATIC
+     NO                 YES                SHARED
+     NO                 NO                 SHARED
+ The defaults can be overridden by setting the config option
+ HDF5_INSTALL_MOD_FORTRAN to one of NO, SHARED, or STATIC.
+
+ (ADB - 2020/07/9, HDFFV-11116)
+
- CMake option to use AEC (open source SZip) library instead of SZip
The open source AEC library is a replacement library for SZip. In
@@ -305,9 +355,102 @@ New Features
(ADB - 2018/07/16)
+ - Add file locking configure and CMake options
+
+ HDF5 1.10.0 introduced a file locking scheme, primarily to help
+ enforce SWMR setup. Formerly, the only user-level control of the scheme
+ was via the HDF5_USE_FILE_LOCKING environment variable.
+
+ This change introduces configure-time options that control whether
+ or not file locking will be used and whether or not the library
+ ignores errors when locking has been disabled on the file system
+ (useful on some HPC Lustre installations).
+
+ In both the Autotools and CMake, the settings have the effect of changing
+ the default property list settings (see the H5Pset/get_file_locking()
+ entry, below).
+
+ The yes/no/best-effort file locking configure setting has also been
+ added to the libhdf5.settings file.
+
+ Autotools:
+
+ An --enable-file-locking=(yes|no|best-effort) option has been added.
+
+ yes: Use file locking.
+ no: Do not use file locking.
+ best-effort: Use file locking and ignore "disabled" errors.
+
+ CMake:
+
+ Two self-explanatory options have been added:
+
+ HDF5_USE_FILE_LOCKING
+ HDF5_IGNORE_DISABLED_FILE_LOCKS
+
+ Setting both of these to ON is equivalent to the Autotools'
+ best-effort setting.
+
+ NOTE:
+ The precedence order of the various file locking control mechanisms is:
+
+ 1) HDF5_USE_FILE_LOCKING environment variable (highest)
+
+ 2) H5Pset_file_locking()
+
+ 3) configure/CMake options (which set the property list defaults)
+
+ 4) library defaults (currently best-effort)
+
+ (DER - 2020/07/30, HDFFV-11092)
+
Library:
--------
+ - Add new public function H5Ssel_iter_reset
+
+ This function resets a dataspace selection iterator back to an
+ initial state so that it may be used for iteration once more.
+ This can be useful when needing to iterate over a selection
+ multiple times without having to repeatedly create/destroy
+ a selection iterator for that dataspace selection.
+
+ (JTH - 2020/09/18)
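
    A minimal C sketch of how the new call might be used, assuming an integer
    element size and leaving the actual iteration passes as placeholders:

        #include "hdf5.h"

        static void iterate_twice(hid_t space)
        {
            /* Element size matches the data being iterated (int assumed here) */
            hid_t iter = H5Ssel_iter_create(space, sizeof(int), 0);

            /* ... first pass, e.g. via H5Ssel_iter_get_seq_list() ... */

            /* Rewind the iterator instead of closing and re-creating it */
            H5Ssel_iter_reset(iter, space);

            /* ... second pass over the same selection ... */

            H5Ssel_iter_close(iter);
        }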
+
+ - Remove HDFS VFD stubs
+
+ The original implementation of the HDFS VFD included non-functional
+ versions of the following public API calls when the HDFS VFD is
+ not built as a part of the HDF5 library:
+
+ * H5FD_hdfs_init()
+ * H5Pget_fapl_hdfs()
+ * H5Pset_fapl_hdfs()
+
+ They will remain present in HDF5 1.10 and HDF5 1.12 releases
+ for binary compatibility purposes but have been removed as of 1.14.0.
+
+ Note that this has nothing to do with the real HDFS VFD API calls
+ that are fully functional when the HDFS VFD is configured and built.
+
+ We simply changed:
+
+ #ifdef LIBHDFS
+ <real API call>
+ #else
+ <useless stub>
+ #endif
+
+ to:
+
+ #ifdef LIBHDFS
+ <real API call>
+ #endif
+
+ This is how the other optional VFDs are handled.
+
+ (DER - 2020/08/27)
+
- Add Mirror VFD
Use TCP/IP sockets to perform write-only (W/O) file I/O on a remote
@@ -482,6 +625,32 @@ New Features
(DER - 2020/03/18, HDFFV-11057)
+ - Add BEST_EFFORT value to HDF5_USE_FILE_LOCKING environment variable
+
+ This change adds a BEST_EFFORT value to the TRUE/FALSE, 1/0 settings
+ that were previously accepted. This option turns on file locking but
+ ignores locking errors when the library detects that file locking
+ has been disabled on a file system (useful on some HPC Lustre
+ installations).
+
+ The capitalization of BEST_EFFORT is mandatory.
+
+ See the configure option discussion for HDFFV-11092 (above) for more
+ information on the file locking feature and how it's controlled.
+
+ (DER - 2020/07/30, HDFFV-11092)
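
    The variable is normally set in the shell; a minimal C sketch, assuming a
    POSIX setenv() and a hypothetical file name, would be:

        #include <stdlib.h>
        #include "hdf5.h"

        int main(void)
        {
            /* Must be set before the library opens the file; the environment
             * variable overrides the property-list settings */
            setenv("HDF5_USE_FILE_LOCKING", "BEST_EFFORT", 1);

            hid_t file = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
            if (file >= 0)
                H5Fclose(file);
            return 0;
        }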
+
+
+ - Add H5Pset/get_file_locking() API calls
+
+ This change adds new API calls which can be used to set or get the
+ file locking parameters. The single API call sets both the "use file
+ locking" flag and the "ignore disabled file locking" flag.
+
+ See the configure option discussion for HDFFV-11092 (above) for more
+ information on the file locking feature and how it's controlled.
+
+ (DER - 2020/07/30, HDFFV-11092)
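
    A minimal C sketch, assuming a caller-supplied file name; it enables both
    flags (the "best-effort" combination) and reads the settings back:

        #include "hdf5.h"

        static hid_t open_best_effort(const char *name)
        {
            hid_t   fapl = H5Pcreate(H5P_FILE_ACCESS);
            hbool_t use_locking, ignore_disabled;

            /* Use file locking, but ignore errors when locking is
             * disabled on the file system */
            H5Pset_file_locking(fapl, 1, 1);
            H5Pget_file_locking(fapl, &use_locking, &ignore_disabled);

            hid_t file = H5Fopen(name, H5F_ACC_RDONLY, fapl);
            H5Pclose(fapl);
            return file;
        }
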
Parallel Library:
-----------------
@@ -512,11 +681,21 @@ New Features
(MSB, 2019/01/08, HDFFV-10443)
+ - Add wrappers for H5Pset/get_file_locking() API calls
+
+ h5pget_file_locking_f()
+ h5pset_file_locking_f()
+
+ See the configure option discussion for HDFFV-11092 (above) for more
+ information on the file locking feature and how it's controlled.
+
+ (DER - 2020/07/30, HDFFV-11092)
+
C++ Library:
------------
- Added new wrappers for H5Pset/get_create_intermediate_group()
- LinkCreatPropList::setCreateIntermediateGroup()
LinkCreatPropList::getCreateIntermediateGroup()
+ LinkCreatPropList::setCreateIntermediateGroup()
(BMR - 2019/04/22, HDFFV-10622)
@@ -525,6 +704,16 @@ New Features
(BMR - 2019/02/14, HDFFV-10532)
+ - Add wrappers for H5Pset/get_file_locking() API calls
+
+ FileAccPropList::setFileLocking()
+ FileAccPropList::getFileLocking()
+
+ See the configure option discussion for HDFFV-11092 (above) for more
+ information on the file locking feature and how it's controlled.
+
+ (DER - 2020/07/30, HDFFV-11092)
+
Java Library:
----------------
@@ -562,9 +751,34 @@ New Features
(DER - 2018/12/08, HDFFV-10252)
+ - Add wrappers for H5Pset/get_file_locking() API calls
+
+ H5Pset_file_locking()
+ H5Pget_use_file_locking()
+ H5Pget_ignore_disabled_file_locking()
+
+ Unlike the C++ and Fortran wrappers, there are separate getters for the
+ two file locking settings, each of which returns a boolean value.
+
+ See the configure option discussion for HDFFV-11092 (above) for more
+ information on the file locking feature and how it's controlled.
+
+ (DER - 2020/07/30, HDFFV-11092)
+
Tools:
------
+ - h5repack added options to control how external links are handled.
+
+ Currently h5repack preserves external links and cannot copy and merge
+ data from the external files. Two options, merge and prune, were added to
+ control how to merge data from an external link into the resulting file.
+ --merge          Follow external soft links recursively and merge the data.
+ --prune          Do not follow external soft links and remove the links.
+ --merge --prune  Follow external links, merge the data, and remove dangling links.
+
+ (ADB - 2020/08/05, HDFFV-9984)
+
- h5repack was fixed to repack the reference attributes properly.
The code line that checks if the update of reference inside a compound
datatype is misplaced outside the code block loop that carries out the
@@ -631,6 +845,50 @@ Bug Fixes since HDF5-1.10.3 release
Library
-------
+ - Creation of dataset with optional filter
+
+ When the combination of datatype, dataspace, etc. does not work for a
+ filter and the filter is optional, the filter was supposed to be skipped,
+ but it was not, and the dataset creation failed.
+
+ The dataset is now created in this situation and the filter is skipped.
+
+ (BMR - 2020/8/13, HDFFV-10933)
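
    A minimal C sketch of a dataset creation property list that requests a
    filter as optional; the filter id 32000 and parameter value are
    hypothetical placeholders:

        #include "hdf5.h"

        static hid_t make_dcpl(void)
        {
            hid_t    dcpl         = H5Pcreate(H5P_DATASET_CREATE);
            hsize_t  chunk[1]     = {64};
            unsigned cd_values[1] = {4};

            H5Pset_chunk(dcpl, 1, chunk);

            /* H5Z_FLAG_OPTIONAL lets the library skip the filter instead
             * of failing dataset creation when it cannot be applied */
            H5Pset_filter(dcpl, (H5Z_filter_t)32000, H5Z_FLAG_OPTIONAL,
                          1, cd_values);
            return dcpl;
        }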
+
+ - Explicitly declared dlopen to use RTLD_LOCAL
+
+ The dlopen documentation states that if neither RTLD_GLOBAL nor
+ RTLD_LOCAL is specified, the default behavior is unspecified.
+ The default on Linux is usually RTLD_LOCAL, while macOS defaults
+ to RTLD_GLOBAL.
+
+ (ADB - 2020/08/12, HDFFV-11127)
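
    A minimal C sketch of the flag combination described above; the plugin
    path and the use of RTLD_LAZY are assumptions for illustration:

        #include <dlfcn.h>

        static void *load_plugin(const char *path)
        {
            /* Request RTLD_LOCAL explicitly rather than relying on the
             * platform default (usually RTLD_LOCAL on Linux, RTLD_GLOBAL
             * on macOS) */
            return dlopen(path, RTLD_LAZY | RTLD_LOCAL);
        }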
+
+ - Fixed issues CVE-2018-13870 and CVE-2018-13869
+
+ When a buffer overflow occurred because a name length was corrupted
+ and became very large, h5dump crashed with a memory access violation.
+
+ A check for reading past the end of the buffer was added in multiple
+ locations to prevent the crashes; h5dump now simply fails with an
+ error message when this error condition occurs.
+
+ (BMR - 2020/07/22, HDFFV-11120 and HDFFV-11121)
+
+ - Fixed the segmentation fault when reading attributes with multiple threads
+
+ It was reported that reading attributes with a variable-length string
+ datatype would crash with a segmentation fault, particularly when the
+ number of threads was high (more than 16). The problem was due to the
+ file pointer stored in the attribute's variable-length string datatype:
+ that file pointer had already been closed when the attribute was accessed.
+
+ The problem was fixed by setting the file pointer to the currently opened
+ file pointer when the attribute is accessed. A similar fix was applied
+ previously for reading datasets with a variable-length string datatype.
+
+ (VC - 2020/07/13, HDFFV-11080)
+
- Fixed CVE-2020-10810
The tool h5clear produced a segfault during an error recovery in
@@ -912,6 +1170,21 @@ Bug Fixes since HDF5-1.10.3 release
(DER - 2019/12/09, HDFFV-10945)
+ - H5Sset_extent_none() sets the dataspace class to H5S_NO_CLASS which
+ causes asserts/errors when passed to other dataspace API calls.
+
+ H5S_NO_CLASS is an internal class value that should not have been
+ exposed via a public API call.
+
+ In debug builds of the library, this can cause asserts to trip. In
+ non-debug builds, it will produce normal library errors.
+
+ The new library behavior is for H5Sset_extent_none() to convert
+ the dataspace into one of type H5S_NULL, which is better handled
+ by the library and easier for developers to reason about.
+
+ (DER - 2020/07/27, HDFFV-11027)
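
    A minimal C sketch of the new behavior:

        #include "hdf5.h"

        static void reset_extent(hid_t space)
        {
            H5Sset_extent_none(space);

            /* The dataspace is now of class H5S_NULL rather than the
             * internal H5S_NO_CLASS value */
            if (H5Sget_simple_extent_type(space) == H5S_NULL) {
                /* safe to pass to other dataspace API calls */
            }
        }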
+
Java Library:
----------------
@@ -989,11 +1262,51 @@ Bug Fixes since HDF5-1.10.3 release
Tools
-----
- -
+ - The tools library was updated by standardizing the error stack process.
+
+ General sequence is:
+ h5tools_setprogname(PROGRAMNAME);
+ h5tools_setstatus(EXIT_SUCCESS);
+ h5tools_init();
+ ... process the command-line (check for error-stack enable) ...
+ h5tools_error_report();
+ ... (do work) ...
+ h5diff_exit(ret);
+
+ (ADB - 2020/07/20, HDFFV-11066)
+
+ - h5diff fixed a command line parsing error.
+
+ h5diff would ignore the argument to -d (delta) if it was smaller than
+ DBL_EPSILON. The macro H5_DBL_ABS_EQUAL was removed and a direct value
+ comparison is used instead.
+
+ (ADB - 2020/07/20, HDFFV-10897)
+
+ - h5diff added a command line option to ignore attributes.
+
+ h5diff ignores all objects under a supplied path when the exclude-path
+ argument is used. The new exclude-attribute argument excludes only the
+ attributes at the supplied path from the comparison.
+
+ (ADB - 2020/07/20, HDFFV-5935)
+
+ - h5diff added another level to the verbose argument to print filenames.
+
+ Added verbose level 3, which is level 2 plus the file names. The levels are:
+ 0 : Identical to '-v' or '--verbose'
+ 1 : All level 0 information plus one-line attribute status summary
+ 2 : All level 1 information plus extended attribute status report
+ 3 : All level 2 information plus file names
+
+ (ADB - 2020/07/20, HDFFV-10005)
High-Level APIs:
------
- -
+ - The H5DSis_scale function was updated to return "not a dimension scale" (0)
+ instead of failing (-1) when the CLASS or DIMENSION_SCALE attributes are
+ not written according to the Dimension Scales specification.
+
+ (EIP - 2020/08/12, HDFFV-10436)
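
    A minimal C sketch of checking the return value under the new behavior:

        #include "hdf5.h"
        #include "hdf5_hl.h"

        static int check_scale(hid_t dset)
        {
            htri_t status = H5DSis_scale(dset);

            if (status > 0) {
                /* dataset is a dimension scale */
            } else if (status == 0) {
                /* not a dimension scale (also returned now when the CLASS or
                 * DIMENSION_SCALE attributes are malformed) */
            } else {
                /* status < 0: library error */
            }
            return (int)status;
        }
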
Fortran High-Level APIs:
------
@@ -1015,9 +1328,9 @@ Bug Fixes since HDF5-1.10.3 release
-------
- Stopped java/test/junit.sh.in installing libs for testing under ${prefix}
- Lib files needed are now copied to a subdirectory in the java/test
- directory, and on Macs the loader path for libhdf5.xxxs.so is changed
- in the temporary copy of libhdf5_java.dylib.
+ Lib files needed are now copied to a subdirectory in the java/test
+ directory, and on Macs the loader path for libhdf5.xxxs.so is changed
+ in the temporary copy of libhdf5_java.dylib.
(LRK, 2020/7/2, HDFFV-11063)