author     Allen Byrne <byrn@hdfgroup.org>    2020-02-27 14:56:44 (GMT)
committer  Allen Byrne <byrn@hdfgroup.org>    2020-02-27 14:56:44 (GMT)
commit     9a0e84fb4779f40719aecc3330bffb6282728fc5 (patch)
tree       640a17d88714ec872636592e31010c6d4ec49010
parent     1775d6c9bc86862f120bd39865840ed152c47b0a (diff)
parent     13f5b3aee20d1d65f56dc08f088e0f218da5cf37 (diff)
Merge pull request #2409 in HDFFV/hdf5 from ~BYRN/hdf5_adb:develop to develop
* commit '13f5b3aee20d1d65f56dc08f088e0f218da5cf37':
  Update examples and fix old version references.
  Move MODEL check to before usage in CTestScript.cmake.
-rwxr-xr-x  bin/release                                6
-rw-r--r--  config/cmake/HDF5_Examples.cmake.in        2
-rw-r--r--  config/cmake/jrunTest.cmake               12
-rw-r--r--  config/cmake/scripts/CTestScript.cmake     8
-rw-r--r--  config/cmake/scripts/HDF5config.cmake     13
-rw-r--r--  config/cmake/scripts/HDF5options.cmake     2
-rw-r--r--  java/src/hdf/overview.html                 2
-rw-r--r--  java/src/jni/h5pFAPLImp.c                 56
-rw-r--r--  release_docs/INSTALL                      10
-rw-r--r--  release_docs/INSTALL_CMake.txt            62
-rw-r--r--  release_docs/INSTALL_Cygwin.txt          178
-rw-r--r--  release_docs/README_HPC                  156
-rw-r--r--  release_docs/RELEASE.txt                   2
-rw-r--r--  release_docs/USING_CMake_Examples.txt      4
-rw-r--r--  release_docs/USING_HDF5_CMake.txt          6
-rw-r--r--  release_docs/USING_HDF5_VS.txt             4
16 files changed, 257 insertions, 266 deletions
diff --git a/bin/release b/bin/release
index a9cf3f0..1568e02 100755
--- a/bin/release
+++ b/bin/release
@@ -233,7 +233,7 @@ tar2cmakezip()
# step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir
- cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.0-Source.zip $cmziptmpsubdir
+ cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.1-Source.zip $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpsubdir
@@ -328,7 +328,7 @@ tar2cmaketgz()
# step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
- cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.0-Source.tar.gz $cmgztmpsubdir
+ cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.1-Source.tar.gz $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpsubdir
@@ -411,7 +411,7 @@ tar2hpccmaketgz()
# step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
- cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.0-Source.tar.gz $cmgztmpsubdir
+ cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.1-Source.tar.gz $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
diff --git a/config/cmake/HDF5_Examples.cmake.in b/config/cmake/HDF5_Examples.cmake.in
index dd064d3..273f32a 100644
--- a/config/cmake/HDF5_Examples.cmake.in
+++ b/config/cmake/HDF5_Examples.cmake.in
@@ -77,7 +77,7 @@ set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DSITE:STRING=${CTEST_SITE} -DBUILDN
#TAR_SOURCE - name of tarfile
#if(NOT DEFINED TAR_SOURCE)
-# set(CTEST_USE_TAR_SOURCE "HDF5Examples-1.14.0-Source")
+# set(CTEST_USE_TAR_SOURCE "HDF5Examples-1.14.1-Source")
#endif()
###############################################################################################################
diff --git a/config/cmake/jrunTest.cmake b/config/cmake/jrunTest.cmake
index 583613e..41570eb 100644
--- a/config/cmake/jrunTest.cmake
+++ b/config/cmake/jrunTest.cmake
@@ -284,11 +284,13 @@ endif ()
# dump the output unless nodisplay option is set
if (TEST_SKIP_COMPARE AND NOT TEST_NO_DISPLAY)
- file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
- execute_process (
- COMMAND ${CMAKE_COMMAND} -E echo ${TEST_STREAM}
- RESULT_VARIABLE TEST_RESULT
- )
+ if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
+ file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
+ execute_process (
+ COMMAND ${CMAKE_COMMAND} -E echo ${TEST_STREAM}
+ RESULT_VARIABLE TEST_RESULT
+ )
+ endif ()
endif ()
# everything went fine...
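The guarded read above is a general CTest-script pattern: only echo a test's captured output when the output file actually exists, so a skipped-compare run that produced no file no longer aborts the script. A minimal standalone sketch of the same pattern; the folder and file values here are placeholders, not values from jrunTest.cmake:

    # echo captured test output only if the test actually produced the file
    set (TEST_FOLDER "/tmp/mytest")      # placeholder location
    set (TEST_OUTPUT "mytest.out")       # placeholder file name
    if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
      file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
      execute_process (
          COMMAND ${CMAKE_COMMAND} -E echo ${TEST_STREAM}
          RESULT_VARIABLE TEST_RESULT
      )
    endif ()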
diff --git a/config/cmake/scripts/CTestScript.cmake b/config/cmake/scripts/CTestScript.cmake
index 01335b8..e3659f6 100644
--- a/config/cmake/scripts/CTestScript.cmake
+++ b/config/cmake/scripts/CTestScript.cmake
@@ -253,6 +253,14 @@ endforeach ()
message (STATUS "Dashboard script configuration:\n${vars}\n")
#-----------------------------------------------------------------------------
+
+###################################################################
+######### Following is for submission to CDash ############
+###################################################################
+if (NOT DEFINED MODEL)
+ set (MODEL "Experimental")
+endif ()
+
#-----------------------------------------------------------------------------
## NORMAL process
## -- LOCAL_UPDATE updates the source folder from svn
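The relocated block simply gives MODEL a default before CTestScript.cmake first uses it. A short sketch of how that default is meant to behave; the Nightly value and the ctest_start call are illustrative assumptions based on the MODEL parameter documented in HDF5config.cmake below, not quotations from the script:

    # default the CDash group when the driver script did not define MODEL
    if (NOT DEFINED MODEL)
      set (MODEL "Experimental")
    endif ()
    # a caller can still override it through the -S argument list, e.g.
    #   ctest -S HDF5config.cmake,MODEL=Nightly,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log
    # and the value then presumably feeds the dashboard start call, e.g.
    #   ctest_start (${MODEL})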
diff --git a/config/cmake/scripts/HDF5config.cmake b/config/cmake/scripts/HDF5config.cmake
index 10ab5f2..7b7560f 100644
--- a/config/cmake/scripts/HDF5config.cmake
+++ b/config/cmake/scripts/HDF5config.cmake
@@ -42,9 +42,9 @@ set (CTEST_SOURCE_VERSEXT "")
##############################################################################
# handle input parameters to script.
#BUILD_GENERATOR - which CMake generator to use, required
-#INSTALLDIR - HDF5-1.13.0 root folder
+#INSTALLDIR - HDF5-1.13.x root folder
#CTEST_CONFIGURATION_TYPE - Release, Debug, RelWithDebInfo
-#CTEST_SOURCE_NAME - name of source folder; HDF5-1.13.0
+#CTEST_SOURCE_NAME - name of source folder; HDF5-1.13.x
#MODEL - CDash group name
#HPC - run alternate configurations for HPC machines; sbatch, bsub, raybsub, qsub
#MPI - enable MPI
@@ -189,15 +189,6 @@ endif ()
###################################################################
###################################################################
-######### Following is for submission to CDash ############
-###################################################################
-if (NOT DEFINED MODEL)
- set (MODEL "Experimental")
-endif ()
-
-###################################################################
-
-###################################################################
##### Following controls CDash submission #####
#set (LOCAL_SUBMIT "TRUE")
##### Following controls test process #####
diff --git a/config/cmake/scripts/HDF5options.cmake b/config/cmake/scripts/HDF5options.cmake
index d1c14e9..f132234 100644
--- a/config/cmake/scripts/HDF5options.cmake
+++ b/config/cmake/scripts/HDF5options.cmake
@@ -67,7 +67,7 @@ set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRIN
#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF")
#### package examples ####
-#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-1.14.0-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}")
+#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-1.14.1-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}")
#############################################################################################
### enable parallel builds
diff --git a/java/src/hdf/overview.html b/java/src/hdf/overview.html
index e3a032b..f6a34fc 100644
--- a/java/src/hdf/overview.html
+++ b/java/src/hdf/overview.html
@@ -7,7 +7,7 @@ The <b>Java HD5 Interface (JHI5)</b> is a Java package
(<a href="../../hdf-java-html/javadocs/hdf/hdf5lib/package-summary.html">hdf.hdf5lib</a>)
that ``wraps around'' the HDF5 library.
<p>There are a large number of functions in the HDF5
-library (version 1.10). Some of the functions are not supported in JHI5. Most
+library (version 1.13). Some of the functions are not supported in JHI5. Most
of the unsupported functions have C function pointers, which is not currently
implemented in JHI5.</p>
diff --git a/java/src/jni/h5pFAPLImp.c b/java/src/jni/h5pFAPLImp.c
index 178b1af..9ae8775 100644
--- a/java/src/jni/h5pFAPLImp.c
+++ b/java/src/jni/h5pFAPLImp.c
@@ -395,29 +395,23 @@ Java_hdf_hdf5lib_H5_H5Pget_1fapl_1hdfs
if (H5Pget_fapl_hdfs((hid_t)fapl_id, &fa) < 0)
H5_LIBRARY_ERROR(ENVONLY);
- if (HDstrlen(fa.namenode_name) > 0) {
- if (NULL == (j_namenode_name = ENVPTR->NewStringUTF(ENVONLY, fa.namenode_name))) {
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create namenode_name string");
- }
+ if (NULL == (j_namenode_name = ENVPTR->NewStringUTF(ENVONLY, fa.namenode_name))) {
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create namenode_name string");
}
args[0].l = j_namenode_name;
args[1].i = (jint)fa.namenode_port;
- if (HDstrlen(fa.user_name) > 0) {
- if (NULL == (j_user_name = ENVPTR->NewStringUTF(ENVONLY, fa.user_name))) {
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create user_name string");
- }
+ if (NULL == (j_user_name = ENVPTR->NewStringUTF(ENVONLY, fa.user_name))) {
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create user_name string");
}
args[2].l = j_user_name;
- if (HDstrlen(fa.kerberos_ticket_cache) > 0) {
- if (NULL == (j_kerb_cache_path = ENVPTR->NewStringUTF(ENVONLY, fa.kerberos_ticket_cache))) {
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create kerberos_ticket_cache string");
- }
+ if (NULL == (j_kerb_cache_path = ENVPTR->NewStringUTF(ENVONLY, fa.kerberos_ticket_cache))) {
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create kerberos_ticket_cache string");
}
args[3].l = j_kerb_cache_path;
@@ -820,27 +814,21 @@ Java_hdf_hdf5lib_H5_H5Pget_1fapl_1ros3
if (H5Pget_fapl_ros3((hid_t)fapl_id, &fa) < 0)
H5_LIBRARY_ERROR(ENVONLY);
- if (HDstrlen(fa.aws_region) > 0) {
- if (NULL == (j_aws = ENVPTR->NewStringUTF(ENVONLY, fa.aws_region))) {
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create aws_region string");
- }
+ if (NULL == (j_aws = ENVPTR->NewStringUTF(ENVONLY, fa.aws_region))) {
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create aws_region string");
}
args[0].l = j_aws;
- if (HDstrlen(fa.secret_id) > 0) {
- if (NULL == (j_id = ENVPTR->NewStringUTF(ENVONLY, fa.secret_id))) {
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_id string");
- }
+ if (NULL == (j_id = ENVPTR->NewStringUTF(ENVONLY, fa.secret_id))) {
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_id string");
}
args[1].l = j_id;
- if (HDstrlen(fa.secret_key) > 0) {
- if (NULL == (j_key = ENVPTR->NewStringUTF(ENVONLY, fa.secret_key))) {
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_key string");
- }
+ if (NULL == (j_key = ENVPTR->NewStringUTF(ENVONLY, fa.secret_key))) {
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_key string");
}
args[2].l = j_key;
@@ -1685,11 +1673,9 @@ Java_hdf_hdf5lib_H5_H5Pget_1mdc_1config
args[2].z = cacheinfo.open_trace_file;
args[3].z = cacheinfo.close_trace_file;
- if (HDstrlen(cacheinfo.trace_file_name) > 0) {
- if (NULL == (j_str = ENVPTR->NewStringUTF(ENVONLY, cacheinfo.trace_file_name))) {
- CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
- H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_mdc_config: out of memory - unable to construct string from UTF characters");
- }
+ if (NULL == (j_str = ENVPTR->NewStringUTF(ENVONLY, cacheinfo.trace_file_name))) {
+ CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
+ H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_mdc_config: out of memory - unable to construct string from UTF characters");
}
args[4].l = j_str;
diff --git a/release_docs/INSTALL b/release_docs/INSTALL
index 5c54698..fcf9602 100644
--- a/release_docs/INSTALL
+++ b/release_docs/INSTALL
@@ -414,13 +414,17 @@ CONTENTS
4.3.11. Backward compatibility
- The 1.10 version of the HDF5 library can be configured to operate
- identically to the v1.8 library with the
+ The 1.13 version of the HDF5 library can be configured to operate
+ identically to the v1.12 library with the
+ --with-default-api-version=v112
+ configure flag, or identically to the v1.10 library with the
+ --with-default-api-version=v110
+ configure flag, or identically to the v1.8 library with the
--with-default-api-version=v18
configure flag, or identically to the v1.6 library with the
--with-default-api-version=v16
configure flag. This allows existing code to be compiled with the
- v1.10 library without requiring immediate changes to the application
+ v1.13 library without requiring immediate changes to the application
source code. For additional configuration options and other details,
see "API Compatibility Macros":
diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt
index 652e1f4..e1a34c5 100644
--- a/release_docs/INSTALL_CMake.txt
+++ b/release_docs/INSTALL_CMake.txt
@@ -26,11 +26,11 @@ Obtaining HDF5 source code
2. Obtain compressed (*.tar or *.zip) HDF5 source from
https://portal.hdfgroup.org/display/support/Building+HDF5+with+CMake
and put it in "myhdfstuff".
- Uncompress the file. There should be a hdf5-1.10."X" folder.
+ Uncompress the file. There should be a hdf5-1.13."X" folder.
CMake version
1. We suggest you obtain the latest CMake from the Kitware web site.
- The HDF5 1.10."X" product requires a minimum CMake version 3.10,
+ The HDF5 1.13."X" product requires a minimum CMake version 3.12,
where "X" is the current HDF5 release version. If you are using
VS2019, the minimum version is 3.15.
@@ -53,7 +53,7 @@ The following files referenced below are available at the HDF web site:
https://portal.hdfgroup.org/display/support/Building+HDF5+with+CMake
Single compressed file with all the files needed, including source:
- CMake-hdf5-1.10.X.zip or CMake-hdf5-1.10.X.tar.gz
+ CMake-hdf5-1.13.X.zip or CMake-hdf5-1.13.X.tar.gz
Individual files included in the above mentioned compressed files
-----------------------------------------------
@@ -65,7 +65,7 @@ External compression szip and zlib libraries:
ZLib.tar.gz
Examples Source package:
- HDF5Examples-1.10.x-Source.tar.gz
+ HDF5Examples-1.14.x-Source.tar.gz
Configuration files:
HDF5config.cmake
@@ -78,10 +78,10 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
1. Change to the development directory "myhdfstuff".
- 2. Download the CMake-hdf5-1.10.X.zip(.tar.gz) file to "myhdfstuff".
+ 2. Download the CMake-hdf5-1.13.X.zip(.tar.gz) file to "myhdfstuff".
Uncompress the file.
- 3. Change to the source directory "hdf5-1.10.x".
+ 3. Change to the source directory "hdf5-1.13.x".
CTestScript.cmake file should not be modified.
4. Edit the platform configuration file, HDF5options.cmake, if you want to change
@@ -109,7 +109,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
The command above will configure, build, test, and create an install
package in the myhdfstuff folder. It will have the format:
- HDF5-1.10.NN-<platform>.<zip or tar.gz>
+ HDF5-1.13.NN-<platform>.<zip or tar.gz>
On Unix, <platform> will be "Linux". A similar .sh file will also be created.
On Windows, <platform> will be "win64" or "win32". If you have an
@@ -130,13 +130,13 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
6. To install, "X" is the current release version
On Windows (with WiX installed), execute:
- HDF5-1.10."X"-win32.msi or HDF5-1.10."X"-win64.msi
+ HDF5-1.13."X"-win32.msi or HDF5-1.13."X"-win64.msi
By default this program will install the hdf5 library into the
"C:\Program Files" directory and will create the following
directory structure:
HDF_Group
--HDF5
- ----1.10."X"
+ ----1.13."X"
------bin
------include
------lib
@@ -144,40 +144,40 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
On Linux, change to the install destination directory
(create it if doesn't exist) and execute:
- <path-to>/myhdfstuff/HDF5-1.10."X"-Linux.sh
+ <path-to>/myhdfstuff/HDF5-1.13."X"-Linux.sh
After accepting the license, the script will prompt:
By default the HDF5 will be installed in:
- "<current directory>/HDF5-1.10."X"-Linux"
- Do you want to include the subdirectory HDF5-1.10."X"-Linux?
+ "<current directory>/HDF5-1.13."X"-Linux"
+ Do you want to include the subdirectory HDF5-1.13."X"-Linux?
Saying no will install in: "<current directory>" [Yn]:
Note that the script will create the following directory structure
relative to the install point:
HDF_Group
--HDF5
- ----1.10."X"
+ ----1.13."X"
------bin
------include
------lib
------share
- On Mac you will find HDF5-1.10."X"-Darwin.dmg in the myhdfstuff folder. Click
+ On Mac you will find HDF5-1.13."X"-Darwin.dmg in the myhdfstuff folder. Click
on the dmg file to proceed with installation. After accepting the license,
there will be a folder with the following structure:
HDF_Group
--HDF5
- ----1.10."X"
+ ----1.13."X"
------bin
------include
------lib
------share
By default the installation will create the bin, include, lib and cmake
- folders in the <install destination directory>/HDF_Group/HDF5/1.10."X"
+ folders in the <install destination directory>/HDF_Group/HDF5/1.13."X"
The <install destination directory> depends on the build platform;
Windows will set the default to:
- C:/Program Files/HDF_Group/HDF5/1.10."X"
+ C:/Program Files/HDF_Group/HDF5/1.13."X"
Linux will set the default to:
- "myhdfstuff/HDF_Group/HDF5/1.10."X"
+ "myhdfstuff/HDF_Group/HDF5/1.13."X"
The default can be changed by adding ",INSTALLDIR=<my new dir>" to the
"ctest -S HDF5config.cmake..." command. For example on linux:
ctest -S HDF5config.cmake,INSTALLDIR=/usr/local/myhdf5,BUILD_GENERATOR=Unix -C Release -VV -O hdf5.log
@@ -204,13 +204,13 @@ Notes: This short set of instructions is written for users who want to
5. Configure the C library, tools and tests with one of the following commands:
On Windows 32 bit
- cmake -G "Visual Studio 12 2013" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.10."X"
+ cmake -G "Visual Studio 12 2013" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.13."X"
On Windows 64 bit
- cmake -G "Visual Studio 12 2013 Win64" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.10."X"
+ cmake -G "Visual Studio 12 2013 Win64" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.13."X"
On Linux and Mac
- cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ../hdf5-1.10."X"
+ cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ../hdf5-1.13."X"
where "X" is the current release version.
@@ -225,13 +225,13 @@ Notes: This short set of instructions is written for users who want to
9. To install
On Windows (with WiX installed), execute:
- HDF5-1.10."X"-win32.msi or HDF5-1.10."X"-win64.msi
+ HDF5-1.13."X"-win32.msi or HDF5-1.13."X"-win64.msi
By default this program will install the hdf5 library into the
"C:\Program Files" directory and will create the following
directory structure:
HDF_Group
--HDF5
- ----1.10."X"
+ ----1.13."X"
------bin
------include
------lib
@@ -239,28 +239,28 @@ Notes: This short set of instructions is written for users who want to
On Linux, change to the install destination directory
(create if doesn't exist) and execute:
- <path-to>/myhdfstuff/build/HDF5-1.10."X"-Linux.sh
+ <path-to>/myhdfstuff/build/HDF5-1.13."X"-Linux.sh
After accepting the license, the script will prompt:
By default the HDF5 will be installed in:
- "<current directory>/HDF5-1.10."X"-Linux"
- Do you want to include the subdirectory HDF5-1.10."X"-Linux?
+ "<current directory>/HDF5-1.13."X"-Linux"
+ Do you want to include the subdirectory HDF5-1.13."X"-Linux?
Saying no will install in: "<current directory>" [Yn]:
Note that the script will create the following directory structure
relative to the install point:
HDF_Group
--HDF5
- ----1.10."X"
+ ----1.13."X"
------bin
------include
------lib
------share
- On Mac you will find HDF5-1.10."X"-Darwin.dmg in the build folder. Click
+ On Mac you will find HDF5-1.13."X"-Darwin.dmg in the build folder. Click
on the dmg file to proceed with installation. After accepting the license,
there will be a folder with the following structure:
HDF_Group
--HDF5
- ----1.10."X"
+ ----1.13."X"
------bin
------include
------lib
@@ -272,7 +272,7 @@ IV. Further considerations
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
- web site. The HDF5 1.10."X" product requires a minimum CMake version 3.10.
+ web site. The HDF5 1.13."X" product requires a minimum CMake version 3.12.
2. If you plan to use Zlib or Szip:
A. Download the binary packages and install them in a central location.
@@ -656,7 +656,7 @@ HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks"
HDF_TEST_EXPRESS "Control testing framework (0-3)" "0"
HDF5_TEST_VFD "Execute tests with different VFDs" OFF
HDF5_TEST_PASSTHROUGH_VOL "Execute tests with different passthrough VOL connectors" OFF
-DEFAULT_API_VERSION "Enable default API (v16, v18, v110, v112)" "v112"
+DEFAULT_API_VERSION "Enable default API (v16, v18, v110, v112, v114)" "v114"
HDF5_USE_FOLDERS "Enable folder grouping of projects in IDEs." ON
HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON
HDF5_WANT_DCONV_EXCEPTION "exception handling functions is checked during data conversions" ON
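Options from the table above are normally switched on the same way as the packaging and compression options shown earlier, by appending to ADD_BUILD_OPTIONS in HDF5options.cmake. A small illustrative fragment; the particular values are assumptions, not recommendations:

    # exercise the test suite with the alternate virtual file drivers
    set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_TEST_VFD:BOOL=ON")
    # adjust the express-test level (0-3) of the testing framework
    set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF_TEST_EXPRESS:STRING=2")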
diff --git a/release_docs/INSTALL_Cygwin.txt b/release_docs/INSTALL_Cygwin.txt
index 74f494c..eebffba 100644
--- a/release_docs/INSTALL_Cygwin.txt
+++ b/release_docs/INSTALL_Cygwin.txt
@@ -2,8 +2,8 @@
HDF5 Build and Install Instructions for Cygwin
************************************************************************
-This document is a instruction on how to build, test and install HDF5 libary on
-Cygwin. See detailed information in hdf5/INSTALL.
+This document is a instruction on how to build, test and install HDF5 libary on
+Cygwin. See detailed information in hdf5/INSTALL.
NOTE: hdf5 can be built with CMake, see the INSTALL_CMake.txt file for more guidance.
@@ -12,16 +12,16 @@ Preconditions:
1. Installed Cygwin 1.7.25 or higher
- To install the Cygwin net release, go to http://www.cygwin.com and
+ To install the Cygwin net release, go to http://www.cygwin.com and
click on "setup-x86.exe" (32-bit installation) under the heading
- "Current Cygwin DLL version". This will download a GUI
- installer called setup-x86.exe which can be run to download a complete
- Cygwin installation via the internet. Then follow the instructions
+ "Current Cygwin DLL version". This will download a GUI
+ installer called setup-x86.exe which can be run to download a complete
+ Cygwin installation via the internet. Then follow the instructions
on each screen to install Cygwin.
- Cygwin uses packages to manage installing various software. Users can
+ Cygwin uses packages to manage installing various software. Users can
choose to install or uninstall certain packages by running setup.exe.
- http://www.cygwin.com/packages/ provides detailed information about
+ http://www.cygwin.com/packages/ provides detailed information about
Cygwin packages.
Most required dependencies can be satisfied by installing all packages in
@@ -31,32 +31,32 @@ Preconditions:
2. Compilers, Libraries and Utilities Installed
2.1 Compilers Supported
-
+
The following compilers are supported by HDF5 and included in the Cygwin
package system:
gcc (4.7.3 and 4.9.2), which includes:
gcc4-core : C compiler
gcc4-g++ : C++ compiler
gcc4-fortran : fortran compiler
-
+
2.1.1 Using Compilers Not Supported
-
+
By default the current configuration uses vendor compilers; to use
another compiler run the following commands before running
- configure:
-
+ configure:
+
setenv CC "foo -flags"
setenv FC "fffoo -flags"
- For example, if users want to use pgf90 as fortran compiler, then
+ For example, if users want to use pgf90 as fortran compiler, then
setenv FC pgf90
See the configure help page (configure --help) for a list of
environment variables that have an affect on building the
library.
-
+
2.2 HDF5 External Library Dependencies
2.2.1 Zlib
@@ -66,162 +66,162 @@ Preconditions:
2.2.2 Szip
The HDF5 library has a predefined compression filter that uses
the extended-Rice lossless compression algorithm for chunked
- datatsets. For more information on Szip compression, license terms,
+ datatsets. For more information on Szip compression, license terms,
and obtaining the Szip source code, see:
https://portal.hdfgroup.org/display/HDF5/Szip+Compression+in+HDF+Products
-
-
+
+
2.3 Additional Utilities
-
+
The following standard utilities are also required to build and test HDF5:
-
+
bison : yacc implementation
flex : flex utility
make : make utility
-
+
2.4 Alternate Build Process
-
- Download the CMake package and follow the notes in the "INSTALL_CMake.txt"
+
+ Download the CMake package and follow the notes in the "INSTALL_CMake.txt"
file to build HDF5 with the CMake utilities.
-
-
-
+
+
+
Build, Test and Install HDF5 on Cygwin
--------------------------------------
1. Get HDF5 source code package
Users can download HDF5 source code package from HDF website
- (http://hdfgroup.org).
-
+ (http://hdfgroup.org).
+
2. Unpacking the distribution
The HDF5 source code is distributed in a variety of formats which
- can be unpacked with the following commands, each of which creates
- an `hdf5-1.10.x' directory.
+ can be unpacked with the following commands, each of which creates
+ an `hdf5-1.13.x' directory.
2.1 Non-compressed tar archive (*.tar)
- $ tar xf hdf5-1.10.x.tar
+ $ tar xf hdf5-1.13.x.tar
2.2 Gzip'd tar archive (*.tar.gz)
- $ gunzip < hdf5-1.10.x.tar.gz | tar xf -
+ $ gunzip < hdf5-1.13.x.tar.gz | tar xf -
2.3 Bzip'd tar archive (*.tar.bz2)
- $ bunzip2 < hdf5-1.10.x.tar.bz2 | tar xf -
+ $ bunzip2 < hdf5-1.13.x.tar.bz2 | tar xf -
2. Setup Environment
In Cygwin, most compilers and setting are automatically detected during
- the configure script. However, if you are building Fortran we recommend
- that you explicitly set the "FC" variable in your environment to use the
+ the configure script. However, if you are building Fortran we recommend
+ that you explicitly set the "FC" variable in your environment to use the
gfortran compiler. For example, issue the command:
-
+
$ export FC=gfortran
-
-4. Configuring
-
- Notes: See detailed information in hdf5/release_docs/INSTALL,
- part 5. Full installation instructions for source
+
+4. Configuring
+
+ Notes: See detailed information in hdf5/release_docs/INSTALL,
+ part 5. Full installation instructions for source
distributions
- The host configuration file for cygwin i686-pc-cygwin is located
- in the `config' directory and are based on architecture name,
- vendor name, and operating system which are displayed near the
+ The host configuration file for cygwin i686-pc-cygwin is located
+ in the `config' directory and are based on architecture name,
+ vendor name, and operating system which are displayed near the
beginning of the `configure' output. The host config file influences
- the behavior of configure by setting or augmenting shell variables.
-
+ the behavior of configure by setting or augmenting shell variables.
+
In short,
-
- To configure HDF5 C Library, using
-
+
+ To configure HDF5 C Library, using
+
$ ./configure
-
+
To configure HDF5 C/C++ Library, using
$ ./configure --enable-cxx
-
+
To configure HDF5 C/Fortran Library, using
$ ./configure --enable-fortran
-
+
To configure HDF5 C with Szip library, using
$ ./configure --with-szlib="path to szlib"
-
- For example, if szip library was installed in the directory
+
+ For example, if szip library was installed in the directory
/cygdrive/c/szip, which is parent directory of "include" and
- "lib", then the following command will configure HDF5 C library
+ "lib", then the following command will configure HDF5 C library
with szip enabled:
-
+
$ ./configure --with-szlib=/cygdrive/c/szip
-
+
To configure HDF5 C without Zlib,
-
+
To disable zlib, using
$ ./configure --without-zlib
-
+
Two ways to configure HDF5 C with specified Zlib
-
+
Using
$ ./configure --with-zlib=INCDIR,LIBDIR
For example, if the zlib library is installed in
/cygdrive/c/usr, which is the parent directory of directories
- "include" and "lib",
+ "include" and "lib",
$ ./configure --with-zlib=/cygdrive/c/usr/include,/cygdrive/c/usr/lib
Through the CPPFLAGS and LDFLAGS Variables
-
- For example, if zlib was installed in the directory
- /cygdrive/c/usr then using the following command to configure
+
+ For example, if zlib was installed in the directory
+ /cygdrive/c/usr then using the following command to configure
HDF5 with zib
$ CPPFLAGS=-I/cygdrive/c/usr/include \
$ LDFLAGS=-L/cygdrive/c/usr/lib \
$ ./configure
- To specify the installation directories, using
+ To specify the installation directories, using
$ ./configure --prefix="path for installation"
-
- By default, HDF5 library, header files, examples, and
+
+ By default, HDF5 library, header files, examples, and
support programs will be installed in /usr/local/lib,
/usr/local/include, /usr/local/doc/hdf5/examples, and
- /usr/local/bin. To use a path other than /usr/local specify
+ /usr/local/bin. To use a path other than /usr/local specify
the path with the `--prefix=PATH' switch as in the above
command.
- Combination of Switches
+ Combination of Switches
- All of the above switches can be combined together. For
- example, if users want to configure HDF5 C/C++/Fortran
- library with szip library enabled, with zlib library at
- /cygdrive/c/usr/, and install HDF5 into directory
+ All of the above switches can be combined together. For
+ example, if users want to configure HDF5 C/C++/Fortran
+ library with szip library enabled, with zlib library at
+ /cygdrive/c/usr/, and install HDF5 into directory
/cygdrive/c/hdf5 using gcc/g++ as C/C++ compiler and gfortran
as fortran compiler
-
+
$ ./configure
--with-szlib=/cygdrive/c/szip
--with-zlib=/cygdrive/c/usr/include,/cygdrive/c/usr/lib
--prefix=/cygdrive/c/hdf5
--enable-cxx
- --enable-fortran
+ --enable-fortran
<"If no more switches, then hit Enter">
Notes: The command format above is for readilibity. In practice,
please type in the command above with at least one
- space between each line, No "Enter" until users finish
- the switches and want to run the configure.
+ space between each line, No "Enter" until users finish
+ the switches and want to run the configure.
+
-
or do it through CPPFLAGS and LDFLAGS variables:
-
+
$ CPPFLAGS=-I/cygdrive/c/usr/include \
$ LDFLAGS=-L/cygdrive/c/usr/lib \
@@ -229,38 +229,38 @@ Build, Test and Install HDF5 on Cygwin
--with-szlib=/cygdrive/c/szip
--prefix=/cygdrive/c/hdf5
--enable-cxx
- --enable-fortran
+ --enable-fortran
<"If no more switches, then hit Enter">
-
+
5. Make and Make Check
After configuration is done successfully, run the following series of
commands to build, test and install HDF5
-
+
$ make > "output file name"
$ make check > "output file name"
-
+
Before run "make install", check output file for "make check", there
should be no failures at all.
6. Make Install
$ make install > "output file name"
-
-
+
+
7. Check installed HDF5 library
- After step 6, go to your installation directory, there should be
+ After step 6, go to your installation directory, there should be
three subdirectories: "bin" "include" and "lib".
-8. Known Problems
-
+8. Known Problems
+
dt_arith tests may fail due to the use of fork. This is a known issue
with cygwin on Windows.
"make check" fails when building shared lib files is enabled. The default
on Cygwin has been changed to disable shared. It can be enabled with
- the --enable-shared configure option but is likely to fail "make check"
+ the --enable-shared configure option but is likely to fail "make check"
with GCC compilers.
-----------------------------------------------------------------------
diff --git a/release_docs/README_HPC b/release_docs/README_HPC
index 67a5d6c..513064c 100644
--- a/release_docs/README_HPC
+++ b/release_docs/README_HPC
@@ -1,6 +1,6 @@
************************************************************************
* Using CMake to build and test HDF5 source on HPC machines *
-************************************************************************
+************************************************************************
Contents
@@ -16,34 +16,34 @@ Section VI: Other cross compiling options
========================================================================
I. Prerequisites
========================================================================
- 1. Create a working directory that is accessible from the compute nodes for
+ 1. Create a working directory that is accessible from the compute nodes for
running tests; the working directory should be in a scratch space or a
parallel file system space since testing will use this space. Building
- from HDF5 source in a 'home' directory typically results in test
+ from HDF5 source in a 'home' directory typically results in test
failures and should be avoided.
-
- 2. Load modules for desired compilers, module for cmake version 3.10 or greater,
+
+ 2. Load modules for desired compilers, module for cmake version 3.12 or greater,
and set any needed environment variables for compilers (i.e., CC, FC, CXX).
Unload any problematic modules (i.e., craype-hugepages2M).
========================================================================
II. Obtain HDF5 source
========================================================================
-Obtain HDF5 source code from the HDF5 repository using a git command or
+Obtain HDF5 source code from the HDF5 repository using a git command or
from a release tar file in a working directory:
- git clone https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5.git
+ git clone https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5.git
[-b branch] [source directory]
If no branch is specified, then the 'develop' version will be checked out.
-If no source directory is specified, then the source will be located in the
-'hdf5' directory. The Cmake scripts expect the source to be in a directory
+If no source directory is specified, then the source will be located in the
+'hdf5' directory. The CMake scripts expect the source to be in a directory
named hdf5-<version string>, where 'version string' uses the format '1.xx.xx'.
-For example, for the current 'develop' version, the "hdf5" directory should
-be renamed "hdf5-1.11.4", or for the first hdf5_1_10_5 pre-release version,
-it should be renamed "hdf5-1.10.5-pre1".
+For example, for the current 'develop' version, the "hdf5" directory should
+be renamed "hdf5-1.13.0", or for the first hdf5_1_12_0 pre-release version,
+it should be renamed "hdf5-1.12.0-5".
-If the version number is not known a priori, the version string
+If the version number is not known a priori, the version string
can be obtained by running bin/h5vers in the top level directory of the source clone, and
the source directory renamed 'hdf5-<version string>'.
@@ -57,23 +57,23 @@ The ctest command [1]:
ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log
-will configure, build, test and package HDF5 from the downloaded source
+will configure, build, test and package HDF5 from the downloaded source
after the setup steps outlined below are followed.
-CMake option variables are available to allow running test programs in batch
-scripts on compute nodes and to cross-compile for compute node hardware using
+CMake option variables are available to allow running test programs in batch
+scripts on compute nodes and to cross-compile for compute node hardware using
a cross-compiling emulator. The setup steps will make default settings for
-parallel or serial only builds available to the CMake command.
+parallel or serial only builds available to the CMake command.
- 1. For the current 'develop' version the "hdf5" directory should be renamed
- "hdf5-1.11.4".
+ 1. For the current 'develop' version the "hdf5" directory should be renamed
+ "hdf5-1.13.0".
2. Three cmake script files need to be copied to the working directory, or
have symbolic links to them, created in the working directory:
-
- hdf5-1.11.4/config/cmake/scripts/HDF5config.cmake
- hdf5-1.11.4/config/cmake/scripts/CTestScript.cmake
- hdf5-1.11.4/config/cmake/scripts/HDF5options.cmake
+
+ hdf5-1.13.0/config/cmake/scripts/HDF5config.cmake
+ hdf5-1.13.0/config/cmake/scripts/CTestScript.cmake
+ hdf5-1.13.0/config/cmake/scripts/HDF5options.cmake
should be copied to the working directory.
@@ -82,16 +82,16 @@ parallel or serial only builds available to the CMake command.
CTestScript.cmake
HDF5config.cmake
HDF5options.cmake
- hdf5-1.11.4
+ hdf5-1.13.0
- Additionally, when the ctest command runs [1], it will add a build directory
+ Additionally, when the ctest command runs [1], it will add a build directory
in the working directory.
4. The following options (among others) can be added to the ctest
command [1], following '-S HDF5config.cmake,' and separated by ',':
HPC=sbatch (or 'bsub' or 'raybsub') indicates which type of batch
- files to use for running tests. If omitted, test
+ files to use for running tests. If omitted, test
will run on the local machine or login node.
KNL=true to cross-compile for KNL compute nodes on CrayXC40
@@ -104,27 +104,27 @@ parallel or serial only builds available to the CMake command.
The HPC options will add BUILD_GENERATOR=Unix for the three HPC options.
An example ctest command for a parallel build on a system using sbatch is
-
+
ctest -S HDF5config.cmake,HPC=sbatch,MPI=true -C Release -V -O hdf5.log
- Adding the option 'KNL=true' to the above list will compile for KNL nodes,
+ Adding the option 'KNL=true' to the above list will compile for KNL nodes,
for example, on 'mutrino' and other CrayXC40 machines.
- Changing -V to -VV will produce more logging information in HDF5.log.
+ Changing -V to -VV will produce more logging information in HDF5.log.
- More detailed CMake information can be found in the HDF5 source in
+ More detailed CMake information can be found in the HDF5 source in
release_docs/INSTALL_CMake.txt.
========================================================================
IV. Cross-compiling
========================================================================
-For cross-compiling on Cray, set environment variables CC=cc, FC=ftn
+For cross-compiling on Cray, set environment variables CC=cc, FC=ftn
and CXX=CC (for c++) after all compiler modules are loaded since switching
compiler modules may unset or reset these variables.
-CMake provides options for cross-compiling. To cross-compile for KNL hardware
-on mutrino and other CrayXC40 machines, add HPC=sbatch,KNL=true to the
-ctest command line. This will set the following options from the
+CMake provides options for cross-compiling. To cross-compile for KNL hardware
+on mutrino and other CrayXC40 machines, add HPC=sbatch,KNL=true to the
+ctest command line. This will set the following options from the
config/cmake/scripts/HPC/sbatch-HDF5options.cmake file:
set (COMPILENODE_HWCOMPILE_MODULE "craype-haswell")
@@ -133,74 +133,74 @@ config/cmake/scripts/HPC/sbatch-HDF5options.cmake file:
set (LOCAL_BATCH_SCRIPT_PARALLEL_NAME "knl_ctestP.sl")
set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DCMAKE_TOOLCHAIN_FILE:STRING=config/toolchain/crayle.cmake")
-On the Cray XC40 the craype-haswell module is needed for configuring, and the
+On the Cray XC40 the craype-haswell module is needed for configuring, and the
craype-mic-knl module is needed for building to run on the KNL nodes. CMake
-with the above options will swap modules after configuring is complete,
+with the above options will swap modules after configuring is complete,
but before compiling programs for KNL.
The sbatch script arguments for running jobs on KNL nodes may differ on CrayXC40
-machines other than mutrino. The batch scripts knl_ctestS.sl and knl_ctestP.sl
-have the correct arguments for mutrino: "#SBATCH -p knl -C quad,cache". For
-cori, another CrayXC40, that line is replaced by "#SBATCH -C knl,quad,cache".
-For cori (and other machines), the values in LOCAL_BATCH_SCRIPT_NAME and
-LOCAL_BATCH_SCRIPT_PARALLEL_NAME in the config/cmake/scripts/HPC/sbatch-HDF5options.cmake
-file can be replaced by cori_knl_ctestS.sl and cori_knl_ctestS.sl, or the lines
-can be edited in the batch files in hdf5-1.11.4/bin/batch.
+machines other than mutrino. The batch scripts knl_ctestS.sl and knl_ctestP.sl
+have the correct arguments for mutrino: "#SBATCH -p knl -C quad,cache". For
+cori, another CrayXC40, that line is replaced by "#SBATCH -C knl,quad,cache".
+For cori (and other machines), the values in LOCAL_BATCH_SCRIPT_NAME and
+LOCAL_BATCH_SCRIPT_PARALLEL_NAME in the config/cmake/scripts/HPC/sbatch-HDF5options.cmake
+file can be replaced by cori_knl_ctestS.sl and cori_knl_ctestS.sl, or the lines
+can be edited in the batch files in hdf5-1.13.0/bin/batch.
========================================================================
V. Manual alternatives
========================================================================
-If using ctest is undesirable, one can create a build directory and run the cmake
+If using ctest is undesirable, one can create a build directory and run the cmake
configure command, for example
-"/projects/Mutrino/hpcsoft/cle6.0/common/cmake/3.10.2/bin/cmake"
--C "<working directory>/hdf5-1.11.4/config/cmake/cacheinit.cmake"
--DCMAKE_BUILD_TYPE:STRING=Release -DHDF5_BUILD_FORTRAN:BOOL=ON
--DHDF5_BUILD_JAVA:BOOL=OFF
--DCMAKE_INSTALL_PREFIX:PATH=<working directory>/HDF_Group/HDF5/1.11.4
--DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF
--DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_BUILD_CPP_LIB:BOOL=OFF
--DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_ENABLE_THREADSAFE:BOOL=OFF
--DHDF5_PACKAGE_EXTLIBS:BOOL=ON -DLOCAL_BATCH_TEST:BOOL=ON
--DMPIEXEC_EXECUTABLE:STRING=srun -DMPIEXEC_NUMPROC_FLAG:STRING=-n
--DMPIEXEC_MAX_NUMPROCS:STRING=6
--DCMAKE_TOOLCHAIN_FILE:STRING=config/toolchain/crayle.cmake
--DLOCAL_BATCH_SCRIPT_NAME:STRING=knl_ctestS.sl
--DLOCAL_BATCH_SCRIPT_PARALLEL_NAME:STRING=knl_ctestP.sl -DSITE:STRING=mutrino
--DBUILDNAME:STRING=par-knl_GCC493-SHARED-Linux-4.4.156-94.61.1.16335.0.PTF.1107299-default-x86_64
-"-GUnix Makefiles" "" "<working directory>/hdf5-1.11.4"
+"/projects/Mutrino/hpcsoft/cle6.0/common/cmake/3.12/bin/cmake"
+-C "<working directory>/hdf5-1.13.0/config/cmake/cacheinit.cmake"
+-DCMAKE_BUILD_TYPE:STRING=Release -DHDF5_BUILD_FORTRAN:BOOL=ON
+-DHDF5_BUILD_JAVA:BOOL=OFF
+-DCMAKE_INSTALL_PREFIX:PATH=<working directory>/HDF_Group/HDF5/1.13.0
+-DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF
+-DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_BUILD_CPP_LIB:BOOL=OFF
+-DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_ENABLE_THREADSAFE:BOOL=OFF
+-DHDF5_PACKAGE_EXTLIBS:BOOL=ON -DLOCAL_BATCH_TEST:BOOL=ON
+-DMPIEXEC_EXECUTABLE:STRING=srun -DMPIEXEC_NUMPROC_FLAG:STRING=-n
+-DMPIEXEC_MAX_NUMPROCS:STRING=6
+-DCMAKE_TOOLCHAIN_FILE:STRING=config/toolchain/crayle.cmake
+-DLOCAL_BATCH_SCRIPT_NAME:STRING=knl_ctestS.sl
+-DLOCAL_BATCH_SCRIPT_PARALLEL_NAME:STRING=knl_ctestP.sl -DSITE:STRING=mutrino
+-DBUILDNAME:STRING=par-knl_GCC493-SHARED-Linux-4.4.156-94.61.1.16335.0.PTF.1107299-default-x86_64
+"-GUnix Makefiles" "" "<working directory>/hdf5-1.13.0"
followed by make and batch jobs to run tests.
-To cross-compile on CrayXC40, run the configure command with the craype-haswell
+To cross-compile on CrayXC40, run the configure command with the craype-haswell
module loaded, then switch to the craype-mic-knl module for the build process.
-Tests on machines using slurm can be run with
+Tests on machines using slurm can be run with
-"sbatch -p knl -C quad,cache ctestS.sl"
+"sbatch -p knl -C quad,cache ctestS.sl"
-or
+or
-"sbatch -p knl -C quad,cache ctestP.sl"
+"sbatch -p knl -C quad,cache ctestP.sl"
for parallel builds.
-
+
Tests on machines using LSF will typically use "bsub ctestS.lsf", etc.
========================================================================
VI. Other cross compiling options
========================================================================
-Settings for two other cross-compiling options are also in the config/toolchain
+Settings for two other cross-compiling options are also in the config/toolchain
files which do not seem to be necessary with the Cray PrgEnv-* modules
-1. HDF5_USE_PREGEN. This option, along with the HDF5_USE_PREGEN_DIR CMake
- variable would allow the use of an appropriate H5Tinit.c file with type
- information generated on a compute node to be used when cross compiling
- for those compute nodes. The use of the variables in lines 110 and 111
- of HDF5options.cmake file seem to preclude needing this option with the
- available Cray modules and CMake option.
-
-2. HDF5_BATCH_H5DETECT and associated CMake variables. This option when
- properly configured will run H5detect in a batch job on a compute node
- at the beginning of the CMake build process. It was also found to be
- unnecessary with the available Cray modules and CMake options.
+1. HDF5_USE_PREGEN. This option, along with the HDF5_USE_PREGEN_DIR CMake
+ variable would allow the use of an appropriate H5Tinit.c file with type
+ information generated on a compute node to be used when cross compiling
+ for those compute nodes. The use of the variables in lines 110 and 111
+ of HDF5options.cmake file seem to preclude needing this option with the
+ available Cray modules and CMake option.
+
+2. HDF5_BATCH_H5DETECT and associated CMake variables. This option when
+ properly configured will run H5detect in a batch job on a compute node
+ at the beginning of the CMake build process. It was also found to be
+ unnecessary with the available Cray modules and CMake options.
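Neither cross-compiling option is shown as a snippet in the text, so the following is a hedged sketch of how they might be passed through the same ADD_BUILD_OPTIONS mechanism used elsewhere in HDF5options.cmake; the variable types and the pregen directory path are assumptions:

    # 1. use a pre-generated H5Tinit.c instead of running H5detect on the login node
    set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_USE_PREGEN:BOOL=ON")
    set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_USE_PREGEN_DIR:PATH=/scratch/hdf5-pregen")   # assumed path
    # 2. or run H5detect in a batch job on a compute node during the build
    set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BATCH_H5DETECT:BOOL=ON")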
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 6d06bbc..ff024c3 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -20,7 +20,7 @@ The official HDF5 releases can be obtained from:
https://www.hdfgroup.org/downloads/hdf5/
-Changes from Release to Release and New Features in the HDF5-1.10.x release series
+Changes from Release to Release and New Features in the HDF5-1.13.x release series
can be found at:
https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide
diff --git a/release_docs/USING_CMake_Examples.txt b/release_docs/USING_CMake_Examples.txt
index 21e153f..bd089a6 100644
--- a/release_docs/USING_CMake_Examples.txt
+++ b/release_docs/USING_CMake_Examples.txt
@@ -21,8 +21,8 @@ I. Preconditions
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
- web site. The HDF5 1.10.x product requires a minimum CMake version
- of 3.10.2. If you are using VS2019, the minimum version is 3.15.
+ web site. The HDF5 1.13.x product requires a minimum CMake version
+ of 3.12. If you are using VS2019, the minimum version is 3.15.
2. You have installed the HDF5 library built with CMake, by executing
the HDF Install Utility (the *.msi file in the binary package for
diff --git a/release_docs/USING_HDF5_CMake.txt b/release_docs/USING_HDF5_CMake.txt
index 751041d..f2d7754 100644
--- a/release_docs/USING_HDF5_CMake.txt
+++ b/release_docs/USING_HDF5_CMake.txt
@@ -36,8 +36,8 @@ I. Preconditions
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
- web site. The HDF5 1.10.x product requires a minimum CMake version
- of 3.10.1.
+ web site. The HDF5 1.13.x product requires a minimum CMake version
+ of 3.12.
2. You have installed the HDF5 library built with CMake, by executing
the HDF Install Utility (the *.msi file in the binary package for
@@ -47,7 +47,7 @@ I. Preconditions
3. Set the environment variable HDF5_DIR to the installed location of
the config files for HDF5. On Windows:
- HDF5_DIR=C:/Program Files/HDF_Group/HDF5/1.10.x/cmake
+ HDF5_DIR=C:/Program Files/HDF_Group/HDF5/1.13.x/cmake
(Note there are no quote characters used on Windows and all platforms
use forward slashes)
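Once HDF5_DIR points at the installed cmake directory, a consuming project can pull the package in with find_package. A minimal sketch of such a project; the project name, source file, and the hdf5-static target name are assumptions about a typical CMake-installed HDF5, not text from this document:

    cmake_minimum_required (VERSION 3.12)
    project (HDF5Consumer C)
    # hdf5-config.cmake is located through the HDF5_DIR variable set above
    find_package (HDF5 NAMES hdf5 REQUIRED)
    add_executable (h5_app h5_app.c)
    # link against an imported target exported by the HDF5 install
    target_link_libraries (h5_app PRIVATE hdf5-static)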
diff --git a/release_docs/USING_HDF5_VS.txt b/release_docs/USING_HDF5_VS.txt
index ba22753..5ec9996 100644
--- a/release_docs/USING_HDF5_VS.txt
+++ b/release_docs/USING_HDF5_VS.txt
@@ -54,11 +54,11 @@ Using Visual Studio 2008 with HDF5 Libraries built with Visual Studio 2008
and select "x64".
2.2 Find the box "Show directories for", choose "Include files", add the
- header path (i.e. c:\Program Files\HDF_Group\HDF5\1.10.x\include)
+ header path (i.e. c:\Program Files\HDF_Group\HDF5\1.13.x\include)
to the included directories.
2.3 Find the box "Show directories for", choose "Library files", add the
- library path (i.e. c:\Program Files\HDF_Group\HDF5\1.10.x\lib)
+ library path (i.e. c:\Program Files\HDF_Group\HDF5\1.13.x\lib)
to the library directories.
2.4 If using Fortran libraries, you will also need to setup the path