From 8837ed7bc22f8ef98df805b5f0a8bfcd32208099 Mon Sep 17 00:00:00 2001 From: Glenn Song <43005495+glennsong09@users.noreply.github.com> Date: Wed, 14 Feb 2024 07:37:18 -0600 Subject: Update install texts (#4010) * Update INSTALL_CMake.txt as necessary * Update remaining docs that use HDF5_USE_GNU_DIRS * Update options in HDFMacros in HDF5Examples --- HDF5Examples/config/cmake/HDFMacros.cmake | 8 +- config/cmake/HDF5ExampleCache.cmake | 2 +- config/cmake/HDF5PluginCache.cmake | 2 +- config/cmake/HDFMacros.cmake | 8 +- release_docs/INSTALL | 564 ------------------------------ release_docs/INSTALL_Auto.txt | 556 +++++++++++++++++++++++++++++ release_docs/INSTALL_CMake.txt | 22 +- 7 files changed, 578 insertions(+), 584 deletions(-) delete mode 100644 release_docs/INSTALL create mode 100644 release_docs/INSTALL_Auto.txt diff --git a/HDF5Examples/config/cmake/HDFMacros.cmake b/HDF5Examples/config/cmake/HDFMacros.cmake index 66a25aa..59efbfb 100644 --- a/HDF5Examples/config/cmake/HDFMacros.cmake +++ b/HDF5Examples/config/cmake/HDFMacros.cmake @@ -90,7 +90,7 @@ macro (HDFTEST_COPY_FILE src dest target) endmacro () macro (HDF_DIR_PATHS package_prefix) - option (H5EX_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, FALSE to use historical settings" FALSE) + option (H5EX_USE_GNU_DIRS "ON to use GNU Coding Standard install directory variables, OFF to use historical settings" OFF) if (H5EX_USE_GNU_DIRS) include(GNUInstallDirs) if (NOT ${package_prefix}_INSTALL_BIN_DIR) @@ -121,7 +121,7 @@ macro (HDF_DIR_PATHS package_prefix) endif () if (APPLE) - option (${package_prefix}_BUILD_FRAMEWORKS "TRUE to build as frameworks libraries, FALSE to build according to BUILD_SHARED_LIBS" FALSE) + option (${package_prefix}_BUILD_FRAMEWORKS "ON to build as frameworks libraries, OFF to build according to BUILD_SHARED_LIBS" OFF) endif () if (NOT ${package_prefix}_INSTALL_BIN_DIR) @@ -170,10 +170,10 @@ macro (HDF_DIR_PATHS package_prefix) message(STATUS "Final: ${${package_prefix}_INSTALL_DOC_DIR}") # Always use full RPATH, i.e. 
don't skip the full RPATH for the build tree - set (CMAKE_SKIP_BUILD_RPATH FALSE) + set (CMAKE_SKIP_BUILD_RPATH OFF) # when building, don't use the install RPATH already # (but later on when installing) - set (CMAKE_INSTALL_RPATH_USE_LINK_PATH FALSE) + set (CMAKE_INSTALL_RPATH_USE_LINK_PATH OFF) # add the automatically determined parts of the RPATH # which point to directories outside the build tree to the install RPATH set (CMAKE_BUILD_WITH_INSTALL_RPATH ON) diff --git a/config/cmake/HDF5ExampleCache.cmake b/config/cmake/HDF5ExampleCache.cmake index 6ac9cc0..7c3bf13 100644 --- a/config/cmake/HDF5ExampleCache.cmake +++ b/config/cmake/HDF5ExampleCache.cmake @@ -14,7 +14,7 @@ set (HDF_BUILD_CPP_LIB ${HDF5_BUILD_CPP_LIB} CACHE BOOL "Build HDF5 C++ Library" set (HDF_BUILD_HL_LIB ${HDF5_BUILD_HL_LIB} CACHE BOOL "Build HIGH Level examples" FORCE) set (HDF_ENABLE_THREADSAFE ${HDF5_ENABLE_THREADSAFE} CACHE BOOL "Enable examples thread-safety" FORCE) set (HDF_ENABLE_PARALLEL ${HDF5_ENABLE_PARALLEL} CACHE BOOL "Enable examples parallel build (requires MPI)" FORCE) -set (H5EX_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS} CACHE BOOL "TRUE to use GNU Coding Standard install directory variables, FALSE to use historical settings" FORCE) +set (H5EX_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS} CACHE BOOL "ON to use GNU Coding Standard install directory variables, OFF to use historical settings" FORCE) #preset HDF5 cache vars to this projects libraries instead of searching set (H5EX_HDF5_HEADER "H5pubconf.h" CACHE STRING "Name of HDF5 header" FORCE) diff --git a/config/cmake/HDF5PluginCache.cmake b/config/cmake/HDF5PluginCache.cmake index d299d9f..7cdaf02 100644 --- a/config/cmake/HDF5PluginCache.cmake +++ b/config/cmake/HDF5PluginCache.cmake @@ -39,4 +39,4 @@ set (H5PL_TGZ_NAME "${PLUGIN_TGZ_NAME}" CACHE STRING "Use plugins from compresse set (PL_PACKAGE_NAME "${PLUGIN_PACKAGE_NAME}" CACHE STRING "Name of plugins package" FORCE) set (H5PL_CPACK_ENABLE OFF CACHE BOOL "Enable CPack include and components" FORCE) -set (H5PL_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS} CACHE BOOL "TRUE to use GNU Coding Standard install directory variables" FORCE) +set (H5PL_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS} CACHE BOOL "ON to use GNU Coding Standard install directory variables, OFF to use historical settings" FORCE) diff --git a/config/cmake/HDFMacros.cmake b/config/cmake/HDFMacros.cmake index 30c16e6..64e77c0 100644 --- a/config/cmake/HDFMacros.cmake +++ b/config/cmake/HDFMacros.cmake @@ -369,7 +369,7 @@ macro (HDFTEST_COPY_FILE src dest target) endmacro () macro (HDF_DIR_PATHS package_prefix) - option (HDF5_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, FALSE to use historical settings" FALSE) + option (HDF5_USE_GNU_DIRS "ON to use GNU Coding Standard install directory variables, OFF to use historical settings" OFF) if (HDF5_USE_GNU_DIRS) include(GNUInstallDirs) if (NOT ${package_prefix}_INSTALL_BIN_DIR) @@ -400,7 +400,7 @@ macro (HDF_DIR_PATHS package_prefix) endif () if (APPLE) - option (${package_prefix}_BUILD_FRAMEWORKS "TRUE to build as frameworks libraries, FALSE to build according to BUILD_SHARED_LIBS" FALSE) + option (${package_prefix}_BUILD_FRAMEWORKS "ON to build as frameworks libraries, OFF to build according to BUILD_SHARED_LIBS" OFF) endif () if (NOT ${package_prefix}_INSTALL_BIN_DIR) @@ -449,10 +449,10 @@ macro (HDF_DIR_PATHS package_prefix) message(STATUS "Final: ${${package_prefix}_INSTALL_DOC_DIR}") # Always use full RPATH, i.e. 
don't skip the full RPATH for the build tree - set (CMAKE_SKIP_BUILD_RPATH FALSE) + set (CMAKE_SKIP_BUILD_RPATH OFF) # when building, don't use the install RPATH already # (but later on when installing) - set (CMAKE_INSTALL_RPATH_USE_LINK_PATH FALSE) + set (CMAKE_INSTALL_RPATH_USE_LINK_PATH OFF) # add the automatically determined parts of the RPATH # which point to directories outside the build tree to the install RPATH set (CMAKE_BUILD_WITH_INSTALL_RPATH ON) diff --git a/release_docs/INSTALL b/release_docs/INSTALL deleted file mode 100644 index dd70d0c..0000000 --- a/release_docs/INSTALL +++ /dev/null @@ -1,564 +0,0 @@ - -Instructions for the Installation of HDF5 Software -================================================== - -This file provides instructions for installing the HDF5 software. - -For help with installing, questions can be posted to the HDF Forum or sent to the HDF Helpdesk: - - HDF Forum: https://forum.hdfgroup.org/ - HDF Helpdesk: https://hdfgroup.atlassian.net/servicedesk/customer/portals - -CONTENTS --------- - 1. Obtaining HDF5 - - 2. Quick installation - 2.1. Windows - 2.2. RedStorm (Cray XT3) - - 3. HDF5 dependencies - 3.1. Zlib - 3.2 Szip (optional) - 3.3. MPI and MPI-IO - - 4. Full installation instructions for source distributions - 4.1. Unpacking the distribution - 4.1.1. Non-compressed tar archive (*.tar) - 4.1.2. Compressed tar archive (*.tar.Z) - 4.1.3. Gzip'd tar archive (*.tar.gz) - 4.1.4. Bzip'd tar archive (*.tar.bz2) - 4.2. Source versus build directories - 4.3. Configuring - 4.3.1. Specifying the installation directories - 4.3.2. Using an alternate C compiler - 4.3.3. Additional compilation flags - 4.3.4. Compiling HDF5 wrapper libraries - 4.3.5. Specifying other programs - 4.3.6. Specifying other libraries and headers - 4.3.7. Static versus shared linking - 4.3.8. Optimization versus symbolic debugging - 4.3.9. Parallel versus serial library - 4.3.10. Threadsafe capability - 4.3.11. Backward compatibility - 4.4. Building - 4.5. Testing - 4.6. Installing HDF5 - - 5. Using the Library - - -***************************************************************************** - -1. Obtaining HDF5 - The latest supported public releases of HDF5 are available on - https://github.com/HDFGroup/hdf5. - - -2. Quick installation - For those who don't like to read ;-) the following steps can be used - to configure, build, test, and install the HDF5 library, header files, - and support programs. For example, to install HDF5 version X.Y.Z at - location /usr/local/hdf5, use the following steps. - - $ cd hdf5-X.Y.Z - $ ./configure --prefix=/usr/local/hdf5 - $ make - $ make check # run test suite. - $ make install - $ make check-install # verify installation. - - - above refers to the configure flags appropriate - to your installation. For example, to install HDF5 with the - Fortran and C++ interfaces and with SZIP compression, the - configure line might read as follows: - - $ ./configure --prefix=/usr/local/hdf5 --enable-fortran \ - --enable-cxx --with-szlib=PATH_TO_SZIP - - In this case, PATH_TO_SZIP would be replaced with the path to the - installed location of the SZIP library. - -2.1. Windows - Users of Microsoft Windows should see the INSTALL_Windows files for - detailed instructions. - -2.2. RedStorm (Cray XT3) - Users of the Red Storm machine, after reading this file, should read - the Red Storm section in the INSTALL_parallel file for specific - instructions for the Red Storm machine. 
The same instructions would - probably work for other Cray XT3 systems, but they have not been - verified. - - -3. HDF5 dependencies -3.1. Zlib - The HDF5 library includes a predefined compression filter that - uses the "deflate" method for chunked datasets. If zlib-1.1.2 or - later is found, HDF5 will use it. Otherwise, HDF5's predefined - compression method will degenerate to a no-op; the compression - filter will succeed but the data will not be compressed. - -3.2. Szip (optional) - The HDF5 library includes a predefined compression filter that - uses the extended-Rice lossless compression algorithm for chunked - datasets. For information on Szip compression, license terms, - and obtaining the Szip source code, see: - - https://portal.hdfgroup.org/display/HDF5/Szip+Compression+in+HDF+Products - - Building instructions are available with the Szip source code. - - The HDF Group does not distribute separate Szip precompiled libraries, - but the HDF5 pre-built binaries provided on The HDF Group download page - include the Szip library with the encoder enabled. These can be found - here: - - https://www.hdfgroup.org/downloads/hdf5/ - - To configure the HDF5 library with the Szip compression filter, use - the '--with-szlib=/PATH_TO_SZIP' flag. For more information, see - section 4.3.7, "Specifying other libraries and headers." - - Please notice that if HDF5 configure cannot find a valid Szip library, - configure will not fail; in this case, the compression filter will - not be available to the applications. - - To check if Szip compression was successfully configured in, - check the "I/O filters (external):" line in the configure output, - summary section, printed to the standard output. - -3.3. MPI and MPI-IO - The parallel version of the library is built upon the foundation - provided by MPI and MPI-IO. If these libraries are not available - when HDF5 is configured, only a serial version of HDF5 can be built. - - -4. Full installation instructions for source distributions - -4.1. Unpacking the distribution - The HDF5 source code is distributed in a variety of formats which - can be unpacked with the following commands, each of which creates an - 'hdf5-X.Y.Z' directory, where X.Y.Z is the HDF5 version numbers. - -4.1.1. Non-compressed tar archive (*.tar) - - $ tar xf hdf5-X.Y.Z.tar - -4.1.2. Compressed tar archive (*.tar.Z) - - $ uncompress -c < hdf5-X.Y.Z.tar.Z | tar xf - - Or - $ tar Zxf hdf5-X.Y.Z.tar.Z - -4.1.3. Gzip'd tar archive (*.tar.gz) - - $ gunzip < hdf5-X.Y.Z.tar.gz | tar xf - - Or - $ tar zxf hdf5-X.Y.Z.tar.gz - -4.1.4. Bzip'd tar archive (*.tar.bz2) - - $ bunzip2 < hdf5-X.Y.Z.tar.bz2 | tar xf - - Or - $ tar jxf hdf5-X.Y.Z.tar.bz2 - -4.2. Source versus build directories - On most systems the build can occur in a directory other than the - source directory, allowing multiple concurrent builds and/or - read-only source code. In order to accomplish this, one should - create a build directory, cd into that directory, and run the - `configure' script found in the source directory (configure - details are below). For example, - $ mkdir built-fortran - $ cd build-fortran - $ ../hdf5-X.Y.Z/configure --enable-fortran ... - - -4.3. Configuring - HDF5 uses the GNU autoconf system for configuration, which - detects various features of the host system and creates the - Makefiles. 
On most systems it should be sufficient to say: - - $ ./configure - Or - $ sh configure - - The configuration process can be controlled through environment - variables, command-line switches, and host configuration files. - For a complete list of switches type: - - $ ./configure --help - - The host configuration files are located in the `config' - directory and are based on architecture name, vendor name, and/or - operating system which are displayed near the beginning of the - `configure' output. The host config file influences the behavior - of configure by setting or augmenting shell variables. - -4.3.1. Specifying the installation directories - The default installation location is the HDF5 directory created in - the build directory. Typing `make install' will install the HDF5 - library, header files, examples, and support programs in hdf5/lib, - hdf5/include, hdf5/doc/hdf5/examples, and hdf5/bin. To use a path - other than hdf5, specify the path with the `--prefix=PATH' switch: - - $ ./configure --prefix=/usr/local - - If shared libraries are being built (the default), the final - home of the shared library must be specified with this switch - before the library and executables are built. - - HDF5 can be installed into a different location than the prefix - specified at configure time; see section 4.6, "Installing HDF5," - for more details. - -4.3.2. Using an alternate C compiler - By default, configure will look for the C compiler by trying - `gcc' and `cc'. However, if the environment variable "CC" is set - then its value is used as the C compiler. For instance, one would - use the following line to specify the native C compiler on a system - that also has the GNU gcc compiler (users of csh and derivatives - will need to prefix the commands below with `env'): - - $ CC=cc ./configure - - A parallel version of HDF5 can be built by specifying `mpicc' - as the C compiler. (The `--enable-parallel' flag documented - below is optional in this case.) Using the `mpicc' compiler - will insure that the correct MPI and MPI-IO header files and - libraries are used. - - $ CC=/usr/local/mpi/bin/mpicc ./configure - - -4.3.3. Additional compilation flags - If additional flags must be passed to the compilation commands, - specify those flags with the CFLAGS variable. For instance, - to enable symbolic debugging of a production version of HDF5, one - might say: - - $ CFLAGS=-g ./configure --enable-build-mode=production - -4.3.4. Compiling HDF5 wrapper libraries - One can optionally build the Fortran, C++, and Java interfaces to - the HDF5 C library. By default, these options are disabled. To build - them, specify '--enable-fortran', '--enable-cxx', or '--enable-java', - respectively. - - $ ./configure --enable-fortran - $ ./configure --enable-cxx - $ ./configure --enable-java - - Configuration will halt if a working Fortran 90 or 95 compiler or - C++ compiler is not found. Currently, the Fortran configure tests - for these compilers in order: f90, pgf90, f95. To use an - alternate compiler specify it with the FC variable: - - $ FC=/usr/local/bin/g95 ./configure --enable-fortran - - -4.3.5. Specifying other programs - The build system has been tuned for use with GNU make but also - works with other versions of make. If the `make' command runs a - non-GNU version but a GNU version is available under a different - name (perhaps `gmake'), then HDF5 can be configured to use it by - setting the MAKE variable. 
Note that whatever value is used for - MAKE must also be used as the make command when building the - library: - - $ MAKE=gmake ./configure - $ gmake - - The `AR' and `RANLIB' variables can also be set to the names of - the `ar' and `ranlib' (or `:') commands to override values - detected by configure. - - The HDF5 library, include files, and utilities are installed - during `make install' (described below) with a BSD-compatible - install program detected automatically by configure. If none is - found, the shell script bin/install-sh is used. Configure does not - check that the install script actually works; if a bad install is - detected on your system (e.g., on the ASCI blue machine as of - March 2, 1999) you have two choices: - - 1. Copy the bin/install-sh program to your $HOME/bin - directory, name it `install', and make sure that $HOME/bin - is searched before the system bin directories. - - 2. Specify the full path name of the `install-sh' program - as the value of the INSTALL environment variable. Note: do - not use `cp' or some other program in place of install - because the HDF5 makefiles also use the install program to - change file ownership and/or access permissions. - -4.3.6. Specifying other libraries and headers - Configure searches the standard places (those places known by the - systems compiler) for include files and header files. However, - additional directories can be specified by using the CPPFLAGS - and/or LDFLAGS variables: - - $ CPPFLAGS=-I/home/robb/include \ - LDFLAGS=-L/home/robb/lib \ - ./configure - - HDF5 uses the zlib library to support the HDF5 deflate - data compression filter. Configure searches the standard places - (plus those specified above with the CPPFLAGS and LDFLAGS variables) - for the zlib headers and library. The search can be disabled by - specifying `--without-zlib' or alternate directories can be specified - with `--with-zlib=INCDIR,LIBDIR' or through the CPPFLAGS and LDFLAGS - variables: - - $ ./configure --with-zlib=/usr/unsup/include,/usr/unsup/lib - - $ CPPFLAGS=-I/usr/unsup/include \ - LDFLAGS=-L/usr/unsup/lib \ - ./configure - - HDF5 includes Szip as a predefined compression method (see 3.2). - To enable Szip compression, the HDF5 library must be configured - and built using the Szip library: - - $ ./configure --with-szlib=/Szip_Install_Directory - -4.3.7. Static versus shared linking - The build process will create static libraries on all systems and - shared libraries on systems that support dynamic linking to a - sufficient degree. Either form of the library may be suppressed by - saying `--disable-static' or `--disable-shared'. - - $ ./configure --disable-shared - - Shared C++ and Fortran libraries will be built if shared libraries - are enabled. - - To build only statically linked executables on platforms which - support shared libraries, use the `--enable-static-exec' flag. - - $ ./configure --enable-static-exec - -4.3.8. Optimization versus symbolic debugging - The library can be compiled to provide symbolic debugging support - so it can be debugged with gdb, dbx, ddd, etc., or it can be - compiled with various optimizations. To compile for symbolic - debugging (the default for snapshots), say - `--enable-build-mode=debug'; to compile with optimizations - (the default for supported public releases), - say `--enable-build-mode=production'. For a 'clean slate' configuration - with optimization disabled and nothing turned on, - say `--enable-build-mode=clean'. 
On some systems the - library can also be compiled for profiling with gprof by saying - `--enable-profiling'. - - $ ./configure --enable-build-mode=debug #symbolic debugging - $ ./configure --enable-build-mode=production #optimized code - $ ./configure --enable-build-mode=clean #'clean slate' - $ ./configure --enable-profiling #for use with gprof - - Regardless of whether support for symbolic debugging is enabled, - the library can also perform runtime debugging of certain packages - (such as type conversion execution times and extensive invariant - condition checking). To enable this debugging, supply a - comma-separated list of package names to the `--enable-internal-debug' - switch. - - Debugging can be disabled by saying `--disable-internal-debug'. - The default debugging level for snapshots is a subset of the - available packages; the default for supported releases is no - debugging (debugging can incur a significant runtime penalty). - - $ ./configure --enable-internal-debug=s,t #debug only H5S and H5T - $ ./configure --enable-internal-debug #debug normal packages - $ ./configure --enable-internal-debug=all #debug all packages - $ ./configure --disable-internal-debug #no debugging - - HDF5 can also print a trace of all API function calls, their - arguments, and the return values. To enable or disable the - ability to trace the API say `--enable-trace' (the default for - snapthots) or `--disable-trace' (the default for public releases). - The tracing must also be enabled at runtime to see any output. - -4.3.9. Parallel versus serial library - The HDF5 library can be configured to use MPI and MPI-IO for - parallelism on a distributed multi-processor system. Read the - file INSTALL_parallel for detailed information. - - The threadsafe, C++ and Java interfaces are not compatible - with the parallel option. - Unless --enable-unsupported has been specified on the configure line, - the following options must be disabled: - --enable-threadsafe, --enable-cxx, --enable-java - - -4.3.10. Threadsafe capability - The HDF5 library can be configured to be thread-safe (on a very - large scale) with the `--enable-threadsafe' flag to the configure - script. Some platforms may also require the '-with-pthread=INC,LIB' - (or '--with-pthread=DIR') flag to the configure script. - For further information, see: - - https://portal.hdfgroup.org/display/knowledge/Questions+about+thread-safety+and+concurrent+access - - The high-level, C++, Fortran and Java interfaces are not compatible - with the thread-safety option because the lock is not hoisted - into the higher-level API calls. - Unless --enable-unsupported has been specified on the configure line, - the following options must be disabled: - --enable-hl, --enable-cxx, --enable-fortran, --enable-java - - -4.3.11. Backward compatibility - The 1.14 version of the HDF5 library can be configured to operate - identically to the v1.12 library with the - --with-default-api-version=v112 - configure flag, or identically to the v1.10 library with the - --with-default-api-version=v110 - configure flag, or identically to the v1.8 library with the - --with-default-api-version=v18 - configure flag, or identically to the v1.6 library with the - --with-default-api-version=v16 - configure flag. This allows existing code to be compiled with the - v1.14 library without requiring immediate changes to the application - source code. 
For additional configuration options and other details, - see "API Compatibility Macros": - - https://portal.hdfgroup.org/display/HDF5/API+Compatibility+Macros - -4.4. Building - The library, confidence tests, and programs can be built by - specifying: - - $ make - - Note that if you have supplied some other make command via the MAKE - variable during the configuration step, that same command must be - used here. - - When using GNU make, you can add `-j -l6' to the make command to - compile in parallel on SMP machines. Do not give a number after - the `-j' since GNU make will turn it off for recursive invocations - of make. - - $ make -j -l6 - -4.4. Building doxygen - One can optionally build the doxygen files for the HDF5 C library. - By default, this option is disabled. To build the html files, specify - '--enable-doxygen'. - - $ ./configure --enable-doxygen - - Configuration will halt if the required applications are not available. - To build: - - $ make doxygen - - -4.5. Testing - HDF5 comes with various test suites, all of which can be run by - specifying: - - $ make check - - To run only the tests for the library, change to the `test' - directory before issuing the command. Similarly, tests for the - parallel aspects of the library are in `testpar' and tests for - the support programs are in `tools'. - - The `check' consists of two sub-tests, check-s and check-p, which - are for serial and parallel tests, respectively. Since serial tests - and parallel tests must be run with single and multiple processes - respectively, the two sub-tests work nicely for batch systems in - which the number of processes is fixed per batch job. One may submit - one batch job, requesting 1 process, to run all the serial tests by - "make check-s"; and submit another batch job, requesting multiple - processes, to run all the parallel tests by "make check-p". - - Temporary files will be deleted by each test when it completes, - but may continue to exist in an incomplete state if the test - fails. To prevent deletion of the files, define the HDF5_NOCLEANUP - environment variable. - - The HDF5 tests can take a long time to run on some systems. To perform - a faster (but less thorough) test, set the HDF5TestExpress environment - variable to 2 or 3 (with 3 being the shortest run). To perform a - longer test, set HDF5TestExpress to 0. 3 is the default. - -4.6. Installing HDF5 - The HDF5 library, include files, and support programs can be - installed by specifying `make install'. The files are installed under the - directory specified with `--prefix=DIR' (or if not specified, in 'hdf5' - in the top directory of the HDF5 source code). They will be - placed in directories named `lib', `include', and `bin'. The directories, - if not existing, will be created automatically, provided the mkdir command - supports the -p option. - - If `make install' fails because the install command at your site - somehow fails, you may use the install-sh that comes with the - source. You will need to run ./configure again. - - $ INSTALL="$PWD/bin/install-sh -c" ./configure ... - $ make install - - If you want to install HDF5 in a location other than the location - specified by the `--prefix=DIR' flag during configuration (or - instead of the default location, `hdf5'), you can do that - by running the deploy script: - - $ bin/deploy NEW_DIR - - This will install HDF5 in NEW_DIR. 
Alternately, you can do this - manually by issuing the command: - - $ make install prefix=NEW_DIR - - where NEW_DIR is the new directory where you wish to install HDF5. - If you do not use the deploy script, you should run h5redeploy in - NEW_DIR/bin directory. This utility will fix the h5cc, h5fc and - h5c++ scripts to reflect the new NEW_DIR location. - - The library can be used without installing it by pointing the - compiler at the `src' and 'src/.libs' directory for include files and - libraries. However, the minimum which must be installed to make - the library publicly available is: - - The library: - ./src/.libs/libhdf5.a - - The public header files: - ./src/H5*public.h, ./src/H5public.h - ./src/H5FD*.h except ./src/H5FDprivate.h, - ./src/H5api_adpt.h - - The main header file: - ./src/hdf5.h - - The configuration information: - ./src/H5pubconf.h - - The support programs that are useful are: - ./tools/h5ls/h5ls (list file contents) - ./tools/h5dump/h5dump (dump file contents) - ./tools/misc/h5repart (repartition file families) - ./tools/misc/h5debug (low-level file debugging) - ./tools/h5import/h5import (imports data to HDF5 file) - ./tools/h5diff/h5diff (compares two HDF5 files) - ./tools/gifconv/h52gif (HDF5 to GIF converter) - ./tools/gifconv/gif2h5 (GIF to HDF5 converter) - - -5. Using the Library - - For information on using HDF5 see the documentation, tutorials and examples - found here: - - https://portal.hdfgroup.org/display/HDF5/HDF5 - - A summary of the features included in the built HDF5 installation can be found - in the libhdf5.settings file in the same directory as the static and/or - shared HDF5 libraries. - - - diff --git a/release_docs/INSTALL_Auto.txt b/release_docs/INSTALL_Auto.txt new file mode 100644 index 0000000..969fbac --- /dev/null +++ b/release_docs/INSTALL_Auto.txt @@ -0,0 +1,556 @@ + +Instructions for the Installation of HDF5 Software +================================================== + +This file provides instructions for installing the HDF5 software. + +For help with installing, questions can be posted to the HDF Forum or sent to the HDF Helpdesk: + + HDF Forum: https://forum.hdfgroup.org/ + HDF Helpdesk: https://hdfgroup.atlassian.net/servicedesk/customer/portals + +CONTENTS +-------- + 1. Obtaining HDF5 + + 2. Quick installation + 2.1. Windows + + 3. HDF5 dependencies + 3.1. Zlib + 3.2 Szip (optional) + 3.3. MPI and MPI-IO + + 4. Full installation instructions for source distributions + 4.1. Unpacking the distribution + 4.1.1. Non-compressed tar archive (*.tar) + 4.1.2. Compressed tar archive (*.tar.Z) + 4.1.3. Gzip'd tar archive (*.tar.gz) + 4.1.4. Bzip'd tar archive (*.tar.bz2) + 4.2. Source versus build directories + 4.3. Configuring + 4.3.1. Specifying the installation directories + 4.3.2. Using an alternate C compiler + 4.3.3. Additional compilation flags + 4.3.4. Compiling HDF5 wrapper libraries + 4.3.5. Specifying other programs + 4.3.6. Specifying other libraries and headers + 4.3.7. Static versus shared linking + 4.3.8. Optimization versus symbolic debugging + 4.3.9. Parallel versus serial library + 4.3.10. Threadsafe capability + 4.3.11. Backward compatibility + 4.4. Building + 4.5. Testing + 4.6. Installing HDF5 + + 5. Using the Library + + +***************************************************************************** + +1. Obtaining HDF5 + The latest supported public releases of HDF5 are available on + https://github.com/HDFGroup/hdf5. + + +2. 
Quick installation + For those who don't like to read ;-) the following steps can be used + to configure, build, test, and install the HDF5 library, header files, + and support programs. For example, to install HDF5 version X.Y.Z at + location /usr/local/hdf5, use the following steps. + + $ cd hdf5-X.Y.Z + $ ./configure --prefix=/usr/local/hdf5 + $ make + $ make check # run test suite. + $ make install + $ make check-install # verify installation. + + + above refers to the configure flags appropriate + to your installation. For example, to install HDF5 with the + Fortran and C++ interfaces and with SZIP compression, the + configure line might read as follows: + + $ ./configure --prefix=/usr/local/hdf5 --enable-fortran \ + --enable-cxx --with-szlib=PATH_TO_SZIP + + In this case, PATH_TO_SZIP would be replaced with the path to the + installed location of the SZIP library. + +2.1. Windows + Users of Microsoft Windows should see the INSTALL_Windows files for + detailed instructions. + + +3. HDF5 dependencies +3.1. Zlib + The HDF5 library includes a predefined compression filter that + uses the "deflate" method for chunked datasets. If zlib-1.1.2 or + later is found, HDF5 will use it. Otherwise, HDF5's predefined + compression method will degenerate to a no-op; the compression + filter will succeed but the data will not be compressed. + +3.2. Szip (optional) + The HDF5 library includes a predefined compression filter that + uses the extended-Rice lossless compression algorithm for chunked + datasets. For information on Szip compression, license terms, + and obtaining the Szip source code, see: + + https://portal.hdfgroup.org/display/HDF5/Szip+Compression+in+HDF+Products + + Building instructions are available with the Szip source code. + + The HDF Group does not distribute separate Szip precompiled libraries, + but the HDF5 pre-built binaries provided on The HDF Group download page + include the Szip library with the encoder enabled. These can be found + here: + + https://www.hdfgroup.org/downloads/hdf5/ + + To configure the HDF5 library with the Szip compression filter, use + the '--with-szlib=/PATH_TO_SZIP' flag. For more information, see + section 4.3.7, "Specifying other libraries and headers." + + Please notice that if HDF5 configure cannot find a valid Szip library, + configure will not fail; in this case, the compression filter will + not be available to the applications. + + To check if Szip compression was successfully configured in, + check the "I/O filters (external):" line in the configure output, + summary section, printed to the standard output. + +3.3. MPI and MPI-IO + The parallel version of the library is built upon the foundation + provided by MPI and MPI-IO. If these libraries are not available + when HDF5 is configured, only a serial version of HDF5 can be built. + + +4. Full installation instructions for source distributions + +4.1. Unpacking the distribution + The HDF5 source code is distributed in a variety of formats which + can be unpacked with the following commands, each of which creates an + 'hdf5-X.Y.Z' directory, where X.Y.Z is the HDF5 version numbers. + +4.1.1. Non-compressed tar archive (*.tar) + + $ tar xf hdf5-X.Y.Z.tar + +4.1.2. Compressed tar archive (*.tar.Z) + + $ uncompress -c < hdf5-X.Y.Z.tar.Z | tar xf - + Or + $ tar Zxf hdf5-X.Y.Z.tar.Z + +4.1.3. Gzip'd tar archive (*.tar.gz) + + $ gunzip < hdf5-X.Y.Z.tar.gz | tar xf - + Or + $ tar zxf hdf5-X.Y.Z.tar.gz + +4.1.4. 
Bzip'd tar archive (*.tar.bz2) + + $ bunzip2 < hdf5-X.Y.Z.tar.bz2 | tar xf - + Or + $ tar jxf hdf5-X.Y.Z.tar.bz2 + +4.2. Source versus build directories + On most systems the build can occur in a directory other than the + source directory, allowing multiple concurrent builds and/or + read-only source code. In order to accomplish this, one should + create a build directory, cd into that directory, and run the + `configure' script found in the source directory (configure + details are below). For example, + $ mkdir built-fortran + $ cd build-fortran + $ ../hdf5-X.Y.Z/configure --enable-fortran ... + + +4.3. Configuring + HDF5 uses the GNU autoconf system for configuration, which + detects various features of the host system and creates the + Makefiles. On most systems it should be sufficient to say: + + $ ./configure + Or + $ sh configure + + The configuration process can be controlled through environment + variables, command-line switches, and host configuration files. + For a complete list of switches type: + + $ ./configure --help + + The host configuration files are located in the `config' + directory and are based on architecture name, vendor name, and/or + operating system which are displayed near the beginning of the + `configure' output. The host config file influences the behavior + of configure by setting or augmenting shell variables. + +4.3.1. Specifying the installation directories + The default installation location is the HDF5 directory created in + the build directory. Typing `make install' will install the HDF5 + library, header files, examples, and support programs in hdf5/lib, + hdf5/include, hdf5/doc/hdf5/examples, and hdf5/bin. To use a path + other than hdf5, specify the path with the `--prefix=PATH' switch: + + $ ./configure --prefix=/usr/local + + If shared libraries are being built (the default), the final + home of the shared library must be specified with this switch + before the library and executables are built. + + HDF5 can be installed into a different location than the prefix + specified at configure time; see section 4.6, "Installing HDF5," + for more details. + +4.3.2. Using an alternate C compiler + By default, configure will look for the C compiler by trying + `gcc' and `cc'. However, if the environment variable "CC" is set + then its value is used as the C compiler. For instance, one would + use the following line to specify the native C compiler on a system + that also has the GNU gcc compiler (users of csh and derivatives + will need to prefix the commands below with `env'): + + $ CC=cc ./configure + + A parallel version of HDF5 can be built by specifying `mpicc' + as the C compiler. (The `--enable-parallel' flag documented + below is optional in this case.) Using the `mpicc' compiler + will insure that the correct MPI and MPI-IO header files and + libraries are used. + + $ CC=/usr/local/mpi/bin/mpicc ./configure + + +4.3.3. Additional compilation flags + If additional flags must be passed to the compilation commands, + specify those flags with the CFLAGS variable. For instance, + to enable symbolic debugging of a production version of HDF5, one + might say: + + $ CFLAGS=-g ./configure --enable-build-mode=production + +4.3.4. Compiling HDF5 wrapper libraries + One can optionally build the Fortran, C++, and Java interfaces to + the HDF5 C library. By default, these options are disabled. To build + them, specify '--enable-fortran', '--enable-cxx', or '--enable-java', + respectively. 
+ + $ ./configure --enable-fortran + $ ./configure --enable-cxx + $ ./configure --enable-java + + Configuration will halt if a working Fortran 90 or 95 compiler or + C++ compiler is not found. Currently, the Fortran configure tests + for these compilers in order: f90, pgf90, f95. To use an + alternate compiler specify it with the FC variable: + + $ FC=/usr/local/bin/g95 ./configure --enable-fortran + + +4.3.5. Specifying other programs + The build system has been tuned for use with GNU make but also + works with other versions of make. If the `make' command runs a + non-GNU version but a GNU version is available under a different + name (perhaps `gmake'), then HDF5 can be configured to use it by + setting the MAKE variable. Note that whatever value is used for + MAKE must also be used as the make command when building the + library: + + $ MAKE=gmake ./configure + $ gmake + + The `AR' and `RANLIB' variables can also be set to the names of + the `ar' and `ranlib' (or `:') commands to override values + detected by configure. + + The HDF5 library, include files, and utilities are installed + during `make install' (described below) with a BSD-compatible + install program detected automatically by configure. If none is + found, the shell script bin/install-sh is used. Configure does not + check that the install script actually works; if a bad install is + detected on your system (e.g., on the ASCI blue machine as of + March 2, 1999) you have two choices: + + 1. Copy the bin/install-sh program to your $HOME/bin + directory, name it `install', and make sure that $HOME/bin + is searched before the system bin directories. + + 2. Specify the full path name of the `install-sh' program + as the value of the INSTALL environment variable. Note: do + not use `cp' or some other program in place of install + because the HDF5 makefiles also use the install program to + change file ownership and/or access permissions. + +4.3.6. Specifying other libraries and headers + Configure searches the standard places (those places known by the + systems compiler) for include files and header files. However, + additional directories can be specified by using the CPPFLAGS + and/or LDFLAGS variables: + + $ CPPFLAGS=-I/home/robb/include \ + LDFLAGS=-L/home/robb/lib \ + ./configure + + HDF5 uses the zlib library to support the HDF5 deflate + data compression filter. Configure searches the standard places + (plus those specified above with the CPPFLAGS and LDFLAGS variables) + for the zlib headers and library. The search can be disabled by + specifying `--without-zlib' or alternate directories can be specified + with `--with-zlib=INCDIR,LIBDIR' or through the CPPFLAGS and LDFLAGS + variables: + + $ ./configure --with-zlib=/usr/unsup/include,/usr/unsup/lib + + $ CPPFLAGS=-I/usr/unsup/include \ + LDFLAGS=-L/usr/unsup/lib \ + ./configure + + HDF5 includes Szip as a predefined compression method (see 3.2). + To enable Szip compression, the HDF5 library must be configured + and built using the Szip library: + + $ ./configure --with-szlib=/Szip_Install_Directory + +4.3.7. Static versus shared linking + The build process will create static libraries on all systems and + shared libraries on systems that support dynamic linking to a + sufficient degree. Either form of the library may be suppressed by + saying `--disable-static' or `--disable-shared'. + + $ ./configure --disable-shared + + Shared C++ and Fortran libraries will be built if shared libraries + are enabled. 
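    Conversely, to build only the shared form of the library, the static
    libraries can be suppressed with the `--disable-static' switch named
    above; a brief sketch, assuming the platform supports dynamic linking:

      $ ./configure --disable-static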
+ + To build only statically linked executables on platforms which + support shared libraries, use the `--enable-static-exec' flag. + + $ ./configure --enable-static-exec + +4.3.8. Optimization versus symbolic debugging + The library can be compiled to provide symbolic debugging support + so it can be debugged with gdb, dbx, ddd, etc., or it can be + compiled with various optimizations. To compile for symbolic + debugging (the default for snapshots), say + `--enable-build-mode=debug'; to compile with optimizations + (the default for supported public releases), + say `--enable-build-mode=production'. For a 'clean slate' configuration + with optimization disabled and nothing turned on, + say `--enable-build-mode=clean'. On some systems the + library can also be compiled for profiling with gprof by saying + `--enable-profiling'. + + $ ./configure --enable-build-mode=debug #symbolic debugging + $ ./configure --enable-build-mode=production #optimized code + $ ./configure --enable-build-mode=clean #'clean slate' + $ ./configure --enable-profiling #for use with gprof + + Regardless of whether support for symbolic debugging is enabled, + the library can also perform runtime debugging of certain packages + (such as type conversion execution times and extensive invariant + condition checking). To enable this debugging, supply a + comma-separated list of package names to the `--enable-internal-debug' + switch. + + Debugging can be disabled by saying `--disable-internal-debug'. + The default debugging level for snapshots is a subset of the + available packages; the default for supported releases is no + debugging (debugging can incur a significant runtime penalty). + + $ ./configure --enable-internal-debug=s,t #debug only H5S and H5T + $ ./configure --enable-internal-debug #debug normal packages + $ ./configure --enable-internal-debug=all #debug all packages + $ ./configure --disable-internal-debug #no debugging + + HDF5 can also print a trace of all API function calls, their + arguments, and the return values. To enable or disable the + ability to trace the API say `--enable-trace' (the default for + snapthots) or `--disable-trace' (the default for public releases). + The tracing must also be enabled at runtime to see any output. + +4.3.9. Parallel versus serial library + The HDF5 library can be configured to use MPI and MPI-IO for + parallelism on a distributed multi-processor system. Read the + file INSTALL_parallel for detailed information. + + The threadsafe, C++ and Java interfaces are not compatible + with the parallel option. + Unless --enable-unsupported has been specified on the configure line, + the following options must be disabled: + --enable-threadsafe, --enable-cxx, --enable-java + + +4.3.10. Threadsafe capability + The HDF5 library can be configured to be thread-safe (on a very + large scale) with the `--enable-threadsafe' flag to the configure + script. Some platforms may also require the '-with-pthread=INC,LIB' + (or '--with-pthread=DIR') flag to the configure script. + For further information, see: + + https://portal.hdfgroup.org/display/knowledge/Questions+about+thread-safety+and+concurrent+access + + The high-level, C++, Fortran and Java interfaces are not compatible + with the thread-safety option because the lock is not hoisted + into the higher-level API calls. + Unless --enable-unsupported has been specified on the configure line, + the following options must be disabled: + --enable-hl, --enable-cxx, --enable-fortran, --enable-java + + +4.3.11. 
Backward compatibility + The 1.14 version of the HDF5 library can be configured to operate + identically to the v1.12 library with the + --with-default-api-version=v112 + configure flag, or identically to the v1.10 library with the + --with-default-api-version=v110 + configure flag, or identically to the v1.8 library with the + --with-default-api-version=v18 + configure flag, or identically to the v1.6 library with the + --with-default-api-version=v16 + configure flag. This allows existing code to be compiled with the + v1.14 library without requiring immediate changes to the application + source code. For additional configuration options and other details, + see "API Compatibility Macros": + + https://portal.hdfgroup.org/display/HDF5/API+Compatibility+Macros + +4.4. Building + The library, confidence tests, and programs can be built by + specifying: + + $ make + + Note that if you have supplied some other make command via the MAKE + variable during the configuration step, that same command must be + used here. + + When using GNU make, you can add `-j -l6' to the make command to + compile in parallel on SMP machines. Do not give a number after + the `-j' since GNU make will turn it off for recursive invocations + of make. + + $ make -j -l6 + +4.4. Building doxygen + One can optionally build the doxygen files for the HDF5 C library. + By default, this option is disabled. To build the html files, specify + '--enable-doxygen'. + + $ ./configure --enable-doxygen + + Configuration will halt if the required applications are not available. + To build: + + $ make doxygen + + +4.5. Testing + HDF5 comes with various test suites, all of which can be run by + specifying: + + $ make check + + To run only the tests for the library, change to the `test' + directory before issuing the command. Similarly, tests for the + parallel aspects of the library are in `testpar' and tests for + the support programs are in `tools'. + + The `check' consists of two sub-tests, check-s and check-p, which + are for serial and parallel tests, respectively. Since serial tests + and parallel tests must be run with single and multiple processes + respectively, the two sub-tests work nicely for batch systems in + which the number of processes is fixed per batch job. One may submit + one batch job, requesting 1 process, to run all the serial tests by + "make check-s"; and submit another batch job, requesting multiple + processes, to run all the parallel tests by "make check-p". + + Temporary files will be deleted by each test when it completes, + but may continue to exist in an incomplete state if the test + fails. To prevent deletion of the files, define the HDF5_NOCLEANUP + environment variable. + + The HDF5 tests can take a long time to run on some systems. To perform + a faster (but less thorough) test, set the HDF5TestExpress environment + variable to 2 or 3 (with 3 being the shortest run). To perform a + longer test, set HDF5TestExpress to 0. 3 is the default. + +4.6. Installing HDF5 + The HDF5 library, include files, and support programs can be + installed by specifying `make install'. The files are installed under the + directory specified with `--prefix=DIR' (or if not specified, in 'hdf5' + in the top directory of the HDF5 source code). They will be + placed in directories named `lib', `include', and `bin'. The directories, + if not existing, will be created automatically, provided the mkdir command + supports the -p option. 
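    Because the autotools build is automake-based, staged installs using
    the standard DESTDIR variable are also generally possible; a hedged
    sketch (the staging path is only a placeholder, and the installed
    h5cc/h5fc/h5c++ wrapper scripts will still record the configured
    prefix):

      $ make install DESTDIR=/tmp/hdf5-staging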
+ + If `make install' fails because the install command at your site + somehow fails, you may use the install-sh that comes with the + source. You will need to run ./configure again. + + $ INSTALL="$PWD/bin/install-sh -c" ./configure ... + $ make install + + If you want to install HDF5 in a location other than the location + specified by the `--prefix=DIR' flag during configuration (or + instead of the default location, `hdf5'), you can do that + by running the deploy script: + + $ bin/deploy NEW_DIR + + This will install HDF5 in NEW_DIR. Alternately, you can do this + manually by issuing the command: + + $ make install prefix=NEW_DIR + + where NEW_DIR is the new directory where you wish to install HDF5. + If you do not use the deploy script, you should run h5redeploy in + NEW_DIR/bin directory. This utility will fix the h5cc, h5fc and + h5c++ scripts to reflect the new NEW_DIR location. + + The library can be used without installing it by pointing the + compiler at the `src' and 'src/.libs' directory for include files and + libraries. However, the minimum which must be installed to make + the library publicly available is: + + The library: + ./src/.libs/libhdf5.a + + The public header files: + ./src/H5*public.h, ./src/H5public.h + ./src/H5FD*.h except ./src/H5FDprivate.h, + ./src/H5api_adpt.h + + The main header file: + ./src/hdf5.h + + The configuration information: + ./src/H5pubconf.h + + The support programs that are useful are: + ./tools/h5ls/h5ls (list file contents) + ./tools/h5dump/h5dump (dump file contents) + ./tools/misc/h5repart (repartition file families) + ./tools/misc/h5debug (low-level file debugging) + ./tools/h5import/h5import (imports data to HDF5 file) + ./tools/h5diff/h5diff (compares two HDF5 files) + ./tools/gifconv/h52gif (HDF5 to GIF converter) + ./tools/gifconv/gif2h5 (GIF to HDF5 converter) + + +5. Using the Library + + For information on using HDF5 see the documentation, tutorials and examples + found here: + + https://portal.hdfgroup.org/display/HDF5/HDF5 + + A summary of the features included in the built HDF5 installation can be found + in the libhdf5.settings file in the same directory as the static and/or + shared HDF5 libraries. 
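    As a quick end-to-end check of an installed copy, a small program can
    usually be compiled with the h5cc wrapper script installed in the bin
    directory; a minimal sketch, where /usr/local/hdf5 is the example
    prefix from section 2 and my_prog.c is a placeholder source file:

      $ /usr/local/hdf5/bin/h5cc -o my_prog my_prog.c
      $ ./my_prog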
+ + + diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index f6ba3a1..f794d3c 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -788,13 +788,14 @@ if (WINDOWS) DISABLE_PDB_FILES "Do not install PDB files" OFF ---------------- HDF5 Build Options --------------------- -HDF5_BUILD_CPP_LIB "Build HDF5 C++ Library" OFF -HDF5_BUILD_EXAMPLES "Build HDF5 Library Examples" ON -HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF -HDF5_BUILD_JAVA "Build JAVA support" OFF -HDF5_BUILD_HL_LIB "Build HIGH Level HDF5 Library" ON -HDF5_BUILD_TOOLS "Build HDF5 Tools" ON -HDF5_BUILD_HL_GIF_TOOLS "Build HIGH Level HDF5 GIF Tools" OFF +HDF5_BUILD_CPP_LIB "Build HDF5 C++ Library" OFF +HDF5_BUILD_EXAMPLES "Build HDF5 Library Examples" ON +HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF +HDF5_BUILD_JAVA "Build JAVA support" OFF +HDF5_BUILD_HL_LIB "Build HIGH Level HDF5 Library" ON +HDF5_BUILD_TOOLS "Build HDF5 Tools" ON +HDF5_BUILD_HL_GIF_TOOLS "Build HIGH Level HDF5 GIF Tools" OFF +HDF5_BUILD_PARALLEL_TOOLS "Build Parallel HDF5 Tools" OFF ---------------- HDF5 Folder Build Options --------------------- Defaults relative to $ @@ -809,8 +810,8 @@ else () HDF5_INSTALL_DATA_DIR "share" HDF5_INSTALL_DOC_DIR "HDF5_INSTALL_DATA_DIR" -HDF5_USE_GNU_DIRS "TRUE to use GNU Coding Standard install directory variables, - FALSE to use historical settings" FALSE +HDF5_USE_GNU_DIRS "ON to use GNU Coding Standard install directory variables, + OFF to use historical settings" OFF Defaults as defined by the `GNU Coding Standards` HDF5_INSTALL_BIN_DIR "bin" HDF5_INSTALL_LIB_DIR "lib" @@ -839,6 +840,7 @@ HDF5_ENABLE_EMBEDDED_LIBINFO "embed library info into executables" HDF5_ENABLE_PREADWRITE "Use pread/pwrite in sec2/log/core VFDs in place of read/write (when available)" ON HDF5_ENABLE_TRACE "Enable API tracing capability" OFF HDF5_ENABLE_USING_MEMCHECKER "Indicate that a memory checker is used" OFF +HDF5_ENABLE_MAP_API "Build the map API" OFF HDF5_GENERATE_HEADERS "Rebuild Generated Files" ON HDF5_BUILD_GENERATORS "Build Test Generators" OFF @@ -904,7 +906,7 @@ if (BUILD_TESTING) ---------------- External Library Options --------------------- HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building (NO GIT TGZ)" "NO" HDF5_ENABLE_PLUGIN_SUPPORT "Enable PLUGIN Filters" OFF -HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" ON +HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" ON ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" OFF -- cgit v0.12
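For reference, the cache options listed in the INSTALL_CMake.txt hunks above
are normally passed on the CMake configure line; a minimal sketch, assuming an
out-of-source build directory beside an hdf5-X.Y.Z source tree (the generator
and option values shown are illustrative only):

  $ cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release \
        -DHDF5_USE_GNU_DIRS:BOOL=ON -DHDF5_BUILD_FORTRAN:BOOL=OFF \
        ../hdf5-X.Y.Z
  $ cmake --build . --config Release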