summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--CMakeLists.txt6
-rw-r--r--MANIFEST62
-rw-r--r--README.txt2
-rw-r--r--c++/src/CMakeLists.txt2
-rw-r--r--c++/src/Makefile.in2
-rw-r--r--config/cmake/hdf5-config.cmake.build.in10
-rw-r--r--config/cmake/hdf5-config.cmake.install.in10
-rw-r--r--config/lt_vers.am2
-rwxr-xr-xconfigure26
-rw-r--r--configure.ac6
-rw-r--r--fortran/src/CMakeLists.txt6
-rw-r--r--fortran/src/Makefile.in2
-rw-r--r--hl/c++/src/CMakeLists.txt2
-rw-r--r--hl/c++/src/Makefile.in2
-rw-r--r--hl/fortran/src/CMakeLists.txt4
-rw-r--r--hl/fortran/src/Makefile.in2
-rw-r--r--hl/src/Makefile.in2
-rw-r--r--release_docs/INSTALL_CMake.txt (renamed from release_docs/CMake.txt)547
-rw-r--r--release_docs/INSTALL_MinGW.txt269
-rw-r--r--release_docs/INSTALL_Windows.txt11
-rw-r--r--release_docs/RELEASE.txt16
-rw-r--r--release_docs/USING_HDF5_CMake.txt (renamed from release_docs/USING_CMake.txt)245
-rw-r--r--release_docs/USING_HDF5_VS.txt88
-rw-r--r--release_docs/USING_Windows.txt35
-rw-r--r--src/H5D.c47
-rw-r--r--src/H5Dchunk.c8
-rw-r--r--src/H5Dprivate.h1
-rw-r--r--src/H5G.c46
-rw-r--r--src/H5Gprivate.h1
-rw-r--r--src/H5I.c42
-rw-r--r--src/H5Iprivate.h1
-rw-r--r--src/H5Pocpl.c37
-rw-r--r--src/H5Pprivate.h1
-rw-r--r--src/H5Z.c163
-rw-r--r--src/H5Zprivate.h1
-rw-r--r--src/H5public.h4
-rw-r--r--src/Makefile.in2
-rw-r--r--test/Makefile.am4
-rw-r--r--test/Makefile.in28
-rw-r--r--test/fillval.c262
-rw-r--r--test/unregister.c258
-rw-r--r--testpar/t_dset.c2
-rw-r--r--vms/src/h5pubconf.h6
43 files changed, 1507 insertions, 766 deletions
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 60655ae..b536980 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -487,6 +487,8 @@ IF (HDF5_ENABLE_PARALLEL)
# Used by Fortran + MPI
CHECK_SYMBOL_EXISTS (MPI_Comm_c2f "${MPI_C_INCLUDE_PATH}/mpi.h" H5_HAVE_MPI_MULTI_LANG_Comm)
CHECK_SYMBOL_EXISTS (MPI_Info_c2f "${MPI_C_INCLUDE_PATH}/mpi.h" H5_HAVE_MPI_MULTI_LANG_Info)
+ ELSE (MPI_C_FOUND)
+ MESSAGE (STATUS "Parallel libraries not found")
ENDIF (MPI_C_FOUND)
ENDIF (HDF5_ENABLE_PARALLEL)
@@ -962,8 +964,8 @@ IF (NOT HDF5_EXTERNALLY_CONFIGURED)
)
IF (EXISTS "${HDF5_SOURCE_DIR}/release_docs" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/release_docs")
SET (release_files
- ${HDF5_SOURCE_DIR}/release_docs/CMake.txt
- ${HDF5_SOURCE_DIR}/release_docs/USING_CMake.txt
+ ${HDF5_SOURCE_DIR}/release_docs/INSTALL_CMake.txt
+ ${HDF5_SOURCE_DIR}/release_docs/USING_HDF5_CMake.txt
${HDF5_SOURCE_DIR}/release_docs/COPYING
${HDF5_SOURCE_DIR}/release_docs/HISTORY-1_9.txt
${HDF5_SOURCE_DIR}/release_docs/INSTALL
diff --git a/MANIFEST b/MANIFEST
index afa2e20..50fecb3 100644
--- a/MANIFEST
+++ b/MANIFEST
@@ -529,19 +529,18 @@
./perform/sio_timer.h
./perform/zip_perf.c
-./release_docs/CMake.txt
+./release_docs/INSTALL_CMake.txt
./release_docs/COPYING
./release_docs/HISTORY-1_0-1_8_0_rc3.txt
./release_docs/HISTORY-1_9.txt
./release_docs/INSTALL
./release_docs/INSTALL_Cygwin.txt
-./release_docs/INSTALL_MinGW.txt
./release_docs/INSTALL_parallel
./release_docs/INSTALL_VMS.txt
./release_docs/INSTALL_Windows.txt
./release_docs/RELEASE.txt
-./release_docs/USING_CMake.txt
-./release_docs/USING_Windows.txt
+./release_docs/USING_HDF5_CMake.txt
+./release_docs/USING_HDF5_VS.txt
./src/.indent.pro _DO_NOT_DISTRIBUTE_
./src/hdf5.lnt _DO_NOT_DISTRIBUTE_
@@ -1012,30 +1011,32 @@
./test/flush1.c
./test/flush2.c
./test/flushrefresh.c
-./test/gen_bad_ohdr.c _DO_NOT_DISTRIBUTE_
-./test/gen_bad_compound.c _DO_NOT_DISTRIBUTE_
-./test/gen_bogus.c _DO_NOT_DISTRIBUTE_
-./test/gen_cross.c _DO_NOT_DISTRIBUTE_
-./test/gen_deflate.c _DO_NOT_DISTRIBUTE_
-./test/gen_file_image.c _DO_NOT_DISTRIBUTE_
-./test/gen_filespace.c _DO_NOT_DISTRIBUTE_
-./test/gen_idx.c _DO_NOT_DISTRIBUTE_
-./test/gen_mergemsg.c _DO_NOT_DISTRIBUTE_
-./test/gen_new_array.c _DO_NOT_DISTRIBUTE_
-./test/gen_new_fill.c _DO_NOT_DISTRIBUTE_
-./test/gen_new_group.c _DO_NOT_DISTRIBUTE_
-./test/gen_new_mtime.c _DO_NOT_DISTRIBUTE_
-./test/gen_new_super.c _DO_NOT_DISTRIBUTE_
-./test/gen_noencoder.c _DO_NOT_DISTRIBUTE_
-./test/gen_nullspace.c _DO_NOT_DISTRIBUTE_
-./test/gen_old_array.c _DO_NOT_DISTRIBUTE_
-./test/gen_old_group.c _DO_NOT_DISTRIBUTE_
-./test/gen_old_layout.c _DO_NOT_DISTRIBUTE_
-./test/gen_old_mtime.c _DO_NOT_DISTRIBUTE_
-./test/gen_plist.c _DO_NOT_DISTRIBUTE_
-./test/gen_sizes_lheap.c _DO_NOT_DISTRIBUTE_
-./test/gen_specmetaread.c _DO_NOT_DISTRIBUTE_
-./test/gen_udlinks.c _DO_NOT_DISTRIBUTE_
+# ====distribute this for now. See HDFFV-8236====
+./test/gen_bad_ohdr.c
+./test/gen_bad_compound.c
+./test/gen_bogus.c
+./test/gen_cross.c
+./test/gen_deflate.c
+./test/gen_file_image.c
+./test/gen_filespace.c
+./test/gen_idx.c
+./test/gen_mergemsg.c
+./test/gen_new_array.c
+./test/gen_new_fill.c
+./test/gen_new_group.c
+./test/gen_new_mtime.c
+./test/gen_new_super.c
+./test/gen_noencoder.c
+./test/gen_nullspace.c
+./test/gen_old_array.c
+./test/gen_old_group.c
+./test/gen_old_layout.c
+./test/gen_old_mtime.c
+./test/gen_plist.c
+./test/gen_sizes_lheap.c
+./test/gen_specmetaread.c
+./test/gen_udlinks.c
+# ====end distribute this for now. See HDFFV-8236====
./test/getname.c
./test/gheap.c
./test/group_old.h5
@@ -1062,7 +1063,9 @@
./test/reserved.c
./test/pool.c
./test/set_extent.c
-./test/space_overflow.c _DO_NOT_DISTRIBUTE_
+# ====distribute this for now. See HDFFV-8236====
+./test/space_overflow.c
+# ====end distribute this for now. See HDFFV-8236====
./test/specmetaread.h5
./test/stab.c
./test/swmr_addrem_writer.c
@@ -1127,6 +1130,7 @@
./test/tvlstr.c
./test/tvltypes.c
./test/unlink.c
+./test/unregister.c
./test/use_append_chunk.c
./test/use_append_mchunks.c
./test/use_common.c
diff --git a/README.txt b/README.txt
index 965dbf8..97235cb 100644
--- a/README.txt
+++ b/README.txt
@@ -1,4 +1,4 @@
-HDF5 version 1.9.149-FA_a5 currently under development
+HDF5 version 1.9.151-FA_a5 currently under development
Please refer to the release_docs/INSTALL file for installation instructions.
------------------------------------------------------------------------------
diff --git a/c++/src/CMakeLists.txt b/c++/src/CMakeLists.txt
index fea68cd..cf5bd07 100644
--- a/c++/src/CMakeLists.txt
+++ b/c++/src/CMakeLists.txt
@@ -96,7 +96,7 @@ INSTALL (
FILES
${CPP_HDRS}
DESTINATION
- ${HDF5_INSTALL_INCLUDE_DIR}/cpp
+ ${HDF5_INSTALL_INCLUDE_DIR}
COMPONENT
cppheaders
)
diff --git a/c++/src/Makefile.in b/c++/src/Makefile.in
index 37e5d1b..103da91 100644
--- a/c++/src/Makefile.in
+++ b/c++/src/Makefile.in
@@ -467,7 +467,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 139
+LT_VERS_REVISION = 141
LT_VERS_AGE = 0
# Include src directory
diff --git a/config/cmake/hdf5-config.cmake.build.in b/config/cmake/hdf5-config.cmake.build.in
index 881985e..fbe97ac 100644
--- a/config/cmake/hdf5-config.cmake.build.in
+++ b/config/cmake/hdf5-config.cmake.build.in
@@ -18,9 +18,17 @@ SET (HDF5_ENABLE_SZIP_ENCODING @HDF5_ENABLE_SZIP_ENCODING@)
SET (HDF5_BUILD_SHARED_LIBS @BUILD_SHARED_LIBS@)
#-----------------------------------------------------------------------------
+# Dependencies
+#-----------------------------------------------------------------------------
+IF(HDF5_ENABLE_PARALLEL)
+ SET(HDF5_MPI_C_INCLUDE_PATH "@MPI_C_INCLUDE_PATH@")
+ SET(HDF5_MPI_C_LIBRARIES "@MPI_C_LIBRARIES@")
+ENDIF(HDF5_ENABLE_PARALLEL)
+
+#-----------------------------------------------------------------------------
# Directories
#-----------------------------------------------------------------------------
-SET (HDF5_INCLUDE_DIR "@HDF5_INCLUDES_BUILD_TIME@")
+SET (HDF5_INCLUDE_DIR "@HDF5_INCLUDES_BUILD_TIME@" "${HDF5_MPI_C_INCLUDE_PATH}" )
IF (HDF5_BUILD_FORTRAN)
SET (HDF5_INCLUDE_DIR_FORTRAN "@CMAKE_Fortran_MODULE_DIRECTORY@" )
diff --git a/config/cmake/hdf5-config.cmake.install.in b/config/cmake/hdf5-config.cmake.install.in
index 6b90496..9c5a5a1 100644
--- a/config/cmake/hdf5-config.cmake.install.in
+++ b/config/cmake/hdf5-config.cmake.install.in
@@ -24,9 +24,17 @@ SET (HDF5_BUILD_SHARED_LIBS @BUILD_SHARED_LIBS@)
SET (HDF5_PACKAGE_EXTLIBS @HDF5_PACKAGE_EXTLIBS@)
#-----------------------------------------------------------------------------
+# Dependencies
+#-----------------------------------------------------------------------------
+IF(HDF5_ENABLE_PARALLEL)
+ SET(HDF5_MPI_C_INCLUDE_PATH "@MPI_C_INCLUDE_PATH@")
+ SET(HDF5_MPI_C_LIBRARIES "@MPI_C_LIBRARIES@")
+ENDIF(HDF5_ENABLE_PARALLEL)
+
+#-----------------------------------------------------------------------------
# Directories
#-----------------------------------------------------------------------------
-SET (HDF5_INCLUDE_DIR "${_IMPORT_PREFIX}/include" )
+SET (HDF5_INCLUDE_DIR "${_IMPORT_PREFIX}/include" "${HDF5_MPI_C_INCLUDE_PATH}" )
IF (HDF5_BUILD_FORTRAN)
SET (HDF5_INCLUDE_DIR_FORTRAN "${_IMPORT_PREFIX}/include/fortran" )
diff --git a/config/lt_vers.am b/config/lt_vers.am
index 8df77f3..ac9fc70 100644
--- a/config/lt_vers.am
+++ b/config/lt_vers.am
@@ -17,7 +17,7 @@
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 139
+LT_VERS_REVISION = 141
LT_VERS_AGE = 0
## If the API changes *at all*, increment LT_VERS_INTERFACE and
diff --git a/configure b/configure
index ce46447..333db5d 100755
--- a/configure
+++ b/configure
@@ -1,7 +1,7 @@
#! /bin/sh
# From configure.ac Id: configure.ac 22697 2012-08-19 14:35:47Z hdftest .
# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.69 for HDF5 1.9.149-FA_a5.
+# Generated by GNU Autoconf 2.69 for HDF5 1.9.151-FA_a5.
#
# Report bugs to <help@hdfgroup.org>.
#
@@ -591,8 +591,8 @@ MAKEFLAGS=
# Identity of this package.
PACKAGE_NAME='HDF5'
PACKAGE_TARNAME='hdf5'
-PACKAGE_VERSION='1.9.149-FA_a5'
-PACKAGE_STRING='HDF5 1.9.149-FA_a5'
+PACKAGE_VERSION='1.9.151-FA_a5'
+PACKAGE_STRING='HDF5 1.9.151-FA_a5'
PACKAGE_BUGREPORT='help@hdfgroup.org'
PACKAGE_URL=''
@@ -1486,7 +1486,7 @@ if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
-\`configure' configures HDF5 1.9.149-FA_a5 to adapt to many kinds of systems.
+\`configure' configures HDF5 1.9.151-FA_a5 to adapt to many kinds of systems.
Usage: $0 [OPTION]... [VAR=VALUE]...
@@ -1556,7 +1556,7 @@ fi
if test -n "$ac_init_help"; then
case $ac_init_help in
- short | recursive ) echo "Configuration of HDF5 1.9.149-FA_a5:";;
+ short | recursive ) echo "Configuration of HDF5 1.9.151-FA_a5:";;
esac
cat <<\_ACEOF
@@ -1752,7 +1752,7 @@ fi
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
-HDF5 configure 1.9.149-FA_a5
+HDF5 configure 1.9.151-FA_a5
generated by GNU Autoconf 2.69
Copyright (C) 2012 Free Software Foundation, Inc.
@@ -2846,7 +2846,7 @@ cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
-It was created by HDF5 $as_me 1.9.149-FA_a5, which was
+It was created by HDF5 $as_me 1.9.151-FA_a5, which was
generated by GNU Autoconf 2.69. Invocation command line was
$ $0 $@
@@ -3678,7 +3678,7 @@ fi
# Define the identity of the package.
PACKAGE='hdf5'
- VERSION='1.9.149-FA_a5'
+ VERSION='1.9.151-FA_a5'
cat >>confdefs.h <<_ACEOF
@@ -26212,7 +26212,7 @@ fi
done
- if test "x$HAVE_DMALLOC" = "xyes"; then
+ if test "x$HAVE_PTHREAD" = "xyes"; then
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for pthread_self in -lpthread" >&5
$as_echo_n "checking for pthread_self in -lpthread... " >&6; }
if ${ac_cv_lib_pthread_pthread_self+:} false; then :
@@ -26336,7 +26336,7 @@ done
fi
- if test "x$HAVE_DMALLOC" = "xyes"; then
+ if test "x$HAVE_PTHREAD" = "xyes"; then
if test -n "$pthread_lib"; then
saved_LDFLAGS="$LDFLAGS"
saved_AM_LDFLAGS="$AM_LDFLAGS"
@@ -31632,7 +31632,7 @@ Usage: $0 [OPTIONS]
Report bugs to <bug-libtool@gnu.org>."
lt_cl_version="\
-HDF5 config.lt 1.9.149-FA_a5
+HDF5 config.lt 1.9.151-FA_a5
configured by $0, generated by GNU Autoconf 2.69.
Copyright (C) 2011 Free Software Foundation, Inc.
@@ -33762,7 +33762,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
-This file was extended by HDF5 $as_me 1.9.149-FA_a5, which was
+This file was extended by HDF5 $as_me 1.9.151-FA_a5, which was
generated by GNU Autoconf 2.69. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
@@ -33828,7 +33828,7 @@ _ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
-HDF5 config.status 1.9.149-FA_a5
+HDF5 config.status 1.9.151-FA_a5
configured by $0, generated by GNU Autoconf 2.69,
with options \\"\$ac_cs_config\\"
diff --git a/configure.ac b/configure.ac
index e0f9480..8e6b057 100644
--- a/configure.ac
+++ b/configure.ac
@@ -26,7 +26,7 @@ AC_PREREQ([2.69])
## NOTE: Do not forget to change the version number here when we do a
## release!!!
##
-AC_INIT([HDF5], [1.9.149-FA_a5], [help@hdfgroup.org])
+AC_INIT([HDF5], [1.9.151-FA_a5], [help@hdfgroup.org])
AC_CONFIG_SRCDIR([src/H5.c])
AC_CONFIG_HEADER([src/H5config.h])
@@ -1945,7 +1945,7 @@ AC_ARG_WITH([pthread],
case "$withval" in
yes)
AC_CHECK_HEADERS([pthread.h],, [unset HAVE_PTHREAD])
- if test "x$HAVE_DMALLOC" = "xyes"; then
+ if test "x$HAVE_PTHREAD" = "xyes"; then
AC_CHECK_LIB([pthread], [pthread_self],, [unset HAVE_PTHREAD])
fi
;;
@@ -1987,7 +1987,7 @@ case "$withval" in
AC_CHECK_HEADERS([pthread.h],, [unset HAVE_PTHREAD])
fi
- if test "x$HAVE_DMALLOC" = "xyes"; then
+ if test "x$HAVE_PTHREAD" = "xyes"; then
if test -n "$pthread_lib"; then
saved_LDFLAGS="$LDFLAGS"
saved_AM_LDFLAGS="$AM_LDFLAGS"
diff --git a/fortran/src/CMakeLists.txt b/fortran/src/CMakeLists.txt
index c41916a..1bddc56 100644
--- a/fortran/src/CMakeLists.txt
+++ b/fortran/src/CMakeLists.txt
@@ -254,7 +254,7 @@ INSTALL (
${HDF5_F90_BINARY_DIR}/H5f90i_gen.h
${HDF5_F90_BINARY_DIR}/H5fortran_types.f90
DESTINATION
- ${HDF5_INSTALL_INCLUDE_DIR}/fortran
+ ${HDF5_INSTALL_INCLUDE_DIR}
COMPONENT
fortheaders
)
@@ -264,7 +264,7 @@ IF (WIN32 AND NOT CYGWIN)
DIRECTORY
${CMAKE_Fortran_MODULE_DIRECTORY}/\${BUILD_TYPE}/
DESTINATION
- ${HDF5_INSTALL_INCLUDE_DIR}/fortran
+ ${HDF5_INSTALL_INCLUDE_DIR}
COMPONENT
fortheaders
)
@@ -273,7 +273,7 @@ ELSE (WIN32 AND NOT CYGWIN)
DIRECTORY
${CMAKE_Fortran_MODULE_DIRECTORY}/
DESTINATION
- ${HDF5_INSTALL_INCLUDE_DIR}/fortran
+ ${HDF5_INSTALL_INCLUDE_DIR}
COMPONENT
fortheaders
)
diff --git a/fortran/src/Makefile.in b/fortran/src/Makefile.in
index 7379980..bae0cae 100644
--- a/fortran/src/Makefile.in
+++ b/fortran/src/Makefile.in
@@ -518,7 +518,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 139
+LT_VERS_REVISION = 141
LT_VERS_AGE = 0
# Include src directory in both Fortran and C flags (C compiler is used
diff --git a/hl/c++/src/CMakeLists.txt b/hl/c++/src/CMakeLists.txt
index 5a2a7c4..89de4ca 100644
--- a/hl/c++/src/CMakeLists.txt
+++ b/hl/c++/src/CMakeLists.txt
@@ -27,7 +27,7 @@ INSTALL (
FILES
${HDF5_HL_CPP_HDRS}
DESTINATION
- ${HDF5_INSTALL_INCLUDE_DIR}/hl/cpp
+ ${HDF5_INSTALL_INCLUDE_DIR}
COMPONENT
hlcppheaders
)
diff --git a/hl/c++/src/Makefile.in b/hl/c++/src/Makefile.in
index b360bf3..717e0ae 100644
--- a/hl/c++/src/Makefile.in
+++ b/hl/c++/src/Makefile.in
@@ -458,7 +458,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 139
+LT_VERS_REVISION = 141
LT_VERS_AGE = 0
# Include src directory
diff --git a/hl/fortran/src/CMakeLists.txt b/hl/fortran/src/CMakeLists.txt
index c580516..9ab0fb2 100644
--- a/hl/fortran/src/CMakeLists.txt
+++ b/hl/fortran/src/CMakeLists.txt
@@ -76,7 +76,7 @@ IF (WIN32 AND NOT CYGWIN)
DIRECTORY
${CMAKE_Fortran_MODULE_DIRECTORY}/\${BUILD_TYPE}/
DESTINATION
- ${HDF5_INSTALL_INCLUDE_DIR}/fortran
+ ${HDF5_INSTALL_INCLUDE_DIR}
COMPONENT
fortheaders
)
@@ -85,7 +85,7 @@ ELSE (WIN32 AND NOT CYGWIN)
DIRECTORY
${CMAKE_Fortran_MODULE_DIRECTORY}/
DESTINATION
- ${HDF5_INSTALL_INCLUDE_DIR}/fortran
+ ${HDF5_INSTALL_INCLUDE_DIR}
COMPONENT
fortheaders
)
diff --git a/hl/fortran/src/Makefile.in b/hl/fortran/src/Makefile.in
index fb54a54..ff77de7 100644
--- a/hl/fortran/src/Makefile.in
+++ b/hl/fortran/src/Makefile.in
@@ -474,7 +474,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 139
+LT_VERS_REVISION = 141
LT_VERS_AGE = 0
INCLUDES = -I$(top_srcdir)/src -I$(top_srcdir)/hl/src -I$(top_builddir)/hl/src \
-I$(top_srcdir)/fortran/src -I$(top_builddir)/fortran/src
diff --git a/hl/src/Makefile.in b/hl/src/Makefile.in
index bbb788d..a2a47ad 100644
--- a/hl/src/Makefile.in
+++ b/hl/src/Makefile.in
@@ -457,7 +457,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 139
+LT_VERS_REVISION = 141
LT_VERS_AGE = 0
# This library is our main target.
diff --git a/release_docs/CMake.txt b/release_docs/INSTALL_CMake.txt
index 81dfb61..95ebec9 100644
--- a/release_docs/CMake.txt
+++ b/release_docs/INSTALL_CMake.txt
@@ -2,65 +2,112 @@
* Build and Install HDF5 C/C++ Library with CMake *
************************************************************************
-Notes: This short instruction is written for users who want to quickly build
- HDF5 library and tools from the HDF5 source code package using the
- CMake tools.
-
- More information about using CMake can be found at the KitWare site,
- www.cmake.org.
-
- CMake uses the command line, however the visual CMake tool is
- recommended for the configuration step. The steps are similar for
- all the operating systems supported by CMake.
+ Table of Contents
+
+Section I: Quick Step Building HDF5 Libraries with CMake
+Section II: Preconditions
+Section III: Building HDF5 C/C++ Libraries with CMake
+Section IV: All Options for HDF5 C/C++ Libraries with CMake
+Section V: APPENDIX
+
+************************************************************************
+
+
+
+========================================================================
+I. Quick Step Building HDF5 Libraries with CMake
+========================================================================
+Notes: This short set of instructions is written for users who want to
+ quickly build the HDF5 Library and tools from the HDF5 source code
+ package using the CMake tools.
- NOTES:
- 1. Using CMake for building and using HDF5 is under active development.
- While we have attempted to provide error-free files, please
- understand that development with CMake has not been extensively
- tested outside of HDF. The CMake specific files may change
- before the next release.
-
- 2. CMake was originally introduced to support development on Windows,
- however it should be usable on any system where CMake is supported.
- Please send us any comments on how CMake support can be improved on
- any system. Visit the KitWare site for more information about CMake.
-
- 3. Build and test results can be submitted to our CDash server at:
- cdash.hdfgroup.uiuc.edu.
- Please read the HDF and CDash document at:
- www.hdfgroup.org/CDash/HowToSubmit.
-
- 4. See the appendix at the bottom of this file for examples of using
- a ctest script for building and testing.
-
+ A. Windows Quick Step Building HDF5 Libraries with CMake Using VS2010
+
+ Go through these steps:
+
+ 1. Locate the source files in:
+ c:\MyHDFstuff\hdf5
+
+ 2. Create a build folder at:
+ c:\MyHDFstuff\hdf5\build
+
+ 3. Open a command prompt at:
+ c:\MyHDFstuff\hdf5\build
+
+ 4. Configure the C library, tools and tests with this command:
+ cmake -G "Visual Studio 10" -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..
+
+ 5. Build the C library, tools and tests with this command:
+ cmake --build . --config Release
+
+ 6. Test the C library and tools with this command:
+ ctest . -C Release
+
+ 7. Create an install image with this command:
+ cpack -C Release CPackConfig.cmake
+
+ 8. Install with this command:
+ HDF5-1.8.11-win32.exe
+
+ B. Linux Quick Step Building HDF5 Libraries with CMake Using GCC
+
+ Go through these steps:
+ 1. Locate the source files in:
+ ~/MyHDFstuff/hdf5
+
+ 2. Create a build folder at:
+ ~/MyHDFstuff/hdf5/build
+
+ 3. Open a command prompt at:
+ ~/MyHDFstuff/hdf5/build
+
+ 4. Configure the C library, tools and tests with this command:
+ cmake -G "Unix Makefiles" -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..
+
+ 5. Build the C library, tools and tests with this command:
+ cmake --build . --config Release
+
+ 6. Test the C library and tools with this command:
+ ctest . -C Release
+
+ 7. Create an install image with this command:
+ cpack -C Release CPackConfig.cmake
+
+ 8. Install with this command:
+ HDF5-1.8.11-Linux.sh
+
+
+
+
+========================================================================
+II. Preconditions
========================================================================
- Preconditions
-========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
- web site. The HDF5 1.8.x product requires CMake version 2.8.10.
+ web site. The HDF5 1.8.x product requires a minimum CMake version 2.8.10.
- 2. If you plan to use Zlib or Szip;
- A. Download the packages and install them
- in a central location. For example on Windows, create a folder extlibs
- and install the packages there.
- B. Use source packages from a SVN server by adding the following CMake
+ 2. If you plan to use Zlib or Szip:
+ A. Download the packages and install them in a central location.
+ For example on Windows, create a folder extlibs and install the
+ packages there.
+ B. Use source packages from an SVN server by adding the following CMake
options:
HDF5_ALLOW_EXTERNAL_SUPPORT:STRING="SVN"
ZLIB_SVN_URL:STRING="http://some_location/zlib/trunk"
SZIP_SVN_URL:STRING="http://some_location/szip/trunk"
where "some_location" is the URL to the SVN repository.
- C. Use source packages from a compressed file by adding the following CMake
- options:
+ C. Use source packages from a compressed file by adding the following
+ CMake options:
HDF5_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ"
ZLIB_TGZ_NAME:STRING="zlib_src.ext"
SZIP_TGZ_NAME:STRING="szip_src.ext"
TGZ_PATH:STRING="some_location"
- where "some_location" is the URL or full path to the compressed file and
- ext is the type of compression file.
+ where "some_location" is the URL or full path to the compressed
+ file and ext is the type of compression file.
- 3. Building on Apple Darwin platforms should add the following options:
+ 3. If you are building on Apple Darwin platforms, you should add the
+ following options:
Compiler choice - use xcode by setting the ENV variables of CC and CXX
Shared fortran is not supported, build static:
BUILD_SHARED_LIBS:BOOL=OFF
@@ -69,9 +116,55 @@ Notes: This short instruction is written for users who want to quickly build
CTEST_USE_LAUNCHERS:BOOL=ON
CMAKE_BUILD_WITH_INSTALL_RPATH:BOOL=OFF
-
+ 4. Windows developers should install NSIS to create an install image with CPack.
+ Visual Studio Express users will not be able to package HDF5 into
+ an install image executable.
+
+
+Notes: More information about using CMake can be found at the KitWare site,
+ www.cmake.org.
+
+ CMake uses the command line; however, the visual CMake tool is
+ recommended for the configuration step. The steps are similar for
+ all the operating systems supported by CMake.
+
+ NOTES:
+ 1. Using CMake for building and using HDF5 is under active development.
+ While we have attempted to provide error-free files, please
+ understand that development with CMake has not been extensively
+ tested outside of HDF. The CMake specific files may change
+ before the next release.
+
+ 2. CMake support for HDF5 development should be usable on any
+ system where CMake is supported. Please send us any comments on
+ how CMake support can be improved on any system. Visit the
+ KitWare site for more information about CMake.
+
+ 3. Build and test results can be submitted to our CDash server at:
+ cdash.hdfgroup.uiuc.edu.
+ Please read the HDF and CDash document at:
+ www.hdfgroup.org/CDash/HowToSubmit.
+
+ 4. See the appendix at the bottom of this file for examples of using
+ a ctest script for building and testing.
+
+
+
+
========================================================================
- Building HDF5 C/C++ Libraries with CMake
+III. Building HDF5 C/C++ Libraries with CMake
+========================================================================
+
+To build the HDF5 C/C++ Libraries with CMake, go through these five steps:
+
+ 1. Run CMake
+ 2. Configure the cache settings
+ 3. Build HDF5
+ 4. Test HDF5
+ 5. Packaging HDF5 (create install image)
+
+These five steps are described in detail below.
+
========================================================================
1. Run CMake
@@ -80,18 +173,21 @@ Notes: This short instruction is written for users who want to quickly build
available in your Start menu. For Linux, UNIX, and Mac users the
executable is named "cmake-gui" and can be found where CMake was
installed.
+
Specify the source and build directories.
- ***** It is recommemded that you choose a build directory ******
- ***** different then the source directory ******
- (for example on Windows, if the source is at c:\MyHDFstuff\hdf5, then
- use c:\MyHDFstuff\hdf5\build or c:\MyHDFstuff\build\hdf5).
+
+ ***** Make the build and source directories different. ******
+
+ For example on Windows, if the source is at c:\MyHDFstuff\hdf5,
+ then use c:\MyHDFstuff\hdf5\build or c:\MyHDFstuff\build\hdf5 as the
+ build directory.
OPTIONAL:
- Users can perform the configuration step without using the visual cmake-gui
- program. We use the file cacheinit.cmake in the config/cmake folder for
- our testing. This file enables all the basic options and we turn specific
- options on or off for testing using the following command line within the build
- directory:
+ Users can perform the configuration step without using the visual
+ cmake-gui program. We use the file cacheinit.cmake in the
+ config/cmake folder for our testing. This file enables all the
+ basic options and we turn specific options on or off for testing
+ using the following command line within the build directory:
cmake -C <sourcepath>/config/cmake/cacheinit.cmake -G "<generator>" [-D<options>] <sourcepath>
@@ -101,6 +197,8 @@ Notes: This short instruction is written for users who want to quickly build
* MinGW Makefiles
* NMake Makefiles
* Unix Makefiles
+ * Visual Studio 11
+ * Visual Studio 11 Win64
* Visual Studio 10
* Visual Studio 10 Win64
* Visual Studio 6
@@ -119,7 +217,7 @@ Notes: This short instruction is written for users who want to quickly build
* <HDF5OPTION>:BOOL=[ON | OFF]
<cacheinit.cmake> is:
- # This is the CMakeCache file.
+ # This is the CMakeCache file used by HDF Group for daily tests.
########################
# EXTERNAL cache entries
########################
@@ -167,7 +265,7 @@ Notes: This short instruction is written for users who want to quickly build
generator you wish to use (for example on Windows, Visual Studio 9 2008).
CMake will read in the CMakeLists.txt files from the source directory and
display options for the HDF5 project. After the first configure you
- can adjust the cache settings and/or specify locations of other programs.
+ can adjust the cache settings and/or specify the locations of other programs.
Any conflicts or new values will be highlighted by the configure
process in red. Once you are happy with all the settings and there are no
@@ -185,8 +283,8 @@ Notes: This short instruction is written for users who want to quickly build
cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 9 2008" \
-DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF ..
- 2.3 On Windows, if you are using a VS Express version you must be sure that the
- following two options are correctly set/unset:
+ 2.3 On Windows, if you are using a Visual Studio Express version you must
+ be sure that the following two options are correctly set/unset:
HDF5_NO_PACKAGES:BOOL=ON
HDF5_USE_FOLDERS:BOOL=OFF
@@ -197,7 +295,7 @@ Notes: This short instruction is written for users who want to quickly build
or the command line. The command line is used on linux, Unix, and Mac.
To build from the command line, navigate to your build directory and
- execute the following;
+ execute the following:
cmake --build . --config {Debug | Release}
@@ -209,33 +307,34 @@ Notes: This short instruction is written for users who want to quickly build
Release and build the solution.
3.2.1 The external libraries (zlib and szip) can be configured
- to allow building the libraries by downloading from a SVN repository.
+ to allow building the libraries by downloading from an SVN repository.
The option is 'HDF5_ALLOW_EXTERNAL_SUPPORT'; by adding the following
configuration option:
-DHDF5_ALLOW_EXTERNAL_SUPPORT:STRING="SVN"
- The options to control the SVN URL (config/cmake/cacheinit.cmake file):
+ The options to control the SVN URL (config/cmake/cacheinit.cmake file) are:
ZLIB_SVN_URL:STRING="http://svn.hdfgroup.uiuc.edu/zlib/trunk"
SZIP_SVN_URL:STRING="http://svn.hdfgroup.uiuc.edu/szip/trunk"
These should be changed to your location.
3.2.2 Or the external libraries (zlib and szip) can be configured
to allow building the libraries by using a compressed file.
- The option is 'HDF5_ALLOW_EXTERNAL_SUPPORT'; by adding the following
- configuration option:
+ The option is 'HDF5_ALLOW_EXTERNAL_SUPPORT' and is enabled by
+ adding the following configuration option:
-DHDF5_ALLOW_EXTERNAL_SUPPORT:STRING="TGZ"
- The options to control the SVN URL (config/cmake/cacheinit.cmake file):
+ The options to control the SVN URL (config/cmake/cacheinit.cmake
+ file) are:
ZLIB_TGZ_NAME:STRING="zlib_src.ext"
SZIP_TGZ_NAME:STRING="szip_src.ext"
TGZ_PATH:STRING="some_location"
- where "some_location/xxxx_src.ext" is the URL or full path to the
- compressed file and where ext is the type of the compression file like:
- .bz2, .tar, .tar.gz, .tgz, .zip
+ where "some_location/xxxx_src.ext" is the URL or full path to
+ the compressed file and where ext is the type of the compression
+ file such as .bz2, .tar, .tar.gz, .tgz, or .zip.
- 4. Test HDF5.
+ 4. Test HDF5
- To test the build, navigate to your build directory and execute;
+ To test the build, navigate to your build directory and execute:
ctest . -C {Debug | Release}
@@ -243,7 +342,7 @@ Notes: This short instruction is written for users who want to quickly build
recommend choosing either Debug or Release to match the build
step on Windows.
- 5. Packaging HDF5
+ 5. Packaging HDF5 (create an install image)
To package the build into a simple installer using the NullSoft installer NSIS
on Windows, or into compressed files (.tar.gz, .sh, .zip), use the CPack tool.
@@ -252,17 +351,17 @@ Notes: This short instruction is written for users who want to quickly build
cpack -C {Debug | Release} CPackConfig.cmake
- NOTE: We have just introduced the packaging capability and it has not been
- extensively tested. Please send us comments on how it can be improved.
- See NSIS note 8 of this document.
- Also, if you are using a VS Express version or do not want to enable
- the packaging components, set HDF5_NO_PACKAGES to ON (on the command
- line add -DHDF5_NO_PACKAGES:BOOL=ON)
+ NOTE: See note 8 of this document for NSIS information.
+ Also, if you are using a Visual Studio Express version or do not
+ want to enable the packaging components, set HDF5_NO_PACKAGES
+ to ON (on the command line add -DHDF5_NO_PACKAGES:BOOL=ON)
6. The files that support building HDF5 with CMake are all the files in the
config/cmake folder, the CMakeLists.txt files in each source folder, and
CTestConfig.cmake. CTestConfig.cmake is specific to the internal testing
- performed by The HDF Group. It should be altered for the users
+ performed by The HDF Group. It should be altered for the user's
+ installation and needs. The cacheinit.cmake file settings are used by
+ The HDF Group for daily testing. It should be altered/ignored for the user's
installation and needs.
7. More information about using CMake can be found at the KitWare site,
@@ -276,90 +375,104 @@ Notes: This short instruction is written for users who want to quickly build
"output directory", which is where the program will be installed. These
options are case-sensitive, so be sure to type them in upper case.
-
+
+
========================================================================
- All options for HDF5 C/C++ Libraries with CMake
- Option Name --- Option Description --- Option Default
+IV. All Options for HDF5 C/C++ Libraries with CMake
========================================================================
+
+In the options listed below, there are three columns of information:
+Option Name, Option Description, and Option Default.
+
---------------- General Build Options ---------------------
-BUILD_SHARED_LIBS "Build Shared Libraries" OFF
+BUILD_SHARED_LIBS "Build Shared Libraries" OFF
BUILD_STATIC_EXECS "Build Static Executables" OFF
-BUILD_STATIC_PIC "Build Static PIC" OFF
-BUILD_TESTING "Build HDF5 Unit Testing" OFF
+BUILD_STATIC_PIC "Build Static PIC" OFF
+BUILD_TESTING "Build HDF5 Unit Testing" OFF
---------------- HDF5 Build Options ---------------------
-HDF5_BUILD_CPP_LIB "Build HDF5 C++ Library" OFF
-HDF5_BUILD_EXAMPLES "Build HDF5 Library Examples" OFF
-HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF
-HDF5_BUILD_HL_LIB "Build HIGH Level HDF5 Library" OFF
-HDF5_BUILD_TOOLS "Build HDF5 Tools" OFF
+HDF5_BUILD_CPP_LIB "Build HDF5 C++ Library" OFF
+HDF5_BUILD_EXAMPLES "Build HDF5 Library Examples" OFF
+HDF5_BUILD_FORTRAN "Build FORTRAN support" OFF
+HDF5_BUILD_HL_LIB "Build HIGH Level HDF5 Library" OFF
+HDF5_BUILD_TOOLS "Build HDF5 Tools" OFF
IF (HDF5_BUILD_FORTRAN)
HDF5_ENABLE_F2003 "Enable FORTRAN 2003 Standard" OFF
---------------- HDF5 Advanced Options ---------------------
-HDF5_DISABLE_COMPILER_WARNINGS "Disable compiler warnings" OFF
-HDF5_Enable_Clear_File_Buffers "Securely clear file buffers before writing to file" ON
-HDF5_Enable_Instrument "Instrument The library" HDF5_Enable_Instrument
-HDF5_ENABLE_CODESTACK "Enable the function stack tracing (for developer debugging)." OFF
-HDF5_ENABLE_COVERAGE "Enable code coverage for Libraries and Programs" OFF
-HDF5_ENABLE_DEPRECATED_SYMBOLS "Enable deprecated public API symbols" ON
-HDF5_ENABLE_EMBEDDED_LIBINFO "embed library info into executables" ON
-HDF5_ENABLE_GPFS "Enable GPFS hints for the MPI/POSIX file driver" OFF
-HDF5_ENABLE_HSIZET "Enable datasets larger than memory" ON
-HDF5_ENABLE_LARGE_FILE "Enable support for large (64-bit) files on Linux." ON
-HDF5_ENABLE_PARALLEL "Enable parallel build (requires MPI)" OFF
-HDF5_ENABLE_TRACE "Enable API tracing capability" OFF
-HDF5_ENABLE_USING_MEMCHECKER "Indicate that a memory checker is used" OFF
-HDF5_METADATA_TRACE_FILE "Enable metadata trace file collection" OFF
-HDF5_NO_PACKAGES "Do not include CPack Packaging" OFF
-HDF5_PACKAGE_EXTLIBS "CPACK - include external libraries" OFF
-HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks" OFF
-HDF5_TEST_VFD "Execute tests with different VFDs" OFF
-HDF5_USE_16_API_DEFAULT "Use the HDF5 1.6.x API by default" OFF
-HDF5_USE_18_API_DEFAULT "Use the HDF5 1.8.x API by default" OFF
-HDF5_USE_FOLDERS "Enable folder grouping of projects in IDEs." OFF
-HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON
-HDF5_WANT_DCONV_EXCEPTION "exception handling functions is checked during data conversions" ON
+HDF5_DISABLE_COMPILER_WARNINGS "Disable compiler warnings" OFF
+HDF5_Enable_Clear_File_Buffers "Securely clear file buffers before writing to file" ON
+HDF5_Enable_Instrument "Instrument The library" OFF
+HDF5_ENABLE_CODESTACK "Enable the function stack tracing (for developer debugging)." OFF
+HDF5_ENABLE_COVERAGE "Enable code coverage for Libraries and Programs" OFF
+HDF5_ENABLE_DEPRECATED_SYMBOLS "Enable deprecated public API symbols" ON
+HDF5_ENABLE_EMBEDDED_LIBINFO "embed library info into executables" ON
+HDF5_ENABLE_GPFS "Enable GPFS hints for the MPI/POSIX file driver" OFF
+HDF5_ENABLE_HSIZET "Enable datasets larger than memory" ON
+HDF5_ENABLE_LARGE_FILE "Enable support for large (64-bit) files on Linux." ON
+HDF5_ENABLE_PARALLEL "Enable parallel build (requires MPI)" OFF
+HDF5_ENABLE_TRACE "Enable API tracing capability" OFF
+HDF5_ENABLE_USING_MEMCHECKER "Indicate that a memory checker is used" OFF
+HDF5_METADATA_TRACE_FILE "Enable metadata trace file collection" OFF
+HDF5_NO_PACKAGES "Do not include CPack Packaging" OFF
+HDF5_PACKAGE_EXTLIBS "CPACK - include external libraries" OFF
+HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks" OFF
+HDF5_TEST_VFD "Execute tests with different VFDs" OFF
+HDF5_USE_16_API_DEFAULT "Use the HDF5 1.6.x API by default" OFF
+HDF5_USE_18_API_DEFAULT "Use the HDF5 1.8.x API by default" OFF
+HDF5_USE_FOLDERS "Enable folder grouping of projects in IDEs." OFF
+HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON
+HDF5_WANT_DCONV_EXCEPTION "exception handling functions is checked during data conversions" ON
+HDF5_ENABLE_THREADSAFE "Enable Threadsafety" OFF
IF (APPLE)
- HDF5_BUILD_WITH_INSTALL_NAME "Build with library install_name set to the installation path" OFF
+ HDF5_BUILD_WITH_INSTALL_NAME "Build with library install_name set to the installation path" OFF
IF (CMAKE_BUILD_TYPE MATCHES Debug)
- HDF5_ENABLE_TRACE "Enable API tracing capability" ON
+ HDF5_ENABLE_TRACE "Enable API tracing capability" ON
IF (HDF5_TEST_VFD)
- HDF5_TEST_FHEAP_VFD "Execute fheap test with different VFDs" ON
+ HDF5_TEST_FHEAP_VFD "Execute fheap test with different VFDs" ON
IF (WIN32 AND NOT CYGWIN)
- HDF_LEGACY_NAMING "Use Legacy Names for Libraries and Programs" OFF
-HDF5_ENABLE_THREADSAFE "Enable Threadsafety" OFF
+ HDF_LEGACY_NAMING "Use Legacy Names for Libraries and Programs" OFF
---------------- External Library Options ---------------------
-HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building" "NO"
-HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF
-HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" OFF
-ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0
-SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0
-HDF5_USE_FILTER_FLETCHER32 "Use the FLETCHER32 Filter" ON
-HDF5_USE_FILTER_NBIT "Use the NBIT Filter" ON
-HDF5_USE_FILTER_SCALEOFFSET "Use the SCALEOFFSET Filter" ON
-HDF5_USE_FILTER_SHUFFLE "Use the SHUFFLE Filter" ON
+HDF5_ALLOW_EXTERNAL_SUPPORT "Allow External Library Building" "NO"
+HDF5_ENABLE_SZIP_SUPPORT "Use SZip Filter" OFF
+HDF5_ENABLE_Z_LIB_SUPPORT "Enable Zlib Filters" OFF
+ZLIB_USE_EXTERNAL "Use External Library Building for ZLIB" 0
+SZIP_USE_EXTERNAL "Use External Library Building for SZIP" 0
+HDF5_USE_FILTER_FLETCHER32 "Use the FLETCHER32 Filter" ON
+HDF5_USE_FILTER_NBIT "Use the NBIT Filter" ON
+HDF5_USE_FILTER_SCALEOFFSET "Use the SCALEOFFSET Filter" ON
+HDF5_USE_FILTER_SHUFFLE "Use the SHUFFLE Filter" ON
IF (HDF5_ENABLE_SZIP_SUPPORT)
- HDF5_ENABLE_SZIP_ENCODING "Use SZip Encoding" OFF
+ HDF5_ENABLE_SZIP_ENCODING "Use SZip Encoding" OFF
-************************************************************************
- APPENDIX
+
+========================================================================
+V. APPENDIX
+========================================================================
Below are examples of the ctest scripts used by The HDF Group.
See the script, cmakehdf5, in the bin folder for a smaller and limited
function shell script version of the following general scripts.
The example is for a linux machine, but the same scripts can be used on
-a windows machine by adjusting the CTEST_CMAKE_GENERATOR option in the
+a Windows machine by adjusting the CTEST_CMAKE_GENERATOR option in the
product specific script.
-
-************************************************************************
-CTestScript.cmake: common ctest script used to build, test and package
+CTestScript.cmake
+
+
+
========================================================================
+CTestScript.cmake
+========================================================================
+
+The CTestScript.cmake script, shown below, is a common ctest script that
+is used to build, test, and package HDF5 Library files.
+
-cmake_minimum_required(VERSION 2.8.6 FATAL_ERROR)
+
+cmake_minimum_required(VERSION 2.8.10 FATAL_ERROR)
########################################################
# This dashboard is maintained by The HDF Group
# For any comments please contact cdashhelp@hdfgroup.org
@@ -569,108 +682,86 @@ endforeach(v)
message("Dashboard script configuration:\n${vars}\n")
#-----------------------------------------------------------------------------
-if(${MODEL} STREQUAL "Continuous")
- ## Continuous mode is used for commit test processing
- ## --------------------------
- while (${CTEST_ELAPSED_TIME} LESS 36000)
- set(START_TIME ${CTEST_ELAPSED_TIME})
- CTEST_START (Continuous)
- CTEST_UPDATE (SOURCE "${CTEST_SOURCE_DIRECTORY}" RETURN_VALUE res)
- set (CTEST_CHECKOUT_COMMAND) # checkout on first iteration only
- message("Dashboard updated files: ${res}\n")
- if(res GREATER 0)
- CTEST_CONFIGURE (BUILD "${CTEST_BINARY_DIRECTORY}")
- CTEST_READ_CUSTOM_FILES ("${CTEST_BINARY_DIRECTORY}")
- CTEST_SUBMIT (PARTS Update Configure Notes)
- CTEST_BUILD (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND)
- CTEST_SUBMIT (PARTS Build)
- CTEST_TEST (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND ${ctest_test_args})
- CTEST_SUBMIT (PARTS Test)
-
- execute_process (COMMAND cpack -C ${CTEST_BUILD_CONFIGURATION}
- WORKING_DIRECTORY ${CTEST_BINARY_DIRECTORY}
- RESULT_VARIABLE cpackResult
- OUTPUT_VARIABLE cpackLog
- ERROR_VARIABLE cpackLog.err
- )
- file (WRITE ${CTEST_BINARY_DIRECTORY}/cpack.log "${cpackLog.err}" "${cpackLog}")
- endif(${res} GREATER 0)
-
- CTEST_SLEEP(${START_TIME} 300 ${CTEST_ELAPSED_TIME})
- endwhile (${CTEST_ELAPSED_TIME} LESS 36000)
-#-----------------------------------------------------------------------------
-else(${MODEL} STREQUAL "Continuous")
-#-----------------------------------------------------------------------------
- ## NORMAL process
- ## -- LOCAL_SKIP_UPDATE skips updating the source folder from svn
- ## -- LOCAL_NO_SUBMIT skips reporting to CDash server
- ## -- LOCAL_SKIP_TEST skips the test process (only builds)
- ## -- LOCAL_MEMCHECK_TEST executes the Valgrind testing
- ## -- LOCAL_COVERAGE_TEST executes code coverage process
- ## --------------------------
- CTEST_START (${MODEL} TRACK ${MODEL})
- if(NOT LOCAL_SKIP_UPDATE)
- CTEST_UPDATE (SOURCE "${CTEST_SOURCE_DIRECTORY}")
- endif(NOT LOCAL_SKIP_UPDATE)
- CTEST_CONFIGURE (BUILD "${CTEST_BINARY_DIRECTORY}")
- CTEST_READ_CUSTOM_FILES ("${CTEST_BINARY_DIRECTORY}")
- if(NOT LOCAL_NO_SUBMIT)
- CTEST_SUBMIT (PARTS Update Configure Notes)
- endif(NOT LOCAL_NO_SUBMIT)
- CTEST_BUILD (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND)
- if(NOT LOCAL_NO_SUBMIT)
- CTEST_SUBMIT (PARTS Build)
- endif(NOT LOCAL_NO_SUBMIT)
- if(NOT LOCAL_SKIP_TEST)
- if(NOT LOCAL_MEMCHECK_TEST)
- CTEST_TEST (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND ${ctest_test_args})
- if(NOT LOCAL_NO_SUBMIT)
- CTEST_SUBMIT (PARTS Test)
- endif(NOT LOCAL_NO_SUBMIT)
- else(NOT LOCAL_MEMCHECK_TEST)
- CTEST_MEMCHECK (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND ${ctest_test_args})
- if(NOT LOCAL_NO_SUBMIT)
- CTEST_SUBMIT (PARTS MemCheck)
- endif(NOT LOCAL_NO_SUBMIT)
- endif(NOT LOCAL_MEMCHECK_TEST)
- if(LOCAL_COVERAGE_TEST)
- CTEST_COVERAGE (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND)
- if(NOT LOCAL_NO_SUBMIT)
- CTEST_SUBMIT (PARTS Coverage)
- endif(NOT LOCAL_NO_SUBMIT)
- endif(LOCAL_COVERAGE_TEST)
- endif(NOT LOCAL_SKIP_TEST)
+## NORMAL process
+## -- LOCAL_SKIP_UPDATE skips updating the source folder from svn
+## -- LOCAL_NO_SUBMIT skips reporting to CDash server
+## -- LOCAL_SKIP_TEST skips the test process (only builds)
+## -- LOCAL_MEMCHECK_TEST executes the Valgrind testing
+## -- LOCAL_COVERAGE_TEST executes code coverage process
+## --------------------------
+CTEST_START (${MODEL} TRACK ${MODEL})
+if(NOT LOCAL_SKIP_UPDATE)
+ CTEST_UPDATE (SOURCE "${CTEST_SOURCE_DIRECTORY}")
+endif(NOT LOCAL_SKIP_UPDATE)
+CTEST_CONFIGURE (BUILD "${CTEST_BINARY_DIRECTORY}")
+CTEST_READ_CUSTOM_FILES ("${CTEST_BINARY_DIRECTORY}")
+if(NOT LOCAL_NO_SUBMIT)
+ CTEST_SUBMIT (PARTS Update Configure Notes)
+endif(NOT LOCAL_NO_SUBMIT)
+ CTEST_BUILD (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND)
+if(NOT LOCAL_NO_SUBMIT)
+ CTEST_SUBMIT (PARTS Build)
+endif(NOT LOCAL_NO_SUBMIT)
+if(NOT LOCAL_SKIP_TEST)
if(NOT LOCAL_MEMCHECK_TEST)
- ##-----------------------------------------------
- ## Package the product
- ##-----------------------------------------------
- execute_process(COMMAND cpack -C ${CTEST_BUILD_CONFIGURATION} -V
- WORKING_DIRECTORY ${CTEST_BINARY_DIRECTORY}
- RESULT_VARIABLE cpackResult
- OUTPUT_VARIABLE cpackLog
- ERROR_VARIABLE cpackLog.err
- )
- file(WRITE ${CTEST_BINARY_DIRECTORY}/cpack.log "${cpackLog.err}" "${cpackLog}")
+ CTEST_TEST (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND ${ctest_test_args})
+ if(NOT LOCAL_NO_SUBMIT)
+ CTEST_SUBMIT (PARTS Test)
+ endif(NOT LOCAL_NO_SUBMIT)
+ else(NOT LOCAL_MEMCHECK_TEST)
+ CTEST_MEMCHECK (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND ${ctest_test_args})
+ if(NOT LOCAL_NO_SUBMIT)
+ CTEST_SUBMIT (PARTS MemCheck)
+ endif(NOT LOCAL_NO_SUBMIT)
endif(NOT LOCAL_MEMCHECK_TEST)
-endif(${MODEL} STREQUAL "Continuous")
+ if(LOCAL_COVERAGE_TEST)
+ CTEST_COVERAGE (BUILD "${CTEST_BINARY_DIRECTORY}" APPEND)
+ if(NOT LOCAL_NO_SUBMIT)
+ CTEST_SUBMIT (PARTS Coverage)
+ endif(NOT LOCAL_NO_SUBMIT)
+ endif(LOCAL_COVERAGE_TEST)
+endif(NOT LOCAL_SKIP_TEST)
+if(NOT LOCAL_MEMCHECK_TEST)
+ ##-----------------------------------------------
+ ## Package the product
+ ##-----------------------------------------------
+ execute_process(COMMAND cpack -C ${CTEST_BUILD_CONFIGURATION} -V
+ WORKING_DIRECTORY ${CTEST_BINARY_DIRECTORY}
+ RESULT_VARIABLE cpackResult
+ OUTPUT_VARIABLE cpackLog
+ ERROR_VARIABLE cpackLog.err
+ )
+ file(WRITE ${CTEST_BINARY_DIRECTORY}/cpack.log "${cpackLog.err}" "${cpackLog}")
+endif(NOT LOCAL_MEMCHECK_TEST)
#-----------------------------------------------------------------------------
-
message("DONE:CTestScript")
-************************************************************************
-************************************************************************
-Product specific script, HDF518Static.cmake, that uses the
-CTestScript.cmake file (see above). Usage:
-"ctest -S HDF518Static.cmake,hdf518 -C Release -O hdf518static.log"
-where hdf518 is the source folder relative to the location of these scripts
+
+
+========================================================================
+ctest
========================================================================
-cmake_minimum_required(VERSION 2.8.6 FATAL_ERROR)
+Below is an example of the ctest script used by The HDF Group. The
+CTestScript.cmake file used by this script is shown above. Adjust the values
+as necessary. Note that the source folder is entered on the command line
+and the build folder is created as a sub-folder.
+
+
+
+############################################################################
+# Product specific script, HDF518Static.cmake, that uses the
+# CTestScript.cmake file (see above). Usage:
+# "ctest -S HDF518Static.cmake,hdf518 -C Release -O hdf518static.log"
+# where hdf518 is the source folder relative to the location of these scripts
+############################################################################
+
+cmake_minimum_required(VERSION 2.8.10 FATAL_ERROR)
# CTEST_SCRIPT_ARG is the source folder passed on the command line
+set(CTEST_DASHBOARD_ROOT ${CTEST_SCRIPT_DIRECTORY})
set(CTEST_SOURCE_NAME ${CTEST_SCRIPT_ARG})
set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}/buildstatic)
-set(CTEST_DASHBOARD_ROOT ${CTEST_SCRIPT_DIRECTORY})
set(CTEST_SOURCE_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_SOURCE_NAME}")
set(CTEST_BINARY_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_BINARY_NAME}")
set(CTEST_BUILD_CONFIGURATION "Release")
@@ -703,7 +794,11 @@ include(${CTEST_SCRIPT_DIRECTORY}/CTestScript.cmake)
message("DONE")
-************************************************************************
-Need further assistance, send email to help@hdfgroup.org
+
+========================================================================
+For further assistance, send email to help@hdfgroup.org
+========================================================================
+
+
diff --git a/release_docs/INSTALL_MinGW.txt b/release_docs/INSTALL_MinGW.txt
deleted file mode 100644
index 245e3ff..0000000
--- a/release_docs/INSTALL_MinGW.txt
+++ /dev/null
@@ -1,269 +0,0 @@
-************************************************************************
- HDF5 Build and Install Instructions for MinGW
-************************************************************************
-
-NOTE:
-We are no longer actively supporting MinGW as of 1.8.5.
------- 1.8.9 notes ------
-Autotools configure failed to correctly generate the *config.h files.
-CMake 2.8.6 can configure and build the library, however fortran programs did
- not execute correctly. Some tests may fail. Used the "MSYS Makefiles"
- generator for the "-G" parameter. Follow the CMake.txt document.
-
-Below are the old instructions from the 1.8.4 release.
-
-************************************************************************
-************************************************************************
-************************************************************************
-
-Preconditions:
---------------
-
-1. Installed MinGW (5.1.6 or higher) and MSYS (1.0.11 or higher)
-
- To install the MinGW net release, go to http://www.mingw.org and
- follow the instructions for a manual installation.
-
-2. Compilers Installed
-
- 2.1 C/C++ Compilers HDF5-1.8.4 Supported
-
- gcc-4.4.0 is included in MinGW, which includes:
- gcc : GNU C compiler
- gcc-g++: GNU C++ compiler
- gfortran: GNU Fortran compiler
-
- 2.2 Using Compilers Not Supported
-
- The compilers in 2.1 are supported and tested by The HDF
- Group. Any other compilers may still work but they are not
- guaranteed by HDF group.
-
- If users want to use other compilers except those in 2.1,
- try to set the following variables to override the default
- choices.
-
- CC : C compiler command
- CXX : C++ compiler command
- FC : Fortran compiler command
-
-3. HDF5 Dependencies
-
- 3.1 Zlib
-
- zlib-1.2.2 or later is supported and tested on MinGW.
-
- 3.2 Szip
- The HDF5 library has a predefined compression filter that uses
- the extended-Rice lossless compression algorithm for chunked
- datatsets. For more information about Szip compression and
- license terms see
- http://hdfgroup.org/HDF5/doc_resource/SZIP/index.html.
-
- Szip is currently not supported on MinGW, although we plan to add
- support in the future.
-
-
-Build HDF5 on MinGW
-----------------------
-
-1. Get HDF5 source code package
- Users can download HDF5 source code package from HDF website
- (http://hdfgroup.org).
-
-2. Unpacking the distribution
-
- The HDF5 source code is distributed in a variety of formats which
- can be unpacked with the following commands, each of which creates
- an `hdf5-1.8.4' directory.
-
- 2.1 Non-compressed tar archive (*.tar)
-
- $ tar xf hdf5-1.8.4.tar
-
- 2.2 Gzip'd tar archive (*.tar.gz)
-
- $ gunzip < hdf5-1.8.4.tar.gz | tar xf -
-
- 2.3 Bzip'd tar archive (*.tar.bz2)
-
- $ bunzip2 < hdf5-1.8.4.tar.bz2 | tar xf -
-
-3. Setup Environment
-
- Building HDF5 1.8.4 requires an explicit link to libws2_32.a
- to handle Windows Sockets. To do this, issue the command:
-
- $ export LIBS=-lws2_32
-
- Also, the default search path can cause trouble using ./configure in HDF5
- 1.8.4. Check that non-MinGW or non-msys directories are not added to the
- PATH. You can do this by:
-
- $ echo $PATH
-
- If there are spurious entries, specifically those related to other Windows
- compilers or tools, remove them by setting a new PATH without them. For
- example,
-
- $ export PATH=.:/usr/local/bin:/mingw/bin:/bin
-
-
-4. Remove Unsupported Source
-
- There are some projects which are built by default to test performance on
- POSIX systems. They are irrelevent on MinGW, and can cause compiler errors.
-
- To remove these projects from the build script, open ./perform/Makefile.in
- Find all instances of "h5perf_serial", and remove them (along with their
- respective extension or targets, if they exist). Then save the file.
-
-
-5. Remove Tests
-
- When building with MinGW, many tests must be removed from the
- test suite run with "make check". This is because of the way
- MinGW and Windows handles certain parsing. For example, MinGW
- treats any command parameter starting with '/' as a path, and
- replaces it with it's root directory on Windows, such as
- 'C:\msys\1.0\'.
-
- To remove the tests, open the given 'Makefile.in' and edit the
- line begining with "TEST_SCRIPT = " to remove the test script.
- For example, to remove the "testerror.sh" from ./test/Makefile.in:
-
- 1) Open ./test/Makefile.in
-
- 2) Find the line "TEST_SCRIPT = $(top_srcdir)/test/testerror.sh"
-
- 3) Change it to simply read "TEST_SCRIPT =", and save.
-
- Do this for the following Makefiles and tests:
-
- - ./test/Makefile.in: "testerror.sh testlibinfo.sh testcheckinfo.sh"
-
- - ./tools/h5diff/Makefile.in: "testh5diff.sh"
-
- - ./tools/h5ls/Makefile.in: "testh5ls.sh"
-
- - ./tools/misc/Makefile.in: "testh5mkgrp.sh"
-
- - ./tools/h5copy/Makefile.in: "testh5copy.sh"
-
- - ./tools/h5stat/Makefile.in: "testh5stat.sh"
-
- - ./tools/h5dump/Makefile.in: "testh5dump.sh" and "testh5dumpxml.sh"
-
-
-6. Configuring
-
- Notes:
- 1) Note: MinGW is c++ package is missing the libstdc++.dll.a file
- and c++ linking fails. Do not enable c++ option in configure.
-
- 2) See detailed information in hdf5/release_docs/INSTALL,
- part 5. Full installation instructions for source
- distributions
-
- In short,
-
- To configure HDF5 with C Library, use
-
- $ ./configure
-
- If you would like to build the C++ library, add the parameter:
-
- --enable-cxx (12-11-2009 MinGW C++ package is missing a file)
-
- If you would like to build without the Zlib library, add the parameter:
-
- --without-zlib
-
- If you would like to specify the the Zlib library, there are two ways:
-
- Using
-
- --with-zlib=INCDIR,LIBDIR
-
- For example, if the zlib library is installed in
- /usr, which is the parent directory of directories
- "include" and "lib",
-
- --with-zlib=/usr/include,/usr/lib
-
- Through the CPPFLAGS and LDFLAGS Variables
-
- For example, if zlib was installed in the directory
- /c/usr then using the following command to configure
- HDF5 with zib
-
- $ export CPPFLAGS=-I/usr/include
- $ export LDFLAGS=-L/usr/lib
-
- If you would like to specify the install directory, add the parameter:
-
- --prefix="path for installation"
-
- By default, HDF5 library, header files, examples, and
- support programs will be installed in /usr/local/lib,
- /usr/local/include, /usr/local/doc/hdf5/examples, and
- /usr/local/bin. To use a path other than /usr/local specify
- the path with the `--prefix=PATH' switch as in the above
- command.
-
- Combination of Switches
-
- All of the above switches can be combined together. For
- example, if users want to configure HDF5 C/Fortran
- library, with zlib library at /c/usr/, and
- install HDF5 into directory /c/hdf5 using
- gcc/gfortran as C/Fortran compiler:
-
- $ ./configure
- --with-zlib=/usr/include,/usr/lib
- --prefix=/c/hdf5
- --enable-fortran
- <"If no more switches, then hit Enter">
-
- Notes: The command format above is for readilibity. In practice,
- please type in the command above with at least one
- space between each line, No "Enter" until users finish
- the switches and want to run the configure.
-
-
- or do it through CPPFLAGS and LDFLAGS variables:
-
- $ CPPFLAGS=-I/usr/include \
- $ LDFLAGS=-L/usr/lib \
-
- $ ./configure
- --prefix=/c/hdf5
- --enable-fortran
- <"If no more switches, then hit Enter">
-
-7. Make and Make Check
-
- After configuration is done successfully, run the following series of
- commands to build, test and install HDF5
-
- $ make > "output file name"
- $ make check > "output file name"
-
- Before run "make install", check output file for "make check", there
- should be no failures at all.
-
-8. Make Install
-
- $ make install > "output file name"
-
-
-9. Check installed HDF5 library
-
- After step 8, go to your installation directory, there should be
- three subdirectories: "bin" "include" and "lib".
-
- $ make installcheck > "output file name"
-
------------------------------------------------------------------------
-
-Need Further assistance, email help@hdfgroup.org
diff --git a/release_docs/INSTALL_Windows.txt b/release_docs/INSTALL_Windows.txt
index 4344ea4..7873709 100644
--- a/release_docs/INSTALL_Windows.txt
+++ b/release_docs/INSTALL_Windows.txt
@@ -6,8 +6,11 @@
We now recommend that users build, test and install HDF5 using CMake.
-Instructions for building and testing HDF5 using CMake can be found in
-the CMake.txt file found in this folder.
+Instructions for building and testing HDF5 using CMake can be found in the
+INSTALL_CMake.txt file found in this folder.
-The old INSTALL_Windows documentation can be found in the
-obsolete_windows_docs\ folder located with this document.
+For instructions on building and testing an application with HDF5, see the
+USING_HDF5_CMake.txt file found in this folder.
+
+Users who want to build and run an application with HDF5 in Visual Studio
+without using CMake should consult the USING_HDF5_VS.txt file.
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 08420d0..fb57c81 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -1,4 +1,4 @@
-HDF5 version 1.9.149-FA_a5 currently under development
+HDF5 version 1.9.151-FA_a5 currently under development
INTRODUCTION
@@ -417,6 +417,12 @@ Bug Fixes since HDF5-1.8.0 release
Library
-------
+ - Fixed an error involving failure to write fill values to the user's
+ buffer when reading unallocated chunks from datasets that have a
+ fill value set to H5D_FILL_VALUE_DEFAULT. A consequence of this
+ was the reporting of spurious data values in h5dump and h5diff
+ output.
+ (HDFFV-8247; JP - 2013/05/03)
- Fixed an error that could occur when calling H5Ocopy within an
H5Literate callback (and possibly other situations).
(NAF - 2012/7/25 - HDFFV-5853)
@@ -1297,14 +1303,6 @@ The following platforms are not supported but have been tested for this release.
Known Problems
==============
-* The h5dump and h5diff utilities occasionally produce different output
- between Linux and Windows systems. This is caused by lower-level library
- routines that fail to write fill values to the user's buffer when reading
- unallocated chunks from datasets that have a fill value set to
- H5D_FILL_VALUE_DEFAULT. Due to platform differences the return of
- spurious data values have only been encountered on Windows 32-bit systems.
- (Issue HDFFV-8247; JP - 2013/03/27)
-
* The 5.9 C++ compiler on Sun failed to compile a C++ test ttypes.cpp. It
complains with this message:
"/home/hdf5/src/H5Vprivate.h", line 130: Error: __func__ is not defined.
diff --git a/release_docs/USING_CMake.txt b/release_docs/USING_HDF5_CMake.txt
index 0e980ea..2b3c02b 100644
--- a/release_docs/USING_CMake.txt
+++ b/release_docs/USING_HDF5_CMake.txt
@@ -1,79 +1,96 @@
************************************************************************
-* Build and Install HDF5 Applications with CMake *
+* Build and Install HDF5 Applications with CMake *
************************************************************************
-Notes: This short instruction is written for users who want to quickly build
- HDF5 Applications from the HDF5 Examples package using the CMake tools.
- Users can adapt these instructions for their own applications, see the
- "Minimum Project Files" section.
+Notes: This short instruction is written for users who want to quickly
+ build HDF5 applications using the CMake tools. Users can adapt
+ these instructions for their own applications. For more information,
+ see the "Minimum C Project Files for CMake" section.
- More information about using CMake can be found at the KitWare site,
- www.cmake.org.
+ More information about using CMake can be found at the KitWare
+ site, www.cmake.org.
- CMake uses the command line, however the visual CMake tool is
+ CMake uses the command line; however, the visual CMake tool is
available for the configuration step. The steps are similar for
- all the operating systems supported by CMake.
+ all of the operating systems supported by CMake.
NOTES:
- 1. Using CMake for building and using HDF5 is under active development.
- While we have attempted to provide error-free files, please
- understand that development with CMake has not been extensively
- tested outside of HDF. The CMake specific files may change
- before the next release.
+ 1. Using CMake for building and using HDF5 is under active
+ development. While we have attempted to provide error-free
+ files, please understand that development with CMake has not
+ been extensively tested outside of HDF. The CMake specific
+ files may change before the next release.
- 2. CMake was originally introduced to support development on Windows,
- however it should be usable on any system where CMake is supported.
- Please send us any comments on how CMake support can be improved on
- any system. Visit the KitWare site for more information about CMake.
-
- 3. HDF5 library build and test results can be submitted to our CDash server at:
- cdash.hdfgroup.uiuc.edu.
- Please read the HDF and CDash document at:
- www.hdfgroup.org/CDash/HowToSubmit.
-
- 4. See the appendix at the bottom of this file for an example of using
- a ctest script for building and testing. See CMake.txt for more
- information.
+ 2. CMake for HDF5 development should be usable on any system
+ where CMake is supported. Please send us any comments on how
+ CMake support can be improved on any system.
+ 3. See the appendix at the bottom of this file for an example
+ of using a ctest script for building and testing. See
+ INSTALL_CMake.txt for more information.
+
+
========================================================================
- Preconditions
-========================================================================
+I. Preconditions
+========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
- web site. The HDF5 1.8.x product requires CMake version 2.8.10 (minimum).
-
- 2. You have installed the HDF5 library built with CMake, by executing the
- HDF Install Utility (The *.exe file in the binary package for Windows).
- If you are using a Windows platform, you can obtain a pre-built Windows
- binary from The HDF Group's website at www.hdfgroup.org.
-
- 3. On Windows with Visual Studio, if you have installed the static HDF5
- library, you will need to add the HDF5\lib folder to the library
- search list. See the "Using Static Libraries with Visual Studio" section.
-
- 4. Set the environment variable HDF5_ROOT to the installed location of HDF5.
- On Windows HDF5_ROOT=C:\Program Files\HDF Group\HDF5\hdf5-1.8.x
- (Note there are no quote characters used on windows)
+ web site. The HDF5 1.8.x product requires a minimum CMake version
+ of 2.8.10.
+ 2. You have installed the HDF5 library built with CMake, by executing
+ the HDF Install Utility (the *.exe file in the binary package for
+ Windows). If you are using a Windows platform, you can obtain a
+ pre-built Windows binary from The HDF Group's website at
+ www.hdfgroup.org.
+
+ 3. Set the environment variable HDF5_DIR to the installed location of
+ the config files for HDF5. On Windows:
+ HDF5_DIR=C:/Program Files/HDF_Group/HDF5/1.8.x/cmake/hdf5
+
+ (Note there are no quote characters used on Windows and all platforms
+ use forward slashes)
+
+ 4. Created separate source and build directories.
+ (CMake commands are executed in the build directory)
+
+ 5. Created a CMakeLists.txt file(s) for your source. See Section III
+ below.
+
+
+
========================================================================
- Building HDF5 Applications with CMake
+II. Building HDF5 Applications with CMake
========================================================================
+Go through these steps to build HDF5 applications with CMake.
+
+ 1. Run CMake
+ 2. Configure the cache settings
+ 3. Build HDF5 Applications
+ 4. Test HDF5 Applications.
+
+These steps are described in more detail below.
+
+
+
1. Run CMake
The CMake executable is named "cmake-gui.exe" on Windows and should be
available in your Start menu. For Linux, UNIX, and Mac users the
executable is named "cmake-gui" and can be found where CMake was
installed.
- Specify the source and build directories. It is recommended that you
- choose a build directory different then the source directory
- (for example on Windows, if the source is at c:\MyHDFstuff\hdf5, then
- use c:\MyHDFstuff\hdf5\build or c:\MyHDFstuff\build\hdf5).
+
+ Specify the source and build directories. Make the build and source
+ directories different. For example on Windows, if the source is at
+ c:\MyHDFstuff\hdf5, then use c:\MyHDFstuff\hdf5\build or
+ c:\MyHDFstuff\build\hdf5 for the build directory.
OPTIONAL:
- Users can perform the configuration step without using the visual cmake-gui
- program. Example configuration step executed within the build directory:
+ Users can perform the configuration step without using the visual
+ cmake-gui program. The following is an example command line
+ configuration step executed within the build directory:
cmake -G "<generator>" [-D<options>] <sourcepath>
@@ -83,6 +100,8 @@ Notes: This short instruction is written for users who want to quickly build
* MinGW Makefiles
* NMake Makefiles
* Unix Makefiles
+ * Visual Studio 11
+ * Visual Studio 11 Win64
* Visual Studio 10
* Visual Studio 10 Win64
* Visual Studio 6
@@ -127,7 +146,7 @@ Notes: This short instruction is written for users who want to quickly build
or the command line. The command line is normally used on linux, Unix, and Mac.
To build from the command line, navigate to your build directory and
- execute the following;
+ execute the following:
cmake --build . --config {Debug | Release}
@@ -139,9 +158,9 @@ Notes: This short instruction is written for users who want to quickly build
file in your build directory. Be sure to select either Debug or
Release and build the solution.
- 4. Test HDF5 Applications.
+ 4. Test HDF5 Applications
- To test the build, navigate to your build directory and execute;
+ To test the build, navigate to your build directory and execute:
ctest . -C {Debug | Release}
@@ -149,95 +168,74 @@ Notes: This short instruction is written for users who want to quickly build
recommend choosing either Debug or Release to match the build
step on Windows.
- 6. The files that support building with CMake are all the files in the
+ 5. The files that support building with CMake are all of the files in the
config/cmake folder, the CMakeLists.txt files in each source folder, and
CTestConfig.cmake. CTestConfig.cmake is specific to the internal testing
- performed by The HDF Group. It should be altered for the users
- installation and needs.
+ performed by The HDF Group. It should be altered for the user's
+ installation and needs. The cacheinit.cmake file settings are used by
+ The HDF Group for daily testing. It should be altered/ignored for the user's
+ installation and needs.
+
+
- 7. More information about using CMake can be found at the KitWare site,
- www.cmake.org.
-
-
========================================================================
- Using Visual Studio 2008 with HDF5 Libraries built with Visual Studio 2008
+III. Minimum C Project Files for CMake
========================================================================
- 8. Set up path for external libraries and headers
-
- Invoke Microsoft Visual Studio and go to "Tools" and select "Options",
- find "Projects", and then "VC++ Directories".
-
- 8.1 If you are building on 64-bit Windows, find the "Platform" dropdown
- and select "x64".
+ 6. Create a CMakeLists.txt file at the source root. Include the
+ following text in the file:
- 8.2 Find the box "Show directories for", choose "Include files", add the
- header path (i.e. c:\Program Files\HDF Group\HDF5\hdf5-1.8.x\include)
- to the included directories.
+##########################################################
+cmake_minimum_required (VERSION 2.8.10)
+PROJECT (HDF5MyApp C CXX)
- 8.3 Find the box "Show directories for", choose "Library files", add the
- library path (i.e. c:\Program Files\HDF Group\HDF5\hdf5-1.8.x\lib)
- to the library directories.
-
- 8.4 If using Fortran libraries, you will also need to setup the path
- for the Intel Fortran compiler.
+FIND_PACKAGE (HDF5 NAMES hdf5)
+# FIND_PACKAGE (HDF5) # Find non-cmake built HDF5
+INCLUDE_DIRECTORIES (${HDF5_INCLUDE_DIR})
+SET (LINK_LIBS ${LINK_LIBS} ${HDF5_LIBRARIES})
-
-========================================================================
- Using Visual Studio 2010 with HDF5 Libraries built with Visual Studio 2010
-========================================================================
+SET (example hdf_example)
- 9. Set up path for external libraries and headers
+ADD_EXECUTABLE (${example} ${PROJECT_SOURCE_DIR}/${example}.c)
+TARGET_LINK_LIBRARIES (${example} ${LINK_LIBS})
- The path settings will need to be in project property sheets per project.
- Go to "Project" and select "Properties", find "Configuration Properties",
- and then "VC++ Directories".
-
- 9.1 If you are building on 64-bit Windows, find the "Platform" dropdown
- and select "x64".
-
- 9.2 Add the header path to the "Include Directories" setting.
-
- 9.3 Add the library path to the "Library Directories" setting.
+ENABLE_TESTING ()
+INCLUDE (CTest)
-
-========================================================================
- Minimum C Project Files for CMake
-========================================================================
+ADD_TEST (NAME test_example COMMAND ${example})
+##########################################################
- 10. Create a CMakeLists.txt file at the source root.
-..........................................................................
-cmake_minimum_required (VERSION 2.8.10)
-PROJECT (HDF5MyApp C CXX)
-FIND_PACKAGE (HDF5 REQURIED)
-INCLUDE_DIRECTORIES (${HDF5_INCLUDE_DIRS})
-SET (LINK_LIBS ${LINK_LIBS} ${HDF5_LIBRARIES})
-ADD_EXECUTABLE (hdf_example ${PROJECT_SOURCE_DIR}/hdf_example.c)
-TARGET_LINK_LIBRARIES (hdf_example ${LINK_LIBS})
-..........................................................................
+========================================================================
+IV. APPENDIX
+========================================================================
+Below is an example of the ctest script used by The HDF Group. See the
+Appendix in the INSTALL_CMake.txt file for the CTestScript.cmake file used
+by this script. Adjust the values as necessary. Note that the source folder
+is entered on the command line and the build folder is created as a sub-folder.
+Windows should adjust the forward slash to double backslashes, except for
+the HDF_DIR environment variable.
-************************************************************************
- APPENDIX
-Below ia an example of the ctest script used by The HDF Group. See the
-Appendix in the CMake.txt file for the CTestScript.cmake file used by this
-script.
-************************************************************************
-Product specific script, HDF518Example.cmake, that uses the
-CTestScript.cmake file (see Appendix in the CMake.txt). Usage:
-"ctest -S HDF518Example.cmake,hdf518Examples -C Release -O hdf518EX.log"
-where hdf518Examples is the source folder relative to the location of these scripts
+========================================================================
+ctest
========================================================================
-cmake_minimum_required(VERSION 2.8.6 FATAL_ERROR)
+############################################################################
+# Product specific script, HDF518Example.cmake, that uses the
+# CTestScript.cmake file (see Appendix in the INSTALL_CMake.txt). Usage:
+# "ctest -S HDF518Example.cmake,hdf518Examples -C Release -O hdf518EX.log"
+# where hdf518Examples is the source folder relative to the location of these scripts
+############################################################################
+cmake_minimum_required(VERSION 2.8.10 FATAL_ERROR)
+
+set(CTEST_DASHBOARD_ROOT ${CTEST_SCRIPT_DIRECTORY})
set(CTEST_SOURCE_NAME ${CTEST_SCRIPT_ARG})
set(CTEST_BINARY_NAME ${CTEST_SOURCE_NAME}/build)
-set(CTEST_DASHBOARD_ROOT ${CTEST_SCRIPT_DIRECTORY})
set(CTEST_SOURCE_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_SOURCE_NAME}")
set(CTEST_BINARY_DIRECTORY "${CTEST_DASHBOARD_ROOT}/${CTEST_BINARY_NAME}")
set(CTEST_BUILD_CONFIGURATION "Release")
@@ -254,6 +252,10 @@ set(SITE_OS_BITS "os size")
set(SITE_COMPILER_NAME "compiler name")
set(SITE_COMPILER_VERSION "compiler version")
+# skip submit to cdash
+set(LOCAL_NO_SUBMIT "True")
+# skip update source from repository
+set(LOCAL_SKIP_UPDATE "True")
# needed for source updates, change as required
set(REPOSITORY_URL "http://svn.hdfgroup.uiuc.edu/hdf5-examples/trunk/1_8")
@@ -266,7 +268,12 @@ set(ENV{HDF5_DIR} "/usr/share/cmake/hdf5")
include(${CTEST_SCRIPT_DIRECTORY}/CTestScript.cmake)
message("DONE")
+#################################################################################
+
-Need further assistance, send email to help@hdfgroup.org
+========================================================================
+For further assistance, send email to help@hdfgroup.org
+========================================================================
+
diff --git a/release_docs/USING_HDF5_VS.txt b/release_docs/USING_HDF5_VS.txt
new file mode 100644
index 0000000..bd928bc
--- /dev/null
+++ b/release_docs/USING_HDF5_VS.txt
@@ -0,0 +1,88 @@
+
+***********************************************************************
+* HDF5 Build and Install Suggestions for Windows and Visual Studio *
+* (Full Version) *
+***********************************************************************
+
+These suggestions are for Visual Studio users.
+
+Instructions for building and testing HDF5 applications using CMake can
+be found in the USING_HDF5_CMake.txt file found in this folder.
+
+The following two sections are helpful if you do not use CMake to build
+your applications.
+
+========================================================================
+Using Visual Studio 2010 with HDF5 Libraries built with Visual Studio 2010
+========================================================================
+
+ 1. Set up path for external libraries and headers
+
+ The path settings will need to be in project property sheets per project.
+ Go to "Project" and select "Properties", find "Configuration Properties",
+ and then "VC++ Directories".
+
+ 1.1 If you are building on 64-bit Windows, find the "Platform" dropdown
+ and select "x64".
+
+ 1.2 Add the header path to the "Include Directories" setting.
+
+ 1.3 Add the library path to the "Library Directories" setting.
+
+ 1.4 Select Linker->Input and beginning with the
+ "Additional Dependencies" line, enter the library names. The
+ external libraries should be listed first, followed by the HDF5
+ library, and then optionally the HDF5 High Level, Fortran or C++
+ libraries. For example, to compile a C++ application, enter:
+
+ szip.lib zlib.lib hdf5dll.lib hdf5_cppdll.lib
+
+
+==========================================================================
+Using Visual Studio 2008 with HDF5 Libraries built with Visual Studio 2008
+==========================================================================
+
+ 2. Set up path for external libraries and headers
+
+ Invoke Microsoft Visual Studio and go to "Tools" and select "Options",
+ find "Projects", and then "VC++ Directories".
+
+ 2.1 If you are building on 64-bit Windows, find the "Platform" dropdown
+ and select "x64".
+
+ 2.2 Find the box "Show directories for", choose "Include files", add the
+ header path (i.e. c:\Program Files\HDF_Group\HDF5\1.8.x\include)
+ to the included directories.
+
+ 2.3 Find the box "Show directories for", choose "Library files", add the
+ library path (i.e. c:\Program Files\HDF_Group\HDF5\1.8.x\lib)
+ to the library directories.
+
+ 2.4 If using Fortran libraries, you will also need to setup the path
+ for the Intel Fortran compiler.
+
+ 2.5 Select Project->Properties->Linker->Input and beginning with the
+ "Additional Dependencies" line, enter the library names. The
+ external libraries should be listed first, followed by the HDF5
+ library, and then optionally the HDF5 High Level, Fortran or C++
+ libraries. For example, to compile a C++ application, enter:
+
+ szip.lib zlib.lib hdf5dll.lib hdf5_cppdll.lib
+
+========================================================================
+3. Helpful Pointers
+========================================================================
+
+ 3.1 FAQ
+
+ Many other common questions and hints are located online and being updated
+ in the HDF5 FAQ. For Windows-specific questions, please see:
+
+ http://www.hdfgroup.org/windows/faq.html
+
+ For all other general questions, you can look in the general FAQ:
+
+ http://hdfgroup.org/HDF5-FAQ.html
+
+************************************************************************
+ Please send email to help@hdfgroup.org for further assistance.
diff --git a/release_docs/USING_Windows.txt b/release_docs/USING_Windows.txt
deleted file mode 100644
index 97ac4ad..0000000
--- a/release_docs/USING_Windows.txt
+++ /dev/null
@@ -1,35 +0,0 @@
-
-***********************************************************************
-* HDF5 Build and Install Instructions for Windows *
-* (Full Version) *
-***********************************************************************
-
-We now recommend that users build, test and install HDF5 using CMake.
-
-Instructions for building and testing HDF5 applications using CMake can be found in
-the USING_CMake.txt file found in this folder.
-
-
-========================================================================
- Section VI: Misc.
-========================================================================
-
-1. Helpful Pointers
-
-Here are some helpful notes if you are not familiar with
-using the Visual C++ Development Environment.
-
- 1.1 FAQ
-
- Many other common questions and hints are located online and being updated
- in the HDF5 FAQ. For Windows-specific questions, please see:
-
- http://www.hdfgroup.org/windows/faq.html
-
- For all other general questions, you can look in the general FAQ:
-
- http://hdfgroup.org/HDF5-FAQ.html
-
-
-************************************************************************
- Please send email to help@hdfgroup.org for further assistance.
diff --git a/src/H5D.c b/src/H5D.c
index c3fb72a..90e5b01 100644
--- a/src/H5D.c
+++ b/src/H5D.c
@@ -572,19 +572,56 @@ done:
hid_t
H5Dget_create_plist(hid_t dset_id)
{
- H5D_t *dset; /* Dataset structure */
+ H5D_t *dataset; /* Dataset structure */
H5P_genplist_t *dcpl_plist; /* Dataset's DCPL */
H5P_genplist_t *new_plist; /* Copy of dataset's DCPL */
H5O_fill_t copied_fill; /* Fill value to tweak */
hid_t new_dcpl_id = FAIL;
- hid_t ret_value; /* Return value */
+ hid_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_API(FAIL)
H5TRACE1("i", "i", dset_id);
/* Check args */
- if(NULL == (dset = (H5D_t *)H5I_object_verify(dset_id, H5I_DATASET)))
+ if(NULL == (dataset = (H5D_t *)H5I_object_verify(dset_id, H5I_DATASET)))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset")
+
+ if((ret_value = H5D_get_create_plist(dataset)) < 0)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset")
+
+done:
+ FUNC_LEAVE_API(ret_value)
+} /* end H5Dget_create_plist() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5D_get_create_plist
+ *
+ * Purpose: Private function for H5Dget_create_plist
+ *
+ * Return: Success: ID for a copy of the dataset creation
+ * property list. The template should be
+ * released by calling H5P_close().
+ *
+ * Failure: FAIL
+ *
+ * Programmer: Robb Matzke
+ * Tuesday, February 3, 1998
+ *
+ *-------------------------------------------------------------------------
+ */
+hid_t
+H5D_get_create_plist(H5D_t *dset)
+{
+ H5P_genplist_t *dcpl_plist; /* Dataset's DCPL */
+ H5P_genplist_t *new_plist; /* Copy of dataset's DCPL */
+ H5O_fill_t copied_fill; /* Fill value to tweak */
+ hid_t new_dcpl_id = FAIL;
+ hid_t ret_value; /* Return value */
+
+ FUNC_ENTER_NOAPI(FAIL)
+
+ /* Check args */
if(NULL == (dcpl_plist = (H5P_genplist_t *)H5I_object(dset->shared->dcpl_id)))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get property list")
@@ -670,8 +707,8 @@ done:
if(H5I_dec_app_ref(new_dcpl_id) < 0)
HDONE_ERROR(H5E_DATASET, H5E_CANTDEC, FAIL, "unable to close temporary object")
- FUNC_LEAVE_API(ret_value)
-} /* end H5Dget_create_plist() */
+ FUNC_LEAVE_NOAPI(ret_value)
+} /* end H5D_get_create_plist() */
/*-------------------------------------------------------------------------
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index 0678eba..e16496c 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -1750,8 +1750,9 @@ H5D__chunk_cacheable(const H5D_io_info_t *io_info, haddr_t caddr, hbool_t write_
/* If the fill value needs to be written then we will need
* to use the cache to write the fill value */
if(fill->fill_time == H5D_FILL_TIME_ALLOC ||
- (fill->fill_time == H5D_FILL_TIME_IFSET
- && fill_status == H5D_FILL_VALUE_USER_DEFINED))
+ (fill->fill_time == H5D_FILL_TIME_IFSET &&
+ (fill_status == H5D_FILL_VALUE_USER_DEFINED ||
+ fill_status == H5D_FILL_VALUE_DEFAULT)))
ret_value = TRUE;
else
ret_value = FALSE;
@@ -1839,7 +1840,8 @@ H5D__chunk_read(H5D_io_info_t *io_info, const H5D_type_info_t *type_info,
* but they aren't set, set the flag to skip missing chunks.
*/
if(fill->fill_time == H5D_FILL_TIME_NEVER ||
- (fill->fill_time == H5D_FILL_TIME_IFSET && fill_status != H5D_FILL_VALUE_USER_DEFINED))
+ (fill->fill_time == H5D_FILL_TIME_IFSET && fill_status != H5D_FILL_VALUE_USER_DEFINED &&
+ fill_status != H5D_FILL_VALUE_DEFAULT))
skip_missing_chunks = TRUE;
}
diff --git a/src/H5Dprivate.h b/src/H5Dprivate.h
index 2f0cceb..e154f60 100644
--- a/src/H5Dprivate.h
+++ b/src/H5Dprivate.h
@@ -163,6 +163,7 @@ H5_DLL H5O_loc_t *H5D_oloc(H5D_t *dataset);
H5_DLL H5G_name_t *H5D_nameof(H5D_t *dataset);
H5_DLL H5T_t *H5D_typeof(const H5D_t *dset);
H5_DLL herr_t H5D_flush(const H5F_t *f, hid_t dxpl_id);
+H5_DLL hid_t H5D_get_create_plist(H5D_t *dset);
/* Functions that operate on vlen data */
H5_DLL herr_t H5D_vlen_reclaim(hid_t type_id, H5S_t *space, hid_t plist_id,
diff --git a/src/H5G.c b/src/H5G.c
index a8a9231..25e1949 100644
--- a/src/H5G.c
+++ b/src/H5G.c
@@ -499,7 +499,7 @@ H5Gget_create_plist(hid_t group_id)
htri_t ginfo_exists;
htri_t linfo_exists;
htri_t pline_exists;
- H5G_t *grp = NULL;
+ H5G_t *group = NULL;
H5P_genplist_t *gcpl_plist;
H5P_genplist_t *new_plist;
hid_t new_gcpl_id = FAIL;
@@ -509,9 +509,47 @@ H5Gget_create_plist(hid_t group_id)
H5TRACE1("i", "i", group_id);
/* Check args */
- if(NULL == (grp = (H5G_t *)H5I_object_verify(group_id, H5I_GROUP)))
+ if(NULL == (group = (H5G_t *)H5I_object_verify(group_id, H5I_GROUP)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a group")
+
+ if((ret_value = H5G_get_create_plist(group)) < 0)
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a group")
+done:
+ FUNC_LEAVE_API(ret_value)
+} /* end H5Gget_create_plist() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5G_get_create_plist
+ *
+ * Purpose: Private function for H5Gget_create_plist
+ *
+ * Return: Success: ID for a copy of the group creation
+ * property list. The property list ID should be
+ * released by calling H5Pclose().
+ *
+ * Failure: FAIL
+ *
+ * Programmer: Quincey Koziol
+ * Tuesday, October 25, 2005
+ *
+ *-------------------------------------------------------------------------
+ */
+hid_t
+H5G_get_create_plist(H5G_t *grp)
+{
+ H5O_linfo_t linfo; /* Link info message */
+ htri_t ginfo_exists;
+ htri_t linfo_exists;
+ htri_t pline_exists;
+ H5P_genplist_t *gcpl_plist;
+ H5P_genplist_t *new_plist;
+ hid_t new_gcpl_id = FAIL;
+ hid_t ret_value = FAIL;
+
+ FUNC_ENTER_NOAPI(FAIL)
+
/* Copy the default group creation property list */
if(NULL == (gcpl_plist = (H5P_genplist_t *)H5I_object(H5P_LST_GROUP_CREATE_g)))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get default group creation property list")
@@ -573,8 +611,8 @@ done:
HDONE_ERROR(H5E_SYM, H5E_CANTDEC, FAIL, "can't free")
} /* end if */
- FUNC_LEAVE_API(ret_value)
-} /* end H5Gget_create_plist() */
+ FUNC_LEAVE_NOAPI(ret_value)
+} /* end H5G_get_create_plist() */
/*-------------------------------------------------------------------------
diff --git a/src/H5Gprivate.h b/src/H5Gprivate.h
index 0c01cd2..c87ca51 100644
--- a/src/H5Gprivate.h
+++ b/src/H5Gprivate.h
@@ -243,6 +243,7 @@ H5_DLL herr_t H5G_obj_remove_by_idx(const struct H5O_loc_t *grp_oloc, H5RS_str_t
H5_index_t idx_type, H5_iter_order_t order, hsize_t n, hid_t dxpl_id);
H5_DLL herr_t H5G_obj_lookup_by_idx(const struct H5O_loc_t *grp_oloc, H5_index_t idx_type,
H5_iter_order_t order, hsize_t n, struct H5O_link_t *lnk, hid_t dxpl_id);
+H5_DLL hid_t H5G_get_create_plist(H5G_t *grp);
/*
* These functions operate on symbol table nodes.
diff --git a/src/H5I.c b/src/H5I.c
index 465e607..ffdbcdf 100644
--- a/src/H5I.c
+++ b/src/H5I.c
@@ -2079,21 +2079,55 @@ H5Isearch(H5I_type_t type, H5I_search_func_t func, void *key)
if(H5I_IS_LIB_TYPE(type))
HGOTO_ERROR(H5E_ATOM, H5E_BADGROUP, NULL, "cannot call public function on library type")
+ if((ret_value = H5I_search(type, func, key)) == NULL)
+ HGOTO_ERROR(H5E_ATOM, H5E_BADGROUP, NULL, "cannot call public function on library type")
+
+done:
+ FUNC_LEAVE_API(ret_value)
+} /* end H5Isearch() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5I_search
+ *
+ * Purpose: Private function for H5Isearch
+ *
+ * Return: Success: The first object in the type for which FUNC
+ * returns non-zero. NULL if FUNC returned zero
+ * for every object in the type.
+ *
+ * Failure: NULL
+ *
+ * Programmer: James Laird
+ * Nathaniel Furrer
+ * Friday, April 23, 2004
+ *
+ *-------------------------------------------------------------------------
+ */
+void *
+H5I_search(H5I_type_t _type, H5I_search_func_t _func, void *_key)
+{
+ H5I_search_ud_t udata; /* Context for iteration */
+ void *ret_value; /* Return value */
+
+ FUNC_ENTER_NOAPI(NULL)
+
/* Set up udata struct */
- udata.app_cb = func;
- udata.app_key = key;
+ udata.app_cb = _func;
+ udata.app_key = _key;
udata.ret_obj = NULL;
/* Note that H5I_iterate returns an error code. We ignore it
* here, as we can't do anything with it without revising the API.
*/
- H5I_iterate(type, H5I_search_cb, &udata, TRUE);
+ if(H5I_iterate(_type, H5I_search_cb, &udata, TRUE) < 0)
+ HGOTO_ERROR(H5E_ATOM, H5E_BADGROUP, NULL, "cannot call public function on library type")
/* Set return value */
ret_value = udata.ret_obj;
done:
- FUNC_LEAVE_API(ret_value)
+ FUNC_LEAVE_NOAPI(ret_value)
} /* end H5Isearch() */
diff --git a/src/H5Iprivate.h b/src/H5Iprivate.h
index 125c3f6..42810f5 100644
--- a/src/H5Iprivate.h
+++ b/src/H5Iprivate.h
@@ -79,6 +79,7 @@ H5_DLL H5I_type_t H5I_get_type(hid_t id);
H5_DLL hid_t H5I_get_file_id(hid_t obj_id, hbool_t app_ref);
H5_DLL void *H5I_remove(hid_t id);
H5_DLL void *H5I_remove_verify(hid_t id, H5I_type_t id_type);
+H5_DLL void *H5I_search(H5I_type_t _type, H5I_search_func_t _func, void *_key);
H5_DLL herr_t H5I_iterate(H5I_type_t type, H5I_search_func_t func, void *udata, hbool_t app_ref);
H5_DLL int H5I_get_ref(hid_t id, hbool_t app_ref);
H5_DLL int H5I_inc_ref(hid_t id, hbool_t app_ref);
diff --git a/src/H5Pocpl.c b/src/H5Pocpl.c
index 880b46a..f80e4e7 100644
--- a/src/H5Pocpl.c
+++ b/src/H5Pocpl.c
@@ -1177,6 +1177,43 @@ done:
/*-------------------------------------------------------------------------
+ * Function: H5P_filter_in_pline
+ *
+ * Purpose: Check whether the filter is in the pipeline of the object
+ * creation property list.
+ *
+ * Return: TRUE: found
+ * FALSE: not found
+ * FAIL: error
+ *
+ * Programmer: Raymond Lu
+ * 26 April 2013
+ *
+ *-------------------------------------------------------------------------
+ */
+htri_t
+H5P_filter_in_pline(H5P_genplist_t *plist, H5Z_filter_t id)
+{
+ H5O_pline_t pline; /* Filter pipeline */
+ H5Z_filter_info_t *filter; /* Pointer to filter information */
+ htri_t ret_value = SUCCEED; /* Return value */
+
+ FUNC_ENTER_NOAPI(FAIL)
+
+ /* Get pipeline info */
+ if(H5P_get(plist, H5O_CRT_PIPELINE_NAME, &pline) < 0)
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't get pipeline")
+
+ /* Check if the filter is in the pipeline */
+ if((ret_value = H5Z_filter_in_pline(&pline, id)) < 0)
+ HGOTO_ERROR(H5E_PLINE, H5E_CANTCOMPARE, FAIL, "can't find filter")
+
+done:
+ FUNC_LEAVE_NOAPI(ret_value)
+} /* end H5P_filter_in_pline() */
+
+
+/*-------------------------------------------------------------------------
* Function: H5Premove_filter
*
* Purpose: Deletes a filter from the dataset creation property list;
diff --git a/src/H5Pprivate.h b/src/H5Pprivate.h
index 6560064..083619b 100644
--- a/src/H5Pprivate.h
+++ b/src/H5Pprivate.h
@@ -108,6 +108,7 @@ H5_DLL herr_t H5P_modify_filter(H5P_genplist_t *plist, H5Z_filter_t filter,
H5_DLL herr_t H5P_get_filter_by_id(H5P_genplist_t *plist, H5Z_filter_t id,
unsigned int *flags, size_t *cd_nelmts, unsigned cd_values[],
size_t namelen, char name[], unsigned *filter_config);
+H5_DLL htri_t H5P_filter_in_pline(H5P_genplist_t *plist, H5Z_filter_t id);
/* *SPECIAL* Don't make more of these! -QAK */
H5_DLL htri_t H5P_isa_class(hid_t plist_id, hid_t pclass_id);
diff --git a/src/H5Z.c b/src/H5Z.c
index 9f8b59d..5d68c25 100644
--- a/src/H5Z.c
+++ b/src/H5Z.c
@@ -22,6 +22,7 @@
#include "H5private.h" /* Generic Functions */
#include "H5Dprivate.h" /* Dataset functions */
#include "H5Eprivate.h" /* Error handling */
+#include "H5Fprivate.h" /* File */
#include "H5Iprivate.h" /* IDs */
#include "H5MMprivate.h" /* Memory management */
#include "H5Oprivate.h" /* Object headers */
@@ -51,6 +52,9 @@ typedef enum {
H5Z_PRELUDE_SET_LOCAL /* Call "set local" callback */
} H5Z_prelude_type_t;
+/* Maximal number of the list of opened objects (2^16) */
+#define NUM_OBJS 65536
+
/* Local variables */
static size_t H5Z_table_alloc_g = 0;
static size_t H5Z_table_used_g = 0;
@@ -403,33 +407,133 @@ done:
*-------------------------------------------------------------------------
*/
herr_t
-H5Z_unregister (H5Z_filter_t id)
+H5Z_unregister (H5Z_filter_t filter_id)
{
- size_t i; /* Local index variable */
+ hid_t *file_list = NULL;
+ hid_t *obj_id_list = NULL;
+ size_t num_obj_id = 0;
+ size_t num_file_id = 0;
+ H5F_t *f = NULL; /* File to query */
+ H5I_type_t id_type;
+ hid_t ocpl_id;
+ H5P_genplist_t *plist; /* Property list */
+ size_t filter_index; /* Local index variable for filter */
+ int i;
+ htri_t filter_in_pline = FALSE;
herr_t ret_value=SUCCEED; /* Return value */
FUNC_ENTER_NOAPI(FAIL)
- assert (id>=0 && id<=H5Z_FILTER_MAX);
+ assert (filter_id>=0 && filter_id<=H5Z_FILTER_MAX);
/* Is the filter already registered? */
- for (i=0; i<H5Z_table_used_g; i++)
- if (H5Z_table_g[i].id==id)
+ for (filter_index=0; filter_index<H5Z_table_used_g; filter_index++)
+ if (H5Z_table_g[filter_index].id==filter_id)
break;
/* Fail if filter not found */
- if (i>=H5Z_table_used_g)
+ if (filter_index>=H5Z_table_used_g)
HGOTO_ERROR(H5E_PLINE, H5E_NOTFOUND, FAIL, "filter is not registered")
+ /* Count the number of opened datasets and groups among all opened files */
+ if(H5F_get_obj_count(NULL, H5F_OBJ_DATASET | H5F_OBJ_GROUP, FALSE, &num_obj_id) < 0)
+ HGOTO_ERROR(H5E_FILE, H5E_CANTGET, FAIL, "can't get object number")
+
+ if(num_obj_id) {
+ if(NULL == (obj_id_list = (hid_t *)H5MM_malloc(num_obj_id*sizeof(hid_t))))
+ HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, FAIL, "failed to allocate space")
+
+ /* Find all opened objects that may use the filter (datasets and groups). Passing NULL as a pointer to
+ * file structure indicates searching among all opened files */
+ if(H5F_get_obj_ids(NULL, H5F_OBJ_DATASET | H5F_OBJ_GROUP, num_obj_id, obj_id_list, FALSE, &num_obj_id) < 0)
+ HGOTO_ERROR(H5E_FILE, H5E_CANTGET, FAIL, "can't get object IDs")
+ }
+
+ /* Check if any opened object (dataset or group) uses the filter. If so, fail with a message */
+ for(i=0; i<num_obj_id; i++) {
+ if((id_type = H5I_get_type(obj_id_list[i])) < 0)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "bad object id");
+
+ switch(id_type) {
+ case H5I_GROUP:
+ {
+ H5G_t *group = NULL;
+
+ if(NULL == (group = (H5G_t *)H5I_object_verify(obj_id_list[i], H5I_GROUP)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a group")
+
+ if((ocpl_id = H5G_get_create_plist(group)) < 0)
+ HGOTO_ERROR(H5E_ARGS, H5E_CANTGET, FAIL, "can't get group creation property list")
+
+ }
+ break;
+
+ case H5I_DATASET:
+ {
+ H5D_t *dataset = NULL;
+
+ if(NULL == (dataset = (H5D_t *)H5I_object_verify(obj_id_list[i], H5I_DATASET)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset")
+
+ if((ocpl_id = H5D_get_create_plist(dataset)) < 0)
+ HGOTO_ERROR(H5E_ARGS, H5E_CANTGET, FAIL, "can't get dataset creation property list")
+ }
+ break;
+
+ default:
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "not a valid object")
+ } /* end switch */
+
+ /* Get the plist structure of object creation */
+ if(NULL == (plist = H5P_object_verify(ocpl_id, H5P_OBJECT_CREATE)))
+ HGOTO_ERROR(H5E_ATOM, H5E_BADATOM, FAIL, "can't find object for ID")
+
+ /* Check if the object creation property list uses the filter */
+ if((filter_in_pline = H5P_filter_in_pline(plist, filter_id)) < 0)
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't check filter in pipeline")
+
+ if(filter_in_pline)
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't unregister filter because some object is still using it")
+ }
+
+ /* Count the number of opened files */
+ if(H5F_get_obj_count(NULL, H5F_OBJ_FILE, FALSE, &num_file_id) < 0)
+ HGOTO_ERROR(H5E_FILE, H5E_CANTGET, FAIL, "can't get file number")
+
+ /* Get the list of IDs for all opened files */
+ if(num_file_id) {
+ if(NULL == (file_list = (hid_t *)H5MM_malloc(num_file_id*sizeof(hid_t))))
+ HGOTO_ERROR(H5E_RESOURCE, H5E_CANTALLOC, FAIL, "failed to allocate space")
+
+ if(H5F_get_obj_ids(NULL, H5F_OBJ_FILE, num_file_id, file_list, FALSE, &num_file_id) < 0)
+ HGOTO_ERROR(H5E_FILE, H5E_CANTGET, FAIL, "can't get file IDs")
+ }
+
+ /* Flush all opened files in case any file uses the filter */
+ for(i=0; i<num_file_id; i++) {
+ if(NULL == (f = (H5F_t *)H5I_object_verify(file_list[i], H5I_FILE)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a file")
+
+ /* Call the flush routine for mounted file hierarchies. Do a global flush
+ * if the file is opened for write */
+ if(H5F_ACC_RDWR & H5F_INTENT(f)) {
+ if(H5F_flush_mounts(f, H5AC_dxpl_id) < 0)
+ HGOTO_ERROR(H5E_FILE, H5E_CANTFLUSH, FAIL, "unable to flush file hierarchy")
+ } /* end if */
+ }
+
/* Remove filter from table */
/* Don't worry about shrinking table size (for now) */
- HDmemmove(&H5Z_table_g[i],&H5Z_table_g[i+1],sizeof(H5Z_class2_t)*((H5Z_table_used_g-1)-i));
+ HDmemmove(&H5Z_table_g[filter_index],&H5Z_table_g[filter_index+1],sizeof(H5Z_class2_t)*((H5Z_table_used_g-1)-filter_index));
#ifdef H5Z_DEBUG
- HDmemmove(&H5Z_stat_table_g[i],&H5Z_stat_table_g[i+1],sizeof(H5Z_stats_t)*((H5Z_table_used_g-1)-i));
+ HDmemmove(&H5Z_stat_table_g[filter_index],&H5Z_stat_table_g[filter_index+1],sizeof(H5Z_stats_t)*((H5Z_table_used_g-1)-filter_index));
#endif /* H5Z_DEBUG */
H5Z_table_used_g--;
done:
+ if(file_list) H5MM_free(file_list);
+ if(obj_id_list) H5MM_free(obj_id_list);
+
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5Z_unregister() */
@@ -1275,6 +1379,49 @@ done:
/*-------------------------------------------------------------------------
+ * Function: H5Z_filter_in_pline
+ *
+ * Purpose: Check whether a filter is in the filter pipeline using the
+ * filter ID. This function is very similar to H5Z_filter_info
+ *
+ * Return: TRUE - found filter
+ * FALSE - not found
+ * FAIL - error
+ *
+ * Programmer: Raymond Lu
+ * 26 April 2013
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+htri_t
+H5Z_filter_in_pline(const H5O_pline_t *pline, H5Z_filter_t filter)
+{
+ size_t idx; /* Index of filter in pipeline */
+ htri_t ret_value = TRUE; /* Return value */
+
+ FUNC_ENTER_NOAPI(FAIL)
+
+ assert(pline);
+ assert(filter>=0 && filter<=H5Z_FILTER_MAX);
+
+ /* Locate the filter in the pipeline */
+ for(idx=0; idx<pline->nused; idx++)
+ if(pline->filter[idx].id==filter)
+ break;
+
+ /* Check if the filter was not found in the pipeline */
+ if(idx>=pline->nused)
+ ret_value = FALSE;
+
+done:
+ FUNC_LEAVE_NOAPI(ret_value)
+} /* end H5Z_filter_in_pline() */
+
+
+
+/*-------------------------------------------------------------------------
* Function: H5Z_all_filters_avail
*
* Purpose: Verify that all the filters in a pipeline are currently
diff --git a/src/H5Zprivate.h b/src/H5Zprivate.h
index f53b50c..6c0a46f 100644
--- a/src/H5Zprivate.h
+++ b/src/H5Zprivate.h
@@ -91,6 +91,7 @@ H5_DLL herr_t H5Z_can_apply_direct(const struct H5O_pline_t *pline);
H5_DLL herr_t H5Z_set_local_direct(const struct H5O_pline_t *pline);
H5_DLL H5Z_filter_info_t *H5Z_filter_info(const struct H5O_pline_t *pline,
H5Z_filter_t filter);
+H5_DLL htri_t H5Z_filter_in_pline(const struct H5O_pline_t *pline, H5Z_filter_t filter);
H5_DLL htri_t H5Z_all_filters_avail(const struct H5O_pline_t *pline);
H5_DLL htri_t H5Z_filter_avail(H5Z_filter_t id);
H5_DLL herr_t H5Z_delete(struct H5O_pline_t *pline, H5Z_filter_t filter);
diff --git a/src/H5public.h b/src/H5public.h
index 7eb291f..391b705 100644
--- a/src/H5public.h
+++ b/src/H5public.h
@@ -75,10 +75,10 @@ extern "C" {
/* Version numbers */
#define H5_VERS_MAJOR 1 /* For major interface/format changes */
#define H5_VERS_MINOR 9 /* For minor interface/format changes */
-#define H5_VERS_RELEASE 149 /* For tweaks, bug-fixes, or development */
+#define H5_VERS_RELEASE 151 /* For tweaks, bug-fixes, or development */
#define H5_VERS_SUBRELEASE "FA_a5" /* For pre-releases like snap0 */
/* Empty string for real releases. */
-#define H5_VERS_INFO "HDF5 library version: 1.9.149-FA_a5" /* Full version string */
+#define H5_VERS_INFO "HDF5 library version: 1.9.151-FA_a5" /* Full version string */
#define H5check() H5check_version(H5_VERS_MAJOR,H5_VERS_MINOR, \
H5_VERS_RELEASE)
diff --git a/src/Makefile.in b/src/Makefile.in
index c2d29f9..652f4ec 100644
--- a/src/Makefile.in
+++ b/src/Makefile.in
@@ -525,7 +525,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 139
+LT_VERS_REVISION = 141
LT_VERS_AGE = 0
H5detect_CFLAGS = -g $(AM_CFLAGS)
diff --git a/test/Makefile.am b/test/Makefile.am
index 0c77646..22b7224 100644
--- a/test/Makefile.am
+++ b/test/Makefile.am
@@ -47,7 +47,7 @@ TEST_PROG= testhdf5 lheap ohdr stab gheap cache cache_api cache_tagging \
big mtime fillval mount flush1 flush2 app_ref enum \
set_extent ttsafe enc_dec_plist enc_dec_plist_with_endianess\
getname vfd ntypes dangle dtransform reserved cross_read \
- freespace mf farray earray btree2 fheap file_image
+ freespace mf farray earray btree2 fheap file_image unregister
bin_PROGRAMS=swmr_generator swmr_reader swmr_writer swmr_remove_reader \
swmr_remove_writer swmr_addrem_writer swmr_sparse_reader swmr_sparse_writer \
@@ -158,7 +158,7 @@ CHECK_CLEANFILES+=accum.h5 cmpd_dset.h5 compact_dataset.h5 dataset.h5 \
earray.h5 efc[0-5].h5 log_vfd_out.log \
new_multi_file_v16-r.h5 new_multi_file_v16-s.h5 \
split_get_file_image_test-m.h5 split_get_file_image_test-r.h5 \
- file_image_core_test.h5.copy \
+ file_image_core_test.h5.copy unregister_filter_1.h5 unregister_filter_2.h5 \
swmr_data.h5 use_append_chunk.h5 use_append_mchunks.h5 \
flushrefresh.h5 flushrefresh_VERIFICATION_START \
flushrefresh_VERIFICATION_CHECKPOINT1 flushrefresh_VERIFICATION_CHECKPOINT2 \
diff --git a/test/Makefile.in b/test/Makefile.in
index faed943..73da8bd 100644
--- a/test/Makefile.in
+++ b/test/Makefile.in
@@ -176,7 +176,8 @@ am__EXEEXT_1 = testhdf5$(EXEEXT) lheap$(EXEEXT) ohdr$(EXEEXT) \
vfd$(EXEEXT) ntypes$(EXEEXT) dangle$(EXEEXT) \
dtransform$(EXEEXT) reserved$(EXEEXT) cross_read$(EXEEXT) \
freespace$(EXEEXT) mf$(EXEEXT) farray$(EXEEXT) earray$(EXEEXT) \
- btree2$(EXEEXT) fheap$(EXEEXT) file_image$(EXEEXT)
+ btree2$(EXEEXT) fheap$(EXEEXT) file_image$(EXEEXT) \
+ unregister$(EXEEXT)
@HAVE_SHARED_CONDITIONAL_TRUE@am__EXEEXT_2 = plugin$(EXEEXT)
am__EXEEXT_3 = gen_bad_ohdr$(EXEEXT) gen_bogus$(EXEEXT) \
gen_cross$(EXEEXT) gen_deflate$(EXEEXT) gen_filters$(EXEEXT) \
@@ -547,6 +548,10 @@ unlink_SOURCES = unlink.c
unlink_OBJECTS = unlink.$(OBJEXT)
unlink_LDADD = $(LDADD)
unlink_DEPENDENCIES = libh5test.la $(LIBHDF5)
+unregister_SOURCES = unregister.c
+unregister_OBJECTS = unregister.$(OBJEXT)
+unregister_LDADD = $(LDADD)
+unregister_DEPENDENCIES = libh5test.la $(LIBHDF5)
am_use_append_chunk_OBJECTS = use_append_chunk.$(OBJEXT) \
use_common.$(OBJEXT)
use_append_chunk_OBJECTS = $(am_use_append_chunk_OBJECTS)
@@ -616,8 +621,9 @@ SOURCES = $(libdynlib1_la_SOURCES) $(libdynlib2_la_SOURCES) \
swmr_generator.c swmr_reader.c swmr_remove_reader.c \
swmr_remove_writer.c swmr_sparse_reader.c swmr_sparse_writer.c \
swmr_writer.c tcheck_version.c $(testhdf5_SOURCES) testmeta.c \
- $(ttsafe_SOURCES) unlink.c $(use_append_chunk_SOURCES) \
- $(use_append_mchunks_SOURCES) vfd.c
+ $(ttsafe_SOURCES) unlink.c unregister.c \
+ $(use_append_chunk_SOURCES) $(use_append_mchunks_SOURCES) \
+ vfd.c
DIST_SOURCES = $(am__libdynlib1_la_SOURCES_DIST) \
$(am__libdynlib2_la_SOURCES_DIST) \
$(am__libdynlib3_la_SOURCES_DIST) $(libh5test_la_SOURCES) \
@@ -640,8 +646,9 @@ DIST_SOURCES = $(am__libdynlib1_la_SOURCES_DIST) \
swmr_generator.c swmr_reader.c swmr_remove_reader.c \
swmr_remove_writer.c swmr_sparse_reader.c swmr_sparse_writer.c \
swmr_writer.c tcheck_version.c $(testhdf5_SOURCES) testmeta.c \
- $(ttsafe_SOURCES) unlink.c $(use_append_chunk_SOURCES) \
- $(use_append_mchunks_SOURCES) vfd.c
+ $(ttsafe_SOURCES) unlink.c unregister.c \
+ $(use_append_chunk_SOURCES) $(use_append_mchunks_SOURCES) \
+ vfd.c
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
@@ -963,8 +970,9 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog accum.h5 cmpd_dset.h5 \
earray.h5 efc[0-5].h5 log_vfd_out.log new_multi_file_v16-r.h5 \
new_multi_file_v16-s.h5 split_get_file_image_test-m.h5 \
split_get_file_image_test-r.h5 file_image_core_test.h5.copy \
- swmr_data.h5 use_append_chunk.h5 use_append_mchunks.h5 \
- flushrefresh.h5 flushrefresh_VERIFICATION_START \
+ unregister_filter_1.h5 unregister_filter_2.h5 swmr_data.h5 \
+ use_append_chunk.h5 use_append_mchunks.h5 flushrefresh.h5 \
+ flushrefresh_VERIFICATION_START \
flushrefresh_VERIFICATION_CHECKPOINT1 \
flushrefresh_VERIFICATION_CHECKPOINT2 \
flushrefresh_VERIFICATION_DONE atomic_data accum_swmr_big.h5 \
@@ -992,7 +1000,7 @@ TEST_PROG = testhdf5 lheap ohdr stab gheap cache cache_api cache_tagging \
big mtime fillval mount flush1 flush2 app_ref enum \
set_extent ttsafe enc_dec_plist enc_dec_plist_with_endianess\
getname vfd ntypes dangle dtransform reserved cross_read \
- freespace mf farray earray btree2 fheap file_image
+ freespace mf farray earray btree2 fheap file_image unregister
# These programs generate test files for the tests. They don't need to be
@@ -1488,6 +1496,9 @@ ttsafe$(EXEEXT): $(ttsafe_OBJECTS) $(ttsafe_DEPENDENCIES) $(EXTRA_ttsafe_DEPENDE
unlink$(EXEEXT): $(unlink_OBJECTS) $(unlink_DEPENDENCIES) $(EXTRA_unlink_DEPENDENCIES)
@rm -f unlink$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(unlink_OBJECTS) $(unlink_LDADD) $(LIBS)
+unregister$(EXEEXT): $(unregister_OBJECTS) $(unregister_DEPENDENCIES) $(EXTRA_unregister_DEPENDENCIES)
+ @rm -f unregister$(EXEEXT)
+ $(AM_V_CCLD)$(LINK) $(unregister_OBJECTS) $(unregister_LDADD) $(LIBS)
use_append_chunk$(EXEEXT): $(use_append_chunk_OBJECTS) $(use_append_chunk_DEPENDENCIES) $(EXTRA_use_append_chunk_DEPENDENCIES)
@rm -f use_append_chunk$(EXEEXT)
$(AM_V_CCLD)$(LINK) $(use_append_chunk_OBJECTS) $(use_append_chunk_LDADD) $(LIBS)
@@ -1626,6 +1637,7 @@ distclean-compile:
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tvlstr.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/tvltypes.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/unlink.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/unregister.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/use_append_chunk.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/use_append_mchunks.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/use_common.Po@am__quote@
diff --git a/test/fillval.c b/test/fillval.c
index 847dfbd..9271f80 100644
--- a/test/fillval.c
+++ b/test/fillval.c
@@ -37,6 +37,7 @@ const char *FILENAME[] = {
"fillval_6",
"fillval_7",
"fillval_8",
+ "fillval_9",
NULL
};
@@ -2085,6 +2086,266 @@ error:
return 1;
}
+/*-------------------------------------------------------------------------
+ * Function: test_partalloc_cases
+ *
+ * Purpose: Tests reading and writing fill values for datasets.
+ *
+ * Return: Success: 0
+ *
+ * Failure: 1
+ *
+ * Programmer: Joel Plutchak
+ * April 15, 2013
+ *
+ * Modifications:
+ * This function is called by test_partalloc to write and read
+ * datasets for different cases of chunked datasets with
+ * unallocated chunks.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+static int
+test_partalloc_cases(hid_t file, hid_t dcpl, const char *dname, H5D_fill_time_t fill_time)
+{
+ hid_t fspace=-1, dset1=-1, rspace = -1;
+ herr_t ret;
+ hsize_t ds_size[2] = {4, 4};
+ hsize_t max_size[2] = {H5S_UNLIMITED,4};
+ hsize_t chunk_size[2] = {1, 4};
+ int fillval=(-1);
+ int w_values[] = {42}; /* New value to be written */
+ int f_values[4] = {88,88,88,88}; /* pre-seed read buffer with known values */
+ int r_values[4] = {88,88,88,88}; /* pre-seed read buffer with known values */
+ hsize_t coord[1][2]; /* coordinate(s) of point to write */
+ hsize_t start[2], count[2];
+
+ fillval = 0; /* default fill value is zero */
+
+ /* Create dataset with 4x4 integer dataset */
+ if((fspace = H5Screate_simple(2, ds_size, max_size)) < 0)
+ goto error;
+ if((dset1 = H5Dcreate2(file, dname, H5T_NATIVE_INT, fspace, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ goto error;
+
+ /*
+ * Select a point in the file dataspace.
+ */
+ coord[0][0]=0; coord[0][1]=0;
+ if (H5Sselect_elements( fspace, H5S_SELECT_SET, (size_t)1, (const hsize_t *)coord))
+ goto error;
+
+ /*
+ * Write single data point to the dataset.
+ */
+ if ((ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, fspace, H5P_DEFAULT, w_values))< 0) {
+ goto error;
+ }
+
+ /* Read a line/chunk and make sure values are right */
+ rspace = H5Screate_simple(2, chunk_size, NULL);
+
+ /* Read the first row of elements: one known plus three fill */
+ start[0] = 0;
+ start[1] = 0;
+ count[0] = 1;
+ count[1] = 4;
+ if ((ret = H5Sselect_hyperslab(fspace, H5S_SELECT_SET, start, NULL, count, NULL)) < 0)
+ goto error;
+ if ((ret = H5Sselect_all(rspace)) < 0)
+ goto error;
+ if(H5Dread(dset1, H5T_NATIVE_INT, rspace, fspace, H5P_DEFAULT, &r_values) < 0)
+ goto error;
+
+ /* Read the third row of elements: all fill */
+ start[0] = 2;
+ start[1] = 0;
+ count[0] = 1;
+ count[1] = 4;
+ if ((ret = H5Sselect_hyperslab(fspace, H5S_SELECT_SET, start, NULL, count, NULL)) < 0)
+ goto error;
+ if(H5Dread(dset1, H5T_NATIVE_INT, rspace, fspace, H5P_DEFAULT, &f_values) < 0)
+ goto error;
+
+ if(fill_time != H5D_FILL_TIME_NEVER) {
+ /* check allocated chunk */
+ if ((r_values[0] != w_values[0]) ||
+ (r_values[1] != fillval) ||
+ (r_values[2] != fillval) ||
+ (r_values[3] != fillval)) {
+ H5_FAILED();
+ HDfprintf(stdout, "%u: Allocated chunk value read was not correct.\n", (unsigned)__LINE__);
+ printf(" {%ld,%ld,%ld,%ld} should be {%ld,%ld,%ld,%ld}\n",
+ (long)r_values[0], (long)r_values[1],
+ (long)r_values[2], (long)r_values[3],
+ (long)w_values[0], (long)fillval,
+ (long)fillval, (long)fillval );
+ goto error;
+ }
+ /* check unallocated chunk */
+ if ((f_values[0] != fillval) ||
+ (f_values[1] != fillval) ||
+ (f_values[2] != fillval) ||
+ (f_values[3] != fillval)) {
+ H5_FAILED();
+ HDfprintf(stdout, "%u: Unallocated chunk value read was not correct.\n", (unsigned)__LINE__);
+ printf(" {%ld,%ld,%ld,%ld} should be {%ld,%ld,%ld,%ld}\n",
+ (long)f_values[0], (long)f_values[1],
+ (long)f_values[2], (long)f_values[3],
+ (long)fillval, (long)fillval,
+ (long)fillval, (long)fillval );
+ goto error;
+ }
+ /* for the "never fill" case expect to get trash values, so skip */
+ }
+ else if(fill_time == H5D_FILL_TIME_NEVER) {
+ }
+
+ if(H5Sclose(rspace) < 0) goto error;
+ if(H5Dclose(dset1) < 0) goto error;
+ if(H5Sclose(fspace) < 0) goto error;
+ return 0;
+
+ error:
+ H5E_BEGIN_TRY {
+ H5Dclose(dset1);
+ H5Sclose(fspace);
+ H5Sclose(rspace);
+ } H5E_END_TRY;
+
+ return 1;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: test_partalloc
+ *
+ * Purpose: Tests fill values for chunked, partially-allocated datasets.
+ * Regression test for HDFFV-8247.
+ *
+ * Return: Success: 0
+ *
+ * Failure: number of errors
+ *
+ * Programmer: Joel Plutchak
+ * April 15, 2013
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_partalloc(hid_t fapl, const char *base_name)
+{
+ char filename[1024];
+ hid_t file=-1, dcpl=-1;
+ hsize_t ch_size[2] = {1, 4};
+ int nerrors=0;
+
+ TESTING("chunked dataset partially allocated I/O");
+
+ h5_fixname(base_name, fapl, filename, sizeof filename);
+ if((file=H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
+ goto error;
+
+ if((dcpl=H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
+ if(H5Pset_chunk(dcpl, 2, ch_size) < 0) goto error;
+
+ /* I. Test H5D_ALLOC_TIME_LATE space allocation cases */
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, "\nALLOC_TIME_LATE\n" );
+#endif
+
+ /* case for H5D_FILL_TIME_ALLOC as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_ALLOC\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset1", H5D_FILL_TIME_ALLOC);
+
+ /* case for H5D_FILL_TIME_NEVER as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_NEVER) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_NEVER\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset2", H5D_FILL_TIME_NEVER );
+
+ /* case for H5D_FILL_TIME_IFSET as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_IFSET) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_IFSET\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset3", H5D_FILL_TIME_IFSET );
+
+ /* II. Test H5D_ALLOC_TIME_INCR space allocation cases */
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_INCR) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, "\nALLOC_TIME_INCR\n" );
+#endif
+
+ /* case for H5D_FILL_TIME_ALLOC as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_ALLOC\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset4", H5D_FILL_TIME_ALLOC );
+
+ /* case for H5D_FILL_TIME_NEVER as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_NEVER) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_NEVER\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset5", H5D_FILL_TIME_NEVER );
+
+ /* case for H5D_FILL_TIME_IFSET as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_IFSET) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_IFSET\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset6", H5D_FILL_TIME_IFSET );
+
+ /* III. Test H5D_ALLOC_TIME_EARLY space allocation cases */
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, "\nALLOC_TIME_EARLY\n" );
+#endif
+
+ /* case for H5D_FILL_TIME_ALLOC as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_ALLOC\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset7", H5D_FILL_TIME_ALLOC );
+
+ /* case for H5D_FILL_TIME_NEVER as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_NEVER) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_NEVER\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset8", H5D_FILL_TIME_NEVER );
+
+ /* case for H5D_FILL_TIME_IFSET as fill write time and fill value to be default */
+ if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_IFSET) < 0) goto error;
+#ifdef DEBUG
+ fprintf( stdout, " FILL_TIME_IFSET\n" );
+#endif
+ nerrors += test_partalloc_cases(file, dcpl, "dset9", H5D_FILL_TIME_IFSET );
+
+ if(nerrors)
+ goto error;
+ if(H5Pclose(dcpl) < 0) goto error;
+ if(H5Fclose(file) < 0) goto error;
+ PASSED();
+ return 0;
+
+ error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Fclose(file);
+ } H5E_END_TRY;
+ return nerrors;
+}
+
/*-------------------------------------------------------------------------
* Function: main
@@ -2157,6 +2418,7 @@ main(int argc, char *argv[])
nerrors += test_create(my_fapl, FILENAME[0], H5D_CHUNKED);
nerrors += test_rdwr (my_fapl, FILENAME[2], H5D_CHUNKED);
nerrors += test_extend(my_fapl, FILENAME[4], H5D_CHUNKED);
+ nerrors += test_partalloc(my_fapl, FILENAME[8]);
} /* end if */
/* Contiguous storage layout tests */
diff --git a/test/unregister.c b/test/unregister.c
new file mode 100644
index 0000000..88f4d1b
--- /dev/null
+++ b/test/unregister.c
@@ -0,0 +1,258 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic document set and is *
+ * linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have access *
+ * to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+/*
+ * Programmer: Raymond Lu
+ * 24 April 2013
+ *
+ * Purpose: Tests H5Zunregister function
+ */
+#include "h5test.h"
+
+const char *FILENAME[] = {
+ "unregister_filter_1",
+ "unregister_filter_2",
+ NULL
+};
+
+#define GROUP_NAME "group"
+#define DSET_NAME "dataset"
+#define FILENAME_BUF_SIZE 1024
+#define DSET_DIM1 100
+#define DSET_DIM2 200
+#define FILTER_CHUNK_DIM1 2
+#define FILTER_CHUNK_DIM2 25
+#define GROUP_ITERATION 1000
+
+#define H5Z_FILTER_DUMMY 312
+
+static size_t filter_dummy(unsigned int flags, size_t cd_nelmts,
+ const unsigned int *cd_values, size_t nbytes, size_t *buf_size, void **buf);
+
+/* Dummy filter for test_unregister_filters only */
+const H5Z_class2_t H5Z_DUMMY[1] = {{
+ H5Z_CLASS_T_VERS, /* H5Z_class_t version */
+ H5Z_FILTER_DUMMY, /* Filter id number */
+ 1, 1, /* Encoding and decoding enabled */
+ "dummy", /* Filter name for debugging */
+ NULL, /* The "can apply" callback */
+ NULL, /* The "set local" callback */
+ filter_dummy, /* The actual filter function */
+}};
+
+
+/*-------------------------------------------------------------------------
+ * Function: filter_dummy
+ *
+ * Purpose: A dummy compression method that doesn't do anything. This
+ * filter is only for test_unregister_filters. Please don't
+ * use it for other tests because it may mess up this test.
+ *
+ * Return: Success: Data chunk size
+ *
+ * Failure: 0
+ *
+ * Programmer: Raymond Lu
+ * April 24, 2013
+ *
+ *-------------------------------------------------------------------------
+ */
+static size_t
+filter_dummy(unsigned int UNUSED flags, size_t UNUSED cd_nelmts,
+ const unsigned int UNUSED *cd_values, size_t nbytes,
+ size_t UNUSED *buf_size, void UNUSED **buf)
+{
+ return nbytes;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: test_unregister_filters
+ *
+ * Purpose: Tests unregistering filter before closing the file
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Raymond Lu
+ * 11 April 2013
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_unregister_filters(hid_t my_fapl)
+{
+ hid_t file1, file2;
+ hid_t dc;
+ hid_t gcpl, gid, group;
+ hid_t dataset, space;
+ int i, j, n;
+ char gname[256];
+ char filename[FILENAME_BUF_SIZE];
+ const hsize_t chunk_size[2] = {FILTER_CHUNK_DIM1, FILTER_CHUNK_DIM2}; /* Chunk dimensions */
+ hsize_t dims[2];
+ int points[DSET_DIM1][DSET_DIM2];
+ herr_t ret;
+
+ TESTING("Unregistering filter");
+
+ /* Create first file */
+ h5_fixname(FILENAME[0], my_fapl, filename, sizeof filename);
+ if((file1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, my_fapl)) < 0) goto error;
+
+ /* Create second file */
+ h5_fixname(FILENAME[1], my_fapl, filename, sizeof filename);
+ if((file2 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, my_fapl)) < 0) goto error;
+
+ /* Register DUMMY filter */
+ if(H5Zregister(H5Z_DUMMY) < 0) goto error;
+
+ if(H5Zfilter_avail(H5Z_FILTER_DUMMY)!=TRUE) goto error;
+
+ if((gcpl = H5Pcreate(H5P_GROUP_CREATE)) < 0) goto error;
+
+ /* Use DUMMY filter for creating groups */
+ if(H5Pset_filter (gcpl, H5Z_FILTER_DUMMY, H5Z_FLAG_MANDATORY, (size_t)0, NULL) < 0) goto error;
+
+ /* Create a group using this filter */
+ if((gid = H5Gcreate2(file1, GROUP_NAME, H5P_DEFAULT, gcpl, H5P_DEFAULT)) < 0) goto error;
+
+ /* Create multiple groups under the main group */
+ for (i=0; i < GROUP_ITERATION; i++) {
+ sprintf(gname, "group_%d", i);
+ if((group = H5Gcreate2(gid, gname, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
+ if(H5Gclose(group) < 0) goto error;
+ }
+
+ if(H5Fflush(file1, H5F_SCOPE_GLOBAL) < 0) goto error;
+
+ /* Unregister the filter before closing the group. It should fail */
+ H5E_BEGIN_TRY {
+ ret = H5Zunregister(H5Z_FILTER_DUMMY);
+ } H5E_END_TRY;
+ if(ret>=0) {
+ H5_FAILED();
+ printf(" Line %d: Should not be able to unregister filter\n", __LINE__);
+ goto error;
+ } /* end if */
+
+
+ /* Close the group */
+ if(H5Gclose(gid) < 0) goto error;
+
+ /* Clean up objects used for this test */
+ if(H5Pclose (gcpl) < 0) goto error;
+
+ if((dc = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
+ if(H5Pset_chunk (dc, 2, chunk_size) < 0) goto error;
+
+ if(H5Pset_filter(dc, H5Z_FILTER_DUMMY, 0, (size_t)0, NULL) < 0) goto error;
+
+ /* Initialize the dataset */
+ for(i = n = 0; i < DSET_DIM1; i++)
+ for(j = 0; j < DSET_DIM2; j++)
+ points[i][j] = n++;
+
+ /* Create the data space */
+ dims[0] = DSET_DIM1;
+ dims[1] = DSET_DIM2;
+ if((space = H5Screate_simple(2, dims, NULL)) < 0) goto error;
+
+ /* Create a dataset in the first file */
+ if((dataset = H5Dcreate2(file1, DSET_NAME, H5T_NATIVE_INT, space,
+ H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
+
+ /* Write the data to the dataset */
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0)
+ goto error;
+
+ /* Unregister the filter before closing the dataset. It should fail */
+ /*if(H5Zunregister(H5Z_FILTER_DUMMY) < 0) goto error;*/
+ H5E_BEGIN_TRY {
+ ret = H5Zunregister(H5Z_FILTER_DUMMY);
+ } H5E_END_TRY;
+ if(ret>=0) {
+ H5_FAILED();
+ printf(" Line %d: Should not be able to unregister filter\n", __LINE__);
+ goto error;
+ } /* end if */
+
+ if(H5Dclose(dataset) < 0) goto error;
+
+ /* Create a dataset in the second file */
+ if((dataset = H5Dcreate2(file2, DSET_NAME, H5T_NATIVE_INT, space,
+ H5P_DEFAULT, dc, H5P_DEFAULT)) < 0) goto error;
+
+ /* Write the data to the dataset */
+ if(H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, points) < 0)
+ goto error;
+
+ if(H5Dclose(dataset) < 0) goto error;
+
+ /* Unregister the filter after closing all objects but before closing files.
+ * It should flush all files. */
+ if(H5Zunregister(H5Z_FILTER_DUMMY) < 0) goto error;
+
+ /* Clean up objects used for this test */
+ if(H5Pclose (dc) < 0) goto error;
+ if(H5Fclose(file1) < 0) goto error;
+ if(H5Fclose(file2) < 0) goto error;
+
+ PASSED();
+ return 0;
+
+error:
+ return -1;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: main
+ *
+ * Purpose: Tests unregistering filter with H5Zunregister
+ *
+ * Return: Success: exit(0)
+ *
+ * Failure: exit(1)
+ *
+ * Programmer: Raymond Lu
+ * 11 April 2013
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+main(void)
+{
+ hid_t fapl;
+ int nerrors = 0;
+
+ /* Testing setup */
+ h5_reset();
+ fapl = h5_fileaccess();
+
+ /* Test unregistering filter in its own file */
+ nerrors += (test_unregister_filters(fapl) < 0 ? 1 : 0);
+
+ if(nerrors)
+ goto error;
+ printf("All dataset tests passed.\n");
+ h5_cleanup(FILENAME, fapl);
+
+ return 0;
+
+error:
+ nerrors = MAX(1, nerrors);
+ printf("***** %d DATASET TEST%s FAILED! *****\n",
+ nerrors, 1 == nerrors ? "" : "S");
+ return 1;
+}
+
diff --git a/testpar/t_dset.c b/testpar/t_dset.c
index 610739c..39ca686 100644
--- a/testpar/t_dset.c
+++ b/testpar/t_dset.c
@@ -3638,7 +3638,7 @@ test_no_collective_cause_mode_filter(int selection_mode)
VRFY((fapl_read >= 0), "create_faccess_plist() succeeded");
fid = H5Fopen (filename, H5F_ACC_RDONLY, fapl_read);
- dataset = H5Dopen (fid, DSET_NOCOLCAUSE, H5P_DEFAULT);
+ dataset = H5Dopen2 (fid, DSET_NOCOLCAUSE, H5P_DEFAULT);
/* Set collective I/O properties in the dxpl. */
ret = H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);
diff --git a/vms/src/h5pubconf.h b/vms/src/h5pubconf.h
index c79ef97..6c12d6c 100644
--- a/vms/src/h5pubconf.h
+++ b/vms/src/h5pubconf.h
@@ -502,7 +502,7 @@
#define H5_PACKAGE_NAME "HDF5"
/* Define to the full name and version of this package. */
-#define H5_PACKAGE_STRING "HDF5 1.9.149-FA_a5"
+#define H5_PACKAGE_STRING "HDF5 1.9.151-FA_a5"
/* Define to the one symbol short name of this package. */
#define H5_PACKAGE_TARNAME "hdf5"
@@ -511,7 +511,7 @@
#define H5_PACKAGE_URL ""
/* Define to the version of this package. */
-#define H5_PACKAGE_VERSION "1.9.149-FA_a5"
+#define H5_PACKAGE_VERSION "1.9.151-FA_a5"
/* Width for printf() for type `long long' or `__int64', use `ll' */
#define H5_PRINTF_LL_WIDTH "ll"
@@ -674,7 +674,7 @@
/* #undef H5_USING_MEMCHECKER */
/* Version number of package */
-#define H5_VERSION "1.9.149-FA_a5"
+#define H5_VERSION "1.9.151-FA_a5"
/* Define if vsnprintf() returns the correct value for formatted strings that
don't fit into size allowed */