-rw-r--r--  MANIFEST | 8
-rw-r--r--  Makefile.in | 1
-rw-r--r--  README.txt | 2
-rw-r--r--  c++/Makefile.in | 1
-rw-r--r--  c++/examples/Makefile.in | 1
-rw-r--r--  c++/src/Makefile.in | 1
-rw-r--r--  c++/test/Makefile.in | 1
-rw-r--r--  config/lt_vers.am | 2
-rwxr-xr-x  configure | 89
-rw-r--r--  configure.in | 11
-rw-r--r--  examples/Makefile.in | 1
-rw-r--r--  fortran/Makefile.in | 1
-rw-r--r--  fortran/examples/Makefile.in | 1
-rw-r--r--  fortran/src/Makefile.in | 1
-rw-r--r--  fortran/test/Makefile.in | 1
-rw-r--r--  fortran/testpar/Makefile.in | 1
-rwxr-xr-x  hl/Makefile.in | 1
-rw-r--r--  hl/c++/Makefile.in | 1
-rw-r--r--  hl/c++/examples/Makefile.in | 1
-rw-r--r--  hl/c++/src/Makefile.in | 1
-rw-r--r--  hl/c++/test/Makefile.in | 1
-rw-r--r--  hl/examples/Makefile.in | 1
-rw-r--r--  hl/fortran/Makefile.in | 1
-rw-r--r--  hl/fortran/examples/Makefile.in | 1
-rw-r--r--  hl/fortran/src/Makefile.in | 1
-rw-r--r--  hl/fortran/test/Makefile.in | 1
-rw-r--r--  hl/src/Makefile.in | 1
-rw-r--r--  hl/test/Makefile.in | 1
-rw-r--r--  hl/tools/Makefile.in | 1
-rw-r--r--  hl/tools/gif2h5/Makefile.in | 1
-rw-r--r--  perform/Makefile.in | 1
-rw-r--r--  release_docs/RELEASE.txt | 4
-rw-r--r--  src/H5Dchunk.c | 529
-rw-r--r--  src/H5Dearray.c | 2
-rw-r--r--  src/H5Dmpio.c | 22
-rw-r--r--  src/H5Dpkg.h | 6
-rw-r--r--  src/H5Gdense.c | 111
-rw-r--r--  src/H5public.h | 4
-rw-r--r--  src/Makefile.in | 3
-rw-r--r--  test/Makefile.in | 12
-rw-r--r--  test/h5test.h | 2
-rw-r--r--  test/set_extent.c | 81
-rw-r--r--  testpar/Makefile.in | 1
-rw-r--r--  tools/Makefile.in | 1
-rw-r--r--  tools/h5copy/Makefile.in | 1
-rw-r--r--  tools/h5diff/Makefile.in | 1
-rw-r--r--  tools/h5dump/Makefile.in | 1
-rwxr-xr-x  tools/h5import/Makefile.in | 1
-rw-r--r--  tools/h5jam/Makefile.in | 1
-rw-r--r--  tools/h5ls/Makefile.am | 2
-rw-r--r--  tools/h5ls/Makefile.in | 9
-rw-r--r-- [-rwxr-xr-x]  tools/h5ls/testh5ls.sh.in (renamed from tools/h5ls/testh5ls.sh) | 16
-rw-r--r--  tools/h5repack/Makefile.in | 1
-rw-r--r--  tools/h5stat/Makefile.in | 1
-rw-r--r--  tools/lib/Makefile.in | 1
-rw-r--r--  tools/lib/h5diff_array.c | 21
-rw-r--r--  tools/misc/Makefile.in | 1
-rw-r--r--  tools/testfiles/tdataregbe.ls | 14
-rw-r--r--  tools/testfiles/tdataregle.ls (renamed from tools/testfiles/tdatareg.ls) | 0
-rw-r--r--  tools/testfiles/tvldtypes2be.ls | 22
-rw-r--r--  tools/testfiles/tvldtypes2le.ls (renamed from tools/testfiles/tvldtypes2.ls) | 0
-rw-r--r--  vms/src/h5pubconf.h | 6
-rwxr-xr-x  windows/src/H5pubconf.h | 6
-rw-r--r--  windows/tools/h5ls/testh5ls.bat | 4
64 files changed, 643 insertions, 382 deletions
diff --git a/MANIFEST b/MANIFEST
index a0307d4..c1c0391 100644
--- a/MANIFEST
+++ b/MANIFEST
@@ -1026,7 +1026,7 @@
./tools/h5ls/Makefile.am
./tools/h5ls/Makefile.in
./tools/h5ls/h5ls.c
-./tools/h5ls/testh5ls.sh
+./tools/h5ls/testh5ls.sh.in
./tools/h5copy/testh5copy.sh
./tools/h5copy/Makefile.in
@@ -1328,8 +1328,10 @@
./tools/testfiles/tslink-1.ls
./tools/testfiles/tstr-1.ls
./tools/testfiles/tattr2.ls
-./tools/testfiles/tdatareg.ls
-./tools/testfiles/tvldtypes2.ls
+./tools/testfiles/tvldtypes2le.ls
+./tools/testfiles/tdataregle.ls
+./tools/testfiles/tvldtypes2be.ls
+./tools/testfiles/tdataregbe.ls
#additional test input and output for h5dump XML
diff --git a/Makefile.in b/Makefile.in
index eace2ff..fd4ed36 100644
--- a/Makefile.in
+++ b/Makefile.in
@@ -251,6 +251,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/README.txt b/README.txt
index 9acec5d..6c59bf3 100644
--- a/README.txt
+++ b/README.txt
@@ -1,4 +1,4 @@
-HDF5 version 1.9.32 currently under development
+HDF5 version 1.9.34 currently under development
==> README.txt <==
Messages to be sent to the list should be sent to "<list>@hdfgroup.org".
diff --git a/c++/Makefile.in b/c++/Makefile.in
index e62f3bd..710f178 100644
--- a/c++/Makefile.in
+++ b/c++/Makefile.in
@@ -222,6 +222,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/c++/examples/Makefile.in b/c++/examples/Makefile.in
index db04254..7b29ccf 100644
--- a/c++/examples/Makefile.in
+++ b/c++/examples/Makefile.in
@@ -216,6 +216,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/c++/src/Makefile.in b/c++/src/Makefile.in
index e7c23e2..377dad7 100644
--- a/c++/src/Makefile.in
+++ b/c++/src/Makefile.in
@@ -252,6 +252,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/c++/test/Makefile.in b/c++/test/Makefile.in
index 5aa270c..d852e87 100644
--- a/c++/test/Makefile.in
+++ b/c++/test/Makefile.in
@@ -238,6 +238,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/config/lt_vers.am b/config/lt_vers.am
index f18b5c7..1298c07 100644
--- a/config/lt_vers.am
+++ b/config/lt_vers.am
@@ -17,7 +17,7 @@
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 22
+LT_VERS_REVISION = 24
LT_VERS_AGE = 0
## If the API changes *at all*, increment LT_VERS_INTERFACE and
diff --git a/configure b/configure
index 2e3e34e..dce4da5 100755
--- a/configure
+++ b/configure
@@ -1,7 +1,7 @@
#! /bin/sh
-# From configure.in Id: configure.in 16453 2009-02-08 11:15:02Z hdftest .
+# From configure.in Id: configure.in 16518 2009-02-26 21:21:50Z pvn .
# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.61 for HDF5 1.9.32.
+# Generated by GNU Autoconf 2.61 for HDF5 1.9.34.
#
# Report bugs to <help@hdfgroup.org>.
#
@@ -724,8 +724,8 @@ SHELL=${CONFIG_SHELL-/bin/sh}
# Identity of this package.
PACKAGE_NAME='HDF5'
PACKAGE_TARNAME='hdf5'
-PACKAGE_VERSION='1.9.32'
-PACKAGE_STRING='HDF5 1.9.32'
+PACKAGE_VERSION='1.9.34'
+PACKAGE_STRING='HDF5 1.9.34'
PACKAGE_BUGREPORT='help@hdfgroup.org'
ac_unique_file="src/H5.c"
@@ -961,6 +961,7 @@ CONFIG_DATE
CONFIG_USER
CONFIG_MODE
BYTESEX
+WORDS_BIGENDIAN
CC_VERSION
ROOT
DYNAMIC_DIRS
@@ -1501,7 +1502,7 @@ if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
-\`configure' configures HDF5 1.9.32 to adapt to many kinds of systems.
+\`configure' configures HDF5 1.9.34 to adapt to many kinds of systems.
Usage: $0 [OPTION]... [VAR=VALUE]...
@@ -1571,7 +1572,7 @@ fi
if test -n "$ac_init_help"; then
case $ac_init_help in
- short | recursive ) echo "Configuration of HDF5 1.9.32:";;
+ short | recursive ) echo "Configuration of HDF5 1.9.34:";;
esac
cat <<\_ACEOF
@@ -1753,7 +1754,7 @@ fi
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
-HDF5 configure 1.9.32
+HDF5 configure 1.9.34
generated by GNU Autoconf 2.61
Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
@@ -1767,7 +1768,7 @@ cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.
-It was created by HDF5 $as_me 1.9.32, which was
+It was created by HDF5 $as_me 1.9.34, which was
generated by GNU Autoconf 2.61. Invocation command line was
$ $0 $@
@@ -2462,7 +2463,7 @@ fi
# Define the identity of the package.
PACKAGE='hdf5'
- VERSION='1.9.32'
+ VERSION='1.9.34'
cat >>confdefs.h <<_ACEOF
@@ -8077,13 +8078,13 @@ if test "${lt_cv_nm_interface+set}" = set; then
else
lt_cv_nm_interface="BSD nm"
echo "int some_variable = 0;" > conftest.$ac_ext
- (eval echo "\"\$as_me:8080: $ac_compile\"" >&5)
+ (eval echo "\"\$as_me:8081: $ac_compile\"" >&5)
(eval "$ac_compile" 2>conftest.err)
cat conftest.err >&5
- (eval echo "\"\$as_me:8083: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
+ (eval echo "\"\$as_me:8084: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
(eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
cat conftest.err >&5
- (eval echo "\"\$as_me:8086: output\"" >&5)
+ (eval echo "\"\$as_me:8087: output\"" >&5)
cat conftest.out >&5
if $GREP 'External.*some_variable' conftest.out > /dev/null; then
lt_cv_nm_interface="MS dumpbin"
@@ -9304,7 +9305,7 @@ ia64-*-hpux*)
;;
*-*-irix6*)
# Find out which ABI we are using.
- echo '#line 9307 "configure"' > conftest.$ac_ext
+ echo '#line 9308 "configure"' > conftest.$ac_ext
if { (eval echo "$as_me:$LINENO: \"$ac_compile\"") >&5
(eval $ac_compile) 2>&5
ac_status=$?
@@ -12175,11 +12176,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:12178: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:12179: $lt_compile\"" >&5)
(eval "$lt_compile" 2>conftest.err)
ac_status=$?
cat conftest.err >&5
- echo "$as_me:12182: \$? = $ac_status" >&5
+ echo "$as_me:12183: \$? = $ac_status" >&5
if (exit $ac_status) && test -s "$ac_outfile"; then
# The compiler can only warn and ignore the option if not recognized
# So say no if there are warnings other than the usual output.
@@ -12514,11 +12515,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:12517: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:12518: $lt_compile\"" >&5)
(eval "$lt_compile" 2>conftest.err)
ac_status=$?
cat conftest.err >&5
- echo "$as_me:12521: \$? = $ac_status" >&5
+ echo "$as_me:12522: \$? = $ac_status" >&5
if (exit $ac_status) && test -s "$ac_outfile"; then
# The compiler can only warn and ignore the option if not recognized
# So say no if there are warnings other than the usual output.
@@ -12619,11 +12620,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:12622: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:12623: $lt_compile\"" >&5)
(eval "$lt_compile" 2>out/conftest.err)
ac_status=$?
cat out/conftest.err >&5
- echo "$as_me:12626: \$? = $ac_status" >&5
+ echo "$as_me:12627: \$? = $ac_status" >&5
if (exit $ac_status) && test -s out/conftest2.$ac_objext
then
# The compiler can only warn and ignore the option if not recognized
@@ -12674,11 +12675,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:12677: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:12678: $lt_compile\"" >&5)
(eval "$lt_compile" 2>out/conftest.err)
ac_status=$?
cat out/conftest.err >&5
- echo "$as_me:12681: \$? = $ac_status" >&5
+ echo "$as_me:12682: \$? = $ac_status" >&5
if (exit $ac_status) && test -s out/conftest2.$ac_objext
then
# The compiler can only warn and ignore the option if not recognized
@@ -15510,7 +15511,7 @@ else
lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
lt_status=$lt_dlunknown
cat > conftest.$ac_ext <<_LT_EOF
-#line 15513 "configure"
+#line 15514 "configure"
#include "confdefs.h"
#if HAVE_DLFCN_H
@@ -15606,7 +15607,7 @@ else
lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
lt_status=$lt_dlunknown
cat > conftest.$ac_ext <<_LT_EOF
-#line 15609 "configure"
+#line 15610 "configure"
#include "confdefs.h"
#if HAVE_DLFCN_H
@@ -17634,11 +17635,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:17637: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:17638: $lt_compile\"" >&5)
(eval "$lt_compile" 2>conftest.err)
ac_status=$?
cat conftest.err >&5
- echo "$as_me:17641: \$? = $ac_status" >&5
+ echo "$as_me:17642: \$? = $ac_status" >&5
if (exit $ac_status) && test -s "$ac_outfile"; then
# The compiler can only warn and ignore the option if not recognized
# So say no if there are warnings other than the usual output.
@@ -17733,11 +17734,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:17736: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:17737: $lt_compile\"" >&5)
(eval "$lt_compile" 2>out/conftest.err)
ac_status=$?
cat out/conftest.err >&5
- echo "$as_me:17740: \$? = $ac_status" >&5
+ echo "$as_me:17741: \$? = $ac_status" >&5
if (exit $ac_status) && test -s out/conftest2.$ac_objext
then
# The compiler can only warn and ignore the option if not recognized
@@ -17785,11 +17786,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:17788: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:17789: $lt_compile\"" >&5)
(eval "$lt_compile" 2>out/conftest.err)
ac_status=$?
cat out/conftest.err >&5
- echo "$as_me:17792: \$? = $ac_status" >&5
+ echo "$as_me:17793: \$? = $ac_status" >&5
if (exit $ac_status) && test -s out/conftest2.$ac_objext
then
# The compiler can only warn and ignore the option if not recognized
@@ -19330,11 +19331,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:19333: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:19334: $lt_compile\"" >&5)
(eval "$lt_compile" 2>conftest.err)
ac_status=$?
cat conftest.err >&5
- echo "$as_me:19337: \$? = $ac_status" >&5
+ echo "$as_me:19338: \$? = $ac_status" >&5
if (exit $ac_status) && test -s "$ac_outfile"; then
# The compiler can only warn and ignore the option if not recognized
# So say no if there are warnings other than the usual output.
@@ -19429,11 +19430,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:19432: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:19433: $lt_compile\"" >&5)
(eval "$lt_compile" 2>out/conftest.err)
ac_status=$?
cat out/conftest.err >&5
- echo "$as_me:19436: \$? = $ac_status" >&5
+ echo "$as_me:19437: \$? = $ac_status" >&5
if (exit $ac_status) && test -s out/conftest2.$ac_objext
then
# The compiler can only warn and ignore the option if not recognized
@@ -19481,11 +19482,11 @@ else
-e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-e 's:$: $lt_compiler_flag:'`
- (eval echo "\"\$as_me:19484: $lt_compile\"" >&5)
+ (eval echo "\"\$as_me:19485: $lt_compile\"" >&5)
(eval "$lt_compile" 2>out/conftest.err)
ac_status=$?
cat out/conftest.err >&5
- echo "$as_me:19488: \$? = $ac_status" >&5
+ echo "$as_me:19489: \$? = $ac_status" >&5
if (exit $ac_status) && test -s out/conftest2.$ac_objext
then
# The compiler can only warn and ignore the option if not recognized
@@ -51043,6 +51044,14 @@ else
BYTESEX="little-endian"
fi
+
+if test "X$ac_cv_c_bigendian" = "Xyes"; then
+ WORDS_BIGENDIAN="yes"
+else
+ WORDS_BIGENDIAN="no"
+fi
+
+
PARALLEL=${PARALLEL:-no}
@@ -51437,7 +51446,7 @@ if test -n "$TESTPARALLEL"; then
fi
fi
-ac_config_files="$ac_config_files src/libhdf5.settings Makefile src/Makefile test/Makefile test/testerror.sh testpar/Makefile testpar/testph5.sh perform/Makefile tools/Makefile tools/h5dump/Makefile tools/h5dump/testh5dump.sh tools/h5dump/testh5dumpxml.sh tools/h5import/Makefile tools/h5diff/Makefile tools/h5jam/Makefile tools/h5jam/testh5jam.sh tools/h5repack/Makefile tools/h5repack/h5repack.sh tools/h5ls/Makefile tools/h5copy/Makefile tools/lib/Makefile tools/misc/Makefile tools/misc/h5cc tools/misc/testh5repart.sh tools/h5stat/testh5stat.sh tools/h5stat/Makefile examples/Makefile examples/testh5cc.sh c++/Makefile c++/src/Makefile c++/src/h5c++ c++/test/Makefile c++/examples/Makefile c++/examples/testh5c++.sh fortran/Makefile fortran/src/h5fc fortran/src/Makefile fortran/test/Makefile fortran/testpar/Makefile fortran/examples/Makefile fortran/examples/testh5fc.sh hl/Makefile hl/src/Makefile hl/test/Makefile hl/tools/Makefile hl/tools/gif2h5/Makefile hl/examples/Makefile hl/c++/Makefile hl/c++/src/Makefile hl/c++/test/Makefile hl/c++/examples/Makefile hl/fortran/Makefile hl/fortran/src/Makefile hl/fortran/test/Makefile hl/fortran/examples/Makefile"
+ac_config_files="$ac_config_files src/libhdf5.settings Makefile src/Makefile test/Makefile test/testerror.sh testpar/Makefile testpar/testph5.sh perform/Makefile tools/Makefile tools/h5dump/Makefile tools/h5dump/testh5dump.sh tools/h5dump/testh5dumpxml.sh tools/h5ls/testh5ls.sh tools/h5import/Makefile tools/h5diff/Makefile tools/h5jam/Makefile tools/h5jam/testh5jam.sh tools/h5repack/Makefile tools/h5repack/h5repack.sh tools/h5ls/Makefile tools/h5copy/Makefile tools/lib/Makefile tools/misc/Makefile tools/misc/h5cc tools/misc/testh5repart.sh tools/h5stat/testh5stat.sh tools/h5stat/Makefile examples/Makefile examples/testh5cc.sh c++/Makefile c++/src/Makefile c++/src/h5c++ c++/test/Makefile c++/examples/Makefile c++/examples/testh5c++.sh fortran/Makefile fortran/src/h5fc fortran/src/Makefile fortran/test/Makefile fortran/testpar/Makefile fortran/examples/Makefile fortran/examples/testh5fc.sh hl/Makefile hl/src/Makefile hl/test/Makefile hl/tools/Makefile hl/tools/gif2h5/Makefile hl/examples/Makefile hl/c++/Makefile hl/c++/src/Makefile hl/c++/test/Makefile hl/c++/examples/Makefile hl/fortran/Makefile hl/fortran/src/Makefile hl/fortran/test/Makefile hl/fortran/examples/Makefile"
cat >confcache <<\_ACEOF
@@ -51948,7 +51957,7 @@ exec 6>&1
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
-This file was extended by HDF5 $as_me 1.9.32, which was
+This file was extended by HDF5 $as_me 1.9.34, which was
generated by GNU Autoconf 2.61. Invocation command line was
CONFIG_FILES = $CONFIG_FILES
@@ -52001,7 +52010,7 @@ Report bugs to <bug-autoconf@gnu.org>."
_ACEOF
cat >>$CONFIG_STATUS <<_ACEOF
ac_cs_version="\\
-HDF5 config.status 1.9.32
+HDF5 config.status 1.9.34
configured by $0, generated by GNU Autoconf 2.61,
with options \\"`echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`\\"
@@ -52567,6 +52576,7 @@ do
"tools/h5dump/Makefile") CONFIG_FILES="$CONFIG_FILES tools/h5dump/Makefile" ;;
"tools/h5dump/testh5dump.sh") CONFIG_FILES="$CONFIG_FILES tools/h5dump/testh5dump.sh" ;;
"tools/h5dump/testh5dumpxml.sh") CONFIG_FILES="$CONFIG_FILES tools/h5dump/testh5dumpxml.sh" ;;
+ "tools/h5ls/testh5ls.sh") CONFIG_FILES="$CONFIG_FILES tools/h5ls/testh5ls.sh" ;;
"tools/h5import/Makefile") CONFIG_FILES="$CONFIG_FILES tools/h5import/Makefile" ;;
"tools/h5diff/Makefile") CONFIG_FILES="$CONFIG_FILES tools/h5diff/Makefile" ;;
"tools/h5jam/Makefile") CONFIG_FILES="$CONFIG_FILES tools/h5jam/Makefile" ;;
@@ -52949,6 +52959,7 @@ ac_delim='%!_!# '
for ac_last_try in false false false false false :; do
cat >conf$$subs.sed <<_ACEOF
BYTESEX!$BYTESEX$ac_delim
+WORDS_BIGENDIAN!$WORDS_BIGENDIAN$ac_delim
CC_VERSION!$CC_VERSION$ac_delim
ROOT!$ROOT$ac_delim
DYNAMIC_DIRS!$DYNAMIC_DIRS$ac_delim
@@ -52973,7 +52984,7 @@ LIBOBJS!$LIBOBJS$ac_delim
LTLIBOBJS!$LTLIBOBJS$ac_delim
_ACEOF
- if test `sed -n "s/.*$ac_delim\$/X/p" conf$$subs.sed | grep -c X` = 23; then
+ if test `sed -n "s/.*$ac_delim\$/X/p" conf$$subs.sed | grep -c X` = 24; then
break
elif $ac_last_try; then
{ { echo "$as_me:$LINENO: error: could not make $CONFIG_STATUS" >&5
diff --git a/configure.in b/configure.in
index abe2b3f..c5176a5 100644
--- a/configure.in
+++ b/configure.in
@@ -26,7 +26,7 @@ dnl
dnl NOTE: Don't forget to change the version number here when we do a
dnl release!!!
dnl
-AC_INIT([HDF5], [1.9.32], [help@hdfgroup.org])
+AC_INIT([HDF5], [1.9.34], [help@hdfgroup.org])
AC_CONFIG_SRCDIR([src/H5.c])
AM_CONFIG_HEADER([src/H5config.h])
@@ -3606,6 +3606,14 @@ else
BYTESEX="little-endian"
fi
+
+if test "X$ac_cv_c_bigendian" = "Xyes"; then
+ WORDS_BIGENDIAN="yes"
+else
+ WORDS_BIGENDIAN="no"
+fi
+AC_SUBST([WORDS_BIGENDIAN])
+
dnl Parallel support? (set above except empty if none)
PARALLEL=${PARALLEL:-no}
@@ -3934,6 +3942,7 @@ AC_CONFIG_FILES([src/libhdf5.settings
tools/h5dump/Makefile
tools/h5dump/testh5dump.sh
tools/h5dump/testh5dumpxml.sh
+ tools/h5ls/testh5ls.sh
tools/h5import/Makefile
tools/h5diff/Makefile
tools/h5jam/Makefile
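The configure.in hunk above adds the WORDS_BIGENDIAN substitution and registers tools/h5ls/testh5ls.sh as a configure-generated file (built from the new testh5ls.sh.in in the diffstat). The following is a hypothetical sketch, not the committed testh5ls.sh.in, of how a configured test script can use that substitution to choose between the new big-endian (*be.ls) and little-endian (*le.ls) expected output files; the NATIVE_SUFFIX variable and the echo line are illustrative only.

#! /bin/sh
# Hypothetical sketch: select the expected h5ls output by host byte order.
WORDS_BIGENDIAN="@WORDS_BIGENDIAN@"      # replaced with "yes" or "no" by configure

if test "$WORDS_BIGENDIAN" = "yes"; then
    NATIVE_SUFFIX="be"                   # big-endian host: compare against *be.ls files
else
    NATIVE_SUFFIX="le"                   # little-endian host: compare against *le.ls files
fi

# e.g. tools/testfiles/tvldtypes2${NATIVE_SUFFIX}.ls and
#      tools/testfiles/tdatareg${NATIVE_SUFFIX}.ls become the expected outputs
echo "Using expected output suffix: ${NATIVE_SUFFIX}"
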
diff --git a/examples/Makefile.in b/examples/Makefile.in
index ae26cdb..ff818b6 100644
--- a/examples/Makefile.in
+++ b/examples/Makefile.in
@@ -216,6 +216,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/fortran/Makefile.in b/fortran/Makefile.in
index 53addbe..cf5740f 100644
--- a/fortran/Makefile.in
+++ b/fortran/Makefile.in
@@ -226,6 +226,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/fortran/examples/Makefile.in b/fortran/examples/Makefile.in
index 81fb9f2..9cec95a 100644
--- a/fortran/examples/Makefile.in
+++ b/fortran/examples/Makefile.in
@@ -216,6 +216,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/fortran/src/Makefile.in b/fortran/src/Makefile.in
index 82bf8c2..05cb0cf 100644
--- a/fortran/src/Makefile.in
+++ b/fortran/src/Makefile.in
@@ -281,6 +281,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/fortran/test/Makefile.in b/fortran/test/Makefile.in
index a796ec6..c5c5fb7 100644
--- a/fortran/test/Makefile.in
+++ b/fortran/test/Makefile.in
@@ -278,6 +278,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/fortran/testpar/Makefile.in b/fortran/testpar/Makefile.in
index cbae29f..ea886c8 100644
--- a/fortran/testpar/Makefile.in
+++ b/fortran/testpar/Makefile.in
@@ -229,6 +229,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/Makefile.in b/hl/Makefile.in
index 1c153e7..d642c83 100755
--- a/hl/Makefile.in
+++ b/hl/Makefile.in
@@ -226,6 +226,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/c++/Makefile.in b/hl/c++/Makefile.in
index a425867..859757a 100644
--- a/hl/c++/Makefile.in
+++ b/hl/c++/Makefile.in
@@ -222,6 +222,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/c++/examples/Makefile.in b/hl/c++/examples/Makefile.in
index d729dea..6977f10 100644
--- a/hl/c++/examples/Makefile.in
+++ b/hl/c++/examples/Makefile.in
@@ -216,6 +216,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/c++/src/Makefile.in b/hl/c++/src/Makefile.in
index ef258aa..71c7f85 100644
--- a/hl/c++/src/Makefile.in
+++ b/hl/c++/src/Makefile.in
@@ -241,6 +241,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/c++/test/Makefile.in b/hl/c++/test/Makefile.in
index b8fd420..d1c689e 100644
--- a/hl/c++/test/Makefile.in
+++ b/hl/c++/test/Makefile.in
@@ -232,6 +232,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/examples/Makefile.in b/hl/examples/Makefile.in
index ccb5a79..714d8da 100644
--- a/hl/examples/Makefile.in
+++ b/hl/examples/Makefile.in
@@ -216,6 +216,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/fortran/Makefile.in b/hl/fortran/Makefile.in
index 7ac4958..3923b06 100644
--- a/hl/fortran/Makefile.in
+++ b/hl/fortran/Makefile.in
@@ -226,6 +226,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/fortran/examples/Makefile.in b/hl/fortran/examples/Makefile.in
index 3cbc54a..7cc30e4 100644
--- a/hl/fortran/examples/Makefile.in
+++ b/hl/fortran/examples/Makefile.in
@@ -216,6 +216,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/fortran/src/Makefile.in b/hl/fortran/src/Makefile.in
index 2e0b27f..0b269a4 100644
--- a/hl/fortran/src/Makefile.in
+++ b/hl/fortran/src/Makefile.in
@@ -246,6 +246,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/fortran/test/Makefile.in b/hl/fortran/test/Makefile.in
index a89fdb2..e43ab95 100644
--- a/hl/fortran/test/Makefile.in
+++ b/hl/fortran/test/Makefile.in
@@ -236,6 +236,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/src/Makefile.in b/hl/src/Makefile.in
index 3ac59b8..85a2cbf 100644
--- a/hl/src/Makefile.in
+++ b/hl/src/Makefile.in
@@ -242,6 +242,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/test/Makefile.in b/hl/test/Makefile.in
index d1891ae..7f01027 100644
--- a/hl/test/Makefile.in
+++ b/hl/test/Makefile.in
@@ -252,6 +252,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/tools/Makefile.in b/hl/tools/Makefile.in
index ec18d1e..3801bc9 100644
--- a/hl/tools/Makefile.in
+++ b/hl/tools/Makefile.in
@@ -223,6 +223,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/hl/tools/gif2h5/Makefile.in b/hl/tools/gif2h5/Makefile.in
index f3e70d6..c330309 100644
--- a/hl/tools/gif2h5/Makefile.in
+++ b/hl/tools/gif2h5/Makefile.in
@@ -245,6 +245,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/perform/Makefile.in b/perform/Makefile.in
index 5e43c7f..73ca09c 100644
--- a/perform/Makefile.in
+++ b/perform/Makefile.in
@@ -281,6 +281,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 84bdaf3..b756130 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -1,4 +1,4 @@
-HDF5 version 1.9.32 currently under development
+HDF5 version 1.9.34 currently under development
================================================================================
@@ -142,6 +142,8 @@ Bug Fixes since HDF5-1.8.0 release
Library
-------
+ - Fixed various bugs that could prevent the fill value from being written
+ in certain rare cases. (NAF - 2009/02/26)
- Fixed a bug that prevented more than one dataset chunk from being cached
at a time. (NAF - 2009/02/12)
- Fixed an assertion failure caused by opening an attribute multiple times
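The new fill-value entry above corresponds to the H5Dchunk.c changes below and to the added cases in test/set_extent.c from the diffstat. Here is a minimal sketch of the scenario that entry describes — shrinking and re-extending a chunked dataset that has a user-defined fill value — written against the public HDF5 C API. It is an illustration, not code taken from test/set_extent.c; the file and dataset names are made up and error checking is omitted for brevity.

/* fill_extent_sketch.c -- illustration only, not part of this commit */
#include <stdio.h>
#include "hdf5.h"

int main(void)
{
    hsize_t dims = 100;                 /* current extent */
    hsize_t maxdims = H5S_UNLIMITED;    /* allow the dataset to grow */
    hsize_t chunk_dim = 10;             /* chunk size */
    int     fill = -1;                  /* user-defined fill value */
    int     buf[100];
    hid_t   file, space, dcpl, dset;
    hsize_t u;

    file  = H5Fcreate("fill_extent.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    space = H5Screate_simple(1, &dims, &maxdims);
    dcpl  = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 1, &chunk_dim);
    H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fill);

    dset = H5Dcreate2(file, "data", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);

    for(u = 0; u < 100; u++)
        buf[u] = (int)u;
    H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

    /* Shrink the dataset, then grow it back to its original extent */
    dims = 50;
    H5Dset_extent(dset, &dims);
    dims = 100;
    H5Dset_extent(dset, &dims);

    /* Elements 50..99 were pruned and re-exposed; with a user-defined fill
     * value they should read back as -1, which is the behavior the fix
     * above is concerned with. */
    H5Dread(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
    for(u = 50; u < 100; u++)
        if(buf[u] != fill)
            printf("element %d not filled: %d\n", (int)u, buf[u]);

    H5Dclose(dset);
    H5Pclose(dcpl);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}
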
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index a99998f..5e45886 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -102,14 +102,21 @@
/* Local Typedefs */
/******************/
+/* Stack of chunks to remove during a "prune" iteration */
+typedef struct H5D_chunk_prune_stack_t {
+ H5D_chunk_rec_t rec; /* Chunk record */
+ struct H5D_chunk_prune_stack_t *next; /* Next chunk in stack */
+} H5D_chunk_prune_stack_t;
+
/* Callback info for iteration to prune chunks */
typedef struct H5D_chunk_it_ud1_t {
H5D_chunk_common_ud_t common; /* Common info for B-tree user data (must be first) */
const H5D_chk_idx_info_t *idx_info; /* Chunked index info */
const H5D_io_info_t *io_info; /* I/O info for dataset operation */
const hsize_t *dims; /* New dataset dimensions */
+ const hbool_t *shrunk_dims; /* Dimensions which have been shrunk */
const hsize_t *down_chunks; /* "down" size of number of chunks in each dimension */
- H5SL_t *outside; /* Skip list to hold chunks outside the new dimensions */
+ H5D_chunk_prune_stack_t *rm_stack; /* Stack of chunks outside the new dimensions */
H5S_t *chunk_space; /* Dataspace for a chunk */
uint32_t elmts_per_chunk;/* Elements in chunk */
hsize_t *hyper_start; /* Starting location of hyperslab */
@@ -117,18 +124,6 @@ typedef struct H5D_chunk_it_ud1_t {
hbool_t fb_info_init; /* Whether the fill value buffer has been initialized */
} H5D_chunk_it_ud1_t;
-/* Skip list node for storing chunks to remove during a "prune" iteration */
-typedef struct H5D_chunk_sl_ck_t {
- hsize_t index; /* Index of chunk to remove (must be first) */
- H5D_chunk_rec_t rec; /* Chunk record */
-} H5D_chunk_sl_ck_t;
-
-/* Skip list callback info when destroying list & removing chunks during "prune" */
-typedef struct H5D_chunk_sl_rm_t {
- const H5D_chk_idx_info_t *idx_info; /* I/O info for dataset operation */
- const H5O_layout_t *mesg; /* Layout message */
-} H5D_chunk_sl_rm_t;
-
/* Callback info for iteration to obtain chunk address and the index of the chunk for all chunks in the B-tree. */
typedef struct H5D_chunk_id_ud2_t {
/* down */
@@ -272,7 +267,7 @@ H5FL_DEFINE(H5D_chunk_info_t);
H5FL_BLK_DEFINE_STATIC(chunk);
/* Declare a free list to manage H5D_chunk_sl_ck_t objects */
-H5FL_DEFINE_STATIC(H5D_chunk_sl_ck_t);
+H5FL_DEFINE_STATIC(H5D_chunk_prune_stack_t);
@@ -1375,13 +1370,13 @@ done:
*
*-------------------------------------------------------------------------
*/
-hbool_t
-H5D_chunk_cacheable(const H5D_io_info_t *io_info)
+htri_t
+H5D_chunk_cacheable(const H5D_io_info_t *io_info, haddr_t caddr, hbool_t write_op)
{
const H5D_t *dataset = io_info->dset;
- hbool_t ret_value;
+ htri_t ret_value = FAIL;
- FUNC_ENTER_NOAPI_NOINIT_NOFUNC(H5D_chunk_cacheable)
+ FUNC_ENTER_NOAPI_NOINIT(H5D_chunk_cacheable)
HDassert(io_info);
HDassert(dataset);
@@ -1400,20 +1395,38 @@ H5D_chunk_cacheable(const H5D_io_info_t *io_info)
ret_value = FALSE;
else {
#endif /* H5_HAVE_PARALLEL */
- /* If the chunk is too large to keep in the cache and if the address
- * for the chunk has been defined, then don't load the chunk into the
+ /* If the chunk is too large to keep in the cache and if we don't
+ * need to write the fill value, then don't load the chunk into the
* cache, just write the data to it directly.
*/
H5_CHECK_OVERFLOW(dataset->shared->layout.u.chunk.size, uint32_t, size_t);
- if((size_t)dataset->shared->layout.u.chunk.size > dataset->shared->cache.chunk.nbytes_max)
- ret_value = FALSE;
- else
+ if((size_t)dataset->shared->layout.u.chunk.size > dataset->shared->cache.chunk.nbytes_max) {
+ if(write_op && !H5F_addr_defined(caddr)) {
+ const H5O_fill_t *fill = &(dataset->shared->dcpl_cache.fill); /* Fill value info */
+ H5D_fill_value_t fill_status; /* Fill value status */
+
+ /* Retrieve the fill value status */
+ if(H5P_is_fill_value_defined(fill, &fill_status) < 0)
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't tell if fill value defined")
+
+ /* If the fill value needs to be written then we will need
+ * to use the cache to write the fill value */
+ if(fill->fill_time == H5D_FILL_TIME_ALLOC ||
+ (fill->fill_time == H5D_FILL_TIME_IFSET
+ && fill_status == H5D_FILL_VALUE_USER_DEFINED))
+ ret_value = TRUE;
+ else
+ ret_value = FALSE;
+ } else
+ ret_value = FALSE;
+ } else
ret_value = TRUE;
#ifdef H5_HAVE_PARALLEL
} /* end else */
#endif /* H5_HAVE_PARALLEL */
} /* end else */
+done:
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5D_chunk_cacheable() */
@@ -1545,6 +1558,7 @@ H5D_chunk_read(H5D_io_info_t *io_info, const H5D_type_info_t *type_info,
H5D_io_info_t *chk_io_info; /* Pointer to I/O info object for this chunk */
void *chunk; /* Pointer to locked chunk buffer */
H5D_chunk_ud_t udata; /* B-tree pass-through */
+ htri_t cacheable; /* Whether the chunk is cacheable */
/* Get the actual chunk information from the skip list node */
chunk_info = H5D_CHUNK_GET_NODE_INFO(fm, chunk_node);
@@ -1557,7 +1571,9 @@ H5D_chunk_read(H5D_io_info_t *io_info, const H5D_type_info_t *type_info,
if(H5F_addr_defined(udata.addr) || H5D_chunk_in_cache(io_info->dset, chunk_info->coords, chunk_info->index)
|| !skip_missing_chunks) {
/* Load the chunk into cache and lock it. */
- if(H5D_chunk_cacheable(io_info)) {
+ if((cacheable = H5D_chunk_cacheable(io_info, udata.addr, FALSE)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't tell if chunk is cacheable")
+ if(cacheable) {
/* Pass in chunk's coordinates in a union. */
io_info->store->chunk.offset = chunk_info->coords;
io_info->store->chunk.index = chunk_info->index;
@@ -1599,7 +1615,7 @@ H5D_chunk_read(H5D_io_info_t *io_info, const H5D_type_info_t *type_info,
HGOTO_ERROR(H5E_DATASET, H5E_READERROR, FAIL, "chunked read failed")
/* Release the cache lock on the chunk. */
- if(chunk && H5D_chunk_unlock(io_info, FALSE, idx_hint, chunk, src_accessed_bytes) < 0)
+ if(chunk && H5D_chunk_unlock(io_info, &udata, FALSE, idx_hint, chunk, src_accessed_bytes) < 0)
HGOTO_ERROR(H5E_IO, H5E_READERROR, FAIL, "unable to unlock raw data chunk")
} /* end if */
@@ -1676,6 +1692,7 @@ H5D_chunk_write(H5D_io_info_t *io_info, const H5D_type_info_t *type_info,
H5D_io_info_t *chk_io_info; /* Pointer to I/O info object for this chunk */
void *chunk; /* Pointer to locked chunk buffer */
H5D_chunk_ud_t udata; /* B-tree pass-through */
+ htri_t cacheable; /* Whether the chunk is cacheable */
/* Get the actual chunk information from the skip list node */
chunk_info = H5D_CHUNK_GET_NODE_INFO(fm, chunk_node);
@@ -1684,7 +1701,9 @@ H5D_chunk_write(H5D_io_info_t *io_info, const H5D_type_info_t *type_info,
* simply allocate space instead of load the chunk. */
if(H5D_chunk_get_info(io_info->dset, io_info->dxpl_id, chunk_info->coords, &udata) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "error looking up chunk address")
- if(H5D_chunk_cacheable(io_info)) {
+ if((cacheable = H5D_chunk_cacheable(io_info, udata.addr, TRUE)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't tell if chunk is cacheable")
+ if(cacheable) {
hbool_t entire_chunk = TRUE; /* Whether whole chunk is selected */
/* Pass in chunk's coordinates in a union. */
@@ -1751,7 +1770,7 @@ H5D_chunk_write(H5D_io_info_t *io_info, const H5D_type_info_t *type_info,
HGOTO_ERROR(H5E_DATASET, H5E_READERROR, FAIL, "chunked write failed")
/* Release the cache lock on the chunk. */
- if(chunk && H5D_chunk_unlock(io_info, TRUE, idx_hint, chunk, dst_accessed_bytes) < 0)
+ if(chunk && H5D_chunk_unlock(io_info, &udata, TRUE, idx_hint, chunk, dst_accessed_bytes) < 0)
HGOTO_ERROR(H5E_IO, H5E_READERROR, FAIL, "unable to unlock raw data chunk")
/* Advance to next chunk in list */
@@ -2778,8 +2797,8 @@ done:
*-------------------------------------------------------------------------
*/
herr_t
-H5D_chunk_unlock(const H5D_io_info_t *io_info, hbool_t dirty, unsigned idx_hint,
- void *chunk, uint32_t naccessed)
+H5D_chunk_unlock(const H5D_io_info_t *io_info, const H5D_chunk_ud_t *udata,
+ hbool_t dirty, unsigned idx_hint, void *chunk, uint32_t naccessed)
{
const H5O_layout_t *layout = &(io_info->dset->shared->layout); /* Dataset layout */
const H5D_rdcc_t *rdcc = &(io_info->dset->shared->cache.chunk);
@@ -2789,6 +2808,7 @@ H5D_chunk_unlock(const H5D_io_info_t *io_info, hbool_t dirty, unsigned idx_hint,
FUNC_ENTER_NOAPI_NOINIT(H5D_chunk_unlock)
HDassert(io_info);
+ HDassert(udata);
if(UINT_MAX == idx_hint) {
/*
@@ -2804,7 +2824,7 @@ H5D_chunk_unlock(const H5D_io_info_t *io_info, hbool_t dirty, unsigned idx_hint,
ent.dirty = TRUE;
HDmemcpy(ent.offset, io_info->store->chunk.offset, layout->u.chunk.ndims * sizeof(ent.offset[0]));
HDassert(layout->u.chunk.size > 0);
- ent.chunk_addr = HADDR_UNDEF;
+ ent.chunk_addr = udata->addr;
ent.chunk_size = layout->u.chunk.size;
H5_ASSIGN_OVERFLOW(ent.alloc_size, ent.chunk_size, uint32_t, size_t);
ent.chunk = (uint8_t *)chunk;
@@ -3267,10 +3287,10 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5D_chunk_prune_cb
+ * Function: H5D_chunk_prune_fill
*
- * Purpose: Search for chunks that are no longer inside the pruned
- * dataset's extent
+ * Purpose: Write the fill value to the parts of the chunk that are no
+ * longer part of the dataspace
*
* Return: Non-negative on success/Negative on failure
*
@@ -3279,201 +3299,186 @@ done:
*
*-------------------------------------------------------------------------
*/
-/* ARGSUSED */
-static int
-H5D_chunk_prune_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
+static herr_t
+H5D_chunk_prune_fill(const H5D_chunk_rec_t *chunk_rec, H5D_chunk_it_ud1_t *udata)
{
- H5D_chunk_it_ud1_t *udata = (H5D_chunk_it_ud1_t *)_udata; /* User data */
- H5D_chunk_sl_ck_t *sl_node = NULL; /* Skip list node for chunk to remove */
- unsigned rank; /* Current # of dimensions */
- hbool_t should_delete = FALSE; /* Whether the chunk should be deleted */
- hbool_t needs_fill = FALSE; /* Whether the chunk overlaps the new extent and needs fill valiues */
- unsigned u; /* Local index variable */
- int ret_value = H5_ITER_CONT; /* Return value */
-
- FUNC_ENTER_NOAPI_NOINIT(H5D_chunk_prune_cb)
-
- /* Figure out what chunks are no longer in use for the specified extent and release them */
- rank = udata->common.mesg->u.chunk.ndims - 1;
- for(u = 0; u < rank; u++)
- /* The chunk record points to a chunk of storage that contains the
- * beginning of the logical address space represented by UDATA.
- */
- if(chunk_rec->offset[u] >= udata->dims[u]) {
- /* Indicate that the chunk will be deleted */
- should_delete = TRUE;
-
- /* Break out of loop, we know the chunk is outside the current dimensions */
- break;
- } /* end if */
- /* Check for chunk that overlaps new extent and will need fill values */
- else if((chunk_rec->offset[u] + udata->common.mesg->u.chunk.dim[u]) > udata->dims[u])
- /* Indicate that the chunk needs filling */
- /* (but continue in loop, since it could be outside the extent in
- * another dimension -QAK)
- */
- needs_fill = TRUE;
-
- /* Check for chunk to delete */
- if(should_delete) {
- /* Allocate space for the shared structure */
- if(NULL == (sl_node = H5FL_MALLOC(H5D_chunk_sl_ck_t)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, H5_ITER_ERROR, "memory allocation failed for shared B-tree info")
-
- /* Calculate the index of this chunk */
- if(H5V_chunk_index(rank, chunk_rec->offset, udata->common.mesg->u.chunk.dim, udata->down_chunks, &sl_node->index) < 0)
- HGOTO_ERROR(H5E_IO, H5E_BADRANGE, H5_ITER_ERROR, "can't get chunk index")
+ const H5D_io_info_t *io_info = udata->io_info; /* Local pointer to I/O info */
+ H5D_t *dset = io_info->dset; /* Local pointer to the dataset info */
+ const H5O_layout_t *layout = &(dset->shared->layout); /* Dataset's layout */
+ unsigned rank = udata->common.mesg->u.chunk.ndims - 1; /* Dataset rank */
+ H5S_sel_iter_t chunk_iter; /* Memory selection iteration info */
+ hssize_t sel_nelmts; /* Number of elements in selection */
+ hsize_t count[H5O_LAYOUT_NDIMS]; /* Element count of hyperslab */
+ void *chunk; /* The file chunk */
+ unsigned idx_hint; /* Which chunk we're dealing with */
+ H5D_chunk_ud_t chk_udata; /* User data for locking chunk */
+ uint32_t bytes_accessed; /* Bytes accessed in chunk */
+ unsigned u; /* Local index variable */
+ herr_t ret_value = SUCCEED; /* Return value */
- /* Store the key for the chunk */
- sl_node->rec = *chunk_rec;
+ FUNC_ENTER_NOAPI_NOINIT(H5D_chunk_prune_fill)
- /* Insert the chunk description in the skip list */
- if(H5SL_insert(udata->outside, sl_node, &sl_node->index) < 0)
- HGOTO_ERROR(H5E_IO, H5E_CANTINSERT, H5_ITER_ERROR, "can't insert chunk into skip list")
+ /* Initialize the fill value buffer, if necessary */
+ if(!udata->fb_info_init) {
+ H5_CHECK_OVERFLOW(udata->elmts_per_chunk, uint32_t, size_t);
+ if(H5D_fill_init(&udata->fb_info, NULL, FALSE, NULL, NULL, NULL, NULL,
+ &dset->shared->dcpl_cache.fill,
+ dset->shared->type, dset->shared->type_id, (size_t)udata->elmts_per_chunk,
+ io_info->dxpl_cache->max_temp_buf, io_info->dxpl_id) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't initialize fill buffer info")
+ udata->fb_info_init = TRUE;
} /* end if */
- /* Check for chunk that overlaps the new dataset dimensions and needs filling */
- else if(needs_fill) {
- const H5D_io_info_t *io_info = udata->io_info; /* Local pointer to I/O info */
- H5D_t *dset = io_info->dset; /* Local pointer to the dataset info */
- const H5O_layout_t *layout = &(dset->shared->layout); /* Dataset's layout */
- H5S_sel_iter_t chunk_iter; /* Memory selection iteration info */
- hssize_t sel_nelmts; /* Number of elements in selection */
- hsize_t count[H5O_LAYOUT_NDIMS]; /* Element count of hyperslab */
- void *chunk; /* The file chunk */
- unsigned idx_hint; /* Which chunk we're dealing with */
- H5D_chunk_ud_t chk_udata; /* User data for locking chunk */
- uint32_t bytes_accessed; /* Bytes accessed in chunk */
-
- /* Initialize the fill value buffer, if necessary */
- if(!udata->fb_info_init) {
- H5_CHECK_OVERFLOW(udata->elmts_per_chunk, uint32_t, size_t);
- if(H5D_fill_init(&udata->fb_info, NULL, FALSE, NULL, NULL, NULL, NULL,
- &dset->shared->dcpl_cache.fill,
- dset->shared->type, dset->shared->type_id, (size_t)udata->elmts_per_chunk,
- io_info->dxpl_cache->max_temp_buf, io_info->dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, H5_ITER_ERROR, "can't initialize fill buffer info")
- udata->fb_info_init = TRUE;
- } /* end if */
- /* Compute the # of elements to leave with existing value, in each dimension */
- for(u = 0; u < rank; u++) {
- count[u] = MIN(layout->u.chunk.dim[u], (udata->dims[u] - chunk_rec->offset[u]));
- HDassert(count[u] > 0);
- } /* end for */
+ /* Compute the # of elements to leave with existing value, in each dimension */
+ for(u = 0; u < rank; u++) {
+ count[u] = MIN(layout->u.chunk.dim[u], (udata->dims[u] - chunk_rec->offset[u]));
+ HDassert(count[u] > 0);
+ } /* end for */
- /* Select all elements in chunk, to begin with */
- if(H5S_select_all(udata->chunk_space, TRUE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSELECT, H5_ITER_ERROR, "unable to select space")
+ /* Select all elements in chunk, to begin with */
+ if(H5S_select_all(udata->chunk_space, TRUE) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSELECT, FAIL, "unable to select space")
- /* "Subtract out" the elements to keep */
- if(H5S_select_hyperslab(udata->chunk_space, H5S_SELECT_NOTB, udata->hyper_start, NULL, count, NULL) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSELECT, H5_ITER_ERROR, "unable to select hyperslab")
+ /* "Subtract out" the elements to keep */
+ if(H5S_select_hyperslab(udata->chunk_space, H5S_SELECT_NOTB, udata->hyper_start, NULL, count, NULL) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSELECT, FAIL, "unable to select hyperslab")
- /* Calculate the index of this chunk */
- if(H5V_chunk_index(rank, chunk_rec->offset, layout->u.chunk.dim, udata->down_chunks, &io_info->store->chunk.index) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, H5_ITER_ERROR, "can't get chunk index")
+ /* Calculate the index of this chunk */
+ if(H5V_chunk_index(rank, chunk_rec->offset, layout->u.chunk.dim, udata->down_chunks,
+ &io_info->store->chunk.index) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, H5_ITER_ERROR, "can't get chunk index")
- /* Lock the chunk into the cache, to get a pointer to the chunk buffer */
- /* (Casting away const OK -QAK) */
- io_info->store->chunk.offset = (hsize_t *)chunk_rec->offset;
- chk_udata.common.mesg = layout;
- chk_udata.common.offset = chunk_rec->offset;
- chk_udata.nbytes = chunk_rec->nbytes;
- chk_udata.filter_mask = chunk_rec->filter_mask;
- chk_udata.addr = chunk_rec->chunk_addr;
- if(NULL == (chunk = (void *)H5D_chunk_lock(udata->io_info, &chk_udata, FALSE, &idx_hint)))
- HGOTO_ERROR(H5E_DATASET, H5E_READERROR, H5_ITER_ERROR, "unable to lock raw data chunk")
-
-
- /* Fill the selection in the memory buffer */
- /* Use the size of the elements in the chunk directly instead of */
- /* relying on the fill.size, which might be set to 0 if there is */
- /* no fill-value defined for the dataset -QAK */
-
- /* Get the number of elements in the selection */
- sel_nelmts = H5S_GET_SELECT_NPOINTS(udata->chunk_space);
- HDassert(sel_nelmts >= 0);
- H5_CHECK_OVERFLOW(sel_nelmts, hssize_t, size_t);
-
- /* Check for VL datatype & non-default fill value */
- if(udata->fb_info.has_vlen_fill_type)
- /* Re-fill the buffer to use for this I/O operation */
- if(H5D_fill_refill_vl(&udata->fb_info, (size_t)sel_nelmts, io_info->dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCONVERT, H5_ITER_ERROR, "can't refill fill value buffer")
-
- /* Create a selection iterator for scattering the elements to memory buffer */
- if(H5S_select_iter_init(&chunk_iter, udata->chunk_space, layout->u.chunk.dim[rank]) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, H5_ITER_ERROR, "unable to initialize chunk selection information")
-
- /* Scatter the data into memory */
- if(H5D_scatter_mem(udata->fb_info.fill_buf, udata->chunk_space, &chunk_iter, (size_t)sel_nelmts, io_info->dxpl_cache, chunk/*out*/) < 0) {
- H5S_SELECT_ITER_RELEASE(&chunk_iter);
- HGOTO_ERROR(H5E_DATASET, H5E_READERROR, H5_ITER_ERROR, "scatter failed")
- } /* end if */
+ /* Lock the chunk into the cache, to get a pointer to the chunk buffer */
+ /* (Casting away const OK -QAK) */
+ io_info->store->chunk.offset = (hsize_t *)chunk_rec->offset;
+ chk_udata.common.mesg = layout;
+ chk_udata.common.offset = chunk_rec->offset;
+ chk_udata.nbytes = chunk_rec->nbytes;
+ chk_udata.filter_mask = chunk_rec->filter_mask;
+ chk_udata.addr = chunk_rec->chunk_addr;
+ if(NULL == (chunk = (void *)H5D_chunk_lock(udata->io_info, &chk_udata, FALSE, &idx_hint)))
+ HGOTO_ERROR(H5E_DATASET, H5E_READERROR, FAIL, "unable to lock raw data chunk")
+
+
+ /* Fill the selection in the memory buffer */
+ /* Use the size of the elements in the chunk directly instead of */
+ /* relying on the fill.size, which might be set to 0 if there is */
+ /* no fill-value defined for the dataset -QAK */
+
+ /* Get the number of elements in the selection */
+ sel_nelmts = H5S_GET_SELECT_NPOINTS(udata->chunk_space);
+ HDassert(sel_nelmts >= 0);
+ H5_CHECK_OVERFLOW(sel_nelmts, hssize_t, size_t);
+
+ /* Check for VL datatype & non-default fill value */
+ if(udata->fb_info.has_vlen_fill_type)
+ /* Re-fill the buffer to use for this I/O operation */
+ if(H5D_fill_refill_vl(&udata->fb_info, (size_t)sel_nelmts, io_info->dxpl_id) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCONVERT, FAIL, "can't refill fill value buffer")
+
+ /* Create a selection iterator for scattering the elements to memory buffer */
+ if(H5S_select_iter_init(&chunk_iter, udata->chunk_space, layout->u.chunk.dim[rank]) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize chunk selection information")
+
+ /* Scatter the data into memory */
+ if(H5D_scatter_mem(udata->fb_info.fill_buf, udata->chunk_space, &chunk_iter, (size_t)sel_nelmts, io_info->dxpl_cache, chunk/*out*/) < 0) {
+ H5S_SELECT_ITER_RELEASE(&chunk_iter);
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "scatter failed")
+ } /* end if */
- /* Release the selection iterator */
- if(H5S_SELECT_ITER_RELEASE(&chunk_iter) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTFREE, H5_ITER_ERROR, "Can't release selection iterator")
+ /* Release the selection iterator */
+ if(H5S_SELECT_ITER_RELEASE(&chunk_iter) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTFREE, FAIL, "Can't release selection iterator")
- /* The number of bytes accessed in the chunk */
- /* (i.e. the bytes replaced with fill values) */
- H5_CHECK_OVERFLOW(sel_nelmts, hssize_t, uint32_t);
- bytes_accessed = (uint32_t)sel_nelmts * layout->u.chunk.dim[rank];
+ /* The number of bytes accessed in the chunk */
+ /* (i.e. the bytes replaced with fill values) */
+ H5_CHECK_OVERFLOW(sel_nelmts, hssize_t, uint32_t);
+ bytes_accessed = (uint32_t)sel_nelmts * layout->u.chunk.dim[rank];
- /* Release lock on chunk */
- if(H5D_chunk_unlock(io_info, TRUE, idx_hint, chunk, bytes_accessed) < 0)
- HGOTO_ERROR(H5E_IO, H5E_WRITEERROR, H5_ITER_ERROR, "unable to unlock raw data chunk")
- } /* end else-if */
+ /* Release lock on chunk */
+ if(H5D_chunk_unlock(io_info, &chk_udata, TRUE, idx_hint, chunk, bytes_accessed) < 0)
+ HGOTO_ERROR(H5E_IO, H5E_WRITEERROR, FAIL, "unable to unlock raw data chunk")
done:
- if(ret_value != H5_ITER_CONT && sl_node)
- (void)H5FL_FREE(H5D_chunk_sl_ck_t, sl_node);
-
FUNC_LEAVE_NOAPI(ret_value)
-} /* end H5D_chunk_prune_cb() */
+} /* H5D_chunk_prune_fill */
/*-------------------------------------------------------------------------
- * Function: H5D_chunk_prune_sl_rm_cb
+ * Function: H5D_chunk_prune_cb
*
- * Purpose: Destroy a skip list node for "pruning" chunks, also removes
- * the chunk from the index.
+ * Purpose: Search for chunks that are no longer inside the pruned
+ * dataset's extent
*
* Return: Non-negative on success/Negative on failure
*
- * Programmer: Quincey Koziol, koziol@hdfgroup.org
- * May 3, 2007
+ * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
+ * March 26, 2002
*
*-------------------------------------------------------------------------
*/
-static herr_t
-H5D_chunk_prune_sl_rm_cb(void *item, void UNUSED *key, void *op_data)
+/* ARGSUSED */
+static int
+H5D_chunk_prune_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
{
- H5D_chunk_sl_ck_t *sl_node = (H5D_chunk_sl_ck_t *)item; /* Temporary pointer to chunk to remove */
- H5D_chunk_sl_rm_t *rm_info = (H5D_chunk_sl_rm_t *)op_data; /* Information needed for removing chunk from B-tree */
- H5D_chunk_common_ud_t idx_udata; /* User data for index removal routine */
- herr_t ret_value = H5_ITER_CONT; /* Return value */
+ H5D_chunk_it_ud1_t *udata = (H5D_chunk_it_ud1_t *)_udata; /* User data */
+ H5D_chunk_prune_stack_t *stack_node = NULL; /* Stack node for chunk to remove */
+ unsigned rank; /* Current # of dimensions */
+ hbool_t should_delete = FALSE; /* Whether the chunk should be deleted */
+ hbool_t needs_fill = FALSE; /* Whether the chunk overlaps the new extent and needs fill values */
+ unsigned u; /* Local index variable */
+ int ret_value = H5_ITER_CONT; /* Return value */
- FUNC_ENTER_NOAPI_NOINIT(H5D_chunk_prune_sl_rm_cb)
+ FUNC_ENTER_NOAPI_NOINIT(H5D_chunk_prune_cb)
- /* Sanity checks */
- HDassert(sl_node);
- HDassert(rm_info);
+ /* Figure out what chunks are no longer in use for the specified extent and release them */
+ rank = udata->common.mesg->u.chunk.ndims - 1;
+ for(u = 0; u < rank; u++)
+ /* The chunk record points to a chunk of storage that contains the
+ * beginning of the logical address space represented by UDATA.
+ */
+ if(udata->shrunk_dims[u]) {
+ if(chunk_rec->offset[u] >= udata->dims[u]) {
+ /* Indicate that the chunk will be deleted */
+ should_delete = TRUE;
+
+ /* Break out of loop, we know the chunk is outside the current dimensions */
+ break;
+ } /* end if */
+ /* Check for chunk that overlaps new extent and will need fill values */
+ else if((chunk_rec->offset[u] + udata->common.mesg->u.chunk.dim[u]) > udata->dims[u])
+ /* Indicate that the chunk needs filling */
+ /* (but continue in loop, since it could be outside the extent in
+ * another dimension -QAK)
+ */
+ needs_fill = TRUE;
+ } /* end if */
+
+ /* Check for chunk to delete */
+ if(should_delete) {
+ /* Allocate space for the removal stack node */
+ if(NULL == (stack_node = H5FL_MALLOC(H5D_chunk_prune_stack_t)))
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, H5_ITER_ERROR, "memory allocation failed for removal stack node")
- /* Initialize the user data for the index callback */
- idx_udata.mesg = rm_info->mesg;
- idx_udata.offset = sl_node->rec.offset;
+ /* Store the record for the chunk */
+ stack_node->rec = *chunk_rec;
- /* Remove */
- if((rm_info->idx_info->layout->u.chunk.ops->remove)(rm_info->idx_info, &idx_udata) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTDELETE, H5_ITER_ERROR, "unable to remove chunk entry from index")
+ /* Push the chunk description onto the stack */
+ stack_node->next = udata->rm_stack;
+ udata->rm_stack = stack_node;
+ } /* end if */
+ /* Check for chunk that overlaps the new dataset dimensions and needs filling */
+ else if(needs_fill)
+ /* Write the fill value */
+ if(H5D_chunk_prune_fill(chunk_rec, udata) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, H5_ITER_ERROR, "unable to write fill value")
done:
- (void)H5FL_FREE(H5D_chunk_sl_ck_t, sl_node);
-
+ /* It is currently impossible to fail after the stack node has been
+ * malloc'ed. No need to free it here on failure. */
FUNC_LEAVE_NOAPI(ret_value)
-} /* H5D_chunk_prune_sl_rm_cb() */
+} /* end H5D_chunk_prune_cb() */
/*-------------------------------------------------------------------------
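In the hunks above, the skip list that used to collect chunks falling outside the new extent is replaced by a simple linked stack (H5D_chunk_prune_stack_t): H5D_chunk_prune_cb now only pushes the chunk record onto udata->rm_stack, and the actual removal from the chunk index happens afterwards in H5D_chunk_prune_by_extent (partly outside the portion of the diff shown here), presumably so that the index is never modified while it is being iterated. Below is a generic sketch of that defer-then-remove pattern; the names are illustrative and this is not HDF5's own drain loop.

/* prune_stack_sketch.c -- generic defer-then-remove pattern, illustration only */
#include <stdio.h>
#include <stdlib.h>

typedef struct prune_node_t {
    int                  rec;     /* stand-in for the chunk record */
    struct prune_node_t *next;    /* next node in the stack */
} prune_node_t;

/* Iteration callback: only records what should be removed, never touches
 * the index while it is being iterated. */
static int collect_cb(int rec, prune_node_t **stack)
{
    prune_node_t *node = (prune_node_t *)malloc(sizeof(*node));

    if(NULL == node)
        return -1;
    node->rec  = rec;
    node->next = *stack;          /* push onto the stack */
    *stack     = node;
    return 0;
}

/* After the iteration finishes: pop each node and perform the removal */
static void drain_stack(prune_node_t **stack, void (*remove_rec)(int))
{
    while(*stack) {
        prune_node_t *node = *stack;

        *stack = node->next;      /* pop */
        remove_rec(node->rec);    /* safe now: no iteration in progress */
        free(node);
    }
}

static void remove_rec(int rec) { printf("removing chunk record %d\n", rec); }

int main(void)
{
    prune_node_t *stack = NULL;
    int i;

    for(i = 0; i < 3; i++)        /* pretend the iteration visited three chunks */
        collect_cb(i, &stack);
    drain_stack(&stack, remove_rec);
    return 0;
}
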
@@ -3587,9 +3592,13 @@ H5D_chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dims)
const H5D_rdcc_t *rdcc = &(dset->shared->cache.chunk); /*raw data chunk cache */
H5D_rdcc_ent_t *ent = NULL, *next = NULL; /* Cache entries */
hsize_t curr_dims[H5O_LAYOUT_NDIMS]; /* Current dataspace dimensions */
+ hbool_t shrunk_dims[H5O_LAYOUT_NDIMS]; /* Dimensions which have shrunk */
H5D_chunk_it_ud1_t udata; /* Chunk index iterator user data */
hbool_t udata_init = FALSE; /* Whether the chunk index iterator user data has been initialized */
- H5D_chunk_sl_rm_t rm_info; /* User data for skip list destroy callback */
+ hbool_t needs_fill; /* Whether we need to write the fill value */
+ H5D_chunk_prune_stack_t *fill_stack = NULL; /* Stack of chunks to fill */
+ H5D_chunk_prune_stack_t *tmp_stack; /* Temporary stack node pointer */
+ H5D_chunk_common_ud_t idx_udata; /* User data for index removal routine */
H5S_t *chunk_space = NULL; /* Dataspace for a chunk */
hsize_t chunk_dims[H5O_LAYOUT_NDIMS]; /* Chunk dimensions */
hsize_t chunks[H5O_LAYOUT_NDIMS]; /* Current number of chunks in each dimension */
@@ -3611,6 +3620,10 @@ H5D_chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dims)
H5D_COPS_BTREE == layout->u.chunk.ops));
HDassert(dxpl_cache);
+ /* set the removal stack pointer in udata to NULL, so if the function fails
+ * early it will not try to free the nonexistent stack */
+ udata.rm_stack = NULL;
+
/* Fill the DXPL cache values for later use */
if(H5D_get_dxpl_cache(dxpl_id, &dxpl_cache) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't fill dxpl cache")
@@ -3621,35 +3634,17 @@ H5D_chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dims)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
curr_dims[rank] = layout->u.chunk.dim[rank];
- /*-------------------------------------------------------------------------
- * Figure out what chunks are no longer in use for the specified extent
- * and release them from the linked list raw data cache
- *-------------------------------------------------------------------------
- */
- for(ent = rdcc->head; ent; ent = next) {
- /* Get pointer to next extry in cache, in case this one is evicted */
- next = ent->next;
-
- /* Check for chunk offset outside of new dimensions */
- for(u = 0; u < rank; u++)
- if((hsize_t)ent->offset[u] >= curr_dims[u]) {
- /* Evict the entry from the cache, but do not flush it to disk */
- if(H5D_chunk_cache_evict(dset, dxpl_id, dxpl_cache, ent, FALSE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTREMOVE, FAIL, "unable to evict chunk")
-
- /* Break out of loop, chunk is evicted */
- break;
- } /* end if */
- } /* end for */
-
/* Round up to the next integer # of chunks, to accommodate partial chunks */
+ /* Use current dims because the indices have already been updated! -NAF */
/* (also compute the number of elements per chunk) */
/* (also copy the chunk dimensions into 'hsize_t' array for creating dataspace) */
+ /* (also compute the dimensions which have been shrunk) */
elmts_per_chunk = 1;
for(u = 0; u < rank; u++) {
- chunks[u] = ((old_dims[u] + layout->u.chunk.dim[u]) - 1) / layout->u.chunk.dim[u];
+ chunks[u] = ((curr_dims[u] + layout->u.chunk.dim[u]) - 1) / layout->u.chunk.dim[u];
elmts_per_chunk *= layout->u.chunk.dim[u];
chunk_dims[u] = layout->u.chunk.dim[u];
+ shrunk_dims[u] = curr_dims[u] < old_dims[u];
} /* end for */
/* Get the "down" sizes for each dimension */
@@ -3680,26 +3675,90 @@ H5D_chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dims)
udata.io_info = &chk_io_info;
udata.idx_info = &idx_info;
udata.dims = curr_dims;
+ udata.shrunk_dims = shrunk_dims;
udata.down_chunks = down_chunks;
udata.elmts_per_chunk = elmts_per_chunk;
udata.chunk_space = chunk_space;
udata.hyper_start = hyper_start;
udata_init = TRUE;
- /* Initialize the skip list that will hold the chunks outside the dimensions */
- if(NULL == (udata.outside = H5SL_create(H5SL_TYPE_HSIZE, 0.5, (size_t)H5D_CHUNK_DEFAULT_SKIPLIST_HEIGHT)))
- HGOTO_ERROR(H5E_IO, H5E_CANTCREATE, FAIL, "can't create skip list for chunks outside new dimensions")
+ /*-------------------------------------------------------------------------
+ * Figure out what chunks are no longer in use for the specified extent
+ * and release them from the linked list raw data cache
+ *-------------------------------------------------------------------------
+ */
+ for(ent = rdcc->head; ent; ent = next) {
+ /* Get pointer to next entry in cache, in case this one is evicted */
+ next = ent->next;
+
+ needs_fill = FALSE;
+
+ /* Check for chunk offset outside of new dimensions */
+ for(u = 0; u < rank; u++) {
+ if((hsize_t)ent->offset[u] >= curr_dims[u]) {
+ /* Evict the entry from the cache, but do not flush it to disk */
+ if(H5D_chunk_cache_evict(dset, dxpl_id, dxpl_cache, ent, FALSE) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTREMOVE, FAIL, "unable to evict chunk")
+
+ /* We don't need to write the fill value */
+ needs_fill = FALSE;
+
+ /* Break out of loop, chunk is evicted */
+ break;
+ } else if(!H5F_addr_defined(ent->chunk_addr) && shrunk_dims[u]
+ && (ent->offset[u] + chunk_dims[u]) > curr_dims[u])
+ /* We need to write the fill value to the unused parts of chunk */
+ needs_fill = TRUE;
+ } /* end for */
+
+ if(needs_fill) {
+ /* Allocate space for the stack node */
+ if(NULL == (tmp_stack = H5FL_MALLOC(H5D_chunk_prune_stack_t)))
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, H5_ITER_ERROR, "memory allocation failed for stack node")
+
+ /* Set up chunk record for fill routine */
+ tmp_stack->rec.nbytes = ent->chunk_size;
+ HDmemcpy(tmp_stack->rec.offset, ent->offset, sizeof(tmp_stack->rec.offset));
+ tmp_stack->rec.filter_mask = 0; /* Since the chunk is already in cache this doesn't matter */
+ tmp_stack->rec.chunk_addr = ent->chunk_addr;
+
+ /* Push the chunk description onto the stack */
+ tmp_stack->next = fill_stack;
+ fill_stack = tmp_stack;
+ } /* end if */
+ } /* end for */
+
+ /* Traverse the stack of chunks to be filled, filling each. We will free
+ * the nodes later in the "done" section. */
+ tmp_stack = fill_stack;
+ while(tmp_stack) {
+ /* Write the fill value */
+ if(H5D_chunk_prune_fill(&(tmp_stack->rec), &udata) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, H5_ITER_ERROR, "unable to write fill value")
+
+ /* Advance the stack pointer */
+ tmp_stack = tmp_stack->next;
+ } /* end while */
/* Iterate over the chunks */
if((dset->shared->layout.u.chunk.ops->iterate)(&idx_info, H5D_chunk_prune_cb, &udata) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve prune chunks from index")
- /* Set up user data for skip list callback */
- rm_info.idx_info = &idx_info;
- rm_info.mesg = layout;
+ /* Traverse the stack of chunks to be deleted, removing each. We will free
+ * the nodes later in the "done" section. */
+ idx_udata.mesg = layout;
+ tmp_stack = udata.rm_stack;
+ while(tmp_stack) {
+ /* Update the offset in idx_udata */
+ idx_udata.offset = tmp_stack->rec.offset;
- /* Destroy the skip list, deleting the chunks in the callback */
- H5SL_destroy(udata.outside, H5D_chunk_prune_sl_rm_cb, &rm_info);
+ /* Remove the chunk from disk */
+ if((layout->u.chunk.ops->remove)(&idx_info, &idx_udata) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTDELETE, H5_ITER_ERROR, "unable to remove chunk entry from index")
+
+ /* Advance the stack pointer */
+ tmp_stack = tmp_stack->next;
+ } /* end while */
/* Reset any cached chunk info for this dataset */
H5D_chunk_cinfo_cache_reset(&dset->shared->cache.chunk.last);
@@ -3713,6 +3772,24 @@ done:
HDONE_ERROR(H5E_DATASET, H5E_CANTFREE, FAIL, "Can't release fill buffer info")
} /* end if */
+ /* Free stack of filled chunks */
+ tmp_stack = fill_stack;
+ while(tmp_stack) {
+ /* Free the stack node and advance the stack pointer */
+ tmp_stack = tmp_stack->next;
+ (void)H5FL_FREE(H5D_chunk_prune_stack_t, fill_stack);
+ fill_stack = tmp_stack;
+ } /* end while */
+
+ /* Free stack of removed chunks */
+ tmp_stack = udata.rm_stack;
+ while(tmp_stack) {
+ /* Free the stack node and advance the stack pointer */
+ tmp_stack = tmp_stack->next;
+ (void)H5FL_FREE(H5D_chunk_prune_stack_t, udata.rm_stack);
+ udata.rm_stack = tmp_stack;
+ } /* end while */
+
FUNC_LEAVE_NOAPI(ret_value)
} /* end H5D_chunk_prune_by_extent() */
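
Both passes above collect chunks on simple singly linked stacks (allocated from the H5FL free lists) and only walk and free them later in the done: block, so no index entry is removed while the index is still being iterated and cleanup runs even on error. A self-contained sketch of the same push/free pattern using plain malloc/free instead of H5FL, with hypothetical names:

    #include <stdlib.h>
    #include <string.h>

    typedef struct prune_node {
        unsigned long      offset[4];   /* chunk offset, illustrative rank <= 4 */
        struct prune_node *next;
    } prune_node_t;

    /* Push a chunk description onto the stack; returns 0 on success,
     * -1 on allocation failure. */
    static int
    stack_push(prune_node_t **top, const unsigned long offset[4])
    {
        prune_node_t *node = malloc(sizeof(*node));

        if(!node)
            return -1;
        memcpy(node->offset, offset, sizeof(node->offset));
        node->next = *top;
        *top = node;
        return 0;
    }

    /* Free every node; safe to call on an empty (NULL) stack, which is why
     * the real routine sets udata.rm_stack to NULL before anything can fail. */
    static void
    stack_free(prune_node_t *top)
    {
        while(top) {
            prune_node_t *next = top->next;

            free(top);
            top = next;
        }
    }
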
@@ -4703,7 +4780,7 @@ H5D_nonexistent_readvv(const H5D_io_info_t *io_info,
size = mem_len_arr[u];
/* Compute offset in memory */
- buf = (unsigned char *)io_info->u.rbuf + mem_offset_arr[v];
+ buf = (unsigned char *)io_info->u.rbuf + mem_offset_arr[u];
/* Initialize the fill value buffer */
if(H5D_fill_init(&fb_info, buf, FALSE,
diff --git a/src/H5Dearray.c b/src/H5Dearray.c
index 5ba1c5f..59df243 100644
--- a/src/H5Dearray.c
+++ b/src/H5Dearray.c
@@ -924,7 +924,7 @@ H5D_earray_idx_iterate(const H5D_chk_idx_info_t *idx_info,
{
H5EA_t *ea; /* Pointer to extensible array structure */
H5EA_stat_t ea_stat; /* Extensible array statistics */
- int ret_value; /* Return value */
+ int ret_value = H5_ITER_CONT; /* Return value */
FUNC_ENTER_NOAPI_NOINIT(H5D_earray_idx_iterate)
diff --git a/src/H5Dmpio.c b/src/H5Dmpio.c
index 19be413..e4dd8b5 100644
--- a/src/H5Dmpio.c
+++ b/src/H5Dmpio.c
@@ -1209,6 +1209,7 @@ if(H5DEBUG(D))
void *chunk; /* Pointer to the data chunk in cache */
uint32_t accessed_bytes; /* Total accessed size in a chunk */
unsigned idx_hint = 0; /* Cache index hint */
+ htri_t cacheable; /* Whether the chunk is cacheable */
/* Switch to independent I/O */
if(last_xfer_mode != H5FD_MPIO_INDEPENDENT) {
@@ -1224,7 +1225,10 @@ if(H5DEBUG(D))
HGOTO_ERROR(H5E_STORAGE, H5E_CANTGET, FAIL, "couldn't get chunk info from skipped list")
/* Load the chunk into cache and lock it. */
- if(H5D_chunk_cacheable(io_info)) {
+ if((cacheable = H5D_chunk_cacheable(io_info, udata.addr,
+ io_info->op_type == H5D_IO_OP_WRITE)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't tell if chunk is cacheable")
+ if(cacheable) {
hbool_t entire_chunk = TRUE; /* Whether whole chunk is selected */
/* Compute # of bytes accessed in chunk */
@@ -1268,7 +1272,7 @@ if(H5DEBUG(D))
} /* end else */
/* Release the cache lock on the chunk. */
- if(chunk && H5D_chunk_unlock(io_info, (io_info->op_type == H5D_IO_OP_WRITE), idx_hint, chunk, accessed_bytes) < 0)
+ if(chunk && H5D_chunk_unlock(io_info, &udata, (io_info->op_type == H5D_IO_OP_WRITE), idx_hint, chunk, accessed_bytes) < 0)
HGOTO_ERROR(H5E_IO, H5E_READERROR, FAIL, "unable to unlock raw data chunk")
} /* end if */
#else /* !defined(H5_MPI_COMPLEX_DERIVED_DATATYPE_WORKS) || !defined(H5_MPI_SPECIAL_COLLECTIVE_IO_WORKS) */
@@ -1439,17 +1443,21 @@ if(H5DEBUG(D)) {
/* Independent I/O */
if(make_ind) {
- void *chunk; /* Pointer to the data chunk in cache */
+ void *chunk; /* Pointer to the data chunk in cache */
H5D_io_info_t *chk_io_info; /* Pointer to I/O info object for this chunk */
- uint32_t accessed_bytes = 0; /* Total accessed size in a chunk */
- unsigned idx_hint = 0; /* Cache index hint */
+ uint32_t accessed_bytes = 0; /* Total accessed size in a chunk */
+ unsigned idx_hint = 0; /* Cache index hint */
+ htri_t cacheable; /* Whether the chunk is cacheable */
/* Switch to independent I/O */
if(H5D_ioinfo_xfer_mode(io_info, dx_plist, H5FD_MPIO_INDEPENDENT) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't switch to independent I/O")
/* Load the chunk into cache and lock it. */
- if(H5D_chunk_cacheable(io_info)) {
+ if((cacheable = H5D_chunk_cacheable(io_info, udata.addr,
+ io_info->op_type == H5D_IO_OP_WRITE)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't tell if chunk is cacheable")
+ if(cacheable) {
hbool_t entire_chunk = TRUE; /* Whether whole chunk is selected */
/* Compute # of bytes accessed in chunk */
@@ -1494,7 +1502,7 @@ if(H5DEBUG(D)) {
/* Release the cache lock on the chunk. */
if(chunk)
- if(H5D_chunk_unlock(io_info, (io_info->op_type == H5D_IO_OP_WRITE), idx_hint, chunk, accessed_bytes) < 0)
+ if(H5D_chunk_unlock(io_info, &udata, (io_info->op_type == H5D_IO_OP_WRITE), idx_hint, chunk, accessed_bytes) < 0)
HGOTO_ERROR(H5E_IO, H5E_READERROR, FAIL, "unable to unlock raw data chunk")
} /* end if */
else { /*collective I/O */
diff --git a/src/H5Dpkg.h b/src/H5Dpkg.h
index 4d5bd45..4122326 100644
--- a/src/H5Dpkg.h
+++ b/src/H5Dpkg.h
@@ -574,7 +574,8 @@ H5_DLL herr_t H5D_contig_copy(H5F_t *f_src, const H5O_layout_t *layout_src, H5F_
H5O_layout_t *layout_dst, H5T_t *src_dtype, H5O_copy_t *cpy_info, hid_t dxpl_id);
/* Functions that operate on chunked dataset storage */
-H5_DLL hbool_t H5D_chunk_cacheable(const H5D_io_info_t *io_info);
+H5_DLL htri_t H5D_chunk_cacheable(const H5D_io_info_t *io_info, haddr_t caddr,
+ hbool_t write_op);
H5_DLL herr_t H5D_chunk_cinfo_cache_reset(H5D_chunk_cached_t *last);
H5_DLL herr_t H5D_chunk_create(H5D_t *dset /*in,out*/, hid_t dxpl_id);
H5_DLL herr_t H5D_chunk_init(H5F_t *f, hid_t dapl_id, hid_t dxpl_id, const H5D_t *dset);
@@ -584,7 +585,8 @@ H5_DLL herr_t H5D_chunk_get_info(const H5D_t *dset, hid_t dxpl_id,
H5_DLL void *H5D_chunk_lock(const H5D_io_info_t *io_info,
H5D_chunk_ud_t *udata, hbool_t relax, unsigned *idx_hint/*in,out*/);
H5_DLL herr_t H5D_chunk_unlock(const H5D_io_info_t *io_info,
- hbool_t dirty, unsigned idx_hint, void *chunk, uint32_t naccessed);
+ const H5D_chunk_ud_t *udata, hbool_t dirty, unsigned idx_hint, void *chunk,
+ uint32_t naccessed);
H5_DLL herr_t H5D_chunk_flush(H5D_t *dset, hid_t dxpl_id, unsigned flags);
H5_DLL herr_t H5D_chunk_allocated(H5D_t *dset, hid_t dxpl_id, hsize_t *nbytes);
H5_DLL herr_t H5D_chunk_allocate(H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite);
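
H5D_chunk_cacheable now returns htri_t instead of hbool_t, so it can signal failure as well as a yes/no answer, and the H5Dmpio.c callers above first test for a negative value before branching on the flag. A small sketch of that tri-state idiom under assumed names (chunk_is_cacheable is hypothetical, not the HDF5 routine):

    #include <stdio.h>

    typedef int tri_t;   /* >0 true, 0 false, <0 error -- same idea as htri_t */

    /* Hypothetical stand-in: decide whether a chunk should go through the
     * cache, reporting errors separately from a plain "no". */
    static tri_t
    chunk_is_cacheable(long chunk_addr, int write_op, long cache_nbytes)
    {
        if(cache_nbytes < 0)
            return -1;                /* bad configuration: error, not "no"   */
        if(chunk_addr < 0 && !write_op)
            return 0;                 /* chunk not allocated and we only read */
        return 1;
    }

    int main(void)
    {
        tri_t cacheable = chunk_is_cacheable(1024, 1, 1048576);

        if(cacheable < 0) {
            fprintf(stderr, "can't tell if chunk is cacheable\n");
            return 1;
        }
        puts(cacheable ? "lock the chunk in the cache" : "do direct chunk I/O");
        return 0;
    }
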
diff --git a/src/H5Gdense.c b/src/H5Gdense.c
index 2bca4fa..6cc15b8 100644
--- a/src/H5Gdense.c
+++ b/src/H5Gdense.c
@@ -656,29 +656,34 @@ H5G_dense_lookup_by_idx(H5F_t *f, hid_t dxpl_id, const H5O_linfo_t *linfo,
/* Determine the address of the index to use */
if(idx_type == H5_INDEX_NAME) {
- /* Check if "native" order is OK - since names are hashed, getting them
- * in strictly increasing or decreasing order requires building a
- * table and sorting it.
+ /* Since names are hashed, getting them in strictly increasing or
+ * decreasing order requires building a table and sorting it.
+ * If the order is native, use the B-tree for names.
*/
- if(order == H5_ITER_NATIVE) {
- bt2_addr = linfo->name_bt2_addr;
- bt2_class = H5G_BT2_NAME;
- HDassert(H5F_addr_defined(bt2_addr));
- } /* end if */
- else
- bt2_addr = HADDR_UNDEF;
+ bt2_addr = HADDR_UNDEF;
} /* end if */
else {
HDassert(idx_type == H5_INDEX_CRT_ORDER);
/* This address may not be defined if creation order is tracked, but
* there's no index on it. If there's no v2 B-tree that indexes
- * the links, a table will be built.
+ * the links and the order is native, use the B-tree for names.
+ * Otherwise, build a table.
*/
bt2_addr = linfo->corder_bt2_addr;
bt2_class = H5G_BT2_CORDER;
} /* end else */
+ /* If the order is native and there's no B-tree for indexing the links,
+ * use the B-tree for names instead of building a table to speed up the
+ * process.
+ */
+ if(order == H5_ITER_NATIVE && !H5F_addr_defined(bt2_addr)) {
+ bt2_addr = linfo->name_bt2_addr;
+ bt2_class = H5G_BT2_NAME;
+ HDassert(H5F_addr_defined(bt2_addr));
+ } /* end if */
+
/* If there is an index defined for the field, use it */
if(H5F_addr_defined(bt2_addr)) {
H5G_bt2_ud_lbi_t udata; /* User data for v2 B-tree link lookup */
@@ -953,31 +958,37 @@ H5G_dense_iterate(H5F_t *f, hid_t dxpl_id, const H5O_linfo_t *linfo,
/* Determine the address of the index to use */
if(idx_type == H5_INDEX_NAME) {
- /* Check if "native" order is OK - since names are hashed, getting them
- * in strictly increasing or decreasing order requires building a
- * table and sorting it.
+ /* Since names are hashed, getting them in strictly increasing or
+ * decreasing order requires building a table and sorting it. If
+ * the order is native, use the B-tree for names.
*/
- if(order == H5_ITER_NATIVE) {
- HDassert(H5F_addr_defined(linfo->name_bt2_addr));
- bt2_addr = linfo->name_bt2_addr;
- bt2_class = H5G_BT2_NAME;
- } /* end if */
- else
- bt2_addr = HADDR_UNDEF;
+ bt2_addr = HADDR_UNDEF;
} /* end if */
else {
HDassert(idx_type == H5_INDEX_CRT_ORDER);
/* This address may not be defined if creation order is tracked, but
* there's no index on it. If there's no v2 B-tree that indexes
- * the links, a table will be built.
+ * the links and the order is native, use the B-tree for names.
+ * Otherwise, build a table.
*/
bt2_addr = linfo->corder_bt2_addr;
bt2_class = H5G_BT2_CORDER;
} /* end else */
+ /* If the order is native and there's no B-tree for indexing the links,
+ * use the B-tree for names instead of building a table to speed up the
+ * process.
+ */
+ if(order == H5_ITER_NATIVE && !H5F_addr_defined(bt2_addr)) {
+ HDassert(H5F_addr_defined(linfo->name_bt2_addr));
+ bt2_addr = linfo->name_bt2_addr;
+ bt2_class = H5G_BT2_NAME;
+ } /* end if */
+
/* Check on iteration order */
- if(order == H5_ITER_NATIVE && H5F_addr_defined(bt2_addr)) {
+ if(order == H5_ITER_NATIVE) {
+ HDassert(H5F_addr_defined(bt2_addr));
H5G_bt2_ud_it_t udata; /* User data for iterator callback */
/* Open the fractal heap */
@@ -1146,29 +1157,34 @@ H5G_dense_get_name_by_idx(H5F_t *f, hid_t dxpl_id, H5O_linfo_t *linfo,
/* Determine the address of the index to use */
if(idx_type == H5_INDEX_NAME) {
- /* Check if "native" order is OK - since names are hashed, getting them
- * in strictly increasing or decreasing order requires building a
- * table and sorting it.
+ /* Since names are hashed, getting them in strictly increasing or
+ * decreasing order requires building a table and sorting it. If
+ * the order is native, use the B-tree for names.
*/
- if(order == H5_ITER_NATIVE) {
- bt2_addr = linfo->name_bt2_addr;
- bt2_class = H5G_BT2_NAME;
- HDassert(H5F_addr_defined(bt2_addr));
- } /* end if */
- else
- bt2_addr = HADDR_UNDEF;
+ bt2_addr = HADDR_UNDEF;
} /* end if */
else {
HDassert(idx_type == H5_INDEX_CRT_ORDER);
/* This address may not be defined if creation order is tracked, but
* there's no index on it. If there's no v2 B-tree that indexes
- * the links, a table will be built.
+ * the links and the order is native, use the B-tree for names.
+ * Otherwise, build a table.
*/
bt2_addr = linfo->corder_bt2_addr;
bt2_class = H5G_BT2_CORDER;
} /* end else */
+ /* If the order is native and there's no B-tree for indexing the links,
+ * use the B-tree for names instead of building a table to speed up the
+ * process.
+ */
+ if(order == H5_ITER_NATIVE && !H5F_addr_defined(bt2_addr)) {
+ bt2_addr = linfo->name_bt2_addr;
+ bt2_class = H5G_BT2_NAME;
+ HDassert(H5F_addr_defined(bt2_addr));
+ } /* end if */
+
/* If there is an index defined for the field, use it */
if(H5F_addr_defined(bt2_addr)) {
H5G_bt2_ud_gnbi_t udata; /* User data for v2 B-tree callback */
@@ -1561,29 +1577,34 @@ H5G_dense_remove_by_idx(H5F_t *f, hid_t dxpl_id, const H5O_linfo_t *linfo,
/* Determine the address of the index to use */
if(idx_type == H5_INDEX_NAME) {
- /* Check if "native" order is OK - since names are hashed, getting them
- * in strictly increasing or decreasing order requires building a
- * table and sorting it.
+ /* Since names are hashed, getting them in strictly increasing or
+ * decreasing order requires building a table and sorting it. If
+ * the order is native, use the B-tree for names.
*/
- if(order == H5_ITER_NATIVE) {
- bt2_addr = linfo->name_bt2_addr;
- bt2_class = H5G_BT2_NAME;
- HDassert(H5F_addr_defined(bt2_addr));
- } /* end if */
- else
- bt2_addr = HADDR_UNDEF;
+ bt2_addr = HADDR_UNDEF;
} /* end if */
else {
HDassert(idx_type == H5_INDEX_CRT_ORDER);
/* This address may not be defined if creation order is tracked, but
* there's no index on it. If there's no v2 B-tree that indexes
- * the links, a table will be built.
+ * the links and the order is native, use the B-tree for names.
+ * Otherwise, build a table.
*/
bt2_addr = linfo->corder_bt2_addr;
bt2_class = H5G_BT2_CORDER;
} /* end else */
+ /* If the order is native and there's no B-tree for indexing the links,
+ * use the B-tree for names instead of building a table to speed up the
+ * process.
+ */
+ if(order == H5_ITER_NATIVE && !H5F_addr_defined(bt2_addr)) {
+ bt2_addr = linfo->name_bt2_addr;
+ bt2_class = H5G_BT2_NAME;
+ HDassert(H5F_addr_defined(bt2_addr));
+ } /* end if */
+
/* If there is an index defined for the field, use it */
if(H5F_addr_defined(bt2_addr)) {
H5G_bt2_ud_rmbi_t udata; /* User data for v2 B-tree record removal */
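
The four H5Gdense routines above now share one selection rule: requests sorted by name never use a B-tree directly (names are hashed, so a sorted traversal needs a table anyway), creation-order requests take the creation-order B-tree when it exists, and a native-order request with no usable B-tree falls back to the name B-tree instead of building a table. A condensed sketch of that decision with hypothetical types, not the library's structures:

    #include <stdbool.h>

    #define ADDR_UNDEF (-1L)

    typedef enum { IDX_NAME, IDX_CRT_ORDER } idx_type_t;
    typedef enum { ORDER_INC, ORDER_DEC, ORDER_NATIVE } order_t;

    typedef struct {
        long name_bt2_addr;    /* address of the name-indexed v2 B-tree       */
        long corder_bt2_addr;  /* creation-order B-tree address, ADDR_UNDEF
                                  if creation order is tracked but unindexed  */
    } link_info_t;

    /* Pick the B-tree that drives the lookup; ADDR_UNDEF means "build a table". */
    static long
    select_index(idx_type_t idx_type, order_t order, const link_info_t *linfo)
    {
        long bt2_addr;

        if(idx_type == IDX_NAME)
            bt2_addr = ADDR_UNDEF;              /* sorted-by-name needs a table */
        else
            bt2_addr = linfo->corder_bt2_addr;  /* may be undefined             */

        /* Native order doesn't care which index supplies the links, so reuse
         * the always-present name B-tree rather than building a table. */
        if(order == ORDER_NATIVE && bt2_addr == ADDR_UNDEF)
            bt2_addr = linfo->name_bt2_addr;

        return bt2_addr;
    }
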
diff --git a/src/H5public.h b/src/H5public.h
index e13091b..e0a9049 100644
--- a/src/H5public.h
+++ b/src/H5public.h
@@ -71,10 +71,10 @@ extern "C" {
/* Version numbers */
#define H5_VERS_MAJOR 1 /* For major interface/format changes */
#define H5_VERS_MINOR 9 /* For minor interface/format changes */
-#define H5_VERS_RELEASE 32 /* For tweaks, bug-fixes, or development */
+#define H5_VERS_RELEASE 34 /* For tweaks, bug-fixes, or development */
#define H5_VERS_SUBRELEASE "" /* For pre-releases like snap0 */
/* Empty string for real releases. */
-#define H5_VERS_INFO "HDF5 library version: 1.9.32" /* Full version string */
+#define H5_VERS_INFO "HDF5 library version: 1.9.34" /* Full version string */
#define H5check() H5check_version(H5_VERS_MAJOR,H5_VERS_MINOR, \
H5_VERS_RELEASE)
diff --git a/src/Makefile.in b/src/Makefile.in
index dc6e787..e49d68b 100644
--- a/src/Makefile.in
+++ b/src/Makefile.in
@@ -305,6 +305,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -408,7 +409,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog
# Add libtool shared library version numbers to the HDF5 library
# See libtool versioning documentation online.
LT_VERS_INTERFACE = 6
-LT_VERS_REVISION = 22
+LT_VERS_REVISION = 24
LT_VERS_AGE = 0
H5detect_CFLAGS = -g
diff --git a/test/Makefile.in b/test/Makefile.in
index 45f3aa7..29f30d0 100644
--- a/test/Makefile.in
+++ b/test/Makefile.in
@@ -523,6 +523,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -630,11 +631,12 @@ TRACE = perl $(top_srcdir)/bin/trace
CHECK_CLEANFILES = *.chkexe *.chklog *.clog cmpd_dset.h5 \
compact_dataset.h5 dataset.h5 dset_offset.h5 \
max_compact_dataset.h5 simple.h5 set_local.h5 random_chunks.h5 \
- huge_chunks.h5 extend.h5 istore.h5 extlinks*.h5 frspace.h5 \
- links*.h5 tfile[1-4].h5 th5s[1-3].h5 lheap.h5 fheap.h5 ohdr.h5 \
- stab.h5 extern_[1-3].h5 extern_[1-4][ab].raw gheap[0-4].h5 \
- dt_arith[1-2] links.h5 links[0-6]*.h5 extlinks[0-15].h5 tmp \
- big.data big[0-9][0-9][0-9][0-9][0-9].h5 stdio.h5 sec2.h5 \
+ huge_chunks.h5 chunk_cache.h5 big_chunk.h5 extend.h5 istore.h5 \
+ extlinks*.h5 frspace.h5 links*.h5 tfile[1-4].h5 th5s[1-3].h5 \
+ lheap.h5 fheap.h5 ohdr.h5 stab.h5 extern_[1-3].h5 \
+ extern_[1-4][ab].raw gheap[0-4].h5 dt_arith[1-2] links.h5 \
+ links[0-6]*.h5 extlinks[0-15].h5 tmp big.data \
+ big[0-9][0-9][0-9][0-9][0-9].h5 stdio.h5 sec2.h5 \
dtypes[1-8].h5 dt_arith[1-2].h5 tattr.h5 tselect.h5 mtime.h5 \
unlink.h5 unicode.h5 coord.h5 fillval_[0-9].h5 fillval.raw \
mount_[0-9].h5 testmeta.h5 ttime.h5 trefer[1-3].h5 tvltypes.h5 \
diff --git a/test/h5test.h b/test/h5test.h
index 64d43cc..e3b3596 100644
--- a/test/h5test.h
+++ b/test/h5test.h
@@ -99,7 +99,7 @@ extern MPI_Info h5_io_info_g; /* MPI INFO object for IO */
* the H5_FAILED() macro is invoked automatically when an API function fails.
*/
#define TESTING(WHAT) {printf("Testing %-62s",WHAT); fflush(stdout);}
-#define TESTING2(WHAT) {printf(" Testing %-62s",WHAT); fflush(stdout);}
+#define TESTING_2(WHAT) {printf(" Testing %-62s",WHAT); fflush(stdout);}
#define PASSED() {puts(" PASSED");fflush(stdout);}
#define H5_FAILED() {puts("*FAILED*");fflush(stdout);}
#define H5_WARNING() {puts("*WARNING*");fflush(stdout);}
diff --git a/test/set_extent.c b/test/set_extent.c
index 87ff91c..b4e61b4 100644
--- a/test/set_extent.c
+++ b/test/set_extent.c
@@ -46,12 +46,12 @@ const char *FILENAME[] = {
#define RANK1 1
#define RANK2 2
#define RANK3 3
-#define DIM0 4
-#define DIM1 4
-#define DIM2 4
-#define DIMS0 2
-#define DIMS1 2
-#define DIMS2 2
+#define DIM0 5
+#define DIM1 5
+#define DIM2 5
+#define DIMS0 3
+#define DIMS1 3
+#define DIMS2 3
#define DIME0 7
#define DIME1 7
#define DIME2 7
@@ -89,6 +89,7 @@ int main( void )
hid_t fapl; /* file access property list */
hid_t fapl2; /* file access property list w/latest format set */
hbool_t new_format; /* Whether to use the latest file format */
+ hbool_t chunk_cache; /* Whether to enable chunk caching */
int nerrors = 0;
h5_reset();
@@ -97,6 +98,9 @@ int main( void )
/* Copy the file access property list */
if((fapl2 = H5Pcopy(fapl)) < 0) TEST_ERROR
+ /* Disable chunk caching on fapl2 */
+ if(H5Pset_cache(fapl2, 521, 0, 0, 0.) < 0) TEST_ERROR
+
/* Set the "use the latest version of the format" bounds for creating objects in the file */
if(H5Pset_libver_bounds(fapl2, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) TEST_ERROR
@@ -104,19 +108,44 @@ int main( void )
for(new_format = FALSE; new_format <= TRUE; new_format++) {
hid_t my_fapl;
- /* Set the FAPL for the type of format */
- if(new_format) {
- puts("Testing with new file format:");
- my_fapl = fapl2;
- } /* end if */
- else {
- puts("Testing with old file format:");
- my_fapl = fapl;
- } /* end else */
-
- nerrors += do_ranks( my_fapl ) < 0 ? 1 : 0;
- nerrors += test_external( my_fapl ) < 0 ? 1 : 0;
- nerrors += do_layouts( my_fapl ) < 0 ? 1 : 0;
+ /* Test chunked datasets with and without chunk cache */
+ for(chunk_cache = FALSE; chunk_cache <= TRUE; chunk_cache++) {
+ /* Output message about the type of format */
+ if(new_format)
+ printf("Testing with new file format");
+ else
+ printf("Testing with old file format");
+
+ /* Set the FAPL for the chunk cache settings */
+ if(chunk_cache) {
+ puts(" and chunk cache enabled:");
+ my_fapl = fapl;
+ } /* end if */
+ else {
+ puts(" and chunk cache disabled:");
+ my_fapl = fapl2;
+ } /* end else */
+
+ /* Set the FAPL for the type of format */
+ if(new_format) {
+ /* Set the "use the latest version of the format" bounds for
+ * creating objects in the file */
+ if(H5Pset_libver_bounds(my_fapl, H5F_LIBVER_LATEST,
+ H5F_LIBVER_LATEST) < 0) TEST_ERROR
+ } /* end if */
+ else
+ /* Set the "use the earliest version of the format" bounds for
+ * creating objects in the file */
+ if(H5Pset_libver_bounds(my_fapl, H5F_LIBVER_EARLIEST,
+ H5F_LIBVER_LATEST) < 0) TEST_ERROR
+
+ /* Tests which use chunked datasets */
+ nerrors += do_ranks( my_fapl ) < 0 ? 1 : 0;
+ } /* end for */
+
+ /* Tests which do not use chunked datasets */
+ nerrors += test_external( fapl ) < 0 ? 1 : 0;
+ nerrors += do_layouts( fapl ) < 0 ? 1 : 0;
} /* end for */
/* Close 2nd FAPL */
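
The test now runs the chunked-dataset cases twice per file format, once with the default chunk cache and once with the cache disabled on the copied fapl; H5Pset_cache with zero slots and zero bytes is what turns the raw data chunk cache off. A minimal sketch of building such a fapl (make_uncached_fapl is a hypothetical helper; H5Pset_cache itself is the real call shown in the hunk above):

    #include "hdf5.h"

    /* Create a file access property list whose raw data chunk cache is
     * disabled, as the test does for its "chunk cache disabled" passes. */
    static hid_t
    make_uncached_fapl(void)
    {
        hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);

        if(fapl < 0)
            return -1;

        /* mdc_nelmts (ignored in 1.8+), 0 chunk slots, 0 cache bytes,
         * preemption policy 0.0: every chunk access bypasses the cache. */
        if(H5Pset_cache(fapl, 521, 0, 0, 0.0) < 0) {
            H5Pclose(fapl);
            return -1;
        }
        return fapl;
    }
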
@@ -153,7 +182,7 @@ static int do_ranks( hid_t fapl )
hbool_t set_istore_k = 0;
- TESTING2("with fill value, no compression");
+ TESTING_2("with fill value, no compression");
do_fillvalue = 1;
@@ -187,7 +216,7 @@ static int do_ranks( hid_t fapl )
PASSED();
- TESTING2("no fill value, no compression");
+ TESTING_2("no fill value, no compression");
do_fillvalue = 0;
@@ -208,7 +237,7 @@ static int do_ranks( hid_t fapl )
PASSED();
- TESTING2("with fill value, with compression");
+ TESTING_2("with fill value, with compression");
#ifdef H5_HAVE_FILTER_DEFLATE
@@ -246,7 +275,7 @@ static int do_ranks( hid_t fapl )
SKIPPED();
#endif
- TESTING2("no fill value, with compression");
+ TESTING_2("no fill value, with compression");
#ifdef H5_HAVE_FILTER_DEFLATE
@@ -270,7 +299,7 @@ static int do_ranks( hid_t fapl )
SKIPPED();
#endif
- TESTING2("with non-default indexed storage B-tree");
+ TESTING_2("with non-default indexed storage B-tree");
do_fillvalue = 1;
set_istore_k = 1;
@@ -299,7 +328,7 @@ error:
static int do_layouts( hid_t fapl )
{
- TESTING2("storage layout use");
+ TESTING_2("storage layout use");
if (test_layouts( H5D_COMPACT, fapl ) < 0)
{
@@ -2080,7 +2109,7 @@ static int test_external( hid_t fapl )
}
}
- TESTING2("external file use");
+ TESTING_2("external file use");
/* create a new file */
h5_fixname(FILENAME[3], fapl, filename, sizeof filename);
diff --git a/testpar/Makefile.in b/testpar/Makefile.in
index d409fbe..249f3f8 100644
--- a/testpar/Makefile.in
+++ b/testpar/Makefile.in
@@ -259,6 +259,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/Makefile.in b/tools/Makefile.in
index b2cdc7c..52ece40 100644
--- a/tools/Makefile.in
+++ b/tools/Makefile.in
@@ -223,6 +223,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/h5copy/Makefile.in b/tools/h5copy/Makefile.in
index 3cefdc5..5928706 100644
--- a/tools/h5copy/Makefile.in
+++ b/tools/h5copy/Makefile.in
@@ -240,6 +240,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/h5diff/Makefile.in b/tools/h5diff/Makefile.in
index fe8bf84..dc0b25c 100644
--- a/tools/h5diff/Makefile.in
+++ b/tools/h5diff/Makefile.in
@@ -247,6 +247,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/h5dump/Makefile.in b/tools/h5dump/Makefile.in
index 5aa15ea..1ecb430 100644
--- a/tools/h5dump/Makefile.in
+++ b/tools/h5dump/Makefile.in
@@ -245,6 +245,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/h5import/Makefile.in b/tools/h5import/Makefile.in
index b8b5176..3cf4217 100755
--- a/tools/h5import/Makefile.in
+++ b/tools/h5import/Makefile.in
@@ -240,6 +240,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/h5jam/Makefile.in b/tools/h5jam/Makefile.in
index fb28865..81425ec 100644
--- a/tools/h5jam/Makefile.in
+++ b/tools/h5jam/Makefile.in
@@ -251,6 +251,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/h5ls/Makefile.am b/tools/h5ls/Makefile.am
index c2222a1..439ef07 100644
--- a/tools/h5ls/Makefile.am
+++ b/tools/h5ls/Makefile.am
@@ -24,7 +24,7 @@ include $(top_srcdir)/config/commence.am
INCLUDES=-I$(top_srcdir)/src -I$(top_srcdir)/tools/lib
# Test programs and scripts
-TEST_SCRIPT=$(srcdir)/testh5ls.sh
+TEST_SCRIPT=testh5ls.sh
check_SCRIPTS=$(TEST_SCRIPT)
SCRIPT_DEPEND=h5ls$(EXEEXT)
diff --git a/tools/h5ls/Makefile.in b/tools/h5ls/Makefile.in
index f4fc5e8..771bd20 100644
--- a/tools/h5ls/Makefile.in
+++ b/tools/h5ls/Makefile.in
@@ -50,7 +50,7 @@ POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \
- $(top_srcdir)/config/commence.am \
+ $(srcdir)/testh5ls.sh.in $(top_srcdir)/config/commence.am \
$(top_srcdir)/config/conclude.am
bin_PROGRAMS = h5ls$(EXEEXT)
TESTS = $(check_SCRIPTS)
@@ -61,7 +61,7 @@ am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
CONFIG_HEADER = $(top_builddir)/src/H5config.h
-CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_FILES = testh5ls.sh
am__installdirs = "$(DESTDIR)$(bindir)"
binPROGRAMS_INSTALL = $(INSTALL_PROGRAM)
PROGRAMS = $(bin_PROGRAMS)
@@ -234,6 +234,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
@@ -338,7 +339,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog
INCLUDES = -I$(top_srcdir)/src -I$(top_srcdir)/tools/lib
# Test programs and scripts
-TEST_SCRIPT = $(srcdir)/testh5ls.sh
+TEST_SCRIPT = testh5ls.sh
check_SCRIPTS = $(TEST_SCRIPT)
SCRIPT_DEPEND = h5ls$(EXEEXT)
@@ -393,6 +394,8 @@ $(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+testh5ls.sh: $(top_builddir)/config.status $(srcdir)/testh5ls.sh.in
+ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
install-binPROGRAMS: $(bin_PROGRAMS)
@$(NORMAL_INSTALL)
test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)"
diff --git a/tools/h5ls/testh5ls.sh b/tools/h5ls/testh5ls.sh.in
index 8194bfb..ddb09b5 100755..100644
--- a/tools/h5ls/testh5ls.sh
+++ b/tools/h5ls/testh5ls.sh.in
@@ -22,6 +22,8 @@ CMP='cmp -s'
DIFF='diff -c'
NLINES=20 # Max. lines of output to display if test fails
+WORDS_BIGENDIAN="@WORDS_BIGENDIAN@"
+
nerrors=0
verbose=yes
@@ -174,10 +176,20 @@ TOOLTEST tattr2.ls 0 -w80 -v -S tattr2.h5
TOOLTEST nosuchfile.ls 0 nosuchfile.h5
# test for variable length data types in verbose mode
-TOOLTEST tvldtypes2.ls 0 -v tvldtypes1.h5
+if test $WORDS_BIGENDIAN != "yes"; then
+ TOOLTEST tvldtypes2le.ls 0 -v tvldtypes1.h5
+else
+ TOOLTEST tvldtypes2be.ls 0 -v tvldtypes1.h5
+fi
+
# test for dataset region references data types in verbose mode
-TOOLTEST tdatareg.ls 0 -v tdatareg.h5
+if test $WORDS_BIGENDIAN != "yes"; then
+ TOOLTEST tdataregle.ls 0 -v tdatareg.h5
+else
+ TOOLTEST tdataregbe.ls 0 -v tdatareg.h5
+fi
+
if test $nerrors -eq 0 ; then
echo "All h5ls tests passed."
diff --git a/tools/h5repack/Makefile.in b/tools/h5repack/Makefile.in
index 9f7c184..59b3668 100644
--- a/tools/h5repack/Makefile.in
+++ b/tools/h5repack/Makefile.in
@@ -254,6 +254,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/h5stat/Makefile.in b/tools/h5stat/Makefile.in
index 52c886b..67b1280 100644
--- a/tools/h5stat/Makefile.in
+++ b/tools/h5stat/Makefile.in
@@ -243,6 +243,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/lib/Makefile.in b/tools/lib/Makefile.in
index 721a393..b08c0a3 100644
--- a/tools/lib/Makefile.in
+++ b/tools/lib/Makefile.in
@@ -239,6 +239,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/lib/h5diff_array.c b/tools/lib/h5diff_array.c
index 5d57a52..ebf541e 100644
--- a/tools/lib/h5diff_array.c
+++ b/tools/lib/h5diff_array.c
@@ -156,13 +156,22 @@ static void h5diff_print_char(char ch);
* NaN detection
*-------------------------------------------------------------------------
*/
+
+#if H5_SIZEOF_LONG_DOUBLE !=0
typedef enum dtype_t
{
- FLT_FLOAT, FLT_DOUBLE,
-#if H5_SIZEOF_LONG_DOUBLE !=0
- FLT_LDOUBLE,
-#endif
+ FLT_FLOAT,
+ FLT_DOUBLE,
+ FLT_LDOUBLE
} dtype_t;
+#else
+
+typedef enum dtype_t
+{
+ FLT_FLOAT,
+ FLT_DOUBLE
+} dtype_t;
+#endif
static int my_isnan(dtype_t type, void *val);
@@ -356,9 +365,9 @@ hsize_t diff_array( void *_mem1,
* H5T_COMPOUND
* Recursively call this function for each member
* H5T_ARRAY
- * Recursively call this function for each element�
+ * Recursively call this function for each element
* H5T_VLEN
- * Recursively call this function for each element�
+ * Recursively call this function for each element
* H5T_STRING
* compare byte by byte in a cycle from 0 to type_size. this type_size is the
value obtained by the get_size function but it is the string length for
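
The dtype_t change keeps one complete enum per preprocessor branch instead of wrapping a single enumerator in #if/#endif; a likely motivation is that the old layout always left a trailing comma before the closing brace, which pre-C99 compilers reject. A tiny sketch of the same pattern with a stand-in macro:

    #include <stdio.h>

    #define HAS_LONG_DOUBLE 1   /* stand-in for H5_SIZEOF_LONG_DOUBLE != 0 */

    /* One complete enum per branch avoids a trailing comma after the last
     * enumerator, which strict C89/C90 compilers do not accept. */
    #if HAS_LONG_DOUBLE
    typedef enum { FLT_FLOAT, FLT_DOUBLE, FLT_LDOUBLE } float_kind_t;
    #else
    typedef enum { FLT_FLOAT, FLT_DOUBLE } float_kind_t;
    #endif

    int main(void)
    {
        float_kind_t k = FLT_DOUBLE;

        printf("kind = %d\n", (int)k);
        return 0;
    }
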
diff --git a/tools/misc/Makefile.in b/tools/misc/Makefile.in
index b52ef09..e70a93f 100644
--- a/tools/misc/Makefile.in
+++ b/tools/misc/Makefile.in
@@ -258,6 +258,7 @@ USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
USE_FILTER_SZIP = @USE_FILTER_SZIP@
USINGMEMCHECKER = @USINGMEMCHECKER@
VERSION = @VERSION@
+WORDS_BIGENDIAN = @WORDS_BIGENDIAN@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
diff --git a/tools/testfiles/tdataregbe.ls b/tools/testfiles/tdataregbe.ls
new file mode 100644
index 0000000..47b6994
--- /dev/null
+++ b/tools/testfiles/tdataregbe.ls
@@ -0,0 +1,14 @@
+#############################
+ output for 'h5ls -v tdatareg.h5'
+#############################
+Opened "tdatareg.h5" with sec2 driver.
+Dataset1 Dataset {4/4}
+ Location: 1:1284
+ Links: 1
+ Storage: information not available
+ Type: dataset region reference
+Dataset2 Dataset {10/10, 10/10}
+ Location: 1:744
+ Links: 1
+ Storage: 100 logical bytes, 100 allocated bytes, 100.00% utilization
+ Type: native unsigned char
diff --git a/tools/testfiles/tdatareg.ls b/tools/testfiles/tdataregle.ls
index b07b274..b07b274 100644
--- a/tools/testfiles/tdatareg.ls
+++ b/tools/testfiles/tdataregle.ls
diff --git a/tools/testfiles/tvldtypes2be.ls b/tools/testfiles/tvldtypes2be.ls
new file mode 100644
index 0000000..ee84185
--- /dev/null
+++ b/tools/testfiles/tvldtypes2be.ls
@@ -0,0 +1,22 @@
+#############################
+ output for 'h5ls -v tvldtypes1.h5'
+#############################
+Opened "tvldtypes1.h5" with sec2 driver.
+Dataset1.0 Dataset {4/4}
+ Location: 1:976
+ Links: 1
+ Storage: information not available
+ Type: variable length of
+ 32-bit little-endian integer
+Dataset2.0 Dataset {4/4}
+ Location: 1:1576
+ Links: 1
+ Storage: information not available
+ Type: variable length of
+ IEEE 32-bit little-endian float
+Dataset3.0 Dataset {SCALAR}
+ Location: 1:6272
+ Links: 1
+ Storage: information not available
+ Type: variable length of
+ 32-bit little-endian integer
diff --git a/tools/testfiles/tvldtypes2.ls b/tools/testfiles/tvldtypes2le.ls
index 62dfa61..62dfa61 100644
--- a/tools/testfiles/tvldtypes2.ls
+++ b/tools/testfiles/tvldtypes2le.ls
diff --git a/vms/src/h5pubconf.h b/vms/src/h5pubconf.h
index dfaa7e6..da54a4f 100644
--- a/vms/src/h5pubconf.h
+++ b/vms/src/h5pubconf.h
@@ -480,13 +480,13 @@
#define H5_PACKAGE_NAME "HDF5"
/* Define to the full name and version of this package. */
-#define H5_PACKAGE_STRING "HDF5 1.9.32"
+#define H5_PACKAGE_STRING "HDF5 1.9.34"
/* Define to the one symbol short name of this package. */
#define H5_PACKAGE_TARNAME "hdf5"
/* Define to the version of this package. */
-#define H5_PACKAGE_VERSION "1.9.32"
+#define H5_PACKAGE_VERSION "1.9.34"
/* Width for printf() for type `long long' or `__int64', use `ll' */
#define H5_PRINTF_LL_WIDTH "ll"
@@ -639,7 +639,7 @@
/* #undef H5_USING_MEMCHECKER */
/* Version number of package */
-#define H5_VERSION "1.9.32"
+#define H5_VERSION "1.9.34"
/* Define if vsnprintf() returns the correct value for formatted strings that
don't fit into size allowed */
diff --git a/windows/src/H5pubconf.h b/windows/src/H5pubconf.h
index 95b53d8..bf591d7 100755
--- a/windows/src/H5pubconf.h
+++ b/windows/src/H5pubconf.h
@@ -479,13 +479,13 @@
#define H5_PACKAGE_NAME "HDF5"
/* Define to the full name and version of this package. */
-#define H5_PACKAGE_STRING "HDF5 1.9.32"
+#define H5_PACKAGE_STRING "HDF5 1.9.34"
/* Define to the one symbol short name of this package. */
#define H5_PACKAGE_TARNAME "hdf5"
/* Define to the version of this package. */
-#define H5_PACKAGE_VERSION "1.9.32"
+#define H5_PACKAGE_VERSION "1.9.34"
/* Width for printf() for type `long long' or `__int64', use `ll' */
#define H5_PRINTF_LL_WIDTH "I64"
@@ -642,7 +642,7 @@
/* #undef H5_USING_MEMCHECKER */
/* Version number of package */
-#define H5_VERSION "1.9.32"
+#define H5_VERSION "1.9.34"
/* Define if vsnprintf() returns the correct value for formatted strings that
don't fit into size allowed */
diff --git a/windows/tools/h5ls/testh5ls.bat b/windows/tools/h5ls/testh5ls.bat
index 469e8df..bf0cb5e 100644
--- a/windows/tools/h5ls/testh5ls.bat
+++ b/windows/tools/h5ls/testh5ls.bat
@@ -219,10 +219,10 @@ rem ############################################################################
call :tooltest nosuchfile.ls 0 nosuchfile.h5
rem test for variable length data types in verbose mode
- call :tooltest tvldtypes2.ls 0 -v tvldtypes1.h5
+ call :tooltest tvldtypes2le.ls 0 -v tvldtypes1.h5
rem test for dataset region references data types in verbose mode
- call :tooltest tdatareg.ls 0 -v tdatareg.h5
+ call :tooltest tdataregle.ls 0 -v tdatareg.h5
if %nerrors% equ 0 (
echo.All h5ls tests passed.