path: root/tools/h5stat
author      Vailin Choi <vchoi@hdfgroup.org>    2007-07-27 17:36:36 (GMT)
committer   Vailin Choi <vchoi@hdfgroup.org>    2007-07-27 17:36:36 (GMT)
commit      d65d8084ff8068befb28251c9dbf62ded4f24f5c (patch)
tree        47d845473d9a46b68725f46e70e022cab6c391bb /tools/h5stat
parent      4a5b97b930b69cd9b9897ca4698c26f9ca0dc203 (diff)
download    hdf5-d65d8084ff8068befb28251c9dbf62ded4f24f5c.zip
            hdf5-d65d8084ff8068befb28251c9dbf62ded4f24f5c.tar.gz
            hdf5-d65d8084ff8068befb28251c9dbf62ded4f24f5c.tar.bz2
[svn-r14023] Purpose:
Reorganization.
Description:
Move the h5stat tool into its own directory.
Platform tested:
kagiso.
Diffstat (limited to 'tools/h5stat')
-rw-r--r--   tools/h5stat/Makefile.am                           52
-rw-r--r--   tools/h5stat/Makefile.in                          891
-rw-r--r--   tools/h5stat/h5stat.c                            1377
-rw-r--r--   tools/h5stat/h5stat_gentest.c                      84
-rw-r--r--   tools/h5stat/testfiles/h5stat_filters-F.ddl        21
-rw-r--r--   tools/h5stat/testfiles/h5stat_filters-d.ddl        33
-rw-r--r--   tools/h5stat/testfiles/h5stat_filters-dT.ddl       42
-rw-r--r--   tools/h5stat/testfiles/h5stat_filters-file.ddl     13
-rw-r--r--   tools/h5stat/testfiles/h5stat_filters-g.ddl         9
-rw-r--r--   tools/h5stat/testfiles/h5stat_filters.ddl          78
-rw-r--r--   tools/h5stat/testfiles/h5stat_filters.h5          bin 0 -> 46272 bytes
-rw-r--r--   tools/h5stat/testfiles/h5stat_help1.ddl            24
-rw-r--r--   tools/h5stat/testfiles/h5stat_help2.ddl            24
-rw-r--r--   tools/h5stat/testfiles/h5stat_newgrat.ddl          74
-rw-r--r--   tools/h5stat/testfiles/h5stat_newgrat.h5          bin 0 -> 72236 bytes
-rw-r--r--   tools/h5stat/testfiles/h5stat_tsohm.ddl            72
-rw-r--r--   tools/h5stat/testfiles/h5stat_tsohm.h5            bin 0 -> 3603 bytes
-rw-r--r--   tools/h5stat/testh5stat.sh.in                     135
18 files changed, 2929 insertions, 0 deletions
diff --git a/tools/h5stat/Makefile.am b/tools/h5stat/Makefile.am
new file mode 100644
index 0000000..b1c2024
--- /dev/null
+++ b/tools/h5stat/Makefile.am
@@ -0,0 +1,52 @@
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+#
+# HDF5 Library Makefile(.in)
+#
+
+include $(top_srcdir)/config/commence.am
+
+# Include src directory
+INCLUDES=-I$(top_srcdir)/src -I$(top_srcdir)/tools/lib
+
+#test script and program
+TEST_PROG=h5stat_gentest
+TEST_SCRIPT=testh5stat.sh
+
+check_PROGRAMS=$(TEST_PROG)
+check_SCRIPTS=$(TEST_SCRIPT)
+SCRIPT_DEPEND=h5stat$(EXEEXT)
+
+# These are our main targets, the tools
+bin_PROGRAMS=h5stat
+bin_SCRIPTS=
+
+# Tell automake to clean h5redeploy script
+CLEANFILES=
+
+# Temporary files. *.h5 are generated by h5stat_gentest. They should be
+# copied to the testfiles/ directory if an update is required. fst_family*.h5
+# and scd_family*.h5 were created by setting the HDF5_NOCLEANUP variable.
+CHECK_CLEANFILES+=*.h5 ../testfiles/fst_family*.h5 ../testfiles/scd_family*.h5
+
+# These were generated by configure. Remove them only when distclean.
+DISTCLEANFILES=testh5stat.sh
+
+# All programs rely on hdf5 library and h5tools library
+LDADD=$(LIBH5TOOLS) $(LIBHDF5)
+
+include $(top_srcdir)/config/conclude.am
diff --git a/tools/h5stat/Makefile.in b/tools/h5stat/Makefile.in
new file mode 100644
index 0000000..33a3060
--- /dev/null
+++ b/tools/h5stat/Makefile.in
@@ -0,0 +1,891 @@
+# Makefile.in generated by automake 1.10 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
+# 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+# HDF5 Library Makefile(.in)
+#
+
+
+VPATH = @srcdir@
+pkgdatadir = $(datadir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \
+ $(srcdir)/testh5stat.sh.in $(top_srcdir)/config/commence.am \
+ $(top_srcdir)/config/conclude.am
+check_PROGRAMS = $(am__EXEEXT_1)
+bin_PROGRAMS = h5stat$(EXEEXT)
+TESTS = $(check_PROGRAMS) $(check_SCRIPTS)
+subdir = tools/h5stat
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/configure.in
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+ $(ACLOCAL_M4)
+mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
+CONFIG_HEADER = $(top_builddir)/src/H5config.h
+CONFIG_CLEAN_FILES = testh5stat.sh
+am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(bindir)"
+binPROGRAMS_INSTALL = $(INSTALL_PROGRAM)
+am__EXEEXT_1 = h5stat_gentest$(EXEEXT)
+PROGRAMS = $(bin_PROGRAMS)
+h5stat_SOURCES = h5stat.c
+h5stat_OBJECTS = h5stat.$(OBJEXT)
+h5stat_LDADD = $(LDADD)
+h5stat_DEPENDENCIES = $(LIBH5TOOLS) $(LIBHDF5)
+h5stat_gentest_SOURCES = h5stat_gentest.c
+h5stat_gentest_OBJECTS = h5stat_gentest.$(OBJEXT)
+h5stat_gentest_LDADD = $(LDADD)
+h5stat_gentest_DEPENDENCIES = $(LIBH5TOOLS) $(LIBHDF5)
+binSCRIPT_INSTALL = $(INSTALL_SCRIPT)
+SCRIPTS = $(bin_SCRIPTS)
+DEFAULT_INCLUDES = -I. -I$(top_builddir)/src@am__isrc@
+depcomp = $(SHELL) $(top_srcdir)/bin/depcomp
+am__depfiles_maybe = depfiles
+COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \
+ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+ --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
+ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS)
+CCLD = $(CC)
+LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
+ --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \
+ $(LDFLAGS) -o $@
+SOURCES = h5stat.c h5stat_gentest.c
+DIST_SOURCES = h5stat.c h5stat_gentest.c
+ETAGS = etags
+CTAGS = ctags
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+ACLOCAL = /home1/packages/automake/automake-1.9.6/bin/aclocal-1.9 -I /afs/ncsa/projects/hdf/packages/libtool_1.5.14/Linux_2.4/share/aclocal
+ADD_PARALLEL_FILES = @ADD_PARALLEL_FILES@
+AMTAR = @AMTAR@
+AM_MAKEFLAGS = @AM_MAKEFLAGS@
+AR = @AR@
+
+# Set the paths for AFS installs of autotools for Linux machines
+# Ideally, these tools should never be needed during the build.
+AUTOCONF = /home1/packages/autoconf/autoconf-2.60/bin/autoconf
+AUTOHEADER = /home1/packages/autoconf/autoconf-2.60/bin/autoheader
+AUTOMAKE = /home1/packages/automake/automake-1.9.6/bin/automake-1.9
+AWK = @AWK@
+BYTESEX = @BYTESEX@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CC_VERSION = @CC_VERSION@
+
+# H5_CFLAGS holds flags that should be used as CFLAGS when building hdf5,
+# but which shouldn't be exported to h5cc for building other programs.
+CFLAGS = @CFLAGS@ @H5_CFLAGS@
+CLEARFILEBUF = @CLEARFILEBUF@
+CONFIG_DATE = @CONFIG_DATE@
+CONFIG_MODE = @CONFIG_MODE@
+CONFIG_USER = @CONFIG_USER@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@ @H5_CPPFLAGS@
+CXX = @CXX@
+CXXCPP = @CXXCPP@
+CXXDEPMODE = @CXXDEPMODE@
+CXXFLAGS = @CXXFLAGS@ @H5_CXXFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEBUG_PKG = @DEBUG_PKG@
+DEFAULT_API_VERSION = @DEFAULT_API_VERSION@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DEPRECATED_SYMBOLS = @DEPRECATED_SYMBOLS@
+DYNAMIC_DIRS = @DYNAMIC_DIRS@
+ECHO = @ECHO@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+F77 = @F77@
+
+# Make sure that these variables are exported to the Makefiles
+F9XMODEXT = @F9XMODEXT@
+F9XMODFLAG = @F9XMODFLAG@
+F9XSUFFIXFLAG = @F9XSUFFIXFLAG@
+FC = @FC@
+FCFLAGS = @FCFLAGS@ @H5_FCFLAGS@
+FCFLAGS_f90 = @FCFLAGS_f90@
+FCLIBS = @FCLIBS@
+FFLAGS = @FFLAGS@
+FILTERS = @FILTERS@
+FSEARCH_DIRS = @FSEARCH_DIRS@
+GREP = @GREP@
+H5_CFLAGS = @H5_CFLAGS@
+H5_CPPFLAGS = @H5_CPPFLAGS@
+H5_CXXFLAGS = @H5_CXXFLAGS@
+H5_FCFLAGS = @H5_FCFLAGS@
+H5_LONE_COLON = @H5_LONE_COLON@
+H5_VERSION = @H5_VERSION@
+HADDR_T = @HADDR_T@
+HDF5_INTERFACES = @HDF5_INTERFACES@
+HID_T = @HID_T@
+HL = @HL@
+HL_FOR = @HL_FOR@
+HSIZET = @HSIZET@
+HSIZE_T = @HSIZE_T@
+HSSIZE_T = @HSSIZE_T@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+INSTRUMENT_LIBRARY = @INSTRUMENT_LIBRARY@
+LDFLAGS = @LDFLAGS@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LN_S = @LN_S@
+LTLIBOBJS = @LTLIBOBJS@
+LT_STATIC_EXEC = @LT_STATIC_EXEC@
+MAINT = @MAINT@
+MAKEINFO = @MAKEINFO@
+MKDIR_P = @MKDIR_P@
+MPE = @MPE@
+MPI_GET_SIZE = @MPI_GET_SIZE@
+OBJECT_NAMELEN_DEFAULT_F = @OBJECT_NAMELEN_DEFAULT_F@
+OBJEXT = @OBJEXT@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PARALLEL = @PARALLEL@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PERL = @PERL@
+PTHREAD = @PTHREAD@
+RANLIB = @RANLIB@
+ROOT = @ROOT@
+RUNPARALLEL = @RUNPARALLEL@
+RUNSERIAL = @RUNSERIAL@
+R_INTEGER = @R_INTEGER@
+R_LARGE = @R_LARGE@
+SEARCH = @SEARCH@
+SETX = @SETX@
+SET_MAKE = @SET_MAKE@
+
+# Hardcode SHELL to be /bin/sh. Most machines have this shell, and
+# on at least one machine configure fails to detect its existence (janus).
+# Also, when HDF5 is configured on one machine but run on another,
+# configure's automatic SHELL detection may not work on the build machine.
+SHELL = /bin/sh
+SIZE_T = @SIZE_T@
+STATIC_SHARED = @STATIC_SHARED@
+STRIP = @STRIP@
+TESTPARALLEL = @TESTPARALLEL@
+TIME = @TIME@
+TR = @TR@
+TRACE_API = @TRACE_API@
+USE_FILTER_DEFLATE = @USE_FILTER_DEFLATE@
+USE_FILTER_FLETCHER32 = @USE_FILTER_FLETCHER32@
+USE_FILTER_NBIT = @USE_FILTER_NBIT@
+USE_FILTER_SCALEOFFSET = @USE_FILTER_SCALEOFFSET@
+USE_FILTER_SHUFFLE = @USE_FILTER_SHUFFLE@
+USE_FILTER_SZIP = @USE_FILTER_SZIP@
+USINGMEMCHECKER = @USINGMEMCHECKER@
+VERSION = @VERSION@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_CXX = @ac_ct_CXX@
+ac_ct_F77 = @ac_ct_F77@
+ac_ct_FC = @ac_ct_FC@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = $(exec_prefix)/doc
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+
+# Install directories that automake doesn't know about
+includedir = $(exec_prefix)/include
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+
+# Shell commands used in Makefiles
+RM = rm -f
+CP = cp
+
+# Some machines need a command to run executables; this is that command
+# so that our tests will run.
+# We use RUNTESTS instead of RUNSERIAL directly because it may be that
+# some tests need to be run with a different command. Older versions
+# of the makefiles used the command
+# $(LIBTOOL) --mode=execute
+# in some directories, for instance.
+RUNTESTS = $(RUNSERIAL)
+
+# Libraries to link to while building
+LIBHDF5 = $(top_builddir)/src/libhdf5.la
+LIBH5TEST = $(top_builddir)/test/libh5test.la
+LIBH5F = $(top_builddir)/fortran/src/libhdf5_fortran.la
+LIBH5FTEST = $(top_builddir)/fortran/test/libh5test_fortran.la
+LIBH5CPP = $(top_builddir)/c++/src/libhdf5_cpp.la
+LIBH5TOOLS = $(top_builddir)/tools/lib/libh5tools.la
+LIBH5_HL = $(top_builddir)/hl/src/libhdf5_hl.la
+LIBH5F_HL = $(top_builddir)/hl/fortran/src/libhdf5hl_fortran.la
+LIBH5CPP_HL = $(top_builddir)/hl/c++/src/libhdf5_hl_cpp.la
+
+# Scripts used to build examples
+# If only shared libraries have been installed, have h5cc build examples with
+# shared libraries instead of static libraries
+H5CC = $(bindir)/h5cc
+H5CC_PP = $(bindir)/h5pcc
+H5FC = $(bindir)/h5fc
+H5FC_PP = $(bindir)/h5pfc
+H5CPP = $(bindir)/h5c++
+
+# The trace script; this is used on source files from the C library to
+# insert tracing macros.
+TRACE = perl $(top_srcdir)/bin/trace
+
+# .chkexe files are used to mark tests that have run successfully.
+# .chklog files are output from those tests.
+# *.clog are from the MPE option.
+
+# Temporary files. *.h5 are generated by h5stat_gentest. They should be
+# copied to the testfiles/ directory if an update is required. fst_family*.h5
+# and scd_family*.h5 were created by setting the HDF5_NOCLEANUP variable.
+CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.h5 \
+ ../testfiles/fst_family*.h5 ../testfiles/scd_family*.h5
+
+# Include src directory
+INCLUDES = -I$(top_srcdir)/src -I$(top_srcdir)/tools/lib
+
+#test script and program
+TEST_PROG = h5stat_gentest
+TEST_SCRIPT = testh5stat.sh
+check_SCRIPTS = $(TEST_SCRIPT)
+SCRIPT_DEPEND = h5stat$(EXEEXT)
+bin_SCRIPTS =
+
+# Tell automake to clean h5redeploy script
+CLEANFILES =
+
+# These were generated by configure. Remove them only when distclean.
+DISTCLEANFILES = testh5stat.sh
+
+# All programs rely on hdf5 library and h5tools library
+LDADD = $(LIBH5TOOLS) $(LIBHDF5)
+
+# Automake needs to be taught how to build lib, progs, and tests targets.
+# These will be filled in automatically for the most part (e.g.,
+# lib_LIBRARIES are built for lib target), but EXTRA_LIB, EXTRA_PROG, and
+# EXTRA_TEST variables are supplied to allow the user to force targets to
+# be built at certain times.
+LIB = $(lib_LIBRARIES) $(lib_LTLIBRARIES) $(noinst_LIBRARIES) \
+ $(noinst_LTLIBRARIES) $(check_LIBRARIES) $(check_LTLIBRARIES) $(EXTRA_LIB)
+
+PROGS = $(bin_PROGRAMS) $(bin_SCRIPTS) $(noinst_PROGRAMS) $(noinst_SCRIPTS) \
+ $(EXTRA_PROG)
+
+TEST_PROG_CHKEXE = $(TEST_PROG:=.chkexe_)
+TEST_PROG_PARA_CHKEXE = $(TEST_PROG_PARA:=.chkexe_)
+TEST_SCRIPT_CHKSH = $(TEST_SCRIPT:=.chkexe_)
+TEST_SCRIPT_PARA_CHKSH = $(TEST_SCRIPT_PARA:=.chkexe_)
+all: all-am
+
+.SUFFIXES:
+.SUFFIXES: .c .lo .o .obj
+$(srcdir)/Makefile.in: @MAINTAINER_MODE_TRUE@ $(srcdir)/Makefile.am $(top_srcdir)/config/commence.am $(top_srcdir)/config/conclude.am $(am__configure_deps)
+ @for dep in $?; do \
+ case '$(am__configure_deps)' in \
+ *$$dep*) \
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh \
+ && exit 0; \
+ exit 1;; \
+ esac; \
+ done; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign tools/h5stat/Makefile'; \
+ cd $(top_srcdir) && \
+ $(AUTOMAKE) --foreign tools/h5stat/Makefile
+.PRECIOUS: Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ @case '$?' in \
+ *config.status*) \
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+ *) \
+ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
+ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
+ esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure: @MAINTAINER_MODE_TRUE@ $(am__configure_deps)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+testh5stat.sh: $(top_builddir)/config.status $(srcdir)/testh5stat.sh.in
+ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
+install-binPROGRAMS: $(bin_PROGRAMS)
+ @$(NORMAL_INSTALL)
+ test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)"
+ @list='$(bin_PROGRAMS)'; for p in $$list; do \
+ p1=`echo $$p|sed 's/$(EXEEXT)$$//'`; \
+ if test -f $$p \
+ || test -f $$p1 \
+ ; then \
+ f=`echo "$$p1" | sed 's,^.*/,,;$(transform);s/$$/$(EXEEXT)/'`; \
+ echo " $(INSTALL_PROGRAM_ENV) $(LIBTOOL) --mode=install $(binPROGRAMS_INSTALL) '$$p' '$(DESTDIR)$(bindir)/$$f'"; \
+ $(INSTALL_PROGRAM_ENV) $(LIBTOOL) --mode=install $(binPROGRAMS_INSTALL) "$$p" "$(DESTDIR)$(bindir)/$$f" || exit 1; \
+ else :; fi; \
+ done
+
+uninstall-binPROGRAMS:
+ @$(NORMAL_UNINSTALL)
+ @list='$(bin_PROGRAMS)'; for p in $$list; do \
+ f=`echo "$$p" | sed 's,^.*/,,;s/$(EXEEXT)$$//;$(transform);s/$$/$(EXEEXT)/'`; \
+ echo " rm -f '$(DESTDIR)$(bindir)/$$f'"; \
+ rm -f "$(DESTDIR)$(bindir)/$$f"; \
+ done
+
+clean-binPROGRAMS:
+ @list='$(bin_PROGRAMS)'; for p in $$list; do \
+ f=`echo $$p|sed 's/$(EXEEXT)$$//'`; \
+ echo " rm -f $$p $$f"; \
+ rm -f $$p $$f ; \
+ done
+
+clean-checkPROGRAMS:
+ @list='$(check_PROGRAMS)'; for p in $$list; do \
+ f=`echo $$p|sed 's/$(EXEEXT)$$//'`; \
+ echo " rm -f $$p $$f"; \
+ rm -f $$p $$f ; \
+ done
+h5stat$(EXEEXT): $(h5stat_OBJECTS) $(h5stat_DEPENDENCIES)
+ @rm -f h5stat$(EXEEXT)
+ $(LINK) $(h5stat_OBJECTS) $(h5stat_LDADD) $(LIBS)
+h5stat_gentest$(EXEEXT): $(h5stat_gentest_OBJECTS) $(h5stat_gentest_DEPENDENCIES)
+ @rm -f h5stat_gentest$(EXEEXT)
+ $(LINK) $(h5stat_gentest_OBJECTS) $(h5stat_gentest_LDADD) $(LIBS)
+install-binSCRIPTS: $(bin_SCRIPTS)
+ @$(NORMAL_INSTALL)
+ test -z "$(bindir)" || $(MKDIR_P) "$(DESTDIR)$(bindir)"
+ @list='$(bin_SCRIPTS)'; for p in $$list; do \
+ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
+ if test -f $$d$$p; then \
+ f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \
+ echo " $(binSCRIPT_INSTALL) '$$d$$p' '$(DESTDIR)$(bindir)/$$f'"; \
+ $(binSCRIPT_INSTALL) "$$d$$p" "$(DESTDIR)$(bindir)/$$f"; \
+ else :; fi; \
+ done
+
+uninstall-binSCRIPTS:
+ @$(NORMAL_UNINSTALL)
+ @list='$(bin_SCRIPTS)'; for p in $$list; do \
+ f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \
+ echo " rm -f '$(DESTDIR)$(bindir)/$$f'"; \
+ rm -f "$(DESTDIR)$(bindir)/$$f"; \
+ done
+
+mostlyclean-compile:
+ -rm -f *.$(OBJEXT)
+
+distclean-compile:
+ -rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/h5stat.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/h5stat_gentest.Po@am__quote@
+
+.c.o:
+@am__fastdepCC_TRUE@ $(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@ mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(COMPILE) -c $<
+
+.c.obj:
+@am__fastdepCC_TRUE@ $(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCC_TRUE@ mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(COMPILE) -c `$(CYGPATH_W) '$<'`
+
+.c.lo:
+@am__fastdepCC_TRUE@ $(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCC_TRUE@ mv -f $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCC_FALSE@ $(LTCOMPILE) -c -o $@ $<
+
+mostlyclean-libtool:
+ -rm -f *.lo
+
+clean-libtool:
+ -rm -rf .libs _libs
+
+ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES)
+ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ mkid -fID $$unique
+tags: TAGS
+
+TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
+ $(TAGS_FILES) $(LISP)
+ tags=; \
+ here=`pwd`; \
+ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \
+ test -n "$$unique" || unique=$$empty_fix; \
+ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+ $$tags $$unique; \
+ fi
+ctags: CTAGS
+CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \
+ $(TAGS_FILES) $(LISP)
+ tags=; \
+ here=`pwd`; \
+ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | \
+ $(AWK) ' { files[$$0] = 1; } \
+ END { for (i in files) print i; }'`; \
+ test -z "$(CTAGS_ARGS)$$tags$$unique" \
+ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+ $$tags $$unique
+
+GTAGS:
+ here=`$(am__cd) $(top_builddir) && pwd` \
+ && cd $(top_srcdir) \
+ && gtags -i $(GTAGS_ARGS) $$here
+
+distclean-tags:
+ -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+distdir: $(DISTFILES)
+ @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+ list='$(DISTFILES)'; \
+ dist_files=`for file in $$list; do echo $$file; done | \
+ sed -e "s|^$$srcdirstrip/||;t" \
+ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+ case $$dist_files in \
+ */*) $(MKDIR_P) `echo "$$dist_files" | \
+ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+ sort -u` ;; \
+ esac; \
+ for file in $$dist_files; do \
+ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+ if test -d $$d/$$file; then \
+ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+ cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \
+ fi; \
+ cp -pR $$d/$$file $(distdir)$$dir || exit 1; \
+ else \
+ test -f $(distdir)/$$file \
+ || cp -p $$d/$$file $(distdir)/$$file \
+ || exit 1; \
+ fi; \
+ done
+check-am: all-am
+ $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) $(check_SCRIPTS)
+ $(MAKE) $(AM_MAKEFLAGS) check-TESTS
+check: check-am
+all-am: Makefile $(PROGRAMS) $(SCRIPTS) all-local
+installdirs:
+ for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(bindir)"; do \
+ test -z "$$dir" || $(MKDIR_P) "$$dir"; \
+ done
+install: install-am
+install-exec: install-exec-am
+install-data: install-data-am
+uninstall: uninstall-am
+
+install-am: all-am
+ @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-am
+install-strip:
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ `test -z '$(STRIP)' || \
+ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install
+mostlyclean-generic:
+
+clean-generic:
+ -test -z "$(CLEANFILES)" || rm -f $(CLEANFILES)
+
+distclean-generic:
+ -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+ -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)
+
+maintainer-clean-generic:
+ @echo "This command is intended for maintainers to use"
+ @echo "it deletes files that may require special tools to rebuild."
+clean: clean-am
+
+clean-am: clean-binPROGRAMS clean-checkPROGRAMS clean-generic \
+ clean-libtool mostlyclean-am
+
+distclean: distclean-am
+ -rm -rf ./$(DEPDIR)
+ -rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+ distclean-tags
+
+dvi: dvi-am
+
+dvi-am:
+
+html: html-am
+
+info: info-am
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-am
+
+install-exec-am: install-binPROGRAMS install-binSCRIPTS
+
+install-html: install-html-am
+
+install-info: install-info-am
+
+install-man:
+
+install-pdf: install-pdf-am
+
+install-ps: install-ps-am
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-am
+ -rm -rf ./$(DEPDIR)
+ -rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-am
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+ mostlyclean-libtool mostlyclean-local
+
+pdf: pdf-am
+
+pdf-am:
+
+ps: ps-am
+
+ps-am:
+
+uninstall-am: uninstall-binPROGRAMS uninstall-binSCRIPTS
+
+.MAKE: install-am install-strip
+
+.PHONY: CTAGS GTAGS all all-am all-local check check-TESTS check-am \
+ clean clean-binPROGRAMS clean-checkPROGRAMS clean-generic \
+ clean-libtool ctags distclean distclean-compile \
+ distclean-generic distclean-libtool distclean-tags distdir dvi \
+ dvi-am html html-am info info-am install install-am \
+ install-binPROGRAMS install-binSCRIPTS install-data \
+ install-data-am install-dvi install-dvi-am install-exec \
+ install-exec-am install-html install-html-am install-info \
+ install-info-am install-man install-pdf install-pdf-am \
+ install-ps install-ps-am install-strip installcheck \
+ installcheck-am installdirs maintainer-clean \
+ maintainer-clean-generic mostlyclean mostlyclean-compile \
+ mostlyclean-generic mostlyclean-libtool mostlyclean-local pdf \
+ pdf-am ps ps-am tags uninstall uninstall-am \
+ uninstall-binPROGRAMS uninstall-binSCRIPTS
+
+
+# List all build rules defined by HDF5 Makefiles as "PHONY" targets here.
+# This tells the Makefiles that these targets are not files to be built but
+# commands that should be executed even if a file with the same name already
+# exists.
+.PHONY: build-check-clean build-check-p build-check-s build-lib build-progs \
+ build-tests check-clean check-install check-p check-s check-vfd \
+ install-doc lib progs tests uninstall-doc _exec_check-s _test help
+
+help:
+ @$(top_srcdir)/bin/makehelp
+
+# lib/progs/tests targets recurse into subdirectories. build-* targets
+# build files in this directory.
+build-lib: $(LIB)
+build-progs: $(LIB) $(PROGS)
+build-tests: $(LIB) $(PROGS) $(TESTS)
+
+# General rule for recursive building targets.
+# BUILT_SOURCES contain targets that need to be built before anything else
+# in the directory (e.g., for Fortran type detection)
+lib progs tests check-s check-p :: $(BUILT_SOURCES)
+ @$(MAKE) $(AM_MAKEFLAGS) build-$@ || exit 1;
+ @for d in X $(SUBDIRS); do \
+ if test $$d != X && test $$d != .; then \
+ (set -x; cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
+ fi; \
+ done
+
+# General rule for recursive cleaning targets. Like the rule above,
+# but doesn't require building BUILT_SOURCES.
+check-clean ::
+ @$(MAKE) $(AM_MAKEFLAGS) build-$@ || exit 1;
+ @for d in X $(SUBDIRS); do \
+ if test $$d != X && test $$d != .; then \
+ (set -x; cd $$d && $(MAKE) $(AM_MAKEFLAGS) $@) || exit 1; \
+ fi; \
+ done
+
+# Tell Automake to build tests when the user types `make all' (this is
+# not its default behavior). Also build EXTRA_LIB and EXTRA_PROG since
+# Automake won't build them automatically, either.
+all-local: $(EXTRA_LIB) $(EXTRA_PROG) $(TESTS)
+
+# make install-doc doesn't do anything outside of doc directory, but
+# Makefiles should recognize it.
+# UPDATE: docs no longer reside in this build tree, so this target
+# is deprecated.
+install-doc uninstall-doc:
+ @echo "Nothing to be done."
+
+# clean up files generated by tests so they can be re-run.
+build-check-clean:
+ $(RM) -rf $(CHECK_CLEANFILES)
+
+# run check-clean whenever mostlyclean is run
+mostlyclean-local: build-check-clean
+
+# check-install is just a synonym for installcheck
+check-install: installcheck
+
+# Run each test in order, passing $(TEST_FLAGS) to the program.
+# Since tests are done in a shell loop, "make -i" does not apply inside it.
+# Set HDF5_Make_Ignore to a non-blank string to ignore errors inside the loop.
+# The timestamps give a rough idea how much time the tests use.
+#
+# Note that targets in TESTS (defined above) will be built when the user
+# types 'make tests' or 'make check', but only programs in TEST_PROG,
+# TEST_PROG_PARA, or TEST_SCRIPT will actually be executed.
+check-TESTS: test
+
+test _test:
+ @$(MAKE) build-check-s
+ @$(MAKE) build-check-p
+
+# Actual execution of check-s.
+build-check-s: $(LIB) $(PROGS) $(TESTS)
+ @if test -n "$(TEST_PROG)$(TEST_SCRIPT)"; then \
+ echo "===Serial tests in `echo ${PWD} | sed -e s:.*/::` begin `date`==="; \
+ fi
+ @$(MAKE) $(AM_MAKEFLAGS) _exec_check-s
+ @if test -n "$(TEST_PROG)$(TEST_SCRIPT)"; then \
+ echo "===Serial tests in `echo ${PWD} | sed -e s:.*/::` ended `date`===";\
+ fi
+
+_exec_check-s: $(TEST_PROG_CHKEXE) $(TEST_SCRIPT_CHKSH)
+
+# The dummy.chkexe here prevents the target from being
+# empty if there are no tests in the current directory.
+# $${log} is the log file.
+# $${tname} is the name of test.
+$(TEST_PROG_CHKEXE) $(TEST_PROG_PARA_CHKEXE) dummy.chkexe_:
+ @if test "X$@" != "X.chkexe_" && test "X$@" != "Xdummy.chkexe_"; then \
+ tname=$(@:.chkexe_=)$(EXEEXT);\
+ log=$(@:.chkexe_=.chklog); \
+ echo "============================"; \
+ if $(top_srcdir)/bin/newer $(@:.chkexe_=.chkexe) $${tname}; then \
+ echo "No need to test $${tname} again."; \
+ else \
+ echo "============================" > $${log}; \
+ if test "X$(HDF_FORTRAN)" = "Xyes"; then \
+ echo "Fortran API: Testing $(HDF5_DRIVER) $${tname} $(TEST_FLAGS)"; \
+ echo "Fortran API: $(HDF5_DRIVER) $${tname} $(TEST_FLAGS) Test Log" >> $${log}; \
+ elif test "X$(HDF_CXX)" = "Xyes"; then \
+ echo "C++ API: Testing $(HDF5_DRIVER) $${tname} $(TEST_FLAGS)"; \
+ echo "C++ API: $(HDF5_DRIVER) $${tname} $(TEST_FLAGS) Test Log" >> $${log};\
+ else \
+ echo "Testing $(HDF5_DRIVER) $${tname} $(TEST_FLAGS)"; \
+ echo "$(HDF5_DRIVER) $${tname} $(TEST_FLAGS) Test Log" >> $${log}; \
+ fi; \
+ echo "============================" >> $${log}; \
+ srcdir="$(srcdir)" \
+ $(TIME) $(RUNTESTS) ./$${tname} $(TEST_FLAGS) >> $${log} 2>&1 \
+ && touch $(@:.chkexe_=.chkexe) || \
+ (test $$HDF5_Make_Ignore && echo "*** Error ignored") || \
+ (cat $${log} && false) || exit 1; \
+ echo "" >> $${log}; \
+ echo "Finished testing $${tname} $(TEST_FLAGS)" >> $${log}; \
+ echo "============================" >> $${log}; \
+ echo "Finished testing $${tname} $(TEST_FLAGS)"; \
+ cat $${log}; \
+ fi; \
+ fi
+
+# The dummysh.chkexe here prevents the target from being
+# empty if there are no tests in the current directory.
+# $${log} is the log file.
+# $${tname} is the name of test.
+$(TEST_SCRIPT_CHKSH) $(TEST_SCRIPT_PARA_CHKSH) dummysh.chkexe_:
+ @if test "X$@" != "X.chkexe_" && test "X$@" != "Xdummysh.chkexe_"; then \
+ cmd=$(@:.chkexe_=);\
+ tname=`basename $$cmd`;\
+ chkname=`basename $(@:.chkexe_=.chkexe)`;\
+ log=`basename $(@:.chkexe_=.chklog)`; \
+ echo "============================"; \
+ if $(top_srcdir)/bin/newer $${chkname} $$cmd $(SCRIPT_DEPEND); then \
+ echo "No need to test $${tname} again."; \
+ else \
+ echo "============================" > $${log}; \
+ if test "X$(HDF_FORTRAN)" = "Xyes"; then \
+ echo "Fortran API: Testing $${tname} $(TEST_FLAGS)"; \
+ echo "Fortran API: $${tname} $(TEST_FLAGS) Test Log" >> $${log}; \
+ elif test "X$(HDF_CXX)" = "Xyes"; then \
+ echo "C++ API: Testing $${tname} $(TEST_FLAGS)"; \
+ echo "C++ API: $${tname} $(TEST_FLAGS) Test Log" >> $${log}; \
+ else \
+ echo "Testing $${tname} $(TEST_FLAGS)"; \
+ echo "$${tname} $(TEST_FLAGS) Test Log" >> $${log}; \
+ fi; \
+ echo "============================" >> $${log}; \
+ RUNSERIAL="$(RUNSERIAL)" RUNPARALLEL="$(RUNPARALLEL)" \
+ srcdir="$(srcdir)" \
+ $(TIME) $(SHELL) $$cmd $(TEST_FLAGS) >> $${log} 2>&1 \
+ && touch $${chkname} || \
+ (test $$HDF5_Make_Ignore && echo "*** Error ignored") || \
+ (cat $${log} && false) || exit 1; \
+ echo "" >> $${log}; \
+ echo "Finished testing $${tname} $(TEST_FLAGS)" >> $${log}; \
+ echo "============================" >> $${log}; \
+ echo "Finished testing $${tname} $(TEST_FLAGS)"; \
+ cat $${log}; \
+ fi; \
+ echo "============================"; \
+ fi
+
+# Actual execution of check-p.
+build-check-p: $(LIB) $(PROGS) $(TESTS)
+ @if test -n "$(TEST_PROG_PARA)$(TEST_SCRIPT_PARA)"; then \
+ echo "===Parallel tests in `echo ${PWD} | sed -e s:.*/::` begin `date`==="; \
+ fi
+ @if test -n "$(TEST_PROG_PARA)"; then \
+ echo "**** Hint ****"; \
+ echo "Parallel test files reside in the current directory" \
+ "by default."; \
+ echo "Set HDF5_PARAPREFIX to use another directory. E.g.,"; \
+ echo " HDF5_PARAPREFIX=/PFS/user/me"; \
+ echo " export HDF5_PARAPREFIX"; \
+ echo " make check"; \
+ echo "**** end of Hint ****"; \
+ fi
+ @for test in $(TEST_PROG_PARA) dummy; do \
+ if test $$test != dummy; then \
+ $(MAKE) $(AM_MAKEFLAGS) $$test.chkexe_ \
+ RUNTESTS="$(RUNPARALLEL)" || exit 1; \
+ fi; \
+ done
+ @for test in $(TEST_SCRIPT_PARA) dummy; do \
+ if test $$test != dummy; then \
+ $(MAKE) $(AM_MAKEFLAGS) $$test.chkexe_ || exit 1; \
+ fi; \
+ done
+ @if test -n "$(TEST_PROG_PARA)$(TEST_SCRIPT_PARA)"; then \
+ echo "===Parallel tests in `echo ${PWD} | sed -e s:.*/::` ended `date`===";\
+ fi
+
+# Run test with different Virtual File Driver
+check-vfd: $(LIB) $(PROGS) $(TESTS)
+ @for vfd in $(VFD_LIST) dummy; do \
+ if test $$vfd != dummy; then \
+ echo "============================"; \
+ echo "Testing Virtual File Driver $$vfd"; \
+ echo "============================"; \
+ $(MAKE) $(AM_MAKEFLAGS) check-clean || exit 1; \
+ HDF5_DRIVER=$$vfd $(MAKE) $(AM_MAKEFLAGS) check || exit 1; \
+ fi; \
+ done
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/tools/h5stat/h5stat.c b/tools/h5stat/h5stat.c
new file mode 100644
index 0000000..8f29519
--- /dev/null
+++ b/tools/h5stat/h5stat.c
@@ -0,0 +1,1377 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <stdlib.h>
+#include <string.h>
+#include "H5private.h" /* Generic Functions */
+#include "h5tools.h"
+#include "h5tools_utils.h"
+#include "h5tools_ref.h"
+#include "h5trav.h"
+#include "hdf5.h"
+
+/* Parameters to control statistics gathered */
+#define SIZE_SMALL_GROUPS 10
+#define SIZE_SMALL_ATTRS 10
+#define SIZE_SMALL_DSETS 10
+
+#define H5_NFILTERS_IMPL 8 /* Number of currently implemented filters + one to
+ accommodate user-defined filters + one to
+ accommodate datasets without any filters */
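For reference, a sketch (not part of the committed source) of how the value 8 breaks down and how dataset_stats() below indexes its dset_comptype[] array; the H5Z_FILTER_* names are HDF5's predefined filter identifiers, and the bin layout is inferred from the indexing code further down:

/* dset_comptype[] bin layout implied by H5_NFILTERS_IMPL = 8:
 *   index 0     - datasets with no filter applied
 *   index 1..6  - one bin per predefined filter, indexed by its ID:
 *                 H5Z_FILTER_DEFLATE (1), H5Z_FILTER_SHUFFLE (2),
 *                 H5Z_FILTER_FLETCHER32 (3), H5Z_FILTER_SZIP (4),
 *                 H5Z_FILTER_NBIT (5), H5Z_FILTER_SCALEOFFSET (6)
 *   index 7     - any other (e.g. user-defined) filter
 * i.e. 6 implemented filters + 1 "other" + 1 "none" = 8. */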
+
+
+
+/* Datatype statistics for datasets */
+typedef struct dtype_info_t {
+ hid_t tid; /* ID of datatype */
+ unsigned long count; /* Number of types found */
+ unsigned long named; /* Number of types that are named */
+} dtype_info_t;
+
+typedef struct ohdr_info_t {
+ hsize_t total_size; /* Total size of object headers */
+ hsize_t free_size; /* Total free space in object headers */
+} ohdr_info_t;
+
+/* Info to pass to the iteration functions */
+typedef struct iter_t {
+ const char *container; /* Full name of the container object */
+ unsigned long curr_depth; /* Current depth of hierarchy */
+
+ unsigned long uniq_groups; /* Number of unique groups */
+ unsigned long uniq_dsets; /* Number of unique datasets */
+ unsigned long uniq_types; /* Number of unique named datatypes */
+ unsigned long uniq_links; /* Number of unique links */
+ unsigned long uniq_others; /* Number of other unique objects */
+
+ unsigned long max_depth; /* Maximum depth of hierarchy */
+ unsigned long max_links; /* Maximum # of links to an object */
+ hsize_t max_fanout; /* Maximum fanout from a group */
+ unsigned long num_small_groups[SIZE_SMALL_GROUPS]; /* Size of small groups tracked */
+ unsigned group_nbins; /* Number of bins for group counts */
+ unsigned long *group_bins; /* Pointer to array of bins for group counts */
+ ohdr_info_t group_ohdr_info; /* Object header information for groups */
+
+ hsize_t max_attrs; /* Maximum attributes from a group */
+ unsigned long num_small_attrs[SIZE_SMALL_ATTRS]; /* Size of small attributes tracked */
+ unsigned attr_nbins;
+ unsigned long *attr_bins;
+
+ unsigned long max_dset_rank; /* Maximum rank of dataset */
+ unsigned long dset_rank_count[H5S_MAX_RANK]; /* Number of datasets of each rank */
+ hsize_t max_dset_dims; /* Maximum dimension size of dataset */
+ unsigned long small_dset_dims[SIZE_SMALL_DSETS]; /* Size of dimensions of small datasets tracked */
+ unsigned long dset_layouts[H5D_NLAYOUTS]; /* Type of storage for each dataset */
+ unsigned long dset_comptype[H5_NFILTERS_IMPL]; /* Number of currently implemented filters */
+ unsigned long dset_ntypes; /* Number of diff. dataset datatypes found */
+ dtype_info_t *dset_type_info; /* Pointer to dataset datatype information found */
+ unsigned dset_dim_nbins; /* Number of bins for dataset dimensions */
+ unsigned long *dset_dim_bins; /* Pointer to array of bins for dataset dimensions */
+ ohdr_info_t dset_ohdr_info; /* Object header information for datasets */
+ hsize_t dset_storage_size; /* Size of raw data for datasets */
+ hsize_t groups_btree_storage_size; /* btree size for group */
+ hsize_t groups_heap_storage_size; /* heap size for group */
+ hsize_t attrs_btree_storage_size; /* btree size for attributes (1.8) */
+ hsize_t attrs_heap_storage_size; /* fractal heap size for attributes (1.8) */
+ hsize_t SM_hdr_storage_size; /* header size for SOHM table (1.8) */
+ hsize_t SM_index_storage_size; /* index (btree & list) size for SOHM table (1.8) */
+ hsize_t SM_heap_storage_size; /* fractal heap size for SOHM table (1.8) */
+ hsize_t super_ext_size; /* superblock extension size */
+ hsize_t datasets_btree_storage_size; /* btree size for chunked dataset */
+ unsigned long nexternal; /* Number of external files for a dataset */
+ int local; /* Flag to indicate iteration over the object*/
+} iter_t;
+
+
+/* Table containing object id and object name */
+static struct {
+ int nalloc; /* number of slots allocated */
+ int nobjs; /* number of objects */
+ struct {
+ haddr_t id; /* object number */
+ char *name; /* full object name */
+ } *obj;
+} idtab_g;
+
+const char *progname = "h5stat";
+int d_status = EXIT_SUCCESS;
+static int display_all = TRUE;
+static int display_file_metadata = FALSE;
+static int display_file = FALSE;
+static int display_group_metadata = FALSE;
+static int display_group = FALSE;
+static int display_dset_metadata = FALSE;
+static int display_dset = FALSE;
+static int display_dtype_metadata = FALSE;
+/* Not used yet 11/17/06 EIP
+static int display_dtype = FALSE;
+*/
+static int display_object = FALSE;
+static int display_attr = FALSE;
+
+/* A structure for handling the order in which command-line parameters come in */
+struct handler_t {
+ void (*func)(void *);
+ int flag;
+ char *obj;
+};
+
+
+static const char *s_opts ="AFfhGgDdTO:V";
+static struct long_options l_opts[] = {
+ {"help", no_arg, 'h'},
+ {"hel", no_arg, 'h'},
+ {"file", no_arg, 'f'},
+ {"fil", no_arg, 'f'},
+ {"fi", no_arg, 'f'},
+ {"FILEmetadata", no_arg, 'F'},
+ {"FILEmetadat", no_arg, 'F'},
+ {"FILEmetada", no_arg, 'F'},
+ {"FILEmetad", no_arg, 'F'},
+ {"FILEmeta", no_arg, 'F'},
+ {"FILEmet", no_arg, 'F'},
+ {"FILEme", no_arg, 'F'},
+ {"FILEm", no_arg, 'F'},
+ {"group", no_arg, 'g'},
+ {"grou", no_arg, 'g'},
+ {"gro", no_arg, 'g'},
+ {"gr", no_arg, 'g'},
+ {"groupmetadata", no_arg, 'G'},
+ {"groupmetadat", no_arg, 'G'},
+ {"groupmetada", no_arg, 'G'},
+ {"groupmetad", no_arg, 'G'},
+ {"groupmeta", no_arg, 'G'},
+ {"groupmet", no_arg, 'G'},
+ {"groupme", no_arg, 'G'},
+ {"groupm", no_arg, 'G'},
+ {"dset", no_arg, 'd'},
+ {"dse", no_arg, 'd'},
+ {"ds", no_arg, 'd'},
+ {"d", no_arg, 'd'},
+ {"dsetmetadata", no_arg, 'D'},
+ {"dsetmetadat", no_arg, 'D'},
+ {"dsetmetada", no_arg, 'D'},
+ {"dsetmetad", no_arg, 'D'},
+ {"dsetmeta", no_arg, 'D'},
+ {"dsetmet", no_arg, 'D'},
+ {"dsetme", no_arg, 'D'},
+ {"dsetm", no_arg, 'D'},
+ {"dtypemetadata", no_arg, 'T'},
+ {"dtypemetadat", no_arg, 'T'},
+ {"dtypemetada", no_arg, 'T'},
+ {"dtypemetad", no_arg, 'T'},
+ {"dtypemeta", no_arg, 'T'},
+ {"dtypemet", no_arg, 'T'},
+ {"dtypeme", no_arg, 'T'},
+ {"dtypem", no_arg, 'T'},
+ {"dtype", no_arg, 'T'},
+ { "object", require_arg, 'O' },
+ { "objec", require_arg, 'O' },
+ { "obje", require_arg, 'O' },
+ { "obj", require_arg, 'O' },
+ { "ob", require_arg, 'O' },
+ { "version", no_arg, 'V' },
+ { "versio", no_arg, 'V' },
+ { "versi", no_arg, 'V' },
+ { "vers", no_arg, 'V' },
+ { "ver", no_arg, 'V' },
+ { "ve", no_arg, 'V' },
+ { "attribute", no_arg, 'A' },
+ { "attribut", no_arg, 'A' },
+ { "attribu", no_arg, 'A' },
+ { "attrib", no_arg, 'A' },
+ { "attri", no_arg, 'A' },
+ { "attr", no_arg, 'A' },
+ { "att", no_arg, 'A' },
+ { "at", no_arg, 'A' },
+ { "a", no_arg, 'A' },
+ { NULL, 0, '\0' }
+};
+
+static void
+leave(int ret)
+{
+ h5tools_close();
+ exit(ret);
+}
+
+
+static void usage(const char *prog)
+{
+ fflush(stdout);
+ fprintf(stdout, "\n");
+ fprintf(stdout, "This tool is under development. For detailed information\n");
+ fprintf(stdout, "please see the specification document at\n");
+ fprintf(stdout, "http://hdf.ncsa.uiuc.edu/RFC/h5stat/h5stat-spec.pdf\n");
+ fprintf(stdout, "\n");
+ fprintf(stdout, "Please send your comments and questions to help@hdfgroup.org\n");
+ fprintf(stdout, "\n");
+ fprintf(stdout, "Usage: %s [OPTIONS] file\n", prog);
+ fprintf(stdout, "\n");
+ fprintf(stdout, " OPTIONS\n");
+ fprintf(stdout, " -h, --help Print a usage message and exit\n");
+ fprintf(stdout, " -V, --version Print version number and exit\n");
+ fprintf(stdout, " -f, --file Print file information\n");
+ fprintf(stdout, " -F, --filemetadata Print file metadata\n");
+ fprintf(stdout, " -g, --group Print group information\n");
+ fprintf(stdout, " -G, --groupmetadata Print group metadata\n");
+ fprintf(stdout, " -d, --dset Print dataset information\n");
+ fprintf(stdout, " -D, --dsetmetadata Print dataset metadata\n");
+ fprintf(stdout, " -T, --dtypemetadata Print datatype metadata\n");
+ fprintf(stdout, " -A, --attribute Print attribute information\n");
+ fprintf(stdout, "\n");
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: ceil_log10
+ *
+ * Purpose: Compute the ceiling of log_10(x)
+ *
+ * Return: >0 on success, 0 on failure
+ *
+ * Programmer: Quincey Koziol
+ * Monday, August 22, 2005
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static unsigned
+ceil_log10(unsigned long x)
+{
+ unsigned long pow10 = 1;
+ unsigned ret = 0;
+
+ while(x >= pow10) {
+ pow10 *= 10;
+ ret++;
+ } /* end while */
+
+ return(ret);
+}
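A hand-worked example (added here for illustration, not in the original file) of the bin ranges this routine produces for the histogram code below:

/* ceil_log10() as written returns the number of decimal digits of x
 * (and 0 for x == 0), so the *_stats() histograms bin counts as:
 *   ceil_log10(0)        == 0  ->  bin 0: exactly 0
 *   ceil_log10(1..9)     == 1  ->  bin 1: 1 - 9
 *   ceil_log10(10..99)   == 2  ->  bin 2: 10 - 99
 *   ceil_log10(100..999) == 3  ->  bin 3: 100 - 999
 * and so on, one bin per power of ten. */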
+
+
+/*-------------------------------------------------------------------------
+ * Function: sym_insert
+ *
+ * Purpose: Add a symbol to the table.
+ *
+ * Return: void
+ *
+ * Programmer: Robb Matzke
+ * Thursday, January 21, 1999
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+sym_insert(H5O_info_t *oi, const char *name)
+{
+ /* Don't add it if the link count is 1 because such an object can only
+ * have one name. */
+ if(oi->rc > 1) {
+ int n;
+
+ /* Extend the table */
+ if(idtab_g.nobjs >= idtab_g.nalloc) {
+ idtab_g.nalloc = MAX(256, 2 * idtab_g.nalloc);
+ idtab_g.obj = realloc(idtab_g.obj, idtab_g.nalloc * sizeof(idtab_g.obj[0]));
+ } /* end if */
+
+ /* Insert the entry */
+ n = idtab_g.nobjs++;
+ idtab_g.obj[n].id = oi->addr;
+ idtab_g.obj[n].name = strdup(name);
+ } /* end if */
+} /* end sym_insert() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: sym_lookup
+ *
+ * Purpose: Find another name for the specified object.
+ *
+ * Return: Success: Ptr to another name.
+ *
+ * Failure: NULL
+ *
+ * Programmer: Robb Matzke
+ * Thursday, January 21, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static char *
+sym_lookup(H5O_info_t *oi)
+{
+ int n;
+
+ /*only one name possible*/
+ if(oi->rc < 2)
+ return NULL;
+
+ for(n = 0; n < idtab_g.nobjs; n++)
+ if(idtab_g.obj[n].id == oi->addr)
+ return idtab_g.obj[n].name;
+
+ return NULL;
+} /* end sym_lookup() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: fix_name
+ *
+ * Purpose: Returns a malloc'd buffer that contains the PATH and BASE
+ * names separated by a single slash. It also removes duplicate
+ * and trailing slashes.
+ *
+ * Return: Success: Ptr to fixed name from malloc()
+ *
+ * Failure: NULL
+ *
+ * Programmer: Robb Matzke
+ * Thursday, January 21, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static char *
+fix_name(const char *path, const char *base)
+{
+ size_t n = (path ? strlen(path) : 0) + (base ? strlen(base) : 0) + 3;
+ char *s = malloc(n), prev='\0';
+ size_t len = 0;
+
+ if (path) {
+ /* Path, followed by slash */
+ for (/*void*/; *path; path++)
+ if ('/'!=*path || '/'!=prev)
+ prev = s[len++] = *path;
+ if ('/' != prev)
+ prev = s[len++] = '/';
+ }
+
+ if (base) {
+ /* Base name w/o trailing slashes */
+ const char *end = base + strlen(base);
+ while (end > base && '/' == end[-1])
+ --end;
+
+ for (/*void*/; base < end; base++)
+ if ('/' != *base || '/' != prev)
+ prev = s[len++] = *base;
+ }
+
+ s[len] = '\0';
+ return s;
+}
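A few hand-traced calls (hypothetical object names, added for illustration only):

/* fix_name() examples:
 *   fix_name("/grp1//grp2/", "dset1")  ->  "/grp1/grp2/dset1"
 *   fix_name("/", "/grp1/")            ->  "/grp1"
 *   fix_name(NULL, "dset1")            ->  "dset1"
 * Duplicate and trailing slashes are collapsed, exactly one slash separates
 * path and base, and the caller is expected to free() the returned buffer. */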
+
+
+/*-------------------------------------------------------------------------
+ * Function: attribute_stats
+ *
+ * Purpose: Gather statistics about attributes on an object
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Tuesday, July 17, 2007
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+attribute_stats(iter_t *iter, const H5O_info_t *oi)
+{
+ unsigned bin; /* "bin" the number of objects falls in */
+
+ /* Update dataset & attribute metadata info */
+ iter->attrs_btree_storage_size += oi->meta_size.attr.index_size;
+ iter->attrs_heap_storage_size += oi->meta_size.attr.heap_size;
+
+ /* Update small # of attribute count & limits */
+ if(oi->num_attrs < SIZE_SMALL_ATTRS)
+ (iter->num_small_attrs[(size_t)oi->num_attrs])++;
+ if(oi->num_attrs > iter->max_attrs)
+ iter->max_attrs = oi->num_attrs;
+
+ /* Add attribute count to proper bin */
+ bin = ceil_log10((unsigned long)oi->num_attrs);
+ if((bin + 1) > iter->attr_nbins) {
+ iter->attr_bins = realloc(iter->attr_bins, (bin + 1) * sizeof(unsigned long));
+ assert(iter->attr_bins);
+
+ /* Initialize counts for intermediate bins */
+ while(iter->attr_nbins < bin)
+ iter->attr_bins[iter->attr_nbins++] = 0;
+ iter->attr_nbins++;
+
+ /* Initialize count for new bin */
+ iter->attr_bins[bin] = 1;
+ } /* end if */
+ else
+ (iter->attr_bins[bin])++;
+
+ return 0;
+} /* end attribute_stats() */
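A worked trace (illustrative numbers only) of the bin-growth logic above, which the group and dataset routines repeat with their own bin arrays:

/* Example: attr_nbins is currently 2 and an object with 5000 attributes
 * arrives.  bin = ceil_log10(5000) = 4, and since 4 + 1 > 2 the bins array
 * is realloc'ed to 5 entries, bins 2 and 3 are zeroed, bin 4 is set to 1,
 * and attr_nbins becomes 5.  A later object with 20 attributes would then
 * simply increment attr_bins[2]. */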
+
+
+/*-------------------------------------------------------------------------
+ * Function: group_stats
+ *
+ * Purpose: Gather statistics about the group
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Tuesday, August 16, 2005
+ *
+ * Modifications: Refactored code from the walk_function
+ * EIP, Wednesday, August 16, 2006
+ *
+ * Vailin Choi 12 July 2007
+ * 1. Gathered storage info for btree and heap
+ * (groups and attributes)
+ * 2. Gathered info for attributes
+ *
+ * Vailin Choi 14 July 2007
+ * Cast "num_objs" and "num_attrs" to size_t
+ * Due to the -Mbounds problem for the pgi-32 bit compiler on indexing
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+group_stats(hid_t group, const char *name, const char *fullname,
+ const H5O_info_t *oi, H5G_iterate_t walk, iter_t *iter)
+{
+ hid_t gid; /* Group ID */
+ const char *last_container;
+ hsize_t num_objs;
+ unsigned bin; /* "bin" the number of objects falls in */
+ herr_t ret;
+
+ /* Gather statistics about this type of object */
+ iter->uniq_groups++;
+ if(iter->curr_depth > iter->max_depth)
+ iter->max_depth = iter->curr_depth;
+
+ /* Get object header information */
+ iter->group_ohdr_info.total_size += oi->hdr.space.total;
+ iter->group_ohdr_info.free_size += oi->hdr.space.free;
+
+ gid = H5Gopen(group, name);
+ assert(gid > 0);
+
+ /* Get number of links in this group */
+ ret = H5Gget_num_objs(gid, &num_objs);
+ assert(ret >= 0);
+
+ /* Update link stats */
+ if(num_objs < SIZE_SMALL_GROUPS)
+ (iter->num_small_groups[(size_t)num_objs])++;
+ if(num_objs > iter->max_fanout)
+ iter->max_fanout = num_objs;
+
+ /* Add group count to proper bin */
+ bin = ceil_log10((unsigned long)num_objs);
+ if((bin + 1) > iter->group_nbins) {
+ /* Allocate more storage for group count bins */
+ iter->group_bins = realloc(iter->group_bins, (bin + 1) * sizeof(unsigned long));
+ assert(iter->group_bins);
+
+ /* Initialize counts for intermediate bins */
+ while(iter->group_nbins < bin)
+ iter->group_bins[iter->group_nbins++] = 0;
+ iter->group_nbins++;
+
+ /* Initialize count for new bin */
+ iter->group_bins[bin] = 1;
+ } /* end if */
+ else
+ (iter->group_bins[bin])++;
+
+ /* Update group metadata info */
+ iter->groups_btree_storage_size += oi->meta_size.obj.index_size;
+ iter->groups_heap_storage_size += oi->meta_size.obj.heap_size;
+
+ /* Update attribute metadata info */
+ ret = attribute_stats(iter, oi);
+ assert(ret >= 0);
+
+ /* Close current group */
+ ret = H5Gclose(gid);
+ assert(ret >= 0);
+
+ /* Update current container info */
+ last_container = iter->container;
+ iter->container = fullname;
+ iter->curr_depth++;
+
+ /* Recursively descend into current group's objects */
+ H5Giterate(group, name, NULL, walk, iter);
+
+ /* Revert current container info */
+ iter->container = last_container;
+ iter->curr_depth--;
+
+ return 0;
+} /* end group_stats() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: dataset_stats
+ *
+ * Purpose: Gather statistics about the dataset
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Tuesday, August 16, 2005
+ *
+ * Modifications: Refactored code from the walk_function
+ * EIP, Wednesday, August 16, 2006
+ *
+ * Vailin Choi 12 July 2007
+ * 1. Gathered storage info for btree and heap
+ * (chunked datasets and attributes)
+ * 2. Gathered info for attributes
+ *
+ * Vailin Choi 14 July 2007
+ * Cast "dims" and "num_attrs" to size_t
+ * Due to the -Mbounds problem for the pgi-32bit compiler on indexing
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+dataset_stats(hid_t group, const char *name, const H5O_info_t *oi, iter_t *iter)
+{
+ unsigned bin; /* "bin" the number of objects falls in */
+ hid_t did; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t tid; /* Datatype ID */
+ hid_t dcpl; /* Dataset creation property list ID */
+ hsize_t dims[H5S_MAX_RANK];/* Dimensions of dataset */
+ H5D_layout_t lout; /* Layout of dataset */
+ unsigned type_found; /* Whether the dataset's datatype was */
+ /* already found */
+ int ndims; /* Number of dimensions of dataset */
+ hsize_t storage; /* Size of dataset storage */
+ unsigned u; /* Local index variable */
+ int num_ext; /* Number of external files for a dataset */
+ int nfltr; /* Number of filters for a dataset */
+ H5Z_filter_t fltr; /* Filter identifier */
+ herr_t ret;
+
+ /* Gather statistics about this type of object */
+ iter->uniq_dsets++;
+
+ /* Get object header information */
+ iter->dset_ohdr_info.total_size += oi->hdr.space.total;
+ iter->dset_ohdr_info.free_size += oi->hdr.space.free;
+
+ did = H5Dopen(group, name);
+ assert(did > 0);
+
+ /* Update dataset metadata info */
+ iter->datasets_btree_storage_size += oi->meta_size.obj.index_size;
+
+ /* Update attribute metadata info */
+ ret = attribute_stats(iter, oi);
+ assert(ret >= 0);
+
+ /* Get storage info */
+ storage = H5Dget_storage_size(did);
+ iter->dset_storage_size += storage;
+
+ /* Gather dataspace statistics */
+ sid = H5Dget_space(did);
+ assert(sid > 0);
+
+ ndims = H5Sget_simple_extent_dims(sid, dims, NULL);
+ assert(ndims >= 0);
+
+ /* Check for larger rank of dataset */
+ if((unsigned)ndims > iter->max_dset_rank)
+ iter->max_dset_rank = ndims;
+
+ /* Track the number of datasets with each rank */
+ (iter->dset_rank_count[ndims])++;
+
+ /* Only gather dim size statistics on 1-D datasets */
+ if(ndims == 1) {
+ iter->max_dset_dims = dims[0];
+ if(dims[0] < SIZE_SMALL_DSETS)
+ (iter->small_dset_dims[(size_t)dims[0]])++;
+
+ /* Add dim count to proper bin */
+ bin = ceil_log10((unsigned long)dims[0]);
+ if((bin + 1) > iter->dset_dim_nbins) {
+ /* Allocate more storage for dataset dimension bins */
+ iter->dset_dim_bins = realloc(iter->dset_dim_bins, (bin + 1) * sizeof(unsigned long));
+ assert(iter->dset_dim_bins);
+
+ /* Initialize counts for intermediate bins */
+ while(iter->dset_dim_nbins < bin)
+ iter->dset_dim_bins[iter->dset_dim_nbins++] = 0;
+ iter->dset_dim_nbins++;
+
+ /* Initialize count for this bin */
+ iter->dset_dim_bins[bin] = 1;
+ } /* end if */
+ else
+ (iter->dset_dim_bins[bin])++;
+ } /* end if */
+
+ ret = H5Sclose(sid);
+ assert(ret >= 0);
+
+ /* Gather datatype statistics */
+ tid = H5Dget_type(did);
+ assert(tid > 0);
+
+ type_found = FALSE;
+ for(u = 0; u < iter->dset_ntypes; u++)
+ if(H5Tequal(iter->dset_type_info[u].tid, tid) > 0) {
+ type_found = TRUE;
+ break;
+ } /* end for */
+ if(type_found)
+ (iter->dset_type_info[u].count)++;
+ else {
+ unsigned curr_ntype = iter->dset_ntypes;
+
+ /* Increment # of datatypes seen for datasets */
+ iter->dset_ntypes++;
+
+ /* Allocate more storage for info about dataset's datatype */
+ iter->dset_type_info = realloc(iter->dset_type_info, iter->dset_ntypes * sizeof(dtype_info_t));
+ assert(iter->dset_type_info);
+
+ /* Initialize information about datatype */
+ iter->dset_type_info[curr_ntype].tid = H5Tcopy(tid);
+ assert(iter->dset_type_info[curr_ntype].tid > 0);
+ iter->dset_type_info[curr_ntype].count = 1;
+ iter->dset_type_info[curr_ntype].named = 0;
+
+ /* Set index for later */
+ u = curr_ntype;
+ } /* end else */
+
+ /* Check if the datatype is a named datatype */
+ if(H5Tcommitted(tid) > 0)
+ (iter->dset_type_info[u].named)++;
+
+ ret = H5Tclose(tid);
+ assert(ret >= 0);
+
+ /* Gather layout statistics */
+ dcpl = H5Dget_create_plist(did);
+ assert(dcpl > 0);
+
+ lout = H5Pget_layout(dcpl);
+ assert(lout >= 0);
+
+ /* Track the layout type for dataset */
+ (iter->dset_layouts[lout])++;
+
+ num_ext = H5Pget_external_count(dcpl);
+ assert (num_ext >= 0);
+
+ if(num_ext)
+ iter->nexternal = iter->nexternal + num_ext;
+
+ /* Track different filters */
+ if((nfltr = H5Pget_nfilters(dcpl)) >= 0) {
+ if(nfltr == 0)
+ iter->dset_comptype[0]++;
+ for(u = 0; u < (unsigned)nfltr; u++) {
+#ifdef H5_WANT_H5_V1_6_COMPAT
+ fltr = H5Pget_filter(dcpl, u, 0, 0, 0, 0, 0);
+#else /* H5_WANT_H5_V1_6_COMPAT */
+ fltr = H5Pget_filter(dcpl, u, 0, 0, 0, 0, 0, NULL);
+#endif /* H5_WANT_H5_V1_6_COMPAT */
+ if(fltr < (H5_NFILTERS_IMPL - 1))
+ iter->dset_comptype[fltr]++;
+ else
+ iter->dset_comptype[H5_NFILTERS_IMPL - 1]++; /*other filters*/
+ } /* end for */
+ } /* endif nfltr */
+
+ ret = H5Pclose(dcpl);
+ assert(ret >= 0);
+
+ ret = H5Dclose(did);
+ assert(ret >= 0);
+
+ return 0;
+} /* end dataset_stats() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: walk
+ *
+ * Purpose: Gather statistics about the file
+ *
+ * Return: Success: 0
+ * Failure: -1
+ *
+ * Programmer: Quincey Koziol
+ * Tuesday, August 16, 2005
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+walk(hid_t group, const char *name, void *_iter)
+{
+ iter_t *iter = (iter_t *)_iter;
+ H5O_info_t oi;
+ char *fullname = NULL;
+ char *s;
+ herr_t ret; /* Generic return value */
+
+ /* Get the full object name */
+ fullname = fix_name(iter->container, name);
+
+ /* Get object information */
+ ret = H5Oget_info(group, name, &oi, H5P_DEFAULT);
+ assert(ret >= 0);
+
+    /* If the object has already been seen then just report the name under
+     * which it was first encountered (i.e. it is a hard link) and return. */
+ if((s = sym_lookup(&oi))) {
+ printf("%s same as %s\n", name, s);
+ } else {
+ sym_insert(&oi, fullname);
+
+ /* Gather some statistics about the object */
+ if(oi.rc > iter->max_links)
+ iter->max_links = oi.rc;
+
+ switch(oi.type) {
+ case H5G_GROUP:
+ group_stats(group, name, fullname, &oi, walk, iter);
+ break;
+
+ case H5G_DATASET:
+ dataset_stats(group, name, &oi, iter);
+ break;
+
+ case H5G_TYPE:
+ /* Gather statistics about this type of object */
+ iter->uniq_types++;
+ break;
+
+ case H5G_LINK:
+ /* Gather statistics about links and UD links */
+ iter->uniq_links++;
+ break;
+
+ default:
+ /* Gather statistics about this type of object */
+ iter->uniq_others++;
+ break;
+ } /* end switch */
+ }
+
+ if(fullname)
+ free(fullname);
+
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: parse_command_line
+ *
+ * Purpose: Parses the command line and sets up the global variables that control output
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Elena Pourmal
+ * Saturday, August 12, 2006
+ *
+ * Modifications:
+ * Vailin Choi 12 July 2007
+ * Added 'A' option to display attribute info
+ *
+ *-------------------------------------------------------------------------
+ */
+static struct handler_t *
+parse_command_line(int argc, const char *argv[])
+{
+ int opt, i;
+ struct handler_t *hand;
+
+ /* Allocate space to hold the command line info */
+ hand = calloc((size_t)argc, sizeof(struct handler_t));
+
+ /* parse command line options */
+ while ((opt = get_option(argc, argv, s_opts, l_opts)) != EOF) {
+ switch ((char)opt) {
+ case 'A':
+ display_all = FALSE;
+ display_attr = TRUE;
+ break;
+ case 'F':
+ display_all = FALSE;
+ display_file_metadata = TRUE;
+ break;
+ case 'f':
+ display_all = FALSE;
+ display_file = TRUE;
+ break;
+ case 'G':
+ display_all = FALSE;
+ display_group_metadata = TRUE;
+ break;
+ case 'g':
+ display_all = FALSE;
+ display_group = TRUE;
+ break;
+ case 'T':
+ display_all = FALSE;
+ display_dtype_metadata = TRUE;
+ break;
+ case 'D':
+ display_all = FALSE;
+ display_dset_metadata = TRUE;
+ break;
+ case 'd':
+ display_all = FALSE;
+ display_dset = TRUE;
+ break;
+ case 'h':
+ usage(progname);
+ leave(EXIT_SUCCESS);
+ case 'V':
+ print_version(progname);
+ leave(EXIT_SUCCESS);
+ break;
+ case 'O':
+ display_object = TRUE;
+ for (i = 0; i < argc; i++)
+ if (!hand[i].obj) {
+ hand[i].obj = HDstrdup(opt_arg);
+ hand[i].flag = 1;
+ break;
+ }
+ break;
+ default:
+ usage(progname);
+ leave(EXIT_FAILURE);
+ }
+ }
+
+ /* check for file name to be processed */
+ if (argc <= opt_ind) {
+ error_msg(progname, "missing file name\n");
+ usage(progname);
+ leave(EXIT_FAILURE);
+ }
+ return hand;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: iter_init
+ *
+ * Purpose: Initialize iter structure
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Elena Pourmal
+ * Saturday, August 12, 2006
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+iter_init(iter_t *iter)
+{
+ /* Clear everything to zeros */
+ memset(iter, 0, sizeof(*iter));
+
+ /* Initialize non-zero information */
+ iter->container = "/";
+
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: print_file_info
+ *
+ * Purpose: Prints information about file
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Elena Pourmal
+ * Saturday, August 12, 2006
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+print_file_info(const iter_t *iter)
+{
+ printf("File information\n");
+ printf("\t# of unique groups: %lu\n", iter->uniq_groups);
+ printf("\t# of unique datasets: %lu\n", iter->uniq_dsets);
+ printf("\t# of unique named dataypes: %lu\n", iter->uniq_types);
+ printf("\t# of unique links: %lu\n", iter->uniq_links);
+ printf("\t# of unique other: %lu\n", iter->uniq_others);
+ printf("\tMax. # of links to object: %lu\n", iter->max_links);
+ printf("\tMax. depth of hierarchy: %lu\n", iter->max_depth);
+ HDfprintf(stdout, "\tMax. # of objects in group: %Hu\n", iter->max_fanout);
+
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: print_file_metadata
+ *
+ * Purpose: Prints metadata information about file
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Elena Pourmal
+ * Saturday, August 12, 2006
+ *
+ * Modifications:
+ * Vailin Choi 12 July 2007
+ * Print storage info for:
+ * 1. btree/heap storage for groups and attributes
+ * 2. btree storage for chunked dataset
+ * 3. hdr/btree/list/heap storage for SOHM table
+ * 4. superblock extension size
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+print_file_metadata(const iter_t *iter)
+{
+ printf("Object header size: (total/unused)\n");
+ HDfprintf(stdout, "\tGroups: %Hu/%Hu\n", iter->group_ohdr_info.total_size,
+ iter->group_ohdr_info.free_size);
+ HDfprintf(stdout, "\tDatasets: %Hu/%Hu\n", iter->dset_ohdr_info.total_size,
+ iter->dset_ohdr_info.free_size);
+
+ printf("Storage information:\n");
+ HDfprintf(stdout, "\tGroups:\n");
+ HDfprintf(stdout, "\t\tB-tree/List: %Hu\n", iter->groups_btree_storage_size);
+ HDfprintf(stdout, "\t\tHeap: %Hu\n", iter->groups_heap_storage_size);
+
+ HDfprintf(stdout, "\tAttributes:\n");
+ HDfprintf(stdout, "\t\tB-tree/List: %Hu\n", iter->attrs_btree_storage_size);
+ HDfprintf(stdout, "\t\tHeap: %Hu\n", iter->attrs_heap_storage_size);
+
+ HDfprintf(stdout, "\tChunked datasets:\n");
+ HDfprintf(stdout, "\t\tB-tree: %Hu\n", iter->datasets_btree_storage_size);
+
+ HDfprintf(stdout, "\tShared Messages:\n");
+ HDfprintf(stdout, "\t\tHeader: %Hu\n", iter->SM_hdr_storage_size);
+ HDfprintf(stdout, "\t\tB-tree/List: %Hu\n", iter->SM_index_storage_size);
+ HDfprintf(stdout, "\t\tHeap: %Hu\n", iter->SM_heap_storage_size);
+
+ HDfprintf(stdout, "\tSuperblock extension: %Hu\n", iter->super_ext_size);
+
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: print_group_info
+ *
+ * Purpose: Prints information about groups in the file
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Elena Pourmal
+ * Saturday, August 12, 2006
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+print_group_info(const iter_t *iter)
+{
+ unsigned long power; /* Temporary "power" for bins */
+ unsigned long total; /* Total count for various statistics */
+ unsigned u; /* Local index variable */
+
+ printf("Small groups:\n");
+ total = 0;
+ for(u = 0; u < SIZE_SMALL_GROUPS; u++) {
+ if(iter->num_small_groups[u] > 0) {
+ printf("\t# of groups of size %u: %lu\n", u, iter->num_small_groups[u]);
+ total += iter->num_small_groups[u];
+ } /* end if */
+ } /* end for */
+ printf("\tTotal # of small groups: %lu\n", total);
+
+ printf("Group bins:\n");
+ total = 0;
+ if(iter->group_bins[0] > 0) {
+ printf("\t# of groups of size 0: %lu\n", iter->group_bins[0]);
+ total = iter->group_bins[0];
+ } /* end if */
+ power = 1;
+ for(u = 1; u < iter->group_nbins; u++) {
+ if(iter->group_bins[u] > 0) {
+ printf("\t# of groups of size %lu - %lu: %lu\n", power, (power * 10) - 1,
+ iter->group_bins[u]);
+ total += iter->group_bins[u];
+ } /* end if */
+ power *= 10;
+ } /* end for */
+ printf("\tTotal # of groups: %lu\n", total);
+
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: print_attr_info
+ *
+ * Purpose: Prints information about attributes in the file
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Vailin Choi
+ * July 12, 2007
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+print_attr_info(const iter_t *iter)
+{
+ unsigned long power; /* Temporary "power" for bins */
+ unsigned long total; /* Total count for various statistics */
+ unsigned u; /* Local index variable */
+
+ printf("Small # of attributes:\n");
+ total = 0;
+ for(u = 1; u < SIZE_SMALL_ATTRS; u++) {
+ if(iter->num_small_attrs[u] > 0) {
+ printf("\t# of objects with %u attributes: %lu\n", u, iter->num_small_attrs[u]);
+ total += iter->num_small_attrs[u];
+ } /* end if */
+ } /* end for */
+ printf("\tTotal # of objects with small # of attributes: %lu\n", total);
+
+ printf("Attribute bins:\n");
+ total = 0;
+ power = 1;
+ for(u = 1; u < iter->attr_nbins; u++) {
+ if(iter->attr_bins[u] > 0) {
+ printf("\t# of objects with %lu - %lu attributes: %lu\n", power, (power * 10) - 1,
+ iter->attr_bins[u]);
+ total += iter->attr_bins[u];
+ } /* end if */
+ power *= 10;
+ } /* end for */
+ printf("\tTotal # of objects with attributes: %lu\n", total);
+ printf("\tMax. # of attributes to objects: %lu\n", (unsigned long)iter->max_attrs);
+
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: print_dataset_info
+ *
+ * Purpose: Prints information about datasets in the file
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Elena Pourmal
+ * Saturday, August 12, 2006
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+print_dataset_info(const iter_t *iter)
+{
+ unsigned long power; /* Temporary "power" for bins */
+ unsigned long total; /* Total count for various statistics */
+ size_t dtype_size; /* Size of encoded datatype */
+ unsigned u; /* Local index variable */
+
+ if(iter->uniq_dsets > 0) {
+ printf("Dataset dimension information:\n");
+ printf("\tMax. rank of datasets: %lu\n", iter->max_dset_rank);
+ printf("\tDataset ranks:\n");
+ for(u = 0; u < H5S_MAX_RANK; u++)
+ if(iter->dset_rank_count[u] > 0)
+ printf("\t\t# of dataset with rank %u: %lu\n", u, iter->dset_rank_count[u]);
+
+ printf("1-D Dataset information:\n");
+ HDfprintf(stdout, "\tMax. dimension size of 1-D datasets: %Hu\n", iter->max_dset_dims);
+ printf("\tSmall 1-D datasets:\n");
+ total = 0;
+ for(u = 0; u < SIZE_SMALL_DSETS; u++) {
+ if(iter->small_dset_dims[u] > 0) {
+ printf("\t\t# of dataset dimensions of size %u: %lu\n", u,
+ iter->small_dset_dims[u]);
+ total += iter->small_dset_dims[u];
+ } /* end if */
+ } /* end for */
+ printf("\t\tTotal small datasets: %lu\n", total);
+
+ /* Protect against no datasets in file */
+ if(iter->dset_dim_nbins > 0) {
+ printf("\t1-D Dataset dimension bins:\n");
+ total = 0;
+ if(iter->dset_dim_bins[0] > 0) {
+ printf("\t\t# of datasets of size 0: %lu\n", iter->dset_dim_bins[0]);
+ total = iter->dset_dim_bins[0];
+ } /* end if */
+ power = 1;
+ for(u = 1; u < iter->dset_dim_nbins; u++) {
+ if(iter->dset_dim_bins[u] > 0) {
+ printf("\t\t# of datasets of size %lu - %lu: %lu\n", power, (power * 10) - 1,
+ iter->dset_dim_bins[u]);
+ total += iter->dset_dim_bins[u];
+ } /* end if */
+ power *= 10;
+ } /* end for */
+ printf("\t\tTotal # of datasets: %lu\n", total);
+ } /* end if */
+
+ printf("Dataset storage information:\n");
+ HDfprintf(stdout, "\tTotal raw data size: %Hu\n", iter->dset_storage_size);
+
+ printf("Dataset layout information:\n");
+ for(u = 0; u < H5D_NLAYOUTS; u++)
+ printf("\tDataset layout counts[%s]: %lu\n", (u == 0 ? "COMPACT" :
+ (u == 1 ? "CONTIG" : "CHUNKED")), iter->dset_layouts[u]);
+ printf("\tNumber of external files : %lu\n", iter->nexternal);
+
+ printf("Dataset filters information:\n");
+ printf("\tNumber of datasets with:\n");
+ printf("\t\tNO filter: %lu\n", iter->dset_comptype[H5Z_FILTER_ERROR+1]);
+ printf("\t\tGZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_DEFLATE]);
+ printf("\t\tSHUFFLE filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SHUFFLE]);
+ printf("\t\tFLETCHER32 filter: %lu\n", iter->dset_comptype[H5Z_FILTER_FLETCHER32]);
+ printf("\t\tSZIP filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SZIP]);
+ printf("\t\tNBIT filter: %lu\n", iter->dset_comptype[H5Z_FILTER_NBIT]);
+ printf("\t\tSCALEOFFSET filter: %lu\n", iter->dset_comptype[H5Z_FILTER_SCALEOFFSET]);
+ printf("\t\tUSER-DEFINED filter: %lu\n", iter->dset_comptype[H5_NFILTERS_IMPL-1]);
+
+ if(display_dtype_metadata) {
+ printf("Dataset datatype information:\n");
+ printf("\t# of unique datatypes used by datasets: %lu\n", iter->dset_ntypes);
+ total = 0;
+ for(u = 0; u < iter->dset_ntypes; u++) {
+ H5Tencode(iter->dset_type_info[u].tid, NULL, &dtype_size);
+ printf("\tDataset datatype #%u:\n", u);
+ printf("\t\tCount (total/named) = (%lu/%lu)\n", iter->dset_type_info[u].count, iter->dset_type_info[u].named);
+ printf("\t\tSize (desc./elmt) = (%lu/%lu)\n", (unsigned long)dtype_size,
+ (unsigned long)H5Tget_size(iter->dset_type_info[u].tid));
+ H5Tclose(iter->dset_type_info[u].tid);
+ total += iter->dset_type_info[u].count;
+ } /* end for */
+ printf("\tTotal dataset datatype count: %lu\n", total);
+ }
+ } /* end if */
+
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: print_file_statistics
+ *
+ * Purpose: Prints file statistics
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Elena Pourmal
+ * Saturday, August 12, 2006
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+print_file_statistics(const iter_t *iter)
+{
+ if(display_all) {
+ display_file = TRUE;
+ display_file_metadata = TRUE;
+ display_group = TRUE;
+ display_group_metadata = TRUE;
+ display_dset = TRUE;
+ display_dtype_metadata = TRUE;
+ display_attr = TRUE;
+ }
+
+ if(display_file) print_file_info(iter);
+ if(display_file_metadata) print_file_metadata(iter);
+ if(display_group) print_group_info(iter);
+ if(display_dset) print_dataset_info(iter);
+ if(display_attr) print_attr_info(iter);
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: print_object_statistics
+ *
+ * Purpose: Prints object statistics
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Elena Pourmal
+ * Thursday, August 17, 2006
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+print_object_statistics(const char *name)
+{
+ printf("Object name %s\n", name);
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: print_statistics
+ *
+ * Purpose: Prints statistics
+ *
+ * Return: Success: 0
+ *
+ * Failure: Never fails
+ *
+ * Programmer: Elena Pourmal
+ * Thursday, August 17, 2006
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+print_statistics(const char *name, const iter_t *iter)
+{
+ if(display_object)
+ print_object_statistics(name);
+ else
+ print_file_statistics(iter);
+}
+
+
+int
+main(int argc, const char *argv[])
+{
+ iter_t iter;
+ const char *fname = NULL;
+ hid_t fid;
+ struct handler_t *hand;
+ char root[] = "/";
+ int i;
+ H5F_info_t finfo;
+
+
+ /* Disable error reporting */
+ H5Eset_auto2(H5E_DEFAULT, NULL, NULL);
+
+ /* Initialize h5tools lib */
+ h5tools_init();
+ hand = parse_command_line (argc, argv);
+ if (!hand) {
+ error_msg(progname, "unable to parse command line arguments \n");
+ leave(EXIT_FAILURE);
+ }
+
+ fname = argv[opt_ind];
+ hand[opt_ind].obj = root;
+ hand[opt_ind].flag = 1;
+ if (display_object) hand[opt_ind].flag = 0;
+
+ printf("Filename: %s\n", fname);
+
+ fid = H5Fopen(fname, H5F_ACC_RDONLY, H5P_DEFAULT);
+ if (fid < 0) {
+ error_msg(progname, "unable to open file \"%s\"\n", fname);
+ leave(EXIT_FAILURE);
+ }
+
+ /* Initialize iter structure */
+ iter_init(&iter);
+
+    /* Get storage info for SOHM's btree/list/heap and superblock extension */
+ if(H5Fget_info(fid, &finfo) < 0)
+ warn_msg(progname, "Unable to retrieve SOHM info\n");
+ else {
+ iter.super_ext_size = finfo.super_ext_size;
+ iter.SM_hdr_storage_size = finfo.sohm.hdr_size;
+ iter.SM_index_storage_size = finfo.sohm.msgs_info.index_size;
+ iter.SM_heap_storage_size = finfo.sohm.msgs_info.heap_size;
+ }
+
+    /* Walk the specified objects or the entire file */
+ for(i = 0; i < argc; i++) {
+ if(hand[i].obj) {
+ if(hand[i].flag) {
+ walk(fid, hand[i].obj, &iter);
+ print_statistics(hand[i].obj, &iter);
+ }
+ }
+ }
+
+ free(hand);
+
+ if(H5Fclose(fid) < 0) {
+ error_msg(progname, "unable to close file \"%s\"\n", fname);
+ leave(EXIT_FAILURE);
+ }
+
+ leave(EXIT_SUCCESS);
+}
+
diff --git a/tools/h5stat/h5stat_gentest.c b/tools/h5stat/h5stat_gentest.c
new file mode 100644
index 0000000..f0219bd
--- /dev/null
+++ b/tools/h5stat/h5stat_gentest.c
@@ -0,0 +1,84 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/*
+ * Generate the binary hdf5 files for the h5stat tests.
+ * Usage: simply execute the program without any arguments; it will
+ * generate all the binary hdf5 files in the ./testfiles directory.
+ *
+ * If you regenerate the test files (e.g., changing some code,
+ * trying it on a new platform, ...), you need to verify the correctness
+ * of the expected output and update the corresponding *.ddl files.
+ */
+
+#include "hdf5.h"
+
+#define FILE "h5stat_newgrat.h5"
+#define DATASET_NAME "DATASET_NAME"
+#define GROUP_NAME "GROUP"
+#define ATTR_NAME "ATTR"
+#define NUM_GRPS 350
+#define NUM_ATTRS 100
+
+/*
+ * Generate a 1.8-format HDF5 file
+ * with NUM_GRPS groups and
+ * NUM_ATTRS attributes attached to the dataset
+ */
+static void gen_file(void)
+{
+ int ret, i;
+ hid_t fapl, gid;
+ hid_t file, type_id, space_id, attr_id, dset_id;
+ char name[30];
+ char attrname[30];
+
+
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ ret = H5Pset_latest_format(fapl,1);
+
+    /* Create file and groups */
+ file=H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
+ for (i=1; i<=NUM_GRPS; i++) {
+ sprintf(name, "%s%d", GROUP_NAME,i);
+ gid = H5Gcreate(file, name, (size_t)0);
+ H5Gclose(gid);
+ }
+
+
+ /* Create a datatype to commit and use */
+ type_id=H5Tcopy(H5T_NATIVE_INT);
+ /* Create dataspace for dataset */
+ space_id=H5Screate(H5S_SCALAR);
+ /* Create dataset */
+ dset_id=H5Dcreate(file, DATASET_NAME,type_id,space_id,H5P_DEFAULT);
+ for (i=1; i<=NUM_ATTRS; i++) {
+ sprintf(attrname, "%s%d", ATTR_NAME,i);
+ attr_id=H5Acreate(dset_id,attrname, type_id,space_id,H5P_DEFAULT);
+ ret=H5Aclose(attr_id);
+ }
+
+ ret=H5Dclose(dset_id);
+ ret=H5Sclose(space_id);
+ ret=H5Tclose(type_id);
+ ret=H5Fclose(file);
+}
+
+int main(void)
+{
+ gen_file();
+
+ return 0;
+}
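
The generator above uses the 1.6-style create calls (H5Gcreate, H5Dcreate, H5Acreate) together with H5Pset_latest_format() to force the 1.8 file format. For orientation only, a rough equivalent written against the explicit 1.8 API calls might look like the sketch below; it is illustrative (error checking omitted) and not part of this commit.

    #include <stdio.h>
    #include "hdf5.h"

    /* Illustrative 1.8-API variant of gen_file(); same file layout:
     * 350 empty groups plus one scalar dataset carrying 100 attributes. */
    static void gen_file_18(void)
    {
        hid_t fapl, file, gid, type_id, space_id, dset_id, attr_id;
        char  name[30];
        int   i;

        fapl = H5Pcreate(H5P_FILE_ACCESS);
        H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);

        file = H5Fcreate("h5stat_newgrat.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
        for(i = 1; i <= 350; i++) {
            sprintf(name, "GROUP%d", i);
            gid = H5Gcreate2(file, name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
            H5Gclose(gid);
        }

        type_id  = H5Tcopy(H5T_NATIVE_INT);
        space_id = H5Screate(H5S_SCALAR);
        dset_id  = H5Dcreate2(file, "DATASET_NAME", type_id, space_id,
                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        for(i = 1; i <= 100; i++) {
            sprintf(name, "ATTR%d", i);
            attr_id = H5Acreate2(dset_id, name, type_id, space_id,
                                 H5P_DEFAULT, H5P_DEFAULT);
            H5Aclose(attr_id);
        }

        H5Dclose(dset_id);
        H5Sclose(space_id);
        H5Tclose(type_id);
        H5Pclose(fapl);
        H5Fclose(file);
    }

    int main(void)
    {
        gen_file_18();
        return 0;
    }
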
diff --git a/tools/h5stat/testfiles/h5stat_filters-F.ddl b/tools/h5stat/testfiles/h5stat_filters-F.ddl
new file mode 100644
index 0000000..544d0c7
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_filters-F.ddl
@@ -0,0 +1,21 @@
+#############################
+Expected output for 'h5stat -F h5stat_filters.h5'
+#############################
+Filename: h5stat_filters.h5
+Object header size: (total/unused)
+ Groups: 48/8
+ Datasets: 4936/1344
+Storage information:
+ Groups:
+ B-tree/List: 1200
+ Heap: 288
+ Attributes:
+ B-tree/List: 0
+ Heap: 0
+ Chunked datasets:
+ B-tree: 31392
+ Shared Messages:
+ Header: 0
+ B-tree/List: 0
+ Heap: 0
+ Superblock extension: 0
diff --git a/tools/h5stat/testfiles/h5stat_filters-d.ddl b/tools/h5stat/testfiles/h5stat_filters-d.ddl
new file mode 100644
index 0000000..e83c6c4
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_filters-d.ddl
@@ -0,0 +1,33 @@
+#############################
+Expected output for 'h5stat -d h5stat_filters.h5'
+#############################
+Filename: h5stat_filters.h5
+Dataset dimension information:
+ Max. rank of datasets: 2
+ Dataset ranks:
+ # of dataset with rank 1: 1
+ # of dataset with rank 2: 14
+1-D Dataset information:
+ Max. dimension size of 1-D datasets: 100
+ Small 1-D datasets:
+ Total small datasets: 0
+ 1-D Dataset dimension bins:
+ # of datasets of size 100 - 999: 1
+ Total # of datasets: 1
+Dataset storage information:
+ Total raw data size: 8659
+Dataset layout information:
+ Dataset layout counts[COMPACT]: 1
+ Dataset layout counts[CONTIG]: 2
+ Dataset layout counts[CHUNKED]: 12
+ Number of external files : 2
+Dataset filters information:
+ Number of datasets with:
+ NO filter: 7
+ GZIP filter: 2
+ SHUFFLE filter: 2
+ FLETCHER32 filter: 2
+ SZIP filter: 2
+ NBIT filter: 2
+ SCALEOFFSET filter: 1
+ USER-DEFINED filter: 1
diff --git a/tools/h5stat/testfiles/h5stat_filters-dT.ddl b/tools/h5stat/testfiles/h5stat_filters-dT.ddl
new file mode 100644
index 0000000..62735f0
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_filters-dT.ddl
@@ -0,0 +1,42 @@
+#############################
+Expected output for 'h5stat -dT h5stat_filters.h5'
+#############################
+Filename: h5stat_filters.h5
+Dataset dimension information:
+ Max. rank of datasets: 2
+ Dataset ranks:
+ # of dataset with rank 1: 1
+ # of dataset with rank 2: 14
+1-D Dataset information:
+ Max. dimension size of 1-D datasets: 100
+ Small 1-D datasets:
+ Total small datasets: 0
+ 1-D Dataset dimension bins:
+ # of datasets of size 100 - 999: 1
+ Total # of datasets: 1
+Dataset storage information:
+ Total raw data size: 8659
+Dataset layout information:
+ Dataset layout counts[COMPACT]: 1
+ Dataset layout counts[CONTIG]: 2
+ Dataset layout counts[CHUNKED]: 12
+ Number of external files : 2
+Dataset filters information:
+ Number of datasets with:
+ NO filter: 7
+ GZIP filter: 2
+ SHUFFLE filter: 2
+ FLETCHER32 filter: 2
+ SZIP filter: 2
+ NBIT filter: 2
+ SCALEOFFSET filter: 1
+ USER-DEFINED filter: 1
+Dataset datatype information:
+ # of unique datatypes used by datasets: 2
+ Dataset datatype #0:
+ Count (total/named) = (14/0)
+ Size (desc./elmt) = (14/4)
+ Dataset datatype #1:
+ Count (total/named) = (1/0)
+ Size (desc./elmt) = (14/4)
+ Total dataset datatype count: 15
diff --git a/tools/h5stat/testfiles/h5stat_filters-file.ddl b/tools/h5stat/testfiles/h5stat_filters-file.ddl
new file mode 100644
index 0000000..c6aeef0
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_filters-file.ddl
@@ -0,0 +1,13 @@
+#############################
+Expected output for 'h5stat -f h5stat_filters.h5'
+#############################
+Filename: h5stat_filters.h5
+File information
+ # of unique groups: 1
+ # of unique datasets: 15
+	# of unique named datatypes: 1
+ # of unique links: 0
+ # of unique other: 0
+ Max. # of links to object: 1
+ Max. depth of hierarchy: 0
+ Max. # of objects in group: 16
diff --git a/tools/h5stat/testfiles/h5stat_filters-g.ddl b/tools/h5stat/testfiles/h5stat_filters-g.ddl
new file mode 100644
index 0000000..d488b8f
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_filters-g.ddl
@@ -0,0 +1,9 @@
+#############################
+Expected output for 'h5stat -g h5stat_filters.h5'
+#############################
+Filename: h5stat_filters.h5
+Small groups:
+ Total # of small groups: 0
+Group bins:
+ # of groups of size 10 - 99: 1
+ Total # of groups: 1
diff --git a/tools/h5stat/testfiles/h5stat_filters.ddl b/tools/h5stat/testfiles/h5stat_filters.ddl
new file mode 100644
index 0000000..4dd6aee
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_filters.ddl
@@ -0,0 +1,78 @@
+#############################
+Expected output for 'h5stat h5stat_filters.h5'
+#############################
+Filename: h5stat_filters.h5
+File information
+ # of unique groups: 1
+ # of unique datasets: 15
+	# of unique named datatypes: 1
+ # of unique links: 0
+ # of unique other: 0
+ Max. # of links to object: 1
+ Max. depth of hierarchy: 0
+ Max. # of objects in group: 16
+Object header size: (total/unused)
+ Groups: 48/8
+ Datasets: 4936/1344
+Storage information:
+ Groups:
+ B-tree/List: 1200
+ Heap: 288
+ Attributes:
+ B-tree/List: 0
+ Heap: 0
+ Chunked datasets:
+ B-tree: 31392
+ Shared Messages:
+ Header: 0
+ B-tree/List: 0
+ Heap: 0
+ Superblock extension: 0
+Small groups:
+ Total # of small groups: 0
+Group bins:
+ # of groups of size 10 - 99: 1
+ Total # of groups: 1
+Dataset dimension information:
+ Max. rank of datasets: 2
+ Dataset ranks:
+ # of dataset with rank 1: 1
+ # of dataset with rank 2: 14
+1-D Dataset information:
+ Max. dimension size of 1-D datasets: 100
+ Small 1-D datasets:
+ Total small datasets: 0
+ 1-D Dataset dimension bins:
+ # of datasets of size 100 - 999: 1
+ Total # of datasets: 1
+Dataset storage information:
+ Total raw data size: 8659
+Dataset layout information:
+ Dataset layout counts[COMPACT]: 1
+ Dataset layout counts[CONTIG]: 2
+ Dataset layout counts[CHUNKED]: 12
+ Number of external files : 2
+Dataset filters information:
+ Number of datasets with:
+ NO filter: 7
+ GZIP filter: 2
+ SHUFFLE filter: 2
+ FLETCHER32 filter: 2
+ SZIP filter: 2
+ NBIT filter: 2
+ SCALEOFFSET filter: 1
+ USER-DEFINED filter: 1
+Dataset datatype information:
+ # of unique datatypes used by datasets: 2
+ Dataset datatype #0:
+ Count (total/named) = (14/0)
+ Size (desc./elmt) = (14/4)
+ Dataset datatype #1:
+ Count (total/named) = (1/0)
+ Size (desc./elmt) = (14/4)
+ Total dataset datatype count: 15
+Small # of attributes:
+ Total # of objects with small # of attributes: 0
+Attribute bins:
+ Total # of objects with attributes: 0
+ Max. # of attributes to objects: 0
diff --git a/tools/h5stat/testfiles/h5stat_filters.h5 b/tools/h5stat/testfiles/h5stat_filters.h5
new file mode 100644
index 0000000..5b5f4bb
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_filters.h5
Binary files differ
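
h5stat_filters.h5 (a copy of the library's tfilters.h5, as noted in testh5stat.sh.in below) supplies the per-filter counts shown in the .ddl files above. For orientation only, the sketch below shows roughly how one chunked dataset acquires a shuffle + deflate + fletcher32 pipeline so that "h5stat -d" counts it under those filters; the file and dataset names are example values, not the code that produced the test file.

    #include "hdf5.h"

    int main(void)
    {
        hsize_t dims[2]  = {20, 10};
        hsize_t chunk[2] = {10, 5};
        hid_t   file, space, dcpl, dset;

        file  = H5Fcreate("filters_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        space = H5Screate_simple(2, dims, NULL);

        dcpl = H5Pcreate(H5P_DATASET_CREATE);
        H5Pset_chunk(dcpl, 2, chunk);       /* filters require a chunked layout */
        H5Pset_shuffle(dcpl);               /* H5Z_FILTER_SHUFFLE */
        H5Pset_deflate(dcpl, 6);            /* H5Z_FILTER_DEFLATE (GZIP) */
        H5Pset_fletcher32(dcpl);            /* H5Z_FILTER_FLETCHER32 */

        dset = H5Dcreate(file, "filtered_dset", H5T_NATIVE_INT, space, dcpl);

        H5Dclose(dset);
        H5Pclose(dcpl);
        H5Sclose(space);
        H5Fclose(file);
        return 0;
    }
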
diff --git a/tools/h5stat/testfiles/h5stat_help1.ddl b/tools/h5stat/testfiles/h5stat_help1.ddl
new file mode 100644
index 0000000..0841572
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_help1.ddl
@@ -0,0 +1,24 @@
+#############################
+Expected output for 'h5stat -h'
+#############################
+
+This tool is under development. For detailed information
+please see the specification document at
+http://hdf.ncsa.uiuc.edu/RFC/h5stat/h5stat-spec.pdf
+
+Please send your comments and questions to help@hdfgroup.org
+
+Usage: h5stat [OPTIONS] file
+
+ OPTIONS
+ -h, --help Print a usage message and exit
+ -V, --version Print version number and exit
+ -f, --file Print file information
+ -F, --filemetadata Print file metadata
+ -g, --group Print group information
+ -G, --groupmetadata Print group metadata
+ -d, --dset Print dataset information
+ -D, --dsetmetadata Print dataset metadata
+ -T, --dtypemetadata Print datatype metadata
+ -A, --attribute Print attribute information
+
diff --git a/tools/h5stat/testfiles/h5stat_help2.ddl b/tools/h5stat/testfiles/h5stat_help2.ddl
new file mode 100644
index 0000000..1e6295b
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_help2.ddl
@@ -0,0 +1,24 @@
+#############################
+Expected output for 'h5stat --help'
+#############################
+
+This tool is under development. For detailed information
+please see the specification document at
+http://hdf.ncsa.uiuc.edu/RFC/h5stat/h5stat-spec.pdf
+
+Please send your comments and questions to help@hdfgroup.org
+
+Usage: h5stat [OPTIONS] file
+
+ OPTIONS
+ -h, --help Print a usage message and exit
+ -V, --version Print version number and exit
+ -f, --file Print file information
+ -F, --filemetadata Print file metadata
+ -g, --group Print group information
+ -G, --groupmetadata Print group metadata
+ -d, --dset Print dataset information
+ -D, --dsetmetadata Print dataset metadata
+ -T, --dtypemetadata Print datatype metadata
+ -A, --attribute Print attribute information
+
diff --git a/tools/h5stat/testfiles/h5stat_newgrat.ddl b/tools/h5stat/testfiles/h5stat_newgrat.ddl
new file mode 100644
index 0000000..228d565
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_newgrat.ddl
@@ -0,0 +1,74 @@
+#############################
+Expected output for 'h5stat h5stat_newgrat.h5'
+#############################
+Filename: h5stat_newgrat.h5
+File information
+ # of unique groups: 351
+ # of unique datasets: 1
+	# of unique named datatypes: 0
+ # of unique links: 0
+ # of unique other: 0
+ Max. # of links to object: 1
+ Max. depth of hierarchy: 1
+ Max. # of objects in group: 351
+Object header size: (total/unused)
+ Groups: 51597/32292
+ Datasets: 414/312
+Storage information:
+ Groups:
+ B-tree/List: 5158
+ Heap: 7643
+ Attributes:
+ B-tree/List: 2598
+ Heap: 4442
+ Chunked datasets:
+ B-tree: 0
+ Shared Messages:
+ Header: 0
+ B-tree/List: 0
+ Heap: 0
+ Superblock extension: 0
+Small groups:
+ # of groups of size 0: 350
+ Total # of small groups: 350
+Group bins:
+ # of groups of size 0: 350
+ # of groups of size 100 - 999: 1
+ Total # of groups: 351
+Dataset dimension information:
+ Max. rank of datasets: 0
+ Dataset ranks:
+ # of dataset with rank 0: 1
+1-D Dataset information:
+ Max. dimension size of 1-D datasets: 0
+ Small 1-D datasets:
+ Total small datasets: 0
+Dataset storage information:
+ Total raw data size: 0
+Dataset layout information:
+ Dataset layout counts[COMPACT]: 0
+ Dataset layout counts[CONTIG]: 1
+ Dataset layout counts[CHUNKED]: 0
+ Number of external files : 0
+Dataset filters information:
+ Number of datasets with:
+ NO filter: 1
+ GZIP filter: 0
+ SHUFFLE filter: 0
+ FLETCHER32 filter: 0
+ SZIP filter: 0
+ NBIT filter: 0
+ SCALEOFFSET filter: 0
+ USER-DEFINED filter: 0
+Dataset datatype information:
+ # of unique datatypes used by datasets: 1
+ Dataset datatype #0:
+ Count (total/named) = (1/0)
+ Size (desc./elmt) = (14/4)
+ Total dataset datatype count: 1
+Small # of attributes:
+ Total # of objects with small # of attributes: 0
+Attribute bins:
+ # of objects with 100 - 999 attributes: 1
+ Total # of objects with attributes: 1
+ Max. # of attributes to objects: 100
diff --git a/tools/h5stat/testfiles/h5stat_newgrat.h5 b/tools/h5stat/testfiles/h5stat_newgrat.h5
new file mode 100644
index 0000000..cab98bb
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_newgrat.h5
Binary files differ
diff --git a/tools/h5stat/testfiles/h5stat_tsohm.ddl b/tools/h5stat/testfiles/h5stat_tsohm.ddl
new file mode 100644
index 0000000..572965e
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_tsohm.ddl
@@ -0,0 +1,72 @@
+#############################
+Expected output for 'h5stat h5stat_tsohm.h5'
+#############################
+Filename: h5stat_tsohm.h5
+File information
+ # of unique groups: 1
+ # of unique datasets: 2
+	# of unique named datatypes: 0
+ # of unique links: 0
+ # of unique other: 0
+ Max. # of links to object: 1
+ Max. depth of hierarchy: 0
+ Max. # of objects in group: 2
+Object header size: (total/unused)
+ Groups: 51/2
+ Datasets: 568/358
+Storage information:
+ Groups:
+ B-tree/List: 872
+ Heap: 120
+ Attributes:
+ B-tree/List: 0
+ Heap: 0
+ Chunked datasets:
+ B-tree: 0
+ Shared Messages:
+ Header: 38
+ B-tree/List: 550
+ Heap: 1316
+ Superblock extension: 40
+Small groups:
+ # of groups of size 2: 1
+ Total # of small groups: 1
+Group bins:
+ # of groups of size 1 - 9: 1
+ Total # of groups: 1
+Dataset dimension information:
+ Max. rank of datasets: 2
+ Dataset ranks:
+ # of dataset with rank 2: 2
+1-D Dataset information:
+ Max. dimension size of 1-D datasets: 0
+ Small 1-D datasets:
+ Total small datasets: 0
+Dataset storage information:
+ Total raw data size: 0
+Dataset layout information:
+ Dataset layout counts[COMPACT]: 0
+ Dataset layout counts[CONTIG]: 0
+ Dataset layout counts[CHUNKED]: 2
+ Number of external files : 0
+Dataset filters information:
+ Number of datasets with:
+ NO filter: 2
+ GZIP filter: 0
+ SHUFFLE filter: 0
+ FLETCHER32 filter: 0
+ SZIP filter: 0
+ NBIT filter: 0
+ SCALEOFFSET filter: 0
+ USER-DEFINED filter: 0
+Dataset datatype information:
+ # of unique datatypes used by datasets: 1
+ Dataset datatype #0:
+ Count (total/named) = (2/0)
+ Size (desc./elmt) = (14/4)
+ Total dataset datatype count: 2
+Small # of attributes:
+ Total # of objects with small # of attributes: 0
+Attribute bins:
+ Total # of objects with attributes: 0
+ Max. # of attributes to objects: 0
diff --git a/tools/h5stat/testfiles/h5stat_tsohm.h5 b/tools/h5stat/testfiles/h5stat_tsohm.h5
new file mode 100644
index 0000000..45ee36c
--- /dev/null
+++ b/tools/h5stat/testfiles/h5stat_tsohm.h5
Binary files differ
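
h5stat_tsohm.h5 exercises the "Shared Messages" storage figures above; per the comment in testh5stat.sh.in below it is a copy of the library's tsohm.c test file. As a rough illustration only (not the actual tsohm.c code, and with arbitrary example settings), a file gets a SOHM table by enabling a shared-message index in its file creation property list:

    #include "hdf5.h"

    int main(void)
    {
        hid_t fcpl, file;

        fcpl = H5Pcreate(H5P_FILE_CREATE);
        H5Pset_shared_mesg_nindexes(fcpl, 1);
        /* Index 0 shares datatype and dataspace messages of 32 bytes or more */
        H5Pset_shared_mesg_index(fcpl, 0,
                                 H5O_SHMESG_DTYPE_FLAG | H5O_SHMESG_SDSPACE_FLAG,
                                 32);

        file = H5Fcreate("sohm_example.h5", H5F_ACC_TRUNC, fcpl, H5P_DEFAULT);
        /* ... create datasets here so that messages actually get shared ... */
        H5Fclose(file);
        H5Pclose(fcpl);
        return 0;
    }
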
diff --git a/tools/h5stat/testh5stat.sh.in b/tools/h5stat/testh5stat.sh.in
new file mode 100644
index 0000000..66d05be
--- /dev/null
+++ b/tools/h5stat/testh5stat.sh.in
@@ -0,0 +1,135 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+# Tests for the h5stat tool
+
+# Determine which filters are available
+USE_FILTER_SZIP="@USE_FILTER_SZIP@"
+USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
+USE_FILTER_SHUFFLE="@USE_FILTER_SHUFFLE@"
+USE_FILTER_FLETCHER32="@USE_FILTER_FLETCHER32@"
+USE_FILTER_NBIT="@USE_FILTER_NBIT@"
+USE_FILTER_SCALEOFFSET="@USE_FILTER_SCALEOFFSET@"
+
+STAT=h5stat # The tool name
+STAT_BIN=`pwd`/$STAT # The path of the tool binary
+
+CMP='cmp -s'
+DIFF='diff -c'
+
+nerrors=0
+verbose=yes
+
+# The build (current) directory might be different than the source directory.
+if test -z "$srcdir"; then
+ srcdir=.
+fi
+
+test -d ./testfiles || mkdir ./testfiles
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# Run a test and print PASS or *FAIL*. If a test fails then increment
+# the `nerrors' global variable and (if $verbose is set) display the
+# difference between the actual output and the expected output. The
+# expected output is given as the first argument to this function and
+# the actual output file is calculated by replacing the `.ddl' with
+# `.out'. The actual output is not removed if $HDF5_NOCLEANUP has a
+# non-zero value.
+#
+TOOLTEST() {
+ expect="$srcdir/testfiles/$1"
+ actual="./testfiles/`basename $1 .ddl`.out"
+ actual_err="./testfiles/`basename $1 .ddl`.err"
+ shift
+
+ # Run test.
+ TESTING $STAT $@
+ (
+ echo "#############################"
+ echo "Expected output for '$STAT $@'"
+ echo "#############################"
+ cd $srcdir/testfiles
+ $RUNSERIAL $STAT_BIN $@
+ ) >$actual 2>$actual_err
+ cat $actual_err >> $actual
+
+
+ if [ ! -f $expect ]; then
+ # Create the expect file if it doesn't yet exist.
+ echo " CREATED"
+ cp $actual $expect
+ elif $CMP $expect $actual; then
+ echo " PASSED"
+ else
+ echo "*FAILED*"
+ echo " Expected result (*.ddl) differs from actual result (*.out)"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF $expect $actual |sed 's/^/ /'
+ fi
+
+ # Clean up output file
+ if test -z "$HDF5_NOCLEANUP"; then
+ rm -f $actual $actual_err
+ fi
+}
+
+
+# Print a "SKIP" message
+SKIP() {
+ TESTING $STAT $@
+ echo " -SKIP-"
+}
+
+
+
+##############################################################################
+##############################################################################
+### T H E T E S T S ###
+##############################################################################
+##############################################################################
+
+# Test for help flag
+TOOLTEST h5stat_help1.ddl -h
+TOOLTEST h5stat_help2.ddl --help
+
+# Test file with groups, compressed datasets, user-applied filters, etc.
+# h5stat_filters.h5 is a copy of ../../testfiles/tfilters.h5 as of release 1.8.0-alpha4
+TOOLTEST h5stat_filters.ddl h5stat_filters.h5
+TOOLTEST h5stat_filters-file.ddl -f h5stat_filters.h5
+TOOLTEST h5stat_filters-F.ddl -F h5stat_filters.h5
+TOOLTEST h5stat_filters-d.ddl -d h5stat_filters.h5
+TOOLTEST h5stat_filters-g.ddl -g h5stat_filters.h5
+TOOLTEST h5stat_filters-dT.ddl -dT h5stat_filters.h5
+# h5stat_tsohm.h5 is a copy of ../../../test/tsohm.h5 generated by tsohm.c
+# as of release 1.8.0-alpha4
+TOOLTEST h5stat_tsohm.ddl h5stat_tsohm.h5
+# h5stat_newgrat.h5 is generated by h5stat_gentest.c
+TOOLTEST h5stat_newgrat.ddl h5stat_newgrat.h5
+echo
+
+
+if test $nerrors -eq 0 ; then
+ echo "All $STAT tests passed."
+fi
+
+exit $nerrors
+