summaryrefslogtreecommitdiffstats
path: root/hl/test
diff options
context:
space:
mode:
Diffstat (limited to 'hl/test')
-rw-r--r--  hl/test/Makefile.am              15
-rw-r--r--  hl/test/Makefile.in              75
-rw-r--r--  hl/test/gen_test_ld.c           360
-rw-r--r--  hl/test/ld_extend.c             151
-rw-r--r--  hl/test/ld_monitor.c            148
-rw-r--r--  hl/test/test_ld.c              1397
-rw-r--r--  hl/test/test_ld.h5              bin  0 -> 28336 bytes
-rw-r--r--  hl/test/test_ld.sh.in            94
-rw-r--r--  hl/test/testfiles/test_ld_out1   31
-rw-r--r--  hl/test/testfiles/test_ld_out2   72
10 files changed, 2323 insertions, 20 deletions
diff --git a/hl/test/Makefile.am b/hl/test/Makefile.am
index 86e3318..013db18 100644
--- a/hl/test/Makefile.am
+++ b/hl/test/Makefile.am
@@ -1,4 +1,3 @@
-#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
@@ -24,20 +23,25 @@ include $(top_srcdir)/config/commence.am
# Add include directories to C preprocessor flags
AM_CPPFLAGS+=-I. -I$(srcdir) -I$(top_builddir)/src -I$(top_srcdir)/src -I$(top_builddir)/test -I$(top_srcdir)/test -I$(top_srcdir)/hl/src
+# Test script
+TEST_SCRIPT = test_ld.sh
+check_SCRIPTS = $(TEST_SCRIPT)
+SCRIPT_DEPEND = ld_monitor$(EXEEXT) ld_extend$(EXEEXT)
+
# The tests depend on the hdf5, hdf5 test, and hdf5_hl libraries
LDADD=$(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
# Test programs. These are our main targets. They should be listed in the
# order to be executed, generally most specific tests to least specific tests.
-TEST_PROG=test_lite test_image test_table test_ds test_packet
-check_PROGRAMS=$(TEST_PROG)
+TEST_PROG=test_lite test_image test_table test_ds test_packet test_ld
+check_PROGRAMS=$(TEST_PROG) ld_monitor ld_extend
# These programs generate test files for the tests. They don't need to be
# compiled every time we want to test the library. However, putting
# them in a conditional causes automake to generate rules so that they
# can be built by hand. They can also be built by specifying
# --enable-build-all at configure time.
-BUILD_ALL_PROGS=gen_test_ds
+BUILD_ALL_PROGS=gen_test_ds gen_test_ld
if BUILD_ALL_CONDITIONAL
noinst_PROGRAMS=$(BUILD_ALL_PROGS)
@@ -46,6 +50,7 @@ endif
# Temporary files. These files are the ones created by running `make test'.
CHECK_CLEANFILES+=combine_tables[1-2].h5 test_ds[1-9].h5 test_image[1-3].h5 \
test_lite[1-2].h5 test_table.h5 test_packet_table.h5 \
- test_packet_compress.h5 test_detach.h5
+ test_packet_compress.h5 test_detach.h5 test_ld.h5
+DISTCLEANFILES=test_ld.sh
include $(top_srcdir)/config/conclude.am
diff --git a/hl/test/Makefile.in b/hl/test/Makefile.in
index 4e639a6..31e1cd5 100644
--- a/hl/test/Makefile.in
+++ b/hl/test/Makefile.in
@@ -15,7 +15,6 @@
@SET_MAKE@
-#
# Copyright by The HDF Group.
# Copyright by the Board of Trustees of the University of Illinois.
# All rights reserved.
@@ -54,11 +53,13 @@ POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
DIST_COMMON = $(srcdir)/H5srcdir_str.h.in $(srcdir)/Makefile.am \
- $(srcdir)/Makefile.in $(top_srcdir)/config/commence.am \
+ $(srcdir)/Makefile.in $(srcdir)/test_ld.sh.in \
+ $(top_srcdir)/config/commence.am \
$(top_srcdir)/config/conclude.am COPYING
-check_PROGRAMS = $(am__EXEEXT_1)
+check_PROGRAMS = $(am__EXEEXT_1) ld_monitor$(EXEEXT) \
+ ld_extend$(EXEEXT)
@BUILD_ALL_CONDITIONAL_TRUE@noinst_PROGRAMS = $(am__EXEEXT_2)
-TESTS = $(check_PROGRAMS)
+TESTS = $(check_PROGRAMS) $(check_SCRIPTS)
subdir = hl/test
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/configure.in
@@ -66,16 +67,29 @@ am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
mkinstalldirs = $(SHELL) $(top_srcdir)/bin/mkinstalldirs
CONFIG_HEADER = $(top_builddir)/src/H5config.h
-CONFIG_CLEAN_FILES = H5srcdir_str.h
+CONFIG_CLEAN_FILES = H5srcdir_str.h test_ld.sh
CONFIG_CLEAN_VPATH_FILES =
am__EXEEXT_1 = test_lite$(EXEEXT) test_image$(EXEEXT) \
- test_table$(EXEEXT) test_ds$(EXEEXT) test_packet$(EXEEXT)
-am__EXEEXT_2 = gen_test_ds$(EXEEXT)
+ test_table$(EXEEXT) test_ds$(EXEEXT) test_packet$(EXEEXT) \
+ test_ld$(EXEEXT)
+am__EXEEXT_2 = gen_test_ds$(EXEEXT) gen_test_ld$(EXEEXT)
PROGRAMS = $(noinst_PROGRAMS)
gen_test_ds_SOURCES = gen_test_ds.c
gen_test_ds_OBJECTS = gen_test_ds.$(OBJEXT)
gen_test_ds_LDADD = $(LDADD)
gen_test_ds_DEPENDENCIES = $(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
+gen_test_ld_SOURCES = gen_test_ld.c
+gen_test_ld_OBJECTS = gen_test_ld.$(OBJEXT)
+gen_test_ld_LDADD = $(LDADD)
+gen_test_ld_DEPENDENCIES = $(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
+ld_extend_SOURCES = ld_extend.c
+ld_extend_OBJECTS = ld_extend.$(OBJEXT)
+ld_extend_LDADD = $(LDADD)
+ld_extend_DEPENDENCIES = $(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
+ld_monitor_SOURCES = ld_monitor.c
+ld_monitor_OBJECTS = ld_monitor.$(OBJEXT)
+ld_monitor_LDADD = $(LDADD)
+ld_monitor_DEPENDENCIES = $(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
test_ds_SOURCES = test_ds.c
test_ds_OBJECTS = test_ds.$(OBJEXT)
test_ds_LDADD = $(LDADD)
@@ -84,6 +98,10 @@ test_image_SOURCES = test_image.c
test_image_OBJECTS = test_image.$(OBJEXT)
test_image_LDADD = $(LDADD)
test_image_DEPENDENCIES = $(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
+test_ld_SOURCES = test_ld.c
+test_ld_OBJECTS = test_ld.$(OBJEXT)
+test_ld_LDADD = $(LDADD)
+test_ld_DEPENDENCIES = $(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
test_lite_SOURCES = test_lite.c
test_lite_OBJECTS = test_lite.$(OBJEXT)
test_lite_LDADD = $(LDADD)
@@ -109,10 +127,12 @@ CCLD = $(CC)
LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \
--mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \
$(LDFLAGS) -o $@
-SOURCES = gen_test_ds.c test_ds.c test_image.c test_lite.c \
- test_packet.c test_table.c
-DIST_SOURCES = gen_test_ds.c test_ds.c test_image.c test_lite.c \
- test_packet.c test_table.c
+SOURCES = gen_test_ds.c gen_test_ld.c ld_extend.c ld_monitor.c \
+ test_ds.c test_image.c test_ld.c test_lite.c test_packet.c \
+ test_table.c
+DIST_SOURCES = gen_test_ds.c gen_test_ld.c ld_extend.c ld_monitor.c \
+ test_ds.c test_image.c test_ld.c test_lite.c test_packet.c \
+ test_table.c
ETAGS = etags
CTAGS = ctags
am__tty_colors = \
@@ -399,21 +419,27 @@ TRACE = perl $(top_srcdir)/bin/trace
CHECK_CLEANFILES = *.chkexe *.chklog *.clog combine_tables[1-2].h5 \
test_ds[1-9].h5 test_image[1-3].h5 test_lite[1-2].h5 \
test_table.h5 test_packet_table.h5 test_packet_compress.h5 \
- test_detach.h5
+ test_detach.h5 test_ld.h5
+
+# Test script
+TEST_SCRIPT = test_ld.sh
+check_SCRIPTS = $(TEST_SCRIPT)
+SCRIPT_DEPEND = ld_monitor$(EXEEXT) ld_extend$(EXEEXT)
# The tests depend on the hdf5, hdf5 test, and hdf5_hl libraries
LDADD = $(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
# Test programs. These are our main targets. They should be listed in the
# order to be executed, generally most specific tests to least specific tests.
-TEST_PROG = test_lite test_image test_table test_ds test_packet
+TEST_PROG = test_lite test_image test_table test_ds test_packet test_ld
# These programs generate test files for the tests. They don't need to be
# compiled every time we want to test the library. However, putting
# them in a conditional causes automake to generate rules so that they
# can be built by hand. They can also be built by specifying
# --enable-build-all at configure time.
-BUILD_ALL_PROGS = gen_test_ds
+BUILD_ALL_PROGS = gen_test_ds gen_test_ld
+DISTCLEANFILES = test_ld.sh
# Automake needs to be taught how to build lib, progs, and tests targets.
# These will be filled in automatically for the most part (e.g.,
@@ -466,6 +492,8 @@ $(ACLOCAL_M4): @MAINTAINER_MODE_TRUE@ $(am__aclocal_m4_deps)
$(am__aclocal_m4_deps):
H5srcdir_str.h: $(top_builddir)/config.status $(srcdir)/H5srcdir_str.h.in
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
+test_ld.sh: $(top_builddir)/config.status $(srcdir)/test_ld.sh.in
+ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@
clean-checkPROGRAMS:
@list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \
@@ -487,12 +515,24 @@ clean-noinstPROGRAMS:
gen_test_ds$(EXEEXT): $(gen_test_ds_OBJECTS) $(gen_test_ds_DEPENDENCIES)
@rm -f gen_test_ds$(EXEEXT)
$(LINK) $(gen_test_ds_OBJECTS) $(gen_test_ds_LDADD) $(LIBS)
+gen_test_ld$(EXEEXT): $(gen_test_ld_OBJECTS) $(gen_test_ld_DEPENDENCIES)
+ @rm -f gen_test_ld$(EXEEXT)
+ $(LINK) $(gen_test_ld_OBJECTS) $(gen_test_ld_LDADD) $(LIBS)
+ld_extend$(EXEEXT): $(ld_extend_OBJECTS) $(ld_extend_DEPENDENCIES)
+ @rm -f ld_extend$(EXEEXT)
+ $(LINK) $(ld_extend_OBJECTS) $(ld_extend_LDADD) $(LIBS)
+ld_monitor$(EXEEXT): $(ld_monitor_OBJECTS) $(ld_monitor_DEPENDENCIES)
+ @rm -f ld_monitor$(EXEEXT)
+ $(LINK) $(ld_monitor_OBJECTS) $(ld_monitor_LDADD) $(LIBS)
test_ds$(EXEEXT): $(test_ds_OBJECTS) $(test_ds_DEPENDENCIES)
@rm -f test_ds$(EXEEXT)
$(LINK) $(test_ds_OBJECTS) $(test_ds_LDADD) $(LIBS)
test_image$(EXEEXT): $(test_image_OBJECTS) $(test_image_DEPENDENCIES)
@rm -f test_image$(EXEEXT)
$(LINK) $(test_image_OBJECTS) $(test_image_LDADD) $(LIBS)
+test_ld$(EXEEXT): $(test_ld_OBJECTS) $(test_ld_DEPENDENCIES)
+ @rm -f test_ld$(EXEEXT)
+ $(LINK) $(test_ld_OBJECTS) $(test_ld_LDADD) $(LIBS)
test_lite$(EXEEXT): $(test_lite_OBJECTS) $(test_lite_DEPENDENCIES)
@rm -f test_lite$(EXEEXT)
$(LINK) $(test_lite_OBJECTS) $(test_lite_LDADD) $(LIBS)
@@ -510,8 +550,12 @@ distclean-compile:
-rm -f *.tab.c
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gen_test_ds.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/gen_test_ld.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ld_extend.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ld_monitor.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_ds.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_image.Po@am__quote@
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_ld.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_lite.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_packet.Po@am__quote@
@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/test_table.Po@am__quote@
@@ -626,7 +670,7 @@ distdir: $(DISTFILES)
fi; \
done
check-am: all-am
- $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS)
+ $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) $(check_SCRIPTS)
$(MAKE) $(AM_MAKEFLAGS) check-TESTS
check: check-am
all-am: Makefile $(PROGRAMS) all-local
@@ -652,6 +696,7 @@ clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+ -test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
diff --git a/hl/test/gen_test_ld.c b/hl/test/gen_test_ld.c
new file mode 100644
index 0000000..bf130df
--- /dev/null
+++ b/hl/test/gen_test_ld.c
@@ -0,0 +1,360 @@
+#include "hdf5.h"
+#include "H5LDprivate.h"
+#include <time.h>
+#include <sys/time.h>
+#include <sys/resource.h>
+#include <stdlib.h>
+
+/*
+ * WATCH.h5: file with various types of datasets for testing--
+ *
+ * The following datasets are chunked, H5D_ALLOC_TIME_INCR, max. dimensional setting:
+ * DSET_ONE: one-dimensional dataset
+ * DSET_TWO: two-dimensional dataset
+ * DSET_CMPD: one-dimensional dataset with compound type
+ * DSET_CMPD_ESC: one-dimensional dataset with compound type and member names with
+ * escape/separator characters
+ * DSET_CMPD_TWO: two-dimensional dataset with compound type
+ *
+ * The following datasets are one-dimensional, chunked, max. dimension setting:
+ * DSET_ALLOC_EARLY: dataset with H5D_ALLOC_TIME_EARLY
+ * DSET_ALLOC_LATE: dataset H5D_ALLOC_TIME_LATE
+ *
+ * The following datasets are one-dimensional:
+ * DSET_NONE: fixed dimension setting, contiguous, H5D_ALLOC_TIME_LATE
+ * DSET_NOMAX: fixed dimension setting, chunked, H5D_ALLOC_TIME_INCR
+ */
+#define ONE_DIMS0 10
+#define MAX_ONE_DIMS0 100
+
+#define DSET_ONE "DSET_ONE"
+#define DSET_NONE "DSET_NONE"
+#define DSET_NOMAX "DSET_NOMAX"
+#define DSET_ALLOC_LATE "DSET_ALLOC_LATE"
+#define DSET_ALLOC_EARLY "DSET_ALLOC_EARLY"
+#define DSET_CMPD "DSET_CMPD"
+#define DSET_CMPD_ESC "DSET_CMPD_ESC"
+#define DSET_NULL "DSET_NULL"
+#define DSET_SCALAR "DSET_SCALAR"
+
+#define TWO_DIMS0 4
+#define TWO_DIMS1 10
+#define MAX_TWO_DIMS0 60
+#define MAX_TWO_DIMS1 100
+
+#define DSET_TWO "DSET_TWO"
+#define DSET_CMPD_TWO "DSET_CMPD_TWO"
+
+#define CHUNK_SIZE 2
+
+#define FILE "test_ld.h5"
+
+/* Data structures for datasets with compound types */
+typedef struct sub22_t {
+ unsigned int a;
+ unsigned int b;
+ unsigned int c;
+} sub22_t;
+
+typedef struct sub2_t {
+ unsigned int a;
+ sub22_t b;
+ unsigned int c;
+} sub2_t;
+
+typedef struct sub4_t {
+ unsigned int a;
+ unsigned int b;
+} sub4_t;
+
+typedef struct set_t {
+ unsigned int field1;
+ sub2_t field2;
+ double field3;
+ sub4_t field4;
+} set_t;
+
+/*
+ **************************************************************************************
+ *
+ * Create a dataset with the given input parameters
+ * Write to the dataset with the given "data"
+ *
+ **************************************************************************************
+ */
+static int
+generate_dset(hid_t fid, const char *dname, int ndims, hsize_t *dims, hsize_t *maxdims, hid_t dtid, void *data)
+{
+ hid_t dcpl; /* Dataset creation property */
+ hid_t did; /* Dataset id */
+ hid_t sid; /* Dataspace id */
+ int i; /* Local index variable */
+ hsize_t chunk_dims[H5S_MAX_RANK]; /* Dimension sizes for chunks */
+
+ /* Create the dataspace */
+ if((sid = H5Screate_simple(ndims, dims, maxdims)) < 0)
+ goto done;
+
+ /* Set up dataset's creation properties */
+ if(!HDstrcmp(dname, DSET_NONE))
+ dcpl = H5P_DEFAULT;
+ else {
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ goto done;
+ for(i = 0; i < ndims; i++)
+ chunk_dims[i] = CHUNK_SIZE;
+ if(H5Pset_chunk(dcpl, ndims, chunk_dims) < 0)
+ goto done;
+ }
+
+ if(!HDstrcmp(dname, DSET_ALLOC_LATE)) {
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE) < 0)
+ goto done;
+ } else if(!HDstrcmp(dname, DSET_ALLOC_EARLY)) {
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0)
+ goto done;
+ }
+
+ /* Create the dataset */
+ if((did = H5Dcreate2(fid, dname, dtid, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Write to the dataset */
+ if(H5Dwrite(did, dtid, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0)
+ goto done;
+
+ /* Closing */
+ if(H5Pclose(dcpl) < 0) goto done;
+ if(H5Sclose(sid) < 0) goto done;
+ if(H5Dclose(did) < 0) goto done;
+
+ return(SUCCEED);
+
+done:
+ H5E_BEGIN_TRY
+ H5Sclose(sid);
+ H5Pclose(dcpl);
+ H5Dclose(did);
+ H5E_END_TRY
+
+ return(FAIL);
+} /* generate_dset() */
+
+int
+main(void)
+{
+ hid_t fid; /* File id */
+ hsize_t cur_dims[1]; /* Dimension sizes */
+ hsize_t max_dims[1]; /* Maximum dimension sizes */
+ hsize_t cur2_dims[2]; /* Current dimension sizes */
+ hsize_t max2_dims[2]; /* Maximum dimension sizes */
+ hid_t set_tid, esc_set_tid; /* Compound type id */
+ hid_t sub22_tid; /* Compound type id */
+ hid_t sub2_tid, esc_sub2_tid; /* Compound type id */
+ hid_t sub4_tid, esc_sub4_tid; /* Compound type id */
+ hid_t null_did, null_sid; /* H5S_NULL dataset & dataspace ids */
+ hid_t scalar_did, scalar_sid; /* H5S_SCALAR dataset & dataspace ids */
+ int one_data[ONE_DIMS0]; /* Buffer for data */
+ int two_data[TWO_DIMS0*TWO_DIMS1]; /* Buffer for data */
+ set_t one_cbuf[ONE_DIMS0]; /* Buffer for data with compound type */
+ set_t two_cbuf[TWO_DIMS0*TWO_DIMS1]; /* Buffer for data with compound type */
+ int i; /* Local index variable */
+
+ /* Create a file */
+ if((fid = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Initialization for one-dimensional dataset */
+ cur_dims[0] = ONE_DIMS0;
+ max_dims[0] = MAX_ONE_DIMS0;
+ for(i = 0; i < ONE_DIMS0; i++)
+ one_data[i] = i;
+
+ /* Generate DSET_ONE, DSET_NONE, DSET_NOMAX, DSET_ALLOC_LATE, DSET_EARLY */
+ if(generate_dset(fid, DSET_ONE, 1, cur_dims, max_dims, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_NONE, 1, cur_dims, NULL, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_NOMAX, 1, cur_dims, NULL, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_ALLOC_LATE, 1, cur_dims, max_dims, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_ALLOC_EARLY, 1, cur_dims, max_dims, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+
+ /* Initialization for two-dimensional dataset */
+ cur2_dims[0] = TWO_DIMS0;
+ cur2_dims[1] = TWO_DIMS1;
+ max2_dims[0] = MAX_TWO_DIMS0;
+ max2_dims[1] = MAX_TWO_DIMS1;
+
+ for(i = 0; i < (TWO_DIMS0 * TWO_DIMS1); i++)
+ two_data[i] = i;
+
+ /* Generate DSET_TWO */
+ if(generate_dset(fid, DSET_TWO, 2, cur2_dims, max2_dims, H5T_NATIVE_INT, two_data) < 0)
+ goto done;
+
+ /* Initialization for one-dimensional compound typed dataset */
+ cur_dims[0] = ONE_DIMS0;
+ max_dims[0] = MAX_ONE_DIMS0;
+
+ for (i = 0; i < ONE_DIMS0; i++) {
+ one_cbuf[i].field1 = 1;
+ one_cbuf[i].field2.a = 2;
+ one_cbuf[i].field2.c = 4;
+ one_cbuf[i].field2.b.a = 20;
+ one_cbuf[i].field2.b.b = 40;
+ one_cbuf[i].field2.b.c = 80;
+ one_cbuf[i].field3 = 3.0;
+ one_cbuf[i].field4.a = 4;
+ one_cbuf[i].field4.b = 8;
+ }
+
+ /* Create the compound type */
+ if((sub22_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub22_t))) < 0)
+ goto done;
+ if(H5Tinsert(sub22_tid, "a", HOFFSET(sub22_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(sub22_tid, "b", HOFFSET(sub22_t, b), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(sub22_tid, "c", HOFFSET(sub22_t, c), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((sub2_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub2_t))) < 0)
+ goto done;
+ if(H5Tinsert(sub2_tid, "a", HOFFSET(sub2_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(sub2_tid, "b", HOFFSET(sub2_t, b), sub22_tid) < 0)
+ goto done;
+ if(H5Tinsert(sub2_tid, "c", HOFFSET(sub2_t, c), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((sub4_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub4_t))) < 0)
+ goto done;
+ if(H5Tinsert(sub4_tid, "a", HOFFSET(sub4_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(sub4_tid, "b", HOFFSET(sub4_t, b), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((set_tid = H5Tcreate(H5T_COMPOUND, sizeof(set_t))) < 0)
+ goto done;
+ if(H5Tinsert(set_tid, "field1", HOFFSET(set_t, field1), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(set_tid, "field2", HOFFSET(set_t, field2), sub2_tid) < 0)
+ goto done;
+ if(H5Tinsert(set_tid, "field3", HOFFSET(set_t, field3), H5T_NATIVE_DOUBLE) < 0)
+ goto done;
+ if(H5Tinsert(set_tid, "field4", HOFFSET(set_t, field4), sub4_tid) < 0)
+ goto done;
+
+ /* Create the compound type with escape/separator characters */
+ if((esc_sub2_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub2_t))) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub2_tid, ".a", HOFFSET(sub2_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub2_tid, ",b", HOFFSET(sub2_t, b), sub22_tid) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub2_tid, "\\c", HOFFSET(sub2_t, c), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((esc_sub4_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub4_t))) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub4_tid, "a.", HOFFSET(sub4_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub4_tid, "b,", HOFFSET(sub4_t, b), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((esc_set_tid = H5Tcreate(H5T_COMPOUND, sizeof(set_t))) < 0)
+ goto done;
+ if(H5Tinsert(esc_set_tid, "field,1", HOFFSET(set_t, field1), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(esc_set_tid, "field2.", HOFFSET(set_t, field2), esc_sub2_tid) < 0)
+ goto done;
+ if(H5Tinsert(esc_set_tid, "field\\3", HOFFSET(set_t, field3), H5T_NATIVE_DOUBLE) < 0)
+ goto done;
+ if(H5Tinsert(esc_set_tid, "field4,", HOFFSET(set_t, field4), esc_sub4_tid) < 0)
+ goto done;
+
+ /* Generate DSET_CMPD, DSET_CMPD_ESC */
+ if(generate_dset(fid, DSET_CMPD, 1, cur_dims, max_dims, set_tid, one_cbuf) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_CMPD_ESC, 1, cur_dims, max_dims, esc_set_tid, one_cbuf) < 0)
+ goto done;
+
+ /* Initialization for two-dimensional compound typed dataset */
+ cur2_dims[0] = TWO_DIMS0;
+ cur2_dims[1] = TWO_DIMS1;
+ max2_dims[0] = MAX_TWO_DIMS0;
+ max2_dims[0] = MAX_TWO_DIMS1;
+
+ for (i = 0; i < (TWO_DIMS0 * TWO_DIMS1); i++) {
+ two_cbuf[i].field1 = 1;
+ two_cbuf[i].field2.a = 2;
+ two_cbuf[i].field2.c = 4;
+ two_cbuf[i].field2.b.a = 20;
+ two_cbuf[i].field2.b.b = 40;
+ two_cbuf[i].field2.b.c = 80;
+ two_cbuf[i].field3 = 3.0;
+ two_cbuf[i].field4.a = 4;
+ two_cbuf[i].field4.b = 8;
+ }
+
+ /* Generate DSET_CMPD_TWO */
+ if(generate_dset(fid, DSET_CMPD_TWO, 2, cur2_dims, max2_dims, set_tid, two_cbuf) < 0)
+ goto done;
+
+ /* Create NULL dataspace */
+ if((null_sid = H5Screate(H5S_NULL)) < 0)
+ goto done;
+
+ /* Create the NULL dataset */
+ if((null_did = H5Dcreate2(fid, DSET_NULL, H5T_NATIVE_UINT, null_sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Create SCALAR dataspace */
+ if((scalar_sid = H5Screate(H5S_SCALAR)) < 0)
+ goto done;
+
+ /* Create the SCALAR dataset */
+ if((scalar_did = H5Dcreate2(fid, DSET_SCALAR, H5T_NATIVE_INT, scalar_sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Closing */
+ if(H5Dclose(scalar_did) < 0) goto done;
+ if(H5Sclose(scalar_sid) < 0) goto done;
+
+ if(H5Dclose(null_did) < 0) goto done;
+ if(H5Sclose(null_sid) < 0) goto done;
+
+ if(H5Tclose(sub22_tid) < 0) goto done;
+ if(H5Tclose(sub2_tid) < 0) goto done;
+ if(H5Tclose(sub4_tid) < 0) goto done;
+ if(H5Tclose(set_tid) < 0) goto done;
+ if(H5Tclose(esc_sub2_tid) < 0) goto done;
+ if(H5Tclose(esc_sub4_tid) < 0) goto done;
+ if(H5Tclose(esc_set_tid) < 0) goto done;
+ if(H5Fclose(fid) < 0) goto done;
+
+ exit(EXIT_SUCCESS);
+
+done:
+ H5E_BEGIN_TRY
+ H5Tclose(sub22_tid);
+ H5Tclose(sub2_tid);
+ H5Tclose(sub4_tid);
+ H5Tclose(set_tid);
+ H5Tclose(esc_sub2_tid);
+ H5Tclose(esc_sub4_tid);
+ H5Tclose(esc_set_tid);
+
+ H5Dclose(null_did);
+ H5Sclose(null_sid);
+ H5Dclose(scalar_did);
+ H5Sclose(scalar_sid);
+
+ H5Fclose(fid);
+ H5E_END_TRY
+
+ exit(EXIT_FAILURE);
+} /* main() */
diff --git a/hl/test/ld_extend.c b/hl/test/ld_extend.c
new file mode 100644
index 0000000..3ddd883
--- /dev/null
+++ b/hl/test/ld_extend.c
@@ -0,0 +1,151 @@
+#include "H5HLprivate2.h"
+#include <stdio.h>
+#include <string.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+/* Size of data buffer */
+#define TEST_BUF_SIZE 100
+
+/*
+ * Test variations (incremental) for one-dimensional dataset:
+ * Varies from 10->13->12->12->1->3
+ */
+#define ONE_NTESTS 5
+int one_tests[ONE_NTESTS] = {3, -1, 0, -11, 2};
+
+/*
+ * Test variations (incremental) for two-dimensional dataset:
+ * Varies from {4,10}->{6,12}->{8,1}->{10,1}->
+ * {3,3}->{2,2}->{1,2}->
+ * {1,4}->{1,3}->{1,3}
+ */
+#define TWO_NTESTS 9
+int two_tests[TWO_NTESTS][2] = { {2, 2}, {2, -11}, {2, 0},
+ {-7, 2}, {-1, -1}, {-1, 0},
+ {0, 2}, {0, -1}, {0, 0}
+ };
+
+static int extend_dset(const char *file, char *dname);
+
+/*
+ * Extend the specified dataset in the file with ld_monitor.c monitoring
+ * the dataset on the other end:
+ *
+ * 1) Extend the dataset according to the variations: ONE_NTESTS, TWO_NTESTS
+ * 2) Write to the dataset (currently, only for integer dataset)
+ * 3) Flush the dataset
+ */
+static int
+extend_dset(const char *file, char *dname)
+{
+ hid_t fid; /* file id */
+ hid_t did; /* dataset id */
+ hid_t dtype; /* dataset's datatype */
+ hid_t sid; /* dataspace id */
+ int i, j, k; /* local index variable */
+ int ndims; /* number of dimensions */
+ int buf[TEST_BUF_SIZE]; /* buffer for data */
+ hsize_t cur_dims[2]; /* current dimension sizes */
+ hsize_t ext_dims[2]; /* new dimension sizes after extension */
+
+ /* Open the file */
+ if((fid = H5Fopen(file, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Open the dataset */
+ if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Get the dataset's data space */
+ if((sid = H5Dget_space(did)) < 0)
+ goto done;
+
+ /* Get the # of dimensions for the dataset */
+ if((ndims = H5Sget_simple_extent_ndims(sid)) < 0)
+ goto done;
+
+ /* Initialize data written to the dataset */
+ HDmemset(buf, 0, sizeof(buf));
+ for(k = 0; k < TEST_BUF_SIZE; k++)
+ buf[k] = k;
+
+ /* Loop through different variations of extending the dataset */
+ for(i = 0; i < (ndims == 1 ? ONE_NTESTS: TWO_NTESTS); i++) {
+
+ sleep(2);
+
+ /* Get the dataset's current dimension sizes */
+ if(H5LDget_dset_dims(did, cur_dims) < 0)
+ goto done;
+
+ /* Set up the new extended dimension sizes */
+ for(j = 0; j < ndims; j++)
+ ext_dims[j] = cur_dims[j] + (ndims == 1 ? (hsize_t)one_tests[i] : (hsize_t)two_tests[i][j]);
+
+ /* Extend the dataset */
+ if(H5Dset_extent(did, ext_dims) < 0)
+ goto done;
+
+ /* Get the dataset's data type */
+ if((dtype = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ goto done;
+
+ /* Write to the whole dataset after extension */
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
+ goto done;
+
+ /* Flush the data */
+ if(H5Dflush(did) < 0)
+ goto done;
+
+ } /* end for ONE_NTESTS or TWO_NTESTS */
+
+ /* Closing */
+ if(H5Tclose(dtype) < 0) goto done;
+ if(H5Sclose(sid) < 0) goto done;
+ if(H5Dclose(did) < 0) goto done;
+ if(H5Fclose(fid) < 0) goto done;
+
+ return(0);
+
+done:
+ H5E_BEGIN_TRY
+ H5Tclose(dtype);
+ H5Sclose(sid);
+ H5Dclose(did);
+ H5Fclose(fid);
+ H5E_END_TRY
+
+ return(-1);
+} /* extend_dset() */
+
+
+/* Usage: extend_dset xx.h5 dname */
+int
+main(int argc, const char *argv[])
+{
+ char *dname = NULL; /* dataset name */
+ char *fname = NULL; /* file name */
+
+ if(argc != 3) {
+ fprintf(stderr, "Should have file name and dataset name to be extended...\n");
+ goto done;
+ }
+
+ /* Get the file and dataset names to be extended */
+ fname = HDstrdup(argv[1]);
+ dname = HDstrdup(argv[2]);
+
+ /* Extend the specified dataset in the file */
+ if(extend_dset(fname, dname) < 0)
+ goto done;
+
+ exit(EXIT_SUCCESS);
+
+done:
+ if(dname) HDfree(dname);
+ if(fname) HDfree(fname);
+ exit(EXIT_FAILURE);
+} /* main() */
diff --git a/hl/test/ld_monitor.c b/hl/test/ld_monitor.c
new file mode 100644
index 0000000..7b90715
--- /dev/null
+++ b/hl/test/ld_monitor.c
@@ -0,0 +1,148 @@
+#include "H5HLprivate2.h"
+#include <stdio.h>
+#include <string.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <unistd.h>
+
+#define TEST_BUF_SIZE 100
+
+/*
+ * Monitor the specified dataset in the file while ld_extend.c extending
+ * and writing to the dataset on the other end:
+ *
+ * 1) Retrieve the dataset's current dimension sizes
+ * 2) If there are changes in dimension sizes:
+ * print the dimension sizes
+ * retrieve the appended data and print them
+ */
+static int
+monitor_dset(const char *fname, char *dname)
+{
+ hid_t fid; /* dataset id */
+ hid_t did; /* dataset id */
+ hid_t sid; /* dataspace id */
+ int ndims; /* # of dimensions in the dataspace */
+ int i, u; /* local index variable */
+ hsize_t cur_dims[H5S_MAX_RANK]; /* current dimension sizes */
+ hsize_t prev_dims[H5S_MAX_RANK]; /* previous dimension sizes */
+ int buf[TEST_BUF_SIZE]; /* Buffer for data */
+ herr_t ret_value = 0; /* return value */
+
+ /* Open the file with SWMR */
+ if((fid = H5Fopen(fname, H5F_ACC_SWMR_READ, H5P_DEFAULT)) < 0)
+ goto done;
+
+ HDfprintf(stdout, "Monitoring dataset %s...\n", dname);
+
+ /* Open the dataset for minitoring */
+ if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0) {
+ HDfprintf(stdout, "error in opening dataset \"%s\"\n", dname);
+ ret_value = -1;
+ goto done;
+ }
+
+ /* Get the dataset's data space */
+ if((sid = H5Dget_space(did)) < 0) {
+ HDfprintf(stdout, "error in getting dataspace id for dataset \"%s\"\n", dname);
+ ret_value = -1;
+ goto done;
+ }
+
+ /* Get the dataset's dimension sizes */
+ if((ndims = H5Sget_simple_extent_dims(sid, prev_dims, NULL)) < 0) {
+ HDfprintf(stdout, "unable to get dimensions sizes for \"%s\"\n", dname);
+ ret_value = -1;
+ goto done;
+ }
+
+ /* Monitor the dataset for changes */
+ while(1) {
+
+ /* Refresh the dataset */
+ if(H5Drefresh(did) < 0) {
+ ret_value = -1;
+ goto done;
+ }
+
+ /* Get the dataset's current dimension sizes */
+ if(H5LDget_dset_dims(did, cur_dims) < 0) {
+ HDfprintf(stdout, "unable to get dimension sizes for \"%s\"\n", dname);
+ ret_value = -1;
+ goto done;
+ }
+
+ /* Check for changes in dimension sizes */
+ for(u = 0; u < ndims; u++) {
+ if(cur_dims[u] != prev_dims[u])
+ break;
+ }
+
+ /* Printing only when there are changes */
+ if(u < ndims) {
+ /* Print the current dimension sizes */
+ HDfprintf(stdout, "\n");
+ for(i = 0; i < ndims; i++)
+ HDfprintf(stdout, "%d ", (int)cur_dims[i]);
+ HDfprintf(stdout, "\n");
+
+ /* Get data appended to the dataset and print the data */
+ HDmemset(buf, 0, sizeof(buf));
+ if(H5LDget_dset_elmts(did, prev_dims, cur_dims, NULL, buf) >= 0) {
+
+ for(i = 0; i < TEST_BUF_SIZE; i++) {
+ if(i % 10)
+ HDfprintf(stdout, "%d ", buf[i]);
+ else
+ HDfprintf(stdout, "\n%d ", buf[i]);
+ }
+ HDfprintf(stdout, "\n");
+ }
+
+ /* Flush the output to stdout */
+ HDfflush(stdout);
+ /* Update the dimension sizes */
+ HDmemcpy(prev_dims, cur_dims, ndims * sizeof(hsize_t));
+ }
+
+ /* Sleep before next monitor */
+ sleep(1);
+ } /* end while */
+
+done:
+ /* Closing */
+ H5E_BEGIN_TRY
+ H5Sclose(sid);
+ H5Dclose(did);
+ H5E_END_TRY
+
+ return(ret_value);
+} /* monitor_dset() */
+
+/* usage: monitor xx.h5 dname */
+int
+main(int argc, const char *argv[])
+{
+ char *dname = NULL; /* dataset name */
+ char *fname = NULL; /* file name */
+
+ if(argc != 3) {
+ HDfprintf(stderr, "Should have file name and dataset name to be monitored...\n");
+ goto done;
+ }
+
+ /* Get the file name and dataset name to be extended */
+ fname = strdup(argv[1]);
+ dname = strdup(argv[2]);
+
+ /* only integer dataset */
+ if(monitor_dset(fname, dname) < 0)
+ goto done;
+
+ exit(EXIT_SUCCESS);
+
+done:
+ if(dname) free(dname);
+ if(fname) free(fname);
+ exit(EXIT_FAILURE);
+}
diff --git a/hl/test/test_ld.c b/hl/test/test_ld.c
new file mode 100644
index 0000000..3a282c8
--- /dev/null
+++ b/hl/test/test_ld.c
@@ -0,0 +1,1397 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+* Copyright by The HDF Group. *
+* Copyright by the Board of Trustees of the University of Illinois. *
+* All rights reserved. *
+* *
+* This file is part of HDF5. The full HDF5 copyright notice, including *
+* terms governing use, modification, and redistribution, is contained in *
+* the files COPYING and Copyright.html. COPYING can be found at the root *
+* of the source code distribution tree; Copyright.html can be found at the *
+* root level of an installed copy of the electronic HDF5 document set and *
+* is linked from the top-level documents page. It can also be found at *
+* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+* access to either file, you may request a copy from help@hdfgroup.org. *
+* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <errno.h>
+#include <setjmp.h>
+#include "h5hltest.h"
+#include "H5srcdir.h"
+#include "H5LDpublic.h"
+
+/* File name */
+#define FILE "test_ld.h5"
+/* Copied file name */
+#define COPY_FILENAME "COPY_test_ld.h5"
+
+/* Dataset names */
+#define DSET_ONE "DSET_ONE"
+#define DSET_ALLOC_LATE "DSET_ALLOC_LATE"
+#define DSET_ALLOC_EARLY "DSET_ALLOC_EARLY"
+#define DSET_TWO "DSET_TWO"
+#define TWO_DIM_1 4
+#define TWO_DIM_2 10
+#define DSET_CMPD "DSET_CMPD"
+#define DSET_CMPD_ESC "DSET_CMPD_ESC"
+#define DSET_CMPD_TWO "DSET_CMPD_TWO"
+#define DSET_NULL "DSET_NULL"
+#define DSET_SCALAR "DSET_SCALAR"
+
+/* Size of data buffer */
+#define TEST_BUF_SIZE 100
+
+/* Temporary Buffer size */
+#define READ_BUF_SIZE 4096
+
+/* Selected compound field members for testing */
+#define VALID_FIELDS1 "field1,field2.a,field3,field4" /* TEMPORARY */
+#define VALID_FIELDS2 "field2.b.a,field2.c,field4.b"
+
+#define INVALID_FIELDS1 "field2.k.a,field2.c,field4.k"
+#define INVALID_FIELDS2 "field2.b.a,field2.c,field4.b."
+#define INVALID_FIELDS3 "field2.b.a,,field2.c,field4.b"
+
+#define VALID_ESC_FIELDS1 "field\\,1,field2\\..\\.a,field\\\\3,field4\\,"
+#define VALID_ESC_FIELDS2 "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\,"
+
+#define INVALID_ESC_FIELDS1 "field2\\..\\,k.a,field2\\..\\\\c,field4\\,.k\\,"
+#define INVALID_ESC_FIELDS2 "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\,."
+#define INVALID_ESC_FIELDS3 "field2\\..\\,,b.a,field2\\..\\\\c,field4\\,.b\\,"
+
+/*
+ * Test variations (retained original) for one-dimensional dataset:
+ * Varies from 10->13; 10->9, 10->10, 10->1, 10->11
+ */
+#define ONE_NTESTS 5
+int one_tests[ONE_NTESTS] = {3, -1, 0, -9, 1};
+
+/*
+ * Test variations (retained original) for two-dimensional dataset:
+ * Varies from {4,10}->{6,12}; {4,10}->{6,9}; {4,10}->{6,10};
+ * {4,10}->{3,12}; {4,10}->{3,9}; {4,10}->{3,10};
+ * {4,10}->{4,12}; {4,10}->{4,9}; {4,10}->{4,10}
+ */
+#define TWO_NTESTS 9
+int two_tests[TWO_NTESTS][2] = { {2,2}, {2,-1}, {2,0},
+ {-1,2}, {-1,-1}, {-1,0},
+ {0,2}, {0,-1}, {0,0} };
+
+
+/* Verify that the two input values are the same */
+#define VERIFY_EQUAL(_x, _y) \
+{ \
+ long __x = (long)_x, __y = (long)_y; \
+ if(__x != __y) TEST_ERROR \
+}
+
+/* Copy srcfile to dstfile */
+#define COPY_FILE(srcfile, dstfile) \
+{ \
+ int src_fd = (-1); /* Descriptor for input file */ \
+ int dst_fd = (-1); /* Descriptor for output file */ \
+ ssize_t nread; /* Number of bytes read in */ \
+ char tmp_buf[READ_BUF_SIZE];/* Temporary buffer */ \
+ \
+ /* Open srcfile */ \
+ if((src_fd = HDopen(srcfile, O_RDONLY, 0666)) < 0) \
+ TEST_ERROR; \
+ \
+ /* Open dstfile */ \
+ if((dst_fd = HDopen(dstfile, O_RDWR|O_CREAT|O_TRUNC, 0666)) < 0) \
+ TEST_ERROR; \
+ \
+ /* Copy data from srcfile to dstfile */ \
+ while((nread = HDread(src_fd, tmp_buf, (size_t)READ_BUF_SIZE)) > 0) \
+ HDwrite(dst_fd, tmp_buf, (size_t)nread); \
+ \
+ /* Close files */ \
+ if(HDclose(src_fd) < 0) TEST_ERROR; \
+ if(HDclose(dst_fd) < 0) TEST_ERROR; \
+}
+
+/* Macros for verifying compound fields */
+/* Verify all fields */
+#define VERIFY_ELMTS_ALL(ent1, ent2) { \
+ VERIFY_EQUAL(ent1.field1, ent2.field1); \
+ VERIFY_EQUAL(ent1.field2.a, ent2.field2.a); \
+ VERIFY_EQUAL(ent1.field2.b.a, ent2.field2.b.a); \
+ VERIFY_EQUAL(ent1.field2.b.b, ent2.field2.b.b); \
+ VERIFY_EQUAL(ent1.field2.b.c, ent2.field2.b.c); \
+ VERIFY_EQUAL(ent1.field2.c, ent2.field2.c); \
+ VERIFY_EQUAL(ent1.field3, ent2.field3); \
+ VERIFY_EQUAL(ent1.field4.a, ent2.field4.a); \
+}
+
+/* Verify fields selected in VALID_FIELDS1 */
+#define VERIFY_ELMTS_VALID1(ent1, ent2) { \
+ VERIFY_EQUAL(ent1.field1, ent2.field1); \
+ VERIFY_EQUAL(ent1.field2_a, ent2.field2.a); \
+ VERIFY_EQUAL(ent1.field3, ent2.field3); \
+ VERIFY_EQUAL(ent1.field4.a, ent2.field4.a); \
+ VERIFY_EQUAL(ent1.field4.b, ent2.field4.b); \
+}
+
+/* Verify fields selected in VALID_FIELDS2 */
+#define VERIFY_ELMTS_VALID2(ent1, ent2) { \
+ VERIFY_EQUAL(ent1.field2_b_a, ent2.field2.b.a); \
+ VERIFY_EQUAL(ent1.field2_c, ent2.field2.c); \
+ VERIFY_EQUAL(ent1.field4_b, ent2.field4.b); \
+}
+
+/* The types of 2-dimensional dataset: DSET_TWO or DSET_CMPD_TWO */
+#define TWO_NONE 0 /* DSET_TWO */
+#define TWO_CMPD_NULL 1 /* DSET_CMPD_TWO with NULL fields */
+#define TWO_CMPD_VALID1 2 /* DSET_CMPD_TWO with VALID_FIELDS1 or VALID_ESC_FIELDS1 */
+#define TWO_CMPD_VALID2 3 /* DSET_CMPD_TWO with VALID_FIELDS2 or VALID_ESC_FIELDS2 */
+
+#define VERIFY_ELMTS(type, _ldbuf, _buf) { \
+ if(type == TWO_NONE) { \
+ int *iibuf = (int *)_ldbuf; \
+ int *ibuf = (int *)_buf; \
+ \
+ VERIFY_EQUAL(iibuf[k], ibuf[ind+n]) \
+ } else if(type == TWO_CMPD_NULL) { \
+ set_t *ccbuf = (set_t *)_ldbuf; \
+ set_t *cbuf = (set_t *)_buf; \
+ \
+ VERIFY_ELMTS_ALL(ccbuf[k], cbuf[ind+n]) \
+ } else if(type == TWO_CMPD_VALID1) { \
+ test_valid_fields1 *vbuf1 = (test_valid_fields1 *)_ldbuf; \
+ set_t *cbuf = (set_t *)_buf; \
+ \
+ VERIFY_ELMTS_VALID1(vbuf1[k], cbuf[ind+n]) \
+ } else if(type == TWO_CMPD_VALID2) { \
+ test_valid_fields2 *vbuf2 = (test_valid_fields2 *)_ldbuf; \
+ set_t *cbuf = (set_t *)_buf; \
+ \
+ VERIFY_ELMTS_VALID2(vbuf2[k], cbuf[ind+n]) \
+ } \
+}
+
+/* Tests for test_LD_elmts_pipe() */
+#define ONE_TESTS 3
+int onetests[ONE_TESTS] = {3, 9, 1};
+#define TWO_TESTS 5
+int twotests[TWO_TESTS][2] = { {2,2}, {2,-1}, {2,0}, {-1,2}, {0,2} };
+
+
+static herr_t test_LD_dims_params(const char *file);
+static herr_t test_LD_dims(const char *file);
+
+static herr_t test_LD_size(const char *file);
+
+static herr_t test_LD_elmts_invalid(const char *file);
+static herr_t test_LD_elmts_one(const char *file, const char *dname, const char *fields);
+static herr_t test_LD_elmts_two(const char *file, const char *dname, const char *fields);
+
+static herr_t verify_elmts_two(int type, hsize_t *ext_dims, hsize_t *prev_dims, void *_ldbuf, void *_buf);
+
+/* data structures for compound data type */
+typedef struct sub22_t {
+ int a;
+ int b;
+ int c;
+} sub22_t;
+
+typedef struct sub2_t {
+ int a;
+ sub22_t b;
+ int c;
+} sub2_t;
+
+typedef struct sub4_t {
+ int a;
+ int b;
+} sub4_t;
+
+typedef struct set_t {
+ int field1;
+ sub2_t field2;
+ double field3;
+ sub4_t field4;
+} set_t;
+
+
+/* NOTE:
+ * This will fail on heiwa and amani when VALID_FIELDS1 is "field1,field3,field4"
+ * because of alignment problems:
+ * amani and heiwa - 8 byte alignment
+ * jam - 4 byte alignment
+ * This will need to be fixed in the library for H5Tget_native_type().
+ */
+/* VALID_FIELDS1 "field1,field2.a,field3,field4" */
+/* VALID_ESC_FIELDS1 "field\\,1,field2\\..\\.a,field\\\\3,field4\\," */
+typedef struct test_valid_fields1 {
+ int field1;
+ int field2_a;
+ double field3;
+ sub4_t field4;
+} test_valid_fields1;
+
+/* VALID_FIELDS2 "field2.b.a,field2.c,field4.b" */
+/* VALID_ESC_FIELDS2 "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\," */
+typedef struct test_valid_fields2 {
+ int field2_b_a;
+ int field2_c;
+ int field4_b;
+} test_valid_fields2;
+
+/*
+ *********************************************************************************
+ *
+ * Testing for the High Level public routine: H5LDget_dset_dims()
+ * 1) An invalid dataset id
+ * 2) "DSET_ALLOC_EARLY": NULL cur_dims
+ * 3) "DSET_ALLOC_LATE": nonNULL cur_dims
+ * 4) "DSET_CMPD_TWO": nonNULL cur_dims
+ * 5) "DSET_NULL": nonNULL cur_dims
+ * 6) "DSET_SCALAR": nonNULL cur_dims
+ *
+ *********************************************************************************
+ */
+static herr_t
+test_LD_dims_params(const char *file)
+{
+ hid_t fid; /* file identifier */
+ hid_t did; /* dataset identifier */
+ hsize_t one_cur_dims[1]; /* current dimension sizes for 1-dimensional dataset */
+ hsize_t two_cur_dims[2]; /* current dimension sizes for 2-dimensional dataset */
+ herr_t ret; /* return value */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_dims");
+
+ /* Make a copy of the test file */
+ COPY_FILE(filename, COPY_FILENAME)
+
+ /* Open the copied file */
+ if((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * 1. Verify failure with negative dataset id
+ */
+ H5E_BEGIN_TRY {
+ ret = H5LDget_dset_dims(-1, one_cur_dims);
+ } H5E_END_TRY;
+ VERIFY_EQUAL(ret, FAIL)
+
+ /*
+ * 2. Verify failure for NULL cur_dims
+ */
+ if((did = H5Dopen2(fid, DSET_ALLOC_EARLY, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ H5E_BEGIN_TRY {
+ ret = H5LDget_dset_dims(did, NULL);
+ } H5E_END_TRY;
+ VERIFY_EQUAL(ret, FAIL)
+ H5Dclose(did);
+
+ /*
+ * 3. Verify for nonNULL cur_dims
+ */
+ if((did = H5Dopen2(fid, DSET_ALLOC_LATE, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ if(H5LDget_dset_dims(did, one_cur_dims) < 0)
+ FAIL_STACK_ERROR
+ VERIFY_EQUAL(one_cur_dims[0], 10)
+ H5Dclose(did);
+
+ /*
+ * 4. Verify nonNULL cur_dims for a 2-dimensional dataset
+ */
+ if((did = H5Dopen2(fid, DSET_CMPD_TWO, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ if(H5LDget_dset_dims(did, two_cur_dims) < 0)
+ FAIL_STACK_ERROR
+ VERIFY_EQUAL(two_cur_dims[0], TWO_DIM_1)
+ VERIFY_EQUAL(two_cur_dims[1], TWO_DIM_2)
+ H5Dclose(did);
+
+ /*
+ * 5. Verify nonNULL cur_dims for dataset with H5S_NULL dataspace
+ */
+ one_cur_dims[0] = 0;
+
+ if((did = H5Dopen2(fid, DSET_NULL, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ if(H5LDget_dset_dims(did, one_cur_dims) < 0)
+ FAIL_STACK_ERROR
+ VERIFY_EQUAL(one_cur_dims[0], 0)
+ H5Dclose(did);
+
+ /*
+ * 6. Verify nonNULL cur_dims for dataset with H5S_SCALAR dataspace
+ */
+ one_cur_dims[0] = 0;
+
+ if((did = H5Dopen2(fid, DSET_SCALAR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ if(H5LDget_dset_dims(did, one_cur_dims) < 0)
+ FAIL_STACK_ERROR
+ VERIFY_EQUAL(one_cur_dims[0], 0)
+ H5Dclose(did);
+
+ /* Close the file */
+ if(H5Fclose(fid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+
+} /* test_LD_dims_params() */
+
+/*
+ *********************************************************************************
+ *
+ * Testing for the High Level public routine: H5LDget_dset_dims()
+ * Verify that the dimension sizes retrieved via H5LDget_dset_dims() are correct
+ * for the following cases:
+ *
+ * DSET_ONE: one-dimensional dataset
+ * 1. Increase dims[0]
+ * 2. Decrease dims[0]
+ * 3. same dims[0]
+ * 4. Decrease dims[0]
+ * 5. Increase dims[0]
+ *
+ * one_tests[ONE_NTESTS] = {3, -1, 0, -9, 1}
+ * Varies from 10->13; 10->9, 10->10, 10->1, 10->11
+ *
+ * DSET_TWO: two-dimensional dataset
+ * 1. Increase dims[0], increase dims[1]
+ * 2. Increase dims[0], decrease dims[1]
+ * 3. Increase dims[0], same dims[1]
+ * 4. Decrease dims[0], increase dims[1]
+ * 5. Decrease dims[0], decrease dims[1]
+ * 6. Decrease dims[0], same dims[1]
+ * 7. same dims[0], increase dims[1]
+ * 8. same dims[0], decrease dims[1]
+ * 9. same dims[0], same dims[1]
+ *
+ * two_tests[TWO_NTESTS][2] = { {2,2}, {2,-1}, {2,0},
+ * {-1,2}, {-1,-1}, {-1,0},
+ * {0,2}, {0,-1}, {0,0} }
+ * Varies from {4,10}->{6,12}; {4,10}->{6,9}; {4,10}->{6,10};
+ * {4,10}->{3,12}; {4,10}->{3,9}; {4,10}->{3,10};
+ * {4,10}->{4,12}; {4,10}->{4,9}; {4,10}->{4,10}
+ *
+ *********************************************************************************
+ */
+static herr_t
+test_LD_dims(const char *file)
+{
+ hid_t fid; /* file identifier */
+ hid_t did; /* dataset identifier */
+ int i; /* local index variable */
+ hsize_t one_prev_dims[1]; /* original dimension sizes for 1-dimensional dataset */
+ hsize_t one_cur_dims[1]; /* current dimension sizes for 1-dimensional dataset */
+ hsize_t one_ext_dims[1]; /* extended dimension sizes for 1-dimensional dataset */
+ hsize_t two_prev_dims[2]; /* original dimension sizes for 2-dimensional dataset */
+ hsize_t two_cur_dims[2]; /* current dimension sizes for 2-dimensional dataset */
+ hsize_t two_ext_dims[2]; /* extended dimension sizes for 2-dimensional dataset */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_dims with H5Dset_extent");
+
+ /* Make a copy of the test file */
+ COPY_FILE(filename, COPY_FILENAME)
+
+ /* Open the copied file */
+ if((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Testing with one-dimensional dataset: DSET_ONE
+ */
+ if((did = H5Dopen2(fid, DSET_ONE, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve dimension sizes */
+ if(H5LDget_dset_dims(did, one_prev_dims) < 0)
+ FAIL_STACK_ERROR
+
+ for(i = 0; i < ONE_NTESTS; i++) {
+
+ /* Set up the extended dimension sizes */
+ one_ext_dims[0] = one_prev_dims[0] + one_tests[i];
+
+ /* Change the dimension size */
+ if(H5Dset_extent(did, one_ext_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve the dimension size */
+ if(H5LDget_dset_dims(did, one_cur_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify that the retrieved dimension size is correct as expected */
+ VERIFY_EQUAL(one_cur_dims[0], one_ext_dims[0])
+ }
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Testing with two-dimensional dataset: DSET_TWO
+ */
+ if((did = H5Dopen2(fid, DSET_TWO, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve the dimension sizes */
+ if(H5LDget_dset_dims(did, two_prev_dims) < 0)
+ FAIL_STACK_ERROR
+
+ for(i = 0; i < TWO_NTESTS; i++) {
+
+ /* Set up the extended dimension sizes */
+ two_ext_dims[0] = two_prev_dims[0] + two_tests[i][0];
+ two_ext_dims[1] = two_prev_dims[1] + two_tests[i][1];
+
+ /* Change the dimension sizes */
+ if(H5Dset_extent(did, two_ext_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve the dimension sizes */
+ if(H5LDget_dset_dims(did, two_cur_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify that the retrieved dimension sizes are correct as expected */
+ VERIFY_EQUAL(two_cur_dims[0], two_ext_dims[0])
+ VERIFY_EQUAL(two_cur_dims[1], two_ext_dims[1])
+ } /* end TWO_NTESTS */
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /* Close the file */
+ if(H5Fclose(fid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+
+} /* test_LD_dims() */
+
+/*
+ **********************************************************************************
+ *
+ * Testing for the High Level public routine: H5LDget_dset_type_size()
+ * Verify that the data type size returned via H5LDget_dset_type_size()
+ * are correct for the following cases:
+ *
+ * Verify failure for an invalid dataset id
+ *
+ * DSET_CMPD: one-dimensional dataset with compound type
+ * 1. The whole element
+ * 2. VALID_FIELDS1: "field1,field2.a,field3,field4"
+ * 3. VALID_FIELDS2: "field2.b.a,field2.c,field4.b"
+ * 4. INVALID_FIELDS1: "field2.k.a,field2.c,field4.k"
+ * 5. INVALID_FIELDS2: "field2.b.a,field2.c,field4.b."
+ * 6. INVALID_FIELDS3: "field2.b.a,,field2.c,field4.b"
+ *
+ * DSET_CMPD_ESC: one-dimensional dataset with compound type and
+ * member names with escape/separator characters
+ * 1. The whole element
+ * 2. VALID_ESC_FIELDS1: "field\\,1,field2\\..\\.a,field\\\\3,field4\\,"
+ * 3. VALID_ESC_FIELDS2: "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\,"
+ * 4. INVALID_ESC_FIELDS1: "field2\\..\\,k.a,field2\\..\\\\c,field4\\,.k\\,"
+ * 5. INVALID_ESC_FIELDS2: "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\,."
+ * 6. INVALID_ESC_FIELDS3: "field2\\..\\,,b.a,field2\\..\\\\c,field4\\,.b\\,"
+ *
+ **********************************************************************************
+ */
+static int
+test_LD_size(const char *file)
+{
+ hid_t fid; /* file identifier */
+ hid_t did; /* dataset identifier */
+ hid_t dtid; /* dataset's datatype identifier */
+ hid_t memb0_tid; /* type identifier for a member in the compound type */
+ hid_t memb1_tid; /* type identifier for a member in the compound type */
+ hid_t memb2_tid; /* type identifier for a member in the compound type */
+ hid_t memb3_tid; /* type identifier for a member in the compound type */
+ hid_t memb_tid; /* type identifier for a member in the compound type */
+ hid_t memb_tid2; /* type identifier for a member in the compound type */
+ size_t dsize; /* size of the dataset's datatype */
+ size_t ck_dsize; /* size of the dataset's datatype to be checked against */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_type_size");
+
+ /* Open the file */
+ if((fid = H5Fopen(filename, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Verify failure with an invalid dataset id
+ */
+ H5E_BEGIN_TRY {
+ dsize = H5LDget_dset_type_size(-1, NULL);
+ } H5E_END_TRY;
+ VERIFY_EQUAL(dsize, 0)
+
+ /*
+ * Testing one-dimensional dataset with compound datatype:
+ * DSET_CMPD
+ */
+
+ /* Open dataset DSET_CMPD */
+ if((did = H5Dopen2(fid, DSET_CMPD, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Return size of the whole element */
+ if((dsize = H5LDget_dset_type_size(did, NULL)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataset's datatype and then its datatype size */
+ if((dtid = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ if((ck_dsize = H5Tget_size(dtid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #1 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /* Get datatype id for each member */
+ if((memb0_tid = H5Tget_member_type(dtid, 0)) < 0) /* "field1" */
+ FAIL_STACK_ERROR
+ if((memb1_tid = H5Tget_member_type(dtid, 1)) < 0) /* "field2" */
+ FAIL_STACK_ERROR
+ if((memb2_tid = H5Tget_member_type(dtid, 2)) < 0) /* "field3" */
+ FAIL_STACK_ERROR
+ if((memb3_tid = H5Tget_member_type(dtid, 3)) < 0) /* "field4" */
+ FAIL_STACK_ERROR
+
+ /* Obtain size for VALID_FIELDS1: "field1,field2.a,field3,field4" */
+ if((dsize = H5LDget_dset_type_size(did, VALID_FIELDS1)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the datatype size for "field1" */
+ if((ck_dsize = H5Tget_size(memb0_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field2.a" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 0)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ H5Tclose(memb_tid);
+
+ /* Add the datatype size for "field3" */
+ if((ck_dsize += H5Tget_size(memb2_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field4" */
+ if((ck_dsize += H5Tget_size(memb3_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #2 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /* Obtain datatype size for VALID_FIELDS2: "field2.b.a,field2.c,field4.b" */
+ if((dsize = H5LDget_dset_type_size(did, VALID_FIELDS2)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the datatype size for "field2.b.a" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 1)) < 0)
+ FAIL_STACK_ERROR
+ if((memb_tid2 = H5Tget_member_type(memb_tid, 0)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize = H5Tget_size(memb_tid2)) == 0)
+ FAIL_STACK_ERROR
+ H5Tclose(memb_tid);
+ H5Tclose(memb_tid2);
+
+ /* Add the datatype size for "field2.c" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 2)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ H5Tclose(memb_tid);
+
+ /* Add the datatype size for "field4.b" */
+ if((memb_tid = H5Tget_member_type(memb3_tid, 1)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ H5Tclose(memb_tid);
+
+ /* Verify case #3 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /*
+ * Verify failure for the following invalid nested fields:
+ * INVALID_FIELDS1: "field2.k.a,field2.c,field4.k"
+ * INVALID_FIELDS2: "field2.b.a,field2.c,field4.b."
+ * INVALID_FIELDS3: "field2.b.a,,field2.c,field4.b"
+ */
+ /* Verify failure for case #4 */
+ dsize = H5LDget_dset_type_size(did, INVALID_FIELDS1);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Verify failure for case #5 */
+ dsize = H5LDget_dset_type_size(did, INVALID_FIELDS2);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Verify failure for case #6 */
+ dsize = H5LDget_dset_type_size(did, INVALID_FIELDS3);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Closing */
+ H5Tclose(memb0_tid);
+ H5Tclose(memb1_tid);
+ H5Tclose(memb2_tid);
+ H5Tclose(memb3_tid);
+ H5Tclose(dtid);
+ H5Dclose(did);
+
+ /*
+ * Testing one-dimensional dataset with compound datatype and
+ * member names consisting of escape/separator characters:
+ * DSET_CMPD_ESC
+ */
+
+ /* Open dataset DSET_CMPD_ESC */
+ if((did = H5Dopen2(fid, DSET_CMPD_ESC, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Return size of the whole element */
+ if((dsize = H5LDget_dset_type_size(did, NULL)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataset's datatype and then its datatype size */
+ if((dtid = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize = H5Tget_size(dtid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #1 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /* Get datatype id for each member */
+ if((memb0_tid = H5Tget_member_type(dtid, 0)) < 0) /* "field,1" */
+ FAIL_STACK_ERROR
+ if((memb1_tid = H5Tget_member_type(dtid, 1)) < 0) /* "field2." */
+ FAIL_STACK_ERROR
+ if((memb2_tid = H5Tget_member_type(dtid, 2)) < 0) /* "field\3" */
+ FAIL_STACK_ERROR
+ if((memb3_tid = H5Tget_member_type(dtid, 3)) < 0) /* "field4," */
+ FAIL_STACK_ERROR
+
+ /* Obtain size for VALID_ESC_FIELDS1: "field\\,1,field2\\..\\.a,field\\\\3,field4\\," */
+ if((dsize = H5LDget_dset_type_size(did, VALID_ESC_FIELDS1)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the datatype size for "field\\,1" */
+ if((ck_dsize = H5Tget_size(memb0_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field2\\..\\.a" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 0)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ H5Tclose(memb_tid);
+
+ /* Add the datatype size for "field\\\\3" */
+ if((ck_dsize += H5Tget_size(memb2_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field4\\," */
+ if((ck_dsize += H5Tget_size(memb3_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #2 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /* Obtain datatype size for VALID_ESC_FIELDS2:
+ "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\," */
+ if((dsize = H5LDget_dset_type_size(did, VALID_ESC_FIELDS2)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the datatype size for "field2\..,b.a" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 1)) < 0)
+ FAIL_STACK_ERROR
+ if((memb_tid2 = H5Tget_member_type(memb_tid, 0)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize = H5Tget_size(memb_tid2)) == 0)
+ FAIL_STACK_ERROR
+ H5Tclose(memb_tid);
+ H5Tclose(memb_tid2);
+
+ /* Add the datatype size for "field2\..\\c" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 2)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ H5Tclose(memb_tid);
+
+ /* Add the datatype size for "field4\,.b\," */
+ if((memb_tid = H5Tget_member_type(memb3_tid, 1)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ H5Tclose(memb_tid);
+
+ /* Verify case #3 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /*
+ * Verify failure for the following invalid nested fields:
+ * INVALID_ESC_FIELDS1: "field2\..\,k.a,field2\..\\c,field4\,.k\,"
+ * INVALID_ESC_FIELDS2: "field2\..\,b.a,field2\..\\c,field4\,.b\,."
+ * INVALID_ESC_FIELDS3: "field2\..\,,b.a,field2\..\\c,field4\,.b\,"
+ */
+ /* Verify failure for case #4 */
+ dsize = H5LDget_dset_type_size(did, INVALID_ESC_FIELDS1);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Verify failure for case #5 */
+ dsize = H5LDget_dset_type_size(did, INVALID_ESC_FIELDS2);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Verify failure for case #6 */
+ dsize = H5LDget_dset_type_size(did, INVALID_ESC_FIELDS3);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Closing */
+ H5Tclose(memb0_tid);
+ H5Tclose(memb1_tid);
+ H5Tclose(memb2_tid);
+ H5Tclose(memb3_tid);
+ H5Tclose(dtid);
+ H5Dclose(did);
+
+ H5Fclose(fid);
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Tclose(memb0_tid);
+ H5Tclose(memb1_tid);
+ H5Tclose(memb2_tid);
+ H5Tclose(memb3_tid);
+ H5Tclose(dtid);
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+
+} /* test_LD_size() */
+
+/*
+ **************************************************************************************
+ * Testing for the High Level public routine: H5LDget_dset_elmts()
+ * Verify failures when calling H5LDget_dset_elmts() with the following
+ * invalid conditions:
+ *
+ * A. DSET_TWO: two-dimensional dataset
+ * 1. CUR_DIMS and PREV_DIMS are NULL
+ * 2. PREV_DIMS is NULL
+ * 3. CUR_DIMS is NULL
+ * 4. FIELDS is nonnull but the dataset is not compound datatype
+ * 5. BUF is NULL
+ * 6. CUR_DIMS is not greater than PREV_DIMS
+ *
+ * B. DSET_CMPD: one-dimensional dataset with compound type
+ * 1. Invalid dataset id
+ * 2. FIELDS are not valid members in the compound type
+ *
+ **************************************************************************************
+ */
+static int
+test_LD_elmts_invalid(const char *file)
+{
+ hid_t fid; /* file identifier */
+ hid_t did; /* dataset identifier */
+ hid_t sid; /* dataspace identifier */
+ int ret; /* return value */
+ hsize_t cur_dims[2]; /* current dimension sizes of the dataset */
+ hsize_t prev_dims[2]; /* previous dimension sizes of the dataset */
+ char tbuf[2]; /* temporary buffer for testing */
+ int ndims; /* # of dimension sizes */
+ int i; /* local index variable */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_elmts on invalid conditions");
+
+ /* Copy the test file */
+ COPY_FILE(filename, COPY_FILENAME)
+
+ /* Open the copied file */
+ if((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Testing two-dimensional dataset: DSET_TWO
+ */
+
+ /* Open dataset: DSET_TWO */
+ if((did = H5Dopen2(fid, DSET_TWO, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify failure from case #1: cur_dims and prev_dims are NULL */
+ ret = H5LDget_dset_elmts(did, NULL, NULL, NULL, NULL);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Verify failure from case #2: prev_dims is NULL */
+ ret = H5LDget_dset_elmts(did, cur_dims, NULL, NULL, NULL);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Verify failure from case #3: cur_dims is NULL */
+ ret = H5LDget_dset_elmts(did, NULL, prev_dims, NULL, NULL);
+ VERIFY_EQUAL(ret, FAIL)
+
+ if((sid = H5Dget_space(did)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the # of dimensions and current dimension sizes */
+ if((ndims = H5Sget_simple_extent_dims(sid, cur_dims, NULL)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Set up valid cur_dims and prev_dims */
+ for(i = 0; i < ndims; i++)
+ prev_dims[i] = cur_dims[i] - 1;
+
+ /* Verify failure from case #4: FIELDS is nonNULL but the dataset is not compound datatype */
+ ret = H5LDget_dset_elmts(did, prev_dims, cur_dims, "field1", tbuf);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Verify failure from case #5: BUF is NULL */
+ ret = H5LDget_dset_elmts(did, prev_dims, cur_dims, NULL, NULL);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Verify failure from case #6: cur_dims is not > than prev_dims */
+ cur_dims[0] = prev_dims[0] - 1;
+ cur_dims[1] = prev_dims[1] - 1;
+ ret = H5LDget_dset_elmts(did, prev_dims, cur_dims, NULL, tbuf);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Close DSET_TWO */
+ H5Dclose(did);
+
+ /*
+ * Testing one-dimensional dataset with compound datatype:
+ * DSET_CMPD
+ */
+
+ /* Verify failure from case #1: an invalid dataset id */
+ H5E_BEGIN_TRY {
+ ret = H5LDget_dset_elmts(-1, prev_dims, cur_dims, NULL, tbuf);
+ } H5E_END_TRY;
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Open dataset: DSET_CMPD */
+ if((did = H5Dopen2(fid, DSET_CMPD, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve the current dimension sizes */
+ if(H5LDget_dset_dims(did, cur_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Set up valid cur_dims, prev_dims */
+ prev_dims[0] = cur_dims[0] - 1;
+
+ /* Verify failure from case #2: invalid FIELDS */
+ ret = H5LDget_dset_elmts(did, prev_dims, cur_dims, "field2.k.a,field2.c,field4.k", tbuf);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Close DSET_CMPD */
+ H5Dclose(did);
+
+ /* Close the file */
+ H5Fclose(fid);
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+
+} /* test_LD_elmts_invalid() */
+
+
+/*
+ **************************************************************************************
+ * Testing for the High Level public routine: H5LDget_dset_elmts()
+ * Verify elements retrieved via H5LDget_dset_elmts() are correct as expected
+ * when the dataset's dimension sizes are changed according to one_tests[]:
+ *
+ * one-dimensional dataset :
+ * DSET_ONE with NULL fields
+ * DSET_CMPD with fields: NULL, VALID_FIELDS1, VALID_FIELDS2
+ * DSET_CMPD_ESC with fields: NULL, VALID_ESC_FIELDS1, VALID_ESC_FIELDS2
+ *
+ * case #1. increase dims[0]
+ * case #2. decrease dims[0] (failure)
+ * case #3. same dims[0] (failure)
+ * case #4. decrease dims[0] (failure)
+ * case #5. increase dims[0]
+ *
+ **************************************************************************************
+ */
+static herr_t
+test_LD_elmts_one(const char *file, const char *dname, const char *fields)
+{
+ hid_t fid; /* file identifier */
+ hid_t did; /* dataset identifier */
+ hid_t dtype; /* dataset's data type */
+ hsize_t ext_dims[1]; /* extended dimension sizes of the dataset */
+ hsize_t prev_dims[1]; /* previous dimension sizes of the dataset */
+ int i, j; /* local index variable */
+ int iibuf[TEST_BUF_SIZE]; /* buffer for storing retrieved elements */
+ int ibuf[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (integer) */
+ set_t cbuf[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (compound) */
+ set_t ccbuf[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (compound) */
+ test_valid_fields1 vbuf1[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (FIELDS1) */
+ test_valid_fields2 vbuf2[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (FIELDS2) */
+ int ret = 0; /* return value */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_elmts: one-dimensional dataset");
+
+ /* Copy the test file */
+ COPY_FILE(filename, COPY_FILENAME)
+
+ for (i = 0; i < TEST_BUF_SIZE; i++) {
+ cbuf[i].field1 = i;
+ cbuf[i].field2.a = i;
+ cbuf[i].field2.b.a = i;
+ cbuf[i].field2.b.b = i;
+ cbuf[i].field2.b.c = i;
+ cbuf[i].field2.c = i;
+ cbuf[i].field3 = (double)i;
+ cbuf[i].field4.a = i;
+ cbuf[i].field4.b = i;
+ ibuf[i] = i;
+ }
+
+ /* Open the copied file */
+ if((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Open the dataset */
+ if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataset's data type */
+ if((dtype = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get current dimension sizes before extending the dataset's dimension sizes */
+ if(H5LDget_dset_dims(did, prev_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Loop through different variations of extending the dataset */
+ for(i = 0; i < ONE_NTESTS; i++) {
+ ext_dims[0] = prev_dims[0] + one_tests[i];
+
+ /* Change the dimension sizes of the dataset */
+ if(H5Dset_extent(did, ext_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Initialize data */
+ if(!HDstrcmp(dname, DSET_CMPD) || !HDstrcmp(dname, DSET_CMPD_ESC)) {
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, cbuf) < 0)
+ FAIL_STACK_ERROR
+
+ } else if(!HDstrcmp(dname, DSET_ONE)) {
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf) < 0)
+ FAIL_STACK_ERROR
+ }
+
+ /* There are changes in dimension sizes */
+ if(one_tests[i] > 0) {
+
+ if(!HDstrcmp(dname, DSET_CMPD) || !HDstrcmp(dname, DSET_CMPD_ESC)) {
+
+ if(fields && (!HDstrcmp(fields, VALID_FIELDS1) ||
+ !HDstrcmp(fields, VALID_ESC_FIELDS1))) {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf1) < 0)
+ TEST_ERROR
+ for(j = 0; j < one_tests[i]; j++)
+ VERIFY_ELMTS_VALID1(vbuf1[j], cbuf[prev_dims[0]+j])
+
+ } else if(fields && (!HDstrcmp(fields, VALID_FIELDS2) ||
+ !HDstrcmp(fields, VALID_ESC_FIELDS2))) {
+
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf2) < 0)
+ TEST_ERROR
+ for(j = 0; j < one_tests[i]; j++)
+ VERIFY_ELMTS_VALID2(vbuf2[j], cbuf[prev_dims[0]+j])
+
+ } else if(fields == NULL) {
+
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, ccbuf) < 0)
+ TEST_ERROR
+
+ for(j = 0; j < one_tests[i]; j++)
+ VERIFY_ELMTS_ALL(ccbuf[j], cbuf[prev_dims[0]+j])
+ }
+ } else {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf) < 0)
+ TEST_ERROR
+ for(j = 0; j < one_tests[i]; j++)
+ VERIFY_EQUAL(iibuf[j], ibuf[prev_dims[0] + j])
+ }
+ } else {
+ /* Verify failure when changes between prev_dims and ext_dims are same/decrease */
+ ret = H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf);
+ VERIFY_EQUAL(ret, FAIL)
+ }
+
+ } /* end for */
+
+ /* Closing */
+ H5Tclose(dtype);
+ H5Dclose(did);
+ H5Fclose(fid);
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Tclose(dtype);
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+} /* test_LD_elmts_one() */
+
+
+/*
+ **************************************************************************************
+ *
+ * Helper routine to verify elements of a 2-dimensional dataset
+ * _ldbuf contains the elements retrieved via H5LDget_dset_elmts()
+ * _buf contains the data written to the dataset
+ *
+ * e.g. prev_dims[2] = {4, 6}; ext_dims[2] = {6, 10}
+ * elements marked in 'v' in _buf are compared to elements in _ldbuf
+ * 0 1 2 3 4 5 | 6 7 8 9
+ * 0 | v v v v
+ * 1 | v v v v
+ * 2 | v v v v
+ * 3 | v v v v
+ * ---------------------
+ * 4 v v v v v v v v v v
+ * 5 v v v v v v v v v v
+ *
+ * type selects which comparison variant to apply (a TWO_* constant)
+ * Returns: 0 if all compared elements match; -1 otherwise
+ **************************************************************************************
+ */
+static herr_t
+verify_elmts_two(int type, hsize_t *ext_dims, hsize_t *prev_dims, void *_ldbuf, void *_buf)
+{
+ int k, m, n, ind; /* Local index variable */
+
+ /* k walks _ldbuf sequentially; ind locates the start of row m in the
+ * flattened _buf. Both are presumably consumed inside the VERIFY_ELMTS
+ * macro (definition not shown here) -- NOTE(review): confirm against the
+ * macro, which is also assumed to jump to "error" on mismatch. */
+ k = 0;
+ for(m = 0; m < (int)ext_dims[0]; m++) {
+ ind = m * ext_dims[1];
+ if(m < (int)prev_dims[0]) {
+ /* Pre-existing row: only the newly-added columns are compared */
+ for(n = (int)prev_dims[1]; n < (int)ext_dims[1]; n++) {
+ VERIFY_ELMTS(type, _ldbuf, _buf)
+ ++k;
+ }
+ } else {
+ /* Newly-added row: every column is compared */
+ for(n = 0; n < (int)ext_dims[1]; n++) {
+ VERIFY_ELMTS(type, _ldbuf, _buf)
+ ++k;
+ }
+ }
+ } /* end for */
+
+ return(0);
+error:
+ return(-1);
+} /* verify_elmts_two() */
+
+/*
+ **************************************************************************************
+ * Testing for the High Level public routine: H5LDget_dset_elmts()
+ * Verify elements retrieved via H5LDget_dset_elmts() are correct as expected when
+ * the dataset's dimension sizes are changed according to two_tests[]:
+ *
+ * two-dimensional dataset: DSET_TWO with NULL fields
+ * DSET_CMPD_TWO with fields: NULL, VALID_FIELDS1, VALID_FIELDS2
+ *
+ * dims[0] dims[1]
+ * ------- -------
+ * case #1: increase increase
+ * case #2: increase decrease
+ * case #3: increase same
+ * case #4: decrease increase
+ * case #5: decrease decrease (failure)
+ * case #6: decrease same (failure)
+ * case #7: same increase
+ * case #8: same decrease (failure)
+ * case #9: same same (failure)
+ *
+ * Parameters:
+ *   file   -- name of the source test file (a working copy is made first)
+ *   dname  -- name of the dataset to open (DSET_TWO or DSET_CMPD_TWO)
+ *   fields -- field selection for compound datasets; NULL selects all members
+ *
+ * Returns: 0 on success; -1 on failure
+ **************************************************************************************
+ */
+static herr_t
+test_LD_elmts_two(const char *file, const char *dname, const char *fields)
+{
+ hid_t fid; /* file identifier */
+ hid_t did; /* dataset identifier */
+ hid_t dtype; /* dataset's data type */
+ hsize_t ext_dims[2]; /* extended dimension sizes of the dataset */
+ hsize_t prev_dims[2]; /* previous dimension sizes of the dataset */
+ int i; /* local index variable */
+ int iibuf[TEST_BUF_SIZE]; /* buffer for storing retrieved elements */
+ int ibuf[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (integer) */
+ set_t cbuf[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (compound) */
+ set_t ccbuf[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (compound) */
+ test_valid_fields1 vbuf1[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (FIELDS1) */
+ test_valid_fields2 vbuf2[TEST_BUF_SIZE]; /* buffer for storing retrieved elements (FIELDS2) */
+ int ret = 0; /* return value */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_elmts: two-dimensional dataset");
+
+ /* Copy the test file so extensions don't corrupt the source file */
+ COPY_FILE(filename, COPY_FILENAME)
+
+ /* Fill the write buffers with a known pattern: every field of element i is i */
+ for (i = 0; i < TEST_BUF_SIZE; i++) {
+ cbuf[i].field1 = i;
+ cbuf[i].field2.a = i;
+ cbuf[i].field2.b.a = i;
+ cbuf[i].field2.b.b = i;
+ cbuf[i].field2.b.c = i;
+ cbuf[i].field2.c = i;
+ cbuf[i].field3 = (double)i;
+ cbuf[i].field4.a = i;
+ cbuf[i].field4.b = i;
+ ibuf[i] = i;
+ }
+
+ /* Open the copied file */
+ if((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Open the dataset */
+ if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataset's data type */
+ if((dtype = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get current dimension sizes before extending the dataset's dimension sizes */
+ if(H5LDget_dset_dims(did, prev_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Loop through different variations of extending the dataset */
+ for(i = 0; i < TWO_NTESTS; i++) {
+ ext_dims[0] = prev_dims[0] + two_tests[i][0];
+ ext_dims[1] = prev_dims[1] + two_tests[i][1];
+
+ /* Change the dimension sizes of the dataset */
+ if(H5Dset_extent(did, ext_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Initialize data (pick the buffer matching the dataset's datatype) */
+ if(!HDstrcmp(dname, DSET_CMPD_TWO)) {
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, cbuf) < 0)
+ FAIL_STACK_ERROR
+
+ } else if(!HDstrcmp(dname, DSET_TWO)) {
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf) < 0)
+ FAIL_STACK_ERROR
+ }
+
+ /* There are changes in dimension sizes */
+ if(two_tests[i][0] > 0 || two_tests[i][1] > 0) {
+
+ if(!HDstrcmp(dname, DSET_CMPD_TWO)) {
+
+ if(fields && (!HDstrcmp(fields, VALID_FIELDS1) ||
+ !HDstrcmp(fields, VALID_ESC_FIELDS1))) {
+
+ HDmemset(vbuf1, 0, sizeof(vbuf1));
+
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf1) < 0)
+ TEST_ERROR
+
+ if(verify_elmts_two(TWO_CMPD_VALID1, ext_dims, prev_dims, vbuf1, cbuf) < 0)
+ TEST_ERROR
+
+ } else if(fields && (!HDstrcmp(fields, VALID_FIELDS2) ||
+ !HDstrcmp(fields, VALID_ESC_FIELDS2))) {
+ HDmemset(vbuf2, 0, sizeof(vbuf2));
+
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf2) < 0)
+ TEST_ERROR
+
+ if(verify_elmts_two(TWO_CMPD_VALID2, ext_dims, prev_dims, vbuf2, cbuf) < 0)
+ TEST_ERROR
+
+ } else if(fields == NULL) {
+
+ HDmemset(ccbuf, 0, sizeof(ccbuf));
+
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, ccbuf) < 0)
+ TEST_ERROR
+
+ if(verify_elmts_two(TWO_CMPD_NULL, ext_dims, prev_dims, ccbuf, cbuf) < 0)
+ TEST_ERROR
+ }
+ } else { /* DSET_TWO */
+ HDmemset(iibuf, 0, sizeof(iibuf));
+
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf) < 0)
+ TEST_ERROR
+
+ if(verify_elmts_two(TWO_NONE, ext_dims, prev_dims, iibuf, ibuf) < 0)
+ TEST_ERROR
+ }
+ } else {
+ /* Verify failure when changes between prev_dims and ext_dims are same/decrease */
+ ret = H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf);
+ VERIFY_EQUAL(ret, FAIL)
+ }
+ } /* end for */
+
+ /* Closing */
+ H5Tclose(dtype);
+ H5Dclose(did);
+ H5Fclose(fid);
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Tclose(dtype);
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+} /* test_LD_elmts_two() */
+
+/*
+ * Tests for High Level routines:
+ * H5LDget_dset_dims(), H5LDget_dset_elmts, H5LDget_dset_type_size()
+ *
+ * Each test routine returns 0 on success and a negative value on failure,
+ * so nerrors is nonzero exactly when at least one test failed.
+ * Exit status: 0 if all tests pass, 1 otherwise.
+ */
+int main(void)
+{
+ int nerrors = 0; /* nonzero when any test has failed */
+
+ /*
+ * Testing H5LDget_dset_dims()
+ */
+ nerrors += test_LD_dims_params(FILE);
+ nerrors += test_LD_dims(FILE);
+
+ /*
+ * Testing H5LDget_dset_type_size()
+ */
+ nerrors += test_LD_size(FILE);
+
+ /*
+ * Testing invalid conditions for H5LDget_dset_elmts()
+ */
+ nerrors += test_LD_elmts_invalid(FILE);
+
+ /*
+ * Testing H5LDget_dset_elmts():
+ * 1-dimensional dataset
+ */
+ nerrors += test_LD_elmts_one(FILE, DSET_ONE, NULL);
+
+ /*
+ * Testing H5LDget_dset_elmts():
+ * 1-dimensional dataset w/ compound datatype
+ */
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD, NULL);
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD, VALID_FIELDS1);
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD, VALID_FIELDS2);
+
+ /*
+ * Testing H5LDget_dset_elmts():
+ * 1-dimensional dataset with compound datatype and
+ * member names with escape/separator characters
+ */
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD_ESC, NULL);
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD_ESC, VALID_ESC_FIELDS1);
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD_ESC, VALID_ESC_FIELDS2);
+
+ /*
+ * Testing H5LDget_dset_elmts() for 2-dimensional datasets
+ */
+ nerrors += test_LD_elmts_two(FILE, DSET_TWO, NULL);
+ nerrors += test_LD_elmts_two(FILE, DSET_CMPD_TWO, NULL);
+ nerrors += test_LD_elmts_two(FILE, DSET_CMPD_TWO, VALID_FIELDS1);
+ nerrors += test_LD_elmts_two(FILE, DSET_CMPD_TWO, VALID_FIELDS2);
+
+ /* check for errors */
+ if(nerrors)
+ goto error;
+
+ puts("All tests for H5LD high level routines passed.");
+
+ return(0);
+
+error:
+ return(1);
+} /* main() */
diff --git a/hl/test/test_ld.h5 b/hl/test/test_ld.h5
new file mode 100644
index 0000000..40c4ff0
--- /dev/null
+++ b/hl/test/test_ld.h5
Binary files differ
diff --git a/hl/test/test_ld.sh.in b/hl/test/test_ld.sh.in
new file mode 100644
index 0000000..69beea6
--- /dev/null
+++ b/hl/test/test_ld.sh.in
@@ -0,0 +1,94 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+#
+# Concurrent tests for H5LD* routines:
+# one process (ld_monitor) watches a dataset while another (ld_extend)
+# extends it; the monitor's output is compared against a stored baseline.
+#
+# Determine backward compatibility options enabled
+# NOTE(review): DEPRECATED_SYMBOLS is captured but not referenced below --
+# confirm whether it is still needed.
+DEPRECATED_SYMBOLS="@DEPRECATED_SYMBOLS@"
+
+CMP='cmp -s'
+DIFF='diff -c'
+CP='cp'
+KILL='kill'
+SLEEP='sleep'
+LD_MONITOR=ld_monitor
+LD_MONITOR_BIN=`pwd`/$LD_MONITOR
+LD_EXTEND=ld_extend
+LD_EXTEND_BIN=`pwd`/$LD_EXTEND
+
+nerrors=0
+verbose=yes
+
+# The build (current) directory might be different than the source directory.
+if test -z "$srcdir"; then
+ srcdir=.
+fi
+
+test -d ./testfiles || mkdir ./testfiles
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# Run one monitor/extend test pair and compare the monitor's output with
+# the expected output; increments $nerrors on mismatch.
+# $1 -- the hdf5 file to be monitored and extended
+# $2 -- the dataset name to be monitored and extended
+# $3 -- the expected output from monitoring the dataset
+TESTLD() {
+ expect="$srcdir/testfiles/$3" # the expected output
+ actual="./testfiles/$3.OUT" # the actual output
+ FNAME="`basename $1 .h5`_$2.h5" # the HDF5 file
+ $CP $srcdir/$1 ./$FNAME # copy the file to a temporary file
+ $LD_MONITOR_BIN $FNAME $2 > $actual 2>&1 & # monitor the dataset in the file
+ MONITOR_PID=$! # get the id of the monitor process
+ $LD_EXTEND_BIN $FNAME $2 # extend the dataset
+ echo "Sleeping for 3 seconds..."
+ $SLEEP 3 # sleep to allow output to be flushed
+ # NOTE(review): a fixed 3s sleep may be flaky on heavily-loaded machines
+ echo "Killing the monitor..."
+ $KILL $MONITOR_PID # kill the monitor process
+ if $CMP $expect $actual; then # compare the output with the expected output
+ echo " PASSED"
+ else
+ echo "*FAILED*"
+ echo " Expected result differs from actual result"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF $expect $actual |sed 's/^/ /'
+ fi
+ if test -z "$HDF5_NOCLEANUP"; then # clean up output file, temporary HDF5 file
+ rm -f $actual $FNAME
+ fi
+}
+
+##############################################################################
+##############################################################################
+### T H E T E S T S ###
+##############################################################################
+##############################################################################
+
+# Monitor DSET_ONE while extending the dataset
+TESTLD test_ld.h5 DSET_ONE test_ld_out1
+#
+# Monitor DSET_TWO while extending the dataset
+TESTLD test_ld.h5 DSET_TWO test_ld_out2
+
+if test $nerrors -eq 0 ; then
+ echo "All high level H5LD concurrent tests passed."
+fi
+
+exit $nerrors
diff --git a/hl/test/testfiles/test_ld_out1 b/hl/test/testfiles/test_ld_out1
new file mode 100644
index 0000000..dda3552
--- /dev/null
+++ b/hl/test/testfiles/test_ld_out1
@@ -0,0 +1,31 @@
+Monitoring dataset DSET_ONE...
+
+13
+
+10 11 12 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+
+12
+
+1
+
+3
+
+1 2 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
diff --git a/hl/test/testfiles/test_ld_out2 b/hl/test/testfiles/test_ld_out2
new file mode 100644
index 0000000..7a98c19
--- /dev/null
+++ b/hl/test/testfiles/test_ld_out2
@@ -0,0 +1,72 @@
+Monitoring dataset DSET_TWO...
+
+6 12
+
+10 11 22 23 34 35 46 47 48 49
+50 51 52 53 54 55 56 57 58 59
+60 61 62 63 64 65 66 67 68 69
+70 71 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+
+8 1
+
+6 7 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+
+10 1
+
+8 9 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+
+3 3
+
+1 2 4 5 7 8 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+
+2 2
+
+1 2
+
+1 4
+
+2 3 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+0 0 0 0 0 0 0 0 0 0
+
+1 3