-rw-r--r--  doc/html/Intro/Dependencies | 0
-rw-r--r--  tools/h4toh5/Dependencies | 0
-rw-r--r--  tools/h4toh5/Makefile.in | 60
-rw-r--r--  tools/h4toh5/h4toh5anno.c | 665
-rw-r--r--  tools/h4toh5/h4toh5image.c | 861
-rw-r--r--  tools/h4toh5/h4toh5main.c | 1629
-rw-r--r--  tools/h4toh5/h4toh5main.h | 109
-rw-r--r--  tools/h4toh5/h4toh5sds.c | 1096
-rw-r--r--  tools/h4toh5/h4toh5util.c | 1633
-rw-r--r--  tools/h4toh5/h4toh5vdata.c | 831
-rw-r--r--  tools/h4toh5/h4toh5vgroup.c | 812
-rwxr-xr-x  tools/h4toh5/testh4toh5.sh | 249
-rw-r--r--  tools/h5dump/Dependencies | 0
-rw-r--r--  tools/h5dump/Makefile.in | 65
-rw-r--r--  tools/h5dump/h5dump.h | 22
-rw-r--r--  tools/h5dump/h5dumptst.c | 2577
-rwxr-xr-x  tools/h5dump/testh5dump.sh | 197
-rw-r--r--  tools/h5ls/Dependencies | 0
-rw-r--r--  tools/h5ls/Makefile.in | 55
-rwxr-xr-x  tools/h5ls/testh5ls.sh | 143
-rw-r--r--  tools/h5toh4/Dependencies | 0
-rw-r--r--  tools/h5toh4/Makefile.in | 60
-rw-r--r--  tools/h5toh4/h5toh4.h | 43
-rwxr-xr-x  tools/h5toh4/testh5toh4.sh | 299
-rw-r--r--  tools/lib/Dependencies | 0
-rw-r--r--  tools/misc/Dependencies | 0
-rw-r--r--  tools/misc/Makefile.in | 71
-rw-r--r--  tools/misc/h5debug.c | 185
-rw-r--r--  tools/misc/h5import.c | 141
-rw-r--r--  tools/misc/h5repart.c | 418
-rw-r--r--  tools/misc/pdb2hdf.c | 503
-rw-r--r--  tools/testfiles/Expected/anno_test.h5 | bin 0 -> 4096 bytes
-rw-r--r--  tools/testfiles/Expected/gr_typ_test.h5 | bin 0 -> 5296 bytes
-rw-r--r--  tools/testfiles/Expected/grnameclash_test.h5 | bin 0 -> 11192 bytes
-rw-r--r--  tools/testfiles/Expected/image_attr_test.h5 | bin 0 -> 5164 bytes
-rw-r--r--  tools/testfiles/Expected/ras_24_test.h5 | bin 0 -> 4396 bytes
-rw-r--r--  tools/testfiles/Expected/ras_8_test.h5 | bin 0 -> 4196 bytes
-rw-r--r--  tools/testfiles/Expected/sds_attr.h5 | bin 0 -> 14048 bytes
-rw-r--r--  tools/testfiles/Expected/sds_attr_test.h5 | bin 0 -> 11872 bytes
-rw-r--r--  tools/testfiles/Expected/sds_dim_test.h5 | bin 0 -> 14032 bytes
-rw-r--r--  tools/testfiles/Expected/sds_typ_test.h5 | bin 0 -> 10624 bytes
-rw-r--r--  tools/testfiles/Expected/sdsnameclash_test.h5 | bin 0 -> 11840 bytes
-rw-r--r--  tools/testfiles/Expected/vdata_test.h5 | bin 0 -> 4306 bytes
-rw-r--r--  tools/testfiles/Expected/vdnameclash_test.h5 | bin 0 -> 10660 bytes
-rw-r--r--  tools/testfiles/Expected/vg_all_test.h5 | bin 0 -> 24674 bytes
-rw-r--r--  tools/testfiles/Expected/vg_hl_test.h5 | bin 0 -> 6400 bytes
-rw-r--r--  tools/testfiles/Expected/vg_loop_test.h5 | bin 0 -> 6400 bytes
-rw-r--r--  tools/testfiles/Expected/vgnameclash_test.h5 | bin 0 -> 6344 bytes
-rw-r--r--  tools/testfiles/anno_test.hdf | bin 0 -> 448 bytes
-rw-r--r--  tools/testfiles/gr_typ_test.hdf | bin 0 -> 2052 bytes
-rw-r--r--  tools/testfiles/grnameclash_test.hdf | bin 0 -> 3981 bytes
-rw-r--r--  tools/testfiles/image_attr_test.hdf | bin 0 -> 1761 bytes
-rw-r--r--  tools/testfiles/ras_24_test.hdf | bin 0 -> 795 bytes
-rw-r--r--  tools/testfiles/ras_8_test.hdf | bin 0 -> 593 bytes
-rw-r--r--  tools/testfiles/sds_attr_test.hdf | bin 0 -> 9758 bytes
-rw-r--r--  tools/testfiles/sds_dim_test.hdf | bin 0 -> 9086 bytes
-rw-r--r--  tools/testfiles/sds_typ_test.hdf | bin 0 -> 9171 bytes
-rw-r--r--  tools/testfiles/sdsnameclash_test.hdf | bin 0 -> 5426 bytes
-rw-r--r--  tools/testfiles/tall.h5.xml | 173
-rw-r--r--  tools/testfiles/tarray1.h5.xml | 31
-rw-r--r--  tools/testfiles/tarray2.h5.xml | 80
-rw-r--r--  tools/testfiles/tarray3.h5.xml | 120
-rw-r--r--  tools/testfiles/tarray6.h5.xml | 38
-rw-r--r--  tools/testfiles/tarray7.h5.xml | 43
-rw-r--r--  tools/testfiles/tattr.h5.xml | 91
-rw-r--r--  tools/testfiles/tbitfields.h5.xml | 48
-rw-r--r--  tools/testfiles/tcompound.h5.xml | 280
-rw-r--r--  tools/testfiles/tcompound2.h5.xml | 196
-rw-r--r--  tools/testfiles/tdatareg.h5.xml | 54
-rw-r--r--  tools/testfiles/tdset.h5.xml | 133
-rw-r--r--  tools/testfiles/tdset2.h5.xml | 95
-rw-r--r--  tools/testfiles/tempty.h5.xml | 125
-rw-r--r--  tools/testfiles/tenum.h5.xml | 59
-rw-r--r--  tools/testfiles/tgroup.h5.xml | 35
-rw-r--r--  tools/testfiles/thlink.h5.xml | 39
-rw-r--r--  tools/testfiles/tloop.h5.xml | 19
-rw-r--r--  tools/testfiles/tloop2.h5.xml | 17
-rw-r--r--  tools/testfiles/tmany.h5.xml | 338
-rw-r--r--  tools/testfiles/tname-amp.h5 | bin 0 -> 2880 bytes
-rw-r--r--  tools/testfiles/tname-amp.h5.xml | 55
-rw-r--r--  tools/testfiles/tname-apos.h5 | bin 0 -> 2880 bytes
-rw-r--r--  tools/testfiles/tname-apos.h5.xml | 55
-rw-r--r--  tools/testfiles/tname-gt.h5 | bin 0 -> 2880 bytes
-rw-r--r--  tools/testfiles/tname-gt.h5.xml | 55
-rw-r--r--  tools/testfiles/tname-lt.h5 | bin 0 -> 2880 bytes
-rw-r--r--  tools/testfiles/tname-lt.h5.xml | 55
-rw-r--r--  tools/testfiles/tname-quot.h5 | bin 0 -> 2880 bytes
-rw-r--r--  tools/testfiles/tname-quot.h5.xml | 55
-rw-r--r--  tools/testfiles/tname-sp.h5 | bin 0 -> 2880 bytes
-rw-r--r--  tools/testfiles/tname-sp.h5.xml | 55
-rw-r--r--  tools/testfiles/tnestedcomp.h5.xml | 76
-rw-r--r--  tools/testfiles/tnodata.h5 | bin 0 -> 1412 bytes
-rw-r--r--  tools/testfiles/tnodata.h5.xml | 26
-rw-r--r--  tools/testfiles/tobjref.h5.xml | 92
-rw-r--r--  tools/testfiles/topaque.h5.xml | 27
-rw-r--r--  tools/testfiles/tref-escapes-at.h5 | bin 0 -> 5849 bytes
-rw-r--r--  tools/testfiles/tref-escapes-at.h5.xml | 282
-rw-r--r--  tools/testfiles/tref-escapes.h5 | bin 0 -> 5536 bytes
-rw-r--r--  tools/testfiles/tref-escapes.h5.xml | 157
-rw-r--r--  tools/testfiles/tref.h5 | bin 0 -> 3004 bytes
-rw-r--r--  tools/testfiles/tref.h5.xml | 66
-rw-r--r--  tools/testfiles/tsaf.h5.xml | 2602
-rw-r--r--  tools/testfiles/tslink.h5.xml | 11
-rw-r--r--  tools/testfiles/tstr.h5.xml | 359
-rw-r--r--  tools/testfiles/tstr2.h5.xml | 196
-rw-r--r--  tools/testfiles/tstring-at.h5 | bin 0 -> 1672 bytes
-rw-r--r--  tools/testfiles/tstring-at.h5.xml | 55
-rw-r--r--  tools/testfiles/tstring.h5 | bin 0 -> 2160 bytes
-rw-r--r--  tools/testfiles/tstring.h5.xml | 55
-rw-r--r--  tools/testfiles/tvldtypes1.h5.xml | 73
-rw-r--r--  tools/testfiles/tvldtypes2.h5.xml | 36
-rw-r--r--  tools/testfiles/tvldtypes3.h5.xml | 51
-rw-r--r--  tools/testfiles/vdata_test.hdf | bin 0 -> 802 bytes
-rw-r--r--  tools/testfiles/vdnameclash_test.hdf | bin 0 -> 1536 bytes
-rw-r--r--  tools/testfiles/vg_all_test.hdf | bin 0 -> 14231 bytes
-rw-r--r--  tools/testfiles/vg_hl_test.hdf | bin 0 -> 1243 bytes
-rw-r--r--  tools/testfiles/vg_loop_test.hdf | bin 0 -> 1249 bytes
-rw-r--r--  tools/testfiles/vgnameclash_test.hdf | bin 0 -> 893 bytes
118 files changed, 19232 insertions, 0 deletions
diff --git a/doc/html/Intro/Dependencies b/doc/html/Intro/Dependencies
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/doc/html/Intro/Dependencies
diff --git a/tools/h4toh5/Dependencies b/tools/h4toh5/Dependencies
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/h4toh5/Dependencies
diff --git a/tools/h4toh5/Makefile.in b/tools/h4toh5/Makefile.in
new file mode 100644
index 0000000..71019e2
--- /dev/null
+++ b/tools/h4toh5/Makefile.in
@@ -0,0 +1,60 @@
+## HDF5 Library Makefile(.in)
+##
+## Copyright (C) 2001 National Center for Supercomputing Applications.
+## All rights reserved.
+##
+##
+top_srcdir=@top_srcdir@
+top_builddir=../..
+srcdir=@srcdir@
+SUBDIRS=
+@COMMENCE@
+
+## Add include directory to the C preprocessor flags, add -lh5tools and
+## -lhdf5 to the list of libraries.
+##
+CPPFLAGS=-I. -I$(srcdir) -I$(top_builddir)/src -I$(top_srcdir)/src \
+ -I$(top_srcdir)/tools/lib @CPPFLAGS@
+
+## Test programs and scripts.
+##
+TEST_PROGS=
+TEST_SCRIPTS=@TESTH4TOH5@
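+## (@TESTH4TOH5@, like the other @NAME@ tokens in this Makefile.in, is
+## filled in by configure.)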
+
+## These are our main targets: library and tools.
+##
+LIBTOOLS=../lib/libh5tools.la
+LIBHDF5=$(top_builddir)/src/libhdf5.la
+
+PUB_PROGS=@H4TOH5@
+PROGS=$(PUB_PROGS) $(TEST_PROGS) @H4TOH5TEST@
+
+## Source and object files for h4toh5 converter.
+##
+PROG_SRC=h4toh5main.c h4toh5vgroup.c h4toh5vdata.c h4toh5sds.c \
+ h4toh5image.c h4toh5pal.c h4toh5anno.c h4toh5util.c
+PROG_OBJ=$(PROG_SRC:.c=.lo)
+
+PRIVATE_HDR=h4toh5main.h h4toh5util.h
+
+## Source and object files for the tests
+##
+TEST_SRC=h4toh5test.c
+TEST_OBJ=$(TEST_SRC:.c=.lo)
+
+## Programs have to be built before they can be tested!
+##
+check test _test: $(PROGS)
+
+## How to build the programs... They all depend on the hdf5 library and
+## the tools library compiled in this directory.
+##
+$(PROGS): $(LIBTOOLS) $(LIBHDF5)
+
+h4toh5test: h4toh5test.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ h4toh5test.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+h4toh5: $(PROG_OBJ)
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ $(PROG_OBJ) $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+@CONCLUDE@
diff --git a/tools/h4toh5/h4toh5anno.c b/tools/h4toh5/h4toh5anno.c
new file mode 100644
index 0000000..cd09e26
--- /dev/null
+++ b/tools/h4toh5/h4toh5anno.c
@@ -0,0 +1,665 @@
+/*-------------------------------------------------------------------------
+ *
+ * Copyright (C) 2000 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/******************************************************************************
+
+ Description:
+
+1. The converter
+
+See the HDF4 to HDF5 mapping specification at
+http://hdf.ncsa.uiuc.edu/HDF5/papers/h4toh5 for the default mapping
+from HDF4 objects to HDF5 objects.
+
+The converter consists of 10 files: h4toh5util.h, h4toh5main.h, h4toh5util.c,
+h4toh5main.c, h4toh5sds.c, h4toh5image.c, h4toh5vdata.c, h4toh5vgroup.c,
+h4toh5pal.c and h4toh5anno.c.
+
+2. This file
+
+Converts an HDF4 annotation into an HDF5 attribute of the corresponding object.
+
+
+Author: Kent Yang(ymuqun@ncsa.uiuc.edu)
+
+
+*****************************************************************************/
+
+#include "h4toh5main.h"
+
+
+/*-------------------------------------------------------------------------
+ * Function: Annofil_h4_to_h5
+ *
+ * Purpose: translate file annotation object into hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: file identifier
+ h5_group: hdf5 group id
+
+ *-------------------------------------------------------------------------
+ */
+
+int Annofil_h4_to_h5(int32 file_id,hid_t h5group){
+
+ int32 an_id;
+ int32 ann_id;
+ int32 i;
+
+ int32 ann_length;
+
+ int32 n_file_label = 0;
+ int32 n_file_desc = 0;
+ int32 n_data_label = 0;
+ int32 n_data_desc = 0;
+
+ int32 istat;
+
+ char* ann_buf;
+ char anno_labelname[30];
+ char anno_descname[30];
+ char index_str[5];
+
+ hid_t h5_sid;
+ hid_t h5_aid;
+ hid_t sh5str_type;
+ hid_t sh5str1_type;
+ hid_t ret;
+
+ an_id = ANstart(file_id);
+
+ if(an_id < 0) {
+ printf("error in obtaining an_id. \n");
+ return FAIL;
+ }
+
+ istat = ANfileinfo(an_id,&n_file_label,&n_file_desc,
+ &n_data_label,&n_data_desc);
+
+ if(istat == FAIL) {
+ printf("error getting file information.\n");
+ ANend(an_id);
+ return FAIL;
+ }
+
+ for (i = 0; i < n_file_label; i++) {
+
+ ann_id = ANselect(an_id,i,AN_FILE_LABEL);
+ if(ann_id == FAIL) {
+ printf("error in obtaining annotation id. \n");
+ ANend(an_id);
+ return FAIL;
+ }
+
+ ann_length = ANannlen(ann_id);
+ if(ann_length == FAIL) {
+ printf("error in obtaining annotation length. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ return FAIL;
+ }
+
+ ann_buf = malloc((size_t)ann_length + 1);
+ if(ann_buf == NULL) {
+ printf("error in allocating memory. \n");
+ return FAIL;
+ }
+ h4toh5_ZeroMemory(ann_buf,(ann_length+1)*sizeof(char));
+ istat = ANreadann(ann_id,ann_buf,ann_length+1);
+
+ if(istat==FAIL) {
+ printf("fail to read file information. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ h5_sid = H5Screate(H5S_SCALAR);
+
+ if (h5_sid < 0) {
+ printf("failed to create attribute space for");
+ printf(" HDF4 FILE ANNOTATION. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ if ((sh5str_type = mkstr(ann_length+1,H5T_STR_SPACEPAD))<0) {
+ printf("error in making string at FILE LABEL ANNO. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ if(conv_int_str(i,index_str)== FAIL) {
+ printf("fail to convert integer into character format.\n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
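+ /* each file label becomes one attribute, named with the HDF4_FILE_LABEL
+ prefix followed by "_" and the annotation index. */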
+ strcpy(anno_labelname,HDF4_FILE_LABEL);
+ strcat(anno_labelname,"_");
+ strcat(anno_labelname,index_str);
+
+
+ h5_aid = H5Acreate(h5group,anno_labelname,sh5str_type,
+ h5_sid,H5P_DEFAULT);
+
+ if (h5_aid <0) {
+ printf("failed to obtain attribute id for");
+ printf(" File annotation. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ ret = H5Awrite(h5_aid,sh5str_type,(void *)ann_buf);
+
+ if (ret <0) {
+ printf("failed to obtain attribute.\n ");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ ret = H5Sclose(h5_sid);
+ ret = H5Aclose(h5_aid);
+ free(ann_buf);
+ ANendaccess(ann_id);
+
+ }
+
+ for (i = 0; i < n_file_desc; i++) {
+
+ ann_id = ANselect(an_id,i,AN_FILE_DESC);
+ if(ann_id == FAIL) {
+ printf("error in obtaining annotation id. \n");
+ ANend(an_id);
+ return FAIL;
+ }
+
+ ann_length = ANannlen(ann_id);
+
+ if(ann_length == FAIL) {
+ printf("error in obtaining annotation length. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ return FAIL;
+ }
+
+ ann_buf = malloc((size_t)ann_length+1);
+ if(ann_buf == NULL) {
+ printf("error in allocating memory. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ return FAIL;
+ }
+ h4toh5_ZeroMemory(ann_buf,ann_length+1);
+
+ istat = ANreadann(ann_id,ann_buf,ann_length+1);
+
+ if(istat == FAIL) {
+ printf("error reading file information. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ if ((sh5str1_type = mkstr(ann_length+1,H5T_STR_SPACEPAD))<0) {
+ printf("error in making string at FILE DESC. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ if(conv_int_str(i,index_str)==FAIL) {
+ printf("fail to convert integer into character format.\n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ strcpy(anno_descname,HDF4_FILE_DESC);
+ strcat(anno_descname,"_");
+ strcat(anno_descname,index_str);
+
+ h5_sid = H5Screate(H5S_SCALAR);
+
+ if (h5_sid < 0) {
+ printf("failed to create attribute space for");
+ printf(" HDF4 FILE ANNOTATION. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ h5_aid = H5Acreate(h5group,anno_descname,sh5str1_type,
+ h5_sid,H5P_DEFAULT);
+
+ if (h5_aid <0) {
+
+ printf("failed to obtain attribute id for");
+ printf(" File annotation. \n");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ H5Sclose(h5_sid);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ ret = H5Awrite(h5_aid,sh5str1_type,(void *)ann_buf);
+
+ if (ret <0) {
+ printf("failed to obtain attribute.\n ");
+ ANend(an_id);
+ ANendaccess(ann_id);
+ H5Sclose(h5_sid);
+ H5Aclose(h5_aid);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ ret = H5Sclose(h5_sid);
+ ret = H5Aclose(h5_aid);
+ free(ann_buf);
+ ANendaccess(ann_id);
+ }
+ ANend(an_id);
+ return SUCCEED;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: Annoobj_h4_to_h5
+ *
+ * Purpose: translate annotation object into attribute of hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: file identifier
+ obj_ref: object reference
+ obj_tag: object tag
+ h5group: hdf5 group
+
+ *-------------------------------------------------------------------------
+ */
+int Annoobj_h4_to_h5(int32 file_id,int32 obj_ref, int32 obj_tag,
+ hid_t h5group){
+
+ int32 an_id;
+ int32 ann_id;
+ int32 i;
+ int32 status;
+ int32 ann_length;
+
+ int32 n_file_label =-1;
+ int32 n_file_desc =-1;
+ int32 n_data_label =-1;
+ int32 n_data_desc =-1;
+
+ int num_lab_anno;
+ int num_des_anno;
+ int32 istat;
+ int32* des_anno_list=NULL;
+ int32* lab_anno_list=NULL;
+
+ char* ann_buf;
+ char* ann_obj_name;
+ char ann_labelname[30];
+
+ char index_str[5];
+
+ hid_t h5_sid;
+ hid_t h5_aid;
+ hid_t sh5str_type;
+ hid_t ret;
+
+ an_id = ANstart(file_id);
+ if(an_id == FAIL) {
+ printf("fail to start annotation interface.\n");
+ return FAIL;
+ }
+
+ istat = ANfileinfo(an_id,&n_file_label,&n_file_desc,
+ &n_data_label,&n_data_desc);
+
+ if(istat == FAIL ) {
+ printf("error getting file information.\n");
+ ANend(an_id);
+ return FAIL;
+ }
+
+ num_lab_anno = ANnumann(an_id,AN_DATA_LABEL,obj_tag,obj_ref);
+ num_des_anno = ANnumann(an_id,AN_DATA_DESC,obj_tag,obj_ref);
+
+ if (num_lab_anno == FAIL) {
+ printf("error getting number of annotation data label.\n");
+ ANend(an_id);
+ return FAIL;
+ }
+
+ if (num_des_anno == FAIL) {
+ printf("error getting number of annotation object label.\n");
+ ANend(an_id);
+ return FAIL;
+ }
+
+ if(num_lab_anno > 0) {
+
+ for(i=0; i<num_lab_anno;i++) {
+ ann_id = ANselect(an_id,i,AN_DATA_LABEL);
+
+ if(ann_id == FAIL) {
+ printf("error in obtaining annotation id.\n");
+ ANend(an_id);
+ return FAIL;
+ }
+
+ ann_length = ANannlen(ann_id);
+ if(ann_length == FAIL) {
+ printf("error in getting annotation length. \n");
+ ANendaccess(ann_id);
+ ANend(an_id);
+ return FAIL;
+ }
+
+ ann_buf = malloc((size_t)ann_length+1);
+ if(ann_buf == NULL) {
+ printf("error in allocating annotation memory.\n");
+ ANendaccess(ann_id);
+ ANend(an_id);
+ return FAIL;
+ }
+ h4toh5_ZeroMemory(ann_buf,(ann_length+1)*sizeof(char));
+ status = ANreadann(ann_id,ann_buf,ann_length+1);
+ if(status == FAIL) {
+ printf("error in reading data.\n");
+ ANendaccess(ann_id);
+ ANend(an_id);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ status = ANendaccess(ann_id);
+
+ h5_sid = H5Screate(H5S_SCALAR);
+
+ if (h5_sid < 0) {
+ printf("failed to create attribute space for");
+ printf(" HDF4 FILE ANNOTATION. \n");
+ ANend(an_id);
+ free(lab_anno_list);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ if ((sh5str_type = mkstr(ann_length+1,H5T_STR_SPACEPAD))<0) {
+ printf("error in making string at OBJ LABEL. \n");
+ ANend(an_id);
+ free(lab_anno_list);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ if(conv_int_str(i,index_str)== FAIL) {
+ printf("fail to convert annotation index into character format.\n");
+ ANend(an_id);
+ free(lab_anno_list);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ /* obtain annotation object name. The name is defined based on object tag
+ */
+ ann_obj_name = trans_tag_name(obj_tag,AN_DATA_LABEL);
+
+ if(ann_obj_name != NULL)
+ strcpy(ann_labelname,ann_obj_name);
+
+ strcat(ann_labelname,"_");
+ strcat(ann_labelname,index_str);
+
+ h5_aid = H5Acreate(h5group,ann_labelname,sh5str_type,
+ h5_sid,H5P_DEFAULT);
+
+ if (h5_aid <0) {
+
+ printf("failed to obtain attribute id for");
+ printf(" file annotation. \n");
+ ANend(an_id);
+ free(lab_anno_list);
+ free(ann_buf);
+ free(ann_obj_name);
+ return FAIL;
+ }
+
+ ret = H5Awrite(h5_aid,sh5str_type,(void *)ann_buf);
+
+ if (ret <0) {
+ printf("failed to obtain attribute.\n ");
+ ANend(an_id);
+ free(lab_anno_list);
+ free(ann_buf);
+ free(ann_obj_name);
+ return FAIL;
+ }
+
+ ret = H5Sclose(h5_sid);
+ ret = H5Aclose(h5_aid);
+ free(ann_obj_name);
+ free(ann_buf);
+ }
+ }
+
+ if(num_des_anno > 0) {
+
+
+ for (i = 0; i< num_des_anno;i++) {
+
+ ann_id = ANselect(an_id,i,AN_DATA_DESC);
+ if(ann_id == FAIL) {
+ printf("error in obtaining annotation id.\n");
+ ANend(an_id);
+ return FAIL;
+ }
+ ann_length = ANannlen(ann_id);
+ if(ann_length == FAIL) {
+ printf("error in getting annotation length. \n");
+ ANendaccess(ann_id);
+ ANend(an_id);
+ return FAIL;
+ }
+
+ ann_buf = malloc((size_t)ann_length+1);
+
+ if(ann_buf == NULL) {
+ printf("error in allocating annotation memory.\n");
+ ANendaccess(ann_id);
+ ANend(an_id);
+ return FAIL;
+ }
+
+ h4toh5_ZeroMemory(ann_buf,(ann_length+1)*sizeof(char));
+ ANreadann(ann_id,ann_buf,ann_length+1);
+
+ if ((sh5str_type = mkstr(ann_length+1,H5T_STR_SPACEPAD))<0) {
+ printf("error in making string at OBJECT DESC. \n");
+ ANend(an_id);
+ free(des_anno_list);
+ free(ann_buf);
+ return FAIL;
+ }
+
+ if(conv_int_str(i,index_str)== FAIL) {
+ printf("fail to convert annotation index into character format.\n");
+ ANend(an_id);
+ free(ann_buf);
+ free(des_anno_list);
+ return FAIL;
+ }
+ ann_obj_name = trans_tag_name(obj_tag,AN_DATA_DESC);
+ if(ann_obj_name == NULL) {
+ printf("error in obtaining tag name. \n");
+ ANend(an_id);
+ free(ann_buf);
+ free(des_anno_list);
+ return FAIL;
+ }
+
+ strcpy(ann_labelname,ann_obj_name);
+ strcat(ann_labelname,"_");
+ strcat(ann_labelname,index_str);
+
+ h5_sid = H5Screate(H5S_SCALAR);
+
+ if (h5_sid < 0) {
+ printf("failed to create attribute space for");
+ printf(" HDF4 OBJECT ANNOTATION. \n");
+ ANend(an_id);
+ free(des_anno_list);
+ free(ann_buf);
+ free(ann_obj_name);
+ return FAIL;
+ }
+
+ h5_aid = H5Acreate(h5group,ann_labelname,sh5str_type,
+ h5_sid,H5P_DEFAULT);
+
+ if (h5_aid <0) {
+
+ ANend(an_id);
+ free(ann_buf);
+ free(des_anno_list);
+ free(ann_obj_name);
+ printf("failed to obtain attribute id for ");
+ printf("File annotation. \n");
+ return FAIL;
+ }
+
+ ret = H5Awrite(h5_aid,sh5str_type,(void *)ann_buf);
+
+ if (ret <0) {
+ printf("failed to obtain attribute.\n ");
+ ANend(an_id);
+ free(ann_buf);
+ free(des_anno_list);
+ free(ann_obj_name);
+ return FAIL;
+ }
+ ret = H5Sclose(h5_sid);
+ ret = H5Aclose(h5_aid);
+ free(ann_obj_name);
+
+ free(ann_buf);
+ }
+
+ }
+ ANend(an_id);
+ return SUCCEED;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: trans_tag_name
+ *
+ * Purpose: in annotation routine,
+ translate annotation object tag into corresponding HDF5 object
+ name.
+
+ *
+ * Return: NULL if failed, HDF5 object name if successful.
+ *
+ * In :
+ obj_tag: hdf4 object tag
+ annot_type: hdf4 annotation type
+
+ *-------------------------------------------------------------------------
+ */
+char* trans_tag_name(int32 obj_tag,ann_type annot_type){
+
+ char* obj_name;
+
+ obj_name = malloc(strlen(HDF4_VGROUP_LABEL)+1);
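+ /* note: the buffer is sized for HDF4_VGROUP_LABEL; this assumes none of
+ the other label/description names copied below is longer than that string. */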
+
+ if(obj_name == NULL) {
+ printf("error in obtaining tag name. \n");
+ return NULL;
+ }
+
+ if (obj_tag == DFTAG_NDG || obj_tag == DFTAG_SDG || obj_tag == DFTAG_SD) {
+
+ if(annot_type == AN_DATA_LABEL)
+ strcpy(obj_name,HDF4_SDS_LABEL);
+
+ else if(annot_type == AN_DATA_DESC)
+ strcpy(obj_name,HDF4_SDS_DESC);
+ else
+ return NULL;
+ }
+
+ else if(obj_tag == DFTAG_RIG || obj_tag == DFTAG_RI || obj_tag == DFTAG_RI8)
+ {
+ if(annot_type == AN_DATA_LABEL)
+ strcpy(obj_name,HDF4_IMAGE_LABEL);
+ else if(annot_type == AN_DATA_DESC)
+ strcpy(obj_name,HDF4_IMAGE_DESC);
+ else
+ return NULL;
+ }
+
+ else if(obj_tag == DFTAG_VG) {
+ if(annot_type == AN_DATA_LABEL)
+ strcpy(obj_name,HDF4_VGROUP_LABEL);
+ else if(annot_type == AN_DATA_DESC)
+ strcpy(obj_name,HDF4_VGROUP_DESC);
+ else
+ return NULL;
+ }
+
+ else if(obj_tag == DFTAG_VS || obj_tag == DFTAG_VH) {
+ if(annot_type == AN_DATA_LABEL)
+ strcpy(obj_name,HDF4_VDATA_LABEL);
+ else if(annot_type == AN_DATA_DESC)
+ strcpy(obj_name,HDF4_VDATA_DESC);
+ else
+ return NULL;
+ }
+
+ else if(obj_tag == DFTAG_LUT) {
+ if(annot_type == AN_DATA_LABEL)
+ strcpy(obj_name,HDF4_PAL_LABEL);
+ else if(annot_type == AN_DATA_DESC)
+ strcpy(obj_name,HDF4_PAL_DESC);
+ else
+ return NULL;
+ }
+ return obj_name;
+}
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/h4toh5/h4toh5image.c b/tools/h4toh5/h4toh5image.c
new file mode 100644
index 0000000..8fbbdc0
--- /dev/null
+++ b/tools/h4toh5/h4toh5image.c
@@ -0,0 +1,861 @@
+/*-------------------------------------------------------------------------
+ *
+ * Copyright (C) 2000 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/******************************************************************************
+
+ Description:
+
+1. The converter
+
+See the HDF4 to HDF5 mapping specification at
+http://hdf.ncsa.uiuc.edu/HDF5/papers/h4toh5 for the default mapping
+from HDF4 objects to HDF5 objects.
+
+The converter consists of 10 files: h4toh5util.h, h4toh5main.h, h4toh5util.c,
+h4toh5main.c, h4toh5sds.c, h4toh5image.c, h4toh5vdata.c, h4toh5vgroup.c,
+h4toh5pal.c and h4toh5anno.c.
+
+2. This file
+
+Converts an HDF4 image object into an HDF5 dataset. A three-component image
+is converted into an HDF5 dataset with a compound datatype.
+
+Author: Kent Yang(ymuqun@ncsa.uiuc.edu)
+
+
+*****************************************************************************/
+
+#include "h4toh5main.h"
+
+/*-------------------------------------------------------------------------
+ * Function: Image_h4_to_h5
+ *
+ * Purpose: translate Image object into hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ ri_id: RI identifier
+ h5_group: hdf5 group id
+ h5_palgroup: hdf5 palette group id
+
+ *-------------------------------------------------------------------------
+ */
+
+int Image_h4_to_h5(int32 file_id,int32 ri_id,hid_t h5_group,hid_t h5_palgroup) {
+
+ int32 istat;
+ int32 ngrattrs;
+ int32 ncomp;
+ int check_gloattr;
+ int32 start[2];
+ int32 edges[2];
+ int32 dimsizes[2];
+ uint16 gr_ref;
+ int32 image_dtype;
+
+ int check_imagename;
+ int i;
+ char image_name[MAX_GR_NAME];
+ char grlabel[MAX_GR_NAME];
+ char image_class[MAX_GR_NAME];
+ char* h5cimage_name;
+ void* image_data;
+ HDF_CHUNK_DEF c_def_out;
+ int32 chunk_dims[2];
+ int32 c_flags;
+
+ /* define variables for hdf5. */
+
+ hid_t h5ty_id;
+ hid_t h5memtype;
+
+ hid_t h5_ctype;
+ hid_t h5_cmemtype;
+
+ hid_t h5d_sid;
+ hid_t h5dset;
+
+ size_t h4size;
+ size_t h4memsize;
+ hsize_t fielddim[1];
+ hsize_t h5dims[2];
+ herr_t ret;
+ hid_t create_plist;
+
+ /* zeroing out memory.*/
+
+ h4toh5_ZeroMemory(image_name,MAX_GR_NAME);
+ h4toh5_ZeroMemory(image_class,MAX_GR_NAME);
+ h4toh5_ZeroMemory(grlabel,MAX_GR_NAME);
+
+ /* Obtain information of the image.*/
+
+ if(GRgetchunkinfo(ri_id,&c_def_out,&c_flags)==FAIL){
+ printf("error in getting chunking information. \n");
+ return FAIL;
+ }
+
+ istat = GRgetiminfo(ri_id, image_name, &ncomp, &image_dtype,
+ NULL, dimsizes, &ngrattrs);
+
+ if(istat == FAIL) {
+ printf("Cannot obtain GR info. at Image routine.\n");
+ return FAIL;
+ }
+
+ /* data type transferring from hdf4 to hdf5. */
+ if(h4type_to_h5type(image_dtype,&h5memtype,&h4memsize,
+ &h4size,&h5ty_id)== FAIL) {
+ printf("failed to translate image datatype. \n");
+ return FAIL;
+ }
+
+ /* check whether the datatype is string. */
+ if (h5ty_id == H5T_STRING) {
+ /* rechange string datatype into numerical datatype.*/
+
+ if(h5string_to_int(image_dtype,&h5memtype,h4memsize,
+ &h5ty_id)== FAIL) {
+ printf("error in translating H5T_STRING to int.\n");
+ return FAIL;
+ }
+ }
+
+ start[0] = 0;
+ start[1] = 0;
+ edges[0] = dimsizes[0];
+ edges[1] = dimsizes[1];
+
+ image_data = malloc(h4memsize*dimsizes[0]*dimsizes[1]*ncomp);
+
+ if(image_data == NULL) {
+ printf("error in allocating memory for image data. \n");
+ return FAIL;
+ }
+
+ istat = GRreadimage(ri_id, start, NULL, edges, (VOIDP)image_data);
+
+ if (istat == FAIL) {
+ printf("error in reading images.\n");
+ free(image_data);
+ return FAIL;
+ }
+
+ /* change the order of image dimension:
+ due to the difference of hdf4 image specification and
+ hdf5 image specification. */
+
+
+
+ h5dims[0] = edges[1]-start[1];
+ h5dims[1] = edges[0]-start[0];
+
+ gr_ref = GRidtoref(ri_id);
+ if(gr_ref == 0) {
+ printf("error in obtaining gr reference number. \n");
+ free(image_data);
+ return FAIL;
+ }
+
+ /* obtaining absolute path of image name.*/
+
+ check_imagename = -10;
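+ /* get_name() reports the lookup status through check_imagename; the checks
+ below treat 0 as "name not found", -1 as "name not defined" and -2 as
+ "out of memory in get_name". */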
+ h5cimage_name = get_name(gr_ref,2*num_images,gr_hashtab,&check_imagename);
+
+ if (h5cimage_name == NULL && check_imagename == 0 ) {
+ printf("error,cannot find image name.\n");
+ free(image_data);
+ return FAIL;
+ }
+
+ if (h5cimage_name == NULL && check_imagename == -1) {
+ printf("error,image name is not defined.\n");
+ free(image_data);
+ return FAIL;
+ }
+
+ if (h5cimage_name == NULL && check_imagename == -2) {
+ printf("error,not enough memory for get_name. \n");
+ free(image_data);
+ return FAIL;
+ }
+
+ /**** check number of component of the image object,
+ and transfer HDF4 object into HDF5 object. ****/
+
+ if (ncomp <= 0) {
+ printf("error in obtaining image component\n");
+ free(image_data);
+ free(h5cimage_name);
+ return FAIL;
+ }
+
+ /* create property list. */
+
+ create_plist = H5Pcreate(H5P_DATASET_CREATE);
+
+ if(c_flags == HDF_CHUNK || c_flags == (HDF_CHUNK | HDF_COMP)
+ || c_flags == (HDF_CHUNK | HDF_NBIT) ){
+
+ chunk_dims[0] = c_def_out.chunk_lengths[0];
+ chunk_dims[1] = c_def_out.chunk_lengths[1];
+
+ if(H5Pset_chunk(create_plist, 2, (hsize_t *)chunk_dims)<0) {
+ printf("failed to set up chunking information for ");
+ printf("property list.\n");
+ free(image_data);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+ }
+ if (ncomp == 1) {
+
+ h5d_sid = H5Screate_simple(2,h5dims,NULL);
+
+ if(h5d_sid <0) {
+ printf("error in creating space for dataset. \n");
+ free(image_data);
+ free(h5cimage_name);
+ return FAIL;
+ }
+
+ h5dset = H5Dcreate(h5_group,h5cimage_name,h5ty_id,h5d_sid,create_plist);
+
+ if(h5dset < 0) {
+ printf("error in creating hdf5 dataset converted from images. \n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ if (H5Dwrite(h5dset,h5memtype,h5d_sid,h5d_sid,H5P_DEFAULT,
+ image_data)<0) {
+ printf("error writing data for hdf5 dataset converted from images.\n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ }
+
+ else { /* compound datatype. */
+
+ h5_ctype = H5Tcreate(H5T_COMPOUND,ncomp*h4size);
+ if (h5_ctype < 0) {
+ printf("error in generating hdf5 compound data type. \n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ h5_cmemtype = H5Tcreate(H5T_COMPOUND,ncomp*h4memsize);
+ if (h5_cmemtype < 0) {
+ printf("error in generating hdf5 memory compound data type. \n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+
+ fielddim[0] = ncomp;
+
+ {
+ hid_t arr_type; /* Array datatype for inserting fields */
+
+ /* Create array datatype */
+ if((arr_type=H5Tarray_create(h5ty_id,1,fielddim,NULL))<0) {
+ printf("error creating array datatype.\n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ ret = H5Tinsert(h5_ctype,"HDF4Image_data",0,arr_type);
+ if(ret < 0) {
+ printf("error in inserting array of compound datatype. \n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ /* Close array datatype */
+ if(H5Tclose(arr_type)<0) {
+ printf("error closing array datatype.\n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ /* Create array datatype */
+ if((arr_type=H5Tarray_create(h5memtype,1,fielddim,NULL))<0) {
+ printf("error creating array datatype.\n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ ret = H5Tinsert(h5_cmemtype,"HDF4Image_data",0,arr_type);
+ if(ret < 0) {
+ printf("error in inserting array of compound datatype at memory. \n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ /* Close array datatype */
+ if(H5Tclose(arr_type)<0) {
+ printf("error closing array datatype.\n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+ }
+
+ h5d_sid = H5Screate_simple(2,h5dims,NULL);
+ if(h5d_sid < 0) {
+ printf("error in creating space. \n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ h5dset = H5Dcreate(h5_group,h5cimage_name,h5_ctype,h5d_sid,
+ create_plist);
+ if(h5dset < 0) {
+ printf("error in creating dataset. \n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ if (H5Dwrite(h5dset,h5_cmemtype,h5d_sid,h5d_sid,H5P_DEFAULT,
+ (void *)image_data)<0) {
+ printf("error writing data\n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+ ret = H5Tclose(h5_ctype);
+ if(ret < 0) {
+ printf("error in closing h5_ctype. \n");
+ }
+ ret = H5Tclose(h5_cmemtype);
+ if(ret <0) {
+ printf("error in closing h5_cmemtype. \n");
+ }
+ }
+
+/* convert image annotations into attributes of the image dataset.
+ Since there is no routine to find the exact tag of an image object,
+ we check the three possible image object tags, that is:
+ DFTAG_RIG, DFTAG_RI and DFTAG_RI8. If the object tag of an image
+ falls outside this set, its annotations are not converted into
+ hdf5 attributes; it is the user's responsibility to make sure image
+ objects carry only one of the above three tags.*/
+
+ if(Annoobj_h4_to_h5(file_id,gr_ref,DFTAG_RIG,h5dset)== FAIL){
+ printf("failed to convert image annotation into hdf5 attribute.\n");
+ free(image_data);
+ free(h5cimage_name);
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ return FAIL;
+ }
+
+ if(Annoobj_h4_to_h5(file_id,gr_ref,DFTAG_RI,h5dset)== FAIL){
+ printf("failed to convert image annotation into hdf5 attribute.\n");
+ free(h5cimage_name);
+ free(image_data);
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ return FAIL;
+ }
+
+ if(Annoobj_h4_to_h5(file_id,gr_ref,DFTAG_RI8,h5dset)== FAIL){
+ printf("failed to convert image annotation into hdf5 attribute.\n");
+ free(h5cimage_name);
+ free(image_data);
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ return FAIL;
+ }
+
+
+ /************************************/
+ /* translate GR attributes into HDF5 dataset attribute.*/
+
+ check_gloattr = 0;
+ if(gr_tranattrs(ri_id,h5dset,ngrattrs,check_gloattr)==FAIL){
+ printf(" cannot obtain attributes. \n");
+ free(image_data);
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ return FAIL;
+ }
+
+ /* deal with h5dset predefined and user-defined attributes.
+ Obtain the name and data type and the total number of attributes.
+ Data attribute at hdf4 is only one-dimensional array. */
+
+ if (ncomp == 1 && h4size == 1)
+ strcpy(grlabel,RAST8LABEL);
+ else if(ncomp == 3 && h4size == 1)
+ strcpy(grlabel,RAST24LABEL);
+ else
+ strcpy(grlabel,GRLABEL);
+
+ strcpy(image_class,IM_CLASS);
+
+ /* transfer hdf4 predefined attributes into hdf5 dataset.*/
+ if(h4_transpredattrs(h5dset,HDF4_OBJECT_TYPE,grlabel)==FAIL){
+ printf("error in getting hdf4 image type attribute \n");
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ free(h5cimage_name);
+ free(image_data);
+ return FAIL;
+ }
+
+ if(h4_transpredattrs(h5dset,HDF4_OBJECT_NAME,image_name)==FAIL){
+ printf("error in getting hdf4 image name attribute. \n");
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ free(h5cimage_name);
+ free(image_data);
+ return FAIL;
+ }
+
+ if(h4_transpredattrs(h5dset,HDF4_IMAGE_CLASS,image_class)==FAIL){
+ printf("error in getting hdf4 image class attribute. \n");
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ free(h5cimage_name);
+ free(image_data);
+ return FAIL;
+ }
+
+ gr_ref = GRidtoref(ri_id);
+
+ if(gr_ref == 0) {
+ printf("error in obtaining reference number of GR.\n");
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ free(h5cimage_name);
+ free(image_data);
+ return FAIL;
+ }
+
+ if(h4_transnumattr(h5dset,HDF4_REF_NUM,gr_ref)==FAIL) {
+ printf("error in getting hdf4 image number attribute.\n");
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ free(h5cimage_name);
+ free(image_data);
+ return FAIL;
+ }
+
+ /* deal with palette. */
+
+ if(gr_palette(file_id,ri_id,h5dset,h5_palgroup)== FAIL) {
+ printf("error in translating palette into h5 dataset.\n");
+ H5Pclose(create_plist);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ free(h5cimage_name);
+ free(image_data);
+ return FAIL;
+ }
+
+ ret = H5Pclose(create_plist);
+ ret = H5Sclose(h5d_sid);
+ ret = H5Dclose(h5dset);
+ istat = GRendaccess(ri_id);
+ free(image_data);
+ free(h5cimage_name);
+ return SUCCEED;
+}
+
+/**** palette routine. ****/
+/*-------------------------------------------------------------------------
+ * Function: gr_palette
+ *
+ * Purpose: translate palette into hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: HDF4 identifier
+ ri: raster image id
+ h5dset: hdf5 dataset
+ h5_palgroup: hdf5 palette group
+
+ Out:
+ *-------------------------------------------------------------------------
+ */
+
+int gr_palette(int32 file_id,int32 ri_id,hid_t h5dset,hid_t h5_palgroup) {
+
+ int32 pal_id;
+ uint16 pal_ref;
+ char palref_str[MAXREF_LENGTH];
+ char palg_name[MAX_GR_NAME];
+ char image_index[MAX_GR_NAME];
+ int check_pal;
+ int check_palname;
+ int pal_stat;
+ char* h5pal_name=NULL;
+
+
+ /* get palette id */
+ pal_id = GRgetlutid(ri_id,0);
+ if(pal_id == FAIL) {
+ printf("error in obtaining palette id. \n");
+ return FAIL;
+ }
+
+ pal_ref = GRluttoref(pal_id);
+
+ if(pal_ref >0) {
+
+ /* convert reference number into string format. */
+ if(conv_int_str(pal_ref,palref_str)==FAIL) {
+ printf("error in converting palette reference number into string.\n");
+ return FAIL;
+ }
+
+ /* check whether this palette has been looked up already. */
+ check_pal = lookup(pal_ref,PAL_HASHSIZE,pal_hashtab);
+
+ if( check_pal < 0) {
+ printf("error at looking up palette table. \n");
+ return FAIL;
+ }
+
+ /* if check_pal equals to 1, this palette has already been
+ converted into hdf5 dataset, just obtain the palette name.
+ if check_pal equals to 0, we will do the converting. */
+
+ if(check_pal == 1) {
+
+ h5pal_name = get_name(pal_ref,PAL_HASHSIZE,pal_hashtab,
+ &check_palname);
+
+ if (h5pal_name == NULL && check_palname == 0 ) {
+ printf("error,cannot find group\n");
+ return FAIL;
+ }
+
+ if (h5pal_name == NULL && check_palname == -1 ) {
+ printf("error,group name is not defined.\n");
+ return FAIL;
+ }
+
+ }
+
+ if(check_pal == 0) {
+ /* do converting. */
+ strcpy(palg_name,HDF4_PALG);
+
+ /* obtain hdf5 dataset name converted from palette,
+ no name for hdf4 palette.*/
+ h5pal_name = get_obj_aboname(NULL,palref_str,palg_name,HDF4_PALETTE);
+ if(h5pal_name == NULL) {
+ printf("error in getting hdf5 palette name.\n");
+ return FAIL;
+ }
+
+ if(set_name(pal_ref,PAL_HASHSIZE,pal_hashtab,h5pal_name)==FAIL) {
+ printf("error in setting object name.\n");
+ free(h5pal_name);
+ return FAIL;
+ }
+
+ pal_stat = Palette_h4_to_h5(file_id,pal_id,h5_palgroup,h5pal_name);
+
+ if(pal_stat == FAIL) {
+ printf("error occurring in transferring palette into dataset. \n");
+ free(h5pal_name);
+ return FAIL;
+ }
+
+ }
+
+ if(create_pal_objref(h5dset,h5_palgroup,h5pal_name)== FAIL) {
+ printf("error in creating palette object reference.\n");
+ free(h5pal_name);
+ return FAIL;
+ }
+
+ if(h5pal_name != NULL) free(h5pal_name);
+
+ strcpy(image_index,HDF4_IMAGE_INDEXED);
+ if(h4_transpredattrs(h5dset,HDF4_IMAGE_SUBCLASS,image_index)== FAIL) {
+ printf("failed to transfer hdf4 image indexed.\n");
+ return FAIL;
+ }
+ }
+ return SUCCEED;
+}
+/***** end of palette application. *****/
+/*-------------------------------------------------------------------------
+ * Function: gr_tranattrs
+ *
+ * Purpose: translate attributes of Image object into hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ sri_id: RI identifier
+ sh5_dset: hdf5 dataset
+ snum_grattrs: number of attribute
+ check_gloflag: flag to check whether this attribute belongs
+ to gr interface.
+
+ Out:
+ *-------------------------------------------------------------------------
+ */
+int gr_tranattrs(int32 sri_id, hid_t sh5_dset,int snum_grattrs,
+ int check_gloflag) {
+
+ char sgratrr_name[2*MAX_NC_NAME];
+ char grglo[MAX_NC_NAME];
+ char* grrepattr_name;
+ int32 count_sgradata;
+ int32 sgr_atype;
+ size_t sh4_amemsize;
+ size_t sh4_asize;
+
+ hid_t sh5a_sid;
+ hid_t sh5a_id;
+ hid_t sh5_atype;
+ hid_t sh5_amemtype;
+ hid_t sh5str_type;
+ hid_t sh5str_memtype;
+ hsize_t sh5dims[MAX_VAR_DIMS];
+ void* sgr_adata;
+ herr_t sret;
+ int i;
+
+
+ for (i =0;i <snum_grattrs;i++) {
+
+ if (GRattrinfo(sri_id,i,sgratrr_name,&sgr_atype,&count_sgradata)==FAIL){
+ printf("unable to obtain attribute information. \n");
+ return FAIL;
+ }
+
+ /*convert datatype for attribute. */
+
+ if(h4type_to_h5type(sgr_atype,&sh5_amemtype,&sh4_amemsize,
+ &sh4_asize,&sh5_atype)==FAIL){
+ printf("unable to do type transferring.\n");
+ return FAIL;
+ }
+
+ sgr_adata = malloc(sh4_amemsize*count_sgradata);
+
+ if(GRgetattr(sri_id,i,(VOIDP)sgr_adata)==FAIL){
+ printf("unable to get GR attributes. \n");
+ return FAIL;
+ }
+
+ /* if attribute doesn't have name, a default name is set. */
+ if(sgratrr_name[0] == '\0') {
+ grrepattr_name = trans_obj_name(DFTAG_RIG,i);
+ strcpy(sgratrr_name,grrepattr_name);
+ free(grrepattr_name);
+ }
+
+ /* if the sds attribute is a file attribute. */
+ if(check_gloflag == 1){
+ strcpy(grglo,GLOIMAGE);
+ strcat(sgratrr_name,"_");
+ strcat(sgratrr_name,grglo);
+ }
+ /* now do attribute-transferring.
+ 1. deal with string data type
+ 2. set attribute space.
+ 3. get attribute name, set property list. */
+
+ if (sh5_atype == H5T_STRING) {
+
+ sh5a_sid = H5Screate(H5S_SCALAR);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create attribute space for IMAGE. \n");
+ return FAIL;
+ }
+
+ if ((sh5str_type = mkstr(count_sgradata*sh4_asize,H5T_STR_SPACEPAD))<0){
+ printf("error in making string for image attribute \n");
+ return FAIL;
+ }
+
+ /* check this line later. */
+ if ((sh5str_memtype = mkstr(count_sgradata*sh4_amemsize,
+ H5T_STR_SPACEPAD))<0){
+ printf("error in making memory string. \n");
+ return FAIL;
+ }
+
+ sh5a_id = H5Acreate(sh5_dset,sgratrr_name,sh5str_type,sh5a_sid,
+ H5P_DEFAULT);
+ if (sh5a_id <0) {
+ printf("failed to obtain attribute id for IMAGE. \n");
+ return FAIL;
+ }
+
+ sret = H5Awrite(sh5a_id,sh5str_memtype,(void *)sgr_adata);
+
+ if (sret <0) {
+ printf("failed to obtain attribute of IMAGE.\n ");
+ return FAIL;
+ }
+
+ sret = H5Sclose(sh5a_sid);
+ sret = H5Aclose(sh5a_id);
+ }
+
+ else {
+
+ if (count_sgradata == 1) {
+
+ sh5a_sid = H5Screate(H5S_SCALAR);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create space id. \n");
+ return FAIL;
+ }
+ }
+ else {
+
+ sh5dims[0] = count_sgradata;
+ sh5a_sid = H5Screate_simple(1,sh5dims,NULL);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create attribute space. \n");
+ return FAIL;
+ }
+ }
+
+ sh5a_id = H5Acreate(sh5_dset,sgratrr_name,sh5_atype,sh5a_sid,
+ H5P_DEFAULT);
+
+ if(sh5a_id <0) {
+ printf("failed to obtain attribute id. \n");
+ return FAIL;
+ }
+
+ sret = H5Awrite(sh5a_id,sh5_amemtype,(void *)sgr_adata);
+
+ if(sret <0) {
+ printf("failed to obtain attribute.\n ");
+ return FAIL;
+ }
+ sret = H5Aclose(sh5a_id);
+ sret = H5Sclose(sh5a_sid);
+
+ }
+
+ free(sgr_adata);
+
+ }
+
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: create_pal_objref
+ *
+ * Purpose: create object reference for palette
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ h5dset: hdf5 dataset
+ h5_palgroup: hdf5 palette group
+ h5pal_name: hdf5 palette name
+
+ Out:
+ *-------------------------------------------------------------------------
+ */
+
+int create_pal_objref(hid_t h5dset,hid_t h5_palgroup,char *h5pal_name){
+
+ hobj_ref_t pal_refdat;
+ hsize_t pal_refDims[1];
+ hid_t pal_refSpace;
+ hid_t pal_refType;
+ hid_t attribID;
+ herr_t ret;
+
+ pal_refDims[0] = 1;
+ pal_refSpace = H5Screate_simple(1,pal_refDims,NULL);
+
+ if(pal_refSpace < 0) {
+ printf("error in obtaining reference space. \n");
+ return FAIL;
+ }
+
+ pal_refType = H5Tcopy(H5T_STD_REF_OBJ);
+ if(pal_refType < 0) {
+ printf("error in obtaining reference type. \n");
+ H5Sclose(pal_refSpace);
+ return FAIL;
+ }
+
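+ /* create an object reference pointing to the palette dataset; for an
+ H5R_OBJECT reference the dataspace argument is ignored, hence the -1. */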
+ ret = H5Rcreate(&pal_refdat,h5_palgroup,h5pal_name,
+ H5R_OBJECT,-1);
+ if(ret < 0) {
+ printf("error in creating reference space. \n");
+ H5Sclose(pal_refSpace);
+ H5Tclose(pal_refType);
+ return FAIL;
+ }
+
+ attribID = H5Acreate(h5dset,PALETTE,pal_refType,pal_refSpace,
+ H5P_DEFAULT);
+
+ if(attribID < 0) {
+ printf("error in obtaining attribute ID. \n");
+ H5Sclose(pal_refSpace);
+ H5Tclose(pal_refType);
+ return FAIL;
+ }
+
+ ret = H5Awrite(attribID,pal_refType,(void *)&pal_refdat);
+
+
+ H5Sclose(pal_refSpace);
+ if(H5Tclose(pal_refType)<0) {
+ printf("error closing palette reference type.\n");
+ H5Aclose(attribID);
+ }
+ H5Aclose(attribID);
+ return SUCCEED;
+}
diff --git a/tools/h4toh5/h4toh5main.c b/tools/h4toh5/h4toh5main.c
new file mode 100644
index 0000000..e9ef567
--- /dev/null
+++ b/tools/h4toh5/h4toh5main.c
@@ -0,0 +1,1629 @@
+/*-------------------------------------------------------------------------
+ *
+ * Copyright (C) 2000 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/******************************************************************************
+
+ Description:
+
+1. The converter
+
+See the HDF4 to HDF5 mapping specification at
+http://hdf.ncsa.uiuc.edu/HDF5/papers/h4toh5 for the default mapping
+from HDF4 objects to HDF5 objects.
+
+The converter consists of 10 files: h4toh5util.h, h4toh5main.h, h4toh5util.c,
+h4toh5main.c, h4toh5sds.c, h4toh5image.c, h4toh5vdata.c, h4toh5vgroup.c,
+h4toh5pal.c and h4toh5anno.c.
+
+2. This file
+
+This file contains the main driver of the HDF4 to HDF5 converter. It checks
+the input parameters, initializes the global tables, sets up the root-level
+HDF5 structure, and handles the special case of vgroup loops in the HDF4 file.
+
+
+Author: Kent Yang(ymuqun@ncsa.uiuc.edu)
+
+
+*****************************************************************************/
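+
+/* Typical usage, as handled in main() below:
+ *   h4toh5 -h                  print the option summary
+ *   h4toh5 file.hdf            the hdf5 file name is derived from the hdf4
+ *                              name with an "h5" extension
+ *   h4toh5 file.hdf file.h5    convert into the named hdf5 file
+ */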
+
+
+#include "h4toh5main.h"
+
+int32 estnum_vg;
+int32 estnum_vd;
+int32 num_sds;
+int32 num_images;
+int num_objects;
+int32 num_glsdsattrs;
+int32 num_glgrattrs;
+struct table* sds_hashtab;
+struct table* gr_hashtab;
+struct table* vg_hashtab;
+struct table* vd_hashtab;
+struct table* pal_hashtab;
+struct name_table* name_hashtab;
+struct name_table* dim_hashtab;
+
+/*-------------------------------------------------------------------------
+ * Function: main
+ *
+ * Purpose: driver routine to handle all objects of hdf4 file.
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+
+ Modfication:
+ *-------------------------------------------------------------------------
+ */
+
+
+
+int main(int argc, char ** argv) {
+
+ char *h5_filename=NULL;
+ char *h4_filename=NULL;
+ char *h5_extension;
+ int status = 0;
+
+ argc--;
+ argv++;
+
+ if (argc == 0) {
+ fprintf(stderr,"\nError: Invalid Arguments\n");
+ PrintOptions_h4toh5();
+ return FAIL;
+ }
+
+ /* take care -h (help) option first */
+ { int i;
+ for (i=0; i < argc; i++)
+ if ( HDstrcmp(argv[i],"-h") == 0 ) {
+ PrintOptions_h4toh5();
+ return SUCCEED;
+ }
+ }
+
+
+ switch(argc) {
+
+ case 0:
+
+ PrintOptions_h4toh5();
+ break;
+
+ case 1: /* h4toh5 file1 */
+ h4_filename = argv[0];
+#ifndef WIN32
+ if (test_file(h4_filename,O_EXCL,292) != 0 ) {
+ /* 292 Decimal - 0444 Octal, a+r */
+ printf("the current hdf4 file name is not set properly.\n");
+ status = -1;
+ break;
+ }
+ if (test_dir(h4_filename) != 0 ) {
+ fprintf(stderr,"%s: Is a directory\n",h4_filename);
+ status = -1;
+ break;
+ }
+#endif
+ /*0. check whether this file is an hdf file. */
+
+ if(!Hishdf(h4_filename)){
+ printf("error: not an hdf file. \n");
+ printf("the file will not be converted. \n");
+ status = -1;
+ break;
+ }
+ h5_extension = HDstrdup("h5");
+ h5_filename = BuildFilename(h4_filename,h5_extension);
+ if (h5_filename == NULL) {
+ printf("error in creating hdf5 file name.\n");
+ status = -1;
+ break;
+ }
+#ifndef WIN32
+ if (test_file(h5_filename,O_CREAT|O_EXCL,436) != 0) {
+ /* 436 Decimal - 0664 Octal, ug+rw,o+r */
+ printf("permission of hdf5 file is not set properly.\n");
+ status = -1;
+ break;
+ }
+#endif
+ status = h4toh5(h4_filename, h5_filename);
+
+ if ( status == FAIL ) {
+ printf("error in converting %s into %s\n",h4_filename,h5_filename);
+ break;
+ }
+ if (h5_filename != NULL) {
+ HDfree(h5_filename);
+ }
+
+ break;
+
+ case 2: /* h4toh5 file_in file_out */
+
+ h4_filename = argv[0];
+ h5_filename = argv[1];
+
+#ifndef WIN32
+ if (test_file(h4_filename,O_EXCL,292) != 0 ) {
+ /* 292 Decimal - 0444 Octal, a+r */
+ printf("permission of hdf4 file is not set properly.\n");
+ status = -1;
+ break;
+ }
+
+ if (test_dir(h4_filename) != 0 ) {
+ fprintf(stderr,"%s: Is a directory\n",h4_filename);
+ status = -1;
+ break;
+ }
+
+#endif
+ /*0. check whether this file is a hdf file. */
+
+ if(!Hishdf(h4_filename)){
+ printf("error: not an hdf file. \n");
+ printf("the file will not be converted. \n");
+ status = -1;
+ break;
+ }
+
+#ifndef WIN32
+ if (test_file(h5_filename,O_CREAT|O_RDWR,436) != 0) { /* 436 Decimal - 0664 Octal, ug+rw,o+r */
+ printf("permission of hdf5 file is not set properly.\n");
+ status = -1;
+ break;
+ }
+
+ if (test_dir(h4_filename) != 0 ) {
+ fprintf(stderr,"%s: Is a directory\n",h4_filename);
+ status = -1;
+ break;
+ }
+
+#endif
+ status = h4toh5(h4_filename, h5_filename);
+ if ( status == FAIL ) {
+ printf("error in converting %s into %s\n",h4_filename,h5_filename);
+ break;
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ return status;
+
+}
+
+/*-------------------------------------------------------------------------
+ * Function: h4toh5
+ *
+ * Purpose: open the hdf4 file, create the hdf5 file, set up the global
+ tables and helper groups, and convert all hdf4 objects in the
+ file into hdf5 objects.
+
+ *-------------------------------------------------------------------------
+ */
+int h4toh5(char*filename4, char*filename5) {
+
+ /* define variables for hdf4. */
+ int32 istat ; /* hdf4 library routine return value. */
+ int32 file_id;/* file identifier of the hdf file.*/
+ int32 sd_id;/* sd interface identifier*/
+ int32 gr_id;/* gr interface identifier*/
+ int check_glo;
+
+ /* define variables for hdf5. */
+ hid_t file5_id;/* hdf5 file identifier. */
+ hid_t h5_root;/* new hdf5 root group identifier.*/
+
+ hid_t h5_dimg;/* hdf5 dimensional scale group identifier. */
+ hid_t h5_palg;/* hdf5 palette group identifier. */
+
+ /*1. open the current hdf4 file. */
+
+ file_id = Hopen(filename4, DFACC_READ, 0);
+ if(file_id == FAIL) {
+ printf("error: no such hdf4 files. \n");
+ return FAIL;
+ }
+
+ /* open sd interface.*/
+ sd_id = SDstart(filename4,DFACC_READ);
+ if(sd_id == FAIL) {
+ printf("error: cannot start SD interface. \n");
+ Hclose(file_id);
+ return FAIL;
+ }
+
+ /* open gr interface.*/
+ gr_id = GRstart(file_id);
+ if(gr_id == FAIL) {
+ printf("error in obtaining gr id. \n");
+ SDend(sd_id);
+ Hclose(file_id);
+ return FAIL;
+ }
+
+ /* open V interface. */
+ istat = Vstart(file_id);
+ if(istat == FAIL) {
+ printf("error in starting V interface. \n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Hclose(file_id);
+ return FAIL;
+ }
+
+ /* 2. obtain number of hdf4 objects(sds,image,vdata,vgroup,palette)
+ in this hdf4 file. */
+
+ if(get_numof_hdf4obj(filename4,file_id) == FAIL) {
+ printf("error in obtaining number of hdf4 objects.\n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ return FAIL;
+ }
+
+ /* set up global hash tables for hdf4 objects. */
+ if(set_hashtables() == FAIL){
+ printf("error in setting hashtables. \n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ return FAIL;
+ }
+
+ /* create hdf5 file. */
+ file5_id = H5Fcreate(filename5,H5F_ACC_TRUNC,H5P_DEFAULT,H5P_DEFAULT);
+
+ if (file5_id < 0) {
+ fprintf(stderr, "unable to create hdf5 file \n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ /* Initialize hdf5 group interface. */
+ h5_root = H5Gopen(file5_id,"/");
+
+ if(h5_root < 0) {
+ printf("error in opening hdf5 root group. \n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ /**** build up helper groups(dimensional scale and palette) ****/
+ if(set_helpgroups(h5_root,&h5_dimg,&h5_palg)==FAIL) {
+ printf("error setting up dimensional scale and palette groups.\n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ /* convert global sds attributes into global attributes under root group.*/
+ check_glo = 1;
+
+ if(sds_transattrs(sd_id, h5_root,num_glsdsattrs,check_glo)==FAIL) {
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+
+ /* convert global image attributes into global attributes under root group.*/
+ check_glo = 1;
+
+ if(gr_tranattrs(gr_id, h5_root,num_glgrattrs,check_glo)==FAIL) {
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ /* convert all objects in lone vgroups into corresponding hdf5 objects. */
+ if(h4toh5lonevgs(file_id,sd_id,h5_root,h5_dimg,h5_palg)== FAIL) {
+ printf("error in translating lone vgroup into hdf5 objects.\n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ if(num_sds >0) H5Gclose(h5_dimg);
+ if(num_images >0) H5Gclose(h5_palg);
+ H5Gclose(h5_root);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+/*convert all objects in group rings into corresponding hdf5 objects. */
+ if(h4toh5vgrings(file_id,sd_id,h5_root,h5_dimg,h5_palg) == FAIL){
+ printf("error in translating vgroup rings into hdf5 objects.\n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ if(num_sds >0) H5Gclose(h5_dimg);
+ if(num_images >0) H5Gclose(h5_palg);
+ H5Gclose(h5_root);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ /*convert all independent lone vdata into corresponding hdf5 datasets with
+ compound datatypes. */
+
+ if(h4toh5lonevds(file_id,h5_root) == FAIL){
+ printf("error in translating lone independent vdata into hdf5 objects.\n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ if(num_sds >0) H5Gclose(h5_dimg);
+ if(num_images >0) H5Gclose(h5_palg);
+ H5Gclose(h5_root);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ /*** convert hdf file annotations into hdf5 attributes under the root.***/
+ if(Annofil_h4_to_h5(file_id,h5_root) == FAIL) {
+ printf("error in translating file annotations into root attributes.\n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ if(num_sds >0) H5Gclose(h5_dimg);
+ if(num_images >0) H5Gclose(h5_palg);
+ H5Gclose(h5_root);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ /*** deal with untouched sds objects.convert them into hdf5 datasets under root group.***/
+
+ if(h4toh5unvisitedsds(file_id,sd_id,h5_root,h5_dimg) == FAIL) {
+ printf("error in converting unvisited sds objects into hdf5 file.\n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ if(num_sds >0) H5Gclose(h5_dimg);
+ if(num_images >0) H5Gclose(h5_palg);
+ H5Gclose(h5_root);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ /*** deal with untouched image objects. convert them into hdf5 datasets under root group. ***/
+
+ if(h4toh5unvisitedimages(file_id,h5_root,h5_palg) == FAIL) {
+ printf("error in converting unvisited image objects into hdf5 file.\n");
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ if(num_sds >0) H5Gclose(h5_dimg);
+ if(num_images >0) H5Gclose(h5_palg);
+ H5Gclose(h5_root);
+ H5Fclose(file5_id);
+ free_allhashmemory();
+ return FAIL;
+ }
+
+ free_allhashmemory();
+ SDend(sd_id);
+ GRend(gr_id);
+ Vend(file_id);
+ Hclose(file_id);
+ if(num_sds >0) H5Gclose(h5_dimg);
+ if(num_images >0) H5Gclose(h5_palg);
+ H5Gclose(h5_root);
+ H5Fclose(file5_id);
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: get_numof_hdf4obj
+ *
+ * Purpose: get number or estimated number of hdf4 objects
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: hdf file identifier
+ filename: hdf file name
+ Out:
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+int get_numof_hdf4obj(char*filename,int32 file_id) {
+
+ int32 sd_id;/* sd interface identifier*/
+ int32 gr_id;/* gr interface identifier*/
+ int num_lonevd;/* number of lone vdata*/
+ int num_lonevg;/* number of lone vgroup.*/
+ int32 istat;
+
+ estnum_vg = 0;
+ estnum_vd = 0;
+ num_sds = 0;
+ num_images = 0;
+ num_objects = 0;
+
+ /* obtain number of sds and number of global sds attribute. */
+
+ sd_id = SDstart(filename,DFACC_READ);
+ if(sd_id == FAIL) {
+ printf("error: cannot start SD interface. \n");
+ return FAIL;
+ }
+
+ if(SDfileinfo(sd_id,&num_sds,&num_glsdsattrs) == FAIL) {
+ printf("error in obtaining SDS information from the file.\n");
+ return FAIL;
+ }
+
+ /* obtain number of images and number of global image attributes.*/
+
+ gr_id = GRstart(file_id);
+ if(gr_id == FAIL) {
+ printf("error in obtaining gr id. \n");
+ return FAIL;
+ }
+
+ if(GRfileinfo(gr_id,&num_images,&num_glgrattrs) == FAIL) {
+ printf("error in obtaining GR information from the file. \n");
+ return FAIL;
+ }
+
+ /* obtain number of lone vgroup and lone vdata. */
+
+ istat = Vstart(file_id);
+ if (istat == FAIL) {
+ fprintf(stderr, "unable to start hdf4 V interface.\n");
+ return FAIL;
+ }
+
+ num_lonevd = VSlone(file_id,NULL,0);
+ if(num_lonevd == FAIL) {
+ printf("error in obtaining lone vdata number. \n");
+ return FAIL;
+ }
+
+ num_lonevg = Vlone(file_id,NULL,0);
+ if(num_lonevg == FAIL) {
+ printf("error in obtaining lone vgroup number. \n");
+ return FAIL;
+ }
+
+  /* heuristic estimate of the total number of vgroups and of independent
+     vdatas, based on the lone vgroup and lone vdata counts. */
+
+ estnum_vg = 6* num_lonevg;
+ estnum_vd = 4* num_lonevd;
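+  /* Illustrative example: a file with 3 lone vgroups and 2 lone vdatas
+     gives estnum_vg = 18 and estnum_vd = 8; these estimates only size the
+     vgroup and vdata hash tables, so overestimating is harmless. */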
+
+ /* set the size of name hashtable to num_objects. */
+ num_objects = estnum_vg + estnum_vd + num_sds + num_images;
+
+ return SUCCEED;
+}
+
+
+
+/*-------------------------------------------------------------------------
+ * Function: set_helpgroups
+ *
+ * Purpose:     create the helper groups (dimensional scale group and palette group) under the hdf5 root group
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ h5root: hdf5 group identifier
+ h5dimgptr: h5 dimensional group pointer
+ h5palgptr: h5 palette group pointer
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+
+int set_helpgroups(hid_t h5root,hid_t* h5dimgptr,hid_t* h5palgptr){
+
+ hid_t h5_dimg=(-1);/* hdf5 dimensional scale group identifier. */
+ hid_t h5_palg;/* hdf5 palette group identifier. */
+
+ /*1. dimensional scale group.*/
+
+ if(num_sds > 0) {
+ h5_dimg = H5Gcreate(h5root,HDF4_DIMG,0);
+ if (h5_dimg <0) {
+ printf("error in creating hdf5 dimensional scale group. \n");
+ return FAIL;
+ }
+
+ *h5dimgptr = h5_dimg;
+ }
+
+ /*2. palette group.*/
+
+ if(num_images >0) {
+ h5_palg = H5Gcreate(h5root,HDF4_PALG,0);
+ if(h5_palg <0) {
+ printf("error in creating hdf5 palette group. \n");
+ if(h5_dimg>0) H5Gclose(h5_dimg);
+ return FAIL;
+ }
+
+ *h5palgptr = h5_palg;
+ }
+
+ return SUCCEED;
+
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: set_hashtables
+ *
+ * Purpose: set up hashtables
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+
+ Out:
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+int set_hashtables(void) {
+
+ if(num_sds > 0) {
+ sds_hashtab = malloc(sizeof(struct table)*2*num_sds);
+ if(init_tab(2*num_sds,sds_hashtab)== FAIL){
+ printf("cannot initialize sds hashing table. \n");
+ return FAIL;
+ }
+ }
+
+ if(num_images > 0) {
+ gr_hashtab = malloc(sizeof(struct table)*2*num_images);
+ if(init_tab(2*num_images,gr_hashtab) == FAIL){
+ printf("cannot initialize image hashing table. \n");
+ return FAIL;
+ }
+ }
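+  /* Note: the sds and image tables are sized at twice the object count;
+     the same 2*num_sds and 2*num_images values are passed to lookup(),
+     get_name() and set_name() in the conversion routines, so these sizes
+     must stay consistent with those call sites. */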
+
+ /*hashtable is made to be fixed for dimensional scale and palette.*/
+
+ if(num_sds > 0) {
+ dim_hashtab = malloc(sizeof(struct name_table)*DIM_HASHSIZE);
+ if(init_nametab(DIM_HASHSIZE,dim_hashtab) == FAIL) {
+ printf("can not initialize dimension hashing table.\n");
+ return FAIL;
+ }
+ }
+
+ /* initialize the palette table */
+ if(num_images > 0){
+ pal_hashtab = malloc(sizeof(struct table)*PAL_HASHSIZE);
+ if(init_tab(PAL_HASHSIZE,pal_hashtab) == FAIL) {
+ printf("can not initialize palette hashing table.\n");
+ return FAIL;
+ }
+ }
+
+ /* initialize the vgroup table */
+ if(estnum_vg > 0) {
+ vg_hashtab = malloc(sizeof(struct table)*estnum_vg);
+ }
+ else {
+ estnum_vg = VG_DEFHASHSIZE;
+ vg_hashtab = malloc(sizeof(struct table)*estnum_vg);
+ }
+ if(init_tab(estnum_vg,vg_hashtab) == FAIL) {
+ printf("error in allocating memory for vgroup hashing table.\n");
+ return FAIL;
+ }
+
+ /* initialize the vdata table.*/
+ if(estnum_vd > 0) {
+ vd_hashtab = malloc(sizeof(struct table)*estnum_vd);
+ }
+ else {
+ estnum_vd = VD_DEFHASHSIZE;
+ vd_hashtab = malloc(sizeof(struct table)*estnum_vd);
+ }
+
+ if(init_tab(estnum_vd,vd_hashtab)== FAIL) {
+ printf("cannot initialize vdata hashing table.\n");
+ return FAIL;
+ }
+
+  /* The name hashtable is used only for resolving name clashes;
+     num_objects is the size of the hash table. */
+
+ if(num_objects != 0){
+ name_hashtab = malloc(sizeof(struct name_table)*num_objects);
+ if(init_nametab(num_objects,name_hashtab)== FAIL) {
+ printf("cannot initialize name hashing table. \n");
+ return FAIL;
+ }
+ }
+
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: h4toh5lonevgs
+ *
+ * Purpose:     Recursively convert hdf4 objects in lone vgroups into
+                corresponding hdf5 groups and datasets
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In : file_id: hdf file id
+ sd_id: hdf sd interface id
+ h5group: hdf5 group id
+ h5_dimg: hdf5 dimensional scale group id
+ h5_palg: hdf5 palette group id
+
+ Out:
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+int h4toh5lonevgs(int32 file_id,int32 sd_id,hid_t h5group,hid_t h5_dimg,hid_t h5_palg) {
+
+ int32 vgroup_id;
+ int num_lonevg; /* number of lone vgroup.*/
+ int32 *ref_array;
+ int32 istat;
+ char vgroup_name[VGNAMELENMAX];
+ char* cor_vgroupname;
+ char vgroup_class[VGNAMELENMAX];
+ char refstr[MAXREF_LENGTH];
+ int check_vgroup;
+ int check_tabst;
+ int lone_vg_number;
+ char *h5cgroup_name;
+
+ istat = Vstart(file_id);
+ if (istat == FAIL) {
+ fprintf(stderr, "unable to start hdf4 V interface.\n");
+ return FAIL;
+ }
+
+ num_lonevg = Vlone(file_id,NULL,0);
+
+ if (num_lonevg == FAIL) {
+ printf("error in obtaining lone vgroup number. \n");
+ return FAIL;
+ }
+
+ /* obtain object reference array. */
+
+  /* if there are no lone vgroups, return from this function. */
+ if(num_lonevg == 0)
+ return SUCCEED;
+
+ ref_array = (int32 *)malloc(sizeof(int32) *num_lonevg);
+
+ if(ref_array == NULL) {
+ printf("error in allocating memory for ref_array.\n");
+ return FAIL;
+ }
+
+ num_lonevg = Vlone(file_id,ref_array,num_lonevg);
+
+ /* walk through every lone group in the file */
+
+ for(lone_vg_number = 0; lone_vg_number < num_lonevg;
+ lone_vg_number++) {
+
+ vgroup_id = Vattach(file_id,ref_array[lone_vg_number],"r");
+
+ if(vgroup_id ==FAIL) {
+ printf("error in attaching lone vgroup.\n");
+ free(ref_array);
+ return FAIL;
+ }
+
+ /*obtain group name and class name.*/
+ h4toh5_ZeroMemory(vgroup_class,VGNAMELENMAX);
+ istat = Vgetclass(vgroup_id,vgroup_class);
+ if(istat == FAIL) {
+ printf("error in getting vgroup class.\n");
+ free(ref_array);
+ Vdetach(vgroup_id);
+ return FAIL;
+ }
+
+ h4toh5_ZeroMemory(vgroup_name,VGNAMELENMAX);
+ istat = Vgetname(vgroup_id,vgroup_name);
+ if(istat == FAIL ) {
+ printf("error in getting vgroup name. \n");
+ Vdetach(vgroup_id);
+ free(ref_array);
+ return FAIL;
+ }
+
+    /* check for the CDF0.0 and RIG0.0 classes; if found,
+       don't go into this group.*/
+
+ if(strcmp(vgroup_class,_HDF_CDF)==0) {
+ Vdetach(vgroup_id);
+ continue;
+ }
+ if(strcmp(vgroup_class,GR_NAME)==0) {
+ Vdetach(vgroup_id);
+ continue;
+ }
+
+ /* converting integer number into string format. */
+ if(conv_int_str(ref_array[lone_vg_number],refstr) == FAIL) {
+ printf("ref. is negative, error in converting\n");
+ Vdetach(vgroup_id);
+ free(ref_array);
+ return FAIL;
+ }
+
+    /* check whether the vgroup name contains ORI_SLASH; if so, change it into CHA_SLASH.*/
+ cor_vgroupname = correct_name(vgroup_name);
+ if(cor_vgroupname == NULL) {
+ printf("error in generating corrected vgroup name. \n");
+ Vdetach(vgroup_id);
+ free(ref_array);
+ return FAIL;
+ }
+
+ /* obtaining group name of the converted lone vgroup. In this call,
+ we will deal with cases such as name clashing and no available vgroup
+ name. */
+
+ h5cgroup_name = get_obj_aboname(cor_vgroupname,refstr,NULL,HDF4_VGROUP);
+
+ if(h5cgroup_name == NULL) {
+ printf("error in getting group name.\n");
+ Vdetach(vgroup_id);
+ free(ref_array);
+ free(cor_vgroupname);
+ return FAIL;
+ }
+
+ /* free memory of corrected name. */
+ free(cor_vgroupname);
+
+ /* updating lookup table for vgroups.*/
+
+ check_vgroup = lookup(ref_array[lone_vg_number],estnum_vg,vg_hashtab);
+
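+    /* lookup() returns 0 when this reference number is not yet in the
+       table and 1 when it already is; a lone vgroup should always be new
+       at this point. */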
+ if(check_vgroup == 0) { /* adding this vgroup into the list. */
+
+ check_tabst = set_name(ref_array[lone_vg_number],estnum_vg,
+ vg_hashtab,h5cgroup_name);
+ if(check_tabst == FAIL) {
+ printf("not enough memory to be allocated for vgroup name. \n");
+ Vdetach(vgroup_id);
+ free(h5cgroup_name);
+ free(ref_array);
+ return FAIL;
+ }
+ }
+
+    /* this check should never fail; if it does, something is wrong with the converter or the hdf library. */
+
+ if(check_vgroup == 1){
+      fprintf(stderr,"this lone vgroup should not have been visited before. \n");
+ Vdetach(vgroup_id);
+ free(h5cgroup_name);
+ free(ref_array);
+ return FAIL;
+ }
+
+ if(Vgroup_h4_to_h5(file_id,vgroup_id,sd_id,h5group,h5_dimg,h5_palg)==FAIL){
+ printf("error in translating vgroup into hdf5 objects.\n");
+ Vdetach(vgroup_id);
+ free(h5cgroup_name);
+ free(ref_array);
+ return FAIL;
+ }
+
+ Vdetach(vgroup_id);
+ free(h5cgroup_name);
+ }
+ free(ref_array);
+ return SUCCEED;
+}
+
+
+
+/*-------------------------------------------------------------------------
+ * Function: h4toh5vgrings
+ *
+ * Purpose: Recursively convert objects at special hdf4 vgroups
+ (vgroup rings)
+ into objects of corresponding hdf5 groups. The strategy here
+ is to arbitrily grab any vgroup in the group ring and put it
+                is to arbitrarily grab any vgroup in the group ring and put it
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In : file_id: hdf file id
+ sd_id: hdf sds id
+ h5group: hdf5 group id
+ h5_dimg: hdf5 dimensional scale group id
+ h5_palg: hdf5 palette group id
+
+ Out:
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+
+int h4toh5vgrings(int32 file_id,int32 sd_id,hid_t h5group,hid_t h5_dimg,hid_t h5_palg){
+
+ int32 vgroup_id;
+ int32 ref_num;
+ char vgroup_name[VGNAMELENMAX];
+ char* cor_vgroupname;
+ char vgroup_class[VGNAMELENMAX];
+ char refstr[MAXREF_LENGTH];
+ int check_vgroup;
+ int32 istat;
+ char *h5cgroup_name;
+
+ ref_num = Vgetid(file_id,-1);
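+  /* Vgetid(file_id,-1) returns the reference number of the first vgroup
+     in the file; each later call with the previous ref_num returns the
+     next vgroup, and -1 once every vgroup has been visited. */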
+
+ while (ref_num != -1) {
+
+ /* if we find a group that is not touched, grab it under root group.*/
+
+ check_vgroup = lookup(ref_num,estnum_vg,vg_hashtab);
+
+ if (check_vgroup == 0){
+
+ vgroup_id = Vattach(file_id,ref_num,"r");
+ if(vgroup_id ==FAIL) {
+ printf("error in attaching group in a group ring. \n");
+ return FAIL;
+ }
+
+ h4toh5_ZeroMemory(vgroup_name,VGNAMELENMAX);
+ istat = Vgetname(vgroup_id,vgroup_name);
+ if(istat ==FAIL) {
+ printf("error in obtaining vgroup names. \n");
+ Vdetach(vgroup_id);
+ return FAIL;
+ }
+
+ h4toh5_ZeroMemory(vgroup_class,VGNAMELENMAX);
+ if(Vgetclass(vgroup_id,vgroup_class) == FAIL) {
+ printf("error in obtaining vgroup class name. \n");
+ Vdetach(vgroup_id);
+ return FAIL;
+ }
+
+      /* skip vgroups whose class matches one of the predefined (internal) class names.*/
+
+ if(vgroup_class[0] != '\0') {
+
+ if(strcmp(vgroup_class,_HDF_ATTRIBUTE)==0) {
+ ref_num = Vgetid(file_id,ref_num);
+ Vdetach(vgroup_id);
+ continue;
+ }
+
+ if(strcmp(vgroup_class,_HDF_VARIABLE)==0) {
+ ref_num = Vgetid(file_id,ref_num);
+ Vdetach(vgroup_id);
+ continue;
+ }
+
+ if(strcmp(vgroup_class,_HDF_DIMENSION)==0) {
+ ref_num = Vgetid(file_id,ref_num);
+ Vdetach(vgroup_id);
+ continue;
+ }
+
+ if(strcmp(vgroup_class,_HDF_UDIMENSION)==0) {
+ ref_num = Vgetid(file_id,ref_num);
+ Vdetach(vgroup_id);
+ continue;
+ }
+
+ if(strcmp(vgroup_class,_HDF_CDF)==0) {
+ ref_num = Vgetid(file_id,ref_num);
+ Vdetach(vgroup_id);
+ continue;
+ }
+
+ if(strcmp(vgroup_class,GR_NAME)==0) {
+ ref_num = Vgetid(file_id,ref_num);
+ Vdetach(vgroup_id);
+ continue;
+ }
+
+ if(strcmp(vgroup_class,RI_NAME)==0) {
+ ref_num = Vgetid(file_id,ref_num);
+ Vdetach(vgroup_id);
+ continue;
+ }
+ }
+
+ if(vgroup_name[0] != '\0') {
+ if(strcmp(vgroup_name,GR_NAME)==0) {
+ ref_num = Vgetid(file_id,ref_num);
+ Vdetach(vgroup_id);
+ continue;
+ }
+ }
+
+ /* convert reference number into string format. */
+ if(conv_int_str(ref_num,refstr) == FAIL) {
+ printf("ref. is negative, error in converting\n");
+ Vdetach(vgroup_id);
+ return FAIL;
+ }
+
+      /* check whether the vgroup name contains ORI_SLASH; if so, change it into CHA_SLASH.*/
+ cor_vgroupname = correct_name(vgroup_name);
+ if(cor_vgroupname == NULL) {
+ printf("error in generating corrected vgroup name. \n");
+ Vdetach(vgroup_id);
+ return FAIL;
+ }
+ /* obtain the hdf5 group name. */
+ h5cgroup_name = get_obj_aboname(cor_vgroupname,refstr,NULL,HDF4_VGROUP);
+
+ if(h5cgroup_name == NULL) {
+ printf("error in getting vgroup name.\n");
+ Vdetach(vgroup_id);
+ free(cor_vgroupname);
+ return FAIL;
+ }
+
+ free(cor_vgroupname);
+ if(set_name(ref_num,estnum_vg,vg_hashtab,h5cgroup_name)==FAIL) {
+ printf("error in setting h5 group name.\n");
+ Vdetach(vgroup_id);
+ free(h5cgroup_name);
+ return FAIL;
+ }
+
+ if(Vgroup_h4_to_h5(file_id,vgroup_id,sd_id,h5group,h5_dimg,h5_palg)
+ ==FAIL){
+
+ printf("error in translating vgroup into hdf5 group\n");
+ Vdetach(vgroup_id);
+ free(h5cgroup_name);
+ return FAIL;
+ }
+
+ Vdetach(vgroup_id);
+ free(h5cgroup_name);
+ }
+ ref_num = Vgetid(file_id,ref_num);
+ }
+ return SUCCEED;
+
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: h4toh5lonevds
+ *
+ * Purpose: convert hdf4 lone vdata into
+ the corresponding hdf5 datasets
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In : file_id: hdf file id
+ h5group: hdf5 group id
+
+ Out:
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+int h4toh5lonevds(int32 file_id, hid_t h5group){
+
+ int32 vdata_id;
+ int32 *ref_vdata_array;
+ int32 vdata_tag;
+ int32 vdata_ref;
+ int32 istat;
+ char vdata_name[VGNAMELENMAX];
+ char* cor_vdataname;
+ char vdata_class[VGNAMELENMAX];
+ char refstr[MAXREF_LENGTH];
+ int check_vdata;
+ int lone_vd_number;
+ int num_lonevd;
+ char *h5cvdata_name;
+
+ num_lonevd = VSlone(file_id,NULL,0);
+
+ if (num_lonevd == FAIL) {
+    printf("error in obtaining lone vdata number. \n");
+ return FAIL;
+ }
+
+ if (num_lonevd > 0) {
+
+ ref_vdata_array = (int32 *)malloc(sizeof(int32) *(num_lonevd));
+
+ num_lonevd = VSlone(file_id,ref_vdata_array,num_lonevd);
+
+    if(num_lonevd == FAIL) {
+      printf("error in obtaining lone vdata number the second time.\n");
+      free(ref_vdata_array);
+      return FAIL;
+    }
+ /* walk through all lone vdatas. */
+
+ for(lone_vd_number = 0; lone_vd_number < num_lonevd;lone_vd_number++)
+ {
+ vdata_id = VSattach(file_id,ref_vdata_array[lone_vd_number],"r");
+
+      if(vdata_id == FAIL) {
+        printf("error in obtaining vdata id for lone vdata.\n");
+        free(ref_vdata_array);
+        return FAIL;
+      }
+
+ /* Make sure this vdata is not an attribute of other hdf4 objects.*/
+
+ if(!VSisattr(vdata_id)) {
+
+ h4toh5_ZeroMemory(vdata_class,VGNAMELENMAX);
+ istat = VSgetclass(vdata_id,vdata_class);
+ if(istat == FAIL) {
+ printf("error in getting vdata class name.\n");
+ free(ref_vdata_array);
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+
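+        /* vdatas of the chunk-table class hold internal chunking
+           information for other hdf4 objects, so they are skipped. */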
+ if(!strncmp(vdata_class,_HDF_CHK_TBL_CLASS,strlen(_HDF_CHK_TBL_CLASS))){
+
+ VSdetach(vdata_id);
+ continue;
+ }
+
+ vdata_ref = VSQueryref(vdata_id);
+
+ if(vdata_ref == FAIL) {
+ printf("error in getting vdata reference number.\n");
+ free(ref_vdata_array);
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+ vdata_tag = VSQuerytag(vdata_id);
+ if(vdata_tag == FAIL){
+ printf("error in getting vdata tag.\n");
+ free(ref_vdata_array);
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+
+ h4toh5_ZeroMemory(vdata_name,VGNAMELENMAX);
+ istat = VSQueryname(vdata_id,vdata_name);
+
+ if(istat == FAIL) {
+ printf("error in getting vdata name. \n");
+ free(ref_vdata_array);
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+ /* converting reference number into string format.*/
+ if(conv_int_str(ref_vdata_array[lone_vd_number],refstr)==FAIL) {
+ printf("error in converting int to string.\n");
+ free(ref_vdata_array);
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+      /* check whether the vdata name contains ORI_SLASH; if so, change it into CHA_SLASH.*/
+ cor_vdataname = correct_name(vdata_name);
+ if(cor_vdataname == NULL) {
+	  printf("error in generating corrected vdata name. \n");
+ VSdetach(vdata_id);
+ free(ref_vdata_array);
+ return FAIL;
+ }
+ /* obtaining hdf5 dataset name that is converted from hdf4 vdata.*/
+ h5cvdata_name = get_obj_aboname(cor_vdataname,refstr,NULL,HDF4_VDATA);
+ if(h5cvdata_name == NULL) {
+ printf("error in getting vdata name.\n");
+ free(ref_vdata_array);
+ VSdetach(vdata_id);
+ free(cor_vdataname);
+ return FAIL;
+ }
+
+ free(cor_vdataname);
+ check_vdata = lookup(ref_vdata_array[lone_vd_number],estnum_vd,
+ vd_hashtab);
+
+	/* check_vdata should be 0 here; if it is 1, either the converter or the hdf library has bugs. */
+ if(check_vdata == 1){
+	  printf("lone vdata should not have been visited before.\n");
+ free(h5cvdata_name);
+ free(ref_vdata_array);
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+ if(set_name(ref_vdata_array[lone_vd_number],estnum_vd,vd_hashtab,
+ h5cvdata_name)==FAIL) {
+ printf("error in setting lone vdata name. \n");
+ free(ref_vdata_array);
+ free(h5cvdata_name);
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+ if(Vdata_h4_to_h5(file_id,vdata_id,h5group)== FAIL) {
+ printf("error in translating independent vdata into");
+ printf(" hdf5 datasets.\n");
+ free(h5cvdata_name);
+ free(ref_vdata_array);
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+ free(h5cvdata_name);
+ }
+
+ VSdetach(vdata_id);
+ }
+ free(ref_vdata_array);
+ }
+ return SUCCEED;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: h4toh5unvisitedsds
+ *
+ * Purpose: convert unvisited sds objects into hdf5 datasets and put these
+ datasets under hdf5 root group
+               This routine covers old hdf files that don't have vgroups.
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ sd_id: hdf sds id
+ h5root: hdf5 root id
+ h5_dimg: hdf5 dimensional scale group id
+
+ Out:
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+
+
+int h4toh5unvisitedsds(int32 file_id,int32 sd_id,hid_t h5root,hid_t h5_dimg) {
+
+ int i;
+  int32 sds_id;/* sd dataset identifier*/
+ int32 sds_rank;/* sds dataset dimension rank.*/
+ int32 sds_dimsizes[DIM_HASHSIZE];/* array that contains the size of the each dimension in sds dataset.*/
+ int32 sds_dtype;/*sds dataset datatype.*/
+ int32 num_sdsattrs;/* number of sds attributes. */
+ char sds_name[MAX_NC_NAME];/* sds name.*/
+ char* cor_sdsname;
+ int32 obj_ref; /* obj reference number assigned to sds and images.*/
+ char refstr[MAXREF_LENGTH];/*object reference number in character string format.*/
+ int check_sds;/* flag to check whether this sds is visited. 1 for visited and 0 for non-visited.*/
+ char *h5csds_name;/* absolute path name of hdf5 dataset transferred from old sds.*/
+
+ if(sd_id == FAIL) {
+ printf("error: cannot start SD interface. \n");
+ return FAIL;
+ }
+
+ /* check all sds objects. */
+ for(i=0;i<num_sds;i++){
+
+ sds_id = SDselect(sd_id,i);
+
+ if (sds_id == FAIL) {
+ printf("error in obtaining sd id.\n");
+ return FAIL;
+ }
+
+    /* if this sds is a dimensional scale, skip it here; dimensional scales are converted separately. */
+    if(SDiscoordvar(sds_id)) {
+      SDendaccess(sds_id);
+      continue;
+    }
+
+ /* obtain sds information. */
+ if(SDgetinfo(sds_id,sds_name,&sds_rank,sds_dimsizes,
+ &sds_dtype,&num_sdsattrs)== FAIL) {
+ printf("error in obtaining SD info at ");
+ printf("the unvisited sds routine.\n");
+ SDendaccess(sds_id);
+ return FAIL;
+ }
+
+ /* obtain object reference number of the current sds dataset.*/
+ obj_ref = SDidtoref(sds_id);
+ if(obj_ref == FAIL) {
+ printf("error in obtaining sds object reference at ");
+ printf("the unvisited sds routine.\n");
+ SDendaccess(sds_id);
+ return FAIL;
+ }
+
+ /* convert object reference number into string format. */
+ if(conv_int_str(obj_ref,refstr) == FAIL) {
+ printf("error in converting integer into string.\n");
+ SDendaccess(sds_id);
+ return FAIL;
+ }
+
+ /* check whether the current sds is visited or not. */
+ check_sds = lookup(obj_ref,2*num_sds,sds_hashtab);
+
+ /* if not visited, we will do the convertion. */
+
+ if(check_sds == 0) {
+      /* different hdf sds objects may share the same name, and an sds may
+         legally have no name at all; an hdf5 dataset, however, must have
+         a name, so get_obj_aboname is used to make sure that each hdf5
+         dataset converted from an sds object gets an unambiguous name. */
+
+      /* check whether the sds name contains ORI_SLASH; if so, change it into CHA_SLASH.*/
+ cor_sdsname = correct_name(sds_name);
+ if(cor_sdsname == NULL) {
+ printf("error in generating corrected sds name. \n");
+ SDendaccess(sds_id);
+ return FAIL;
+ }
+
+ h5csds_name = get_obj_aboname(cor_sdsname,refstr,NULL,HDF4_SDS);
+ if(h5csds_name == NULL) {
+ printf("error in obtaining sds name.\n");
+ SDendaccess(sds_id);
+ free(cor_sdsname);
+ return FAIL;
+ }
+ free(cor_sdsname);
+ /* put this name into hashtable. */
+ if(set_name(obj_ref,2*num_sds,sds_hashtab,h5csds_name)==FAIL) {
+ printf("error in setting object name.\n");
+ SDendaccess(sds_id);
+ free(h5csds_name);
+ return FAIL;
+ }
+
+ /* do the convertion from sds into hdf5 dataset.*/
+ if(Sds_h4_to_h5(file_id,sds_id,h5root,h5_dimg)== FAIL){
+ printf("error in translating sds into hdf5 dataset.\n");
+ SDendaccess(sds_id);
+ free(h5csds_name);
+ return FAIL;
+ }
+ free(h5csds_name);
+
+ }
+ SDendaccess(sds_id);
+
+ }
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: h4toh5unvisitedimages
+ *
+ * Purpose: convert unvisited images into hdf5 dataset and put it
+ under hdf5 root group
+               This routine covers old hdf files.
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: hdf file id
+ h5_root: hdf5 root id
+ h5_palg: hdf5 palette group id
+
+ Out:
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+
+int h4toh5unvisitedimages(int32 file_id,hid_t h5_root,hid_t h5_palg) {
+
+ int i;
+ int32 istat;
+ int32 gr_id;
+  int32 ri_id;/*raster image identifier.*/
+ char image_name[MAX_GR_NAME];/* image name.*/
+ char* cor_imagename;
+ int check_image;/* flag to check whether this image is visited. 1 for visited and 0 for non-visited.*/
+ int32 obj_ref; /* obj reference number assigned to sds and images.*/
+ char refstr[MAXREF_LENGTH];/*object reference number in character string format.*/
+ char *h5cimage_name;/* absolute path name of hdf5 dataset transferred from old image.*/
+
+ gr_id = GRstart(file_id);
+ if(gr_id == FAIL) {
+ printf("error in obtaining gr id. \n");
+ return FAIL;
+ }
+
+ /* check all images. */
+ for (i=0;i<num_images;i++) {
+
+ ri_id = GRselect(gr_id,i);
+ if(ri_id ==FAIL) {
+ printf("error in selecting gr interface.\n");
+ return FAIL;
+ }
+
+ /* obtain information of GR */
+ istat = GRgetiminfo(ri_id, image_name, NULL, NULL, NULL, NULL, NULL);
+
+ if(istat == FAIL) {
+ printf("error in getting GR images.\n");
+ GRendaccess(ri_id);
+ return FAIL;
+ }
+
+ /* obtain object reference number and convert it into string format. */
+ obj_ref = GRidtoref(ri_id);
+ if(obj_ref == 0) {
+ printf("error in obtaining image reference number");
+ printf(" at h4toh5unvisitedimages routine.\n");
+ GRendaccess(ri_id);
+ return FAIL;
+ }
+
+ if(conv_int_str(obj_ref,refstr)== FAIL) {
+ printf("error in converting object reference number");
+ printf(" into string at h4toh5unvisitedimages routine.\n");
+ GRendaccess(ri_id);
+ return FAIL;
+ }
+
+ /* check whether the current image is visited or not. */
+ check_image = lookup(obj_ref,2*num_images,gr_hashtab);
+
+ if(check_image == 0) {
+
+      /* different hdf images may share the same name, and an image may
+         legally have no name at all; an hdf5 dataset, however, must have
+         a name, so get_obj_aboname is used to guarantee that each hdf5
+         dataset converted from an image object gets an unambiguous name. */
+
+      /* check whether the image name contains ORI_SLASH; if so,
+         change it into CHA_SLASH.*/
+
+ cor_imagename = correct_name(image_name);
+ if(cor_imagename == NULL) {
+ printf("error in generating corrected image name. \n");
+ GRendaccess(ri_id);
+ return FAIL;
+ }
+ h5cimage_name = get_obj_aboname(cor_imagename,refstr,NULL,
+ HDF4_IMAGE);
+ if(h5cimage_name == NULL) {
+ printf("error in getting image name.\n");
+ GRendaccess(ri_id);
+ free(cor_imagename);
+ return FAIL;
+ }
+ free(cor_imagename);
+
+ if(set_name(obj_ref,2*num_images,gr_hashtab,h5cimage_name)==FAIL) {
+ printf("error setting image name.\n");
+ GRendaccess(ri_id);
+ free(h5cimage_name);
+ return FAIL;
+ }
+
+ /* do the convertion from the image into hdf5 dataset.*/
+ if(Image_h4_to_h5(file_id,ri_id,h5_root,h5_palg)== FAIL) {
+	printf("error in translating the image into an hdf5 dataset.\n");
+ GRendaccess(ri_id);
+ free(h5cimage_name);
+ return FAIL;
+ }
+ free(h5cimage_name);
+ }
+ GRendaccess(ri_id);
+ }
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: free_allhashmemory()
+ *
+ * Purpose: free memory allocated for all hashtables
+ *
+ * Return:
+ *
+ * In :
+
+
+ Out:
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+
+void free_allhashmemory(){
+
+ if(estnum_vg != 0) freetable(estnum_vg,vg_hashtab);
+ if(estnum_vd != 0) freetable(estnum_vd,vd_hashtab);
+
+ if(num_sds !=0) {
+ freetable(2*num_sds,sds_hashtab);
+ freenametable(DIM_HASHSIZE,dim_hashtab);
+ }
+
+ if(num_images !=0) {
+ freetable(2*num_images,gr_hashtab);
+ freetable(PAL_HASHSIZE,pal_hashtab);
+ }
+
+ if(num_objects !=0) freenametable(num_objects,name_hashtab);
+
+}
+
+
+
+/********The following routines are adapted from h5toh4 converter. *******/
+/*****************************************************************************
+
+ Routine: test_file
+
+ Description: Test a file for read/write ability.
+
+ Input: filename - Unix filename
+
+ Output: function return, global variable - errno
+
+*****************************************************************************/
+
+int test_file(char *filename,int oflag,mode_t mode)
+{
+ int fid;
+
+ errno = 0;
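+    /* errno is cleared first so that the value returned below is 0 when
+       the file can be opened with the requested flags and mode, and the
+       errno produced by open() otherwise. */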
+
+    fid = open(filename, oflag, mode);
+    if (fid < 0) {
+        perror(filename);
+    } else {
+        close(fid);
+    }
+
+ return errno;
+
+}
+
+
+/*****************************************************************************
+
+ Routine: test_dir
+
+ Description: Test pathway to determine if it is a directory
+
+ Input: path - pathname given
+
+ Output: function return TRUE/FALSE
+
+*****************************************************************************/
+
+int test_dir(char *path)
+{
+
+ struct stat buf;
+ struct stat *buf_ptr;
+ int idir;
+
+ buf_ptr = &buf;
+
+ idir = stat(path, buf_ptr);
+ if (idir < 0) {
+    if (errno == ENOENT) { /* path does not exist */
+ return 0;
+ } else {
+ perror(path);
+ }
+ }
+
+ return S_ISDIR(buf_ptr->st_mode);
+}
+
+/*****************************************************************************
+
+ Routine: BuildFilename()
+
+ Description: Build a filename with new extension
+
+ Input: filename - present filename
+ ext - extension to root of filename
+
+ Output: (filename:r).ext
+
+*****************************************************************************/
+
+char *BuildFilename(char *filename, char *ext)
+{
+ /* build outgoing filename */
+
+ char *filename_out;
+ char *lastper_ptr, *lastdir_ptr;
+ int root_len;
+
+ lastper_ptr = strrchr(filename,'.');
+ lastdir_ptr = strrchr(filename,'/');
+
+ if ( lastper_ptr <= lastdir_ptr ) { /* no extension */
+ root_len = strlen(filename);
+ } else { /* existing extension */
+ root_len = (int)(lastper_ptr - filename);
+ }
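+    /* Examples: BuildFilename("dir/data.hdf","h5") yields "dir/data.h5",
+       and BuildFilename("data","h5") yields "data.h5". */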
+
+ filename_out = (char *)HDmalloc(root_len + strlen(ext) + 2);
+ filename_out = strncpy(filename_out, filename, (size_t)root_len);
+ filename_out[root_len] = '\0';
+ filename_out = strcat(filename_out,".");
+ filename_out = strcat(filename_out,ext);
+
+ return filename_out;
+}
+
+
+/*****************************************************************************
+
+ Routine: PrintOptions_h4toh5()
+
+ Description: This routine prints the acceptable argument formats out to stderr.
+
+ Input: None
+
+ Output: output to stderr
+
+*****************************************************************************/
+
+void PrintOptions_h4toh5(void)
+{
+ fprintf(stderr,"\nUsage: ");
+ fprintf(stderr,"\n h4toh5 -h (gives this print-out)\n");
+ fprintf(stderr," h4toh5 input.hdf output.h5\n");
+ fprintf(stderr," h4toh5 input.hdf\n");
+}
+
+
+
+
+
diff --git a/tools/h4toh5/h4toh5main.h b/tools/h4toh5/h4toh5main.h
new file mode 100644
index 0000000..2bb6c88
--- /dev/null
+++ b/tools/h4toh5/h4toh5main.h
@@ -0,0 +1,109 @@
+/*-------------------------------------------------------------------------
+ *
+ * Copyright (C) 2000 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/******************************************************************************
+
+Description:
+
+1. converter
+
+See HDF4 to HDF5 mapping specification at
+(http://hdf.ncsa.uiuc.edu/HDF5/papers/h4toh5) for the default mapping
+from HDF4 object to HDF5 object.
+
+The whole converter includes 10 files, h4toh5util.h, h4toh5main.h, h4toh5util.c, h4toh5main.c, h4toh5sds.c, h4toh5image.c,h4toh5vdata.c,h4toh5vgroup.c,h4toh5pal.c and h4toh5anno.c.
+
+2. this file
+
+including declarations of subroutines of all .c files excluding h4toh5util.c.
+
+Author: Kent Yang(ymuqun@ncsa.uiuc.edu)
+
+
+*****************************************************************************/
+
+
+#ifndef H4TOH5MAIN_H
+#define H4TOH5MAIN_H
+#include "hdf.h"
+#include "mfhdf.h"
+#include "hdf5.h"
+#include "h4toh5util.h"
+#include <fcntl.h>
+#include <errno.h>
+
+/* For windows support.*/
+#ifdef WIN32
+typedef unsigned int mode_t;
+#endif
+
+#ifndef S_ISDIR
+#define S_ISDIR(mode) (((mode)&0xF000) == S_IFDIR)
+#endif
+
+/* subroutines to check initial settings and inputting parameters.
+Adapted from h5toh4 tools and used for h4toh5main.c */
+
+void PrintOptions_h4toh5(void);
+int test_file(char *filename,int oflag,mode_t mode);
+int test_dir(char *);
+char *BuildFilename(char *filename, char *ext);
+
+/* subroutines for h4toh5main.c */
+int h4toh5(char*,char*);
+int get_numof_hdf4obj(char*,int32);
+int set_hashtables(void);
+int set_helpgroups(hid_t,hid_t*,hid_t*);
+int h4toh5lonevds(int32,hid_t);
+int h4toh5lonevgs(int32,int32,hid_t,hid_t,hid_t);
+int h4toh5vgrings(int32,int32,hid_t,hid_t,hid_t);
+int h4toh5unvisitedimages(int32,hid_t,hid_t);
+int h4toh5unvisitedsds(int32,int32,hid_t,hid_t);
+void free_allhashmemory(void);
+
+/*subroutines for h4toh5vgroup.c*/
+
+int Vgroup_h4_to_h5(int32,int32,int32,hid_t,hid_t,hid_t);
+int convert_vgroup(int32,int32, int32,char* ,hid_t,hid_t,hid_t);
+int convert_vdata(int32,int32,char*,hid_t);
+int convert_sds(int32,int32,int32,char*,hid_t,hid_t);
+int convert_image(int32,int32,char*,hid_t,hid_t);
+
+/*subroutines for h4toh5vdata.c*/
+
+int Vdata_h4_to_h5(int32,int32,hid_t);
+int vdata_transattrs(int32,hid_t,int,int,char*);
+int gen_h5comptype(int32,int32,size_t *,size_t*,hid_t*,hid_t*,hid_t,hid_t);
+
+/* subroutines for h4toh5sds.c*/
+int Sds_h4_to_h5(int32,int32,hid_t,hid_t);
+int sds_transattrs(int32, hid_t,int,int);
+int sdsdim_to_h5dataset(int32,int32,hid_t,hid_t,int32);
+
+
+/*subroutines for h4toh5image.c*/
+int Image_h4_to_h5(int32,int32,hid_t,hid_t);
+int gr_tranattrs(int32, hid_t,int,int);
+int gr_palette(int32,int32,hid_t,hid_t);
+int create_pal_objref(hid_t ,hid_t ,char *);
+
+/*subroutines for h4toh5anno.c*/
+char* trans_tag_name(int32,ann_type);
+int Annofil_h4_to_h5(int32,hid_t);
+int Annoobj_h4_to_h5(int32,int32,int32,hid_t);
+
+/*subroutines for h4toh5pal.c*/
+int Palette_h4_to_h5(int32,int32 ,hid_t,char *);
+
+
+
+
+
+
+#endif /* H4TOH5MAIN_H */
diff --git a/tools/h4toh5/h4toh5sds.c b/tools/h4toh5/h4toh5sds.c
new file mode 100644
index 0000000..89e5ddf
--- /dev/null
+++ b/tools/h4toh5/h4toh5sds.c
@@ -0,0 +1,1096 @@
+/*-------------------------------------------------------------------------
+ *
+ * Copyright (C) 2000 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/******************************************************************************
+
+ Description:
+
+1. converter
+
+See HDF4 to HDF5 mapping specification at
+(http://hdf.ncsa.uiuc.edu/HDF5/papers/h4toh5) for the default mapping
+from HDF4 object to HDF5 object.
+
+The whole converter includes 10 files, h4toh5util.h, h4toh5main.h, h4toh5util.c, h4toh5main.c, h4toh5sds.c, h4toh5image.c,h4toh5vdata.c,h4toh5vgroup.c,h4toh5pal.c and h4toh5anno.c.
+
+2. this file
+
+Converting an hdf4 sds object into an hdf5 dataset.
+
+Author: Kent Yang(ymuqun@ncsa.uiuc.edu)
+
+
+*****************************************************************************/
+
+#include "h4toh5main.h"
+
+/*-------------------------------------------------------------------------
+ * Function: Sds_h4_to_h5
+ *
+ * Purpose: translate SDS object into hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ sds_id: SDS identifier
+ h5_group: hdf5 group id
+ h5_dimgroup: hdf5 dimension group id
+ dim_pathname: dimensional path name
+
+ *-------------------------------------------------------------------------
+ */
+
+int Sds_h4_to_h5(int32 file_id,int32 sds_id,hid_t h5_group,hid_t h5_dimgroup){
+
+ int32 sds_dtype;
+ int32 sds_rank;
+ int32 sds_dimsizes[MAX_VAR_DIMS];
+ int32* sds_start;
+ int32* sds_edge;
+ int32* sds_stride;
+ int32 count_sdsdata;
+ int32 sds_ref;
+ intn sds_empty;
+ int32 istat;
+ int i;
+ int32 num_sdsattrs;
+ void* sds_data;
+
+ int check_sdsname;
+ int check_gloattr;
+
+ char sdsname[MAX_NC_NAME];
+ char sdslabel[MAX_NC_NAME];
+ size_t h4size;
+ size_t h4memsize;
+ HDF_CHUNK_DEF c_def_out;
+ hsize_t* chunk_dims;
+ int32 c_flags;
+
+  /* define variables for hdf5. */
+
+ hid_t h5dset;
+ hid_t h5d_sid;
+ hid_t h5ty_id;
+ hid_t h5_memtype;
+ hid_t create_plist;
+ hsize_t h5dims[MAX_VAR_DIMS];
+ hsize_t max_h5dims[MAX_VAR_DIMS];
+
+ char* h5csds_name;
+
+ herr_t ret;
+ /* zeroing out the memory for sdsname and sdslabel.*/
+
+ h4toh5_ZeroMemory(sdsname,MAX_NC_NAME);
+ h4toh5_ZeroMemory(sdslabel,MAX_NC_NAME);
+ /* check whether the sds is empty. */
+
+ if(SDcheckempty(sds_id,&sds_empty)== FAIL) {
+ printf("error in running SDcheckempty routine. \n");
+ return FAIL;
+ }
+
+ if(sds_empty != 0) return SUCCEED;
+
+
+ /*check whether the sds is created with unlimited dimension. */
+
+ if(SDgetchunkinfo(sds_id,&c_def_out, &c_flags)== FAIL) {
+ printf("error in getting chunking information. \n");
+ return FAIL;
+ }
+
+ /*obtain name,rank,dimsizes,datatype and num of attributes of sds */
+ if (SDgetinfo(sds_id,sdsname,&sds_rank,sds_dimsizes,&sds_dtype,
+ &num_sdsattrs)==FAIL) {
+ printf("unable to get information of sds h5dset.\n");
+ return FAIL;
+ }
+
+ /* obtain start,edge, stride and number of sds data. */
+
+ sds_start = malloc(sizeof(int32)*sds_rank);
+ if(sds_start == NULL) {
+ printf("error in allocating memory for sds start.\n");
+ return FAIL;
+ }
+
+ sds_edge = malloc(sizeof(int32)*sds_rank);
+ if(sds_edge == NULL) {
+ printf("error in allocating memory for sds edge.\n");
+ free(sds_start);
+ return FAIL;
+ }
+
+ sds_stride = malloc(sizeof(int32)*sds_rank);
+ if(sds_stride == NULL) {
+ printf("error in allocating memory for sds stride. \n");
+ free(sds_start);
+ free(sds_edge);
+ return FAIL;
+ }
+
+ count_sdsdata = 1;
+ for (i=0;i<sds_rank;i++){
+ sds_stride[i] = 1;
+ sds_start[i] = 0;
+ sds_edge[i] = sds_dimsizes[i];
+ count_sdsdata = count_sdsdata*sds_dimsizes[i];
+
+ }
+
+ for (i=0;i<sds_rank;i++) {
+ h5dims[i] = sds_edge[i]-sds_start[i];
+ max_h5dims[i] = h5dims[i];
+ }
+ if(SDisrecord(sds_id)) max_h5dims[0] = H5S_UNLIMITED;
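+  /* SDisrecord() reports whether the SDS has an unlimited (record) first
+     dimension; only that first dimension gets an unlimited maximum size
+     on the hdf5 side. */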
+
+ /* convert hdf4 data type to hdf5 data type. */
+ if (h4type_to_h5type(sds_dtype,&h5_memtype,&h4memsize,&h4size,
+ &h5ty_id) == FAIL) {
+ printf("failed to translate datatype. \n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ return FAIL;
+ }
+
+  /* check whether the datatype is string; if a string type is found,
+     change it back into an integer format.*/
+
+ if (h5ty_id == H5T_STRING) {
+ /* rechange string datatype into numerical datatype.*/
+ if(h5string_to_int(sds_dtype,&h5_memtype,h4memsize,
+ &h5ty_id)== FAIL) {
+ printf("error in translating H5T_STRING to int.\n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ return FAIL;
+ }
+ }
+
+ sds_data = malloc(h4memsize*count_sdsdata);
+ if(sds_data == NULL) {
+ printf("error in allocating memory. \n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ return FAIL;
+ }
+
+ istat = SDreaddata(sds_id, sds_start, sds_stride, sds_edge,
+ (VOIDP)sds_data);
+ if (istat == FAIL) {
+ printf("unable to read data from h5dset. \n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ return FAIL;
+ }
+
+ /* obtaining reference number and name of h5 dataset
+ corresponding to sds. */
+
+ sds_ref = SDidtoref(sds_id);
+ if(sds_ref == FAIL) {
+ printf("error in obtaining sds reference number. \n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ return FAIL;
+ }
+
+ h5csds_name = get_name(sds_ref,2*num_sds,sds_hashtab,&check_sdsname);
+ if (h5csds_name == NULL && check_sdsname == 0 ) {
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ printf("error,cannot find sds name \n");
+ return FAIL;
+ }
+
+ if (h5csds_name == NULL && check_sdsname == -1) {
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ printf("error,sds name is not defined.\n");
+ return FAIL;
+ }
+
+ if (h5csds_name == NULL && check_sdsname == -2) {
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ printf("error,not enough memory for allocating sds name.\n");
+ return FAIL;
+ }
+
+ h5d_sid = H5Screate_simple(sds_rank,h5dims,max_h5dims);
+
+ if (h5d_sid < 0) {
+ printf("failed to create hdf5 data space converted from SDS. \n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ return FAIL;
+ }
+
+ /* create property list. */
+
+ create_plist = H5Pcreate(H5P_DATASET_CREATE);
+ chunk_dims = malloc(sizeof(hsize_t)*sds_rank);
+
+  /* if the sds is not chunked but has an unlimited dimension, we have to
+     provide a chunk size for the corresponding hdf5 dataset; we currently
+     choose half of each dimension size. */
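+  /* Illustrative example: an unlimited-dimension SDS whose current extent
+     is 100 x 200 is given a 50 x 100 chunk shape here. */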
+
+ if(c_flags == HDF_NONE && SDisrecord(sds_id))
+ {
+ for(i=0;i<sds_rank;i++){
+ chunk_dims[i] = (hsize_t)(h5dims[i]/2);
+ }
+ if(H5Pset_chunk(create_plist, sds_rank, chunk_dims)<0) {
+ printf("failed to set up chunking information for ");
+ printf("property list.\n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ }
+ if(c_flags == HDF_CHUNK || c_flags == (HDF_CHUNK | HDF_COMP)
+ || c_flags == (HDF_CHUNK | HDF_NBIT) ){
+
+ for(i=0;i<sds_rank;i++)
+ chunk_dims[i] = (hsize_t)c_def_out.chunk_lengths[i];
+
+ if(H5Pset_chunk(create_plist, sds_rank, chunk_dims)<0) {
+ printf("failed to set up chunking information for ");
+ printf("property list.\n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+ }
+
+
+ h5dset = H5Dcreate(h5_group,h5csds_name,h5ty_id,h5d_sid,create_plist);
+
+ if (h5dset < 0) {
+ printf("failed to create hdf5 dataset converted from SDS. \n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ if (H5Dwrite(h5dset,h5_memtype,h5d_sid,h5d_sid,H5P_DEFAULT,
+ (void *)sds_data)<0) {
+ printf("failed to write data into hdf5 dataset");
+ printf(" converted from SDS.\n");
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ return FAIL;
+ }
+
+
+  /* convert sds annotations into attributes of the hdf5 dataset.
+     Since there is no routine to find the exact tag of an sds object,
+     we check the three possible object tags of sds objects, namely
+     DFTAG_SD, DFTAG_SDG and DFTAG_NDG. If the object tag of an sds object
+     falls outside this set, its annotations are not converted into
+     hdf5 attributes; it is the user's responsibility to make sure the
+     object tags of sds objects are one of the above three tags.*/
+
+ if(Annoobj_h4_to_h5(file_id,sds_ref,DFTAG_SD,h5dset)== FAIL){
+ printf("failed to convert sds annotation into hdf5 attribute.\n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ if(Annoobj_h4_to_h5(file_id,sds_ref,DFTAG_SDG,h5dset)== FAIL){
+ printf("failed to convert sds annotation into hdf5 attribute.\n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ if(Annoobj_h4_to_h5(file_id,sds_ref,DFTAG_NDG,h5dset)== FAIL){
+ printf("failed to convert sds annotation into hdf5 attribute.\n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ /* convert sds dimensional scale dataset into hdf5 dataset. */
+ if(sdsdim_to_h5dataset(sds_id,sds_rank,h5dset,h5_dimgroup,sds_dimsizes[0]) == FAIL) {
+ printf("failed to convert dimensional scale to hdf5 dataset. \n");
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+ check_gloattr = 0;
+ if (sds_transattrs(sds_id,h5dset,num_sdsattrs,check_gloattr)==FAIL) {
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ printf(" Error in obtaining sds attributes. \n");
+ return FAIL;
+ }
+
+ /********************************************/
+ /* handle extra attributes of sds : sds label, object type
+ and reference num */
+
+ strcpy(sdslabel,SDSLABEL);
+
+ if(h4_transpredattrs(h5dset,HDF4_OBJECT_TYPE,sdslabel)==FAIL) {
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ printf("unable to transfer sds label to HDF4 OBJECT TYPE.\n");
+ return FAIL;
+ }
+
+ if(sdsname[0] != '\0') {
+ if(h4_transpredattrs(h5dset,HDF4_OBJECT_NAME,sdsname)==FAIL){
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ printf("unable to transfer sds name to HDF5 dataset attribute.\n");
+ return FAIL;
+ }
+ }
+
+ if(h4_transnumattr(h5dset,HDF4_REF_NUM,sds_ref)==FAIL){
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(sds_data);
+ free(chunk_dims);
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ H5Pclose(create_plist);
+ printf("unable to transfer sds ref. to HDF5 dataset attribute.\n");
+ return FAIL;
+ }
+
+ istat = SDendaccess(sds_id);
+ ret = H5Pclose(create_plist);
+ ret = H5Sclose(h5d_sid);
+ ret = H5Dclose(h5dset);
+ free(sds_data);
+ free(sds_start);
+ free(sds_edge);
+ free(sds_stride);
+ free(chunk_dims);
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: sds_transattrs
+ *
+ * Purpose: translate attribute of HDF4 SDS object into
+ hdf5 dataset attribute
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ ssds_id: SDS identifier
+ sh5_dset: hdf5 dataset
+ snum_sdsattrs: number of sds attribute
+ check_gloflag: a flag that check whether the attribute is
+ a file attribute or a sds id or a dimensional scale id.
+
+ *-------------------------------------------------------------------------
+ */
+
+int sds_transattrs(int32 ssds_id, hid_t sh5_dset,int snum_sdsattrs,
+ int check_gloflag) {
+
+ char ssdsatrr_name[2*MAX_NC_NAME];
+ char sdsglo[MAX_NC_NAME];
+ char* sdsrepattr_name;
+ int32 count_ssdsadata;
+ int32 ssds_atype;
+ size_t sh4_amemsize;
+ size_t sh4_asize;
+ hid_t sh5a_sid;
+ hid_t sh5a_id;
+ hid_t sh5_atype;
+ hid_t sh5_amemtype;
+ hid_t sh5str_type;
+ hid_t sh5str_memtype;
+ hsize_t sh5dims[MAX_VAR_DIMS];
+ void* ssds_adata;
+ herr_t sret;
+ int i;
+
+ for (i = 0;i < snum_sdsattrs; i++) {
+
+ if (SDattrinfo(ssds_id,i,ssdsatrr_name,&ssds_atype,
+ &count_ssdsadata)==FAIL){
+ printf("unable to obtain SDS attribute information. \n");
+ return FAIL;
+ }
+
+    /* map the hdf4 attribute data type to the corresponding hdf5 memory and file data types. */
+
+ if(h4type_to_h5type(ssds_atype,&sh5_amemtype,&sh4_amemsize,
+ &sh4_asize,&sh5_atype)== FAIL) {
+ printf("fail to translate sds attribute data type from H4 to H5. \n");
+ return FAIL;
+ }
+
+ ssds_adata = malloc(sh4_amemsize * count_ssdsadata);
+ if(ssds_adata == NULL) {
+ printf("error, cannot allocate memory for sds attribute data. \n");
+ return FAIL;
+ }
+
+ if(SDreadattr(ssds_id,i,(VOIDP)ssds_adata)== FAIL) {
+ printf("error in reading attributes of sds object. \n");
+ free(ssds_adata);
+ return FAIL;
+ }
+
+    /* if the attribute doesn't have a name, a default name is set. */
+ if(ssdsatrr_name[0] == '\0') {
+ sdsrepattr_name = trans_obj_name(DFTAG_NDG,i);
+ strcpy(ssdsatrr_name,sdsrepattr_name);
+ free(sdsrepattr_name);
+ }
+
+ /* if the sds attribute is a file attribute. */
+ if(check_gloflag == 1){
+ strcpy(sdsglo,GLOSDS);
+ strcat(ssdsatrr_name,"_");
+ strcat(ssdsatrr_name,sdsglo);
+ }
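+    /* the "_" + GLOSDS suffix marks attributes that came from hdf4 file
+       (global) attributes, so they can be distinguished from ordinary sds
+       attributes on the hdf5 side. */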
+
+ /* now do attribute-transferring.
+ 1. deal with string data type
+ 2. set attribute space.
+ 3. get attribute name, set property list. */
+
+ if (sh5_atype == H5T_STRING) {
+
+ sh5a_sid = H5Screate(H5S_SCALAR);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create attribute space for");
+ printf(" HDF4_OBJECT_TYPE SDS. \n");
+ free(ssds_adata);
+ return FAIL;
+ }
+
+ if ((sh5str_type = mkstr(count_ssdsadata,
+ H5T_STR_SPACEPAD))<0) {
+ printf("error in making string. \n");
+ H5Sclose(sh5a_sid);
+ free(ssds_adata);
+ return FAIL;
+ }
+
+ /* check this line later. */
+ if ((sh5str_memtype = mkstr(count_ssdsadata*sh4_amemsize,
+ H5T_STR_SPACEPAD))<0) {
+ printf("error in making memory string. \n");
+ H5Sclose(sh5a_sid);
+ free(ssds_adata);
+ return FAIL;
+ }
+
+ sh5a_id = H5Acreate(sh5_dset,ssdsatrr_name,sh5str_type,
+ sh5a_sid,H5P_DEFAULT);
+
+ if (sh5a_id <0) {
+ printf("failed to obtain attribute id for");
+ printf(" HDF4_OBJECT_TYPE SDS. \n");
+ H5Sclose(sh5a_sid);
+ free(ssds_adata);
+ return FAIL;
+ }
+
+ sret = H5Awrite(sh5a_id,sh5str_memtype,(void *)ssds_adata);
+
+ if (sret <0) {
+ printf("failed to write attribute data for");
+ printf(" HDF4_OBJECT_TYPE SDS. \n");
+ H5Sclose(sh5a_sid);
+ H5Aclose(sh5a_id);
+ free(ssds_adata);
+ return FAIL;
+ }
+
+ sret = H5Sclose(sh5a_sid);
+ sret = H5Aclose(sh5a_id);
+ }
+
+ else {
+
+ if(count_ssdsadata == 1) {
+
+ sh5a_sid = H5Screate(H5S_SCALAR);
+ if (sh5a_sid < 0) {
+ printf("failed to create space id. \n");
+ free(ssds_adata);
+ return FAIL;
+ }
+ }
+ else {
+ sh5dims[0] = count_ssdsadata;
+ sh5a_sid = H5Screate_simple(1,sh5dims,NULL);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create attribute space. \n");
+ free(ssds_adata);
+ return FAIL;
+ }
+ }
+ sh5a_id = H5Acreate(sh5_dset,ssdsatrr_name,sh5_atype,
+ sh5a_sid,H5P_DEFAULT);
+
+ if(sh5a_id <0) {
+ printf("failed to obtain attribute id. \n");
+ H5Sclose(sh5a_sid);
+ free(ssds_adata);
+ return FAIL;
+ }
+
+ sret = H5Awrite(sh5a_id,sh5_amemtype,(void *)ssds_adata);
+
+ if(sret <0) {
+ printf("failed to write attribute data.\n ");
+ H5Sclose(sh5a_sid);
+ H5Aclose(sh5a_id);
+ free(ssds_adata);
+ return FAIL;
+ }
+ sret = H5Sclose(sh5a_sid);
+ sret = H5Aclose(sh5a_id);
+ }
+ free(ssds_adata);
+ }
+ return SUCCEED;
+}
+/****************sdsdim_to_h5dataset*******************
+
+ * Purpose: translate dimensional scale dataset into
+ hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ sds_id: SDS identifier
+ sds_rank: number of sds dimensions
+ Out:
+ Modification:
+
+ *-------------------------------------------------------------------------
+ */
+
+int sdsdim_to_h5dataset(int32 sds_id,int32 sds_rank,hid_t sh5dset,
+ hid_t sh5_dimgroup,int32 firstdimsize) {
+
+ int32 sdsdim_id;
+ int32 sdsdim_type = 0;
+ int32 sds_dimscasize[1];
+ int32 istat;
+ int i;
+ int count_h5objref;/* this counter updates the number of h5 object reference. */
+ int count_h5attrname;/*this counter updates the number of h5 dimensional name attribute.*/
+
+ int check_gloattr;
+ int32 num_sdsdimattrs;
+ int check_sdsdim;
+ void* dim_scadata;
+
+ char sdsdim_name[MAX_NC_NAME+1];
+ char* cor_sdsdimname;
+ size_t h4dim_memsize;
+ size_t h4dim_size;
+
+ HDF_CHUNK_DEF c_def_out;
+ int32 c_flags;
+
+  /* define variables for hdf5. */
+
+ hid_t h5dim_dset;
+ hid_t h5dim_sid;
+
+ hid_t h5dim_tid;
+ hid_t h5dim_memtype;
+
+ hid_t h5dim_nameaid;
+ hid_t h5dim_namesid;
+
+ hid_t h5str_dimntype;
+
+ hid_t attr_refSpace;
+ hid_t attr_refType;
+ hid_t attribID;
+ hid_t create_plist;
+
+ hsize_t h5dimscas[1];
+ hsize_t max_h5dimscas[1];
+ hsize_t h5dim_dims[1];
+ hsize_t attr_refDims[1];
+ hsize_t h5dim_chunkdim[1];
+ hobj_ref_t dim_refdat;
+
+ hobj_ref_t* alldim_refdat;
+
+ char* h5sdsdim_name;
+ char h5sdsdim_allname[MAX_VAR_DIMS * MAX_DIM_NAME];
+ char h5newsdsdim_name[MAX_DIM_NAME];
+ char h5dimpath_name[MAX_DIM_NAME];
+ herr_t ret;
+
+
+ /*zero out memory for h5sdsdim_allname and h5dimpath_name */
+ h4toh5_ZeroMemory(h5sdsdim_allname,(MAX_VAR_DIMS*MAX_DIM_NAME)*sizeof(char));
+ h4toh5_ZeroMemory(h5dimpath_name,MAX_DIM_NAME*sizeof(char));
+
+ /*check whether the sds is created with unlimited dimension. */
+
+ if(SDgetchunkinfo(sds_id,&c_def_out, &c_flags)== FAIL) {
+ printf("error in getting chunking information. \n");
+ return FAIL;
+ }
+
+ /* initialize the dimensional number of sds dimensions, h5dim_dims
+ is used for grabbing hdf5 dimensional name list and object reference
+ list. */
+ h5dim_dims[0] = (hsize_t)sds_rank;
+ count_h5objref = 0;
+ count_h5attrname = 0;
+
+ for (i = 0; i<sds_rank;i++) {
+
+ sdsdim_id = SDgetdimid(sds_id,i);
+
+ if(sdsdim_id == FAIL) {
+ printf("error in obtaining sds dimension id. \n");
+ return FAIL;
+ }
+
+ istat = SDdiminfo(sdsdim_id,sdsdim_name,sds_dimscasize,
+ &sdsdim_type,&num_sdsdimattrs);
+
+ if (istat == FAIL) {
+ printf("sds get dim. information failed. \n");
+ SDendaccess(sdsdim_id);
+ return FAIL;
+ }
+
+ /* for unlimited sds dimension, grab the current dimensional size. */
+ if(sds_dimscasize[0] == 0) sds_dimscasize[0] = firstdimsize;
+
+ /* check whether this dimensional scale dataset is looked up. */
+ check_sdsdim = lookup_name(sdsdim_name,DIM_HASHSIZE,dim_hashtab);
+
+ strcpy(h5dimpath_name,HDF4_DIMG);
+
+    /* check whether the sds dimension scale name contains ORI_SLASH; if so, change it into CHA_SLASH.*/
+
+ cor_sdsdimname = correct_name(sdsdim_name);
+ if(cor_sdsdimname == NULL) {
+ printf("error in generating corrected sds dimensional scale name.\n");
+ SDendaccess(sdsdim_id);
+ return FAIL;
+ }
+
+ /* generating hdf5 dimensional scale name. */
+ h5sdsdim_name = get_obj_aboname(cor_sdsdimname,NULL,h5dimpath_name,NULL);
+ if (h5sdsdim_name == NULL) {
+ printf("error in getting hdf5 sds dimension name.\n");
+ SDendaccess(sdsdim_id);
+ free(cor_sdsdimname);
+ return FAIL;
+ }
+ free(cor_sdsdimname);
+
+ strcpy(&h5sdsdim_allname[count_h5attrname*MAX_DIM_NAME],h5sdsdim_name);
+
+    /* a note on fake dimension names (fakedimN): hdf4 (netcdf) appears to
+       use unique fake dimension names, "fakedim" plus a unique number, so
+       check_sdsdim will never be 1 when the dimension name is a fake name.
+       In that case count_h5objref and count_h5attrname are not increased
+       if this dimension does not have dimensional scale data, which keeps
+       the object references of the sds correct. */
+
+ /*if this dimension is not touched, get name of the dimensional scale data. */
+ if (check_sdsdim == 1){/* the dimension is touched, skip this one.*/
+ free(h5sdsdim_name);
+ SDendaccess(sdsdim_id);
+ count_h5objref = count_h5objref + 1;
+ count_h5attrname = count_h5attrname + 1;
+ continue;
+ }
+
+ if (check_sdsdim != 0) {
+ printf("error in checking sds dimensions.\n");
+ SDendaccess(sdsdim_id);
+ free(h5sdsdim_name);
+ return FAIL;
+ }
+
+ /* if this sds dimension has no dimensional scale data. skip it.*/
+ if(sdsdim_type == 0)
+ continue;
+
+
+ /* get h5 dimensional scale data type. */
+ if(h4type_to_h5type(sdsdim_type,&h5dim_memtype,&h4dim_memsize,
+ &h4dim_size,&h5dim_tid)== FAIL) {
+ printf("error in transferring sds dimension data type.\n");
+ SDendaccess(sdsdim_id);
+ free(h5sdsdim_name);
+ return FAIL;
+ }
+
+    /* a dimensional scale dataset cannot have the H5T_STRING data type,
+       so convert it back to int8. */
+
+ if (h5dim_tid == H5T_STRING) {
+ if(h5string_to_int(sdsdim_type,&h5dim_memtype,h4dim_memsize,
+ &h5dim_tid)==FAIL){
+ printf("error in translating from string to int. \n");
+ SDendaccess(sdsdim_id);
+ free(h5sdsdim_name);
+ return FAIL;
+ }
+ }
+
+ /* get the dimensional scale data. */
+ dim_scadata = malloc(h4dim_memsize*sds_dimscasize[0]);
+ istat = SDgetdimscale(sdsdim_id,(VOIDP)dim_scadata);
+
+ if (istat == FAIL) {
+ printf("sds get dim. scale failed. \n");
+ SDendaccess(sdsdim_id);
+ free(h5sdsdim_name);
+ free(dim_scadata);
+ return FAIL;
+ }
+
+ /* set dimensional scale size properly. */
+ h5dimscas[0] = sds_dimscasize[0];
+
+    /* only the first dimension gets an unlimited maximum size, and only when the SDS has an unlimited (record) dimension. */
+ if(SDisrecord(sds_id) && i == 0)
+ max_h5dimscas[0] = H5S_UNLIMITED;
+ else
+ max_h5dimscas[0] = h5dimscas[0];
+
+ h5dim_sid = H5Screate_simple(1,h5dimscas,max_h5dimscas);
+
+ if(h5dim_sid <0) {
+ printf("error in creating space. \n");
+ SDendaccess(sdsdim_id);
+ free(h5sdsdim_name);
+ free(dim_scadata);
+ return FAIL;
+ }
+
+ /* create property list, for chunked sds or unlimited dimension cases */
+
+ create_plist = H5Pcreate(H5P_DATASET_CREATE);
+
+    if(create_plist == -1) {
+      printf("failed to create property list. \n");
+      SDendaccess(sdsdim_id);
+      free(h5sdsdim_name);
+      free(dim_scadata);
+      H5Sclose(h5dim_sid);
+      return FAIL;
+    }
+
+
+ if(c_flags == HDF_NONE && SDisrecord(sds_id) && i == 0)
+ {
+ h5dim_chunkdim[0] = (hsize_t)(h5dimscas[0]/2);
+
+ if(H5Pset_chunk(create_plist,1, h5dim_chunkdim)<0) {
+ printf("failed to set up chunking information for ");
+ printf("dimensional scale property list.\n");
+ SDendaccess(sdsdim_id);
+ free(h5sdsdim_name);
+ free(dim_scadata);
+ H5Sclose(h5dim_sid);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ }
+
+ if(c_flags == HDF_CHUNK || c_flags == (HDF_CHUNK | HDF_COMP)
+ || c_flags == (HDF_CHUNK | HDF_NBIT) ){
+
+ h5dim_chunkdim[0] = (hsize_t)c_def_out.chunk_lengths[0];
+
+ if(H5Pset_chunk(create_plist,1, h5dim_chunkdim)<0) {
+ printf("failed to set up chunking information for ");
+ printf("property list.\n");
+ SDendaccess(sdsdim_id);
+ free(h5sdsdim_name);
+ free(dim_scadata);
+ H5Sclose(h5dim_sid);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+ }
+
+ /* create h5 dataset under group HDF4_DIMG*/
+ h5dim_dset = H5Dcreate(sh5_dimgroup,h5sdsdim_name,h5dim_tid,
+ h5dim_sid,create_plist);
+
+ if(h5dim_dset <0) {
+ printf("error in creating dataset. \n");
+ free(h5sdsdim_name);
+ free(dim_scadata);
+ SDendaccess(sdsdim_id);
+ H5Sclose(h5dim_sid);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+
+ if (H5Dwrite(h5dim_dset,h5dim_memtype,h5dim_sid,h5dim_sid,
+ H5P_DEFAULT,(void *)dim_scadata)<0) {
+ printf("error writing data\n");
+ free(h5sdsdim_name);
+ free(dim_scadata);
+ SDendaccess(sdsdim_id);
+ H5Sclose(h5dim_sid);
+ H5Pclose(create_plist);
+ H5Dclose(h5dim_dset);
+ return FAIL;
+ }
+
+ check_gloattr = 0;
+ if(sds_transattrs(sdsdim_id,h5dim_dset,num_sdsdimattrs,check_gloattr)
+ == FAIL){
+ printf("error in transferring attributes. \n");
+ free(h5sdsdim_name);
+ free(dim_scadata);
+ SDendaccess(sdsdim_id);
+ H5Sclose(h5dim_sid);
+ H5Dclose(h5dim_dset);
+ H5Pclose(create_plist);
+ return FAIL;
+ }
+ SDendaccess(sdsdim_id);
+ free(dim_scadata);
+ free(h5sdsdim_name);
+ ret = H5Sclose(h5dim_sid);
+ ret = H5Dclose(h5dim_dset);
+ ret = H5Pclose(create_plist);
+ count_h5objref = count_h5objref + 1;
+ count_h5attrname =count_h5attrname + 1;
+ }
+
+ /*1. create object reference number to dimensional scale dataset.
+ 2. store absolute name of dimensional name into
+ dimensional list. */
+
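+  /* the DIMSCALE attribute attached to the converted dataset holds one
+     object reference for each dimensional scale dataset associated with
+     this sds; each reference points to the corresponding dataset under
+     the dimensional scale group. */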
+ if ( count_h5objref != 0) {
+
+ h5dim_dims[0] = count_h5objref;
+ attr_refDims[0] = count_h5objref;
+ attr_refSpace = H5Screate_simple(1,attr_refDims,NULL);
+ attr_refType = H5Tcopy(H5T_STD_REF_OBJ);
+ alldim_refdat = calloc((size_t)count_h5objref,sizeof(hobj_ref_t));
+
+ if(alldim_refdat == NULL) {
+ printf("error in allocating memory. \n");
+ H5Sclose(attr_refSpace);
+ H5Tclose(attr_refType);
+ return FAIL;
+ }
+
+ for(i=0;i<count_h5objref;i++){
+ h4toh5_ZeroMemory(h5newsdsdim_name,MAX_DIM_NAME);
+ strcpy(h5newsdsdim_name,&h5sdsdim_allname[i*MAX_DIM_NAME]);
+
+ ret = H5Rcreate(&dim_refdat,sh5_dimgroup,h5newsdsdim_name,
+ H5R_OBJECT,-1);
+ if(ret <0) {
+ free(alldim_refdat);
+ H5Sclose(attr_refSpace);
+ H5Tclose(attr_refType);
+ printf("error in generating H5 reference. \n");
+ return FAIL;
+ }
+ alldim_refdat[i] = dim_refdat;
+
+ }
+
+ attribID = H5Acreate(sh5dset,DIMSCALE,attr_refType,attr_refSpace,
+ H5P_DEFAULT);
+ if(attribID < 0) {
+ free(alldim_refdat);
+ H5Sclose(attr_refSpace);
+ H5Tclose(attr_refType);
+ printf("error in generating H5 attribute ID. \n");
+ return FAIL;
+ }
+
+ ret = H5Awrite(attribID,attr_refType,(void *)alldim_refdat);
+
+ H5Sclose(attr_refSpace);
+ H5Tclose(attr_refType);
+ H5Aclose(attribID);
+ free(alldim_refdat);
+ }
+
+ if(count_h5attrname!= 0) {
+
+ h5dim_namesid = H5Screate_simple(1,h5dim_dims,NULL);
+
+ if(h5dim_namesid <0) {
+ printf("error in creating sds dimensionlist space.\n");
+ return FAIL;
+ }
+
+ h5str_dimntype = mkstr(MAX_DIM_NAME,H5T_STR_SPACEPAD);
+ if(h5str_dimntype < 0) {
+ H5Sclose(h5dim_namesid);
+ printf("error in generating H5T_STRING type.\n");
+ return FAIL;
+ }
+
+ h5dim_nameaid = H5Acreate(sh5dset,HDF4_DIMENSION_LIST,h5str_dimntype,
+ h5dim_namesid,H5P_DEFAULT);
+
+ if(h5dim_nameaid <0) {
+ H5Sclose(h5dim_namesid);
+ printf("error in creating sds dimensionlist id.\n");
+ return FAIL;
+ }
+
+ ret = H5Awrite(h5dim_nameaid,h5str_dimntype,h5sdsdim_allname);
+
+ if(ret < 0) {
+ H5Sclose(h5dim_namesid);
+ H5Aclose(h5dim_nameaid);
+ printf("error in writing sds dimensionlist. \n");
+ return FAIL;
+ }
+
+ ret = H5Sclose(h5dim_namesid);
+ ret = H5Aclose(h5dim_nameaid);
+
+ }
+ return SUCCEED;
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/h4toh5/h4toh5util.c b/tools/h4toh5/h4toh5util.c
new file mode 100644
index 0000000..879e8a9
--- /dev/null
+++ b/tools/h4toh5/h4toh5util.c
@@ -0,0 +1,1633 @@
+/*-------------------------------------------------------------------------
+ *
+ * Copyright (C) 2000 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/******************************************************************************
+
+ Description:
+
+1. converter
+
+See HDF4 to HDF5 mapping specification at
+(http://hdf.ncsa.uiuc.edu/HDF5/papers/h4toh5) for the default mapping
+from HDF4 object to HDF5 object.
+
+The whole converter includes 10 files, h4toh5util.h, h4toh5main.h, h4toh5util.c, h4toh5main.c, h4toh5sds.c, h4toh5image.c,h4toh5vdata.c,h4toh5vgroup.c,h4toh5pal.c and h4toh5anno.c.
+
+2. this file
+
+includes all utility routines that are shared by the other converter files.
+
+Author: Kent Yang(ymuqun@ncsa.uiuc.edu)
+
+
+*****************************************************************************/
+
+
+#include "h4toh5util.h"
+
+
+/* Function h4toh5_ZeroMemory
+ Purpose: Zero out memory
+ return: None
+ In: size_t n(DWORD in windows)
+ void* s(PVOID in windows)
+*/
+void h4toh5_ZeroMemory(void*s,size_t n) {
+#ifdef WIN32
+ ZeroMemory(s,n);
+#else
+ bzero(s,n);
+#endif
+}
+
+/*-------------------------------------------------------------------------
+ * Function: h5string_to_int
+ *
+ * Purpose: This function converts H5T_STRING into an integer type.
+ This is a correction routine for the case where the user defines
+ the numerical datatype int8 as DFNT_CHAR8 or DFNT_UCHAR8.
+
+ * Errors: will return an error message to the interface
+ * Return: FAIL if failed, SUCCEED if successful
+ *
+ * In : h4type: HDF4 datatype
+ h4memsize: the real memory size of h4type
+
+ * Out: h5memtype: pointer to the HDF5 memory datatype (the real
+ datatype stored in memory)
+ h5type: pointer to the HDF5 file datatype (the hdf5
+ type stored on disk).
+ *
+ *-------------------------------------------------------------------------
+ */
+
+int h5string_to_int(const int32 h4type, hid_t* h5memtype,
+ const size_t h4memsize,hid_t* h5type) {
+
+ switch(h4type) {
+
+ case DFNT_CHAR8:
+
+ *h5type = H5T_STD_I8BE;
+ if (h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_SCHAR;
+ else if(h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_UCHAR8:
+
+ *h5type = H5T_STD_U8BE;
+ if (h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+ }
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: h4type_to_h5type
+ *
+ * Purpose: this function converts an HDF4 datatype into an HDF5 datatype.
+ The conversion covers the file-to-file datatype and datasize
+ as well as the file-to-memory datatype and datasize.
+ Check the mapping document for details.
+
+ * Errors: will return an error message to the interface.
+ * Return: FAIL on failure, SUCCEED otherwise.
+ *
+ * In : h4type: HDF4 datatype.
+ * Out: h4size: the file(disk) size of h4type.
+ h4memsize: the real memory size of h4type.
+ * h5memtype: pointer to the HDF5 memory datatype (the real
+ type stored in memory).
+ h5type: pointer to the HDF5 file datatype (the hdf5
+ type that is stored on disk).
+ *
+ *
+ *-------------------------------------------------------------------------
+ */
+int h4type_to_h5type(const int32 h4type, hid_t* h5memtype,
+ size_t* h4memsize,size_t* h4size, hid_t *h5type)
+{
+
+ switch (h4type) {
+
+ case DFNT_CHAR8:
+
+ *h4size = 1;
+ *h4memsize = sizeof(int8);
+ /* assume DFNT_CHAR8 C type character. */
+ *h5memtype = H5T_STRING;
+ *h5type = H5T_STRING;
+ break;
+
+ case DFNT_UCHAR8:
+
+ *h4size = 1;
+ *h4memsize = sizeof(int8);
+ *h5memtype = H5T_STRING;
+ *h5type = H5T_STRING;
+ break;
+
+ case DFNT_INT8:
+
+ *h4size = 1;
+ *h5type = H5T_STD_I8BE;
+ *h4memsize = sizeof(int8);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_SCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_UINT8:
+
+ *h4size =1;
+ *h5type = H5T_STD_U8BE;
+ *h4memsize = sizeof(int8);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+
+ case DFNT_NINT8:
+ printf("warning, Native HDF datatype is encountered");
+ printf(" the converting result may not be correct.\n");
+ *h4size = 1;
+ *h5type = H5T_NATIVE_SCHAR;
+ *h4memsize = sizeof(int8);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_SCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_NUINT8:
+ printf("warning, Native HDF datatype is encountered");
+ printf(" the converting result may not be correct.\n");
+ *h4size = 1;
+ *h5type = H5T_NATIVE_UCHAR;
+ *h4memsize = sizeof(int8);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_LINT8:
+ *h4size = 1;
+ *h5type = H5T_STD_I8LE;
+ *h4memsize = sizeof(int8);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_LUINT8:
+ *h4size = 1;
+ *h5type = H5T_STD_U8LE;
+ *h4memsize = sizeof(int8);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+
+ case DFNT_INT16:
+ *h4size = 2;
+ *h5type = H5T_STD_I16BE;
+ *h4memsize = sizeof(int16);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_CHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_UINT16:
+ *h4size = 2;
+ *h5type = H5T_STD_U16BE;
+ *h4memsize = sizeof(int16);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+
+ case DFNT_NINT16:
+ printf("warning, Native HDF datatype is encountered");
+ printf(" the converting result may not be correct.\n");
+ *h4size = 2;
+ *h5type = H5T_NATIVE_SHORT;
+ *h4memsize = sizeof(int16);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_CHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_NUINT16:
+ printf("warning, Native HDF datatype is encountered");
+ printf(" the converting result may not be correct.\n");
+ *h4size = 2;
+ *h5type = H5T_NATIVE_USHORT;
+ *h4memsize = sizeof(int16);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+
+ case DFNT_LINT16:
+ *h4size = 2;
+ *h5type = H5T_STD_I16LE;
+ *h4memsize = sizeof(int16);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_LUINT16:
+ *h4size = 2;
+ *h5type = H5T_STD_U16LE;
+ *h4memsize = sizeof(int16);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+
+ case DFNT_INT32:
+ *h4size = 4;
+ *h5type = H5T_STD_I32BE;
+ *h4memsize = sizeof(int32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_CHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_UINT32:
+ *h4size = 4;
+ *h5type = H5T_STD_U32BE;
+ *h4memsize = sizeof(int32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+
+ case DFNT_NINT32:
+ printf("warning, Native HDF datatype is encountered");
+ printf(" the converting result may not be correct.\n");
+ *h4size = 4;
+ *h5type = H5T_NATIVE_INT;
+ *h4memsize = sizeof(int32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_CHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_NUINT32:
+ printf("warning, Native HDF datatype is encountered");
+ printf(" the converting results may not be correct.\n");
+ *h4size =4;
+ *h5type = H5T_NATIVE_UINT;
+ *h4memsize = sizeof(int32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+
+ case DFNT_LINT32:
+ *h4size =4;
+ *h5type = H5T_STD_I32LE;
+ *h4memsize = sizeof(int32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_CHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_SHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_INT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_LONG;
+ else return FAIL;
+ break;
+
+ case DFNT_LUINT32:
+ *h4size =4;
+ *h5type = H5T_STD_U32LE;
+ *h4memsize = sizeof(int32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_CHAR))
+ *h5memtype = H5T_NATIVE_UCHAR;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_SHORT))
+ *h5memtype = H5T_NATIVE_USHORT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_INT))
+ *h5memtype = H5T_NATIVE_UINT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_LONG))
+ *h5memtype = H5T_NATIVE_ULONG;
+ else return FAIL;
+ break;
+
+ case DFNT_FLOAT32:
+ *h4size =4;
+ *h5type = H5T_IEEE_F32BE;
+ *h4memsize = sizeof(float32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_FLOAT))
+ *h5memtype = H5T_NATIVE_FLOAT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_DOUBLE))
+ *h5memtype = H5T_NATIVE_DOUBLE;
+ else return FAIL;
+ break;
+
+ case DFNT_FLOAT64:
+ *h4size = 8;
+ *h5type = H5T_IEEE_F64BE;
+ *h4memsize = sizeof(float64);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_FLOAT))
+ *h5memtype = H5T_NATIVE_FLOAT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_DOUBLE))
+ *h5memtype = H5T_NATIVE_DOUBLE;
+ else return FAIL;
+ break;
+
+ case DFNT_NFLOAT32:
+ printf("warning, Native HDF datatype is encountered");
+ printf(" the converting results may not be correct.\n");
+ *h4size = 4;
+ *h5type = H5T_NATIVE_FLOAT;
+ *h4memsize = sizeof(float32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_FLOAT))
+ *h5memtype = H5T_NATIVE_FLOAT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_DOUBLE))
+ *h5memtype = H5T_NATIVE_DOUBLE;
+ else return FAIL;
+ break;
+
+ case DFNT_NFLOAT64:
+ printf("warning, Native HDF datatype is encountered");
+ printf(" the converting result may not be correct.\n");
+ *h4size = 8;
+ *h5type = H5T_NATIVE_DOUBLE;
+ *h4memsize = sizeof(float64);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_FLOAT))
+ *h5memtype = H5T_NATIVE_FLOAT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_DOUBLE))
+ *h5memtype = H5T_NATIVE_DOUBLE;
+ else return FAIL;
+ break;
+
+ case DFNT_LFLOAT32:
+ *h4size = 4;
+ *h5type = H5T_IEEE_F32LE;
+ *h4memsize = sizeof(float32);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_FLOAT))
+ *h5memtype = H5T_NATIVE_FLOAT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_DOUBLE))
+ *h5memtype = H5T_NATIVE_DOUBLE;
+ else return FAIL;
+ break;
+
+ case DFNT_LFLOAT64:
+ *h4size = 8;
+ *h5type = H5T_IEEE_F64LE;
+ *h4memsize = sizeof(float64);
+ if(*h4memsize == H5Tget_size(H5T_NATIVE_FLOAT))
+ *h5memtype = H5T_NATIVE_FLOAT;
+ else if(*h4memsize == H5Tget_size(H5T_NATIVE_DOUBLE))
+ *h5memtype = H5T_NATIVE_DOUBLE;
+ else return FAIL;
+ break;
+
+ default:
+ return FAIL;
+ }
+ return SUCCEED;
+}
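+
+/* A minimal usage sketch (not part of the converter; the caller and the
+   variable names are hypothetical): mapping one HDF4 datatype to its HDF5
+   file and memory datatypes before creating a dataset.
+
+     hid_t  h5memtype, h5type;
+     size_t h4memsize, h4size;
+
+     if (h4type_to_h5type(DFNT_INT32, &h5memtype, &h4memsize,
+                          &h4size, &h5type) == FAIL)
+         printf("unsupported HDF4 datatype.\n");
+     else
+         printf("file size %u, memory size %u\n",
+                (unsigned)h4size, (unsigned)h4memsize);
+*/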
+/*-------------------------------------------------------------------------
+ * Function: conv_int_str
+ *
+ * Purpose: this function converts an unsigned number (a reference
+ value, at most 65535) into its decimal string format.
+ * Return: SUCCEED if successful, FAIL if failed.
+ *
+ * In : num: an unsigned number that is not greater than 65535.
+
+ * Out: str_num: character string format of the number.
+
+ *
+ *
+ *-------------------------------------------------------------------------
+ */
+
+int conv_int_str(uint16 num, char* str_num) {
+
+ /* the maximum reference number is 65535. */
+
+
+ if(str_num == NULL) {
+ printf(" memory for str_num should be allocated.\n");
+ return FAIL;
+ }
+
+ /* Adding this line will cause problems, investigating this later.
+h4toh5_ZeroMemory(str_num,strlen(str_num)+1);*/
+
+ sprintf(str_num,"%d",num);
+ return SUCCEED;
+}
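+
+/* A short sketch (hypothetical caller; obj_ref and objname are illustrative):
+   converting a reference number into its string form before appending it to
+   an object name. MAXREF_LENGTH is the buffer size used elsewhere in the
+   converter for such strings.
+
+     char refstr[MAXREF_LENGTH];
+
+     h4toh5_ZeroMemory(refstr, MAXREF_LENGTH);
+     if (conv_int_str(obj_ref, refstr) == SUCCEED)
+         strcat(objname, refstr);
+*/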
+
+
+/*-------------------------------------------------------------------------
+ * Function: lookup
+ *
+ * Purpose: this function will use objref as a key to check whether
+ * the current object is touched.
+
+ * Return: 1, the object is found. 0,the object is not found.
+ -1, the table doesn't exist.
+ *
+ * In : objref: reference number of the current object.
+ SIZE: the hashtable SIZE.
+ hashtab: pointer to the hash table.
+
+ *-------------------------------------------------------------------------
+ */
+
+int lookup(int objref,int SIZE,struct table*hashtab) {
+
+ struct table *np;
+ if(hashtab == NULL) {
+ printf("the table doesn't exist. \n");
+ return -1;
+ }
+ np = hashtab+objref%SIZE;
+
+ for (np = hashtab+objref%SIZE; np!=NULL;np=np->next){
+ if (np->ref == objref){
+ return 1;
+ }
+ }
+ return 0;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: init_tab
+ *
+ * Purpose: this function will initialize the hash table.
+ *
+
+ * Return: SUCCEED, table is initialized. FAIL, otherwise.
+ *
+ * In :
+ SIZE: the hashtable SIZE.
+ hashtab: pointer to the hash table.
+
+ *-------------------------------------------------------------------------
+ */
+
+int init_tab(int SIZE,struct table *hashtab) {
+
+ int i;
+ if(hashtab == NULL) {
+ printf("memory for hashing table is not allocated.\n");
+ return FAIL;
+ }
+ for (i = 0;i < SIZE; i++) {
+ (hashtab+i%SIZE)->ref = -1;
+ (hashtab+i%SIZE)->next = NULL;
+ (hashtab+i%SIZE)->name = NULL;
+ }
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: init_nametab
+ *
+ * Purpose: this function will initialize the name hash table.
+ *
+
+ * Return: SUCCEED, table is initialized. FAIL, otherwise.
+ *
+ * In :
+ SIZE: the hashtable SIZE.
+ name_hashtab: pointer to the hash table.
+
+ *-------------------------------------------------------------------------
+ */
+int init_nametab(int SIZE, struct name_table * name_hashtab) {
+
+ int i;
+
+ if(name_hashtab == NULL) {
+ printf("cannot allocate memory for name hashing table.\n");
+ return FAIL;
+ }
+ for (i=0;i < SIZE; i++) {
+ (name_hashtab+i%SIZE)->name = NULL;
+ (name_hashtab+i%SIZE)->next = NULL;
+ }
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: get_name
+ *
+ * Purpose: obtain the name of the object
+ *
+ * Return: the object name
+ *
+ * In : objref: reference number of the current object.
+ SIZE: the hashtable SIZE.
+ hashtab: pointer to the hash table
+ pcheck_get: a flag to check errors
+
+ *-------------------------------------------------------------------------
+ */
+
+char* get_name(int objref,int SIZE,struct table*hashtab, int* pcheck_get) {
+
+ struct table *np;
+ char* tempname;
+
+ np = hashtab+objref%SIZE;
+
+ for (np = hashtab+objref%SIZE; np!=NULL;np=np->next){
+
+ if (np->ref==objref){
+
+ if (np->name == NULL) {
+ *pcheck_get = -1;
+ return NULL;
+ }
+
+ else {
+ tempname = malloc(strlen(np->name)+1);
+ if(tempname == NULL) {
+ *pcheck_get = -2;
+ return NULL;
+ }
+ strcpy(tempname,np->name);
+ return tempname;
+ }
+ }
+ }
+
+ *pcheck_get = 0;
+ return NULL;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: set_name
+ *
+ * Purpose: store the name of the object into the hash table
+ *
+ * Return: SUCCEED: the name is either set before or set in this routine
+ * FAIL: the name is not set properly
+ *
+ * In : objref: reference number of the current object
+ SIZE: the hashtable SIZE
+ hashtab: hash table
+ namein: object name
+
+ *-------------------------------------------------------------------------
+ */
+
+
+int set_name(int objref,int SIZE,struct table*hashtab, char* namein) {
+
+ struct table *np;
+ struct table *temptr;
+
+ if(namein == NULL) {
+ printf("error in inputting the name into the table.\n");
+ return FAIL;
+ }
+
+ temptr = malloc(sizeof(struct table));
+ if(temptr == NULL) {
+ printf("not enough memory to be allocated. \n");
+ return FAIL;
+ }
+
+ for (np = hashtab+objref%SIZE; np!= NULL;np = np->next){
+ if (np->ref==objref){
+ /* the name is set already; free the unused node and return. */
+ free(temptr);
+ return SUCCEED;
+ }
+ if (np->next == NULL) {
+ np->next = temptr;
+ temptr->ref = objref;
+ temptr->next = NULL;
+ temptr->name = malloc(strlen(namein)+1);
+ if(temptr->name == NULL) {
+ printf("error in allocating memory. \n");
+ return FAIL;
+ }
+ strcpy(temptr->name,namein);
+ return SUCCEED;
+ }
+ }
+ return SUCCEED;
+}
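+
+/* A minimal sketch of the reference-table workflow (hypothetical caller;
+   table_size and obj_ref are illustrative). Note that get_name returns a
+   malloc'ed copy the caller must free, and freetable frees the table itself,
+   so the table must be heap-allocated.
+
+     struct table *tab = malloc(table_size * sizeof(struct table));
+     int   check = 0;
+     char *name  = NULL;
+
+     init_tab(table_size, tab);
+     set_name(obj_ref, table_size, tab, "/dataset1");
+     if (lookup(obj_ref, table_size, tab) == 1)
+         name = get_name(obj_ref, table_size, tab, &check);
+     if (name != NULL) free(name);
+     freetable(table_size, tab);
+*/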
+
+
+/*-------------------------------------------------------------------------
+ * Function: lookup_name
+ *
+ * Purpose: 1. look up whether the same name is used for different objects
+ 2. then update the table
+ *
+ * Return: 1, if the name is in the name hash table.
+ 0, if the name is to be added into the name table.
+ -1, otherwise.
+ *
+ * In :
+ size: the hashtable SIZE.
+ nametab: name hash table
+ name: the name to be looked up
+
+ *-------------------------------------------------------------------------
+ */
+
+int lookup_name(char* name, int size,struct name_table *nametab) {
+
+ /* temporary pointer of the name table that points to the beginning
+ address of the current bucket.*/
+ struct name_table *np;
+
+ /* temporary pointer of the added name table.*/
+ struct name_table *temptr;
+
+ if(name == NULL) {
+ printf("the name to be looked up is NULL.\n");
+ return -1;
+ }
+
+ if(nametab == NULL) {
+ printf("no name_table for this category of objects.\n");
+ return -1;
+ }
+
+ temptr = malloc(sizeof(struct name_table));
+ if(temptr == NULL) {
+ printf("not enough memory to be allocated. \n");
+ return -1;
+ }
+
+ temptr->name = malloc(strlen(name)+1);
+ if(temptr->name == NULL) {
+ printf("not enough memory to be allocated to table name.\n");
+ free(temptr);
+ return -1;
+ }
+
+ /* look through the linked list starting from the current bucket.
+ If the name is found, return 1, otherwise, return 0
+ after inserting the new bucket. */
+
+ for(np = nametab+hash_fun(name,size); np!= NULL;np = np->next) {
+ if(np->name == NULL) {
+ np->name = malloc(strlen(name)+1);
+ if(np->name == NULL) {
+ printf("cannot allocate memory for object name.\n");
+ return -1;
+ }
+ strcpy(np->name,name);
+ free(temptr->name);
+ free(temptr);
+ return 0;
+ }
+ if(strcmp(name,np->name)==0){
+ free(temptr->name);
+ free(temptr);
+ return 1;
+ }
+ if (np->next == NULL) {
+ np->next = temptr;
+ temptr->next = NULL;
+ strcpy(temptr->name,name);
+ return 0;
+ }
+ }
+ return -1;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: hash_fun
+ *
+ * Purpose: to get the hash value based on the key
+ *
+ * Return: the bucket index in the hash table
+ *
+ * In : name: object name
+ size: the hashtable size.
+
+ *-------------------------------------------------------------------------
+ */
+int hash_fun(char *name,int size) {
+
+int hashval;
+
+ for (hashval = 0;*name !='\0';)
+ hashval += *name++;
+ return(hashval%size);
+
+}
+
+/*-------------------------------------------------------------------------
+ * Function: freenametable
+ *
+ * Purpose: free the memory of hash table
+ *
+ * Return: 0
+ *
+ * In :
+ SIZE: the hashtable SIZE.
+ nametab: hash table of the name
+
+ *-------------------------------------------------------------------------
+ */
+int freenametable(int SIZE,struct name_table *nametab) {
+
+ struct name_table *np,*temptr,*temptr1;
+ int i;
+
+ if(nametab == NULL) return 0;
+ /* we first free the additional linked items of the hashtable,
+ and then free the whole hash table. */
+ for (i = 0;i < SIZE; i++) {
+ np = nametab+i;
+ temptr1 = np->next;
+ while(temptr1 != NULL) {
+ temptr = temptr1;
+ temptr1 = temptr1->next;
+ free(temptr->name);
+ free(temptr);
+ }
+ if(np->name !=NULL) free(np->name);
+ }
+ free(nametab);
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: freetable
+ *
+ * Purpose: free the memory of hash table
+ *
+ * Return: 0
+ *
+ * In :
+ SIZE: the hashtable SIZE.
+ nametab: hash table
+
+ *-------------------------------------------------------------------------
+ */
+int freetable(int SIZE,struct table *hashtab) {
+
+ struct table *np,*temptr,*temptr1;
+ int i;
+ if(hashtab == NULL) return 0;
+
+ /* we first free the additional linked items of the hashtable,
+ and then free the whole hash table. */
+ for (i =0;i < SIZE; i++) {
+ np = hashtab+i;
+ temptr1 = np->next;
+ while(temptr1 != NULL) {
+ temptr = temptr1;
+ temptr1 = temptr1->next;
+ free(temptr->name);
+ free(temptr);
+ }
+ if(np->name != NULL) free(np->name);
+ }
+
+ free(hashtab);
+ return 0;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: mkstr
+ *
+ * Purpose: make hdf5 string type
+ *
+ * Return: type
+ *
+ * In :
+ size: string size
+ pad: string padding (an H5T_str_t value)
+
+ *-------------------------------------------------------------------------
+ */
+
+hid_t mkstr(int size, H5T_str_t pad) {
+
+ hid_t type;
+
+ if((type=H5Tcopy(H5T_C_S1))<0) return -1;
+ if((H5Tset_size(type,(size_t)size))<0) return -1;
+ if((H5Tset_strpad(type,pad))<0) return -1;
+
+ return type;
+}
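+
+/* A small sketch (hypothetical names): building a fixed-size, space-padded
+   HDF5 string datatype for a 32-character attribute value.
+
+     hid_t strtype = mkstr(32, H5T_STR_SPACEPAD);
+
+     if (strtype < 0)
+         printf("failed to make string type.\n");
+     else {
+         ... pass strtype to H5Acreate/H5Awrite ...
+         H5Tclose(strtype);
+     }
+*/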
+
+/*-------------------------------------------------------------------------
+ * Function: h4_transnumattr
+ *
+ * Purpose: translate reference number into hdf5 attribute
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ h5g: hdf5 group id
+ refname: reference name
+ group_ref: reference number
+
+ *-------------------------------------------------------------------------
+ */
+int h4_transnumattr(hid_t h5g,const char *refname,uint16 group_ref) {
+
+ hid_t h5memtype=(-1);
+ hid_t h5a_id;
+ hid_t h5a_sid;
+ herr_t ret;
+
+ h5a_sid = H5Screate(H5S_SCALAR);
+
+ if (h5a_sid < 0) {
+ fprintf(stderr,"failed to create attribute space for HDF4_REF_NUM. \n");
+ return FAIL;
+ }
+
+ h5a_id = H5Acreate(h5g,refname,H5T_STD_U16BE,h5a_sid,H5P_DEFAULT);
+
+ if(h5a_id <0) {
+ fprintf(stderr,"failed to obtain attribute id for HDF4_REF_NUM. \n");
+ H5Sclose(h5a_sid);
+ return FAIL;
+ }
+
+ if(H5Tget_size(H5T_NATIVE_CHAR)== sizeof(uint16))
+ h5memtype = H5T_NATIVE_UCHAR;
+ else if(H5Tget_size(H5T_NATIVE_SHORT)== sizeof(uint16))
+ h5memtype = H5T_NATIVE_USHORT;
+ else if(H5Tget_size(H5T_NATIVE_INT) == sizeof(uint16))
+ h5memtype = H5T_NATIVE_UINT;
+ else if(H5Tget_size(H5T_NATIVE_LONG)== sizeof(uint16))
+ h5memtype = H5T_NATIVE_ULONG;
+
+ ret = H5Awrite(h5a_id,h5memtype,(void *)&group_ref);
+
+ if(ret <0) {
+ printf("failed to obtain attribute.\n ");
+ H5Sclose(h5a_sid);
+ H5Aclose(h5a_id);
+ return FAIL;
+ }
+
+ ret = H5Sclose(h5a_sid);
+ ret = H5Aclose(h5a_id);
+ return SUCCEED;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: h4_transpredattrs
+ *
+ * Purpose: translate predefined attributes into hdf5 attribute
+ * predefined attributes include HDF4 OBJECT TYPE,
+ HDF4 OBJECT NAME, HDF4 CLASS etc. They are all in
+ H5T_STRING format.
+
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ h5g: group id
+ attrname: attribute name
+ data: attribute data
+
+ *-------------------------------------------------------------------------
+ */
+int h4_transpredattrs(hid_t h5g,const char *attrname,char*data){
+
+ hsize_t h5str_size;
+ hid_t h5a_id;
+ hid_t h5a_sid;
+ hid_t h5str_type;
+ herr_t ret;
+
+ if(data == NULL) {
+ printf("attribute data is not available.\n");
+ return FAIL;
+ }
+
+ h5str_size = strlen(data);
+
+ if ((h5str_type = mkstr(h5str_size,H5T_STR_SPACEPAD))<0) {
+ printf("error in making string for predefined ATTR. \n");
+ return FAIL;
+ }
+
+ h5a_sid = H5Screate(H5S_SCALAR);
+
+ if (h5a_sid < 0) {
+ printf("failed to create attribute space for HDF4_OBJECT. \n");
+ return FAIL;
+ }
+
+ h5a_id = H5Acreate(h5g,attrname,h5str_type,h5a_sid,H5P_DEFAULT);
+
+ if(h5a_id <0) {
+ fprintf(stderr,"failed to obtain attribute id for HDF4_OBJECT. \n");
+ H5Sclose(h5a_sid);
+ return FAIL;
+ }
+
+ ret = H5Awrite(h5a_id,h5str_type,(void *)data);
+
+ if(ret <0) {
+ fprintf(stderr,"failed to obtain attribute.\n ");
+ H5Aclose(h5a_id);
+ H5Sclose(h5a_sid);
+ return FAIL;
+ }
+ ret = H5Sclose(h5a_sid);
+ ret = H5Aclose(h5a_id);
+ return SUCCEED;
+}
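+
+/* A minimal sketch (h5grp and obj_ref are hypothetical): attaching the
+   predefined HDF4 attributes to a converted HDF5 object, as the SDS, image,
+   vgroup and vdata routines in this converter do.
+
+     if (h4_transpredattrs(h5grp, HDF4_OBJECT_NAME, "mydata") == FAIL ||
+         h4_transnumattr(h5grp, HDF4_REF_NUM, obj_ref) == FAIL)
+         printf("failed to attach predefined attributes.\n");
+*/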
+
+/*-------------------------------------------------------------------------
+ * Function: vg_transattrs
+ *
+ * Purpose: translate predefined vgroup attributes into hdf5 attribute
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ h4vg: hdf4 vgroup id
+ h5g: hdf5 group id
+
+ *-------------------------------------------------------------------------
+ */
+
+int vg_transattrs(int32 h4vg,hid_t h5g) {
+
+ /* define variables for hdf4. */
+ char vgroup_name[VGNAMELENMAX];
+ char vgroup_class[VGNAMELENMAX];
+ char vgattr_name[MAX_NC_NAME];
+ char obtype[MAX_NC_NAME];
+
+ int32 vgroup_cref;
+ int32 num_vgattr;
+ int32 count_vgattr;
+ int32 vg_atype;
+ int32 attr_size;
+
+ size_t sh4_size;
+ size_t sh4_amemsize;
+
+ /* define variables for hdf5. */
+ hid_t sh5a_sid;
+ hid_t sh5a_id;
+ hid_t sh5_atype;
+ hid_t sh5_amemtype;
+ hid_t sh5str_type;
+ hid_t sh5str_memtype;
+ hsize_t sh5dims[MAX_VAR_DIMS];
+ void* vg_adata;
+ herr_t sret;
+ int i;
+
+ num_vgattr = Vnattrs(h4vg);
+
+ for (i = 0;i <num_vgattr;i++) {
+
+ if (Vattrinfo(h4vg,i,vgattr_name,&vg_atype,
+ &count_vgattr,&attr_size)== FAIL){
+ printf("unable to obtain attribute information. \n");
+ return FAIL;
+ }
+
+ /* convert attribute datatype into the corresponding hdf5 datatype */
+
+ if(h4type_to_h5type(vg_atype,&sh5_amemtype,&sh4_amemsize,
+ &sh4_size,&sh5_atype)==FAIL){
+ printf("unable to do data type converting.\n");
+ return FAIL;
+ }
+
+ vg_adata = malloc(sh4_amemsize*count_vgattr);
+
+ if(vg_adata == NULL) {
+ printf("error in allocating vgroup attribute data. \n");
+ return FAIL;
+ }
+
+ if(Vgetattr(h4vg,i,(VOIDP)vg_adata)==FAIL){
+ printf("unable to get attribute.\n");
+ free(vg_adata);
+ return FAIL;
+ }
+
+ /* if the attribute doesn't have a name, a default name is set. */
+ if(vgattr_name[0] == '\0') {
+ char* tmpname = trans_obj_name(DFTAG_VG,i);
+ strcpy(vgattr_name,tmpname);
+ free(tmpname);
+ }
+
+ /* now do attribute-transferring.
+ 1. deal with string data type
+ 2. set attribute space
+ 3. get attribute name, set property list. */
+
+ if (sh5_atype == H5T_STRING ) {
+
+ sh5a_sid = H5Screate(H5S_SCALAR);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create attribute space ");
+ printf("for HDF4_OBJECT_TYPE SDS. \n");
+ free(vg_adata);
+ return FAIL;
+ }
+
+ if ((sh5str_type = mkstr(count_vgattr*sh4_size,H5T_STR_SPACEPAD))<0) {
+ fprintf(stderr,"error in making string for VGROUP ATTR. \n");
+ free(vg_adata);
+ return FAIL;
+ }
+
+
+ if ((sh5str_memtype = mkstr(count_vgattr*sh4_amemsize,
+ H5T_STR_SPACEPAD))<0){
+ fprintf(stderr,"error in making memory string for VGROUP ATTR. \n");
+ free(vg_adata);
+ return FAIL;
+ }
+
+ sh5a_id = H5Acreate(h5g,vgattr_name,sh5str_type,sh5a_sid,H5P_DEFAULT);
+
+ if (sh5a_id <0) {
+ printf("failed to obtain attribute id");
+ printf(" for HDF4_OBJECT_TYPE VGROUP. \n");
+ free(vg_adata);
+ return FAIL;
+ }
+ sret = H5Awrite(sh5a_id,sh5str_memtype,(void *)vg_adata);
+
+ if (sret <0) {
+ fprintf(stderr,"failed to obtain attribute.\n ");
+ free(vg_adata);
+ return FAIL;
+ }
+ sret = H5Sclose(sh5a_sid);
+ sret = H5Aclose(sh5a_id);
+ }
+
+ else {
+
+ if (count_vgattr == 1) {
+ sh5a_sid = H5Screate(H5S_SCALAR);
+ if (sh5a_sid < 0) {
+ fprintf(stderr,"failed to create space id. \n");
+ free(vg_adata);
+ return FAIL;
+ }
+ }
+
+ else {
+
+ sh5dims[0] = count_vgattr;
+ sh5a_sid = H5Screate_simple(1,sh5dims,NULL);
+ if (sh5a_sid < 0) {
+ fprintf(stderr,"failed to create vgroup attribute space. \n");
+ free(vg_adata);
+ return FAIL;
+ }
+ }
+
+ sh5a_id = H5Acreate(h5g,vgattr_name,sh5_atype,sh5a_sid,H5P_DEFAULT);
+
+ if(sh5a_id <0) {
+ fprintf(stderr,"failed to obtain attribute id. \n");
+ free(vg_adata);
+ H5Sclose(sh5a_sid);
+ return FAIL;
+ }
+ sret = H5Awrite(sh5a_id,sh5_amemtype,(void *)vg_adata);
+
+ if(sret < 0) {
+ fprintf(stderr,"failed to obtain attribute.\n ");
+ free(vg_adata);
+ H5Sclose(sh5a_sid);
+ H5Aclose(sh5a_id);
+ return FAIL;
+ }
+
+ sret = H5Sclose(sh5a_sid);
+ sret = H5Aclose(sh5a_id);
+ }
+ free(vg_adata);
+ }
+
+ /*** check this line later. ***/
+ strcpy(obtype,VGROUPLABEL);
+ vgroup_class[0] = '\0';
+
+ /* ignore CDF0.0 and RIG0.0 vgroups. */
+ if(Vgetclass(h4vg,vgroup_class) == SUCCEED){
+ if(vgroup_class[0] != '\0') {
+ if(!strcmp(vgroup_class,_HDF_CDF)||
+ !strcmp(vgroup_class,GR_NAME))
+ return SUCCEED;
+ }
+ }
+
+ /* transfer predefined attributes. */
+ if(h4_transpredattrs(h5g,HDF4_OBJECT_TYPE,obtype)==FAIL){
+ printf("error in data attribute transferring.\n");
+ return FAIL;
+ }
+
+ if(Vgetname(h4vg,vgroup_name) == SUCCEED){
+ if(vgroup_name[0] != '\0') {
+ if(h4_transpredattrs(h5g,HDF4_OBJECT_NAME,vgroup_name)==FAIL){
+ printf("error in data attribute transferring.\n");
+ return FAIL;
+ }
+ }
+ }
+
+ if(vgroup_class[0] !='\0') {
+ if(h4_transpredattrs(h5g,HDF4_VGROUP_CLASS,vgroup_class)==FAIL){
+ printf("error in data attribute transferring.\n");
+ return FAIL;
+ }
+ }
+
+ vgroup_cref = VQueryref(h4vg);
+ if(vgroup_cref == FAIL) {
+ printf("failed to obtain group reference number.\n");
+ return FAIL;
+ }
+
+ if(h4_transnumattr(h5g,HDF4_REF_NUM,vgroup_cref)==FAIL){
+ printf("error in data attribute transferring.\n");
+ return FAIL;
+ }
+
+ return SUCCEED;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: get_obj_aboname
+ *
+ * Purpose: get absolute path name of hdf5 object
+ In this function, we will deal with name clashing.
+ If we find an object name(by using lookup_name routine)
+ that has already been used,
+ we will remake the name for this object, using the
+ object type (vgroup, sds, image, palette, vdata) plus the reference
+ number to make it unique.
+ *
+ * Return: NULL if failed, object name if successful.
+ *
+ * In :
+ obj_name: relative object name
+ ref_str: reference number in character format
+ path_name: absolute path
+ objstr: object type in character format
+
+ *-------------------------------------------------------------------------
+ */
+
+char* get_obj_aboname(char* obj_name,char* refstr,char* path_name,
+ const char*objstr ) {
+
+ char *abo_objname;
+ int check_name;
+ char check_char;
+
+
+ /* sometimes the object name is not NULL but is empty;
+ make_objname_no is used in that situation. */
+ if(obj_name != NULL) check_char = *obj_name;
+
+ /* obtain the absolute name of the object. */
+ if (obj_name == NULL || check_char == '\0')
+ abo_objname = make_objname_no(refstr,path_name,objstr);
+ else
+ abo_objname = make_objname_yes(obj_name,path_name);
+
+ /* look up the name and see whether there is name clashing here.
+ if yes, remake the object name.*/
+ check_name = lookup_name(abo_objname,num_objects,name_hashtab);
+
+ if(check_name == 1) {
+ /* name_clashing is found. */
+ if(objstr != NULL && refstr != NULL){
+ free(abo_objname);
+
+ if(path_name != NULL) {
+ abo_objname= malloc(strlen(path_name)+strlen(objstr)+
+ strlen(refstr)+3);
+ if(abo_objname == NULL) {
+ printf("error in allocating memory. \n");
+ return NULL;
+ }
+ h4toh5_ZeroMemory(abo_objname,strlen(path_name)+strlen(objstr)+
+ strlen(refstr)+3);
+ strcpy(abo_objname,path_name);
+ strcat(abo_objname,"/");
+ strcat(abo_objname,objstr);
+ strcat(abo_objname,"_");
+ strcat(abo_objname,refstr);
+ }
+
+ else {
+ abo_objname= malloc(strlen(objstr)+strlen(refstr)+3);
+ if(abo_objname == NULL) {
+ printf("error in allocating memory. \n");
+ return NULL;
+ }
+ h4toh5_ZeroMemory(abo_objname,strlen(objstr)+strlen(refstr)+3);
+ strcat(abo_objname,"/");
+ strcat(abo_objname,objstr);
+ strcat(abo_objname,"_");
+ strcat(abo_objname,refstr);
+ }
+ }
+ }
+
+ return abo_objname;
+}
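+
+/* Illustrative examples of the name mangling (values are hypothetical; the
+   global name table must already be initialized by the converter):
+
+     make_objname_yes("temp", "/grp")          gives "/grp/temp"
+     make_objname_no("2", "/grp", "Vgroup")    gives "/grp/Vgroup_2"
+     get_obj_aboname("temp", "2", "/grp", "Vgroup")
+         gives "/grp/temp", or "/grp/Vgroup_2" if "/grp/temp" is taken.
+
+   Each call returns a newly allocated string that the caller frees. */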
+
+/*-------------------------------------------------------------------------
+ * Function: make_objname_no
+ *
+ * Purpose: get absolute path name of hdf5 object when object name is
+ not defined.
+ We will use path name and
+ object type(vgroup,sds,image,palette, vdata) plus reference
+ number to make it unique.
+ *
+ * Return: NULL if failed, object name if successful.
+ *
+ * In :
+ ref_str: reference number in character format
+ path_name: absolute path
+ objstr: object type in character format
+
+ *-------------------------------------------------------------------------
+ */
+
+char* make_objname_no(char* refstr,char* path_name,const char*objstr) {
+
+ char *new_objname;
+
+ if(objstr == NULL || refstr == NULL) {
+ printf("error, object type and ref. number should be defined.\n");
+ return NULL;
+ }
+
+ if (path_name == NULL) {/* under root group. */
+
+ new_objname= malloc(strlen(objstr)+strlen(refstr)+3);
+ if(new_objname == NULL) {
+ printf("error in allocating memory for object name. \n");
+ return NULL;
+ }
+ h4toh5_ZeroMemory(new_objname,strlen(objstr)+strlen(refstr)+3);
+ strcpy(new_objname,"/");
+ strcat(new_objname,objstr);
+ strcat(new_objname,"_");
+ strcat(new_objname,refstr);
+ }
+
+ else {
+
+ new_objname= malloc(strlen(path_name)+strlen(objstr)+strlen(refstr)+3);
+ if(new_objname == NULL) {
+ printf("error in allocating memory. \n");
+ return NULL;
+ }
+ h4toh5_ZeroMemory(new_objname,strlen(path_name)+strlen(objstr)+strlen(refstr)+3);
+ strcpy(new_objname,path_name);
+ strcat(new_objname,"/");
+ strcat(new_objname,objstr);
+ strcat(new_objname,"_");
+ strcat(new_objname,refstr);
+ }
+
+ return new_objname;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: make_objname_yes
+ *
+ * Purpose: get absolute path name of hdf5 object when object name is
+ defined.
+
+ *
+ * Return: NULL if failed, object name if successful.
+ *
+ * In : obj_name: object name
+ path_name: absolute path
+
+ *-------------------------------------------------------------------------
+ */
+
+char* make_objname_yes(char* obj_name,char* path_name){
+
+ char*new_objname;
+
+ if(path_name == NULL) {
+ new_objname = malloc(strlen(obj_name)+2);
+ if(new_objname == NULL) {
+ printf("error in allocating memory. \n");
+ return NULL;
+ }
+ h4toh5_ZeroMemory(new_objname,strlen(obj_name)+2);
+ strcpy(new_objname,"/");
+ strcat(new_objname,obj_name);
+ }
+ else {
+ new_objname = malloc(strlen(path_name)+strlen(obj_name)+2);
+ if(new_objname == NULL) {
+ printf("error in allocating memory. \n");
+ return NULL;
+ }
+ h4toh5_ZeroMemory(new_objname,strlen(path_name)+strlen(obj_name)+2);
+ strcpy(new_objname,path_name);
+ strcat(new_objname,"/");
+ strcat(new_objname,obj_name);
+ }
+ return new_objname;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: trans_obj_name
+ *
+ * Purpose: obtain hdf4 attribute name from hdf4 object type
+ plus ATTR plus reference number.
+ *
+ * Return: object name;
+ *
+ * In :
+ obj_tag: hdf4 tag
+ index : attribute index used to build the name
+
+ *-------------------------------------------------------------------------
+ */
+char* trans_obj_name(int32 obj_tag,int32 index) {
+
+ char* obj_name;
+ char indstr[6]; /* room for a five-digit index plus the null terminator */
+
+ /* the reason why we allocate memory with strlen(HDF4_PALETTE) is
+ HDF4_PALETTE is the longest string among HDF4_??? */
+ obj_name = malloc(strlen(HDF4_PALETTE)+strlen(ATTR)+8);
+ if(obj_name == NULL) {
+ printf("cannot allocate memory for object name. \n");
+ return NULL;
+ }
+
+ h4toh5_ZeroMemory(obj_name,strlen(HDF4_PALETTE)+strlen(ATTR)+8);
+
+ if(conv_int_str(index,indstr)== FAIL) {
+ printf("indstr is not allocated. \n");
+ return NULL;
+ }
+
+ switch(obj_tag) {
+
+ case DFTAG_SD:
+ case DFTAG_NDG:
+ case DFTAG_SDG:
+ strcpy(obj_name,HDF4_SDS);
+ break;
+
+ case DFTAG_RIG:
+ case DFTAG_RI:
+ case DFTAG_RI8:
+ strcpy(obj_name,HDF4_IMAGE);
+ break;
+
+ case DFTAG_VG:
+ strcpy(obj_name,HDF4_VGROUP);
+ break;
+
+ case DFTAG_VH:
+ case DFTAG_VS:
+ strcpy(obj_name,HDF4_VDATA);
+ break;
+
+ case DFTAG_LUT:
+ strcpy(obj_name,HDF4_PALETTE);
+ break;
+
+ default:
+ printf("error, object tag is transferred out of limits. \n");
+ free(obj_name);
+ return NULL;
+ }
+
+ strcat(obj_name,"_");
+ strcat(obj_name,ATTR);
+ strcat(obj_name,"_");
+ strcat(obj_name,indstr);
+
+ return obj_name;
+}
+/*-------------------------------------------------------------------------
+ * Function: freehashmemory
+ *
+ * Purpose: free the memory allocated for the hash tables.
+
+ *
+ * Return: None
+ *
+ * In :
+
+
+ *-------------------------------------------------------------------------
+ */
+
+void freehashmemory(void){
+
+ if(estnum_vg > 0) freetable(estnum_vg,vg_hashtab);
+ if(estnum_vd > 0) freetable(estnum_vd,vd_hashtab);
+
+ if(num_sds > 0) {
+ freetable(2*num_sds,sds_hashtab);
+ freenametable(DIM_HASHSIZE,dim_hashtab);
+ }
+
+ if(num_images > 0) {
+ freetable(2*num_images,gr_hashtab);
+ freetable(PAL_HASHSIZE,pal_hashtab);
+ }
+
+ if(num_objects > 0) freenametable(num_objects,name_hashtab);
+
+}
+
+/*-------------------------------------------------------------------------
+ * Function: correct_name
+ *
+ * Purpose: modify the hdf4 object name when the name contains '/'. Change
+ this character into '_'.
+
+ *
+ * Return: the corrected name
+ *
+ * In : old name
+
+
+ *-------------------------------------------------------------------------
+ */
+char *correct_name(char* oldname){
+
+ char * cptr;
+ char * newname;
+
+ if(oldname == NULL) {
+ printf("the input name is NULL.\n");
+ return NULL;
+ }
+
+ newname = malloc(strlen(oldname)+1);
+ if(newname == NULL) {
+ printf("cannot allocate memory for the corrected name.\n");
+ return NULL;
+ }
+ h4toh5_ZeroMemory(newname,strlen(oldname)+1);
+ strncpy(newname, oldname, strlen(oldname));
+
+ while(strchr(newname,ORI_SLASH)!= NULL){
+ cptr = strchr(newname,ORI_SLASH);
+ *cptr = CHA_SLASH;
+ }
+
+ return newname;
+}
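+
+/* A tiny sketch (hypothetical input): the '/' in an HDF4 name is replaced
+   so the name is legal as a single HDF5 link name.
+
+     char *fixed = correct_name("time/frame");   gives "time_frame"
+     ...
+     free(fixed);
+*/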
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/h4toh5/h4toh5vdata.c b/tools/h4toh5/h4toh5vdata.c
new file mode 100644
index 0000000..55a8bf9
--- /dev/null
+++ b/tools/h4toh5/h4toh5vdata.c
@@ -0,0 +1,831 @@
+/*-------------------------------------------------------------------------
+ *
+ * Copyright (C) 2000 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/******************************************************************************
+
+ Description:
+
+1. converter
+
+See HDF4 to HDF5 mapping specification at
+(http://hdf.ncsa.uiuc.edu/HDF5/papers/h4toh5) for the default mapping
+from HDF4 object to HDF5 object.
+
+The whole converter includes 10 files, h4toh5util.h, h4toh5main.h, h4toh5util.c, h4toh5main.c, h4toh5sds.c, h4toh5image.c,h4toh5vdata.c,h4toh5vgroup.c,h4toh5pal.c and h4toh5anno.c.
+
+2. this file
+
+Converting an hdf4 independent vdata object into an hdf5 dataset with a compound datatype.
+
+Author: Kent Yang(ymuqun@ncsa.uiuc.edu)
+
+
+*****************************************************************************/
+
+#include "h4toh5main.h"
+#include <assert.h>
+
+/*-------------------------------------------------------------------------
+ * Function: Vdata_h4_to_h5
+ *
+ * Purpose: translate Vdata object into hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ vdata_id: vdata identifier
+ group_id: hdf5 group id
+ Out:
+
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+
+int Vdata_h4_to_h5(int32 file_id,int32 vdata_id, hid_t group_id) {
+
+ /* define variables for hdf4. */
+
+ int32 istat;
+ int32 n_records;
+
+ int32 vdata_ref;
+ int32 vdata_tag;
+
+ int32 interlace_mode;
+
+ int32 vdata_size;
+ int32 vdatamem_size;
+
+ int32 field_index;
+ int32 fieldorder;
+ int32 fieldtype;
+
+ int i;
+ int32 nfields;
+ int num_vd_attrs;
+ int num_vd_field_attrs;
+
+ VOIDP vd_data;
+
+ char vdlabel[10];
+ char vdata_name[MAX_NC_NAME];
+ char fieldname[MAX_NC_NAME];
+ char vdata_class[VSNAMELENMAX];
+ char field_name_list[VSFIELDMAX*FIELDNAMELENMAX];
+
+ /* define variables for hdf5. */
+
+ hid_t h5d_sid;
+ hid_t h5dset;
+
+ hid_t h5_ctype;
+ hid_t h5_cmemtype;
+
+ hid_t* h5memtype = NULL;
+ hid_t* h5type = NULL;
+
+ size_t* h4memsize = NULL;
+ size_t* h4size = NULL;
+ hsize_t h5_vddims[1];
+ char* h5cvdata_name;
+
+ int check_vdname;
+
+ /* Zeroing out memory for vdlabel,vdata_class,vdata_name */
+
+ h4toh5_ZeroMemory(vdata_name,MAX_NC_NAME);
+ h4toh5_ZeroMemory(fieldname,MAX_NC_NAME);
+ h4toh5_ZeroMemory(vdata_class,VSNAMELENMAX);
+ h4toh5_ZeroMemory(field_name_list,VSFIELDMAX*FIELDNAMELENMAX);
+ h4toh5_ZeroMemory(vdlabel,10);
+
+ /* get absolute path of vdata name. */
+
+ vdata_ref = VSQueryref(vdata_id);
+ if (vdata_ref == FAIL) {
+ printf("error in getting reference number. \n");
+ return FAIL;
+ }
+
+ vdata_tag = VSQuerytag(vdata_id);
+ if (vdata_tag == FAIL) {
+ printf("error in getting object tag number. \n");
+ return FAIL;
+ }
+
+ /* get the class name */
+
+ if(VSgetclass(vdata_id,vdata_class) == FAIL) {
+ printf("error in obtaining class name. \n");
+ return FAIL;
+ }
+
+ /* get number of record,field_name,Size of a record and
+ Name of the vdata*/
+
+ if(VSQueryvsize(vdata_id,&vdata_size)==FAIL) {
+ printf("error in getting size of vdata. \n");
+ return FAIL;
+ }
+
+ if(vdata_size == 0) {/* empty vdata set. */
+ return SUCCEED;
+ }
+
+ /* obtain number of records, field name list, vdata name. */
+ if(VSinquire(vdata_id,&n_records,&interlace_mode,
+ field_name_list,&vdata_size,vdata_name) == FAIL) {
+ printf("error in inquiring vdata. \n");
+ return FAIL;
+ }
+
+ vdatamem_size = 0;
+ vdata_size = 0;
+ nfields = VFnfields(vdata_id);
+
+ if (nfields == FAIL) {
+ printf("error in obtaining number of vdata fields. \n");
+ return FAIL;
+ }
+
+ assert(nfields>0);
+ h5memtype = calloc((size_t)nfields,sizeof(hid_t));
+ h5type = calloc((size_t)nfields,sizeof(hid_t));
+ h4memsize = calloc((size_t)nfields,sizeof(size_t));
+ h4size = calloc((size_t)nfields,sizeof(size_t));
+
+ for (i=0;i<nfields;i++) {
+
+ /* obtain field type. */
+ fieldtype = VFfieldtype(vdata_id,i);
+ if(fieldtype == FAIL){
+ printf("error in obtaining field type. \n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ return FAIL;
+ }
+
+ /* obtain field order.*/
+ fieldorder = VFfieldorder(vdata_id,i);
+ /* printf("fieldorder %d\n",fieldorder);*/
+ if(fieldorder == FAIL){
+ printf("error in obtaining field order. \n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ return FAIL;
+ }
+
+ /* datatype conversion from hdf4 to hdf5.
+ the corresponding memory data type is also converted.*/
+ if(h4type_to_h5type(fieldtype,&h5memtype[i],&h4memsize[i],
+ &h4size[i],&h5type[i])== FAIL){
+ printf("error in doing datatype conversion at vdata routine. \n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ return FAIL;
+ }
+
+ vdatamem_size +=fieldorder*h4memsize[i];
+ vdata_size +=fieldorder*h4size[i];
+
+ }
+
+ vd_data = malloc((size_t)(vdatamem_size*n_records));
+
+ istat = VSsetfields(vdata_id,field_name_list);
+
+ if(istat == FAIL) {
+ printf("error setting fields of vdata.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ istat = VSread(vdata_id,(uint8*)vd_data,n_records,FULL_INTERLACE);
+
+ if(istat == FAIL) {
+ printf("error in obtaining vdata. \n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ for (i=0;i<nfields;i++) {
+ /* obtain field order.*/
+ fieldorder = VFfieldorder(vdata_id,i);
+
+ if(fieldorder == FAIL){
+ printf("error in obtaining field order. \n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ return FAIL;
+ }
+ }
+ /* create hdf5 compound datatype for both memory and file.*/
+
+ h5_ctype = H5Tcreate(H5T_COMPOUND, (size_t)vdata_size);
+ h5_cmemtype = H5Tcreate(H5T_COMPOUND,(size_t)vdatamem_size);
+
+ if(gen_h5comptype(vdata_id,nfields,h4size,h4memsize,h5type,h5memtype,
+ h5_ctype,h5_cmemtype)==FAIL){
+ printf("error in generating h5 compound data type.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ h5_vddims[0] = n_records;
+ h5d_sid = H5Screate_simple(1,h5_vddims,NULL);
+
+ if(h5d_sid <0){
+ printf("error in obtaining space id.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ /* choose a number that is not returned from the func.*/
+ check_vdname = -3;
+
+ /* obtain hdf5 vdata name. */
+ h5cvdata_name = get_name(vdata_ref,estnum_vd,vd_hashtab,&check_vdname);
+
+ if (h5cvdata_name == NULL && check_vdname == 0 ) {
+ printf("error,cannot find vdata \n");
+ return FAIL;
+ }
+
+ if (h5cvdata_name == NULL && check_vdname == -1) {
+ printf("error,group name is not defined.\n");
+ return FAIL;
+ }
+
+ if (h5cvdata_name == NULL && check_vdname == -2 ) {
+ printf("cannot allocate memory for vdata.\n");
+ return FAIL;
+ }
+
+ h5dset = H5Dcreate(group_id,h5cvdata_name,h5_ctype,h5d_sid,H5P_DEFAULT);
+ if(h5dset <0) {
+ printf("error in obtaining dataset.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ free(h5cvdata_name);
+ return FAIL;
+ }
+ free(h5cvdata_name);
+
+ if(H5Dwrite(h5dset,h5_cmemtype,H5S_ALL,H5S_ALL,H5P_DEFAULT,vd_data)<0){
+ printf("error in writing dataset converted from vdata.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ /* handle vdata attributes and vdata field attributes. */
+
+ num_vd_attrs = VSfnattrs(vdata_id,_HDF_VDATA);
+
+ if (num_vd_attrs == FAIL) {
+ printf("error in obtaining attributes of vdata.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ /* when field_index = -1, only transfer vdata attribute.*/
+
+ field_index = -1;
+ if(vdata_transattrs(vdata_id,h5dset,num_vd_attrs,field_index,NULL)==FAIL){
+ printf("error in translating vdata attibutes.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ for (i =0;i< nfields;i++) {
+
+ if(VFfieldname(vdata_id,i)== NULL) {
+ printf("error in obtaining field name. \n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ strcpy(fieldname,VFfieldname(vdata_id,i));
+ num_vd_field_attrs = VSfnattrs(vdata_id,i);
+ if(num_vd_field_attrs == FAIL){
+ printf("error in number of vd field attribute \n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ if(vdata_transattrs(vdata_id,h5dset,num_vd_field_attrs,i,fieldname)
+ ==FAIL){
+ printf("error in transfering vdata attributes.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+ }
+ /* converting annotations of vdata into corresponding hdf5 attribute.*/
+ if( Annoobj_h4_to_h5(file_id,vdata_ref,vdata_tag,h5dset)== FAIL){
+ printf("fail to convert HDF4 VDATA annotation into hdf5 attributes.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ /* converting predefined attributes. */
+ strcpy(vdlabel,VDATALABEL);
+ if(h4_transpredattrs(h5dset,HDF4_OBJECT_TYPE,vdlabel)==FAIL){
+ printf("error in transfering vdata attributes.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ if(vdata_name[0] != '\0') {
+ if(h4_transpredattrs(h5dset,HDF4_OBJECT_NAME,vdata_name)==FAIL){
+ printf("error in transfering vdata attributes.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+ }
+
+ if(h4_transnumattr(h5dset,HDF4_REF_NUM,vdata_ref)==FAIL){
+ printf("error in transfering vdata attributes.\n");
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return FAIL;
+ }
+
+ H5Sclose(h5d_sid);
+ H5Dclose(h5dset);
+ VSdetach(vdata_id);
+ free(h5memtype);
+ free(h5type);
+ free(h4memsize);
+ free(h4size);
+ free(vd_data);
+ return SUCCEED;
+}
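+
+/* A minimal sketch of how a driver might feed independent vdatas to
+   Vdata_h4_to_h5 (the converter's own traversal lives in the other source
+   files and may differ; h5root is hypothetical, and Vstart(file_id) is
+   assumed to have been called). Attribute vdatas are skipped, and
+   Vdata_h4_to_h5 detaches the vdata itself on success.
+
+     int32 vd_ref = -1;
+
+     while ((vd_ref = VSgetid(file_id, vd_ref)) != FAIL) {
+         int32 vd_id = VSattach(file_id, vd_ref, "r");
+         if (vd_id == FAIL) continue;
+         if (VSisattr(vd_id)) { VSdetach(vd_id); continue; }
+         if (Vdata_h4_to_h5(file_id, vd_id, h5root) == FAIL)
+             printf("failed to convert vdata %d.\n", (int)vd_ref);
+     }
+*/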
+
+/*-------------------------------------------------------------------------
+ * Function: vdata_transattrs
+ *
+ * Purpose: translate Vdata attributes into attributes of the
+ corresponding hdf5 dataset
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ vdata_id: vdata identifier
+ h5dset: hdf5 dataset
+ snum_vdattrs: number of vd attributes
+ field_index: index of vdata fields
+ attr_name: vdata(or vdata field) attribute name
+ Out:
+ Modifications:
+
+ *-------------------------------------------------------------------------
+ */
+
+int vdata_transattrs(int32 vdata_id,hid_t h5dset,int snum_vdattrs,
+ int field_index,char* attr_name){
+
+ char svdattr_name[2*MAX_NC_NAME];
+ char* svdrepattr_name;
+ char refstr[MAXREF_LENGTH];
+
+ int32 count_svdadata;
+ int32 svd_atype;
+
+ size_t sh4_amemsize;
+ size_t sh4_asize;
+
+ hid_t sh5a_sid;
+ hid_t sh5a_id;
+ hid_t sh5_atype;
+ hid_t sh5_amemtype;
+ hid_t sh5str_type;
+ hid_t sh5str_memtype;
+
+ hsize_t sh5dims[1];
+ void* svd_adata;
+ herr_t sret;
+ int i;
+
+ /* zeroing out memory for svdattr_name and refstr */
+ h4toh5_ZeroMemory(svdattr_name,2*MAX_NC_NAME);
+ h4toh5_ZeroMemory(refstr,MAXREF_LENGTH);
+
+ /* separate vdata attribute from vdata field attributes. */
+
+ if (field_index < -1) {
+ printf("error: check_field should be either -1(vdata) or ");
+ printf(">=0(vdata field).\n");
+ return FAIL;
+ }
+
+ for (i = 0;i < snum_vdattrs; i++) {
+
+ /* if field_index is -1 (_HDF_VDATA), no field attribute exists; only
+ VDATA attributes are converted.*/
+
+ if (VSattrinfo(vdata_id,field_index,i,svdattr_name,&svd_atype,
+ &count_svdadata,NULL)== FAIL){
+ printf("unable to obtain attribute information. \n");
+ return FAIL;
+ }
+
+ if(svdattr_name[0] == '\0') {
+ svdrepattr_name = trans_obj_name(DFTAG_VG,i);
+ strcpy(svdattr_name,svdrepattr_name);
+ free(svdrepattr_name);
+ }
+
+ if (field_index != -1) {
+
+ if (attr_name != NULL) {
+ strcat(svdattr_name,":");
+ strcat(svdattr_name,attr_name);
+ }
+
+ else {
+ strcat(svdattr_name,":");
+ strcat(svdattr_name,"HDF4_VDATA_ATTR_");
+ if(conv_int_str(field_index,refstr)==FAIL) {
+ printf("error in converting vdata field index to string.\n");
+ return FAIL;
+ }
+ strcat(svdattr_name,refstr);
+ }
+ }
+
+ /* converting attribute data type into the corresponding hdf5 data type */
+
+ if(h4type_to_h5type(svd_atype,&sh5_amemtype,&sh4_amemsize,
+ &sh4_asize,&sh5_atype)==FAIL){
+ printf("fail to translate vdata attribute datatype from H4 to H5.\n");
+ return FAIL;
+ }
+
+ svd_adata = malloc(sh4_amemsize * count_svdadata);
+
+ if(svd_adata == NULL) {
+ printf("fail to allocate memory for vdata attribute data.\n");
+ return FAIL;
+ }
+
+ if(VSgetattr(vdata_id,field_index,i,(VOIDP)svd_adata)==FAIL){
+ printf("error in getting attributes of vdata. \n");
+ free(svd_adata);
+ return FAIL;
+ }
+
+ /* now do attribute-transferring:
+ 1. deal with string data type
+ 2. set attribute space
+ 3. get attribute name */
+
+ if (sh5_atype == H5T_STRING) {
+
+ if ((sh5str_type = mkstr(count_svdadata,
+ H5T_STR_SPACEPAD))<0) {
+ printf("error in making string for vdata attribute. \n");
+ free(svd_adata);
+ return FAIL;
+ }
+
+ if ((sh5str_memtype = mkstr(count_svdadata*sh4_amemsize,
+ H5T_STR_SPACEPAD))<0) {
+ printf("error in making memory string for vdata attribute. \n");
+ free(svd_adata);
+ return FAIL;
+ }
+
+ sh5a_sid = H5Screate(H5S_SCALAR);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create attribute space for ");
+ printf("HDF4_OBJECT_TYPE VDATA. \n");
+ free(svd_adata);
+ return FAIL;
+ }
+
+
+ sh5a_id = H5Acreate(h5dset,svdattr_name,sh5str_type,
+ sh5a_sid,H5P_DEFAULT);
+
+ if (sh5a_id <0) {
+ printf("failed to obtain attribute id for");
+ printf(" HDF4_OBJECT_TYPE VDATA. \n");
+ H5Sclose(sh5a_sid);
+ free(svd_adata);
+ return FAIL;
+ }
+
+ sret = H5Awrite(sh5a_id,sh5str_memtype,(void *)svd_adata);
+
+ if (sret <0) {
+ printf("fail to write vdata attr into hdf5 dataset attr\n ");
+ H5Sclose(sh5a_sid);
+ H5Aclose(sh5a_id);
+ free(svd_adata);
+ return FAIL;
+ }
+
+ free(svd_adata);
+ sret = H5Sclose(sh5a_sid);
+ sret = H5Aclose(sh5a_id);
+ }
+
+ else {
+
+ if(count_svdadata == 1) {
+ sh5a_sid = H5Screate(H5S_SCALAR);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create scalar space id for hdf5 attribute ");
+ printf("of dataset converted from attribute of VDATA.\n");
+ free(svd_adata);
+ return FAIL;
+ }
+ }
+ else {
+ sh5dims[0] = count_svdadata;
+ sh5a_sid = H5Screate_simple(1,sh5dims,NULL);
+
+ if (sh5a_sid < 0) {
+ printf("failed to create simple space id for hdf5 attribute ");
+ printf("of dataset converted from attribute of VDATA.\n");
+ free(svd_adata);
+ return FAIL;
+ }
+ }
+
+ sh5a_id = H5Acreate(h5dset,svdattr_name,sh5_atype,
+ sh5a_sid,H5P_DEFAULT);
+
+ if(sh5a_id <0) {
+ printf("failed to create attribute id for hdf5 attribute ");
+ printf("of dataset converted from attribute of VDATA.\n");
+ H5Sclose(sh5a_sid);
+ free(svd_adata);
+ return FAIL;
+ }
+
+ sret = H5Awrite(sh5a_id,sh5_amemtype,(void *)svd_adata);
+
+ if(sret <0) {
+ printf("failed to write attribute data for hdf5 attribute ");
+ printf("of dataset converted from attribute of VDATA.\n");
+ H5Sclose(sh5a_sid);
+ H5Aclose(sh5a_id);
+ free(svd_adata);
+ return FAIL;
+ }
+
+ sret = H5Aclose(sh5a_id);
+ sret = H5Sclose(sh5a_sid);
+ free(svd_adata);
+ }
+ }
+ return SUCCEED;
+}
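+
+/* Illustrative sketch only (added for clarity; a hypothetical helper that the
+   converter itself never calls): how vdata_transattrs is meant to be driven,
+   based on the field_index convention documented above.  The caller passes
+   -1 to convert the attributes of the vdata itself and a field index >= 0
+   to convert the attributes of that field.  The num_fldattrs array is an
+   assumed input holding the per-field attribute counts, and passing the
+   field name as attr_name (to keep generated names distinct per field) is
+   an assumption about the intended use of that parameter. */
+static int example_trans_all_vdattrs(int32 vdata_id,hid_t h5dset,
+                                     int num_vdattrs,int nfields,
+                                     int* num_fldattrs) {
+
+  int i;
+
+  /* vdata-level attributes: no field name is appended to the generated
+     hdf5 attribute name. */
+  if(vdata_transattrs(vdata_id,h5dset,num_vdattrs,-1,NULL)==FAIL)
+    return FAIL;
+
+  /* field-level attributes: the field name is appended so the generated
+     hdf5 attribute names stay distinct per field. */
+  for(i = 0; i < nfields; i++) {
+    if(num_fldattrs[i] > 0 &&
+       vdata_transattrs(vdata_id,h5dset,num_fldattrs[i],i,
+                        VFfieldname(vdata_id,i))==FAIL)
+      return FAIL;
+  }
+  return SUCCEED;
+}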
+/*-------------------------------------------------------------------------
+ * Function: gen_h5comptype
+ *
+ * Purpose: generate hdf5 compound data type
+
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ vdata_id: vdata identifier
+ nfields: number of fields
+ sh4size: pointer to datatype size in memory
+ sh4memsize: pointer to datatype size in memory
+ sh5type: pointer to hdf5 datatype
+ sh5memtype: pointer to actual hdf5 datatype in memory
+ h5_ctype: hdf5 compound datatype
+ h5_cmemtype: hdf5 compound datatype in memory
+ Out:
+ Modifications:
+
+ *-------------------------------------------------------------------------
+ */
+
+int gen_h5comptype(int32 vdata_id,int32 nfields,
+ size_t* sh4size,size_t* sh4memsize,
+ hid_t* sh5type,hid_t* sh5memtype,
+ hid_t h5_ctype,hid_t h5_cmemtype) {
+
+ char* fieldname;
+ int32 fieldorder;
+ size_t fil_offset;
+ size_t mem_offset;
+ hsize_t fielddim[1];
+ hid_t h5str_type;
+ int check_ifstr;/* flag to check if the h5 type is string.*/
+ int i;
+
+
+ check_ifstr = 0;
+ fil_offset = 0;
+ mem_offset = 0;
+
+
+ for (i =0;i< nfields;i++) {
+
+ fieldname = NULL;
+ fieldorder = VFfieldorder(vdata_id,i);
+
+ if(fieldorder == FAIL){
+ printf("error in obtaining fieldorder.\n");
+ return FAIL;
+ }
+
+ fieldname = VFfieldname(vdata_id,i);
+ if(fieldname == NULL){
+ printf("fail to obtain Vdata field name. \n");
+ return FAIL;
+ }
+
+
+ /* when the vdata field is a character array, we write the whole
+ array as a single hdf5 string. */
+
+ if(sh5type[i] == H5T_STRING) {
+
+ if ((h5str_type = mkstr(sh4size[i]*fieldorder,H5T_STR_SPACEPAD))<0) {
+ printf("error in making string of hdf5 string. \n");
+ return FAIL;
+ }
+ sh5type[i] = h5str_type;
+ check_ifstr = 1;
+ }
+
+ if (sh5memtype[i] == H5T_STRING) {
+
+ if((h5str_type = mkstr(sh4memsize[i]*fieldorder,H5T_STR_SPACEPAD))<0){
+ printf("error in making string for VDATA in memory. \n");
+ return FAIL;
+ }
+ sh5memtype[i] = h5str_type;
+
+ }
+
+ fielddim[0] = fieldorder;
+
+ /* if the field order is greater than 1, insert the field as an HDF5
+ array datatype (H5Tarray_create + H5Tinsert). When the data type is
+ H5T_STRING, we treat the whole field as an HDF5 scalar string.*/
+
+ if (fielddim[0] == 1 || check_ifstr == 1) {
+
+ if(H5Tinsert(h5_ctype,fieldname,fil_offset,sh5type[i])<0) {
+ printf("error inserting hdf5 compound datatype while ");
+ printf("converting vdata.\n");
+ return FAIL;
+ }
+
+ if(H5Tinsert(h5_cmemtype,fieldname,mem_offset,sh5memtype[i])<0){
+ printf("error inserting hdf5 compound datatype of memory");
+ printf(" while converting vdata.\n");
+ return FAIL;
+ }
+ }
+
+ else {
+ hid_t arr_type; /* Array datatype for inserting fields */
+
+ /* Create array datatype */
+ if((arr_type=H5Tarray_create(sh5type[i],1,fielddim,NULL))<0) {
+ printf("error creating array datatype.\n");
+ return FAIL;
+ }
+
+ if(H5Tinsert(h5_ctype,fieldname,fil_offset,arr_type)<0) {
+ printf("error inserting array into hdf5 compound datatype. \n");
+ return FAIL;
+ }
+
+ /* Close array datatype */
+ if(H5Tclose(arr_type)<0) {
+ printf("error closing array datatype.\n");
+ return FAIL;
+ }
+
+ /* Create array datatype */
+ if((arr_type=H5Tarray_create(sh5memtype[i],1,fielddim,NULL))<0) {
+ printf("error creating array datatype.\n");
+ return FAIL;
+ }
+
+ if(H5Tinsert(h5_cmemtype,fieldname,mem_offset,arr_type)<0) {
+ printf("error inserting array into hdf5 compound datatype for memory. \n");
+ return FAIL;
+ }
+
+ /* Close array datatype */
+ if(H5Tclose(arr_type)<0) {
+ printf("error closing array datatype.\n");
+ return FAIL;
+ }
+
+
+ }
+
+ /* advance the packed file and memory offsets past this field and
+ reset the string flag for the next field. */
+ fil_offset = fil_offset + sh4size[i]*fieldorder;
+ mem_offset = mem_offset + sh4memsize[i]*fieldorder;
+ check_ifstr = 0;
+
+ }
+
+ return SUCCEED;
+}
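+
+/* Illustrative sketch only (hypothetical, not used by the converter): the
+   kind of file/memory compound datatype pair that gen_h5comptype builds,
+   shown here for a made-up two-field record.  The file type uses portable
+   (standard, big-endian) member types, the memory type uses native member
+   types matching the buffer that is written out.  Note that the routine
+   above computes its offsets by accumulating the packed HDF4 field sizes,
+   while this sketch uses HOFFSET on a C struct. */
+typedef struct {
+  float temp;
+  int   count;
+} example_rec_t;
+
+static int example_build_comptype(hid_t* file_type,hid_t* mem_type) {
+
+  *file_type = H5Tcreate(H5T_COMPOUND,sizeof(example_rec_t));
+  *mem_type  = H5Tcreate(H5T_COMPOUND,sizeof(example_rec_t));
+  if(*file_type < 0 || *mem_type < 0) return FAIL;
+
+  /* file datatype: fixed, portable member types */
+  H5Tinsert(*file_type,"temp",HOFFSET(example_rec_t,temp),H5T_IEEE_F32BE);
+  H5Tinsert(*file_type,"count",HOFFSET(example_rec_t,count),H5T_STD_I32BE);
+
+  /* memory datatype: native member types matching the in-memory buffer */
+  H5Tinsert(*mem_type,"temp",HOFFSET(example_rec_t,temp),H5T_NATIVE_FLOAT);
+  H5Tinsert(*mem_type,"count",HOFFSET(example_rec_t,count),H5T_NATIVE_INT);
+
+  return SUCCEED;
+}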
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/tools/h4toh5/h4toh5vgroup.c b/tools/h4toh5/h4toh5vgroup.c
new file mode 100644
index 0000000..92a6735
--- /dev/null
+++ b/tools/h4toh5/h4toh5vgroup.c
@@ -0,0 +1,812 @@
+
+/*-------------------------------------------------------------------------
+ *
+ * Copyright (C) 2000 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ */
+
+/******************************************************************************
+
+ Description:
+
+1. converter
+
+See HDF4 to HDF5 mapping specification at
+(http://hdf.ncsa.uiuc.edu/HDF5/papers/h4toh5) for the default mapping
+from HDF4 object to HDF5 object.
+
+The whole converter includes 10 files: h4toh5util.h, h4toh5main.h, h4toh5util.c, h4toh5main.c, h4toh5sds.c, h4toh5image.c, h4toh5vdata.c, h4toh5vgroup.c, h4toh5pal.c and h4toh5anno.c.
+
+2. this file
+
+This file converts an HDF4 Vgroup object into an HDF5 group.
+
+Author: Kent Yang(ymuqun@ncsa.uiuc.edu)
+
+
+*****************************************************************************/
+
+
+#include "h4toh5main.h"
+
+
+/*-------------------------------------------------------------------------
+ * Function: Vgroup_h4_to_h5
+ *
+ * Purpose: translate the objects inside a Vgroup (vgroup, vdata, sds,
+ image) into hdf5 groups/datasets, calling this routine
+ recursively for sub-vgroups
+ *
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: hdf4 file identifier
+ vgroup_id: hdf4 vgroup id
+ sd_id: sd interface id
+ h5_group: hdf5 group id
+ h5_dimgroup: hdf5 dimensional scale group id
+ h5_palgroup: hdf5 palette group id
+ Out:
+
+ Modification:
+ *-------------------------------------------------------------------------
+ */
+
+
+int Vgroup_h4_to_h5(int32 file_id,int32 vgroup_id,int32 sd_id,hid_t h5_group,hid_t h5_dimgroup,hid_t h5_palgroup)
+
+{
+
+ int32 vgroup_tag;
+ int32 vgroup_ref;
+ int32 obj_tag;
+ int32 obj_ref;
+ int32 num_gobjects;
+ int i;
+
+ char refstr[MAXREF_LENGTH];
+ char vgroup_class[VGNAMELENMAX];
+ char vgroup_name[VGNAMELENMAX];
+
+ char* h5pgroup_name;
+
+ int check_vgname;
+ hid_t h5_pgroup;
+
+ /*zeroing out memory for vgroup_class and vgroup_name */
+ h4toh5_ZeroMemory(vgroup_class,VGNAMELENMAX);
+ h4toh5_ZeroMemory(vgroup_name,VGNAMELENMAX);
+
+ vgroup_tag = VQuerytag(vgroup_id);
+ if(vgroup_tag == FAIL) {
+ printf("error in obtaining vgroup tag.\n");
+ return FAIL;
+ }
+
+ vgroup_ref = VQueryref(vgroup_id);
+ if(vgroup_ref == FAIL) {
+ printf("error in obtaining vgroup reference.\n");
+ return FAIL;
+ }
+
+ if(Vgetname(vgroup_id,vgroup_name) == FAIL) {
+ printf("error in obtaining vgroup name.\n");
+ return FAIL;
+ }
+
+ if(Vgetclass(vgroup_id,vgroup_class) == FAIL) {
+ printf("error in obtaining vgroup class name. \n");
+ return FAIL;
+ }
+
+ /*** ignore reserved HDF group ***/
+
+ if(vgroup_class != NULL) {
+ if(strcmp(vgroup_class,_HDF_ATTRIBUTE)==0) return SUCCEED;
+ if(strcmp(vgroup_class,_HDF_VARIABLE)==0) return SUCCEED;
+ if(strcmp(vgroup_class,_HDF_DIMENSION)==0) return SUCCEED;
+ if(strcmp(vgroup_class,_HDF_UDIMENSION)==0) return SUCCEED;
+ if(strcmp(vgroup_class,_HDF_CDF)==0) return SUCCEED;
+ if(strcmp(vgroup_class,GR_NAME)==0) return SUCCEED;
+ if(strcmp(vgroup_class,RI_NAME)==0) return SUCCEED;
+ }
+
+ if(vgroup_name != NULL)
+ if(strcmp(vgroup_name,GR_NAME)==0) return SUCCEED;
+
+ h5pgroup_name = get_name(vgroup_ref,estnum_vg,vg_hashtab,&check_vgname);
+
+ if(h5pgroup_name == NULL && check_vgname == 0 ) {
+ printf("error,cannot find group\n");
+ return FAIL;
+ }
+
+ if(h5pgroup_name == NULL && check_vgname ==-1 ) {
+ printf("error,group name is not defined.\n");
+ return FAIL;
+ }
+
+ /* create a hdf5 group under h5_group.*/
+
+ h5_pgroup = H5Gcreate(h5_group,h5pgroup_name,0);
+
+ if(h5_pgroup < 0) {
+ printf("error in creating group. \n");
+ free(h5pgroup_name);
+ return FAIL;
+ }
+
+ /* translate vgroup attributes into corresponding hdf5 group attributes. */
+ if(vg_transattrs(vgroup_id,h5_pgroup)==FAIL) {
+ printf("error in translating vgroup attributes into hdf5 group attr.\n");
+ H5Gclose(h5_pgroup);
+ free(h5pgroup_name);
+ return FAIL;
+ }
+
+ num_gobjects = Vntagrefs(vgroup_id);
+
+ if(num_gobjects == FAIL) {
+ printf("error in obtaining number of objects in the vgroup. \n");
+ H5Gclose(h5_pgroup);
+ free(h5pgroup_name);
+ return FAIL;
+ }
+
+ if(Annoobj_h4_to_h5(file_id,vgroup_ref,vgroup_tag,h5_pgroup)==FAIL) {
+ printf("error in obtaining annotation of the vgroup.\n");
+ H5Gclose(h5_pgroup);
+ free(h5pgroup_name);
+ return FAIL;
+ }
+
+ for( i = 0;i<num_gobjects;i++) {
+
+ if(Vgettagref(vgroup_id,i,&obj_tag,&obj_ref)==FAIL) {
+ printf("failed to get object tag and ref of the current");
+ printf(" object in this vgroup.\n");
+ H5Gclose(h5_pgroup);
+ free(h5pgroup_name);
+ return FAIL;
+ }
+
+ if(conv_int_str(obj_ref,refstr)== FAIL) {
+ printf("failed to convert object reference number ");
+ printf("into string format at vgroup_h4_to_h5 routine.\n");
+ H5Gclose(h5_pgroup);
+ free(h5pgroup_name);
+ return FAIL;
+ }
+
+ if (Visvg(vgroup_id,obj_ref)) {
+
+ if(convert_vgroup(file_id,sd_id,obj_ref,h5pgroup_name,h5_pgroup,
+ h5_dimgroup,h5_palgroup)== FAIL) {
+ printf("convert_vgroup routine failed,");
+ printf("cannot convert vgroup into hdf5 group successfully.\n");
+ free(h5pgroup_name);
+ H5Gclose(h5_pgroup);
+ return FAIL;
+ }
+
+ }
+ /* the object is an independent vdata. */
+ else if(Visvs(vgroup_id,obj_ref)) {
+ if(convert_vdata(file_id,obj_ref,h5pgroup_name,h5_pgroup)==FAIL){
+ printf("fail to convert vdata into hdf5 dataset.\n");
+ free(h5pgroup_name);
+ H5Gclose(h5_pgroup);
+ return FAIL;
+ }
+ }
+ else if(obj_tag == DFTAG_NDG || obj_tag == DFTAG_SDG) {
+ if(convert_sds(file_id,sd_id,obj_ref,h5pgroup_name,h5_pgroup,
+ h5_dimgroup)==FAIL){
+ printf("fail to convert sds into hdf5 dataset.\n");
+ H5Gclose(h5_pgroup);
+ free(h5pgroup_name);
+ return FAIL;
+ }
+ }
+ else if(obj_tag == DFTAG_RIG) {
+ if(convert_image(file_id,obj_ref,h5pgroup_name,
+ h5_pgroup,h5_palgroup)==FAIL){
+ printf("fail to convert image into hdf5 dataset.\n");
+ H5Gclose(h5_pgroup);
+ free(h5pgroup_name);
+ return FAIL;
+ }
+ }
+ }
+
+ H5Gclose(h5_pgroup);
+ free(h5pgroup_name);
+ return SUCCEED;
+}
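+
+/* Illustrative sketch only (hypothetical helper, nothing in the converter
+   calls it): how a lone top-level vgroup, identified by a reference number
+   obtained elsewhere (e.g. from Vlone in the main driver), could be handed
+   to Vgroup_h4_to_h5.  The hdf5 root/dimensional-scale/palette group ids and
+   the vg/vd/sds name hash tables are assumed to have been set up already by
+   h4toh5main.c, and the hdf5 name for vg_ref is assumed to have been
+   registered with set_name(), since Vgroup_h4_to_h5 looks it up with
+   get_name(). */
+static int example_convert_lone_vgroup(int32 file_id,int32 sd_id,
+                                       int32 vg_ref,hid_t h5root,
+                                       hid_t h5dimgroup,hid_t h5palgroup) {
+
+  int32 vgroup_id;
+  int   ret;
+
+  vgroup_id = Vattach(file_id,vg_ref,"r");
+  if(vgroup_id == FAIL) return FAIL;
+
+  /* Vgroup_h4_to_h5 recurses into sub-vgroups and converts the vdata,
+     sds and image objects it finds along the way. */
+  ret = Vgroup_h4_to_h5(file_id,vgroup_id,sd_id,h5root,
+                        h5dimgroup,h5palgroup);
+  Vdetach(vgroup_id);
+  return ret;
+}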
+
+/*-------------------------------------------------------------------------
+ * Function: convert_vgroup
+ *
+ * Purpose: subroutine interface for better modularity of vgroup_h4_to_h5
+ * In this routine, 1) h5 vgroup name is obtained;
+ 2) vgroup_h4_to_h5 is called again for
+ unvisited vgroups
+ 3) a hard link is created for visited vgroups
+
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: hdf4 file identifier
+ sd_id: sd interface id
+ obj_ref: object reference number
+ h5pgroup_name: h5 group name
+ h5_pgroup: hdf5 group id
+ h5_dimgroup: hdf5 dimensional scale group id
+ h5_palgroup: hdf5 palette group id
+
+ *-------------------------------------------------------------------------
+ */
+
+int convert_vgroup(int32 file_id,int32 sd_id, int32 obj_ref,
+ char* h5pgroup_name,hid_t h5_pgroup,hid_t h5_dimgroup,
+ hid_t h5_palgroup) {
+
+ int32 vgroup_cid;
+ int32 istat;
+ int check_vgname;
+ char refstr[MAXREF_LENGTH];
+ char cvgroup_name[VGNAMELENMAX];
+ char* cor_cvgroupname;
+ char* h5cgroup_name;
+ char* h5lgroup_name;
+ int check_vgroup;
+
+ if(conv_int_str(obj_ref,refstr)== FAIL) {
+ printf("converting integer into string format.\n");
+ return FAIL;
+ }
+
+ vgroup_cid = Vattach(file_id,obj_ref,"r");
+ if(vgroup_cid == FAIL) {
+ printf("error in getting vgroup id.\n");
+ return FAIL;
+ }
+
+ /* recursively obtain information from the group*/
+ /* check whether it is looked up, if yes, create a hard link.*/
+
+ istat = Vgetname(vgroup_cid,cvgroup_name);
+ if(istat == FAIL) {
+ printf("failed to get the name of vgroup.\n");
+ Vdetach(vgroup_cid);
+ return FAIL;
+ }
+
+ /* look up vg hashtable and see whether this object is touched.*/
+ check_vgroup = lookup(obj_ref,estnum_vg,vg_hashtab);
+
+ /* if this vgroup has not been touched, convert it into hdf5 group.
+ else create a hard link to the existing group.*/
+
+ cor_cvgroupname = correct_name(cvgroup_name);
+ if(cor_cvgroupname == NULL) {
+ printf("error in generating corrected vgroup name. \n");
+ Vdetach(vgroup_cid);
+ return FAIL;
+ }
+ if(check_vgroup == 0) {
+
+ /* checking whether vgroup name contains ORI_SLASH, changing into CHA_SLASH.*/
+
+ h5cgroup_name = get_obj_aboname(cor_cvgroupname,refstr,h5pgroup_name,
+ HDF4_VGROUP);
+ if(h5cgroup_name == NULL) {
+ printf("error in getting the group name.\n");
+ Vdetach(vgroup_cid);
+ free(cor_cvgroupname);
+ return FAIL;
+ }
+
+ free(cor_cvgroupname);
+ if(set_name(obj_ref,estnum_vg,vg_hashtab,h5cgroup_name)== FAIL) {
+ printf("error in setting group name.\n");
+ Vdetach(vgroup_cid);
+ free(h5cgroup_name);
+ return FAIL;
+ }
+ if(Vgroup_h4_to_h5(file_id,vgroup_cid,sd_id,h5_pgroup,
+ h5_dimgroup,h5_palgroup)== FAIL) {
+ printf("error in transferring vgroup into hdf5 group.\n");
+ Vdetach(vgroup_cid);
+ free(h5cgroup_name);
+ return FAIL;
+ }
+ free(h5cgroup_name);
+
+ }
+
+ else {
+
+ h5cgroup_name = get_name(obj_ref,estnum_vg,vg_hashtab,&check_vgname);
+ if(h5cgroup_name == NULL && check_vgname ==0 ) {
+ printf("error,cannot find group\n");
+ Vdetach(vgroup_cid);
+ return FAIL;
+ }
+
+ if(h5cgroup_name == NULL && check_vgname == -1 ) {
+ printf("error,group name is not defined.\n");
+ Vdetach(vgroup_cid);
+ return FAIL;
+ }
+
+ /* the vgroup has already been converted: create a hard link to it. */
+
+
+ h5lgroup_name = get_obj_aboname(cor_cvgroupname,refstr,h5pgroup_name,
+ HDF4_VGROUP);
+ if(h5lgroup_name == NULL) {
+ printf("failed to obtain group name.\n");
+ Vdetach(vgroup_cid);
+ free(h5cgroup_name);
+ free(cor_cvgroupname);
+ return FAIL;
+ }
+ free(cor_cvgroupname);
+ if(H5Glink(h5_pgroup,H5G_LINK_HARD,h5cgroup_name,h5lgroup_name)<0) {
+ printf("cannot make hard link for two groups.\n");
+ Vdetach(vgroup_cid);
+ free(h5cgroup_name);
+ free(h5lgroup_name);
+ return FAIL;
+ }
+ free(h5cgroup_name);
+ free(h5lgroup_name);
+ }
+
+ Vdetach(vgroup_cid);
+ return SUCCEED;
+}
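+
+/* Illustrative sketch only (hypothetical helper, not called anywhere): the
+   "visited object" idiom shared by convert_vgroup above and the
+   convert_vdata/convert_sds/convert_image routines below.  An HDF4 object
+   reached through several vgroups is converted once; each later occurrence
+   becomes an HDF5 hard link to that first conversion, so the data is never
+   duplicated.  link_name is an assumed, already-generated name for the new
+   link. */
+static int example_link_if_visited(hid_t h5_pgroup,int32 obj_ref,
+                                   const char* link_name) {
+
+  char* first_name;
+  int   visited;
+  int   check_name;
+
+  /* lookup() returns 0 when this reference number has not been seen yet;
+     in that case the caller converts the object and records its hdf5 name
+     with set_name(). */
+  visited = lookup(obj_ref,estnum_vg,vg_hashtab);
+  if(visited < 0) return FAIL;
+  if(visited == 0) return 0;
+
+  /* already converted: fetch the hdf5 name recorded earlier and create a
+     second name (hard link) for the same hdf5 object. */
+  first_name = get_name(obj_ref,estnum_vg,vg_hashtab,&check_name);
+  if(first_name == NULL) return FAIL;
+
+  if(H5Glink(h5_pgroup,H5G_LINK_HARD,first_name,link_name)<0) {
+    free(first_name);
+    return FAIL;
+  }
+  free(first_name);
+  return 1;
+}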
+
+/*-------------------------------------------------------------------------
+ * Function: convert_vdata
+ *
+ * Purpose: subroutine interface for better modularity of vgroup_h4_to_h5
+ * In this routine, 1) h5 vdata name is obtained;
+ 2) vdata_h4_to_h5 is called for unvisited
+ vdatas
+ 3) a hard link is created for visited vdatas
+
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: hdf4 file identifier
+ obj_ref: object reference number
+ h5pgroup_name: h5 group name
+ h5_pgroup: hdf5 group id
+
+ *-------------------------------------------------------------------------
+ */
+
+int convert_vdata(int32 file_id,int32 obj_ref,char * h5pgroup_name,
+ hid_t h5_pgroup) {
+
+ int32 vdata_id;
+ int check_vdata;
+ int check_vdname;
+ int32 istat;
+ char refstr[MAXREF_LENGTH];
+ char cvdata_name[VGNAMELENMAX];
+ char* cor_cvdataname;
+ char* h5cvdata_name;
+ char* h5lvdata_name;
+
+ vdata_id = VSattach(file_id,obj_ref,"r");
+ if(vdata_id == FAIL) {
+ printf("error in attaching vdata. \n");
+ return FAIL;
+ }
+
+ if(conv_int_str(obj_ref,refstr)== FAIL) {
+ printf("converting integer into string format.\n");
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+ istat = VSisattr(vdata_id);
+ if (istat == FAIL) {
+ printf("error in checking vdata attribute. \n");
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+ if(istat); /* ignore; dependent vdatas (attributes, etc.) can be retrieved later. */
+
+ else { /* independent vdata, read in */
+
+ check_vdata = lookup(obj_ref,estnum_vd,vd_hashtab);
+
+ if(check_vdata < 0) {
+ printf("failed to look up the object.\n");
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+ if(VSQueryname(vdata_id,cvdata_name)==FAIL) {
+ printf("error in querying name. \n");
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+
+ cor_cvdataname = correct_name(cvdata_name);
+ if(cor_cvdataname == NULL) {
+ printf("error in generating corrected vdata name. \n");
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+ if(check_vdata ==0) {
+ h5cvdata_name = get_obj_aboname(cor_cvdataname,refstr,h5pgroup_name,
+ HDF4_VDATA);
+ if(h5cvdata_name == NULL) {
+ printf("cannot obtain the converted hdf5 dataset name from vdata.\n");
+ VSdetach(vdata_id);
+ free(cor_cvdataname);
+ return FAIL;
+ }
+ free(cor_cvdataname);
+ if(set_name(obj_ref,estnum_vd,vd_hashtab,h5cvdata_name)== FAIL){
+ printf("failed to obtain vdata name.\n");
+ VSdetach(vdata_id);
+ free(h5cvdata_name);
+ return FAIL;
+ }
+
+ if(Vdata_h4_to_h5(file_id,vdata_id,h5_pgroup)==FAIL){
+ printf("failed to transfer vdata into hdf5 dataset.\n");
+ VSdetach(vdata_id);
+ free(h5cvdata_name);
+ return FAIL;
+ }
+ free(h5cvdata_name);
+ }
+
+ else {
+
+ h5cvdata_name = get_name(obj_ref,estnum_vd,vd_hashtab,
+ &check_vdname);
+
+ if(h5cvdata_name == NULL && check_vdname ==0 ){
+ printf("error,cannot find vdata\n");
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+ if(h5cvdata_name == NULL && check_vdname ==-1 ){
+ printf("error,vdata name is not defined.\n");
+ VSdetach(vdata_id);
+ return FAIL;
+ }
+ /* the vdata has already been converted: create a hard link.
+ For the time being, we use the absolute path. */
+
+ h5lvdata_name = get_obj_aboname(cor_cvdataname,refstr,h5pgroup_name,
+ HDF4_VDATA);
+ if(h5lvdata_name == NULL) {
+ printf("error in obtaining vdata name.\n");
+ VSdetach(vdata_id);
+ free(h5cvdata_name);
+ free(cor_cvdataname);
+ return FAIL;
+ }
+ free(cor_cvdataname);
+ if(H5Glink(h5_pgroup,H5G_LINK_HARD,h5cvdata_name,h5lvdata_name)<0){
+ printf("error in creating hardlink for hdf5 dataset");
+ printf(" converted from vdata.\n");
+ VSdetach(vdata_id);
+ free(h5cvdata_name);
+ free(h5lvdata_name);
+ return FAIL;
+ }
+ free(h5cvdata_name);
+ free(h5lvdata_name);
+ }
+ VSdetach(vdata_id);
+ }
+
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: convert_sds
+ *
+ * Purpose: subroutine interface for better modularity of vgroup_h4_to_h5
+ * In this routine, 1) h5 sds name is obtained;
+ 2) sds_h4_to_h5 is called for unvisited
+ sds objects
+ 3) a hard link is created for visited sds objects
+
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ sd_id: hdf4 sds identifier
+ obj_ref: object reference number
+ h5_dimgroup: h5 dimensional scale group id
+ h5_pgroup: hdf5 group id
+
+ *-------------------------------------------------------------------------
+ */
+int convert_sds(int32 file_id,int32 sd_id,int32 obj_ref,char * h5pgroup_name,
+ hid_t h5_pgroup,hid_t h5_dimgroup) {
+
+ int32 sd_index;
+ int32 sds_id;
+ int32 sds_rank;
+ int32 sds_dimsizes[DIM_HASHSIZE];
+ int32 sds_dtype;
+ int32 num_sdsattrs;
+ char sds_name[MAX_NC_NAME];
+ char* cor_sdsname;
+ int check_sds;
+ int check_sdsname;
+ char refstr[MAXREF_LENGTH];
+ char* h5csds_name;
+ char* h5lsds_name;
+
+ sd_index = SDreftoindex(sd_id,obj_ref);
+ if(sd_index == FAIL){
+ printf("error in obtaining reference number of sds.\n");
+ return FAIL;
+ }
+
+ if(conv_int_str(obj_ref,refstr)== FAIL) {
+ printf("error in converting reference number into string type.\n");
+ return FAIL;
+ }
+
+ sds_id = SDselect(sd_id,sd_index);
+
+ if(sds_id == FAIL){
+ printf("error in obtaining sd id.\n");
+ return FAIL;
+ }
+
+ if(SDgetinfo(sds_id,sds_name,&sds_rank,sds_dimsizes,
+ &sds_dtype,&num_sdsattrs)==FAIL) {
+ printf("error in obtaining SD info.\n");
+ SDendaccess(sds_id);
+ return FAIL;
+ }
+
+ /* check whether this sds is touched. */
+ check_sds = lookup(obj_ref,2*num_sds,sds_hashtab);
+
+ cor_sdsname = correct_name(sds_name);
+ if(cor_sdsname == NULL) {
+ printf("error in generating corrected sds name. \n");
+ SDendaccess(sds_id);
+ return FAIL;
+ }
+
+ if(check_sds == 0) {
+
+ /* obtain the absolute name of sds object, deal with the name clashing by
+ looking up things in the "name hashing table".*/
+
+ h5csds_name = get_obj_aboname(cor_sdsname,refstr,h5pgroup_name,HDF4_SDS);
+ if(h5csds_name == NULL) {
+ printf("error in obtaining sds name.\n");
+ SDendaccess(sds_id);
+ free(cor_sdsname);
+ return FAIL;
+ }
+ free(cor_sdsname);
+
+ /* put the absolute path of sds into "hashing table".*/
+ if(set_name(obj_ref,2*num_sds,sds_hashtab,h5csds_name)==FAIL) {
+ printf("error in setting object name.\n");
+ SDendaccess(sds_id);
+ free(h5csds_name);
+ return FAIL;
+ }
+ /* convert the sds object into hdf5 dataset.*/
+ if(Sds_h4_to_h5(file_id,sds_id,h5_pgroup,h5_dimgroup)==FAIL){
+ printf("error in translating sds into hdf5 dataset.\n");
+ SDendaccess(sds_id);
+ free(h5csds_name);
+ return FAIL;
+ }
+ free(h5csds_name);
+ }
+ else {
+ /* if the object has been touched, create a hard link instead.*/
+ h5csds_name = get_name(obj_ref,2*num_sds,sds_hashtab,&check_sdsname);
+ if(h5csds_name == NULL) {
+ printf("error in getting sds name \n");
+ SDendaccess(sds_id);
+ return FAIL;
+ }
+ /* the sds has already been converted: create a hard link.
+ For the time being, we use the absolute path. */
+ h5lsds_name = get_obj_aboname(cor_sdsname,refstr,h5pgroup_name,
+ HDF4_SDS);
+ if(h5lsds_name == NULL) {
+ printf("error in getting sds link name.\n");
+ SDendaccess(sds_id);
+ free(h5csds_name);
+ free(cor_sdsname);
+ return FAIL;
+ }
+ free(cor_sdsname);
+ if(H5Glink(h5_pgroup,H5G_LINK_HARD,h5csds_name,h5lsds_name) <0) {
+ printf("error in getting hard link \n");
+ SDendaccess(sds_id);
+ free(h5csds_name);
+ free(h5lsds_name);
+ return FAIL;
+ }
+
+ free(h5csds_name);
+ free(h5lsds_name);
+ }
+ SDendaccess(sds_id);
+ return SUCCEED;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: convert_image
+ *
+ * Purpose: subroutine interface for better modularity of vgroup_h4_to_h5
+ * In this routine, 1) the h5 image name is obtained;
+ 2) image_h4_to_h5 is called for unvisited
+ images;
+ 3) a hard link is created for visited images
+
+ * Return: FAIL if failed, SUCCEED if successful.
+ *
+ * In :
+ file_id: hdf4 file identifier
+ obj_ref: object reference number
+ h5pgroup_name: h5 group name
+ h5_pgroup: hdf5 group id
+ h5_palgroup: hdf5 palette group id
+
+ *-------------------------------------------------------------------------
+ */
+int convert_image(int32 file_id,int32 obj_ref,char * h5pgroup_name,
+ hid_t h5_pgroup,hid_t h5_palgroup) {
+
+ int32 gr_id;
+ int32 gr_index;
+ int32 ri_id;
+ int32 istat;
+ char* h5cimage_name;
+ char* h5limage_name;
+ char refstr[MAXREF_LENGTH];
+ char image_name[MAX_GR_NAME];
+ char* cor_imagename;
+
+ int check_imagename;
+ int check_image;
+
+
+ gr_id = GRstart(file_id);
+ if(gr_id == FAIL) {
+ printf("error in obtaining gr id. \n");
+ return FAIL;
+ }
+
+ if(conv_int_str(obj_ref,refstr)== FAIL) {
+ printf("converting integer into string format.\n");
+ return FAIL;
+ }
+
+ gr_index= GRreftoindex(gr_id,obj_ref);
+ if(gr_index == FAIL) {
+ printf("error in getting gr index.\n");
+ return FAIL;
+ }
+
+ ri_id = GRselect(gr_id,gr_index);
+ if(ri_id == FAIL) {
+ printf("error in selecting gr interface.\n");
+ return FAIL;
+ }
+
+ istat = GRgetiminfo(ri_id, image_name, NULL, NULL, NULL, NULL, NULL);
+
+ if(istat == FAIL) {
+ GRendaccess(ri_id);
+ printf("error in getting GR images.\n");
+ return FAIL;
+ }
+
+ /* checking whether image name contains ORI_SLASH,
+ changing into CHA_SLASH.*/
+
+ cor_imagename = correct_name(image_name);
+ if(cor_imagename == NULL) {
+ printf("error in generating corrected image name. \n");
+ GRendaccess(ri_id);
+ return FAIL;
+ }
+
+ /* check whether this image is touched. */
+ check_image = lookup(obj_ref,2*num_images,gr_hashtab);
+
+ if(check_image == 0) {
+
+ /* obtain the absolute name of image object, deal with the name clashing by
+ looking up things in the "name hashing table".*/
+
+ h5cimage_name = get_obj_aboname(cor_imagename,refstr,h5pgroup_name,
+ HDF4_IMAGE);
+ if(h5cimage_name == NULL) {
+ printf("error in getting image name.\n");
+ GRendaccess(ri_id);
+ free(cor_imagename);
+ return FAIL;
+ }
+ free(cor_imagename);
+
+ if(set_name(obj_ref,2*num_images,gr_hashtab,h5cimage_name)==FAIL) {
+ printf("error setting image name.\n");
+ GRendaccess(ri_id);
+ free(h5cimage_name);
+ return FAIL;
+ }
+ if(Image_h4_to_h5(file_id,ri_id,h5_pgroup,h5_palgroup)==FAIL) {
+ printf("error in transferring image name into hdf5 dataset.\n");
+ GRendaccess(ri_id);
+ free(h5cimage_name);
+ return FAIL;
+ }
+ free(h5cimage_name);
+ }
+
+ else{
+
+ /*if the object is visited, create HL. */
+
+ h5cimage_name = get_name(obj_ref,2*num_images,gr_hashtab,
+ &check_imagename);
+
+ if(h5cimage_name == NULL) {
+ printf("error in getting image name into hdf5 dataset.\n");
+ GRendaccess(ri_id);
+ free(h5cimage_name);
+ return FAIL;
+ }
+ h5limage_name = get_obj_aboname(cor_imagename,refstr,h5pgroup_name,
+ HDF4_IMAGE);
+
+ if(h5limage_name == NULL) {
+ printf("error in getting link image name into hdf5 dataset.\n");
+ GRendaccess(ri_id);
+ free(h5cimage_name);
+ free(cor_imagename);
+ return FAIL;
+ }
+ free(cor_imagename);
+
+ if(H5Glink(h5_pgroup,H5G_LINK_HARD,h5cimage_name,h5limage_name)<0){
+ printf("error in linking two groups.\n");
+ GRendaccess(ri_id);
+ free(h5cimage_name);
+ free(h5limage_name);
+ return FAIL;
+ }
+ free(h5cimage_name);
+ free(h5limage_name);
+ }
+
+ GRendaccess(ri_id);
+ /* for efficiency reasons GRend is not called here:
+ GRend(gr_id); */
+
+ return SUCCEED;
+}
+
+
+
+
+
diff --git a/tools/h4toh5/testh4toh5.sh b/tools/h4toh5/testh4toh5.sh
new file mode 100755
index 0000000..c2a196e
--- /dev/null
+++ b/tools/h4toh5/testh4toh5.sh
@@ -0,0 +1,249 @@
+#!/bin/sh
+#
+# Copyright (C) 2001 National Center for Supercomputing Applications.
+# All rights reserved.
+#
+# Test script for the h4toh5 tests.
+# Use the h4toh5 converter to convert a pre-created hdf file to
+# an hdf5 file (output file), then compare it with a pre-created
+# corresponding hdf5 file (expected file).
+# If the same, that particular test passes.
+# If not the same, the output file and expected file are processed
+# by the h5dump tool to see if they produce the same results.
+# If the same, the test passes.
+# If not, show the difference of the two results and report the test failed.
+#
+# By default, h5dump is the one just built. It can be overridden
+# by setting $H5DUMP to a different value such as /usr/local/bin/h5dump.
+
+H4TOH5=h4toh5 # The tool name
+H4TOH5_BIN=`pwd`/$H4TOH5 # The path of the tool binary
+
+CMP='cmp -s'
+DIFF='diff -c'
+
+RM='rm -f'
+SED='sed '
+H5DUMP=${H5DUMP:-`pwd`/'../h5dump/h5dump'} # Default to use the h5dumper
+ # in the same tools directory
+
+# Verify if $H5DUMP is a valid command.
+tmpfile=/tmp/testh4toh5.$$
+$H5DUMP -V > $tmpfile
+if test -s "$tmpfile"; then
+ :
+else
+ echo " Could not run the '$H5DUMP' command. The test can still proceed"
+ echo " but it may fail if '$H5DUMP' is needed to verify the output."
+ echo " You can make sure '$H5DUMP' is among your shell PATH and run"
+ echo " the test again. You may also visit http://hdf.ncsa.uiuc.edu"
+ echo " or email hdfhelp@ncsa.uiuc.edu for more information."
+ H5DUMP=:
+fi
+$RM $tmpfile
+
+# The build (current) directory might be different than the source directory.
+if test -z "$srcdir"; then
+ srcdir=.
+fi
+
+mkdir ../testfiles >/dev/null 2>&1
+
+SRCDIR="$srcdir/../testfiles"
+OUTDIR="../testfiles/Results"
+
+test -d "$OUTDIR" || mkdir $OUTDIR
+
+nerrors=0
+verbose=yes
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" |cut -c1-70 |tr -d '\012'
+}
+
+# Run a test and print PASS or *FAIL*. If a test fails then increment
+# the `nerrors' global variable and (if $verbose is set) display the
+# difference between the actual and the expected hdf5 files. The
+# expected hdf5 files are in the testfiles/Expected directory.
+# The actual hdf5 file is not removed if $HDF5_NOCLEANUP is set to a non-null
+# value.
+CONVERT() {
+ # Run h4toh5 convert.
+ TESTING $H4TOH5 $@
+
+ #
+ # Set up arguments to run the conversion test.
+ # The converter assumes all hdf4 files have the .hdf suffix, as in the form
+ # of foo.hdf. It creates the corresponding hdf5 files with the .h5 suffix,
+ # as in the form of foo.h5. One exception is that if exactly two file
+ # names are given, it treats the first argument as an hdf4 file and creates
+ # the corresponding hdf5 file named as the second argument, WITHOUT
+ # any consideration of the suffix. (For this test script, in order to
+ # match the output hdf5 file with the expected hdf5 file, it expects the
+ # second file of the two-file tests to have the .h5 suffix too.)
+ #
+ # If SRCDIR != OUTDIR, need to copy the input hdf4 files from the SRCDIR
+ # to the OUTDIR and transform the input file pathname because of the suffix
+ # convention mentioned above. This way, the hdf5 files are always created
+ # in the OUTDIR directory.
+ #
+
+ INFILES=""
+ OUTFILES=""
+ MULTIRUN=""
+
+ case "$1" in
+ -m) # multiple files conversion
+ MULTIRUN="-m"
+ shift
+ for f in $*
+ do
+ if test "$SRCDIR" != "$OUTDIR"; then
+ cp $SRCDIR/$f $OUTDIR/$f
+ fi
+ INFILES="$INFILES $f"
+ OUTFILES="$OUTFILES `basename $f .hdf`.h5"
+ shift
+ done
+ ;;
+
+ *) # Single file conversion
+ case $# in
+ 1) if test "$SRCDIR" != "$OUTDIR"; then
+ cp $SRCDIR/$1 $OUTDIR/$1
+ fi
+ INFILES="$1"
+ OUTFILES="`basename $1 .hdf`.h5"
+ ;;
+
+ 2) # hdf4 file specified
+ if test "$SRCDIR" != "$OUTDIR"; then
+ cp $SRCDIR/$1 $OUTDIR/$1
+ fi
+ INFILES="$1"
+ OUTFILES="$2"
+ ;;
+
+ *) # Illegal
+ echo "Illegal arguments"
+ exit 1
+ ;;
+ esac
+ ;;
+ esac
+
+ # run the conversion and remove input files that have been copied over
+ (
+ cd $OUTDIR
+ $H4TOH5_BIN $INFILES $OUTFILES 2>/dev/null
+ if test "$SRCDIR" != "$OUTDIR"; then
+ $RM $INFILES
+ fi
+ )
+
+ # Verify results
+ result="passed"
+ for f in $OUTFILES
+ do
+ if $CMP $SRCDIR/Expected/$f $OUTDIR/$f
+ then
+ :
+ else
+ # Use h5dump to dump the files and verify the output.
+ outfile=`basename $f .h5`
+ expect_out=$outfile.expect
+ actual_out=$outfile.actual
+
+ (cd $SRCDIR/Expected
+ $H5DUMP $outfile.h5 ) > $expect_out
+ (cd $OUTDIR
+ $H5DUMP $outfile.h5 ) > $actual_out
+
+ if [ "passed" = $result -a ! -s $actual_out ] ; then
+ echo "*FAILED*"
+ nerrors="`expr $nerrors + 1`"
+ result=failed
+ test yes = "$verbose" &&
+ echo " H5DUMP failed to produce valid output"
+ elif $CMP $expect_out $actual_out; then
+ :
+ else
+ if test "passed" = $result; then
+ echo "*FAILED*"
+ nerrors="`expr $nerrors + 1`"
+ result=failed
+ fi
+ test yes = "$verbose" &&
+ echo " Actual result (*.actual) differs from expected result (*.expect)" &&
+ $DIFF $expect_out $actual_out |sed 's/^/ /'
+ fi
+ fi
+
+ # Clean up output file
+ if test -z "$HDF5_NOCLEANUP"; then
+ $RM $expect_out $actual_out
+ $RM $OUTDIR/$f
+ fi
+ done
+ if test "passed" = "$result"; then
+ echo " PASSED"
+ fi
+}
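+
+# Usage sketch for CONVERT (illustrative only; foo.hdf and bar.h5 are
+# placeholder names, not shipped test files):
+#
+#   CONVERT foo.hdf              # output name defaults to foo.h5
+#   CONVERT foo.hdf bar.h5       # output name given explicitly
+#   CONVERT -m foo.hdf bar.hdf   # convert several hdf4 files in one run
+#
+# Every form compares the converted file against the one under
+# $SRCDIR/Expected and, if the binaries differ, falls back to comparing
+# the h5dump output of the two files.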
+
+##############################################################################
+##############################################################################
+### T H E T E S T S ###
+##############################################################################
+##############################################################################
+
+$RM $OUTDIR/*.hdf $OUTDIR/*.tmp
+
+#
+# The HDF5 filenames are created based upon the HDF4 filenames
+# without the extension.
+#
+
+# test for converting H5 groups to H4 Vgroups.
+#CONVERT vg.hdf
+
+
+#
+# The tests for conversion are the same as above, with the only difference
+# being that the HDF5 filenames are given explicitly.
+#
+
+$RM $OUTDIR/*.tmp
+CONVERT anno_test.hdf anno_test.h5
+CONVERT gr_typ_test.hdf gr_typ_test.h5
+CONVERT grnameclash_test.hdf grnameclash_test.h5
+CONVERT image_attr_test.hdf image_attr_test.h5
+#CONVERT image_maxsize.hdf image_maxsize.h5
+CONVERT ras_24_test.hdf ras_24_test.h5
+CONVERT ras_8_test.hdf ras_8_test.h5
+CONVERT sds_attr_test.hdf sds_attr_test.h5
+CONVERT sds_dim_test.hdf sds_dim_test.h5
+CONVERT sds_typ_test.hdf sds_typ_test.h5
+CONVERT sdsnameclash_test.hdf sdsnameclash_test.h5
+CONVERT vdata_test.hdf vdata_test.h5
+CONVERT vdnameclash_test.hdf vdnameclash_test.h5
+CONVERT vg_hl_test.hdf vg_hl_test.h5
+CONVERT vg_loop_test.hdf vg_loop_test.h5
+CONVERT vgnameclash_test.hdf vgnameclash_test.h5
+CONVERT vg_all_test.hdf vg_all_test.h5
+#
+# Again, the tests for conversion are the same as the first set of tests.
+# Here, multiple conversions are done on HDF4 files at one time.
+#
+
+$RM $OUTDIR/*.hdf $OUTDIR/*.tmp
+#CONVERT -m vg.hdf
+
+if test $nerrors -eq 0 ; then
+ echo "All h4toh5 tests passed."
+fi
+
+$RM -r $OUTDIR
+exit $nerrors
diff --git a/tools/h5dump/Dependencies b/tools/h5dump/Dependencies
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/h5dump/Dependencies
diff --git a/tools/h5dump/Makefile.in b/tools/h5dump/Makefile.in
new file mode 100644
index 0000000..6de1b7c
--- /dev/null
+++ b/tools/h5dump/Makefile.in
@@ -0,0 +1,65 @@
+## HDF5 Library Makefile(.in)
+##
+## Copyright (C) 2001 National Center for Supercomputing Applications.
+## All rights reserved.
+##
+##
+top_srcdir=@top_srcdir@
+top_builddir=../..
+srcdir=@srcdir@
+SUBDIRS=
+@COMMENCE@
+
+## Add include directory to the C preprocessor flags, add -lh5tools and
+## -lhdf5 to the list of libraries.
+##
+CPPFLAGS=-I. -I$(srcdir) -I$(top_builddir)/src -I$(top_srcdir)/src \
+ -I$(top_srcdir)/tools/lib @CPPFLAGS@
+
+## Test programs and scripts.
+##
+TEST_PROGS=
+TEST_SCRIPTS=$(srcdir)/testh5dump.sh
+
+## These are our main targets: library and tools.
+##
+LIBTOOLS=../lib/libh5tools.la
+LIBHDF5=$(top_builddir)/src/libhdf5.la
+
+PUB_PROGS=h5dump
+PROGS=$(PUB_PROGS) $(TEST_PROGS) h5dumptst
+
+## Source and object files for the library; do not install
+##
+LIB_SRC=
+LIB_OBJ=$(LIB_SRC:.c=.lo)
+PUB_LIB=
+
+## Source and object files for programs...
+##
+PROG_SRC=h5dump.c h5dumptst.c
+PROG_OBJ=$(PROG_SRC:.c=.lo)
+
+PRIVATE_HDR=h5dump.h
+
+## Source and object files for the tests
+##
+TEST_SRC=
+TEST_OBJ=$(TEST_SRC:.c=.lo)
+
+## Programs have to be built before they can be tested!
+##
+check test _test: $(PROGS)
+
+## How to build the programs...They all depend on the hdf5 library and
+## the tools library compiled in this directory.
+##
+$(PROGS): $(LIBTOOLS) $(LIBHDF5)
+
+h5dump: h5dump.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ h5dump.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+h5dumptst: h5dumptst.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ h5dumptst.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+@CONCLUDE@
diff --git a/tools/h5dump/h5dump.h b/tools/h5dump/h5dump.h
new file mode 100644
index 0000000..70c4178
--- /dev/null
+++ b/tools/h5dump/h5dump.h
@@ -0,0 +1,22 @@
+/*
+ * Copyright (C) 1998-2001 National Center for Supercomputing Applications
+ * All rights reserved.
+ *
+ */
+#ifndef H5DUMP_H__
+#define H5DUMP_H__
+
+#include "hdf5.h"
+
+#define H5DUMP_MAX_RANK H5S_MAX_RANK
+
+#define begin_obj(obj,name,begin) \
+ if (name) \
+ printf("%s \"%s\" %s\n", (obj), (name), (begin)); \
+ else \
+ printf("%s %s\n", (obj), (begin));
+
+#define end_obj(obj,end) \
+ printf("%s %s\n", (end), (obj));
+
+#endif /* !H5DUMP_H__ */
diff --git a/tools/h5dump/h5dumptst.c b/tools/h5dump/h5dumptst.c
new file mode 100644
index 0000000..4e01627
--- /dev/null
+++ b/tools/h5dump/h5dumptst.c
@@ -0,0 +1,2577 @@
+/*
+ * Generate the binary hdf5 files for the h5dump tests.
+ * Usage: just executing the program without any arguments will
+ * generate all the binary hdf5 files in the local directory.
+ *
+ * If you regenerate the test files (e.g., changing some code,
+ * trying it on a new platform, ...), you need to verify the correctness
+ * of the expected output and update the corresponding *.ddl files.
+ */
+#include <limits.h>
+#include "hdf5.h"
+#include <H5private.h>
+
+#define FILE1 "tgroup.h5"
+#define FILE2 "tdset.h5"
+#define FILE3 "tattr.h5"
+#define FILE4 "tslink.h5"
+#define FILE5 "thlink.h5"
+#define FILE6 "tcompound.h5"
+#define FILE7 "tall.h5"
+#define FILE8 "tdset2.h5"
+#define FILE9 "tcompound2.h5"
+#define FILE10 "tloop.h5"
+#define FILE11 "tloop2.h5"
+#define FILE12 "tmany.h5"
+#define FILE13 "tstr.h5"
+#define FILE14 "tstr2.h5"
+#define FILE15 "tenum.h5"
+#define FILE16 "tobjref.h5"
+#define FILE17 "tdatareg.h5"
+#define FILE18 "tnestedcomp.h5"
+#define FILE19 "topaque.h5"
+#define FILE20 "tbitfields.h5"
+#define FILE21 "tvldtypes1.h5"
+#define FILE22 "tvldtypes2.h5"
+#define FILE23 "tvldtypes3.h5"
+#define FILE24 "tvldtypes4.h5"
+#define FILE25 "tarray1.h5"
+#define FILE26 "tarray2.h5"
+#define FILE27 "tarray3.h5"
+#define FILE28 "tarray4.h5"
+#define FILE29 "tarray5.h5"
+#define FILE30 "tarray6.h5"
+#define FILE31 "tarray7.h5"
+#define FILE32 "tempty.h5"
+
+#define LENSTR 50
+#define LENSTR2 11
+
+#define SPACE2_RANK 2
+#define SPACE2_DIM1 10
+#define SPACE2_DIM2 10
+
+#define SPACE1_RANK 1
+#define SPACE1_DIM1 4
+
+/* Element selection information */
+#define POINT1_NPOINTS 10
+
+typedef enum{
+ RED,
+ GREEN,
+ BLUE,
+ WHITE,
+ BLACK
+} enumtype;
+
+/* Compound datatype */
+typedef struct s1_t {
+ unsigned int a;
+ unsigned int b;
+ float c;
+} s1_t;
+
+
+/* 1-D array datatype */
+#define ARRAY1_RANK 1
+#define ARRAY1_DIM1 4
+
+/* 3-D array datatype */
+#define ARRAY2_RANK 3
+#define ARRAY2_DIM1 3
+#define ARRAY2_DIM2 4
+#define ARRAY2_DIM3 5
+
+/* 2-D array datatype */
+#define ARRAY3_RANK 2
+#define ARRAY3_DIM1 6
+#define ARRAY3_DIM2 3
+
+static void test_group(void)
+{
+ hid_t fid, group;
+
+ fid = H5Fcreate(FILE1, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* / */
+ group = H5Gcreate (fid, "/g1", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g2", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g3", 0);
+ H5Gclose(group);
+
+ /* /g1 */
+ group = H5Gcreate (fid, "/g1/g1.1", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g1/g1.2", 0);
+ H5Gclose(group);
+
+ /* /g2 */
+ group = H5Gcreate (fid, "/g2/g2.1", 0);
+ H5Gclose(group);
+
+ /* /g3 */
+ group = H5Gcreate (fid, "/g3/g3.1", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g3/g3.2", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g3/g3.3", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g3/g3.4", 0);
+ H5Gclose(group);
+
+ /* /g2/g2.1 */
+ group = H5Gcreate (fid, "/g2/g2.1/g2.1.1", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g2/g2.1/g2.1.2", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g2/g2.1/g2.1.3", 0);
+ H5Gclose(group);
+
+ H5Fclose(fid);
+}
+
+static void test_dataset(void)
+{
+ hid_t fid, dataset, space;
+ hsize_t dims[2];
+ int dset1[10][20];
+ double dset2[30][20];
+ int i, j;
+
+ fid = H5Fcreate(FILE2, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* dset1 */
+ dims[0] = 10; dims[1] = 20;
+ space = H5Screate_simple(2, dims, NULL);
+ dataset = H5Dcreate(fid, "/dset1", H5T_STD_I32BE, space, H5P_DEFAULT);
+
+ for (i = 0; i < 10; i++)
+ for (j = 0; j < 20; j++)
+ dset1[i][j] = j+i;
+
+ H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset1);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ /* dset2 */
+ dims[0] = 30; dims[1] = 20;
+ space = H5Screate_simple(2, dims, NULL);
+ dataset = H5Dcreate(fid, "/dset2", H5T_IEEE_F64BE, space, H5P_DEFAULT);
+
+ for (i = 0; i < 30; i++)
+ for (j = 0; j < 20; j++)
+ dset2[i][j] = 0.0001*j+i;
+
+ H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2);
+
+ H5Sclose(space);
+ H5Dclose(dataset);
+ H5Fclose(fid);
+}
+
+static void test_dataset2(void)
+{
+ hid_t fid, dataset, space, create_plist;
+ hsize_t dims[2];
+ hsize_t maxdims[2];
+ int dset1[10][20];
+ double dset2[30][10];
+ int i, j;
+
+ fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ create_plist = H5Pcreate(H5P_DATASET_CREATE);
+ dims[0] = 5; dims[1] = 5;
+ H5Pset_chunk(create_plist, 2, dims);
+
+ /* dset1 */
+ dims[0] = 10; dims[1] = 20;
+ maxdims[0] = H5S_UNLIMITED; maxdims[1] = 20;
+ space = H5Screate_simple(2, dims, maxdims);
+ dataset = H5Dcreate(fid, "/dset1", H5T_STD_I32BE, space, create_plist);
+
+ for (i = 0; i < 10; i++)
+ for (j = 0; j < 20; j++)
+ dset1[i][j] = j;
+
+ H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset1);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ /* dset2 */
+ dims[0] = 30; dims[1] = 10;
+ maxdims[0] = 30; maxdims[1] = H5S_UNLIMITED;
+ space = H5Screate_simple(2, dims, maxdims);
+ dataset = H5Dcreate(fid, "/dset2", H5T_IEEE_F64BE, space, create_plist);
+
+ for (i = 0; i < 30; i++)
+ for (j = 0; j < 10; j++)
+ dset2[i][j] = j;
+
+ H5Dwrite(dataset, H5T_NATIVE_DOUBLE, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2);
+
+ H5Sclose(space);
+ H5Dclose(dataset);
+ H5Fclose(fid);
+}
+
+
+static void test_attribute(void)
+{
+ hid_t fid, root, space, attr, type;
+ hsize_t dims[2];
+ char buf[60];
+ int i, data[10];
+ double d[10];
+ char string[]= "string attribute";
+ int point = 100;
+
+ fid = H5Fcreate(FILE3, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ root = H5Gopen (fid, "/");
+
+ /* attribute 1 */
+ dims[0] = 24;
+ space = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (root, "attr1", H5T_STD_I8BE, space, H5P_DEFAULT);
+ sprintf(buf, "attribute of root group");
+ H5Awrite(attr, H5T_NATIVE_SCHAR, buf);
+ H5Sclose(space);
+ H5Aclose(attr);
+
+ /* attribute 2 */
+ dims[0] = 10;
+ space = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (root, "attr2", H5T_STD_I32BE, space, H5P_DEFAULT);
+
+ for (i = 0; i < 10; i++) data[i] = i+1;
+
+ H5Awrite(attr, H5T_NATIVE_INT, data);
+ H5Sclose(space);
+ H5Aclose(attr);
+
+ /* attribute 3 */
+ dims[0] = 10;
+ space = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (root, "attr3", H5T_IEEE_F64BE, space, H5P_DEFAULT);
+
+ for (i = 0; i < 10; i++) d[i] = 0.1 * i;
+
+ H5Awrite(attr, H5T_NATIVE_DOUBLE, d);
+ H5Sclose(space);
+ H5Aclose(attr);
+
+ /* attribute 4 */
+ space = H5Screate(H5S_SCALAR);
+ attr = H5Acreate (root, "attr4", H5T_STD_I32BE, space, H5P_DEFAULT);
+ H5Awrite(attr, H5T_NATIVE_INT, &point);
+ H5Sclose(space);
+ H5Aclose(attr);
+
+ /* attribute 5 */
+ space = H5Screate(H5S_SCALAR);
+ type = H5Tcopy(H5T_C_S1);
+ H5Tset_size(type, 17);
+ attr = H5Acreate (root, "attr5", type, space, H5P_DEFAULT);
+ H5Awrite(attr, type, string);
+
+ H5Tclose(type);
+ H5Sclose(space);
+ H5Aclose(attr);
+ H5Gclose(root);
+ H5Fclose(fid);
+}
+
+static void test_softlink(void)
+{
+ hid_t fid, root;
+
+ fid = H5Fcreate(FILE4, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ root = H5Gopen (fid, "/");
+ H5Glink (root, H5G_LINK_SOFT, "somevalue", "slink1");
+ H5Glink (root, H5G_LINK_SOFT, "linkvalue", "slink2");
+
+ H5Gclose(root);
+ H5Fclose(fid);
+}
+
+/*
+                  /
+              /   |   \
+          dset1   g1   g2
+                 /  \
+             dset2   g1.1
+                       |
+                     dset3
+
+   the dataset is hardlinked to three names: /dset1, /g1/dset2, and /g1/g1.1/dset3
+   /g2 and /g1/g1.1 are hardlinked to the same object.
+*/
+
+static void test_hardlink(void)
+{
+ hid_t fid, group, dataset, space;
+ hsize_t dim = 5;
+ int i, dset[5];
+
+ fid = H5Fcreate(FILE5, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ space = H5Screate_simple(1, &dim, NULL);
+ dataset = H5Dcreate(fid, "/dset1", H5T_STD_I32BE, space, H5P_DEFAULT);
+
+ for (i = 0; i < 5; i++) dset[i] = i;
+
+ H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ group = H5Gcreate (fid, "/g1", 0);
+ H5Glink (group, H5G_LINK_HARD, "/dset1", "dset2");
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g2", 0);
+ H5Glink (group, H5G_LINK_HARD, "/dset1", "dset3");
+ H5Gclose(group);
+
+ group = H5Gopen(fid, "/g1");
+ H5Glink (group, H5G_LINK_HARD, "/g2", "g1.1");
+ H5Gclose(group);
+ H5Fclose(fid);
+}
+
+/*
+                 /
+        /     |      \      \
+    dset1  group1   type1   type2
+              |
+            dset2
+
+*/
+static void test_compound_dt(void) { /* test compound data type */
+ hid_t fid, group, dataset, space, space3, type, type2;
+ hid_t array_dt;
+ typedef struct {
+ int a;
+ float b;
+ double c;
+ } dset1_t;
+ dset1_t dset1[5];
+
+ typedef struct {
+ int a;
+ float b;
+ } dset2_t;
+ dset2_t dset2[5];
+
+ typedef struct {
+ int a[4];
+ float b[5][6];
+ } dset3_t;
+ dset3_t dset3[3][6];
+
+ typedef struct {
+ int a;
+ float b;
+ } dset4_t;
+ dset4_t dset4[5];
+
+ typedef struct {
+ int a;
+ float b;
+ } dset5_t;
+ dset5_t dset5[5];
+
+ int i, j, k, l, ndims;
+ hsize_t dim[2];
+
+ hsize_t sdim = 5;
+ hsize_t dset3_dim[2];
+
+
+ for (i = 0; i < (int)sdim; i++) {
+ dset1[i].a = i;
+ dset1[i].b = i*i;
+ dset1[i].c = 1./(i+1);
+
+ dset2[i].a = i;
+ dset2[i].b = i+ i*0.1;
+
+ dset4[i].a = i;
+ dset4[i].b = i+3;
+
+ dset5[i].a = i;
+ dset5[i].b = i*0.1;
+ }
+
+
+ fid = H5Fcreate(FILE6, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ space = H5Screate_simple(1, &sdim, NULL);
+
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset1[0]));
+ type2 = H5Tcreate(H5T_COMPOUND, sizeof(dset1[0]));
+ H5Tinsert(type, "a_name", HOFFSET(dset1_t, a), H5T_STD_I32BE);
+ H5Tinsert(type, "b_name", HOFFSET(dset1_t, b), H5T_IEEE_F32BE);
+ H5Tinsert(type, "c_name", HOFFSET(dset1_t, c), H5T_IEEE_F64BE);
+ H5Tinsert(type2, "a_name", HOFFSET(dset1_t, a), H5T_NATIVE_INT);
+ H5Tinsert(type2, "b_name", HOFFSET(dset1_t, b), H5T_NATIVE_FLOAT);
+ H5Tinsert(type2, "c_name", HOFFSET(dset1_t, c), H5T_NATIVE_DOUBLE);
+ dataset = H5Dcreate(fid, "/dset1", type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset1);
+ H5Tclose(type2);
+ H5Tclose(type);
+ H5Dclose(dataset);
+
+ /* shared data type 1 */
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset2_t));
+ H5Tinsert(type, "int_name", HOFFSET(dset2_t, a), H5T_STD_I32BE);
+ H5Tinsert(type, "float_name", HOFFSET(dset2_t, b), H5T_IEEE_F32BE);
+ H5Tcommit(fid, "type1", type);
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset2_t));
+ H5Tinsert(type2, "int_name", HOFFSET(dset2_t, a), H5T_NATIVE_INT);
+ H5Tinsert(type2, "float_name", HOFFSET(dset2_t, b), H5T_NATIVE_FLOAT);
+ group = H5Gcreate (fid, "/group1", 0);
+
+ dataset = H5Dcreate(group, "dset2", type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2);
+ H5Tclose(type2);
+ H5Tclose(type);
+ H5Dclose(dataset);
+
+
+ /* shared data type 2 */
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset3_t));
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset3_t));
+
+ ndims = 1; dim[0] = 4;
+
+ array_dt=H5Tarray_create(H5T_STD_I32BE,ndims,dim,NULL);
+ H5Tinsert(type, "int_array", HOFFSET(dset3_t, a), array_dt);
+ H5Tclose(array_dt);
+
+ array_dt=H5Tarray_create(H5T_NATIVE_INT,ndims,dim,NULL);
+ H5Tinsert(type2, "int_array", HOFFSET(dset3_t, a), array_dt);
+ H5Tclose(array_dt);
+
+ ndims = 2; dim[0] = 5; dim[1] = 6;
+
+ array_dt=H5Tarray_create(H5T_IEEE_F32BE,ndims,dim,NULL);
+ H5Tinsert(type, "float_array", HOFFSET(dset3_t, b), array_dt);
+ H5Tclose(array_dt);
+
+ array_dt=H5Tarray_create(H5T_NATIVE_FLOAT,ndims,dim,NULL);
+ H5Tinsert(type2, "float_array", HOFFSET(dset3_t, b), array_dt);
+ H5Tclose(array_dt);
+
+ H5Tcommit(fid, "type2", type);
+
+
+ dset3_dim[0] = 3; dset3_dim[1] = 6;
+ space3 = H5Screate_simple(2, dset3_dim, NULL);
+ dataset = H5Dcreate(group, "dset3", type, space3, H5P_DEFAULT);
+ for (i = 0; i < (int)dset3_dim[0]; i++) {
+ for (j = 0; j < (int)dset3_dim[1]; j++) {
+ for (k = 0; k < 4; k++)
+ dset3[i][j].a[k] = k+j+i;
+ for (k = 0; k < 5; k++)
+ for (l = 0; l < 6; l++)
+ dset3[i][j].b[k][l] = (k+1)+l+j+i;
+ }
+ }
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset3);
+ H5Sclose(space3);
+ H5Tclose(type);
+ H5Tclose(type2);
+ H5Dclose(dataset);
+
+ /* shared data type 3 */
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset4_t));
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset4_t));
+ H5Tinsert(type, "int", HOFFSET(dset4_t, a), H5T_STD_I32BE);
+ H5Tinsert(type, "float", HOFFSET(dset4_t, b), H5T_IEEE_F32BE);
+ H5Tcommit(group, "type3", type);
+ H5Tinsert(type2, "int", HOFFSET(dset4_t, a), H5T_NATIVE_INT);
+ H5Tinsert(type2, "float", HOFFSET(dset4_t, b), H5T_NATIVE_FLOAT);
+ dataset = H5Dcreate(group, "dset4", type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset4);
+
+ H5Tclose(type);
+ H5Tclose(type2);
+ H5Dclose(dataset);
+ H5Gclose(group);
+
+
+ /* unnamed data type */
+ group = H5Gcreate (fid, "/group2", 0);
+
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset5_t));
+ H5Tinsert(type, "int", HOFFSET(dset5_t, a), H5T_STD_I32BE);
+ H5Tinsert(type, "float", HOFFSET(dset5_t, b), H5T_IEEE_F32BE);
+ H5Tcommit(group, "type4", type);
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset5_t));
+ H5Tinsert(type2, "int", HOFFSET(dset5_t, a), H5T_NATIVE_INT);
+ H5Tinsert(type2, "float", HOFFSET(dset5_t, b), H5T_NATIVE_FLOAT);
+ dataset = H5Dcreate(group, "dset5", type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset5);
+
+ H5Gunlink(group,"type4");
+
+ H5Tclose(type);
+ H5Tclose(type2);
+ H5Dclose(dataset);
+ H5Sclose(space);
+ H5Gclose(group);
+
+ H5Fclose(fid);
+
+}
+
+/*
+                 /
+        /     |      \      \
+    dset1  group1   type1   type2
+              |
+            dset2
+
+*/
+static void test_compound_dt2(void) { /* test compound data type */
+ hid_t fid, group, dataset, space, type, create_plist, type2;
+ hid_t array_dt;
+
+ typedef struct {
+ int a;
+ float b;
+ double c;
+ } dset1_t;
+ dset1_t dset1[10];
+
+ typedef struct {
+ int a;
+ float b;
+ } dset2_t;
+ dset2_t dset2[10];
+
+ typedef struct {
+ int a[4];
+ float b[5][6];
+ } dset3_t;
+
+ typedef struct {
+ int a;
+ float b;
+ } dset4_t;
+ dset4_t dset4[10];
+
+ typedef struct {
+ int a;
+ float b;
+ } dset5_t;
+ dset5_t dset5[10];
+
+ int i, ndims;
+ const int perm[2]={0,1};
+ hsize_t dim[2];
+
+ hsize_t sdim, maxdim;
+
+ sdim = 10;
+ for (i = 0; i < (int)sdim; i++) {
+ dset1[i].a = i;
+ dset1[i].b = i*i;
+ dset1[i].c = 1./(i+1);
+
+ dset2[i].a = i;
+ dset2[i].b = i+ i*0.1;
+
+ dset4[i].a = i;
+ dset4[i].b = i*1.0;
+
+ dset5[i].a = i;
+ dset5[i].b = i*1.0;
+ }
+
+ fid = H5Fcreate(FILE9, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ create_plist = H5Pcreate(H5P_DATASET_CREATE);
+
+ sdim = 2;
+ H5Pset_chunk(create_plist, 1, &sdim);
+
+ sdim = 6;
+ maxdim = H5S_UNLIMITED;
+
+ space = H5Screate_simple(1, &sdim, &maxdim);
+
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset1[0]));
+
+ H5Tinsert(type, "a_name", HOFFSET(dset1_t, a), H5T_STD_I32BE);
+ H5Tinsert(type, "b_name", HOFFSET(dset1_t, b), H5T_IEEE_F32BE);
+ H5Tinsert(type, "c_name", HOFFSET(dset1_t, c), H5T_IEEE_F64BE);
+
+ dataset = H5Dcreate(fid, "/dset1", type, space, create_plist);
+
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset1[0]));
+
+ H5Tinsert(type2, "a_name", HOFFSET(dset1_t, a), H5T_NATIVE_INT);
+ H5Tinsert(type2, "b_name", HOFFSET(dset1_t, b), H5T_NATIVE_FLOAT);
+ H5Tinsert(type2, "c_name", HOFFSET(dset1_t, c), H5T_NATIVE_DOUBLE);
+
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset1);
+
+ H5Tclose(type);
+ H5Tclose(type2);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ sdim = 6;
+ maxdim = 10;
+
+ space = H5Screate_simple(1, &sdim, &maxdim);
+
+ /* shared data type 1 */
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset2_t));
+ H5Tinsert(type, "int_name", HOFFSET(dset2_t, a), H5T_STD_I32BE);
+ H5Tinsert(type, "float_name", HOFFSET(dset2_t, b), H5T_IEEE_F32BE);
+ H5Tcommit(fid, "type1", type);
+
+ group = H5Gcreate (fid, "/group1", 0);
+
+ dataset = H5Dcreate(group, "dset2", type, space, create_plist);
+
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset2_t));
+ H5Tinsert(type2, "int_name", HOFFSET(dset2_t, a), H5T_NATIVE_INT);
+ H5Tinsert(type2, "float_name", HOFFSET(dset2_t, b), H5T_NATIVE_FLOAT);
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2);
+
+ H5Tclose(type);
+ H5Tclose(type2);
+ H5Dclose(dataset);
+
+
+ /* shared data type 2 */
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset3_t));
+
+ ndims = 1; dim[0] = 4;
+ array_dt=H5Tarray_create(H5T_STD_I32BE,ndims,dim,perm);
+ H5Tinsert(type, "int_array", HOFFSET(dset3_t, a), array_dt);
+ H5Tclose(array_dt);
+
+ ndims = 2; dim[0] = 5; dim[1] = 6;
+ array_dt=H5Tarray_create(H5T_IEEE_F32BE,ndims,dim,perm);
+ H5Tinsert(type, "float_array", HOFFSET(dset3_t, b), array_dt);
+ H5Tclose(array_dt);
+
+ H5Tcommit(fid, "type2", type);
+ H5Tclose(type);
+
+ /* shared data type 3 */
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset4_t));
+ H5Tinsert(type, "int", HOFFSET(dset4_t, a), H5T_STD_I32BE);
+ H5Tinsert(type, "float", HOFFSET(dset4_t, b), H5T_IEEE_F32BE);
+ H5Tcommit(group, "type3", type);
+
+ dataset = H5Dcreate(group, "dset4", type, space, create_plist);
+
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset4_t));
+ H5Tinsert(type2, "int", HOFFSET(dset4_t, a), H5T_NATIVE_INT);
+ H5Tinsert(type2, "float", HOFFSET(dset4_t, b), H5T_NATIVE_FLOAT);
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset4);
+
+ H5Tclose(type);
+ H5Tclose(type2);
+ H5Dclose(dataset);
+ H5Gclose(group);
+
+
+ /* unnamed data type */
+ group = H5Gcreate (fid, "/group2", 0);
+
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset5_t));
+ H5Tinsert(type, "int", HOFFSET(dset5_t, a), H5T_STD_I32BE);
+ H5Tinsert(type, "float", HOFFSET(dset5_t, b), H5T_IEEE_F32BE);
+ H5Tcommit(group, "type4", type);
+ dataset = H5Dcreate(group, "dset5", type, space, create_plist);
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset5_t));
+ H5Tinsert(type2, "int", HOFFSET(dset5_t, a), H5T_NATIVE_INT);
+ H5Tinsert(type2, "float", HOFFSET(dset5_t, b), H5T_NATIVE_FLOAT);
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset5);
+
+ H5Gunlink(group,"type4");
+
+ H5Tclose(type);
+ H5Tclose(type2);
+ H5Dclose(dataset);
+ H5Sclose(space);
+ H5Gclose(group);
+
+ H5Fclose(fid);
+
+}
+
+
+/*
+
+/ : g1 g2 attr1 attr2
+g1 : g1.1 g1.2
+g1.1 : dset1.1.1(attr1, attr2) dset1.1.2
+g1.2 : g1.2.1
+g1.2.1 : slink
+g2 : dset2.1 dset2.2
+
+*/
+
+static void test_all(void) {
+hid_t fid, group, attr, dataset, space;
+hsize_t dims[2];
+int data[2][2], dset1[10][10], dset2[20];
+char buf[60];
+int i, j;
+float dset2_1[10], dset2_2[3][5];
+
+ fid = H5Fcreate(FILE7, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* create groups */
+ group = H5Gcreate (fid, "/g1", 0);
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g2", 0);
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g1/g1.1", 0);
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g1/g1.2", 0);
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g1/g1.2/g1.2.1", 0);
+ H5Gclose(group);
+
+ /* root attributes */
+ group = H5Gopen (fid, "/");
+
+ dims[0] = 10;
+ space = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (group, "attr1", H5T_STD_I8BE, space, H5P_DEFAULT);
+ sprintf(buf, "abcdefghi");
+ H5Awrite(attr, H5T_NATIVE_SCHAR, buf);
+ H5Sclose(space);
+ H5Aclose(attr);
+
+ dims[0] = 2; dims[1] = 2;
+ space = H5Screate_simple(2, dims, NULL);
+ attr = H5Acreate (group, "attr2", H5T_STD_I32BE, space, H5P_DEFAULT);
+ data[0][0] = 0; data[0][1] = 1; data[1][0] = 2; data[1][1] = 3;
+ H5Awrite(attr, H5T_NATIVE_INT, data);
+ H5Sclose(space);
+ H5Aclose(attr);
+
+ H5Gclose(group);
+
+ group = H5Gopen (fid, "/g1/g1.1");
+
+ /* dset1.1.1 */
+ dims[0] = 10; dims[1] = 10;
+ space = H5Screate_simple(2, dims, NULL);
+ dataset = H5Dcreate(group, "dset1.1.1", H5T_STD_I32BE, space, H5P_DEFAULT);
+ for (i = 0; i < 10; i++)
+ for (j = 0; j < 10; j++)
+ dset1[i][j] = j*i;
+ H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset1);
+ H5Sclose(space);
+
+ /* attributes of dset1.1.1 */
+ dims[0] = 27;
+ space = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (dataset, "attr1", H5T_STD_I8BE, space, H5P_DEFAULT);
+ sprintf(buf, "1st attribute of dset1.1.1");
+ H5Awrite(attr, H5T_NATIVE_SCHAR, buf);
+ H5Sclose(space);
+ H5Aclose(attr);
+
+ dims[0] = 27;
+ space = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (dataset, "attr2", H5T_STD_I8BE, space, H5P_DEFAULT);
+ sprintf(buf, "2nd attribute of dset1.1.1");
+ H5Awrite(attr, H5T_NATIVE_SCHAR, buf);
+ H5Sclose(space);
+ H5Aclose(attr);
+
+ H5Dclose(dataset);
+
+ /* dset1.1.2 */
+ dims[0] = 20;
+ space = H5Screate_simple(1, dims, NULL);
+ dataset = H5Dcreate(group, "dset1.1.2", H5T_STD_I32BE, space, H5P_DEFAULT);
+ for (i = 0; i < 20; i++)
+ dset2[i] = i;
+ H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ H5Gclose(group);
+
+ /* soft link */
+ group = H5Gopen (fid, "/g1/g1.2/g1.2.1");
+ H5Glink (group, H5G_LINK_SOFT, "somevalue", "slink");
+ H5Gclose(group);
+
+ group = H5Gopen (fid, "/g2");
+
+ /* dset2.1 */
+ dims[0] = 10;
+ space = H5Screate_simple(1, dims, NULL);
+ dataset = H5Dcreate(group, "dset2.1", H5T_IEEE_F32BE, space, H5P_DEFAULT);
+ for (i = 0; i < 10; i++)
+ dset2_1[i] = i*0.1+1;
+ H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2_1);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ /* dset2.2 */
+ dims[0] = 3; dims[1] = 5;
+ space = H5Screate_simple(2, dims, NULL);
+ dataset = H5Dcreate(group, "dset2.2", H5T_IEEE_F32BE, space, H5P_DEFAULT);
+ for (i = 0; i < 3; i++)
+ for (j = 0; j < 5; j++)
+ dset2_2[i][j] = (i+1)*j*0.1;
+ H5Dwrite(dataset, H5T_NATIVE_FLOAT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2_2);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ H5Gclose(group);
+
+ H5Fclose(fid);
+
+}
+
+/*
+ o
+ /___\
+ g1 o/ \o g2
+ \___/
+
+
+o - group objects
+
+*/
+
+static void test_loop(void) {
+hid_t fid, group;
+
+ fid = H5Fcreate(FILE10, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ group = H5Gcreate (fid, "/g1", 0);
+ H5Gclose(group);
+ group = H5Gcreate (fid, "/g2", 0);
+ H5Gclose(group);
+
+ H5Glink(fid, H5G_LINK_HARD, "/g2", "/g1/g1.1");
+ H5Glink(fid, H5G_LINK_HARD, "/g1", "/g2/g2.1");
+
+ H5Fclose(fid);
+}
+
+static void test_loop2(void) {
+hid_t fid, group;
+
+ fid = H5Fcreate(FILE11, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* create group object g1 and an implicit path from the root group */
+ group = H5Gcreate (fid, "/g1", 0);
+ H5Gclose(group);
+
+ /* create group object g2 and an implicit path from the root group */
+ group = H5Gcreate (fid, "/g2", 0);
+ H5Gclose(group);
+
+ /* create a hard link named g1.1 in /g1 that points to the object at /g2 */
+ H5Glink (fid, H5G_LINK_HARD, "/g2", "/g1/g1.1");
+
+ /* create a soft link named g2.1 in /g2 whose value is the path "/g1" */
+ H5Glink (fid, H5G_LINK_SOFT, "/g1", "/g2/g2.1");
+
+ H5Fclose(fid);
+
+}
+
+/*
+ /
+ | | | \ \ \
+ g1 g2 g3 g4 g5 g6
+ / \ | | \ \ \
+ g1.1 g1.2 slink2 link3 dset2 slink4 dset3
+ | | (g1) (dset2) (dset3)
+ dset1 link1
+ (dset1)
+*/
+
+static void test_many(void) {
+ hid_t fid, group, attr, dataset, space, space2, type, create_plist, type2;
+ hid_t array_dt;
+ hsize_t dims[2];
+ int data[2][2], dset2[10][10], dset3[10][10];
+ double d[10];
+
+ char buf[60];
+ int i, j;
+ int i0, i1, i2, i3;
+ hsize_t sdim, maxdim;
+
+ typedef struct { /* compound type has members with rank > 1 */
+ int a[2][2][2][2]; /* arrays are 2x2x2x2 */
+ double b[2][2][2][2];
+ double c[2][2][2][2];
+ } dset1_t;
+ dset1_t dset1[6];
+
+ hsize_t dim[4];
+ int index[4] = {0,1,2,3};  /* normal indices */
+ const int perm[4] = {0,1,2,3};  /* identity permutation: the indices are not reordered */
+
+ fid = H5Fcreate(FILE12, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ group = H5Gcreate (fid, "/g1", 0);
+ H5Gclose(group);
+
+ create_plist = H5Pcreate(H5P_DATASET_CREATE);
+
+ sdim = 2;
+ H5Pset_chunk(create_plist, 1, &sdim);
+
+ group = H5Gcreate (fid, "/g1/g1.1", 0);
+
+ type = H5Tcreate (H5T_COMPOUND, sizeof(dset1[0]));
+
+ dim[0] = dim[1] = dim[2] = dim[3] = 2;
+ array_dt=H5Tarray_create(H5T_STD_I32BE,4,dim,perm);
+ H5Tinsert(type, "a_array", HOFFSET(dset1_t, a), array_dt);
+ H5Tclose(array_dt);
+
+ array_dt=H5Tarray_create(H5T_IEEE_F64BE,4,dim,perm);
+ H5Tinsert(type, "b_array", HOFFSET(dset1_t, b), array_dt);
+ H5Tclose(array_dt);
+
+ array_dt=H5Tarray_create(H5T_IEEE_F64BE,4,dim,perm);
+ H5Tinsert(type, "c_array", HOFFSET(dset1_t, c), array_dt);
+ H5Tclose(array_dt);
+
+ type2 = H5Tcreate (H5T_COMPOUND, sizeof(dset1[0]));
+
+ array_dt=H5Tarray_create(H5T_NATIVE_INT,4,dim,perm);
+ H5Tinsert(type2, "a_array", HOFFSET(dset1_t, a), array_dt);
+ H5Tclose(array_dt);
+
+ array_dt=H5Tarray_create(H5T_NATIVE_DOUBLE,4,dim,perm);
+ H5Tinsert(type2, "b_array", HOFFSET(dset1_t, b), array_dt);
+ H5Tclose(array_dt);
+
+ array_dt=H5Tarray_create(H5T_NATIVE_DOUBLE,4,dim,perm);
+ H5Tinsert(type2, "c_array", HOFFSET(dset1_t, c), array_dt);
+ H5Tclose(array_dt);
+
+
+ /* dset1 */
+ sdim = 6;
+ maxdim = H5S_UNLIMITED;
+ space = H5Screate_simple(1, &sdim, &maxdim);
+ dataset = H5Dcreate(group, "dset1", type, space, create_plist);
+
+ /* add attributes to dset1 */
+ dims[0] = 10;
+ space2 = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (dataset, "attr1", H5T_STD_I8BE, space2, H5P_DEFAULT);
+ sprintf(buf, "abcdefghi");
+ H5Awrite(attr, H5T_NATIVE_CHAR, buf);
+ H5Sclose(space2);
+ H5Aclose(attr);
+
+ dims[0] = 2; dims[1] = 2;
+ space2 = H5Screate_simple(2, dims, NULL);
+ attr = H5Acreate (dataset, "attr2", H5T_STD_I32BE, space2, H5P_DEFAULT);
+ data[0][0] = 0; data[0][1] = 1; data[1][0] = 2; data[1][1] = 3;
+ H5Awrite(attr, H5T_NATIVE_INT, data);
+ H5Sclose(space2);
+ H5Aclose(attr);
+
+ dims[0] = 10;
+ space2 = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (dataset, "attr3", H5T_IEEE_F64BE, space2, H5P_DEFAULT);
+ for (i = 0; i < 10; i++) d[i] = 0.1 * i;
+ H5Awrite(attr, H5T_NATIVE_DOUBLE, d);
+ H5Sclose(space2);
+ H5Aclose(attr);
+
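+ /* Fill the 2x2x2x2 array members of each dset1 element.  With the identity
+  * perm[] above this is simply a[i3][i2][i1][i0] = i0+j, b = (double)(i0+j),
+  * and c = (double)(i0+j+sdim).
+  */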
+ for (j=0; j<(int)sdim; j++) {
+ for (i3 = 0; i3 < 2; i3++) {
+ index[perm[3]] = i3;
+ for (i2 = 0; i2 < 2; i2++) {
+ index[perm[2]] = i2;
+ for (i1 = 0; i1 < 2; i1++) {
+ index[perm[1]] = i1;
+ for (i0 = 0; i0 < 2; i0++) {
+ index[perm[0]] = i0;
+
+ dset1[j].a[index[3]][index[2]][index[1]][index[0]] = i0+j;
+ dset1[j].b[index[3]][index[2]][index[1]][index[0]] = (double)(i0+j);
+#if WIN32
+ dset1[j].c[index[3]][index[2]][index[1]][index[0]] = (double)(i0+j+(signed __int64)sdim);
+#else
+ dset1[j].c[index[3]][index[2]][index[1]][index[0]] = (double)(i0+j+sdim);
+#endif
+ }
+ }
+ }
+ }
+ }
+
+ H5Dwrite(dataset, type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset1);
+
+ H5Dclose(dataset);
+ H5Sclose(space);
+
+ H5Tclose(type);
+ H5Tclose(type2);
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g1/g1.2", 0);
+ H5Glink (group, H5G_LINK_HARD, "/g1/g1.1/dset1", "link1");
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g2", 0);
+ H5Glink (group, H5G_LINK_SOFT, "/g1", "slink2");
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g3", 0);
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g4", 0);
+
+ /* dset2 */
+ dims[0] = 10; dims[1] = 10;
+ space = H5Screate_simple(2, dims, NULL);
+
+ dataset = H5Dcreate(group, "dset2", H5T_STD_I32BE, space, H5P_DEFAULT);
+ for (i = 0; i < 10; i++)
+ for (j = 0; j < 10; j++)
+ dset2[i][j] = j;
+ H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2);
+
+ H5Dclose(dataset);
+
+ H5Sclose(space);
+ H5Gclose(group);
+
+ group = H5Gopen(fid, "/g3");
+ H5Glink (group, H5G_LINK_HARD, "/g4/dset2", "link3");
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g5", 0);
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g6", 0);
+ /* dset3 */
+ dims[0] = 10; dims[1] = 10;
+ space = H5Screate_simple(2, dims, NULL);
+
+ dataset = H5Dcreate(group, "dset3", H5T_STD_I32BE, space, H5P_DEFAULT);
+ for (i = 0; i < 10; i++)
+ for (j = 0; j < 10; j++)
+ dset3[i][j] = i;
+ H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset3);
+
+ H5Dclose(dataset);
+
+ H5Sclose(space);
+ H5Gclose(group);
+
+ group = H5Gopen(fid, "/g5");
+ H5Glink (group, H5G_LINK_SOFT, "/g6/dset3", "slink4");
+ H5Gclose(group);
+
+ H5Fclose(fid);
+
+}
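+
+/* Helper: build a fixed-length C-string datatype of the given size with the
+ * requested padding (H5T_STR_NULLTERM, H5T_STR_NULLPAD, or H5T_STR_SPACEPAD).
+ * Returns a negative value if any step fails.
+ */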
+static hid_t mkstr(int size, H5T_str_t pad) {
+hid_t type;
+
+ if ((type=H5Tcopy(H5T_C_S1))<0) return -1;
+ if (H5Tset_size(type, (size_t)size)<0) return -1;
+ if (H5Tset_strpad(type, pad)<0) return -1;
+
+ return type;
+}
+
+static void test_str(void) {
+ hid_t fid, dataset, space, f_type, m_type, str_type, f_type2;
+ hid_t array_dt;
+
+ hsize_t dims1[] = { 3, 4};
+ char string1[12][2] = {"s1","s2","s3","s4","s5","s6","s7","s8","s9",
+ "s0","s1","s2"};
+
+ hsize_t dims2[]={20};
+ char string2[20][9] = {"ab cd ef1", "ab cd ef2", "ab cd ef3", "ab cd ef4",
+ "ab cd ef5", "ab cd ef6", "ab cd ef7", "ab cd ef8",
+ "ab cd ef9", "ab cd ef0", "ab cd ef1", "ab cd ef2",
+ "ab cd ef3", "ab cd ef4", "ab cd ef5", "ab cd ef6",
+ "ab cd ef7", "ab cd ef8", "ab cd ef9", "ab cd ef0"};
+
+ hsize_t dims3[] = { 27};
+ char string3[27][5] = {"abcd0", "abcd1", "abcd2", "abcd3",
+ "abcd4", "abcd5", "abcd6", "abcd7",
+ "abcd8", "abcd9", "abcd0", "abcd1",
+ "abcd2", "abcd3", "abcd4", "abcd5",
+ "abcd6", "abcd7", "abcd8", "abcd9",
+ "abcd0", "abcd1", "abcd2", "abcd3",
+ "abcd4", "abcd5", "abcd6"};
+
+ int i, j, k, l;
+
+ hsize_t dims4[] = { 3 };
+ char string4[3][20] = { "s1234567890123456789", "s1234567890123456789",
+ "s1234567890123456789"};
+
+ hsize_t dims5[] = { 3, 6};
+ typedef struct {
+ int a[8][10];
+ char s[12][32];
+ } compound_t;
+ compound_t comp1[3][6];
+ hsize_t mdims[2];
+
+ fid = H5Fcreate(FILE13, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* string 1 : nullterm string */
+ space = H5Screate_simple(2, dims1, NULL);
+ f_type = mkstr(5, H5T_STR_NULLTERM);
+ m_type = mkstr(2, H5T_STR_NULLTERM);
+ dataset = H5Dcreate(fid, "/string1", f_type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, m_type, H5S_ALL, H5S_ALL, H5P_DEFAULT, string1);
+ H5Tclose(m_type);
+ H5Tclose(f_type);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ /* string 2 : space pad string */
+ space = H5Screate_simple(1, dims2, NULL);
+ f_type = mkstr(11, H5T_STR_SPACEPAD);
+ m_type = mkstr(9, H5T_STR_NULLTERM);
+ dataset = H5Dcreate(fid, "/string2", f_type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, m_type, H5S_ALL, H5S_ALL, H5P_DEFAULT, string2);
+ H5Tclose(m_type);
+ H5Tclose(f_type);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ /* string 3 : null pad string */
+ space = H5Screate_simple(1, dims3, NULL);
+ f_type = mkstr(8, H5T_STR_NULLPAD);
+ m_type = mkstr(5, H5T_STR_NULLTERM);
+ dataset = H5Dcreate(fid, "/string3", f_type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, m_type, H5S_ALL, H5S_ALL, H5P_DEFAULT, string3);
+ H5Tclose(m_type);
+ H5Tclose(f_type);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ /* string 4 : space pad long string */
+ space = H5Screate_simple(1, dims4, NULL);
+ f_type = mkstr(168, H5T_STR_SPACEPAD);
+ m_type = mkstr(20, H5T_STR_NULLTERM);
+ dataset = H5Dcreate(fid, "/string4", f_type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, m_type, H5S_ALL, H5S_ALL, H5P_DEFAULT, string4);
+ H5Tclose(m_type);
+ H5Tclose(f_type);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ /* compound data */
+ space = H5Screate_simple(2, dims5, NULL);
+ f_type = H5Tcreate (H5T_COMPOUND, sizeof(compound_t));
+ f_type2 = H5Tcreate (H5T_COMPOUND, sizeof(compound_t));
+
+ mdims[0] = 8; mdims[1] = 10;
+
+ array_dt=H5Tarray_create(H5T_STD_I32BE,2,mdims,NULL);
+ H5Tinsert(f_type, "int_array", HOFFSET(compound_t, a), array_dt);
+ H5Tclose(array_dt);
+
+ array_dt=H5Tarray_create(H5T_NATIVE_INT,2,mdims,NULL);
+ H5Tinsert(f_type2, "int_array", HOFFSET(compound_t, a), array_dt);
+ H5Tclose(array_dt);
+
+ str_type = mkstr(32, H5T_STR_SPACEPAD);
+ mdims[0] = 3; mdims[1] = 4;
+
+ array_dt=H5Tarray_create(str_type,2,mdims,NULL);
+ H5Tinsert(f_type, "string", HOFFSET(compound_t, s), array_dt);
+ H5Tclose(array_dt);
+
+ array_dt=H5Tarray_create(str_type,2,mdims,NULL);
+ H5Tinsert(f_type2, "string", HOFFSET(compound_t, s), array_dt);
+ H5Tclose(array_dt);
+
+ for (i = 0; i < 3; i++)
+ for (j = 0; j < 6; j++) {
+ for (k = 0 ; k < 8; k++)
+ for (l = 0; l < 10; l++)
+ comp1[i][j].a[k][l] = (l+j+k) * (l+j+k);
+ for (k = 0 ; k < 12; k++)
+ HDmemcpy(comp1[i][j].s[k], "abcdefgh12345678abcdefgh12345678", 32); /* fill all 32 bytes; the field has no room for a NUL */
+ }
+
+ dataset = H5Dcreate(fid, "/comp1", f_type, space, H5P_DEFAULT);
+ H5Dwrite(dataset, f_type2, H5S_ALL, H5S_ALL, H5P_DEFAULT, comp1);
+
+ H5Tclose(str_type);
+ H5Tclose(f_type);
+ H5Tclose(f_type2);
+ H5Sclose(space);
+ H5Dclose(dataset);
+
+ H5Fclose(fid);
+}
+
+/*
+                       /
+       /    /    |     \    \    \
+      g1   g2   g3     g4   g5   g6
+      |    |    |      |    |    |
+    dset1 dset2 dset3 dset4 dset5 dset6
+*/
+
+static void test_str2(void)
+{
+hid_t fid, group, attr, dataset, space, space2, mem_space, hyper_space;
+hid_t fxdlenstr, fxdlenstr2, memtype;
+hsize_t dims[1], size[1], stride[1], count[1], block[1];
+hssize_t start[1];
+
+
+int i;
+char buf[LENSTR+10];
+char buf2[3*LENSTR2];
+hsize_t sdim;
+
+ fid = H5Fcreate(FILE14, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ fxdlenstr = H5Tcopy(H5T_C_S1);
+ H5Tset_size(fxdlenstr, LENSTR);
+ H5Tset_cset(fxdlenstr, H5T_CSET_ASCII);
+ H5Tset_strpad(fxdlenstr, H5T_STR_NULLTERM);
+
+ memtype = H5Tcopy(H5T_C_S1);
+ H5Tset_size(memtype, LENSTR);
+ H5Tset_cset(memtype, H5T_CSET_ASCII);
+ H5Tset_strpad(memtype, H5T_STR_NULLTERM);
+
+ sdim = 10;
+ size[0] = sdim;
+ space = H5Screate_simple(1, size, NULL);
+ size[0] = 1;
+ mem_space = H5Screate_simple(1,size,NULL);
+ hyper_space = H5Scopy(space);
+
+ /* dset1 */
+
+ group = H5Gcreate (fid, "/g1", 0);
+ dataset = H5Dcreate(group, "dset1", fxdlenstr, space, H5P_DEFAULT);
+
+ /* add attributes to dset1 */
+
+ fxdlenstr2 = H5Tcopy(H5T_C_S1);
+ H5Tset_size(fxdlenstr2, LENSTR2);
+ H5Tset_cset(fxdlenstr2, H5T_CSET_ASCII);
+ H5Tset_strpad(fxdlenstr2, H5T_STR_NULLTERM);
+
+ dims[0] = 3;
+ space2 = H5Screate_simple(1, dims, NULL);
+ attr = H5Acreate (dataset, "attr1", fxdlenstr2, space2, H5P_DEFAULT);
+ sprintf(&(buf2[0*LENSTR2]), "0123456789");
+ sprintf(&(buf2[1*LENSTR2]), "abcdefghij");
+ sprintf(&(buf2[2*LENSTR2]), "ABCDEFGHIJ");
+ H5Awrite(attr, fxdlenstr2, buf2);
+ H5Sclose(space2);
+ H5Tclose(fxdlenstr2);
+ H5Aclose(attr);
+
+ stride[0]=1;
+ count[0]=1;
+ block[0]=1;
+
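+ /* Write the dataset one row at a time: resize the memory string type to the
+  * actual length of buf, select row i of the file dataspace as a hyperslab,
+  * and write that single element.
+  */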
+ for (i = 0; (hsize_t)i < sdim; i++) {
+ start[0] = i;
+ sprintf(buf,"This is row %1d of type H5T_STR_NULLTERM of",i);
+ H5Tset_size(memtype, HDstrlen(buf)+1);
+ H5Sselect_hyperslab(hyper_space, H5S_SELECT_SET, start, stride, count, block);
+ H5Dwrite(dataset, memtype, mem_space, hyper_space, H5P_DEFAULT, buf);
+ }
+ H5Dclose(dataset);
+ H5Gclose(group);
+
+ group = H5Gcreate (fid, "/g2", 0);
+ dataset = H5Dcreate(group, "dset2", fxdlenstr, space, H5P_DEFAULT);
+
+ for (i = 0; (hsize_t)i < sdim; i++) {
+ start[0] = i;
+ sprintf(buf,"This is row %1d of type H5T_STR_NULLTERM of string array",i);
+ H5Tset_size(memtype, HDstrlen(buf)+1);
+ H5Sselect_hyperslab(hyper_space, H5S_SELECT_SET, start, stride, count, block);
+ H5Dwrite(dataset, memtype, mem_space, hyper_space, H5P_DEFAULT, buf);
+ }
+ H5Dclose(dataset);
+ H5Gclose(group);
+
+
+ H5Tclose(fxdlenstr);
+ fxdlenstr = H5Tcopy(H5T_C_S1);
+ H5Tset_size(fxdlenstr, LENSTR);
+ H5Tset_cset(fxdlenstr, H5T_CSET_ASCII);
+ H5Tset_strpad(fxdlenstr, H5T_STR_NULLPAD);
+
+ group = H5Gcreate (fid, "/g3", 0);
+ dataset = H5Dcreate(group, "dset3", fxdlenstr, space, H5P_DEFAULT);
+
+ for (i = 0;(hsize_t) i < sdim; i++) {
+ start[0] = i;
+ sprintf(buf,"This is row %1d of type H5T_STR_NULLPAD of",i);
+ H5Tset_size(memtype, HDstrlen(buf)+1);
+ H5Sselect_hyperslab(hyper_space, H5S_SELECT_SET, start, stride, count, block);
+ H5Dwrite(dataset, memtype, mem_space, hyper_space, H5P_DEFAULT, buf);
+ }
+ H5Dclose(dataset);
+ H5Gclose(group);
+
+
+ group = H5Gcreate (fid, "/g4", 0);
+ dataset = H5Dcreate(group, "dset4", fxdlenstr, space, H5P_DEFAULT);
+
+ for (i = 0; (hsize_t)i < sdim; i++) {
+ start[0] = i;
+ sprintf(buf,"This is row %1d of type H5T_STR_NULLPAD of string array",i);
+ H5Tset_size(memtype, HDstrlen(buf)+1);
+ H5Sselect_hyperslab(hyper_space, H5S_SELECT_SET, start, stride, count, block);
+ H5Dwrite(dataset, memtype, mem_space, hyper_space, H5P_DEFAULT, buf);
+ }
+ H5Dclose(dataset);
+ H5Gclose(group);
+
+ H5Tclose(fxdlenstr);
+ fxdlenstr = H5Tcopy(H5T_C_S1);
+ H5Tset_size(fxdlenstr, LENSTR);
+ H5Tset_cset(fxdlenstr, H5T_CSET_ASCII);
+ H5Tset_strpad(fxdlenstr, H5T_STR_SPACEPAD);
+
+ group = H5Gcreate (fid, "/g5", 0);
+ dataset = H5Dcreate(group, "dset5", fxdlenstr, space, H5P_DEFAULT);
+
+ for (i = 0; (hsize_t)i < sdim; i++) {
+ start[0] = i;
+ sprintf(buf,"This is row %1d of type H5T_STR_SPACEPAD of",i);
+ H5Tset_size(memtype, HDstrlen(buf)+1);
+ H5Sselect_hyperslab(hyper_space, H5S_SELECT_SET, start, stride, count, block);
+ H5Dwrite(dataset, memtype, mem_space, hyper_space, H5P_DEFAULT, buf);
+ }
+ H5Dclose(dataset);
+ H5Gclose(group);
+
+
+ group = H5Gcreate (fid, "/g6", 0);
+ dataset = H5Dcreate(group, "dset6", fxdlenstr, space, H5P_DEFAULT);
+
+ for (i = 0; (hsize_t)i < sdim; i++) {
+ start[0] = i;
+ sprintf(buf,"This is row %1d of type H5T_STR_SPACEPAD of string array",i);
+ H5Tset_size(memtype, HDstrlen(buf)+1);
+ H5Sselect_hyperslab(hyper_space, H5S_SELECT_SET, start, stride, count, block);
+ H5Dwrite(dataset, memtype, mem_space, hyper_space, H5P_DEFAULT, buf);
+ }
+
+ H5Dclose(dataset);
+ H5Tclose(fxdlenstr);
+ H5Tclose(memtype);
+ H5Sclose(mem_space);
+ H5Sclose(hyper_space);
+ H5Sclose(space);
+ H5Fclose(fid);
+}
+
+static void test_enum(void)
+{
+ /*some code is taken from enum.c in the test dir */
+ hid_t file, type, space, dset;
+ int val;
+ enumtype data[] = {RED, GREEN, BLUE, GREEN, WHITE,
+ WHITE, BLACK, GREEN, BLUE, RED,
+ RED, BLUE, GREEN, BLACK, WHITE,
+ RED, WHITE, GREEN, GREEN, BLUE};
+ hsize_t size[1] = {NELMTS(data)};
+
+ file = H5Fcreate(FILE15,H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+
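+ /* Each (val = N, &val) comma expression stores the member value in val and
+  * passes its address to H5Tenum_insert. */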
+ type = H5Tcreate(H5T_ENUM, sizeof(enumtype));
+ H5Tenum_insert(type, "RED", (val = 0, &val));
+ H5Tenum_insert(type, "GREEN", (val = 1, &val));
+ H5Tenum_insert(type, "BLUE", (val = 2, &val));
+ H5Tenum_insert(type, "WHITE", (val = 3, &val));
+ H5Tenum_insert(type, "BLACK", (val = 4, &val));
+ H5Tcommit(file, "enum normal", type);
+
+ space = H5Screate_simple(1,size,NULL);
+ dset = H5Dcreate(file,"table",type, space, H5P_DEFAULT);
+ H5Dwrite(dset,type,space,space,H5P_DEFAULT,data);
+
+ H5Dclose(dset);
+ H5Sclose(space);
+ H5Fclose(file);
+}
+
+static void test_objref(void)
+{
+/*some code is taken from enum.c in the test dir */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+
+ hid_t group; /* Group ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Datatype ID */
+ hsize_t dims1[] = {SPACE1_DIM1};
+ hobj_ref_t *wbuf, /* buffer to write to disk */
+ *rbuf, /* buffer read from disk */
+ *tbuf; /* temp. buffer read from disk */
+ uint32_t *tu32; /* Temporary pointer to uint32 data */
+ intn i; /* counting variables */
+ const char *write_comment="Foo!"; /* Comments for group */
+
+ /* Allocate write & read buffers */
+ wbuf=malloc(sizeof(hobj_ref_t)*SPACE1_DIM1);
+ rbuf=malloc(sizeof(hobj_ref_t)*SPACE1_DIM1);
+ tbuf=malloc(sizeof(hobj_ref_t)*SPACE1_DIM1);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE16, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+
+ /* Create a group */
+ group=H5Gcreate(fid1,"Group1",(size_t)-1);
+
+ /* Set group's comment */
+ H5Gset_comment(group,".",write_comment);
+
+ /* Create a dataset (inside Group1) */
+ dataset=H5Dcreate(group,"Dataset1",H5T_STD_U32BE,sid1,H5P_DEFAULT);
+
+ for(tu32=(uint32_t *)((void*)wbuf),i=0; i<SPACE1_DIM1; i++)
+ *tu32++=i*3;
+
+ /* Write selection to disk */
+ H5Dwrite(dataset,H5T_NATIVE_UINT,H5S_ALL,H5S_ALL,H5P_DEFAULT,wbuf);
+
+ /* Close Dataset */
+ H5Dclose(dataset);
+
+ /* Create another dataset (inside Group1) */
+ dataset=H5Dcreate(group,"Dataset2",H5T_STD_U8BE,sid1,H5P_DEFAULT);
+
+ /* Close Dataset */
+ H5Dclose(dataset);
+
+ /* Create a datatype to refer to */
+ tid1 = H5Tcreate (H5T_COMPOUND, sizeof(s1_t));
+
+ /* Insert fields */
+ H5Tinsert (tid1, "a", HOFFSET(s1_t,a), H5T_STD_I32BE);
+
+ H5Tinsert (tid1, "b", HOFFSET(s1_t,b), H5T_IEEE_F32BE);
+
+ H5Tinsert (tid1, "c", HOFFSET(s1_t,c), H5T_IEEE_F32BE);
+
+ /* Save datatype for later */
+ H5Tcommit (group, "Datatype1", tid1);
+
+ /* Close datatype */
+ H5Tclose(tid1);
+
+ /* Close group */
+ H5Gclose(group);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset3",H5T_STD_REF_OBJ,sid1,H5P_DEFAULT);
+
+ /* Create reference to dataset */
+ H5Rcreate(&wbuf[0],fid1,"/Group1/Dataset1",H5R_OBJECT,-1);
+ H5Rget_object_type(dataset,&wbuf[0]);
+
+ /* Create reference to dataset */
+ H5Rcreate(&wbuf[1],fid1,"/Group1/Dataset2",H5R_OBJECT,-1);
+
+ H5Rget_object_type(dataset,&wbuf[1]);
+
+ /* Create reference to group */
+ H5Rcreate(&wbuf[2],fid1,"/Group1",H5R_OBJECT,-1);
+
+ H5Rget_object_type(dataset,&wbuf[2]);
+
+
+ /* Create reference to named datatype */
+ H5Rcreate(&wbuf[3],fid1,"/Group1/Datatype1",H5R_OBJECT,-1);
+
+ H5Rget_object_type(dataset,&wbuf[3]);
+
+
+ /* Write selection to disk */
+ H5Dwrite(dataset,H5T_STD_REF_OBJ,H5S_ALL,H5S_ALL,H5P_DEFAULT,wbuf);
+
+
+ /* Close disk dataspace */
+ H5Sclose(sid1);
+
+ /* Close Dataset */
+ H5Dclose(dataset);
+
+ /* Close file */
+ H5Fclose(fid1);
+
+ /* Free memory buffers */
+ free(wbuf);
+ free(rbuf);
+ free(tbuf);
+
+}
+
+static void test_datareg(void)
+{
+ /*some code is taken from enum.c in the test dir */
+
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dset1, /* Dataset ID */
+ dset2; /* Dereferenced dataset ID */
+ hid_t sid1, /* Dataspace ID #1 */
+ sid2; /* Dataspace ID #2 */
+ hsize_t dims1[] = {SPACE1_DIM1},
+ dims2[] = {SPACE2_DIM1, SPACE2_DIM2};
+ hssize_t start[SPACE2_RANK]; /* Starting location of hyperslab */
+ hsize_t stride[SPACE2_RANK]; /* Stride of hyperslab */
+ hsize_t count[SPACE2_RANK]; /* Element count of hyperslab */
+ hsize_t block[SPACE2_RANK]; /* Block size of hyperslab */
+ hssize_t coord1[POINT1_NPOINTS][SPACE2_RANK]; /* Coordinates for point selection */
+ hdset_reg_ref_t *wbuf, /* buffer to write to disk */
+ *rbuf; /* buffer read from disk */
+ uint8_t *dwbuf, /* Buffer for writing numeric data to disk */
+ *drbuf; /* Buffer for reading numeric data from disk */
+ uint8_t *tu8; /* Temporary pointer to uint8 data */
+ intn i; /* counting variables */
+
+ /* Allocate write & read buffers */
+ wbuf=calloc(sizeof(hdset_reg_ref_t), SPACE1_DIM1);
+ rbuf=malloc(sizeof(hdset_reg_ref_t)*SPACE1_DIM1);
+ dwbuf=malloc(sizeof(uint8_t)*SPACE2_DIM1*SPACE2_DIM2);
+ drbuf=calloc(sizeof(uint8_t),SPACE2_DIM1*SPACE2_DIM2);
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE17, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid2 = H5Screate_simple(SPACE2_RANK, dims2, NULL);
+
+ /* Create a dataset */
+ dset2=H5Dcreate(fid1,"Dataset2",H5T_STD_U8BE,sid2,H5P_DEFAULT);
+
+ for(tu8=dwbuf,i=0; i<SPACE2_DIM1*SPACE2_DIM2; i++)
+ *tu8++=i*3;
+
+ /* Write selection to disk */
+ H5Dwrite(dset2,H5T_NATIVE_UCHAR,H5S_ALL,H5S_ALL,H5P_DEFAULT,dwbuf);
+
+ /* Close Dataset */
+ H5Dclose(dset2);
+
+ /* Create dataspace for the reference dataset */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+
+ /* Create a dataset */
+ dset1=H5Dcreate(fid1,"Dataset1",H5T_STD_REF_DSETREG,sid1,H5P_DEFAULT);
+
+ /* Create references */
+
+ /* Select 6x6 hyperslab for first reference */
+ start[0]=2; start[1]=2;
+ stride[0]=1; stride[1]=1;
+ count[0]=6; count[1]=6;
+ block[0]=1; block[1]=1;
+ H5Sselect_hyperslab(sid2,H5S_SELECT_SET,start,stride,count,block);
+
+ H5Sget_select_npoints(sid2);
+
+ /* Store first dataset region */
+ H5Rcreate(&wbuf[0],fid1,"/Dataset2",H5R_DATASET_REGION,sid2);
+
+ /* Select sequence of ten points for second reference */
+ coord1[0][0]=6; coord1[0][1]=9;
+ coord1[1][0]=2; coord1[1][1]=2;
+ coord1[2][0]=8; coord1[2][1]=4;
+ coord1[3][0]=1; coord1[3][1]=6;
+ coord1[4][0]=2; coord1[4][1]=8;
+ coord1[5][0]=3; coord1[5][1]=2;
+ coord1[6][0]=0; coord1[6][1]=4;
+ coord1[7][0]=9; coord1[7][1]=0;
+ coord1[8][0]=7; coord1[8][1]=1;
+ coord1[9][0]=3; coord1[9][1]=3;
+ H5Sselect_elements(sid2,H5S_SELECT_SET,POINT1_NPOINTS,(const hssize_t **)coord1);
+
+ H5Sget_select_npoints(sid2);
+
+ /* Store second dataset region */
+ H5Rcreate(&wbuf[1],fid1,"/Dataset2",H5R_DATASET_REGION,sid2);
+
+ /* Write selection to disk */
+ H5Dwrite(dset1,H5T_STD_REF_DSETREG,H5S_ALL,H5S_ALL,H5P_DEFAULT,wbuf);
+
+ /* Close disk dataspace */
+ H5Sclose(sid1);
+
+ /* Close Dataset */
+ H5Dclose(dset1);
+
+ /* Close uint8 dataset dataspace */
+ H5Sclose(sid2);
+
+ /* Close file */
+ H5Fclose(fid1);
+
+ /* Free memory buffers */
+ free(wbuf);
+ free(rbuf);
+ free(dwbuf);
+ free(drbuf);
+}
+
+/*taken from Elena's compound test file*/
+static void test_nestcomp(void)
+{
+ /* Compound member of the compound datatype*/
+ typedef struct cmp_t {
+ char a;
+ float b[2];
+ } cmp_t;
+
+ /* First structure and dataset*/
+ typedef struct s1_t {
+ int a;
+ float b;
+ double c;
+ cmp_t d;
+ } s1_t;
+ hid_t cmp_tid; /* Handle for the compound datatype */
+ hid_t char_id; /* Handle for the string datatype */
+ hid_t array_dt;
+ hsize_t array_dims[] = {2}; /* Dataspace dimensions */
+ int ndims = 1; /* Number of dimensions in the array field */
+
+ s1_t s1[10];
+ hid_t s1_tid; /* File datatype identifier */
+
+ int i;
+ hid_t file, dataset, space; /* Handles */
+ herr_t status;
+ hsize_t dim[] = {10}; /* Dataspace dimensions */
+
+ char datasetname[] = "ArrayOfStructures";
+
+
+ /*
+ * Initialize the data
+ */
+ for (i = 0; i< 10; i++) {
+ s1[i].a = i;
+ s1[i].b = i*i;
+ s1[i].c = 1./(i+1);
+ s1[i].d.a = 65 + i;
+ s1[i].d.b[0] = -100.;
+ s1[i].d.b[1] = 100.;
+ }
+
+ /*
+ * Create the data space.
+ */
+ space = H5Screate_simple(1, dim, NULL);
+
+ /*
+ * Create the file.
+ */
+ file = H5Fcreate(FILE18, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /*
+ * Create the memory data type.
+ */
+ /*
+ * Create a datatype for compound field first.
+ */
+ cmp_tid = H5Tcreate (H5T_COMPOUND, sizeof(cmp_t));
+
+ /* We are using a C string of length one to represent a "real" character */
+ char_id = H5Tcopy(H5T_C_S1);
+ H5Tset_strpad(char_id, H5T_STR_NULLTERM);
+ H5Tinsert(cmp_tid, "char_name", HOFFSET(cmp_t, a), char_id);
+
+ array_dt=H5Tarray_create(H5T_NATIVE_FLOAT,ndims,array_dims,NULL);
+ H5Tinsert(cmp_tid, "array_name", HOFFSET(cmp_t, b), array_dt);
+ H5Tclose(array_dt);
+
+ s1_tid = H5Tcreate (H5T_COMPOUND, sizeof(s1_t));
+ H5Tinsert(s1_tid, "a_name", HOFFSET(s1_t, a), H5T_NATIVE_INT);
+ H5Tinsert(s1_tid, "c_name", HOFFSET(s1_t, c), H5T_NATIVE_DOUBLE);
+ H5Tinsert(s1_tid, "b_name", HOFFSET(s1_t, b), H5T_NATIVE_FLOAT);
+
+ /* Insert the compound member created above */
+ H5Tinsert(s1_tid, "d_name", HOFFSET(s1_t, d), cmp_tid);
+
+ /*
+ * Create the dataset.
+ */
+ dataset = H5Dcreate(file, datasetname, s1_tid, space, H5P_DEFAULT);
+
+ /*
+ * Write data to the dataset.
+ */
+ status = H5Dwrite(dataset, s1_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, s1);
+ if (status < 0)
+ fprintf(stderr, "test_nestcomp H5Dwrite failed\n");
+
+ /*
+ * Release resources
+ */
+ H5Tclose(s1_tid);
+ H5Tclose(cmp_tid);
+ H5Tclose(char_id);
+ H5Sclose(space);
+ H5Dclose(dataset);
+ H5Fclose(file);
+}
+
+static void test_opaque(void)
+{
+ hid_t file, type, dataset, space;
+ char test[100][2];
+ int x;
+ hsize_t dim = 2;
+
+ for (x = 0; x < 100; x++){
+ test[x][0] = x;
+ test[x][1] = 99 - x;
+ }
+
+ /*
+ * Create the data space.
+ */
+ space = H5Screate_simple(1, &dim, NULL);
+
+ /*
+ * Create the file.
+ */
+ file = H5Fcreate(FILE19, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /*
+ * Create the memory datatype.
+ */
+ type = H5Tcreate (H5T_OPAQUE, sizeof(char)*100*2);
+ H5Tset_tag(type, "test opaque type");
+
+ /*
+ * Create the dataset.
+ */
+ dataset = H5Dcreate(file, "opaque test", type, space, H5P_DEFAULT);
+
+ /*
+ * Write data to the dataset;
+ */
+ H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, test);
+
+ H5Tclose(type);
+ H5Sclose(space);
+ H5Dclose(dataset);
+ H5Fclose(file);
+}
+
+static void test_bitfields(void)
+{
+ hid_t file, grp=-1, type=-1, space=-1, dset=-1;
+ size_t i;
+ hsize_t nelmts;
+ unsigned char buf[32];
+
+ file = H5Fcreate(FILE20, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ if ((grp=H5Gcreate(file, "typetests", 0))<0) goto error;
+
+ /* bitfield_1 */
+ nelmts = sizeof(buf);
+ if ((type=H5Tcopy(H5T_STD_B8LE))<0 ||
+ (space=H5Screate_simple(1, &nelmts, NULL))<0 ||
+ (dset=H5Dcreate(grp, "bitfield_1", type, space, H5P_DEFAULT))<0)
+ goto error;
+
+ for (i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i;
+ if (H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf)<0)
+ goto error;
+ if (H5Sclose(space)<0) goto error;
+ if (H5Tclose(type)<0) goto error;
+ if (H5Dclose(dset)<0) goto error;
+
+ /* bitfield_2 */
+ nelmts = sizeof(buf)/2;
+ if ((type=H5Tcopy(H5T_STD_B16LE))<0 ||
+ (space=H5Screate_simple(1, &nelmts, NULL))<0 ||
+ (dset=H5Dcreate(grp, "bitfield_2", type, space, H5P_DEFAULT))<0)
+ goto error;
+ for (i=0; i<sizeof buf; i++) buf[i] = (unsigned char)0xff ^ (unsigned char)i;
+ if (H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf)<0)
+ goto error;
+ if (H5Sclose(space)<0) goto error;
+ if (H5Tclose(type)<0) goto error;
+ if (H5Dclose(dset)<0) goto error;
+ if (H5Gclose(grp)<0) goto error;
+ H5Fclose(file);
+ return;
+
+ error:
+ H5E_BEGIN_TRY {
+ H5Gclose(grp);
+ H5Tclose(type);
+ H5Sclose(space);
+ H5Dclose(dset);
+ } H5E_END_TRY;
+}
+
+static void test_vldatatypes(void)
+{
+ hvl_t adata, wdata[SPACE1_DIM1];
+ hid_t file, dset, space, type;
+ hsize_t dims[] = { SPACE1_DIM1 };
+ int i;
+ herr_t ret=0;
+
+ ret = ret; /* so that compiler won't complain "is set but never used" */
+ file = H5Fcreate(FILE21, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Allocate and initialize VL dataset to write */
+ for(i = 0; i < SPACE1_DIM1; i++) {
+ int j;
+
+ wdata[i].p = malloc((i + 1) * sizeof(int));
+ wdata[i].len = i + 1;
+
+ for (j = 0; j < i + 1; j++)
+ ((int *)wdata[i].p)[j] = i * 10 + j;
+ }
+
+ /* write out the integer VL data using the native integer type */
+ space = H5Screate_simple(SPACE1_RANK, dims, NULL);
+ type = H5Tvlen_create(H5T_NATIVE_INT);
+ dset = H5Dcreate(file, "Dataset1.0", type, space, H5P_DEFAULT);
+ ret = H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+ ret = H5Dvlen_reclaim(type, space, H5P_DEFAULT, wdata);
+
+ ret = H5Dclose(dset);
+ ret = H5Tclose(type);
+ ret = H5Sclose(space);
+
+ /* Allocate and initialize VL dataset to write */
+ for(i = 0; i < SPACE1_DIM1; i++) {
+ int j;
+
+ wdata[i].p = malloc((i + 1) * sizeof(float));
+ wdata[i].len = i + 1;
+
+ for (j = 0; j < i + 1; j++)
+ ((float *)wdata[i].p)[j] = i * 10 + ((float)j) / 10.0;
+ }
+
+ /* write out the float VL data using the native float type */
+ space = H5Screate_simple(SPACE1_RANK, dims, NULL);
+ type = H5Tvlen_create(H5T_NATIVE_FLOAT);
+ dset = H5Dcreate(file, "Dataset2.0", type, space, H5P_DEFAULT);
+ ret = H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);
+ ret = H5Dvlen_reclaim(type, space, H5P_DEFAULT, wdata);
+
+ ret = H5Dclose(dset);
+ ret = H5Tclose(type);
+ ret = H5Sclose(space);
+
+ /* Allocate and initialize a scalar VL dataset to write */
+ adata.p = malloc(37 * sizeof(int));
+ adata.len = 37;
+
+ for (i = 0; i < 37; i++)
+ ((int *)adata.p)[i] = i * 2;
+
+ /* write out the scalar VL dataset using the native integer type */
+ space = H5Screate_simple(0, NULL, NULL);
+ type = H5Tvlen_create(H5T_NATIVE_INT);
+ dset = H5Dcreate(file, "Dataset3.0", type, space, H5P_DEFAULT);
+ ret = H5Dwrite(dset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, &adata);
+ ret = H5Dvlen_reclaim(type, space, H5P_DEFAULT, &adata);
+
+ ret = H5Dclose(dset);
+ ret = H5Tclose(type);
+ ret = H5Sclose(space);
+ ret = H5Fclose(file);
+}
+
+static void test_vldatatypes2(void)
+{
+ hvl_t wdata[SPACE1_DIM1]; /* Information to write */
+ hvl_t *t1; /* Temporary pointer to VL information */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1, tid2; /* Datatype IDs */
+ hsize_t dims1[] = {SPACE1_DIM1};
+ uintn i,j,k; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ ret = ret; /* so that compiler won't complain "is set but never used" */
+
+ /* Allocate and initialize VL data to write */
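+ /* wdata[i] is a VL sequence of i+1 inner VL sequences; inner sequence j
+  * holds j+1 unsigned ints with values i*100 + j*10 + k. */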
+ for(i=0; i<SPACE1_DIM1; i++) {
+ wdata[i].p=malloc((i+1)*sizeof(hvl_t));
+ if(wdata[i].p==NULL) {
+ printf("Cannot allocate memory for VL data! i=%u\n",i);
+ return;
+ } /* end if */
+ wdata[i].len=i+1;
+ for(t1=wdata[i].p,j=0; j<(i+1); j++, t1++) {
+ t1->p=malloc((j+1)*sizeof(unsigned int));
+ if(t1->p==NULL) {
+ printf("Cannot allocate memory for VL data! i=%u, j=%u\n",i,j);
+ return;
+ } /* end if */
+ t1->len=j+1;
+ for(k=0; k<(j+1); k++)
+ ((unsigned int *)t1->p)[k]=i*100+j*10+k;
+ } /* end for */
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE22, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+
+ /* Create a VL datatype to refer to */
+ tid1 = H5Tvlen_create (H5T_NATIVE_UINT);
+
+ /* Create the base VL type */
+ tid2 = H5Tvlen_create (tid1);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset1",tid2,sid1,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid2,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Reclaim the write VL data */
+ ret=H5Dvlen_reclaim(tid2,sid1,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid2);
+ ret = H5Tclose(tid1);
+ ret = H5Sclose(sid1);
+ ret = H5Fclose(fid1);
+
+}
+
+static void test_vldatatypes3(void)
+{
+ typedef struct { /* Struct that the VL sequences are composed of */
+ int i;
+ float f;
+ hvl_t v;
+ } s1;
+ s1 wdata[SPACE1_DIM1]; /* Information to write */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1, tid2; /* Datatype IDs */
+ hsize_t dims1[] = {SPACE1_DIM1};
+ uintn i,j; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ ret = ret; /* so that compiler won't complain "is set but never used" */
+
+ /* Allocate and initialize VL data to write */
+ for(i=0; i<SPACE1_DIM1; i++) {
+ wdata[i].i=i*10;
+ wdata[i].f=(i*20)/3.0;
+ wdata[i].v.p=malloc((i+1)*sizeof(unsigned int));
+ wdata[i].v.len=i+1;
+ for(j=0; j<(i+1); j++)
+ ((unsigned int *)wdata[i].v.p)[j]=i*10+j;
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE23, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+
+ /* Create a VL datatype to refer to */
+ tid1 = H5Tvlen_create (H5T_NATIVE_UINT);
+
+ /* Create the base compound type */
+ tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+
+ /* Insert fields */
+ ret=H5Tinsert(tid2, "i", HOFFSET(s1, i), H5T_NATIVE_INT);
+ ret=H5Tinsert(tid2, "f", HOFFSET(s1, f), H5T_NATIVE_FLOAT);
+ ret=H5Tinsert(tid2, "v", HOFFSET(s1, v), tid1);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset1",tid2,sid1,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid2,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Reclaim the write VL data */
+ ret=H5Dvlen_reclaim(tid2,sid1,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid2);
+ ret = H5Tclose(tid1);
+ ret = H5Sclose(sid1);
+ ret = H5Fclose(fid1);
+}
+
+static void test_vldatatypes4(void)
+{
+ typedef struct { /* Struct that the VL sequences are composed of */
+ int i;
+ float f;
+ } s1;
+ hvl_t wdata[SPACE1_DIM1]; /* Information to write */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1, tid2; /* Datatype IDs */
+ hsize_t dims1[] = {SPACE1_DIM1};
+ uintn i,j; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ ret = ret; /* so that compiler won't complain "is set but never used" */
+
+ /* Allocate and initialize VL data to write */
+ for(i=0; i<SPACE1_DIM1; i++) {
+ wdata[i].p=malloc((i+1)*sizeof(s1));
+ wdata[i].len=i+1;
+ for(j=0; j<(i+1); j++) {
+ ((s1 *)wdata[i].p)[j].i=i*10+j;
+ ((s1 *)wdata[i].p)[j].f=(i*20+j)/3.0;
+ } /* end for */
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE24, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, dims1, NULL);
+
+ /* Create the base compound type */
+ tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1));
+
+ /* Insert fields */
+ ret=H5Tinsert(tid2, "i", HOFFSET(s1, i), H5T_NATIVE_INT);
+ ret=H5Tinsert(tid2, "f", HOFFSET(s1, f), H5T_NATIVE_FLOAT);
+
+ /* Create a datatype to refer to */
+ tid1 = H5Tvlen_create (tid2);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset1",tid1,sid1,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid1,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Reclaim the write VL data */
+ ret=H5Dvlen_reclaim(tid1,sid1,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid1);
+ ret = H5Tclose(tid2);
+ ret = H5Sclose(sid1);
+ ret = H5Fclose(fid1);
+}
+
+static void test_array1(void)
+{
+ int wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ intn i,j; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ /* Allocate and initialize array data to write */
+ for(i=0; i<SPACE1_DIM1; i++)
+ for(j=0; j<ARRAY1_DIM1; j++)
+ wdata[i][j]=i*10+j;
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE25, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+
+ /* Create a datatype to refer to */
+ tid1 = H5Tarray_create (H5T_NATIVE_INT,ARRAY1_RANK,tdims1,NULL);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset1",tid1,sid1,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid1,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid1);
+ ret = H5Sclose(sid1);
+ ret = H5Fclose(fid1);
+}
+
+static void test_array2(void)
+{
+ int wdata[SPACE1_DIM1][ARRAY2_DIM1][ARRAY2_DIM2][ARRAY2_DIM3]; /* Information to write */
+ hid_t fid; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t tid; /* Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims2[] = {ARRAY2_DIM1,ARRAY2_DIM2,ARRAY2_DIM3};
+ intn i,j,k,l; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ /* Allocate and initialize array data to write */
+ for(i=0; i<SPACE1_DIM1; i++)
+ for(j=0; j<ARRAY2_DIM1; j++)
+ for(k=0; k<ARRAY2_DIM2; k++)
+ for(l=0; l<ARRAY2_DIM3; l++)
+ wdata[i][j][k][l]=i*1000+j*100+k*10+l;
+
+ /* Create file */
+ fid = H5Fcreate(FILE26, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+
+ /* Create a datatype to refer to */
+ tid = H5Tarray_create (H5T_NATIVE_INT,ARRAY2_RANK,tdims2,NULL);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid,"Dataset1",tid,sid,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid);
+ ret = H5Sclose(sid);
+ ret = H5Fclose(fid);
+}
+
+static void test_array3(void)
+{
+ int wdata[SPACE1_DIM1][ARRAY1_DIM1][ARRAY3_DIM1][ARRAY3_DIM2]; /* Information to write */
+ hid_t fid; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid; /* Dataspace ID */
+ hid_t tid1; /* 1-D array Datatype ID */
+ hid_t tid2; /* 2-D array Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ hsize_t tdims2[] = {ARRAY3_DIM1,ARRAY3_DIM2};
+ intn i,j,k,l; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ /* Allocate and initialize array data to write */
+ for(i=0; i<SPACE1_DIM1; i++)
+ for(j=0; j<ARRAY1_DIM1; j++)
+ for(k=0; k<ARRAY3_DIM1; k++)
+ for(l=0; l<ARRAY3_DIM2; l++)
+ wdata[i][j][k][l]=i*1000+j*100+k*10+l;
+
+ /* Create file */
+ fid = H5Fcreate(FILE27, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+
+ /* Create a 2-D datatype to refer to */
+ tid2 = H5Tarray_create (H5T_NATIVE_INT,ARRAY3_RANK,tdims2,NULL);
+
+ /* Create a 1-D datatype to refer to */
+ tid1 = H5Tarray_create (tid2,ARRAY1_RANK,tdims1,NULL);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid,"Dataset1",tid1,sid,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid1,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid1);
+ ret = H5Tclose(tid2);
+ ret = H5Sclose(sid);
+ ret = H5Fclose(fid);
+}
+
+static void test_array4(void)
+{
+ typedef struct { /* Typedef for compound datatype */
+ int i;
+ float f;
+ } s1_t;
+ s1_t wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Array Datatype ID */
+ hid_t tid2; /* Compound Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ intn i,j; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ /* Initialize array data to write */
+ for(i=0; i<SPACE1_DIM1; i++)
+ for(j=0; j<ARRAY1_DIM1; j++) {
+ wdata[i][j].i=i*10+j;
+ wdata[i][j].f=i*2.5+j;
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE28, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+
+ /* Create a compound datatype to refer to */
+ tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+
+ /* Insert integer field */
+ ret = H5Tinsert (tid2, "i", HOFFSET(s1_t,i), H5T_NATIVE_INT);
+
+ /* Insert float field */
+ ret = H5Tinsert (tid2, "f", HOFFSET(s1_t,f), H5T_NATIVE_FLOAT);
+
+ /* Create an array datatype to refer to */
+ tid1 = H5Tarray_create (tid2,ARRAY1_RANK,tdims1,NULL);
+
+ /* Close compound datatype */
+ ret=H5Tclose(tid2);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset1",tid1,sid1,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid1,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid1);
+ ret = H5Sclose(sid1);
+ ret = H5Fclose(fid1);
+}
+
+static void test_array5(void)
+{
+ typedef struct { /* Typedef for compound datatype */
+ int i;
+ float f[ARRAY1_DIM1];
+ } s1_t;
+ s1_t wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Array Datatype ID */
+ hid_t tid2; /* Compound Datatype ID */
+ hid_t tid3; /* Nested Array Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ intn i,j,k; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ /* Initialize array data to write */
+ for(i=0; i<SPACE1_DIM1; i++)
+ for(j=0; j<ARRAY1_DIM1; j++) {
+ wdata[i][j].i=i*10+j;
+ for(k=0; k<ARRAY1_DIM1; k++)
+ wdata[i][j].f[k]=i*10+j*2.5+k;
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE29, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+
+ /* Create a compound datatype to refer to */
+ tid2 = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
+
+ /* Insert integer field */
+ ret = H5Tinsert (tid2, "i", HOFFSET(s1_t,i), H5T_NATIVE_INT);
+
+ /* Create an array of floats datatype */
+ tid3 = H5Tarray_create (H5T_NATIVE_FLOAT,ARRAY1_RANK,tdims1,NULL);
+
+ /* Insert float array field */
+ ret = H5Tinsert (tid2, "f", HOFFSET(s1_t,f), tid3);
+
+ /* Close array of floats field datatype */
+ ret=H5Tclose(tid3);
+
+ /* Create an array datatype to refer to */
+ tid1 = H5Tarray_create (tid2,ARRAY1_RANK,tdims1,NULL);
+
+ /* Close compound datatype */
+ ret=H5Tclose(tid2);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset1",tid1,sid1,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid1,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid1);
+ ret = H5Sclose(sid1);
+ ret = H5Fclose(fid1);
+}
+
+static void test_array6(void)
+{
+ hvl_t wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Array Datatype ID */
+ hid_t tid2; /* VL Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ intn i,j,k; /* counting variables */
+ herr_t ret; /* Generic return value */
+
+ /* Initialize array data to write */
+ for(i=0; i<SPACE1_DIM1; i++)
+ for(j=0; j<ARRAY1_DIM1; j++) {
+ wdata[i][j].p=malloc((i+j+1)*sizeof(unsigned int));
+ wdata[i][j].len=i+j+1;
+ for(k=0; k<(i+j+1); k++)
+ ((unsigned int *)wdata[i][j].p)[k]=i*100+j*10+k;
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE30, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+
+ /* Create a compound datatype to refer to */
+ tid2 = H5Tvlen_create(H5T_NATIVE_UINT);
+
+ /* Create an array datatype to refer to */
+ tid1 = H5Tarray_create (tid2,ARRAY1_RANK,tdims1,NULL);
+
+ /* Close VL datatype */
+ ret=H5Tclose(tid2);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset1",tid1,sid1,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid1,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Reclaim the write VL data */
+ ret=H5Dvlen_reclaim(tid1,sid1,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid1);
+ ret = H5Sclose(sid1);
+ ret = H5Fclose(fid1);
+}
+
+static void test_array7(void)
+{
+ hvl_t wdata[SPACE1_DIM1][ARRAY1_DIM1]; /* Information to write */
+ hid_t fid1; /* HDF5 File IDs */
+ hid_t dataset; /* Dataset ID */
+ hid_t sid1; /* Dataspace ID */
+ hid_t tid1; /* Array Datatype ID */
+ hid_t tid2; /* VL Datatype ID */
+ hid_t tid3; /* Nested Array Datatype ID */
+ hsize_t sdims1[] = {SPACE1_DIM1};
+ hsize_t tdims1[] = {ARRAY1_DIM1};
+ intn i,j,k,l; /* Index variables */
+ herr_t ret; /* Generic return value */
+
+ /* Initialize array data to write */
+ for(i=0; i<SPACE1_DIM1; i++)
+ for(j=0; j<ARRAY1_DIM1; j++) {
+ wdata[i][j].p=malloc((i+j+1)*(sizeof(unsigned int)*ARRAY1_DIM1));
+ wdata[i][j].len=i+j+1;
+ for(k=0; k<(i+j+1); k++)
+ for(l=0; l<ARRAY1_DIM1; l++)
+ ((unsigned int *)wdata[i][j].p)[k*ARRAY1_DIM1+l]=i*1000+j*100+k*10+l;
+ } /* end for */
+
+ /* Create file */
+ fid1 = H5Fcreate(FILE31, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ /* Create dataspace for datasets */
+ sid1 = H5Screate_simple(SPACE1_RANK, sdims1, NULL);
+
+ /* Create the nested array datatype to refer to */
+ tid3 = H5Tarray_create(H5T_NATIVE_UINT,ARRAY1_RANK,tdims1,NULL);
+
+ /* Create a VL datatype of 1-D arrays to refer to */
+ tid2 = H5Tvlen_create(tid3);
+
+ /* Close nested array datatype */
+ ret=H5Tclose(tid3);
+
+ /* Create an array datatype to refer to */
+ tid1 = H5Tarray_create (tid2,ARRAY1_RANK,tdims1,NULL);
+
+ /* Close VL datatype */
+ ret=H5Tclose(tid2);
+
+ /* Create a dataset */
+ dataset=H5Dcreate(fid1,"Dataset1",tid1,sid1,H5P_DEFAULT);
+
+ /* Write dataset to disk */
+ ret=H5Dwrite(dataset,tid1,H5S_ALL,H5S_ALL,H5P_DEFAULT,wdata);
+
+ /* Reclaim the write VL data */
+ ret=H5Dvlen_reclaim(tid1,sid1,H5P_DEFAULT,wdata);
+
+ /* Close Dataset */
+ ret = H5Dclose(dataset);
+ ret = H5Tclose(tid1);
+ ret = H5Sclose(sid1);
+ ret = H5Fclose(fid1);
+}
+
+static void test_empty(void)
+{
+ typedef struct {
+ int a;
+ float b;
+ char c;
+ } empty_struct;
+ hid_t file, dset, space, type;
+ hsize_t dims[] = { SPACE1_DIM1 };
+ herr_t ret=0;
+
+ ret = ret; /* so that compiler won't complain "is set but never used" */
+ file = H5Fcreate(FILE32, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+
+ space = H5Screate_simple(SPACE1_RANK, dims, NULL);
+
+ /* write out an empty vlen dataset */
+ type = H5Tvlen_create(H5T_NATIVE_INT);
+ dset = H5Dcreate(file, "Dataset1.0", type, space, H5P_DEFAULT);
+ /* Don't write any data */
+ ret = H5Dclose(dset);
+ ret = H5Tclose(type);
+
+ /* write out an empty native integer dataset */
+ dset = H5Dcreate(file, "Dataset2.0", H5T_NATIVE_INT, space, H5P_DEFAULT);
+ /* Don't write any data */
+ ret = H5Dclose(dset);
+
+ /* write out an empty native floating-point dataset */
+ dset = H5Dcreate(file, "Dataset3.0", H5T_NATIVE_FLOAT, space, H5P_DEFAULT);
+ /* Don't write any data */
+ ret = H5Dclose(dset);
+
+ /* write out an empty array dataset */
+ type = H5Tarray_create(H5T_NATIVE_INT,SPACE1_RANK,dims,NULL);
+ dset = H5Dcreate(file, "Dataset4.0", type, space, H5P_DEFAULT);
+ /* Don't write any data */
+ ret = H5Dclose(dset);
+ ret = H5Tclose(type);
+
+ /* write out an empty compound dataset */
+ type = H5Tcreate(H5T_COMPOUND,sizeof(empty_struct));
+ H5Tinsert(type, "a", HOFFSET(empty_struct, a),H5T_NATIVE_INT);
+ H5Tinsert(type, "b", HOFFSET(empty_struct, b),H5T_NATIVE_FLOAT);
+ H5Tinsert(type, "c", HOFFSET(empty_struct, c),H5T_NATIVE_CHAR);
+ dset = H5Dcreate(file, "Dataset5.0", type, space, H5P_DEFAULT);
+ /* Don't write any data */
+ ret = H5Dclose(dset);
+ ret = H5Tclose(type);
+
+ ret = H5Sclose(space);
+
+ ret = H5Fclose(file);
+}
+
+int main(void)
+{
+ test_group();
+ test_attribute();
+ test_softlink();
+ test_dataset();
+ test_hardlink();
+ test_compound_dt();
+ test_all();
+ test_loop();
+
+ test_dataset2();
+ test_compound_dt2();
+ test_loop2();
+ test_many();
+
+ test_str();
+ test_str2();
+
+ test_enum();
+
+ test_objref();
+ test_datareg();
+
+ test_nestcomp();
+
+ test_opaque();
+
+ test_bitfields();
+
+ test_vldatatypes();
+ test_vldatatypes2();
+ test_vldatatypes3();
+ test_vldatatypes4();
+
+ test_array1();
+ test_array2();
+ test_array3();
+ test_array4();
+ test_array5();
+ test_array6();
+ test_array7();
+
+ test_empty();
+
+ return 0;
+}
diff --git a/tools/h5dump/testh5dump.sh b/tools/h5dump/testh5dump.sh
new file mode 100755
index 0000000..05dfb09
--- /dev/null
+++ b/tools/h5dump/testh5dump.sh
@@ -0,0 +1,197 @@
+#! /bin/sh
+#
+# Copyright (C) 1998-2001 National Center for Supercomputing Applications
+# All rights reserved.
+#
+# Tests for the h5dump tool
+
+DUMPER=h5dump # The tool name
+DUMPER_BIN=`pwd`/$DUMPER # The path of the tool binary
+
+CMP='cmp -s'
+DIFF='diff -c'
+
+nerrors=0
+verbose=yes
+
+# The build (current) directory might be different than the source directory.
+if test -z "$srcdir"; then
+ srcdir=.
+fi
+
+test -d ../testfiles || mkdir ../testfiles
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# Run a test and print PASS or *FAIL*. If a test fails then increment
+# the `nerrors' global variable and (if $verbose is set) display the
+# difference between the actual output and the expected output. The
+# expected output is given as the first argument to this function and
+# the actual output file is calculated by replacing the `.ddl' with
+# `.out'. The actual output is not removed if $HDF5_NOCLEANUP has a
+# non-zero value.
+#
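+# For example, "TOOLTEST tgroup-1.ddl tgroup.h5" runs the dumper on tgroup.h5
+# and compares the captured output against ../testfiles/tgroup-1.ddl.
+#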
+TOOLTEST() {
+ expect="$srcdir/../testfiles/$1"
+ actual="../testfiles/`basename $1 .ddl`.out"
+ shift
+
+ # Run test.
+ TESTING $DUMPER $@
+
+ (
+ echo "#############################"
+ echo "Expected output for '$DUMPER $@'"
+ echo "#############################"
+ cd $srcdir/../testfiles
+ $RUNSERIAL $DUMPER_BIN "$@"
+ ) >$actual 2>&1
+
+ if $CMP $expect $actual; then
+ echo " PASSED"
+ else
+ echo "*FAILED*"
+ echo " Expected result (*.ddl) differs from actual result (*.out)"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF $expect $actual |sed 's/^/ /'
+ fi
+
+ # Clean up output file
+ if test -z "$HDF5_NOCLEANUP"; then
+ rm -f $actual
+ fi
+}
+
+##############################################################################
+##############################################################################
+### T H E T E S T S ###
+##############################################################################
+##############################################################################
+
+# test for displaying groups
+TOOLTEST tgroup-1.ddl tgroup.h5
+# test for displaying the selected groups
+TOOLTEST tgroup-2.ddl --group=/g2 --group / -g /y tgroup.h5
+
+# test for displaying simple space datasets
+TOOLTEST tdset-1.ddl tdset.h5
+# test for displaying selected datasets
+TOOLTEST tdset-2.ddl -H -d dset1 -d /dset2 --dataset=dset3 tdset.h5
+
+# test for displaying attributes
+TOOLTEST tattr-1.ddl tattr.h5
+# test for displaying the selected attributes of string type and scalar space
+TOOLTEST tattr-2.ddl -a /attr1 --attribute /attr4 --attribute=/attr5 tattr.h5
+# test for header and error messages
+TOOLTEST tattr-3.ddl --header -a /attr2 --attribute=/attr tattr.h5
+
+# test for displaying soft links
+TOOLTEST tslink-1.ddl tslink.h5
+# test for displaying the selected link
+TOOLTEST tslink-2.ddl -l slink2 tslink.h5
+
+# tests for hard links
+TOOLTEST thlink-1.ddl thlink.h5
+TOOLTEST thlink-2.ddl -d /g1/dset2 --dataset /dset1 --dataset=/g1/g1.1/dset3 thlink.h5
+TOOLTEST thlink-3.ddl -d /g1/g1.1/dset3 --dataset /g1/dset2 --dataset=/dset1 thlink.h5
+TOOLTEST thlink-4.ddl -g /g1 thlink.h5
+TOOLTEST thlink-5.ddl -d /dset1 -g /g2 -d /g1/dset2 thlink.h5
+
+# tests for compound data types
+TOOLTEST tcomp-1.ddl tcompound.h5
+# test for named data types
+TOOLTEST tcomp-2.ddl -t /type1 --datatype /type2 --datatype=/group1/type3 tcompound.h5
+# test for unnamed type
+TOOLTEST tcomp-3.ddl -t /#5992:0 -g /group2 tcompound.h5
+
+# test for the nested compound type
+TOOLTEST tnestcomp-1.ddl tnestedcomp.h5
+
+# test for options
+TOOLTEST tall-1.ddl tall.h5
+TOOLTEST tall-2.ddl --header -g /g1/g1.1 -a attr2 tall.h5
+TOOLTEST tall-3.ddl -d /g2/dset2.1 -l /g1/g1.2/g1.2.1/slink tall.h5
+
+# test for loop detection
+TOOLTEST tloop-1.ddl tloop.h5
+
+# test for string
+TOOLTEST tstr-1.ddl tstr.h5
+TOOLTEST tstr-2.ddl tstr2.h5
+
+# test for file created by Lib SAF team
+TOOLTEST tsaf.ddl tsaf.h5
+
+# test for file with variable length data
+TOOLTEST tvldtypes1.ddl tvldtypes1.h5
+TOOLTEST tvldtypes2.ddl tvldtypes2.h5
+TOOLTEST tvldtypes3.ddl tvldtypes3.h5
+TOOLTEST tvldtypes4.ddl tvldtypes4.h5
+
+# test for files with array data
+TOOLTEST tarray1.ddl tarray1.h5
+TOOLTEST tarray2.ddl tarray2.h5
+TOOLTEST tarray3.ddl tarray3.h5
+TOOLTEST tarray4.ddl tarray4.h5
+TOOLTEST tarray5.ddl tarray5.h5
+TOOLTEST tarray6.ddl tarray6.h5
+TOOLTEST tarray7.ddl tarray7.h5
+
+# test for files with empty data
+TOOLTEST tempty.ddl tempty.h5
+
+# test XML
+TOOLTEST tall.h5.xml --xml tall.h5
+TOOLTEST tattr.h5.xml --xml tattr.h5
+TOOLTEST tbitfields.h5.xml --xml tbitfields.h5
+TOOLTEST tcompound.h5.xml --xml tcompound.h5
+TOOLTEST tcompound2.h5.xml --xml tcompound2.h5
+TOOLTEST tdatareg.h5.xml --xml tdatareg.h5
+TOOLTEST tdset.h5.xml --xml tdset.h5
+TOOLTEST tdset2.h5.xml --xml tdset2.h5
+TOOLTEST tenum.h5.xml --xml tenum.h5
+TOOLTEST tgroup.h5.xml --xml tgroup.h5
+TOOLTEST thlink.h5.xml --xml thlink.h5
+TOOLTEST tloop.h5.xml --xml tloop.h5
+TOOLTEST tloop2.h5.xml --xml tloop2.h5
+TOOLTEST tmany.h5.xml --xml tmany.h5
+TOOLTEST tnestedcomp.h5.xml --xml tnestedcomp.h5
+TOOLTEST tobjref.h5.xml --xml tobjref.h5
+TOOLTEST topaque.h5.xml --xml topaque.h5
+TOOLTEST tslink.h5.xml --xml tslink.h5
+TOOLTEST tstr.h5.xml --xml tstr.h5
+TOOLTEST tstr2.h5.xml --xml tstr2.h5
+TOOLTEST tref.h5.xml --xml tref.h5
+TOOLTEST tname-amp.h5.xml --xml tname-amp.h5
+TOOLTEST tname-apos.h5.xml --xml tname-apos.h5
+TOOLTEST tname-gt.h5.xml --xml tname-gt.h5
+TOOLTEST tname-lt.h5.xml --xml tname-lt.h5
+TOOLTEST tname-quot.h5.xml --xml tname-quot.h5
+TOOLTEST tname-sp.h5.xml --xml tname-sp.h5
+TOOLTEST tstring.h5.xml --xml tstring.h5
+TOOLTEST tstring-at.h5.xml --xml tstring-at.h5
+TOOLTEST tref-escapes.h5.xml --xml tref-escapes.h5
+TOOLTEST tref-escapes-at.h5.xml --xml tref-escapes-at.h5
+TOOLTEST tnodata.h5.xml --xml tnodata.h5
+TOOLTEST tarray1.h5.xml --xml tarray1.h5
+TOOLTEST tarray2.h5.xml --xml tarray2.h5
+TOOLTEST tarray3.h5.xml --xml tarray3.h5
+TOOLTEST tarray6.h5.xml --xml tarray6.h5
+TOOLTEST tarray7.h5.xml --xml tarray7.h5
+TOOLTEST tvldtypes1.h5.xml --xml tvldtypes1.h5
+TOOLTEST tvldtypes2.h5.xml --xml tvldtypes2.h5
+TOOLTEST tvldtypes3.h5.xml --xml tvldtypes3.h5
+TOOLTEST tsaf.h5.xml --xml tsaf.h5
+TOOLTEST tempty.h5.xml --xml tempty.h5
+
+if test $nerrors -eq 0 ; then
+ echo "All $DUMPER tests passed."
+fi
+
+exit $nerrors
diff --git a/tools/h5ls/Dependencies b/tools/h5ls/Dependencies
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/h5ls/Dependencies
diff --git a/tools/h5ls/Makefile.in b/tools/h5ls/Makefile.in
new file mode 100644
index 0000000..ba75fcd
--- /dev/null
+++ b/tools/h5ls/Makefile.in
@@ -0,0 +1,55 @@
+## HDF5 Library Makefile(.in)
+##
+## Copyright (C) 2001 National Center for Supercomputing Applications.
+## All rights reserved.
+##
+##
+top_srcdir=@top_srcdir@
+top_builddir=../..
+srcdir=@srcdir@
+SUBDIRS=
+@COMMENCE@
+
+## Add include directory to the C preprocessor flags, add -lh5tools and
+## -lhdf5 to the list of libraries.
+##
+CPPFLAGS=-I. -I$(srcdir) -I$(top_builddir)/src -I$(top_srcdir)/src \
+ -I$(top_srcdir)/tools/lib @CPPFLAGS@
+
+## Test programs and scripts.
+##
+TEST_PROGS=
+TEST_SCRIPTS=$(srcdir)/testh5ls.sh
+
+## These are our main targets: library and tools.
+##
+LIBTOOLS=../lib/libh5tools.la
+LIBHDF5=$(top_builddir)/src/libhdf5.la
+PUB_PROGS=h5ls
+PROGS=$(PUB_PROGS) $(TEST_PROGS)
+
+## Source and object files for the library; do not install
+LIB_SRC=
+LIB_OBJ=$(LIB_SRC:.c=.lo)
+PUB_LIB=
+
+## Source and object files for programs...
+PROG_SRC=h5ls.c
+PROG_OBJ=$(PROG_SRC:.c=.lo)
+PRIVATE_HDR=
+
+## Source and object files for the tests
+TEST_SRC=
+TEST_OBJ=$(TEST_SRC:.c=.lo)
+
+## Programs have to be built before they can be tested!
+check test _test: $(PROGS)
+
+## How to build the programs... They all depend on the hdf5 library and
+## the tools library compiled in this directory.
+$(PROGS): $(LIBTOOLS) $(LIBHDF5)
+
+h5ls: h5ls.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ h5ls.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+@CONCLUDE@
diff --git a/tools/h5ls/testh5ls.sh b/tools/h5ls/testh5ls.sh
new file mode 100755
index 0000000..cb3f99f
--- /dev/null
+++ b/tools/h5ls/testh5ls.sh
@@ -0,0 +1,143 @@
+#! /bin/sh
+#
+# Copyright (C) 2001 National Center for Supercomputing Applications
+# All rights reserved.
+#
+# Tests for the h5ls tool
+
+H5LS=h5ls # The tool name
+H5LS_BIN=`pwd`/$H5LS # The path of the tool binary
+
+CMP='cmp -s'
+DIFF='diff -c'
+NLINES=20 # Max. lines of output to display if test fails
+
+nerrors=0
+verbose=yes
+
+# The build (current) directory might be different than the source directory.
+if test -z "$srcdir"; then
+ srcdir=.
+fi
+test -d ../testfiles || mkdir ../testfiles
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" |cut -c1-70 |tr -d '\012'
+}
+
+# Run a test and print PASS or *FAIL*. For now, if h5ls can complete
+# with exit status 0, consider it pass. If a test fails then increment
+# the `nerrors' global variable and (if $verbose is set) display up to $NLINES
+# lines of the actual output from the tool test. The actual output is not
+# removed if $HDF5_NOCLEANUP has a non-null value.
+# Arguments:
+#  $1 -- expected output filename; the actual output name is derived from it
+#  $2 and on -- arguments for the h5ls tool
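+#
+# A minimal usage sketch (the file names here are hypothetical):
+#   TOOLTEST foo.ls -w80 foo.h5
+# compares the captured output of `h5ls -w80 foo.h5' (../testfiles/foo.out)
+# against the expected output $srcdir/../testfiles/foo.ls.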
+TOOLTEST() {
+ expect="$srcdir/../testfiles/$1"
+ actual="../testfiles/`basename $1 .ls`.out"
+ shift
+
+ # Run test.
+ # Stderr is included in stdout so that the diff can detect
+ # any unexpected output from that stream too.
+ TESTING $H5LS $@
+ (
+ echo "#############################"
+ echo " output for '$H5LS $@'"
+ echo "#############################"
+ cd $srcdir/../testfiles
+ $RUNSERIAL $H5LS_BIN "$@"
+ ) >$actual 2>&1
+
+ exitcode=$?
+ if [ $exitcode -ne 0 ]; then
+ echo "*FAILED*"
+ nerrors="`expr $nerrors + 1`"
+ if [ yes = "$verbose" ]; then
+ echo "test returned with exit code $exitcode"
+ echo "test output: (up to $NLINES lines)"
+ head -$NLINES $actual
+ echo "***end of test output***"
+ echo ""
+ fi
+ elif [ ! -f $expect ]; then
+ # Create the expect file if it doesn't yet exist.
+ echo " CREATED"
+ cp $actual $expect
+ elif $CMP $expect $actual; then
+ echo " PASSED"
+ else
+ echo "*FAILED*"
+ echo " Expected result differs from actual result"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF $expect $actual |sed 's/^/ /'
+ fi
+
+ # Clean up output file
+ if test -z "$HDF5_NOCLEANUP"; then
+ rm -f $actual
+ fi
+}
+
+##############################################################################
+##############################################################################
+### T H E T E S T S ###
+##############################################################################
+##############################################################################
+
+# Toss in a bunch of tests. Not sure if they are the right kinds.
+# test the help syntax
+TOOLTEST help-1.ls -w80 -h
+TOOLTEST help-2.ls -w80 -help
+TOOLTEST help-3.ls -w80 -?
+
+# test simple command
+TOOLTEST tall-1.ls -w80 tall.h5
+TOOLTEST tall-2.ls -w80 -r -d tall.h5
+TOOLTEST tgroup.ls -w80 tgroup.h5
+
+# test for displaying groups
+TOOLTEST tgroup-1.ls -w80 -r -g tgroup.h5
+
+# test for displaying simple space datasets
+TOOLTEST tdset-1.ls -w80 -r -d tdset.h5
+
+# test for displaying soft links
+TOOLTEST tslink-1.ls -w80 -r tslink.h5
+
+# tests for hard links
+TOOLTEST thlink-1.ls -w80 thlink.h5
+
+# tests for compound data types
+TOOLTEST tcomp-1.ls -w80 -r -d tcompound.h5
+
+# test for the nested compound type
+TOOLTEST tnestcomp-1.ls -w80 -r -d tnestedcomp.h5
+
+# test for loop detection
+TOOLTEST tloop-1.ls -w80 -r -d tloop.h5
+
+# test for string
+TOOLTEST tstr-1.ls -w80 -r -d tstr.h5
+
+# test for a file created by the Lib SAF team
+TOOLTEST tsaf.ls -w80 -r -d tsaf.h5
+
+# test for variable length data types
+TOOLTEST tvldtypes1.ls -w80 -r -d tvldtypes1.h5
+
+# test for array data types
+TOOLTEST tarray1.ls -w80 -r -d tarray1.h5
+
+# test for empty data
+TOOLTEST tempty.ls -w80 -d tempty.h5
+
+if test $nerrors -eq 0 ; then
+ echo "All h5ls tests passed."
+fi
+
+exit $nerrors
diff --git a/tools/h5toh4/Dependencies b/tools/h5toh4/Dependencies
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/h5toh4/Dependencies
diff --git a/tools/h5toh4/Makefile.in b/tools/h5toh4/Makefile.in
new file mode 100644
index 0000000..4c529d6
--- /dev/null
+++ b/tools/h5toh4/Makefile.in
@@ -0,0 +1,60 @@
+## HDF5 Library Makefile(.in)
+##
+## Copyright (C) 2001 National Center for Supercomputing Applications.
+## All rights reserved.
+##
+##
+top_srcdir=@top_srcdir@
+top_builddir=../..
+srcdir=@srcdir@
+SUBDIRS=
+@COMMENCE@
+
+## Add include directory to the C preprocessor flags, add -lh5tools and
+## -lhdf5 to the list of libraries.
+##
+CPPFLAGS=-I. -I$(srcdir) -I$(top_builddir)/src -I$(top_srcdir)/src \
+ -I$(top_srcdir)/tools/lib @CPPFLAGS@
+
+## Test programs and scripts.
+##
+TEST_PROGS=
+TEST_SCRIPTS=@TESTH5TOH4@
+
+## These are our main targets: library and tools.
+##
+LIBTOOLS=../lib/libh5tools.la
+LIBHDF5=$(top_builddir)/src/libhdf5.la
+PUB_PROGS=@H5TOH4@
+PROGS=$(PUB_PROGS) $(TEST_PROGS)
+
+## Source and object files for the library; do not install
+##
+LIB_SRC=
+LIB_OBJ=$(LIB_SRC:.c=.lo)
+PUB_LIB=
+
+## Source and object files for programs...
+##
+PROG_SRC=h5toh4.c
+PROG_OBJ=$(PROG_SRC:.c=.lo)
+PRIVATE_HDR=h5toh4.h
+
+## Source and object files for the tests
+##
+TEST_SRC=
+TEST_OBJ=$(TEST_SRC:.c=.lo)
+
+## Programs have to be built before they can be tested!
+##
+check test _test: $(PROGS)
+
+## How to build the programs... They all depend on the hdf5 library and
+## the tools library compiled in this directory.
+##
+$(PROGS): $(LIBTOOLS) $(LIBHDF5)
+
+h5toh4: h5toh4.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ h5toh4.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+@CONCLUDE@
diff --git a/tools/h5toh4/h5toh4.h b/tools/h5toh4/h5toh4.h
new file mode 100644
index 0000000..d268060
--- /dev/null
+++ b/tools/h5toh4/h5toh4.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright © 1998 NCSA
+ * All rights reserved.
+ *
+ * Programmer: Paul Harten <pharten@ncsa.uiuc.edu>
+ * Friday, October 16th, 1998
+ *
+ * Purpose: Convert H5 files to H4 files.
+ */
+
+#ifndef _H5TOH4_H
+#define _H5TOH4_H
+
+#include "hdf.h"
+#include "mfhdf.h"
+#include "hdf5.h"
+
+#ifdef H5_HAVE_SYS_STAT_H
+# include <sys/stat.h>
+#endif
+
+typedef struct op_data_t {
+ /*
+ * information being carried between iterations.
+ *
+ */
+
+ int32 hfile_id;
+ int32 vgroup_id;
+ int32 sd_id;
+ int32 sds_id;
+ int32 vdata_id;
+ int32 obj_idx;
+
+} op_data_t;
+
+#ifdef H5TOH4_DEBUG
+#define DEBUG_PRINT(s1,s2,s3,n1) ( fprintf(stderr,s1,s2,s3,n1) )
+#else
+#define DEBUG_PRINT(s1,s2,s3,n1) ( fprintf(stderr," ") )
+#endif
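+
+/*
+ * Illustrative use only (the argument names below are hypothetical):
+ *     DEBUG_PRINT("converting %s of type %s, index %d\n", obj_name, obj_type, idx);
+ * With H5TOH4_DEBUG defined the message goes to stderr; otherwise the macro
+ * degenerates to printing a single space.
+ */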
+
+#endif
diff --git a/tools/h5toh4/testh5toh4.sh b/tools/h5toh4/testh5toh4.sh
new file mode 100755
index 0000000..986a6a2
--- /dev/null
+++ b/tools/h5toh4/testh5toh4.sh
@@ -0,0 +1,299 @@
+#! /bin/sh
+#
+# Copyright (C) 1997-2001 National Center for Supercomputing Applications.
+# All rights reserved.
+#
+
+H5TOH4=h5toh4 # a relative name
+H5TOH4_BIN=`pwd`/$H5TOH4 # an absolute command path
+
+cmp='cmp -s'
+diff='diff -c'
+
+RM='rm -f'
+SED='sed '
+H4DUMP='hdp'
+
+# Verify if $H4DUMP is a valid command.
+tmpfile=/tmp/testh5toh4.$$
+$H4DUMP -H > $tmpfile
+if test -s "$tmpfile"; then
+ # Find out which version of hdp is being used. Over simplified
+ # algorithm but will do the job for now.
+ if ( grep -s 'NCSA HDF Version 4.1 Release [3-9]' $tmpfile > /dev/null )
+ then
+ H4DUMPVER=413
+ else
+ H4DUMPVER=0
+        echo "    Some tests may be skipped because your version of $H4DUMP does"
+ echo " not handle loops in Vgroups correctly. You need version"
+ echo " 4.1 Release 3 or later. Visit http://hdf.ncsa.uiuc.edu"
+ echo " or email hdfhelp@ncsa.uiuc.edu for more information."
+ fi
+else
+ echo " Could not run the '$H4DUMP' command. The test can still proceed"
+ echo " but it may fail if '$H4DUMP' is needed to verify the output."
+    echo "    You can make sure '$H4DUMP' is in your shell PATH and run"
+ echo " the test again. You may also visit http://hdf.ncsa.uiuc.edu"
+ echo " or email hdfhelp@ncsa.uiuc.edu for more information."
+ H4DUMP=:
+ H4DUMPVER=0
+fi
+$RM $tmpfile
+
+# The build (current) directory might be different than the source directory.
+if test -z "$srcdir"; then
+ srcdir=.
+fi
+mkdir ../testfiles >/dev/null 2>&1
+
+SRCDIR="$srcdir/../testfiles"
+OUTDIR="../testfiles/Results"
+
+test -d $OUTDIR || mkdir $OUTDIR
+
+nerrors=0
+verbose=yes
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" |cut -c1-70 |tr -d '\012'
+}
+
+# Run a test and print PASS or *FAIL*. If a test fails then increment
+# the `nerrors' global variable and (if $verbose is set) display the
+# difference between the actual and the expected hdf4 files. The
+# expected hdf4 files are in the testfiles/Expected directory.
+# The actual hdf4 file is not removed if $HDF5_NOCLEANUP is set to a non-null
+# value.
+CONVERT() {
+ # Run h5toh4 convert.
+ TESTING $H5TOH4 $@
+
+ #
+ # Set up arguments to run the conversion test.
+    # The converter assumes all hdf5 files have the .h5 suffix, as in the form
+    # of foo.h5. It creates the corresponding hdf4 files with the .hdf suffix,
+    # as in the form of foo.hdf. One exception is that if exactly two file
+    # names are given, it treats the first argument as an hdf5 file and creates
+    # the corresponding hdf4 file with the second argument as its name, WITHOUT
+    # any consideration of the suffix. (For this test script, in order to
+    # match the output hdf4 file with the expected hdf4 file, the second file
+    # of a two-file test is expected to have the .hdf suffix too.)
+ #
+ # If SRCDIR != OUTDIR, need to copy the input hdf5 files from the SRCDIR
+ # to the OUTDIR and transform the input file pathname because of the suffix
+ # convention mentioned above. This way, the hdf4 files are always created
+ # in the OUTDIR directory.
+ #
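+    # A sketch of the resulting names, with illustrative inputs:
+    #   CONVERT foo.h5            ->  $OUTDIR/foo.hdf
+    #   CONVERT foo.h5 bar.hdf    ->  $OUTDIR/bar.hdf
+    #   CONVERT -m a.h5 b.h5      ->  $OUTDIR/a.hdf and $OUTDIR/b.hdf
+    #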
+
+ INFILES=""
+ OUTFILES=""
+ MULTIRUN=""
+
+ case "$1" in
+ "-m") # multiple files conversion
+ MULTIRUN="-m"
+ shift
+ for f in $*
+ do
+ if test "$SRCDIR" != "$OUTDIR"; then
+ cp $SRCDIR/$f $OUTDIR/$f
+ fi
+ INFILES="$INFILES $f"
+ OUTFILES="$OUTFILES `basename $f .h5`.hdf"
+ shift
+ done
+ ;;
+ * ) # Single file conversion
+ case $# in
+ 1) if test "$SRCDIR" != "$OUTDIR"; then
+ cp $SRCDIR/$1 $OUTDIR/$1
+ fi
+ INFILES="$1"
+ OUTFILES="`basename $1 .h5`.hdf"
+ ;;
+ 2) # hdf4 file specified
+ if test "$SRCDIR" != "$OUTDIR"; then
+ cp $SRCDIR/$1 $OUTDIR/$1
+ fi
+ INFILES="$1"
+ OUTFILES="$2"
+ ;;
+ *) # Illegal
+ echo "Illegal arguments"
+ exit 1
+ ;;
+ esac
+ ;;
+ esac
+
+ # run the conversion and remove input files that have been copied over
+ (
+ cd $OUTDIR
+ $H5TOH4_BIN $MULTIRUN $INFILES 2>/dev/null
+ if test "$SRCDIR" != "$OUTDIR"; then
+ $RM $INFILES
+ fi
+ )
+
+ # Verify results
+ result="passed"
+ for f in $OUTFILES
+ do
+ if $cmp $SRCDIR/Expected/$f $OUTDIR/$f
+ then
+ :
+ else
+ # Use hdp to dump the files and verify the output.
+ # Filter out the output of "reference = ..." because
+ # reference numbers are immaterial in general.
+ outfile=`basename $f .hdf`
+ expect_out=$outfile.expect
+ actual_out=$outfile.actual
+
+ if [ $outfile = "tloop" -a $H4DUMPVER -lt 413 ]
+ then
+ echo " -SKIP-"
+ result="skipped"
+ touch $expect_out $actual_out # fake them
+ else
+ (cd $SRCDIR/Expected
+ $H4DUMP dumpvg $outfile.hdf
+ $H4DUMP dumpvd $outfile.hdf
+ $H4DUMP dumpsds $outfile.hdf ) |
+ sed -e 's/reference = [0-9]*;//' > $expect_out
+ (cd $OUTDIR
+ $H4DUMP dumpvg $outfile.hdf
+ $H4DUMP dumpvd $outfile.hdf
+ $H4DUMP dumpsds $outfile.hdf ) |
+ sed -e 's/reference = [0-9]*;//' > $actual_out
+ fi
+
+ if [ "passed" = $result -a ! -s $actual_out ] ; then
+ echo "*FAILED*"
+ nerrors="`expr $nerrors + 1`"
+ result=failed
+ test yes = "$verbose" &&
+ echo " H4DUMP failed to produce valid output"
+ elif $cmp $expect_out $actual_out; then
+ :
+ else
+ if test "passed" = $result; then
+ echo "*FAILED*"
+ nerrors="`expr $nerrors + 1`"
+ result=failed
+ fi
+ test yes = "$verbose" &&
+ echo " Actual result (*.actual) differs from expected result (*.expect)" &&
+ $diff $expect_out $actual_out |sed 's/^/ /'
+ fi
+ fi
+
+ # Clean up output file
+ if test -z "$HDF5_NOCLEANUP"; then
+ $RM $expect_out $actual_out
+ $RM $OUTDIR/$f
+ fi
+ done
+ if test "passed" = "$result"; then
+ echo " PASSED"
+ fi
+}
+
+
+
+##############################################################################
+##############################################################################
+### T H E T E S T S ###
+##############################################################################
+##############################################################################
+
+$RM $OUTDIR/*.hdf $OUTDIR/*.tmp
+
+#
+# The HDF4 filenames are created based upon the HDF5 filenames
+# without the extension.
+#
+
+# test for converting H5 groups to H4 Vgroups.
+CONVERT tgroup.h5
+
+# test for converting H5 datasets to H4 SDS's.
+CONVERT tdset.h5
+
+# test for converting H5 attributes to H4 attributes.
+CONVERT tattr.h5
+
+# test for converting H5 soft links.
+CONVERT tslink.h5
+
+# test for converting H5 hard links.
+CONVERT thlink.h5
+
+# test for converting H5 compound data type to H4 Vdata.
+CONVERT tcompound.h5
+
+# test for converting all H5 objects in the same file.
+CONVERT tall.h5
+
+# tests for converting H5 objects with loops.
+CONVERT tloop.h5
+
+# test for converting extendable H5 datasets to H4 SDS's.
+CONVERT tdset2.h5
+
+# test for converting extendable H5 datasets with compound data type to H4 Vdata.
+CONVERT tcompound2.h5
+
+# tests for converting H5 objects from many different pathways.
+CONVERT tmany.h5
+
+# tests for converting H5 string objects.
+CONVERT tstr.h5
+
+# tests for converting more H5 string objects.
+CONVERT tstr2.h5
+
+#
+# The conversion tests are the same as above, with the only difference
+# being that the HDF4 filenames are given explicitly.
+#
+
+$RM $OUTDIR/*.tmp
+CONVERT tgroup.h5 tgroup.hdf
+CONVERT tdset.h5 tdset.hdf
+CONVERT tattr.h5 tattr.hdf
+CONVERT tslink.h5 tslink.hdf
+CONVERT thlink.h5 thlink.hdf
+CONVERT tcompound.h5 tcompound.hdf
+CONVERT tall.h5 tall.hdf
+CONVERT tloop.h5 tloop.hdf
+CONVERT tdset2.h5 tdset2.hdf
+CONVERT tcompound2.h5 tcompound2.hdf
+CONVERT tmany.h5 tmany.hdf
+CONVERT tstr.h5 tstr.hdf
+CONVERT tstr2.h5 tstr2.hdf
+
+#
+# Again, the conversion tests are the same as the first set of tests.
+# Here, multiple conversions are done on HDF5 files at one time.
+#
+
+$RM $OUTDIR/*.hdf $OUTDIR/*.tmp
+CONVERT -m tgroup.h5 tdset.h5 tattr.h5 tslink.h5 thlink.h5
+CONVERT -m tcompound.h5 tall.h5
+CONVERT -m tloop.h5
+CONVERT -m tdset2.h5 tcompound2.h5 tmany.h5
+CONVERT -m tstr.h5 tstr2.h5
+
+if test $nerrors -eq 0 ; then
+ echo "All h5toh4 tests passed."
+fi
+
+if test -z "$HDF5_NOCLEANUP"; then
+ $RM -r $OUTDIR
+fi
+exit $nerrors
diff --git a/tools/lib/Dependencies b/tools/lib/Dependencies
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/lib/Dependencies
diff --git a/tools/misc/Dependencies b/tools/misc/Dependencies
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/misc/Dependencies
diff --git a/tools/misc/Makefile.in b/tools/misc/Makefile.in
new file mode 100644
index 0000000..def8f48
--- /dev/null
+++ b/tools/misc/Makefile.in
@@ -0,0 +1,71 @@
+## HDF5 Library Makefile(.in)
+##
+## Copyright (C) 2001 National Center for Supercomputing Applications.
+## All rights reserved.
+##
+##
+top_srcdir=@top_srcdir@
+top_builddir=../..
+srcdir=@srcdir@
+SUBDIRS=
+@COMMENCE@
+
+## Add include directory to the C preprocessor flags, add -lh5tools and
+## -lhdf5 to the list of libraries.
+##
+CPPFLAGS=-I. -I$(srcdir) -I$(top_builddir)/src -I$(top_srcdir)/src \
+ -I$(top_srcdir)/tools/lib @CPPFLAGS@
+
+## Test programs and scripts.
+##
+TEST_PROGS=
+TEST_SCRIPTS=
+
+## These are our main targets: library and tools.
+##
+LIBTOOLS=../lib/libh5tools.la
+LIBHDF5=$(top_builddir)/src/libhdf5.la
+
+PUB_PROGS=h5debug h5import h5repart @PDB2HDF@
+PROGS=$(PUB_PROGS) $(TEST_PROGS)
+
+## Source and object files for the library; do not install
+##
+LIB_SRC=
+LIB_OBJ=$(LIB_SRC:.c=.lo)
+PUB_LIB=
+
+## Source and object files for programs...
+##
+PROG_SRC=h5debug.c h5import.c h5repart.c pdb2hdf.c
+PROG_OBJ=$(PROG_SRC:.c=.lo)
+
+PRIVATE_HDR=
+
+## Source and object files for the tests
+##
+TEST_SRC=
+TEST_OBJ=$(TEST_SRC:.c=.lo)
+
+## Programs have to be built before they can be tested!
+##
+check test _test: $(PROGS)
+
+## How to build the programs...They all depend on the hdf5 library and
+## the tools library compiled in this directory.
+##
+$(PROGS): $(LIBTOOLS) $(LIBHDF5)
+
+h5debug: h5debug.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ h5debug.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+h5import: h5import.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ h5import.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+h5repart: h5repart.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ h5repart.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+pdb2hdf: pdb2hdf.lo
+ @$(LT_LINK_EXE) $(CFLAGS) -o $@ pdb2hdf.lo $(LIBTOOLS) $(LIBHDF5) $(LDFLAGS) $(LIBS)
+
+@CONCLUDE@
diff --git a/tools/misc/h5debug.c b/tools/misc/h5debug.c
new file mode 100644
index 0000000..76eb472
--- /dev/null
+++ b/tools/misc/h5debug.c
@@ -0,0 +1,185 @@
+/*-------------------------------------------------------------------------
+ * Copyright (C) 1997 National Center for Supercomputing Applications.
+ * All rights reserved.
+ *
+ *-------------------------------------------------------------------------
+ *
+ * Created: debug.c
+ * Jul 18 1997
+ * Robb Matzke <matzke@llnl.gov>
+ *
+ * Purpose: Debugs an existing HDF5 file at a low level.
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+#define H5F_PACKAGE /*suppress error about including H5Fpkg */
+
+#include <H5private.h>
+#include <H5Iprivate.h>
+#include <H5Bprivate.h>
+#include <H5Pprivate.h>
+#include <H5Fpkg.h>
+#include <H5Gprivate.h>
+#include <H5HGprivate.h>
+#include <H5HLprivate.h>
+#include <H5Oprivate.h>
+
+/* File drivers */
+#include <H5FDfamily.h>
+
+#define INDENT 3
+#define VCOL 50
+
+
+/*-------------------------------------------------------------------------
+ * Function: main
+ *
+ * Usage: h5debug FILENAME [SIGNATURE-ADDR [EXTRA]]
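+ *
+ * For example (illustrative): "h5debug foo.h5" debugs the boot block at
+ * address 0, and "h5debug foo.h5 96" reads the signature at relative
+ * address 96 and debugs whatever object starts there.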
+ *
+ * Return: Success: exit (0)
+ *
+ * Failure: exit (non-zero)
+ *
+ * Programmer: Robb Matzke
+ * matzke@llnl.gov
+ * Jul 18 1997
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+main(int argc, char *argv[])
+{
+ hid_t fid, plist=H5P_DEFAULT;
+ H5F_t *f;
+ haddr_t addr=0, extra=0;
+ uint8_t sig[16];
+ intn i, ndims;
+ herr_t status = SUCCEED;
+
+ if (argc == 1) {
+ fprintf(stderr,
+ "Usage: %s filename [signature addr [extra]]\n", argv[0]);
+ HDexit(1);
+ }
+
+ /*
+ * Open the file and get the file descriptor.
+ */
+ if (strchr (argv[1], '%')) {
+ plist = H5Pcreate (H5P_FILE_ACCESS);
+ H5Pset_fapl_family (plist, (hsize_t)0, H5P_DEFAULT);
+ }
+ if ((fid = H5Fopen(argv[1], H5F_ACC_RDONLY, plist)) < 0) {
+ fprintf(stderr, "cannot open file\n");
+ HDexit(1);
+ }
+ if (NULL == (f = H5I_object(fid))) {
+ fprintf(stderr, "cannot obtain H5F_t pointer\n");
+ HDexit(2);
+ }
+
+ /*
+ * Parse command arguments.
+ */
+ if (argc > 2) {
+ printf("New address: %s\n", argv[2]);
+ addr = HDstrtoll(argv[2], NULL, 0);
+ }
+ if (argc > 3) {
+ extra = HDstrtoll(argv[3], NULL, 0);
+ }
+ /*
+ * Read the signature at the specified file position.
+ */
+ HDfprintf(stdout, "Reading signature at address %a (rel)\n", addr);
+ if (H5F_block_read(f, H5FD_MEM_SUPER, addr, (hsize_t)sizeof(sig), H5P_DEFAULT, sig)<0) {
+ fprintf(stderr, "cannot read signature\n");
+ HDexit(3);
+ }
+ if (!HDmemcmp(sig, H5F_SIGNATURE, H5F_SIGNATURE_LEN)) {
+ /*
+ * Debug the boot block.
+ */
+ status = H5F_debug(f, addr, stdout, 0, VCOL);
+
+ } else if (!HDmemcmp(sig, H5HL_MAGIC, H5HL_SIZEOF_MAGIC)) {
+ /*
+ * Debug a local heap.
+ */
+ status = H5HL_debug(f, addr, stdout, 0, VCOL);
+
+ } else if (!HDmemcmp (sig, H5HG_MAGIC, H5HG_SIZEOF_MAGIC)) {
+ /*
+ * Debug a global heap collection.
+ */
+ status = H5HG_debug (f, addr, stdout, 0, VCOL);
+
+ } else if (!HDmemcmp(sig, H5G_NODE_MAGIC, H5G_NODE_SIZEOF_MAGIC)) {
+ /*
+ * Debug a symbol table node.
+ */
+ status = H5G_node_debug(f, addr, stdout, 0, VCOL, extra);
+
+ } else if (!HDmemcmp(sig, H5B_MAGIC, H5B_SIZEOF_MAGIC)) {
+ /*
+ * Debug a B-tree. B-trees are debugged through the B-tree
+ * subclass. The subclass identifier is the byte immediately
+ * after the B-tree signature.
+ */
+ H5B_subid_t subtype = (H5B_subid_t)sig[H5B_SIZEOF_MAGIC];
+
+ switch (subtype) {
+ case H5B_SNODE_ID:
+ status = H5G_node_debug(f, addr, stdout, 0, VCOL, extra);
+ break;
+
+ case H5B_ISTORE_ID:
+ ndims = (int)extra;
+ status = H5F_istore_debug (f, addr, stdout, 0, VCOL, ndims);
+ break;
+
+ default:
+ fprintf(stderr, "Unknown B-tree subtype %u\n",
+ (unsigned)(subtype));
+ HDexit(4);
+ }
+
+ } else if (sig[0] == H5O_VERSION) {
+ /*
+ * This could be an object header. Since they don't have a signature
+         * it's a somewhat "iffy" detection.
+ */
+ status = H5O_debug(f, addr, stdout, 0, VCOL);
+
+ } else {
+ /*
+ * Got some other unrecognized signature.
+ */
+ printf("%-*s ", VCOL, "Signature:");
+ for (i = 0; i < 8; i++) {
+ if (sig[i] > ' ' && sig[i] <= '~' && '\\' != sig[i]) {
+ HDputchar(sig[i]);
+ } else if ('\\' == sig[i]) {
+ HDputchar('\\');
+ HDputchar('\\');
+ } else {
+ printf("\\%03o", sig[i]);
+ }
+ }
+ HDputchar('\n');
+
+ fprintf(stderr, "unknown signature\n");
+ HDexit(4);
+ }
+
+ if (status < 0) {
+ fprintf(stderr, "An error occurred\n");
+ HDexit(5);
+ }
+ H5Fclose(fid);
+ return 0;
+}
diff --git a/tools/misc/h5import.c b/tools/misc/h5import.c
new file mode 100644
index 0000000..e896feb
--- /dev/null
+++ b/tools/misc/h5import.c
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 1998 NCSA
+ * All rights reserved.
+ *
+ * Programmer: Robb Matzke <matzke@llnl.gov>
+ * Thursday, June 11, 1998
+ *
+ * Purpose: Create an hdf5 file with a 1d dataset of uint8.
+ */
+
+/* See H5private.h for how to include system headers */
+#include <hdf5.h>
+#ifdef H5_STDC_HEADERS
+# include <fcntl.h>
+# include <string.h>
+# include <stdlib.h>
+# include <stdio.h>
+#endif
+
+#ifdef H5_HAVE_UNISTD_H
+# include <sys/types.h>
+# include <unistd.h>
+#endif
+
+#ifdef H5_HAVE_SYS_STAT_H
+# include <sys/stat.h>
+#endif
+
+#ifdef WIN32
+#include <io.h>
+#endif
+
+
+/*-------------------------------------------------------------------------
+ * Function: usage
+ *
+ * Purpose: Print a usage message and exit with non-zero status
+ *
+ * Return: never returns
+ *
+ * Programmer: Robb Matzke
+ * Thursday, June 11, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+usage (const char *argv0)
+{
+ fprintf (stderr, "Usage: %s -f HDF5-FILE FILES...\n", argv0);
+ exit (1);
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: main
+ *
+ * Purpose:
+ *
+ * Return: Success: 0
+ *
+ * Failure: 1
+ *
+ * Programmer: Robb Matzke
+ * Thursday, June 11, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+main (int argc, char *argv[])
+{
+ hid_t file, space=-1, dset=-1;
+ const char *output_name, *dset_name;
+ int argno, fd=-1;
+ hsize_t size[1];
+ struct stat sb;
+
+ /* Parse arguments */
+ if (argc<4) usage (argv[0]);
+ if (strcmp (argv[1], "-f")) usage (argv[0]);
+ output_name = argv[2];
+
+ /* create the file */
+ H5E_BEGIN_TRY {
+ if ((file = H5Fcreate (output_name, H5F_ACC_EXCL,
+ H5P_DEFAULT, H5P_DEFAULT))<0 &&
+            (file = H5Fopen (output_name, H5F_ACC_RDWR, H5P_DEFAULT))<0) {
+ fprintf (stderr, "%s: unable to create or open hdf5 file\n",
+ output_name);
+ exit (1);
+ }
+ } H5E_END_TRY;
+
+ /* process files from command-line */
+ for (argno=3; argno<argc; argno++) {
+
+ /* Open the file */
+ if ((dset_name=strrchr (argv[argno], '/'))) dset_name++;
+ else dset_name = argv[argno];
+ fprintf (stderr, "%s\n", dset_name);
+ if ((fd=open (argv[argno], O_RDONLY))<0) {
+ perror (argv[argno]);
+ goto next;
+ }
+ if (fstat (fd, &sb)<0) {
+ perror (argv[argno]);
+ goto next;
+ }
+
+ /* Data space */
+ size[0] = sb.st_size;
+ if ((space = H5Screate_simple (1, size, size))<0) goto next;
+
+ /* Dataset */
+ if ((dset=H5Dcreate (file, dset_name, H5T_NATIVE_SCHAR,
+ space, H5P_DEFAULT))<0) goto next;
+
+
+
+ next:
+ if (fd>=0) close (fd);
+ fd = -1;
+ H5E_BEGIN_TRY {
+ if (space>=0) {
+ H5Sclose (space);
+ space = -1;
+ }
+ if (dset>=0) {
+ H5Dclose (dset);
+ dset = -1;
+ }
+ } H5E_END_TRY;
+ }
+
+ /* Close the file */
+ H5Fclose (file);
+ return 0;
+}
diff --git a/tools/misc/h5repart.c b/tools/misc/h5repart.c
new file mode 100644
index 0000000..de7b3df
--- /dev/null
+++ b/tools/misc/h5repart.c
@@ -0,0 +1,418 @@
+/*
+ * Copyright (C) 1998 NCSA
+ * All rights reserved.
+ *
+ * Programmer: Robb Matzke <matzke@llnl.gov>
+ * Wednesday, May 13, 1998
+ *
+ * Purpose: Repartitions a file family. This program can be used to
+ * split a single file into a family of files, join a family of
+ * files into a single file, or copy one family to another while
+ * changing the size of the family members. It can also be used
+ * to copy a single file to a single file with holes.
+ */
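+ *
+ * For illustration only (the names are hypothetical): "h5repart -m 1g
+ * big.h5 part%05d.h5" splits big.h5 into 1GB family members, and
+ * "h5repart part%05d.h5 whole.h5" joins the family back into one file.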
+
+/* See H5private.h for how to include system headers */
+#include <hdf5.h>
+#ifdef H5_STDC_HEADERS
+# include <ctype.h>
+# include <errno.h>
+# include <fcntl.h>
+# include <stdio.h>
+# include <stdlib.h>
+# include <string.h>
+#endif
+
+#ifdef H5_HAVE_UNISTD_H
+# include <sys/types.h>
+# include <unistd.h>
+#endif
+
+#ifdef H5_HAVE_SYS_STAT_H
+# include <sys/stat.h>
+#endif
+
+#ifdef WIN32
+#include <io.h>
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+#ifndef TRUE
+#define TRUE 1
+#endif
+#define NAMELEN 4096
+#define GB *1024*1024*1024
+
+#ifndef MIN
+#define MIN(X,Y) ((X)<(Y)?(X):(Y))
+#endif
+#ifndef MIN3
+#define MIN3(X,Y,Z) MIN(MIN(X,Y),Z)
+#endif
+
+
+/*-------------------------------------------------------------------------
+ * Function: usage
+ *
+ * Purpose: Prints a usage message.
+ *
+ * Return: void
+ *
+ * Programmer: Robb Matzke
+ * Wednesday, May 13, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+usage (const char *progname)
+{
+ fprintf(stderr, "usage: %s [-v] [-V] [-[b|m] N[g|m|k]] SRC DST\n",
+ progname);
+ fprintf(stderr, " -v Produce verbose output\n");
+ fprintf(stderr, " -V Print a version number and exit\n");
+ fprintf(stderr, " -b N The I/O block size, defaults to 1kB\n");
+ fprintf(stderr, " -m N The destination member size or 1GB\n");
+ fprintf(stderr, " SRC The name of the source file\n");
+ fprintf(stderr, " DST The name of the destination files\n");
+ fprintf(stderr, "Sizes may be suffixed with `g' for GB, `m' for MB or "
+ "`k' for kB.\n");
+ fprintf(stderr, "File family names include an integer printf "
+ "format such as `%%d'\n");
+ exit (1);
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: get_size
+ *
+ * Purpose: Reads a size option of the form `-XNS' where `X' is any
+ * letter, `N' is a multi-character positive decimal number, and
+ * `S' is an optional suffix letter in the set [GgMmk]. The
+ * option may also be split among two arguments as: `-X NS'.
+ * The input value of ARGNO is the argument number for the
+ * switch in the ARGV vector and ARGC is the number of entries
+ * in that vector.
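+ *
+ * For example (illustrative): `-b1024', `-b 64k', and `-m 1g' are all
+ * accepted forms; an unrecognized suffix causes usage() to be called.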
+ *
+ * Return: Success: The value N multiplied according to the
+ * suffix S. On return ARGNO will be the number
+ * of the next argument to process.
+ *
+ * Failure: Calls usage() which exits with a non-zero
+ * status.
+ *
+ * Programmer: Robb Matzke
+ * Wednesday, May 13, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static off_t
+get_size (const char *progname, int *argno, int argc, char *argv[])
+{
+ off_t retval=-1;
+ char *suffix;
+
+ if (isdigit ((int)(argv[*argno][2]))) {
+ retval = strtol (argv[*argno]+2, &suffix, 10);
+ (*argno)++;
+ } else if (argv[*argno][2] || *argno+1>=argc) {
+ usage (progname);
+ } else {
+ retval = strtol (argv[*argno+1], &suffix, 0);
+ if (suffix==argv[*argno+1]) usage (progname);
+ *argno += 2;
+ }
+ if (suffix && suffix[0] && !suffix[1]) {
+ switch (*suffix) {
+ case 'G':
+ case 'g':
+ retval *= 1024 * 1024 * 1024;
+ break;
+ case 'M':
+ case 'm':
+ retval *= 1024 * 1024;
+ break;
+ case 'k':
+ retval *= 1024;
+ break;
+ default:
+ usage (progname);
+ }
+ } else if (suffix && suffix[0]) {
+ usage (progname);
+ }
+ return retval;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: main
+ *
+ * Purpose: Split an hdf5 file
+ *
+ * Return: Success:
+ *
+ * Failure:
+ *
+ * Programmer: Robb Matzke
+ * Wednesday, May 13, 1998
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+main (int argc, char *argv[])
+{
+ const char *prog_name; /*program name */
+ size_t blk_size=1024; /*size of each I/O block */
+ char *buf=NULL; /*I/O block buffer */
+ size_t n, i; /*counters */
+ ssize_t nio; /*I/O return value */
+ int argno=1; /*program argument number */
+ int src, dst=-1; /*source & destination files */
+ int need_seek=FALSE; /*destination needs to seek? */
+ int need_write; /*data needs to be written? */
+ struct stat sb; /*temporary file stat buffer */
+ int verbose=FALSE; /*display file names? */
+ size_t left_overs=0; /*amount of zeros left over */
+
+ const char *src_gen_name; /*general source name */
+ char src_name[NAMELEN]; /*source member name */
+ off_t src_offset=0; /*offset in source member */
+ int src_is_family; /*is source name a family name? */
+ int src_membno=0; /*source member number */
+ off_t src_size; /*source logical member size */
+ off_t src_act_size; /*source actual member size */
+
+ const char *dst_gen_name; /*general destination name */
+ char dst_name[NAMELEN]; /*destination member name */
+ off_t dst_offset=0; /*offset in destination member */
+ int dst_is_family; /*is dst name a family name? */
+ int dst_membno=0; /*destination member number */
+ off_t dst_size=1 GB; /*destination logical memb size */
+
+ /*
+ * Get the program name from argv[0]. Use only the last component.
+ */
+ if ((prog_name=strrchr (argv[0], '/'))) prog_name++;
+ else prog_name = argv[0];
+
+ /*
+ * Parse switches.
+ */
+ while (argno<argc && '-'==argv[argno][0]) {
+ if (!strcmp (argv[argno], "-v")) {
+ verbose = TRUE;
+ argno++;
+ } else if (!strcmp(argv[argno], "-V")) {
+ printf("This is %s version %u.%u release %u\n",
+ prog_name, H5_VERS_MAJOR, H5_VERS_MINOR, H5_VERS_RELEASE);
+ exit(0);
+ } else if ('b'==argv[argno][1]) {
+ blk_size = get_size (prog_name, &argno, argc, argv);
+ } else if ('m'==argv[argno][1]) {
+ dst_size = get_size (prog_name, &argno, argc, argv);
+ } else {
+ usage (prog_name);
+ }
+ }
+
+ /*
+ * Get the name for the source file and open the first member. The size
+ * of the first member determines the logical size of all the members.
+ */
+ if (argno>=argc) usage (prog_name);
+ src_gen_name = argv[argno++];
+ sprintf (src_name, src_gen_name, src_membno);
+ src_is_family = strcmp (src_name, src_gen_name);
+ if ((src=open (src_name, O_RDONLY))<0) {
+ perror (src_name);
+ exit (1);
+ }
+ if (fstat (src, &sb)<0) {
+ perror ("fstat");
+ exit (1);
+ }
+ src_size = src_act_size = sb.st_size;
+ if (verbose) fprintf (stderr, "< %s\n", src_name);
+
+ /*
+ * Get the name for the destination file and open the first member.
+ */
+ if (argno>=argc) usage (prog_name);
+ dst_gen_name = argv[argno++];
+ sprintf (dst_name, dst_gen_name, dst_membno);
+ dst_is_family = strcmp (dst_name, dst_gen_name);
+ if ((dst=open (dst_name, O_RDWR|O_CREAT|O_TRUNC, 0666))<0) {
+ perror (dst_name);
+ exit (1);
+ }
+ if (verbose) fprintf (stderr, "> %s\n", dst_name);
+
+ /* No more arguments */
+ if (argno<argc) usage (prog_name);
+
+ /* Now the real work, split the file */
+ buf = malloc (blk_size);
+ while (src_offset<src_size) {
+
+ /* Read a block. The amount to read is the minimum of:
+ * 1. The I/O block size
+ * 2. What's left to write in the destination member
+ * 3. Left over zeros or what's left in the source member.
+ */
+ n = blk_size;
+ if (dst_is_family) n = (size_t)MIN((off_t)n, dst_size-dst_offset);
+ if (left_overs) {
+ n = MIN (n, left_overs);
+ left_overs -= n;
+ need_write = FALSE;
+ } else if (src_offset<src_act_size) {
+ n = (size_t)MIN ((off_t)n, src_act_size-src_offset);
+ if ((nio=read (src, buf, n))<0) {
+ perror ("read");
+ exit (1);
+ } else if ((size_t)nio!=n) {
+ fprintf (stderr, "%s: short read\n", src_name);
+ exit (1);
+ }
+ for (i=0; i<n; i++) {
+ if (buf[i]) break;
+ }
+ need_write = (i<n);
+ } else {
+ n = 0;
+ left_overs = src_size - src_act_size;
+ need_write = FALSE;
+ }
+
+ /*
+ * If the block contains non-zero data then write it to the
+ * destination, otherwise just remember that we'll have to do a seek
+ * later in the destination when we finally get non-zero data.
+ */
+ if (need_write) {
+ if (need_seek && lseek (dst, dst_offset, SEEK_SET)<0) {
+ perror ("lseek");
+ exit (1);
+ }
+ if ((nio=write (dst, buf, n))<0) {
+ perror ("write");
+ exit (1);
+ } else if ((size_t)nio!=n) {
+ fprintf (stderr, "%s: short write\n", dst_name);
+ exit (1);
+ }
+ need_seek = FALSE;
+ } else {
+ need_seek = TRUE;
+ }
+
+ /*
+ * Update the source offset and open the next source family member if
+ * necessary. The source stream ends at the first member which
+ * cannot be opened because it doesn't exist. At the end of the
+ * source stream, update the destination offset and break out of the
+ * loop. The destination offset must be updated so we can fix
+ * trailing holes.
+ */
+ src_offset += n;
+ if (src_offset==src_act_size) {
+ close (src);
+ if (!src_is_family) {
+ dst_offset += n;
+ break;
+ }
+ sprintf (src_name, src_gen_name, ++src_membno);
+ if ((src=open (src_name, O_RDONLY))<0 && ENOENT==errno) {
+ dst_offset += n;
+ break;
+ } else if (src<0) {
+ perror (src_name);
+ exit (1);
+ }
+ if (fstat (src, &sb)<0) {
+ perror ("fstat");
+ exit (1);
+ }
+ src_act_size = sb.st_size;
+ if (src_act_size>src_size) {
+ fprintf (stderr, "%s: member truncated to %lu bytes\n",
+ src_name, (unsigned long)src_size);
+ }
+ src_offset = 0;
+ if (verbose) fprintf (stderr, "< %s\n", src_name);
+ }
+
+ /*
+ * Update the destination offset, opening a new member if one will be
+ * needed. The first member is extended to the logical member size
+ * but other members might be smaller if they end with a hole.
+ */
+ dst_offset += n;
+ if (dst_is_family && dst_offset==dst_size) {
+ if (0==dst_membno) {
+ if (lseek (dst, dst_size-1, SEEK_SET)<0) {
+ perror ("lseek");
+ exit (1);
+ }
+ if (read (dst, buf, 1)<0) {
+ perror ("read");
+ exit (1);
+ }
+ if (lseek (dst, dst_size-1, SEEK_SET)<0) {
+ perror ("lseek");
+ exit (1);
+ }
+ if (write (dst, buf, 1)<0) {
+ perror ("write");
+ exit (1);
+ }
+ }
+ close (dst);
+ sprintf (dst_name, dst_gen_name, ++dst_membno);
+ if ((dst=open (dst_name, O_RDWR|O_CREAT|O_TRUNC, 0666))<0) {
+ perror (dst_name);
+ exit (1);
+ }
+ dst_offset = 0;
+ need_seek = FALSE;
+ if (verbose) fprintf (stderr, "> %s\n", dst_name);
+ }
+ }
+
+ /*
+ * Make sure the last family member is the right size and then close it.
+ * The last member can't end with a hole or hdf5 will think that the
+ * family has been truncated.
+ */
+ if (need_seek) {
+ if (lseek (dst, dst_offset-1, SEEK_SET)<0) {
+ perror ("lseek");
+ exit (1);
+ }
+ if (read (dst, buf, 1)<0) {
+ perror ("read");
+ exit (1);
+ }
+ if (lseek (dst, dst_offset-1, SEEK_SET)<0) {
+ perror ("lseek");
+ exit (1);
+ }
+ if (write (dst, buf, 1)<0) {
+ perror ("write");
+ exit (1);
+ }
+ }
+ close (dst);
+
+ /* Free resources and return */
+ free (buf);
+ return 0;
+}
diff --git a/tools/misc/pdb2hdf.c b/tools/misc/pdb2hdf.c
new file mode 100644
index 0000000..7ecd28e
--- /dev/null
+++ b/tools/misc/pdb2hdf.c
@@ -0,0 +1,503 @@
+/*
+ * Copyright © 1999 NCSA
+ * All rights reserved.
+ *
+ * Programmer: Robb Matzke <matzke@llnl.gov>
+ * Tuesday, October 12, 1999
+ *
+ * Purpose: Creates an HDF5 file from a PDB file. The raw data can be
+ * left in the PDB file, creating an HDF5 file that contains
+ * meta data that points into the PDB file.
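+ *
+ * A sketch of the idea (the file name is illustrative; the call is the one
+ * made in fix_external() below): each PDB variable becomes an HDF5 dataset
+ * whose storage is declared external, e.g.
+ *     H5Pset_external(dcpl, "eos.data", diskaddr, nbytes);
+ * so the HDF5 tools read the raw values straight out of the PDB file.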
+ */
+#include <assert.h>
+#include <hdf5.h>
+#include <pdb.h>
+#include <score.h>
+#include <stdio.h>
+#include <string.h>
+
+/*
+ * libsilo renames all the PDB functions. However, this source file uses
+ * their documented names, so we have #define's to translate them to Silo
+ * terminology.
+ */
+#ifdef H5_HAVE_LIBSILO
+# define PD_open lite_PD_open
+# define PD_close lite_PD_close
+# define PD_ls lite_PD_ls
+# define PD_cd lite_PD_cd
+# define PD_inquire_entry lite_PD_inquire_entry
+# define PD_read lite_PD_read
+# define _PD_fixname _lite_PD_fixname
+# define _PD_rl_defstr _lite_PD_rl_defstr
+# define SC_free lite_SC_free
+#endif
+
+static int verbose_g = 0; /*verbose output? */
+static int cached_g = 0; /*use core file driver? */
+
+
+/*-------------------------------------------------------------------------
+ * Function: usage
+ *
+ * Purpose: Print a usage message.
+ *
+ * Return: void
+ *
+ * Programmer: Robb Matzke
+ * Tuesday, October 12, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+usage(const char *arg0)
+{
+    const char *progname;
+
+ if ((progname=strrchr(arg0, '/')) && progname[1]) progname++;
+ else progname = arg0;
+
+ fprintf(stderr, "\
+usage: %s [OPTIONS] [PDBFILE ...]\n\
+ OPTIONS\n\
+ -h, -?, --help Print a usage message and exit\n\
+ -c, --cached Cache all data in memory before writing the output\n\
+ -v, --verbose Print the name of each object processed\n\
+ -V, --version Show the version number of this program\n\
+\n\
+ The options and PDB file names may be interspersed and are processed from\n\
+ left to right.\n\
+\n\
+ The name of the HDF5 file is generated by taking the basename of the PDB\n\
+ file and replacing the last extension (or appending if no extension) with\n\
+ the characters \".h5\". For example, \"/tmp/test/eos.data\" would result\n\
+ in an HDF5 file called \"eos.h5\" in the current directory.\n",
+ progname);
+
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: version
+ *
+ * Purpose: Print the version number.
+ *
+ * Return: void
+ *
+ * Programmer: Robb Matzke
+ * Friday, October 15, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static void
+version(const char *arg0)
+{
+ const char *progname;
+
+ if ((progname=strrchr(arg0, '/')) && progname[1]) progname++;
+ else progname = arg0;
+ print_version(progname);
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: fix_name
+ *
+ * Purpose: Given a PDB file name create the corresponding HDF5 file
+ * name. This is done by taking the base name of the PDB file
+ * and replacing (or appending) the last extension with ".h5".
+ *
+ * Return: Success: HDF_NAME
+ *
+ * Failure: NULL
+ *
+ * Programmer: Robb Matzke
+ * Tuesday, October 12, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static char *
+fix_name(const char *pdb_name, char *hdf_name, size_t size)
+{
+ char *s;
+ const char *ext;
+
+ if (!pdb_name || !hdf_name) return NULL;
+ if ((s=strrchr(pdb_name, '/'))) pdb_name = s;
+ if (NULL==(ext=strrchr(pdb_name, '.'))) ext = pdb_name + strlen(pdb_name);
+ if ((size_t)((ext-pdb_name)+4) > size) return NULL; /*overflow*/
+ memcpy(hdf_name, pdb_name, ext-pdb_name);
+ strcpy(hdf_name+(ext-pdb_name), ".h5");
+ return hdf_name;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: fix_type
+ *
+ * Purpose: Given a PDB datatype return a corresponding hdf5 datatype.
+ * The hdf5 datatype should be closed when the caller is
+ * finished using it.
+ *
+ * Return: Success: HDF5 datatype
+ *
+ * Failure: negative
+ *
+ * Programmer: Robb Matzke
+ * Tuesday, October 12, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static hid_t
+fix_type(PDBfile *pdb, const char *s)
+{
+ hid_t type = -1;
+ defstr *d = _lite_PD_lookup_type((char*)s, pdb->chart);
+
+ /* PDB checking */
+ assert(d);
+ assert(d->size>0);
+ if (d->onescmp) return -1;
+
+
+ if (!strcmp(s, "char")) {
+ /*
+ * Character datatypes. Use whatever sign the native system uses by
+ * default.
+ */
+ type = H5Tcopy(H5T_NATIVE_CHAR);
+
+ } else if (!strcmp(s, "integer")) {
+ /*
+ * Integer datatypes. PDB supports various sizes of signed or
+ * unsigned integers.
+ */
+ type = H5Tcopy(d->unsgned?H5T_NATIVE_UINT:H5T_NATIVE_INT);
+ H5Tset_size(type, d->size);
+ H5Tset_precision(type, 8*d->size);
+ assert(NORMAL_ORDER==d->order_flag || REVERSE_ORDER==d->order_flag);
+ H5Tset_order(type,
+ NORMAL_ORDER==d->order_flag?H5T_ORDER_BE:H5T_ORDER_LE);
+
+ } else if (!strcmp(s, "float") || !strcmp(s, "double")) {
+ /*
+ * Floating-point datatypes
+ */
+ size_t nbits, spos, epos, esize, mpos, msize;
+
+ type = H5Tcopy(H5T_NATIVE_FLOAT);
+ H5Tset_size(type, d->size);
+ H5Tset_precision(type, 8*d->size);
+ assert(d->order);
+ H5Tset_order(type, 1==d->order[0]?H5T_ORDER_BE:H5T_ORDER_LE);
+
+ /*
+ * format[0] = # of bits per number
+ * format[1] = # of bits in exponent
+ * format[2] = # of bits in mantissa
+ * format[3] = start bit of sign
+ * format[4] = start bit of exponent
+ * format[5] = start bit of mantissa
+ * format[6] = high order mantissa bit (CRAY needs this)
+ * format[7] = bias of exponent
+ */
+ assert(d->format && d->format[0] == 8*d->size);
+ nbits = d->format[0];
+ spos = nbits - (d->format[3]+1);
+ esize = d->format[1];
+ epos = nbits - (d->format[4]+esize);
+ msize = d->format[2];
+ mpos = nbits - (d->format[5]+msize);
+ H5Tset_fields(type, spos, epos, esize, mpos, msize);
+ H5Tset_ebias(type, d->format[7]);
+ }
+ return type;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: fix_space
+ *
+ * Purpose: Convert a PDB dimension list into an HDF5 data space.
+ *
+ * Return: Success: HDF5 data space
+ *
+ * Failure: negative
+ *
+ * Programmer: Robb Matzke
+ * Tuesday, October 12, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static hid_t
+fix_space(const dimdes *dim)
+{
+ hsize_t size[H5S_MAX_RANK];
+ int rank;
+
+ for (rank=0; rank<H5S_MAX_RANK && dim; rank++, dim=dim->next) {
+ size[rank] = dim->number;
+ }
+ if (rank>=H5S_MAX_RANK) return -1;
+ return H5Screate_simple(rank, size, NULL);
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: fix_external
+ *
+ * Purpose: Sets the external file information for a dataset creation
+ * property list based on information from PDB.
+ *
+ * Return: Success: non-negative
+ *
+ * Failure: negative
+ *
+ * Programmer: Robb Matzke
+ * Tuesday, October 12, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+fix_external(hid_t dcpl, const char *pdb_file_name, long nelmts,
+ hsize_t elmt_size, symblock *block)
+{
+ int i;
+
+ for (i=0; nelmts>0; i++) {
+ hsize_t nbytes = block[i].number * elmt_size;
+ H5Pset_external(dcpl, pdb_file_name, block[i].diskaddr, nbytes);
+ nelmts -= block[i].number;
+ }
+ return 0;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: traverse
+ *
+ * Purpose: Traverse the current working directory of the PDB file.
+ *
+ * Return: Success: 0
+ *
+ * Failure: -1
+ *
+ * Programmer: Robb Matzke
+ * Tuesday, October 12, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+traverse(PDBfile *pdb, const char *pdb_file_name, hid_t hdf)
+{
+ int nitems, i, in_subdir=FALSE;
+ char **list=NULL;
+ hid_t group=-1, h_type=-1, h_space=-1, dset=-1, dcpl=-1;
+ hsize_t elmt_size;
+ const syment *ep=NULL;
+
+ if (NULL==(list=PD_ls(pdb, ".", NULL, &nitems))) {
+ fprintf(stderr, "cannot obtain PDB directory contents\n");
+ goto error;
+ }
+
+ for (i=0; i<nitems; i++) {
+ ep = PD_inquire_entry(pdb, list[i], TRUE, NULL);
+ if (verbose_g) {
+ printf("%s %s\n", _PD_fixname(pdb, list[i]), ep->type);
+ fflush(stdout);
+ }
+
+
+ if ('/'==list[i][strlen(list[i])-1]) {
+ /*
+ * This is a PDB directory. Make a corresponding HDF5 group and
+ * traverse into that PDB directory and HDF5 group
+ */
+ if ((group=H5Gcreate(hdf, list[i], 0))<0) {
+ fprintf(stderr, "cannot create HDF group %s\n", list[i]);
+ goto error;
+ }
+ if (!PD_cd(pdb, list[i])) {
+ fprintf(stderr, "cannot cd into PDB directory %s\n", list[i]);
+ goto error;
+ } else {
+ in_subdir = TRUE;
+ }
+
+ traverse(pdb, pdb_file_name, group);
+ if (!PD_cd(pdb, "..")) {
+ fprintf(stderr, "cannot traverse out of PDB %s\n", list[i]);
+ goto error;
+ }
+ H5Gclose(group);
+
+ } else {
+ /* This is some non-directory PDB object */
+
+ /* Create an HDF5 datatype from the PDB type */
+ if ((h_type=fix_type(pdb, ep->type))<0) {
+ fprintf(stderr, "cannot create datatype for %s (%s)\n",
+ list[i], ep->type);
+ continue;
+ }
+ elmt_size = H5Tget_size(h_type);
+
+ /* Create an HDF5 dataspace from the PDB dimensions */
+ if ((h_space=fix_space(ep->dimensions))<0) {
+ fprintf(stderr, "cannot create datatype for %s\n", list[i]);
+ continue;
+ }
+
+ /* Create pointers to the external PDB data */
+ dcpl = H5Pcreate(H5P_DATASET_CREATE);
+ fix_external(dcpl, pdb_file_name, ep->number, elmt_size,
+ ep->blocks);
+
+ /* Create the dataset */
+ if ((dset=H5Dcreate(hdf, list[i], h_type, h_space, dcpl))<0) {
+ fprintf(stderr, "cannot create dataset for %s\n", list[i]);
+ }
+
+ H5Pclose(dcpl);
+ H5Dclose(dset);
+ H5Sclose(h_space);
+ H5Tclose(h_type);
+ }
+
+ }
+
+ for (i=0; i<nitems; i++) {
+ SC_free(list[i]);
+ }
+ SC_free(list);
+ return 0;
+
+ error:
+ if (group>=0) H5Gclose(group);
+ if (in_subdir) PD_cd(pdb, "..");
+ if (list) {
+ for (i=0; i<nitems; i++) {
+ SC_free(list[i]);
+ }
+ SC_free(list);
+ }
+ return -1;
+}
+
+
+/*-------------------------------------------------------------------------
+ * Function: main
+ *
+ * Purpose: Create an HDF5 file from a PDB file.
+ *
+ * Return: Success: 0
+ *
+ * Failure: non-zero
+ *
+ * Programmer: Robb Matzke
+ * Tuesday, October 12, 1999
+ *
+ * Modifications:
+ *
+ *-------------------------------------------------------------------------
+ */
+int
+main(int argc, char *argv[])
+{
+ int argno;
+ char _hdf_name[512], *hdf_name, *pdb_name, *s;
+ PDBfile *pdb;
+ hid_t hdf, fapl;
+
+ /* Print a help message if called with no arguments */
+ if (1==argc) {
+ usage(argv[0]);
+ exit(1);
+ }
+
+ /* Process arguments in order; switches interspersed with files */
+ for (argno=1; argno<argc; argno++) {
+ if (!strcmp("--help", argv[argno])) {
+ usage(argv[0]);
+ exit(1);
+ } else if (!strcmp("--verbose", argv[argno])) {
+ verbose_g++;
+ } else if (!strcmp("--cached", argv[argno])) {
+ cached_g++;
+ } else if (!strcmp("--version", argv[argno])) {
+ version(argv[0]);
+ } else if ('-'==argv[argno][0] && '-'!=argv[argno][1]) {
+ for (s=argv[argno]+1; *s; s++) {
+ switch (*s) {
+ case '?':
+ case 'h': /*--help*/
+ usage(argv[0]);
+ exit(0);
+ case 'c': /*--cached*/
+ cached_g++;
+ break;
+ case 'v': /*--verbose*/
+ verbose_g++;
+ break;
+ case 'V': /*--version*/
+ version(argv[0]);
+ break;
+ default:
+ usage(argv[0]);
+ exit(1);
+ }
+ }
+ } else if ('-'==argv[argno][0]) {
+ usage(argv[0]);
+ exit(1);
+ } else {
+ /* This must be a file name. Process it. */
+ fapl = H5Pcreate(H5P_FILE_ACCESS);
+ if (cached_g) H5Pset_fapl_core(fapl, 1024*1024, TRUE);
+
+ pdb_name = argv[argno];
+ hdf_name = fix_name(argv[argno], _hdf_name, sizeof _hdf_name);
+ if (NULL==(pdb=PD_open(pdb_name, "r"))) {
+ fprintf(stderr, "%s: unable to open PDB file\n", pdb_name);
+ exit(1);
+ }
+ if ((hdf=H5Fcreate(hdf_name, H5F_ACC_TRUNC, H5P_DEFAULT,
+ fapl))<0) {
+ fprintf(stderr, "%s: unable to open HDF file\n", hdf_name);
+ exit(1);
+ }
+ H5Pclose(fapl);
+
+ /*
+ * Traverse the PDB file to create the HDF5 file.
+ */
+ traverse(pdb, pdb_name, hdf);
+
+ /* Close the files */
+ if (!PD_close(pdb)) {
+ fprintf(stderr, "%s: problems closing PDB file\n", pdb_name);
+ exit(1);
+ }
+ if (H5Fclose(hdf)<0) {
+ fprintf(stderr, "%s: problems closing HDF file\n", hdf_name);
+ exit(1);
+ }
+ }
+ }
+ return 0;
+}
diff --git a/tools/testfiles/Expected/anno_test.h5 b/tools/testfiles/Expected/anno_test.h5
new file mode 100644
index 0000000..8656bc2
--- /dev/null
+++ b/tools/testfiles/Expected/anno_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/gr_typ_test.h5 b/tools/testfiles/Expected/gr_typ_test.h5
new file mode 100644
index 0000000..475d48f
--- /dev/null
+++ b/tools/testfiles/Expected/gr_typ_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/grnameclash_test.h5 b/tools/testfiles/Expected/grnameclash_test.h5
new file mode 100644
index 0000000..d8ae3ef
--- /dev/null
+++ b/tools/testfiles/Expected/grnameclash_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/image_attr_test.h5 b/tools/testfiles/Expected/image_attr_test.h5
new file mode 100644
index 0000000..392da76
--- /dev/null
+++ b/tools/testfiles/Expected/image_attr_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/ras_24_test.h5 b/tools/testfiles/Expected/ras_24_test.h5
new file mode 100644
index 0000000..3a684c0
--- /dev/null
+++ b/tools/testfiles/Expected/ras_24_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/ras_8_test.h5 b/tools/testfiles/Expected/ras_8_test.h5
new file mode 100644
index 0000000..3d3c402
--- /dev/null
+++ b/tools/testfiles/Expected/ras_8_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/sds_attr.h5 b/tools/testfiles/Expected/sds_attr.h5
new file mode 100644
index 0000000..49b2b0b
--- /dev/null
+++ b/tools/testfiles/Expected/sds_attr.h5
Binary files differ
diff --git a/tools/testfiles/Expected/sds_attr_test.h5 b/tools/testfiles/Expected/sds_attr_test.h5
new file mode 100644
index 0000000..594e7c0
--- /dev/null
+++ b/tools/testfiles/Expected/sds_attr_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/sds_dim_test.h5 b/tools/testfiles/Expected/sds_dim_test.h5
new file mode 100644
index 0000000..e49450e
--- /dev/null
+++ b/tools/testfiles/Expected/sds_dim_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/sds_typ_test.h5 b/tools/testfiles/Expected/sds_typ_test.h5
new file mode 100644
index 0000000..a26bb90
--- /dev/null
+++ b/tools/testfiles/Expected/sds_typ_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/sdsnameclash_test.h5 b/tools/testfiles/Expected/sdsnameclash_test.h5
new file mode 100644
index 0000000..4eacd82
--- /dev/null
+++ b/tools/testfiles/Expected/sdsnameclash_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/vdata_test.h5 b/tools/testfiles/Expected/vdata_test.h5
new file mode 100644
index 0000000..2368548
--- /dev/null
+++ b/tools/testfiles/Expected/vdata_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/vdnameclash_test.h5 b/tools/testfiles/Expected/vdnameclash_test.h5
new file mode 100644
index 0000000..56af637
--- /dev/null
+++ b/tools/testfiles/Expected/vdnameclash_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/vg_all_test.h5 b/tools/testfiles/Expected/vg_all_test.h5
new file mode 100644
index 0000000..c38dfb1
--- /dev/null
+++ b/tools/testfiles/Expected/vg_all_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/vg_hl_test.h5 b/tools/testfiles/Expected/vg_hl_test.h5
new file mode 100644
index 0000000..a12eecd
--- /dev/null
+++ b/tools/testfiles/Expected/vg_hl_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/vg_loop_test.h5 b/tools/testfiles/Expected/vg_loop_test.h5
new file mode 100644
index 0000000..507d625
--- /dev/null
+++ b/tools/testfiles/Expected/vg_loop_test.h5
Binary files differ
diff --git a/tools/testfiles/Expected/vgnameclash_test.h5 b/tools/testfiles/Expected/vgnameclash_test.h5
new file mode 100644
index 0000000..0d4e463
--- /dev/null
+++ b/tools/testfiles/Expected/vgnameclash_test.h5
Binary files differ
diff --git a/tools/testfiles/anno_test.hdf b/tools/testfiles/anno_test.hdf
new file mode 100644
index 0000000..3b6d7d9
--- /dev/null
+++ b/tools/testfiles/anno_test.hdf
Binary files differ
diff --git a/tools/testfiles/gr_typ_test.hdf b/tools/testfiles/gr_typ_test.hdf
new file mode 100644
index 0000000..5d70e3e
--- /dev/null
+++ b/tools/testfiles/gr_typ_test.hdf
Binary files differ
diff --git a/tools/testfiles/grnameclash_test.hdf b/tools/testfiles/grnameclash_test.hdf
new file mode 100644
index 0000000..2f385ae
--- /dev/null
+++ b/tools/testfiles/grnameclash_test.hdf
Binary files differ
diff --git a/tools/testfiles/image_attr_test.hdf b/tools/testfiles/image_attr_test.hdf
new file mode 100644
index 0000000..8a9f329
--- /dev/null
+++ b/tools/testfiles/image_attr_test.hdf
Binary files differ
diff --git a/tools/testfiles/ras_24_test.hdf b/tools/testfiles/ras_24_test.hdf
new file mode 100644
index 0000000..394b7ec
--- /dev/null
+++ b/tools/testfiles/ras_24_test.hdf
Binary files differ
diff --git a/tools/testfiles/ras_8_test.hdf b/tools/testfiles/ras_8_test.hdf
new file mode 100644
index 0000000..2fec68a
--- /dev/null
+++ b/tools/testfiles/ras_8_test.hdf
Binary files differ
diff --git a/tools/testfiles/sds_attr_test.hdf b/tools/testfiles/sds_attr_test.hdf
new file mode 100644
index 0000000..7e7323f5
--- /dev/null
+++ b/tools/testfiles/sds_attr_test.hdf
Binary files differ
diff --git a/tools/testfiles/sds_dim_test.hdf b/tools/testfiles/sds_dim_test.hdf
new file mode 100644
index 0000000..b511362
--- /dev/null
+++ b/tools/testfiles/sds_dim_test.hdf
Binary files differ
diff --git a/tools/testfiles/sds_typ_test.hdf b/tools/testfiles/sds_typ_test.hdf
new file mode 100644
index 0000000..b2d9fcb
--- /dev/null
+++ b/tools/testfiles/sds_typ_test.hdf
Binary files differ
diff --git a/tools/testfiles/sdsnameclash_test.hdf b/tools/testfiles/sdsnameclash_test.hdf
new file mode 100644
index 0000000..d32070b
--- /dev/null
+++ b/tools/testfiles/sdsnameclash_test.hdf
Binary files differ
diff --git a/tools/testfiles/tall.h5.xml b/tools/testfiles/tall.h5.xml
new file mode 100644
index 0000000..7d77bb0
--- /dev/null
+++ b/tools/testfiles/tall.h5.xml
@@ -0,0 +1,173 @@
+#############################
+Expected output for 'h5dump --xml tall.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Attribute Name="attr1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 98 99 100 101 102 103 104 105 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="attr2">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="2" MaxDimSize="2"/>
+ <Dimension DimSize="2" MaxDimSize="2"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1
+ 2 3
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Group Name="g1" OBJ-XID="/g1" Parents="/" >
+ <Group Name="g1.1" OBJ-XID="/g1/g1.1" Parents="/g1" >
+ <Dataset Name="dset1.1.1" OBJ-XID="/g1/g1.1/dset1.1.1" Parents="/g1/g1.1">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Attribute Name="attr1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="27" MaxDimSize="27"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 49 115 116 32 97 116 116 114 105 98 117 116 101 32 111 102 32
+ 100 115 101 116 49 46 49 46 49 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="attr2">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="27" MaxDimSize="27"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 50 110 100 32 97 116 116 114 105 98 117 116 101 32 111 102 32
+ 100 115 101 116 49 46 49 46 49 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0
+ 0 1 2 3 4 5 6 7 8 9
+ 0 2 4 6 8 10 12 14 16 18
+ 0 3 6 9 12 15 18 21 24 27
+ 0 4 8 12 16 20 24 28 32 36
+ 0 5 10 15 20 25 30 35 40 45
+ 0 6 12 18 24 30 36 42 48 54
+ 0 7 14 21 28 35 42 49 56 63
+ 0 8 16 24 32 40 48 56 64 72
+ 0 9 18 27 36 45 54 63 72 81
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="dset1.1.2" OBJ-XID="/g1/g1.1/dset1.1.2" Parents="/g1/g1.1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="20" MaxDimSize="20"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="g1.2" OBJ-XID="/g1/g1.2" Parents="/g1" >
+ <Group Name="g1.2.1" OBJ-XID="/g1/g1.2/g1.2.1" Parents="/g1/g1.2" >
+ <SoftLink LinkName="slink" Target="somevalue" TargetObj="/g1/g1.2/g1.2.1/somevalue" OBJ-XID="/g1/g1.2/g1.2.1/slink" Source="/g1/g1.2/g1.2.1"/>
+ </Group>
+ </Group>
+ </Group>
+ <Group Name="g2" OBJ-XID="/g2" Parents="/" >
+ <Dataset Name="dset2.1" OBJ-XID="/g2/dset2.1" Parents="/g2">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 1 1.1 1.2 1.3 1.4 1.5 1.6 1.7 1.8 1.9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="dset2.2" OBJ-XID="/g2/dset2.2" Parents="/g2">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0.1 0.2 0.3 0.4
+ 0 0.2 0.4 0.6 0.8
+ 0 0.3 0.6 0.9 1.2
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
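
For orientation, the dataset /g1/g1.1/dset1.1.1 dumped above (a 10x10 big-endian 32-bit integer dataset holding i*j) could be produced with the HDF5 C API roughly as sketched below. This is illustrative only, written against the current H5Gcreate2/H5Dcreate2 names rather than the generator actually used for these test files, and the attribute writes are omitted.

    #include "hdf5.h"

    int main(void)
    {
        hid_t   file, g1, g11, space, dset;
        hsize_t dims[2] = {10, 10};
        int     buf[10][10];
        int     i, j;

        for (i = 0; i < 10; i++)
            for (j = 0; j < 10; j++)
                buf[i][j] = i * j;               /* matches the values dumped above */

        file  = H5Fcreate("tall.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        g1    = H5Gcreate2(file, "g1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        g11   = H5Gcreate2(g1, "g1.1", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        space = H5Screate_simple(2, dims, NULL);
        dset  = H5Dcreate2(g11, "dset1.1.1", H5T_STD_I32BE, space,   /* ByteOrder="BE", Size="4" */
                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

        H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

        H5Dclose(dset);
        H5Sclose(space);
        H5Gclose(g11);
        H5Gclose(g1);
        H5Fclose(file);
        return 0;
    }
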
diff --git a/tools/testfiles/tarray1.h5.xml b/tools/testfiles/tarray1.h5.xml
new file mode 100644
index 0000000..ea654cd
--- /dev/null
+++ b/tools/testfiles/tarray1.h5.xml
@@ -0,0 +1,31 @@
+#############################
+Expected output for 'h5dump --xml tarray1.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1" OBJ-XID="/Dataset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="4" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 10 11 12 13 20 21 22 23 30 31 32 33
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
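
The ArrayType/ArrayDimension elements above describe an HDF5 array datatype: each element of the 4-element dataspace is itself an int[4]. A hedged sketch of creating an equivalent dataset, assuming the 1.8+ H5Tarray_create2 name (not necessarily how tarray1.h5 was generated):

    #include "hdf5.h"

    int main(void)
    {
        hid_t   file, ftype, mtype, space, dset;
        hsize_t sdims[1] = {4};                  /* dataspace: 4 elements      */
        hsize_t adims[1] = {4};                  /* each element is an int[4]  */
        int     buf[4][4];
        int     i, j;

        for (i = 0; i < 4; i++)
            for (j = 0; j < 4; j++)
                buf[i][j] = i * 10 + j;          /* 0 1 2 3 10 11 12 13 ...    */

        file  = H5Fcreate("tarray1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        ftype = H5Tarray_create2(H5T_STD_I32LE, 1, adims);   /* file type: LE int32 */
        mtype = H5Tarray_create2(H5T_NATIVE_INT, 1, adims);  /* matching memory type */
        space = H5Screate_simple(1, sdims, NULL);
        dset  = H5Dcreate2(file, "Dataset1", ftype, space,
                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

        H5Dwrite(dset, mtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

        H5Dclose(dset);
        H5Sclose(space);
        H5Tclose(mtype);
        H5Tclose(ftype);
        H5Fclose(file);
        return 0;
    }
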
diff --git a/tools/testfiles/tarray2.h5.xml b/tools/testfiles/tarray2.h5.xml
new file mode 100644
index 0000000..5772b98
--- /dev/null
+++ b/tools/testfiles/tarray2.h5.xml
@@ -0,0 +1,80 @@
+#############################
+Expected output for 'h5dump --xml tarray2.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1" OBJ-XID="/Dataset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <ArrayType Ndims="3">
+ <ArrayDimension DimSize="3" DimPerm="0"/>
+ <ArrayDimension DimSize="4" DimPerm="1"/>
+ <ArrayDimension DimSize="5" DimPerm="2"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 4
+ 10 11 12 13 14
+ 20 21 22 23 24
+ 30 31 32 33 34
+ 100 101 102 103 104
+ 110 111 112 113 114
+ 120 121 122 123 124
+ 130 131 132 133 134
+ 200 201 202 203 204
+ 210 211 212 213 214
+ 220 221 222 223 224
+ 230 231 232 233 234
+ 1000 1001 1002 1003 1004
+ 1010 1011 1012 1013 1014
+ 1020 1021 1022 1023 1024
+ 1030 1031 1032 1033 1034
+ 1100 1101 1102 1103 1104
+ 1110 1111 1112 1113 1114
+ 1120 1121 1122 1123 1124
+ 1130 1131 1132 1133 1134
+ 1200 1201 1202 1203 1204
+ 1210 1211 1212 1213 1214
+ 1220 1221 1222 1223 1224
+ 1230 1231 1232 1233 1234
+ 2000 2001 2002 2003 2004
+ 2010 2011 2012 2013 2014
+ 2020 2021 2022 2023 2024
+ 2030 2031 2032 2033 2034
+ 2100 2101 2102 2103 2104
+ 2110 2111 2112 2113 2114
+ 2120 2121 2122 2123 2124
+ 2130 2131 2132 2133 2134
+ 2200 2201 2202 2203 2204
+ 2210 2211 2212 2213 2214
+ 2220 2221 2222 2223 2224
+ 2230 2231 2232 2233 2234
+ 3000 3001 3002 3003 3004
+ 3010 3011 3012 3013 3014
+ 3020 3021 3022 3023 3024
+ 3030 3031 3032 3033 3034
+ 3100 3101 3102 3103 3104
+ 3110 3111 3112 3113 3114
+ 3120 3121 3122 3123 3124
+ 3130 3131 3132 3133 3134
+ 3200 3201 3202 3203 3204
+ 3210 3211 3212 3213 3214
+ 3220 3221 3222 3223 3224
+ 3230 3231 3232 3233 3234
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tarray3.h5.xml b/tools/testfiles/tarray3.h5.xml
new file mode 100644
index 0000000..5af5d85
--- /dev/null
+++ b/tools/testfiles/tarray3.h5.xml
@@ -0,0 +1,120 @@
+#############################
+Expected output for 'h5dump --xml tarray3.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1" OBJ-XID="/Dataset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="4" DimPerm="0"/>
+ <DataType>
+ <ArrayType Ndims="2">
+ <ArrayDimension DimSize="6" DimPerm="0"/>
+ <ArrayDimension DimSize="3" DimPerm="1"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2
+ 10 11 12
+ 20 21 22
+ 30 31 32
+ 40 41 42
+ 50 51 52 100 101 102
+ 110 111 112
+ 120 121 122
+ 130 131 132
+ 140 141 142
+ 150 151 152 200 201 202
+ 210 211 212
+ 220 221 222
+ 230 231 232
+ 240 241 242
+ 250 251 252 300 301 302
+ 310 311 312
+ 320 321 322
+ 330 331 332
+ 340 341 342
+ 350 351 352
+ 1000 1001 1002
+ 1010 1011 1012
+ 1020 1021 1022
+ 1030 1031 1032
+ 1040 1041 1042
+ 1050 1051 1052 1100 1101 1102
+ 1110 1111 1112
+ 1120 1121 1122
+ 1130 1131 1132
+ 1140 1141 1142
+ 1150 1151 1152 1200 1201 1202
+ 1210 1211 1212
+ 1220 1221 1222
+ 1230 1231 1232
+ 1240 1241 1242
+ 1250 1251 1252 1300 1301 1302
+ 1310 1311 1312
+ 1320 1321 1322
+ 1330 1331 1332
+ 1340 1341 1342
+ 1350 1351 1352
+ 2000 2001 2002
+ 2010 2011 2012
+ 2020 2021 2022
+ 2030 2031 2032
+ 2040 2041 2042
+ 2050 2051 2052 2100 2101 2102
+ 2110 2111 2112
+ 2120 2121 2122
+ 2130 2131 2132
+ 2140 2141 2142
+ 2150 2151 2152 2200 2201 2202
+ 2210 2211 2212
+ 2220 2221 2222
+ 2230 2231 2232
+ 2240 2241 2242
+ 2250 2251 2252 2300 2301 2302
+ 2310 2311 2312
+ 2320 2321 2322
+ 2330 2331 2332
+ 2340 2341 2342
+ 2350 2351 2352
+ 3000 3001 3002
+ 3010 3011 3012
+ 3020 3021 3022
+ 3030 3031 3032
+ 3040 3041 3042
+ 3050 3051 3052 3100 3101 3102
+ 3110 3111 3112
+ 3120 3121 3122
+ 3130 3131 3132
+ 3140 3141 3142
+ 3150 3151 3152 3200 3201 3202
+ 3210 3211 3212
+ 3220 3221 3222
+ 3230 3231 3232
+ 3240 3241 3242
+ 3250 3251 3252 3300 3301 3302
+ 3310 3311 3312
+ 3320 3321 3322
+ 3330 3331 3332
+ 3340 3341 3342
+ 3350 3351 3352
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tarray6.h5.xml b/tools/testfiles/tarray6.h5.xml
new file mode 100644
index 0000000..35e50ca
--- /dev/null
+++ b/tools/testfiles/tarray6.h5.xml
@@ -0,0 +1,38 @@
+#############################
+Expected output for 'h5dump --xml tarray6.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1" OBJ-XID="/Dataset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="4" DimPerm="0"/>
+ <DataType>
+ <VLType>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="false" Size="4" />
+ </AtomicType>
+ </DataType>
+ </VLType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 10 11 20 21 22 30 31 32 33
+ 100 101 110 111 112 120 121 122 123 130 131 132 133 134
+ 200 201 202 210 211 212 213 220 221 222 223 224 230 231 232 233 234 235
+ 300 301 302 303 310 311 312 313 314 320 321 322 323 324 325 330 331 332 333 334 335 336
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tarray7.h5.xml b/tools/testfiles/tarray7.h5.xml
new file mode 100644
index 0000000..a9fd541
--- /dev/null
+++ b/tools/testfiles/tarray7.h5.xml
@@ -0,0 +1,43 @@
+#############################
+Expected output for 'h5dump --xml tarray7.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1" OBJ-XID="/Dataset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="4" DimPerm="0"/>
+ <DataType>
+ <VLType>
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="4" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="false" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </VLType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 100 101 102 103 110 111 112 113 200 201 202 203 210 211 212 213 220 221 222 223 300 301 302 303 310 311 312 313 320 321 322 323 330 331 332 333
+ 1000 1001 1002 1003 1010 1011 1012 1013 1100 1101 1102 1103 1110 1111 1112 1113 1120 1121 1122 1123 1200 1201 1202 1203 1210 1211 1212 1213 1220 1221 1222 1223 1230 1231 1232 1233 1300 1301 1302 1303 1310 1311 1312 1313 1320 1321 1322 1323 1330 1331 1332 1333 1340 1341 1342 1343
+ 2000 2001 2002 2003 2010 2011 2012 2013 2020 2021 2022 2023 2100 2101 2102 2103 2110 2111 2112 2113 2120 2121 2122 2123 2130 2131 2132 2133 2200 2201 2202 2203 2210 2211 2212 2213 2220 2221 2222 2223 2230 2231 2232 2233 2240 2241 2242 2243 2300 2301 2302 2303 2310 2311 2312 2313 2320 2321 2322 2323 2330 2331 2332 2333 2340 2341 2342 2343 2350 2351 2352 2353
+ 3000 3001 3002 3003 3010 3011 3012 3013 3020 3021 3022 3023 3030 3031 3032 3033 3100 3101 3102 3103 3110 3111 3112 3113 3120 3121 3122 3123 3130 3131 3132 3133 3140 3141 3142 3143 3200 3201 3202 3203 3210 3211 3212 3213 3220 3221 3222 3223 3230 3231 3232 3233 3240 3241 3242 3243 3250 3251 3252 3253 3300 3301 3302 3303 3310 3311 3312 3313 3320 3321 3322 3323 3330 3331 3332 3333 3340 3341 3342 3343 3350 3351 3352 3353 3360 3361 3362 3363
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tattr.h5.xml b/tools/testfiles/tattr.h5.xml
new file mode 100644
index 0000000..757b297
--- /dev/null
+++ b/tools/testfiles/tattr.h5.xml
@@ -0,0 +1,91 @@
+#############################
+Expected output for 'h5dump --xml tattr.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Attribute Name="attr1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="24" MaxDimSize="24"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 116 116 114 105 98 117 116 101 32 111 102 32 114 111 111 116 32 103
+ 114 111 117 112 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="attr2">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 1 2 3 4 5 6 7 8 9 10
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="attr3">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="attr4">
+ <Dataspace>
+ <ScalarDataspace />
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 100
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="attr5">
+ <Dataspace>
+ <ScalarDataspace />
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="17" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "string attribute"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+</RootGroup>
+</HDF5-File>
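
attr4 and attr5 above show how scalar integer and fixed-length string attributes are rendered (ScalarDataspace, and StringType with StrSize and StrPad). A minimal sketch of writing the string attribute, assuming the 1.8+ H5Acreate2 name; "string attribute" is 16 characters plus the NUL terminator, which is where StrSize="17" comes from:

    #include <string.h>
    #include "hdf5.h"

    int main(void)
    {
        const char *value = "string attribute";         /* 16 chars + NUL = StrSize 17 */
        hid_t       file, space, stype, attr;

        file  = H5Fcreate("tattr.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        space = H5Screate(H5S_SCALAR);                   /* -> <ScalarDataspace />      */
        stype = H5Tcopy(H5T_C_S1);

        H5Tset_size(stype, strlen(value) + 1);           /* fixed-length string         */
        H5Tset_strpad(stype, H5T_STR_NULLTERM);          /* -> StrPad="H5T_STR_NULLTERM" */

        attr = H5Acreate2(file, "attr5", stype, space, H5P_DEFAULT, H5P_DEFAULT);
        H5Awrite(attr, stype, value);

        H5Aclose(attr);
        H5Tclose(stype);
        H5Sclose(space);
        H5Fclose(file);
        return 0;
    }
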
diff --git a/tools/testfiles/tbitfields.h5.xml b/tools/testfiles/tbitfields.h5.xml
new file mode 100644
index 0000000..f249fa7
--- /dev/null
+++ b/tools/testfiles/tbitfields.h5.xml
@@ -0,0 +1,48 @@
+#############################
+Expected output for 'h5dump --xml tbitfields.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Group Name="typetests" OBJ-XID="/typetests" Parents="/" >
+ <Dataset Name="bitfield_1" OBJ-XID="/typetests/bitfield_1" Parents="/typetests">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="32" MaxDimSize="32"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <BitfieldType ByteOrder="LE" Size="1"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0xff 0xfe 0xfd 0xfc 0xfb 0xfa 0xf9 0xf8 0xf7 0xf6 0xf5 0xf4 0xf3
+ 0xf2 0xf1 0xf0 0xef 0xee 0xed 0xec 0xeb 0xea 0xe9 0xe8 0xe7 0xe6
+ 0xe5 0xe4 0xe3 0xe2 0xe1 0xe0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="bitfield_2" OBJ-XID="/typetests/bitfield_2" Parents="/typetests">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="16" MaxDimSize="16"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <BitfieldType ByteOrder="LE" Size="2"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0xfffe 0xfdfc 0xfbfa 0xf9f8 0xf7f6 0xf5f4 0xf3f2 0xf1f0 0xefee
+ 0xedec 0xebea 0xe9e8 0xe7e6 0xe5e4 0xe3e2 0xe1e0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tcompound.h5.xml b/tools/testfiles/tcompound.h5.xml
new file mode 100644
index 0000000..25db6ae
--- /dev/null
+++ b/tools/testfiles/tcompound.h5.xml
@@ -0,0 +1,280 @@
+#############################
+Expected output for 'h5dump --xml tcompound.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <NamedDataType Name="#6632:0" OBJ-XID="/#6632:0" Parents="root">
+ <CompoundType>
+ <Field FieldName="int">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="float">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <NamedDataType Name="type1" OBJ-XID="/type1" Parents="root">
+ <CompoundType>
+ <Field FieldName="int_name">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="float_name">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <NamedDataType Name="type2" OBJ-XID="/type2" Parents="root">
+ <CompoundType>
+ <Field FieldName="int_array">
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="4" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ <Field FieldName="float_array">
+ <DataType>
+ <ArrayType Ndims="2">
+ <ArrayDimension DimSize="5" DimPerm="0"/>
+ <ArrayDimension DimSize="6" DimPerm="1"/>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <Dataset Name="dset1" OBJ-XID="/dset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="a_name">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="b_name">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="c_name">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 1 1 1 0.5 2 4 0.333333 3 9 0.25 4 16 0.2
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Group Name="group1" OBJ-XID="/group1" Parents="/" >
+ <NamedDataType Name="type3" OBJ-XID="/group1/type3" Parents="/group1">
+ <CompoundType>
+ <Field FieldName="int">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="float">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <Dataset Name="dset2" OBJ-XID="/group1/dset2" Parents="/group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <NamedDataTypePtr OBJ-XID="/type1"/>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 1 1.1 2 2.2 3 3.3 4 4.4
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="dset3" OBJ-XID="/group1/dset3" Parents="/group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ <Dimension DimSize="6" MaxDimSize="6"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <NamedDataTypePtr OBJ-XID="/type2"/>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 1 2 3 4 5 6
+ 2 3 4 5 6 7
+ 3 4 5 6 7 8
+ 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 1 2 3 4 2 3 4 5 6 7
+ 3 4 5 6 7 8
+ 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 2 3 4 5 3 4 5 6 7 8
+ 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 3 4 5 6 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 4 5 6 7 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 5 6 7 8 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 10 11 12 13 14 15
+ 1 2 3 4 2 3 4 5 6 7
+ 3 4 5 6 7 8
+ 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 2 3 4 5 3 4 5 6 7 8
+ 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 3 4 5 6 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 4 5 6 7 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 5 6 7 8 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 10 11 12 13 14 15
+ 6 7 8 9 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 10 11 12 13 14 15
+ 11 12 13 14 15 16
+ 2 3 4 5 3 4 5 6 7 8
+ 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 3 4 5 6 4 5 6 7 8 9
+ 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 4 5 6 7 5 6 7 8 9 10
+ 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 5 6 7 8 6 7 8 9 10 11
+ 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 10 11 12 13 14 15
+ 6 7 8 9 7 8 9 10 11 12
+ 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 10 11 12 13 14 15
+ 11 12 13 14 15 16
+ 7 8 9 10 8 9 10 11 12 13
+ 9 10 11 12 13 14
+ 10 11 12 13 14 15
+ 11 12 13 14 15 16
+ 12 13 14 15 16 17
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="dset4" OBJ-XID="/group1/dset4" Parents="/group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <NamedDataTypePtr OBJ-XID="/group1/type3"/>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 3 1 4 2 5 3 6 4 7
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="group2" OBJ-XID="/group2" Parents="/" >
+ <Dataset Name="dset5" OBJ-XID="/group2/dset5" Parents="/group2">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <NamedDataTypePtr OBJ-XID="/#6632:0"/>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 1 0.1 2 0.2 3 0.3 4 0.4
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
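
The NamedDataType/NamedDataTypePtr pairing above reflects a committed (named) compound datatype that several datasets share. A rough sketch of committing such a type and writing one dataset through it; this uses the 1.8+ H5Tcommit2/H5Dcreate2 names and packs the big-endian file type at offsets 0 and 4, and it is not the generator used for tcompound.h5:

    #include "hdf5.h"

    typedef struct {
        int   int_name;
        float float_name;
    } s1_t;

    int main(void)
    {
        hid_t   file, ftype, mtype, space, dset;
        hsize_t dims[1] = {5};
        s1_t    buf[5];
        int     i;

        for (i = 0; i < 5; i++) {
            buf[i].int_name   = i;
            buf[i].float_name = i * 1.1f;        /* 0 0, 1 1.1, 2 2.2, ... as dumped */
        }

        file = H5Fcreate("tcompound.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

        /* file datatype: big-endian members, as shown in the XML above */
        ftype = H5Tcreate(H5T_COMPOUND, 8);
        H5Tinsert(ftype, "int_name",   0, H5T_STD_I32BE);
        H5Tinsert(ftype, "float_name", 4, H5T_IEEE_F32BE);
        H5Tcommit2(file, "type1", ftype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

        /* memory datatype matching the C struct layout */
        mtype = H5Tcreate(H5T_COMPOUND, sizeof(s1_t));
        H5Tinsert(mtype, "int_name",   HOFFSET(s1_t, int_name),   H5T_NATIVE_INT);
        H5Tinsert(mtype, "float_name", HOFFSET(s1_t, float_name), H5T_NATIVE_FLOAT);

        /* a dataset created with the committed type is dumped with NamedDataTypePtr */
        space = H5Screate_simple(1, dims, NULL);
        dset  = H5Dcreate2(file, "dset2", ftype, space,
                           H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(dset, mtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

        H5Dclose(dset);
        H5Sclose(space);
        H5Tclose(mtype);
        H5Tclose(ftype);
        H5Fclose(file);
        return 0;
    }
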
diff --git a/tools/testfiles/tcompound2.h5.xml b/tools/testfiles/tcompound2.h5.xml
new file mode 100644
index 0000000..b19226c
--- /dev/null
+++ b/tools/testfiles/tcompound2.h5.xml
@@ -0,0 +1,196 @@
+#############################
+Expected output for 'h5dump --xml tcompound2.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <NamedDataType Name="#9560:0" OBJ-XID="/#9560:0" Parents="root">
+ <CompoundType>
+ <Field FieldName="int">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="float">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <NamedDataType Name="type1" OBJ-XID="/type1" Parents="root">
+ <CompoundType>
+ <Field FieldName="int_name">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="float_name">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <NamedDataType Name="type2" OBJ-XID="/type2" Parents="root">
+ <CompoundType>
+ <Field FieldName="int_array">
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="4" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ <Field FieldName="float_array">
+ <DataType>
+ <ArrayType Ndims="2">
+ <ArrayDimension DimSize="5" DimPerm="0"/>
+ <ArrayDimension DimSize="6" DimPerm="1"/>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <Dataset Name="dset1" OBJ-XID="/dset1" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="2" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="6" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="a_name">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="b_name">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="c_name">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 1 1 1 0.5 2 4 0.333333 3 9 0.25 4 16 0.2 5 25 0.166667
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Group Name="group1" OBJ-XID="/group1" Parents="/" >
+ <NamedDataType Name="type3" OBJ-XID="/group1/type3" Parents="/group1">
+ <CompoundType>
+ <Field FieldName="int">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="float">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <Dataset Name="dset2" OBJ-XID="/group1/dset2" Parents="/group1">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="2" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="6" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <NamedDataTypePtr OBJ-XID="/type1"/>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 1 1.1 2 2.2 3 3.3 4 4.4 5 5.5
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="dset4" OBJ-XID="/group1/dset4" Parents="/group1">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="2" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="6" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <NamedDataTypePtr OBJ-XID="/group1/type3"/>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 1 1 2 2 3 3 4 4 5 5
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="group2" OBJ-XID="/group2" Parents="/" >
+ <Dataset Name="dset5" OBJ-XID="/group2/dset5" Parents="/group2">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="2" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="6" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <NamedDataTypePtr OBJ-XID="/#9560:0"/>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 1 1 2 2 3 3 4 4 5 5
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tdatareg.h5.xml b/tools/testfiles/tdatareg.h5.xml
new file mode 100644
index 0000000..ac6a691
--- /dev/null
+++ b/tools/testfiles/tdatareg.h5.xml
@@ -0,0 +1,54 @@
+#############################
+Expected output for 'h5dump --xml tdatareg.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1" OBJ-XID="/Dataset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset2" OBJ-XID="/Dataset2" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 3 6 9 12 15 18 21 24 27
+ 30 33 36 39 42 45 48 51 54 57
+ 60 63 66 69 72 75 78 81 84 87
+ 90 93 96 99 102 105 108 111 114 117
+ 120 123 126 129 132 135 138 141 144 147
+ 150 153 156 159 162 165 168 171 174 177
+ 180 183 186 189 192 195 198 201 204 207
+ 210 213 216 219 222 225 228 231 234 237
+ 240 243 246 249 252 255 2 5 8 11
+ 14 17 20 23 26 29 32 35 38 41
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tdset.h5.xml b/tools/testfiles/tdset.h5.xml
new file mode 100644
index 0000000..ea7bdb8
--- /dev/null
+++ b/tools/testfiles/tdset.h5.xml
@@ -0,0 +1,133 @@
+#############################
+Expected output for 'h5dump --xml tdset.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="dset1" OBJ-XID="/dset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="20" MaxDimSize="20"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20
+ 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21
+ 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22
+ 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23
+ 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24
+ 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
+ 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26
+ 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27
+ 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="dset2" OBJ-XID="/dset2" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="30" MaxDimSize="30"/>
+ <Dimension DimSize="20" MaxDimSize="20"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0.0001 0.0002 0.0003 0.0004 0.0005 0.0006 0.0007 0.0008 0.0009 0.001
+ 0.0011 0.0012 0.0013 0.0014 0.0015 0.0016 0.0017 0.0018 0.0019
+ 1 1.0001 1.0002 1.0003 1.0004 1.0005 1.0006 1.0007 1.0008 1.0009 1.001
+ 1.0011 1.0012 1.0013 1.0014 1.0015 1.0016 1.0017 1.0018 1.0019
+ 2 2.0001 2.0002 2.0003 2.0004 2.0005 2.0006 2.0007 2.0008 2.0009 2.001
+ 2.0011 2.0012 2.0013 2.0014 2.0015 2.0016 2.0017 2.0018 2.0019
+ 3 3.0001 3.0002 3.0003 3.0004 3.0005 3.0006 3.0007 3.0008 3.0009 3.001
+ 3.0011 3.0012 3.0013 3.0014 3.0015 3.0016 3.0017 3.0018 3.0019
+ 4 4.0001 4.0002 4.0003 4.0004 4.0005 4.0006 4.0007 4.0008 4.0009 4.001
+ 4.0011 4.0012 4.0013 4.0014 4.0015 4.0016 4.0017 4.0018 4.0019
+ 5 5.0001 5.0002 5.0003 5.0004 5.0005 5.0006 5.0007 5.0008 5.0009 5.001
+ 5.0011 5.0012 5.0013 5.0014 5.0015 5.0016 5.0017 5.0018 5.0019
+ 6 6.0001 6.0002 6.0003 6.0004 6.0005 6.0006 6.0007 6.0008 6.0009 6.001
+ 6.0011 6.0012 6.0013 6.0014 6.0015 6.0016 6.0017 6.0018 6.0019
+ 7 7.0001 7.0002 7.0003 7.0004 7.0005 7.0006 7.0007 7.0008 7.0009 7.001
+ 7.0011 7.0012 7.0013 7.0014 7.0015 7.0016 7.0017 7.0018 7.0019
+ 8 8.0001 8.0002 8.0003 8.0004 8.0005 8.0006 8.0007 8.0008 8.0009 8.001
+ 8.0011 8.0012 8.0013 8.0014 8.0015 8.0016 8.0017 8.0018 8.0019
+ 9 9.0001 9.0002 9.0003 9.0004 9.0005 9.0006 9.0007 9.0008 9.0009 9.001
+ 9.0011 9.0012 9.0013 9.0014 9.0015 9.0016 9.0017 9.0018 9.0019
+ 10 10.0001 10.0002 10.0003 10.0004 10.0005 10.0006 10.0007 10.0008
+ 10.0009 10.001 10.0011 10.0012 10.0013 10.0014 10.0015 10.0016 10.0017
+ 10.0018 10.0019
+ 11 11.0001 11.0002 11.0003 11.0004 11.0005 11.0006 11.0007 11.0008
+ 11.0009 11.001 11.0011 11.0012 11.0013 11.0014 11.0015 11.0016 11.0017
+ 11.0018 11.0019
+ 12 12.0001 12.0002 12.0003 12.0004 12.0005 12.0006 12.0007 12.0008
+ 12.0009 12.001 12.0011 12.0012 12.0013 12.0014 12.0015 12.0016 12.0017
+ 12.0018 12.0019
+ 13 13.0001 13.0002 13.0003 13.0004 13.0005 13.0006 13.0007 13.0008
+ 13.0009 13.001 13.0011 13.0012 13.0013 13.0014 13.0015 13.0016 13.0017
+ 13.0018 13.0019
+ 14 14.0001 14.0002 14.0003 14.0004 14.0005 14.0006 14.0007 14.0008
+ 14.0009 14.001 14.0011 14.0012 14.0013 14.0014 14.0015 14.0016 14.0017
+ 14.0018 14.0019
+ 15 15.0001 15.0002 15.0003 15.0004 15.0005 15.0006 15.0007 15.0008
+ 15.0009 15.001 15.0011 15.0012 15.0013 15.0014 15.0015 15.0016 15.0017
+ 15.0018 15.0019
+ 16 16.0001 16.0002 16.0003 16.0004 16.0005 16.0006 16.0007 16.0008
+ 16.0009 16.001 16.0011 16.0012 16.0013 16.0014 16.0015 16.0016 16.0017
+ 16.0018 16.0019
+ 17 17.0001 17.0002 17.0003 17.0004 17.0005 17.0006 17.0007 17.0008
+ 17.0009 17.001 17.0011 17.0012 17.0013 17.0014 17.0015 17.0016 17.0017
+ 17.0018 17.0019
+ 18 18.0001 18.0002 18.0003 18.0004 18.0005 18.0006 18.0007 18.0008
+ 18.0009 18.001 18.0011 18.0012 18.0013 18.0014 18.0015 18.0016 18.0017
+ 18.0018 18.0019
+ 19 19.0001 19.0002 19.0003 19.0004 19.0005 19.0006 19.0007 19.0008
+ 19.0009 19.001 19.0011 19.0012 19.0013 19.0014 19.0015 19.0016 19.0017
+ 19.0018 19.0019
+ 20 20.0001 20.0002 20.0003 20.0004 20.0005 20.0006 20.0007 20.0008
+ 20.0009 20.001 20.0011 20.0012 20.0013 20.0014 20.0015 20.0016 20.0017
+ 20.0018 20.0019
+ 21 21.0001 21.0002 21.0003 21.0004 21.0005 21.0006 21.0007 21.0008
+ 21.0009 21.001 21.0011 21.0012 21.0013 21.0014 21.0015 21.0016 21.0017
+ 21.0018 21.0019
+ 22 22.0001 22.0002 22.0003 22.0004 22.0005 22.0006 22.0007 22.0008
+ 22.0009 22.001 22.0011 22.0012 22.0013 22.0014 22.0015 22.0016 22.0017
+ 22.0018 22.0019
+ 23 23.0001 23.0002 23.0003 23.0004 23.0005 23.0006 23.0007 23.0008
+ 23.0009 23.001 23.0011 23.0012 23.0013 23.0014 23.0015 23.0016 23.0017
+ 23.0018 23.0019
+ 24 24.0001 24.0002 24.0003 24.0004 24.0005 24.0006 24.0007 24.0008
+ 24.0009 24.001 24.0011 24.0012 24.0013 24.0014 24.0015 24.0016 24.0017
+ 24.0018 24.0019
+ 25 25.0001 25.0002 25.0003 25.0004 25.0005 25.0006 25.0007 25.0008
+ 25.0009 25.001 25.0011 25.0012 25.0013 25.0014 25.0015 25.0016 25.0017
+ 25.0018 25.0019
+ 26 26.0001 26.0002 26.0003 26.0004 26.0005 26.0006 26.0007 26.0008
+ 26.0009 26.001 26.0011 26.0012 26.0013 26.0014 26.0015 26.0016 26.0017
+ 26.0018 26.0019
+ 27 27.0001 27.0002 27.0003 27.0004 27.0005 27.0006 27.0007 27.0008
+ 27.0009 27.001 27.0011 27.0012 27.0013 27.0014 27.0015 27.0016 27.0017
+ 27.0018 27.0019
+ 28 28.0001 28.0002 28.0003 28.0004 28.0005 28.0006 28.0007 28.0008
+ 28.0009 28.001 28.0011 28.0012 28.0013 28.0014 28.0015 28.0016 28.0017
+ 28.0018 28.0019
+ 29 29.0001 29.0002 29.0003 29.0004 29.0005 29.0006 29.0007 29.0008
+ 29.0009 29.001 29.0011 29.0012 29.0013 29.0014 29.0015 29.0016 29.0017
+ 29.0018 29.0019
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tdset2.h5.xml b/tools/testfiles/tdset2.h5.xml
new file mode 100644
index 0000000..fa7ebd3
--- /dev/null
+++ b/tools/testfiles/tdset2.h5.xml
@@ -0,0 +1,95 @@
+#############################
+Expected output for 'h5dump --xml tdset2.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="dset1" OBJ-XID="/dset1" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="2">
+ <ChunkDimension DimSize="5" />
+ <ChunkDimension DimSize="5" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="UNLIMITED"/>
+ <Dimension DimSize="20" MaxDimSize="20"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="dset2" OBJ-XID="/dset2" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="2">
+ <ChunkDimension DimSize="5" />
+ <ChunkDimension DimSize="5" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="30" MaxDimSize="30"/>
+ <Dimension DimSize="10" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
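
The StorageLayout/ChunkedLayout elements and MaxDimSize="UNLIMITED" above come from chunked, extendible datasets; chunking is what allows an unlimited dimension. A minimal sketch of creating such a dataset (1.8+ names assumed, data writing omitted):

    #include "hdf5.h"

    int main(void)
    {
        hid_t   file, space, dcpl, dset;
        hsize_t dims[2]    = {10, 20};
        hsize_t maxdims[2] = {H5S_UNLIMITED, 20};   /* first dimension extendible       */
        hsize_t chunk[2]   = {5, 5};                /* -> ChunkDimension DimSize="5"    */

        file  = H5Fcreate("tdset2.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        space = H5Screate_simple(2, dims, maxdims);
        dcpl  = H5Pcreate(H5P_DATASET_CREATE);

        H5Pset_chunk(dcpl, 2, chunk);               /* chunking required for UNLIMITED  */

        dset = H5Dcreate2(file, "dset1", H5T_STD_I32BE, space,
                          H5P_DEFAULT, dcpl, H5P_DEFAULT);

        H5Dclose(dset);
        H5Pclose(dcpl);
        H5Sclose(space);
        H5Fclose(file);
        return 0;
    }
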
diff --git a/tools/testfiles/tempty.h5.xml b/tools/testfiles/tempty.h5.xml
new file mode 100644
index 0000000..43f4d6b
--- /dev/null
+++ b/tools/testfiles/tempty.h5.xml
@@ -0,0 +1,125 @@
+#############################
+Expected output for 'h5dump --xml tempty.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1.0" OBJ-XID="/Dataset1.0" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <VLType>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </VLType>
+ </DataType>
+<!-- Note: format of VL data not specified -->
+ <Data>
+ <DataFromFile>
+
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset2.0" OBJ-XID="/Dataset2.0" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset3.0" OBJ-XID="/Dataset3.0" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="LE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset4.0" OBJ-XID="/Dataset4.0" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="4" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset5.0" OBJ-XID="/Dataset5.0" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="a">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="b">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="LE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="c">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tenum.h5.xml b/tools/testfiles/tenum.h5.xml
new file mode 100644
index 0000000..8fc64a2
--- /dev/null
+++ b/tools/testfiles/tenum.h5.xml
@@ -0,0 +1,59 @@
+#############################
+Expected output for 'h5dump --xml tenum.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <NamedDataType Name="enum normal" OBJ-XID="/enum normal" Parents="root">
+ <AtomicType>
+ <EnumType Nelems="5">
+ <EnumElement>
+ RED
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ GREEN
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ BLUE
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ WHITE
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ BLACK
+ </EnumElement>
+ <EnumValue>
+ 4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </NamedDataType>
+ <Dataset Name="table" OBJ-XID="/table" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="20" MaxDimSize="20"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <NamedDataTypePtr OBJ-XID="/enum normal"/>
+ <Data>
+ <DataFromFile>
+ RED GREEN BLUE GREEN WHITE WHITE BLACK GREEN BLUE RED RED BLUE GREEN
+ BLACK WHITE RED WHITE GREEN GREEN BLUE
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
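
The EnumType listing above, with its EnumElement/EnumValue pairs and symbolic data values, corresponds to an HDF5 enumeration datatype committed under the name "enum normal". An illustrative sketch, assuming the 1.8+ H5Tcommit2 name:

    #include "hdf5.h"

    int main(void)
    {
        int   val;
        hid_t file, etype;

        file  = H5Fcreate("tenum.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        etype = H5Tenum_create(H5T_NATIVE_INT);

        val = 0; H5Tenum_insert(etype, "RED",   &val);
        val = 1; H5Tenum_insert(etype, "GREEN", &val);
        val = 2; H5Tenum_insert(etype, "BLUE",  &val);
        val = 3; H5Tenum_insert(etype, "WHITE", &val);
        val = 4; H5Tenum_insert(etype, "BLACK", &val);

        /* committing the type gives it a name; h5dump then prints it as a
           NamedDataType and datasets refer to it via NamedDataTypePtr */
        H5Tcommit2(file, "enum normal", etype, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

        H5Tclose(etype);
        H5Fclose(file);
        return 0;
    }
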
diff --git a/tools/testfiles/tgroup.h5.xml b/tools/testfiles/tgroup.h5.xml
new file mode 100644
index 0000000..c8e936a
--- /dev/null
+++ b/tools/testfiles/tgroup.h5.xml
@@ -0,0 +1,35 @@
+#############################
+Expected output for 'h5dump --xml tgroup.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Group Name="g1" OBJ-XID="/g1" Parents="/" >
+ <Group Name="g1.1" OBJ-XID="/g1/g1.1" Parents="/g1" >
+ </Group>
+ <Group Name="g1.2" OBJ-XID="/g1/g1.2" Parents="/g1" >
+ </Group>
+ </Group>
+ <Group Name="g2" OBJ-XID="/g2" Parents="/" >
+ <Group Name="g2.1" OBJ-XID="/g2/g2.1" Parents="/g2" >
+ <Group Name="g2.1.1" OBJ-XID="/g2/g2.1/g2.1.1" Parents="/g2/g2.1" >
+ </Group>
+ <Group Name="g2.1.2" OBJ-XID="/g2/g2.1/g2.1.2" Parents="/g2/g2.1" >
+ </Group>
+ <Group Name="g2.1.3" OBJ-XID="/g2/g2.1/g2.1.3" Parents="/g2/g2.1" >
+ </Group>
+ </Group>
+ </Group>
+ <Group Name="g3" OBJ-XID="/g3" Parents="/" >
+ <Group Name="g3.1" OBJ-XID="/g3/g3.1" Parents="/g3" >
+ </Group>
+ <Group Name="g3.2" OBJ-XID="/g3/g3.2" Parents="/g3" >
+ </Group>
+ <Group Name="g3.3" OBJ-XID="/g3/g3.3" Parents="/g3" >
+ </Group>
+ <Group Name="g3.4" OBJ-XID="/g3/g3.4" Parents="/g3" >
+ </Group>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/thlink.h5.xml b/tools/testfiles/thlink.h5.xml
new file mode 100644
index 0000000..c116594
--- /dev/null
+++ b/tools/testfiles/thlink.h5.xml
@@ -0,0 +1,39 @@
+#############################
+Expected output for 'h5dump --xml thlink.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="dset1" OBJ-XID="/dset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 4
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Group Name="g1" OBJ-XID="/g1" Parents="/" >
+ <Dataset Name="dset2" OBJ-XID="/g1/dset2" Parents="/g1">
+ <DatasetPtr OBJ-XID="/dset1"/>
+ </Dataset>
+ <Group Name="g1.1" OBJ-XID="/g1/g1.1" Parents="/g1" >
+ <Dataset Name="dset3" OBJ-XID="/g1/g1.1/dset3" Parents="/g1/g1.1">
+ <DatasetPtr OBJ-XID="/dset1"/>
+ </Dataset>
+ </Group>
+ </Group>
+ <Group Name="g2" OBJ-XID="/g2" Parents="/" >
+ <GroupPtr OBJ-XID="/g1/g1.1"/>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tloop.h5.xml b/tools/testfiles/tloop.h5.xml
new file mode 100644
index 0000000..2e36c89
--- /dev/null
+++ b/tools/testfiles/tloop.h5.xml
@@ -0,0 +1,19 @@
+#############################
+Expected output for 'h5dump --xml tloop.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Group Name="g1" OBJ-XID="/g1" Parents="/" >
+ <Group Name="g1.1" OBJ-XID="/g1/g1.1" Parents="/g1" >
+ <Group Name="g2.1" OBJ-XID="/g1/g1.1/g2.1" Parents="/g1/g1.1" >
+ <GroupPtr OBJ-XID="/g1"/>
+ </Group>
+ </Group>
+ </Group>
+ <Group Name="g2" OBJ-XID="/g2" Parents="/" >
+ <GroupPtr OBJ-XID="/g1/g1.1"/>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tloop2.h5.xml b/tools/testfiles/tloop2.h5.xml
new file mode 100644
index 0000000..d5accdf
--- /dev/null
+++ b/tools/testfiles/tloop2.h5.xml
@@ -0,0 +1,17 @@
+#############################
+Expected output for 'h5dump --xml tloop2.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Group Name="g1" OBJ-XID="/g1" Parents="/" >
+ <Group Name="g1.1" OBJ-XID="/g1/g1.1" Parents="/g1" >
+ <SoftLink LinkName="g2.1" Target="/g1" TargetObj="/g1/g1.1/g1" OBJ-XID="/g1/g1.1/g2.1" Source="/g1/g1.1"/>
+ </Group>
+ </Group>
+ <Group Name="g2" OBJ-XID="/g2" Parents="/" >
+ <GroupPtr OBJ-XID="/g1/g1.1"/>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tmany.h5.xml b/tools/testfiles/tmany.h5.xml
new file mode 100644
index 0000000..11f22c9
--- /dev/null
+++ b/tools/testfiles/tmany.h5.xml
@@ -0,0 +1,338 @@
+#############################
+Expected output for 'h5dump --xml tmany.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Group Name="g1" OBJ-XID="/g1" Parents="/" >
+ <Group Name="g1.1" OBJ-XID="/g1/g1.1" Parents="/g1" >
+ <Dataset Name="dset1" OBJ-XID="/g1/g1.1/dset1" Parents="/g1/g1.1">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="2" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="6" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="a_array">
+ <DataType>
+ <ArrayType Ndims="4">
+ <ArrayDimension DimSize="2" DimPerm="0"/>
+ <ArrayDimension DimSize="2" DimPerm="1"/>
+ <ArrayDimension DimSize="2" DimPerm="2"/>
+ <ArrayDimension DimSize="2" DimPerm="3"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ <Field FieldName="b_array">
+ <DataType>
+ <ArrayType Ndims="4">
+ <ArrayDimension DimSize="2" DimPerm="0"/>
+ <ArrayDimension DimSize="2" DimPerm="1"/>
+ <ArrayDimension DimSize="2" DimPerm="2"/>
+ <ArrayDimension DimSize="2" DimPerm="3"/>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ <Field FieldName="c_array">
+ <DataType>
+ <ArrayType Ndims="4">
+ <ArrayDimension DimSize="2" DimPerm="0"/>
+ <ArrayDimension DimSize="2" DimPerm="1"/>
+ <ArrayDimension DimSize="2" DimPerm="2"/>
+ <ArrayDimension DimSize="2" DimPerm="3"/>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <Attribute Name="attr1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 98 99 100 101 102 103 104 105 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="attr2">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="2" MaxDimSize="2"/>
+ <Dimension DimSize="2" MaxDimSize="2"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1
+ 2 3
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="attr3">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0.1 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 1
+ 0 1
+ 0 1
+ 0 1
+ 0 1
+ 0 1
+ 0 1
+ 0 1 0 1
+ 0 1
+ 0 1
+ 0 1
+ 0 1
+ 0 1
+ 0 1
+ 0 1 6 7
+ 6 7
+ 6 7
+ 6 7
+ 6 7
+ 6 7
+ 6 7
+ 6 7
+ 1 2
+ 1 2
+ 1 2
+ 1 2
+ 1 2
+ 1 2
+ 1 2
+ 1 2 1 2
+ 1 2
+ 1 2
+ 1 2
+ 1 2
+ 1 2
+ 1 2
+ 1 2 7 8
+ 7 8
+ 7 8
+ 7 8
+ 7 8
+ 7 8
+ 7 8
+ 7 8
+ 2 3
+ 2 3
+ 2 3
+ 2 3
+ 2 3
+ 2 3
+ 2 3
+ 2 3 2 3
+ 2 3
+ 2 3
+ 2 3
+ 2 3
+ 2 3
+ 2 3
+ 2 3 8 9
+ 8 9
+ 8 9
+ 8 9
+ 8 9
+ 8 9
+ 8 9
+ 8 9
+ 3 4
+ 3 4
+ 3 4
+ 3 4
+ 3 4
+ 3 4
+ 3 4
+ 3 4 3 4
+ 3 4
+ 3 4
+ 3 4
+ 3 4
+ 3 4
+ 3 4
+ 3 4 9 10
+ 9 10
+ 9 10
+ 9 10
+ 9 10
+ 9 10
+ 9 10
+ 9 10
+ 4 5
+ 4 5
+ 4 5
+ 4 5
+ 4 5
+ 4 5
+ 4 5
+ 4 5 4 5
+ 4 5
+ 4 5
+ 4 5
+ 4 5
+ 4 5
+ 4 5
+ 4 5 10 11
+ 10 11
+ 10 11
+ 10 11
+ 10 11
+ 10 11
+ 10 11
+ 10 11
+ 5 6
+ 5 6
+ 5 6
+ 5 6
+ 5 6
+ 5 6
+ 5 6
+ 5 6 5 6
+ 5 6
+ 5 6
+ 5 6
+ 5 6
+ 5 6
+ 5 6
+ 5 6 11 12
+ 11 12
+ 11 12
+ 11 12
+ 11 12
+ 11 12
+ 11 12
+ 11 12
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="g1.2" OBJ-XID="/g1/g1.2" Parents="/g1" >
+ <Dataset Name="link1" OBJ-XID="/g1/g1.2/link1" Parents="/g1/g1.2">
+ <DatasetPtr OBJ-XID="/g1/g1.1/dset1"/>
+ </Dataset>
+ </Group>
+ </Group>
+ <Group Name="g2" OBJ-XID="/g2" Parents="/" >
+ <SoftLink LinkName="slink2" Target="/g1" TargetObj="/g2/g1" OBJ-XID="/g2/slink2" Source="/g2"/>
+ </Group>
+ <Group Name="g3" OBJ-XID="/g3" Parents="/" >
+ <Dataset Name="link3" OBJ-XID="/g3/link3" Parents="/g3">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ 0 1 2 3 4 5 6 7 8 9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="g4" OBJ-XID="/g4" Parents="/" >
+ <Dataset Name="dset2" OBJ-XID="/g4/dset2" Parents="/g4">
+ <DatasetPtr OBJ-XID="/g3/link3"/>
+ </Dataset>
+ </Group>
+ <Group Name="g5" OBJ-XID="/g5" Parents="/" >
+ <SoftLink LinkName="slink4" Target="/g6/dset3" TargetObj="/g5/g6/dset3" OBJ-XID="/g5/slink4" Source="/g5"/>
+ </Group>
+ <Group Name="g6" OBJ-XID="/g6" Parents="/" >
+ <Dataset Name="dset3" OBJ-XID="/g6/dset3" Parents="/g6">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0
+ 1 1 1 1 1 1 1 1 1 1
+ 2 2 2 2 2 2 2 2 2 2
+ 3 3 3 3 3 3 3 3 3 3
+ 4 4 4 4 4 4 4 4 4 4
+ 5 5 5 5 5 5 5 5 5 5
+ 6 6 6 6 6 6 6 6 6 6
+ 7 7 7 7 7 7 7 7 7 7
+ 8 8 8 8 8 8 8 8 8 8
+ 9 9 9 9 9 9 9 9 9 9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tname-amp.h5 b/tools/testfiles/tname-amp.h5
new file mode 100644
index 0000000..eeaeb67
--- /dev/null
+++ b/tools/testfiles/tname-amp.h5
Binary files differ
diff --git a/tools/testfiles/tname-amp.h5.xml b/tools/testfiles/tname-amp.h5.xml
new file mode 100644
index 0000000..e7e882b
--- /dev/null
+++ b/tools/testfiles/tname-amp.h5.xml
@@ -0,0 +1,55 @@
+#############################
+Expected output for 'h5dump --xml tname-amp.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Attribute Name="attr1&amp;withamp">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 98 99 100 101 102 103 104 105 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Group Name="g1.1&amp;withamp" OBJ-XID="/g1.1&amp;withamp" Parents="/" >
+ <Dataset Name="dset1.1.1&amp;withamp" OBJ-XID="/g1.1&amp;withamp/dset1.1.1&amp;withamp" Parents="/g1.1&amp;withamp">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0
+ 0 1 2 3 4 5 6 7 8 9
+ 0 2 4 6 8 10 12 14 16 18
+ 0 3 6 9 12 15 18 21 24 27
+ 0 4 8 12 16 20 24 28 32 36
+ 0 5 10 15 20 25 30 35 40 45
+ 0 6 12 18 24 30 36 42 48 54
+ 0 7 14 21 28 35 42 49 56 63
+ 0 8 16 24 32 40 48 56 64 72
+ 0 9 18 27 36 45 54 63 72 81
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tname-apos.h5 b/tools/testfiles/tname-apos.h5
new file mode 100644
index 0000000..c86d2dd
--- /dev/null
+++ b/tools/testfiles/tname-apos.h5
Binary files differ
diff --git a/tools/testfiles/tname-apos.h5.xml b/tools/testfiles/tname-apos.h5.xml
new file mode 100644
index 0000000..5133ad7
--- /dev/null
+++ b/tools/testfiles/tname-apos.h5.xml
@@ -0,0 +1,55 @@
+#############################
+Expected output for 'h5dump --xml tname-apos.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Attribute Name="attr1&apos;withapos">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 98 99 100 101 102 103 104 105 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Group Name="g1.1&apos;withapos" OBJ-XID="/g1.1&apos;withapos" Parents="/" >
+ <Dataset Name="dset1.1.1&apos;withapos" OBJ-XID="/g1.1&apos;withapos/dset1.1.1&apos;withapos" Parents="/g1.1&apos;withapos">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0
+ 0 1 2 3 4 5 6 7 8 9
+ 0 2 4 6 8 10 12 14 16 18
+ 0 3 6 9 12 15 18 21 24 27
+ 0 4 8 12 16 20 24 28 32 36
+ 0 5 10 15 20 25 30 35 40 45
+ 0 6 12 18 24 30 36 42 48 54
+ 0 7 14 21 28 35 42 49 56 63
+ 0 8 16 24 32 40 48 56 64 72
+ 0 9 18 27 36 45 54 63 72 81
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tname-gt.h5 b/tools/testfiles/tname-gt.h5
new file mode 100644
index 0000000..b852be8
--- /dev/null
+++ b/tools/testfiles/tname-gt.h5
Binary files differ
diff --git a/tools/testfiles/tname-gt.h5.xml b/tools/testfiles/tname-gt.h5.xml
new file mode 100644
index 0000000..383e31f
--- /dev/null
+++ b/tools/testfiles/tname-gt.h5.xml
@@ -0,0 +1,55 @@
+#############################
+Expected output for 'h5dump --xml tname-gt.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Attribute Name="attr1&gt;withgt">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 98 99 100 101 102 103 104 105 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Group Name="g1.1&gt;withgt" OBJ-XID="/g1.1&gt;withgt" Parents="/" >
+ <Dataset Name="dset1.1.1&gt;withgt" OBJ-XID="/g1.1&gt;withgt/dset1.1.1&gt;withgt" Parents="/g1.1&gt;withgt">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0
+ 0 1 2 3 4 5 6 7 8 9
+ 0 2 4 6 8 10 12 14 16 18
+ 0 3 6 9 12 15 18 21 24 27
+ 0 4 8 12 16 20 24 28 32 36
+ 0 5 10 15 20 25 30 35 40 45
+ 0 6 12 18 24 30 36 42 48 54
+ 0 7 14 21 28 35 42 49 56 63
+ 0 8 16 24 32 40 48 56 64 72
+ 0 9 18 27 36 45 54 63 72 81
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tname-lt.h5 b/tools/testfiles/tname-lt.h5
new file mode 100644
index 0000000..698cf82
--- /dev/null
+++ b/tools/testfiles/tname-lt.h5
Binary files differ
diff --git a/tools/testfiles/tname-lt.h5.xml b/tools/testfiles/tname-lt.h5.xml
new file mode 100644
index 0000000..abbe46b
--- /dev/null
+++ b/tools/testfiles/tname-lt.h5.xml
@@ -0,0 +1,55 @@
+#############################
+Expected output for 'h5dump --xml tname-lt.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Attribute Name="attr1&lt;withlt">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 98 99 100 101 102 103 104 105 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Group Name="g1.1&lt;withlt" OBJ-XID="/g1.1&lt;withlt" Parents="/" >
+ <Dataset Name="dset1.1.1&lt;withlt" OBJ-XID="/g1.1&lt;withlt/dset1.1.1&lt;withlt" Parents="/g1.1&lt;withlt">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0
+ 0 1 2 3 4 5 6 7 8 9
+ 0 2 4 6 8 10 12 14 16 18
+ 0 3 6 9 12 15 18 21 24 27
+ 0 4 8 12 16 20 24 28 32 36
+ 0 5 10 15 20 25 30 35 40 45
+ 0 6 12 18 24 30 36 42 48 54
+ 0 7 14 21 28 35 42 49 56 63
+ 0 8 16 24 32 40 48 56 64 72
+ 0 9 18 27 36 45 54 63 72 81
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tname-quot.h5 b/tools/testfiles/tname-quot.h5
new file mode 100644
index 0000000..be7f448
--- /dev/null
+++ b/tools/testfiles/tname-quot.h5
Binary files differ
diff --git a/tools/testfiles/tname-quot.h5.xml b/tools/testfiles/tname-quot.h5.xml
new file mode 100644
index 0000000..d1b2949
--- /dev/null
+++ b/tools/testfiles/tname-quot.h5.xml
@@ -0,0 +1,55 @@
+#############################
+Expected output for 'h5dump --xml tname-quot.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Attribute Name="attr1&quot;withquot">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 98 99 100 101 102 103 104 105 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Group Name="g1.1&quot;withquot" OBJ-XID="/g1.1&quot;withquot" Parents="/" >
+ <Dataset Name="dset1.1.1&quot;withquot" OBJ-XID="/g1.1&quot;withquot/dset1.1.1&quot;withquot" Parents="/g1.1&quot;withquot">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0
+ 0 1 2 3 4 5 6 7 8 9
+ 0 2 4 6 8 10 12 14 16 18
+ 0 3 6 9 12 15 18 21 24 27
+ 0 4 8 12 16 20 24 28 32 36
+ 0 5 10 15 20 25 30 35 40 45
+ 0 6 12 18 24 30 36 42 48 54
+ 0 7 14 21 28 35 42 49 56 63
+ 0 8 16 24 32 40 48 56 64 72
+ 0 9 18 27 36 45 54 63 72 81
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tname-sp.h5 b/tools/testfiles/tname-sp.h5
new file mode 100644
index 0000000..f21d79f
--- /dev/null
+++ b/tools/testfiles/tname-sp.h5
Binary files differ
diff --git a/tools/testfiles/tname-sp.h5.xml b/tools/testfiles/tname-sp.h5.xml
new file mode 100644
index 0000000..8eb4ae9
--- /dev/null
+++ b/tools/testfiles/tname-sp.h5.xml
@@ -0,0 +1,55 @@
+#############################
+Expected output for 'h5dump --xml tname-sp.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Attribute Name="attr1 withspace">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 97 98 99 100 101 102 103 104 105 0
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Group Name="g1.1 withspace" OBJ-XID="/g1.1 withspace" Parents="/" >
+ <Dataset Name="dset1.1.1 withspace" OBJ-XID="/g1.1 withspace/dset1.1.1 withspace" Parents="/g1.1 withspace">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0 0 0 0 0 0
+ 0 1 2 3 4 5 6 7 8 9
+ 0 2 4 6 8 10 12 14 16 18
+ 0 3 6 9 12 15 18 21 24 27
+ 0 4 8 12 16 20 24 28 32 36
+ 0 5 10 15 20 25 30 35 40 45
+ 0 6 12 18 24 30 36 42 48 54
+ 0 7 14 21 28 35 42 49 56 63
+ 0 8 16 24 32 40 48 56 64 72
+ 0 9 18 27 36 45 54 63 72 81
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tnestedcomp.h5.xml b/tools/testfiles/tnestedcomp.h5.xml
new file mode 100644
index 0000000..b6c92d4
--- /dev/null
+++ b/tools/testfiles/tnestedcomp.h5.xml
@@ -0,0 +1,76 @@
+#############################
+Expected output for 'h5dump --xml tnestedcomp.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="ArrayOfStructures" OBJ-XID="/ArrayOfStructures" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="a_name">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="b_name">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="LE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="c_name">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="LE" Size="8" SignBitLocation="63" ExponentBits="11" ExponentLocation="52" MantissaBits="52" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="d_name">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="char_name">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="1" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="array_name">
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="2" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="LE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 1 "A" -100 100 1 1 0.5 "B" -100 100
+ 2 4 0.333333 "C" -100 100 3 9 0.25 "D" -100 100
+ 4 16 0.2 "E" -100 100 5 25 0.166667 "F" -100 100
+ 6 36 0.142857 "G" -100 100 7 49 0.125 "H" -100 100
+ 8 64 0.111111 "I" -100 100 9 81 0.1 "J" -100 100
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tnodata.h5 b/tools/testfiles/tnodata.h5
new file mode 100644
index 0000000..baaa3fb
--- /dev/null
+++ b/tools/testfiles/tnodata.h5
Binary files differ
diff --git a/tools/testfiles/tnodata.h5.xml b/tools/testfiles/tnodata.h5.xml
new file mode 100644
index 0000000..5e8321a
--- /dev/null
+++ b/tools/testfiles/tnodata.h5.xml
@@ -0,0 +1,26 @@
+#############################
+Expected output for 'h5dump --xml tnodata.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="dset1" OBJ-XID="/dset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tobjref.h5.xml b/tools/testfiles/tobjref.h5.xml
new file mode 100644
index 0000000..3cfe6f7
--- /dev/null
+++ b/tools/testfiles/tobjref.h5.xml
@@ -0,0 +1,92 @@
+#############################
+Expected output for 'h5dump --xml tobjref.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset3" OBJ-XID="/Dataset3" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset1"
+ "/Group1/Dataset2"
+ "/Group1"
+ "/Group1/Datatype1"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Group Name="Group1" OBJ-XID="/Group1" Parents="/" >
+ <NamedDataType Name="Datatype1" OBJ-XID="/Group1/Datatype1" Parents="/Group1">
+ <CompoundType>
+ <Field FieldName="a">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="b">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="c">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </NamedDataType>
+ <Dataset Name="Dataset1" OBJ-XID="/Group1/Dataset1" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 3 6 9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset2" OBJ-XID="/Group1/Dataset2" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/topaque.h5.xml b/tools/testfiles/topaque.h5.xml
new file mode 100644
index 0000000..688f2b8
--- /dev/null
+++ b/tools/testfiles/topaque.h5.xml
@@ -0,0 +1,27 @@
+#############################
+Expected output for 'h5dump --xml topaque.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="opaque test" OBJ-XID="/opaque test" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="2" MaxDimSize="2"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <OpaqueType Tag="test opaque type" Size="200"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0x0063016202610360045f055e065d075c085b095a0a590b580c570d560e550f541053115212511350144f154e164d174c184b194a1a491b481c471d461e451f442043214222412340243f253e263d273c283b293a2a392b382c372d362e352f343033313232313330342f352e362d372c382b392a3a293b283c273d263e253f244023412242214320441f451e461d471c481b491a4a194b184c174d164e154f145013511252115310540f550e560d570c580b590a5a095b085c075d065e055f046003610262016300
+ 0x3800002c3c000027340001ea04000012000000000008a4640000000000000000000000000000000000000000ef7ec9c000000001effffa84effffa8c0003fc000000000000000000effffa2000012e84effffa2000012e5800000003effffa8400000004effffa8c00000005effffaf0000000000000000000000001effffa84effffa8c0003fc0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tref-escapes-at.h5 b/tools/testfiles/tref-escapes-at.h5
new file mode 100644
index 0000000..c03c201
--- /dev/null
+++ b/tools/testfiles/tref-escapes-at.h5
Binary files differ
diff --git a/tools/testfiles/tref-escapes-at.h5.xml b/tools/testfiles/tref-escapes-at.h5.xml
new file mode 100644
index 0000000..03184f3
--- /dev/null
+++ b/tools/testfiles/tref-escapes-at.h5.xml
@@ -0,0 +1,282 @@
+#############################
+Expected output for 'h5dump --xml tref-escapes-at.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Group Name="Group1" OBJ-XID="/Group1" Parents="/" >
+ <Dataset Name="Dataset space" OBJ-XID="/Group1/Dataset space" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&quot;quote" OBJ-XID="/Group1/Dataset&quot;quote" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 3 6 9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&amp;amp" OBJ-XID="/Group1/Dataset&amp;amp" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&apos;apos" OBJ-XID="/Group1/Dataset&apos;apos" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&lt;lt" OBJ-XID="/Group1/Dataset&lt;lt" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&gt;gt" OBJ-XID="/Group1/Dataset&gt;gt" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset\slash" OBJ-XID="/Group1/Dataset\slash" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Group Name="Group1.1" OBJ-XID="/Group1/Group1.1" Parents="/Group1" >
+ <Dataset Name="Datasetrefs" OBJ-XID="/Group1/Group1.1/Datasetrefs" Parents="/Group1/Group1.1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Attribute Name="reftoquote">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset\"quote"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="reftoslash">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset\\slash"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="reftoamp">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset&amp;amp"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="reftolt">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset&lt;lt"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="reftogt">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset&gt;gt"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="reftoapos">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset&apos;apos"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Attribute Name="reftospace">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset space"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Data>
+ <DataFromFile>
+ 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tref-escapes.h5 b/tools/testfiles/tref-escapes.h5
new file mode 100644
index 0000000..b9bff90
--- /dev/null
+++ b/tools/testfiles/tref-escapes.h5
Binary files differ
diff --git a/tools/testfiles/tref-escapes.h5.xml b/tools/testfiles/tref-escapes.h5.xml
new file mode 100644
index 0000000..4f47144
--- /dev/null
+++ b/tools/testfiles/tref-escapes.h5.xml
@@ -0,0 +1,157 @@
+#############################
+Expected output for 'h5dump --xml tref-escapes.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Group Name="Group1" OBJ-XID="/Group1" Parents="/" >
+ <Dataset Name="Dataset space" OBJ-XID="/Group1/Dataset space" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&quot;quote" OBJ-XID="/Group1/Dataset&quot;quote" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 3 6 9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&amp;amp" OBJ-XID="/Group1/Dataset&amp;amp" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&apos;apos" OBJ-XID="/Group1/Dataset&apos;apos" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&lt;lt" OBJ-XID="/Group1/Dataset&lt;lt" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset&gt;gt" OBJ-XID="/Group1/Dataset&gt;gt" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset\slash" OBJ-XID="/Group1/Dataset\slash" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Group Name="Group1.1" OBJ-XID="/Group1/Group1.1" Parents="/Group1" >
+ <Dataset Name="Datasetrefs" OBJ-XID="/Group1/Group1.1/Datasetrefs" Parents="/Group1/Group1.1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="7" MaxDimSize="7"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset\"quote"
+ "/Group1/Dataset\\slash"
+ "/Group1/Dataset&amp;amp"
+ "/Group1/Dataset&lt;lt"
+ "/Group1/Dataset space"
+ "/Group1/Dataset&apos;apos"
+ "/Group1/Dataset&gt;gt"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tref.h5 b/tools/testfiles/tref.h5
new file mode 100644
index 0000000..12453be
--- /dev/null
+++ b/tools/testfiles/tref.h5
Binary files differ
diff --git a/tools/testfiles/tref.h5.xml b/tools/testfiles/tref.h5.xml
new file mode 100644
index 0000000..2637347
--- /dev/null
+++ b/tools/testfiles/tref.h5.xml
@@ -0,0 +1,66 @@
+#############################
+Expected output for 'h5dump --xml tref.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="ZZZDataset3" OBJ-XID="/ZZZDataset3" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <ReferenceType>
+ <ObjectReferenceType />
+ </ReferenceType>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "/Group1/Dataset1"
+ "/Group1/Dataset2"
+ "/Group1"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Group Name="Group1" OBJ-XID="/Group1" Parents="/" >
+ <Dataset Name="Dataset1" OBJ-XID="/Group1/Dataset1" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 3 6 9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset2" OBJ-XID="/Group1/Dataset2" Parents="/Group1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 0 0 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tsaf.h5.xml b/tools/testfiles/tsaf.h5.xml
new file mode 100644
index 0000000..9b3feef
--- /dev/null
+++ b/tools/testfiles/tsaf.h5.xml
@@ -0,0 +1,2602 @@
+#############################
+Expected output for 'h5dump --xml tsaf.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name=".DSL_METADATA" OBJ-XID="/.DSL_METADATA" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5919" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="false" Size="1" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 47 32 67 79 78 84 65 73 78 69 82 10 47 46 97 116 116 114 105 98 117
+ 116 101 115 32 67 79 78 84 65 73 78 69 82 10 47 46 97 116 116 114 105
+ 98 117 116 101 115 47 100 97 116 97 98 97 115 101 32 67 79 78 84 65 73
+ 78 69 82 10 47 46 97 116 116 114 105 98 117 116 101 115 47 100 97 116
+ 97 98 97 115 101 47 46 83 65 70 95 68 98 80 114 111 112 115 32 68 65
+ 84 65 83 69 84 32 115 116 114 117 99 116 32 83 65 70 95 68 98 80 114
+ 111 112 115 123 105 110 116 32 109 97 103 105 99 59 99 104 97 114 32
+ 83 70 105 108 101 68 105 114 91 49 48 50 52 93 59 99 104 97 114 32 73
+ 109 112 111 114 116 70 105 108 101 91 49 48 50 52 93 59 105 110 116 32
+ 112 97 114 97 108 108 101 108 59 115 116 114 117 99 116 32 83 65 70 95
+ 86 101 114 115 105 111 110 73 110 102 111 123 105 110 116 32 118 109
+ 97 106 111 114 59 105 110 116 32 118 109 105 110 111 114 59 105 110
+ 116 32 114 101 108 59 99 104 97 114 32 97 110 110 111 116 91 49 48 93
+ 59 125 115 97 102 97 112 105 59 115 116 114 117 99 116 32 83 65 70 95
+ 86 101 114 115 105 111 110 73 110 102 111 123 105 110 116 32 118 109
+ 97 106 111 114 59 105 110 116 32 118 109 105 110 111 114 59 105 110
+ 116 32 114 101 108 59 99 104 97 114 32 97 110 110 111 116 91 49 48 93
+ 59 125 115 97 102 108 105 98 59 115 116 114 117 99 116 32 83 65 70 95
+ 86 101 114 115 105 111 110 73 110 102 111 123 105 110 116 32 118 109
+ 97 106 111 114 59 105 110 116 32 118 109 105 110 111 114 59 105 110
+ 116 32 114 101 108 59 99 104 97 114 32 97 110 110 111 116 91 49 48 93
+ 59 125 118 98 116 59 115 116 114 117 99 116 32 83 65 70 95 86 101 114
+ 115 105 111 110 73 110 102 111 123 105 110 116 32 118 109 97 106 111
+ 114 59 105 110 116 32 118 109 105 110 111 114 59 105 110 116 32 114
+ 101 108 59 99 104 97 114 32 97 110 110 111 116 91 49 48 93 59 125 100
+ 115 108 59 115 116 114 117 99 116 32 83 65 70 95 86 101 114 115 105
+ 111 110 73 110 102 111 123 105 110 116 32 118 109 97 106 111 114 59
+ 105 110 116 32 118 109 105 110 111 114 59 105 110 116 32 114 101 108
+ 59 99 104 97 114 32 97 110 110 111 116 91 49 48 93 59 125 104 100 102
+ 53 59 115 116 114 117 99 116 32 83 65 70 95 86 101 114 115 105 111 110
+ 73 110 102 111 123 105 110 116 32 118 109 97 106 111 114 59 105 110
+ 116 32 118 109 105 110 111 114 59 105 110 116 32 114 101 108 59 99 104
+ 97 114 32 97 110 110 111 116 91 49 48 93 59 125 109 112 105 59 105 110
+ 116 32 68 111 84 111 99 59 105 110 116 32 82 101 97 100 79 110 108 121
+ 59 105 110 116 32 67 108 111 98 98 101 114 59 105 110 116 32 79 83 77
+ 111 100 101 115 59 125 59 10 47 115 115 114 101 108 45 95 48 48 48 48
+ 32 68 65 84 65 83 69 84 32 105 110 116 59 10 47 115 115 114 101 108 45
+ 95 48 48 48 49 32 68 65 84 65 83 69 84 32 105 110 116 59 10 47 115 115
+ 114 101 108 45 95 48 48 48 50 32 68 65 84 65 83 69 84 32 105 110 116
+ 59 10 47 115 115 114 101 108 45 95 48 48 48 51 32 68 65 84 65 83 69 84
+ 32 105 110 116 59 10 47 115 115 114 101 108 45 95 48 48 48 52 32 68 65
+ 84 65 83 69 84 32 105 110 116 59 10 47 115 115 114 101 108 45 95 48 48
+ 48 53 32 68 65 84 65 83 69 84 32 105 110 116 59 10 47 115 115 114 101
+ 108 45 95 48 48 48 54 32 68 65 84 65 83 69 84 32 105 110 116 59 10 47
+ 115 115 114 101 108 45 95 48 48 48 55 32 68 65 84 65 83 69 84 32 105
+ 110 116 59 10 47 115 115 114 101 108 45 95 48 48 48 56 32 68 65 84 65
+ 83 69 84 32 105 110 116 59 10 47 115 115 114 101 108 45 95 48 48 48 57
+ 32 68 65 84 65 83 69 84 32 105 110 116 59 10 47 115 115 114 101 108 45
+ 95 48 48 49 48 32 68 65 84 65 83 69 84 32 105 110 116 59 10 47 115 115
+ 114 101 108 45 95 48 48 49 49 32 68 65 84 65 83 69 84 32 105 110 116
+ 59 10 47 115 115 114 101 108 45 95 48 48 49 50 32 68 65 84 65 83 69 84
+ 32 105 110 116 59 10 47 116 111 112 111 114 101 108 45 95 48 48 49 55
+ 32 68 65 84 65 83 69 84 32 105 110 116 59 10 47 116 111 112 111 114
+ 101 108 45 95 48 48 49 56 32 68 65 84 65 83 69 84 32 105 110 116 59 10
+ 47 116 111 112 111 114 101 108 45 95 48 48 49 57 32 68 65 84 65 83 69
+ 84 32 105 110 116 59 10 47 116 111 112 111 114 101 108 45 95 48 48 50
+ 48 32 68 65 84 65 83 69 84 32 105 110 116 59 10 47 102 105 101 108 100
+ 45 99 111 111 114 100 115 95 48 48 48 50 32 68 65 84 65 83 69 84 32
+ 102 108 111 97 116 59 10 47 102 105 101 108 100 45 100 105 115 116 114
+ 105 98 117 116 105 111 110 95 102 97 99 116 111 114 115 95 48 48 48 51
+ 32 68 65 84 65 83 69 84 32 102 108 111 97 116 59 10 47 102 105 101 108
+ 100 45 116 101 109 112 101 114 97 116 117 114 101 95 48 48 48 52 32 68
+ 65 84 65 83 69 84 32 102 108 111 97 116 59 10 47 102 105 101 108 100
+ 45 100 105 115 112 108 97 99 101 109 101 110 116 115 95 48 48 48 55 32
+ 68 65 84 65 83 69 84 32 102 108 111 97 116 59 10 47 102 105 101 108
+ 100 45 115 116 114 101 115 115 95 48 48 49 49 32 68 65 84 65 83 69 84
+ 32 102 108 111 97 116 59 10 47 102 105 101 108 100 45 116 101 109 112
+ 101 114 97 116 117 114 101 95 48 48 49 50 32 68 65 84 65 83 69 84 32
+ 102 108 111 97 116 59 10 47 102 105 101 108 100 45 112 114 101 115 115
+ 117 114 101 95 48 48 49 51 32 68 65 84 65 83 69 84 32 102 108 111 97
+ 116 59 10 47 66 108 111 98 32 68 65 84 65 83 69 84 32 115 116 114 117
+ 99 116 32 66 108 111 98 123 68 83 76 95 79 102 102 115 101 116 32 102
+ 105 108 101 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 100 97
+ 116 97 115 101 116 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32
+ 111 102 102 115 101 116 59 68 83 76 95 79 102 102 115 101 116 32 115
+ 116 114 105 100 101 59 68 83 76 95 79 102 102 115 101 116 32 99 111
+ 117 110 116 59 68 83 76 95 79 102 102 115 101 116 32 98 97 115 101 95
+ 105 100 59 68 83 76 95 79 102 102 115 101 116 32 110 117 109 95 114
+ 101 99 115 59 125 59 10 47 73 110 100 101 120 83 112 101 99 32 68 65
+ 84 65 83 69 84 32 115 116 114 117 99 116 32 73 110 100 101 120 83 112
+ 101 99 123 105 110 116 32 110 100 105 109 115 59 105 110 116 32 111
+ 114 105 103 105 110 115 91 56 93 59 105 110 116 32 115 105 122 101 115
+ 91 56 93 59 105 110 116 32 111 114 100 101 114 91 56 93 59 101 110 117
+ 109 32 73 110 100 101 120 84 121 112 101 123 86 66 84 95 73 78 68 69
+ 88 95 84 89 80 69 95 67 95 79 82 68 69 82 61 48 44 86 66 84 95 73 78
+ 68 69 88 95 84 89 80 69 95 70 95 79 82 68 69 82 61 49 44 86 66 84 95
+ 73 78 68 69 88 95 84 89 80 69 95 65 78 89 61 45 49 44 86 66 84 95 73
+ 78 68 69 88 95 84 89 80 69 95 73 78 86 65 76 73 68 61 45 50 44 86 66
+ 84 95 73 78 68 69 88 95 84 89 80 69 95 78 65 61 45 51 44 86 66 84 95
+ 73 78 68 69 88 95 84 89 80 69 95 85 78 75 78 79 87 78 61 45 52 125 105
+ 110 100 101 120 95 116 121 112 101 59 68 83 76 95 79 102 102 115 101
+ 116 32 98 97 115 101 95 105 100 59 68 83 76 95 79 102 102 115 101 116
+ 32 110 117 109 95 114 101 99 115 59 125 59 10 47 67 97 116 32 68 65 84
+ 65 83 69 84 32 115 116 114 117 99 116 32 67 97 116 123 99 104 97 114
+ 32 110 97 109 101 91 54 52 93 59 101 110 117 109 32 82 111 108 101 123
+ 86 66 84 95 82 79 76 69 95 84 79 80 79 61 48 44 86 66 84 95 82 79 76
+ 69 95 66 78 68 61 49 44 86 66 84 95 82 79 76 69 95 80 82 79 67 61 50
+ 44 86 66 84 95 82 79 76 69 95 68 79 77 78 61 51 44 86 66 84 95 82 79
+ 76 69 95 66 76 79 67 75 61 52 44 86 66 84 95 82 79 76 69 95 65 83 83
+ 89 61 53 44 86 66 84 95 82 79 76 69 95 77 65 84 61 54 44 86 66 84 95
+ 82 79 76 69 95 88 80 82 79 68 61 55 44 86 66 84 95 82 79 76 69 95 85
+ 83 69 82 68 61 56 44 86 66 84 95 82 79 76 69 95 65 78 89 61 45 49 44
+ 86 66 84 95 82 79 76 69 95 73 78 86 65 76 73 68 61 45 50 44 86 66 84
+ 95 82 79 76 69 95 78 65 61 45 51 44 86 66 84 95 82 79 76 69 95 85 78
+ 75 78 79 87 78 61 45 52 125 114 111 108 101 59 105 110 116 32 116 100
+ 105 109 59 68 83 76 95 79 102 102 115 101 116 32 98 97 115 101 95 105
+ 100 59 68 83 76 95 79 102 102 115 101 116 32 110 117 109 95 114 101 99
+ 115 59 125 59 10 47 83 101 116 32 68 65 84 65 83 69 84 32 115 116 114
+ 117 99 116 32 83 101 116 123 105 110 116 32 117 115 101 114 95 105 100
+ 59 99 104 97 114 32 110 97 109 101 91 54 52 93 59 105 110 116 32 116
+ 100 105 109 59 101 110 117 109 32 83 105 108 82 111 108 101 123 86 66
+ 84 95 83 82 79 76 69 95 84 73 77 69 61 48 44 86 66 84 95 83 82 79 76
+ 69 95 83 80 65 67 69 61 49 44 86 66 84 95 83 82 79 76 69 95 83 84 65
+ 84 69 61 50 44 86 66 84 95 83 82 79 76 69 95 80 65 82 65 77 61 51 44
+ 86 66 84 95 83 82 79 76 69 95 67 84 89 80 69 61 52 44 86 66 84 95 83
+ 82 79 76 69 95 65 84 89 80 69 61 53 44 86 66 84 95 83 82 79 76 69 95
+ 85 83 69 82 68 61 54 44 86 66 84 95 83 82 79 76 69 95 65 78 89 61 45
+ 49 44 86 66 84 95 83 82 79 76 69 95 73 78 86 65 76 73 68 61 45 50 44
+ 86 66 84 95 83 82 79 76 69 95 78 65 61 45 51 44 86 66 84 95 83 82 79
+ 76 69 95 85 78 75 78 79 87 78 61 45 52 125 115 114 111 108 101 59 68
+ 83 76 95 79 102 102 115 101 116 32 99 111 108 108 95 105 100 115 91 49
+ 54 93 59 105 110 116 32 105 115 95 116 111 112 59 105 110 116 32 105
+ 115 95 101 120 116 101 110 100 105 98 108 101 59 68 83 76 95 79 102
+ 102 115 101 116 32 100 102 108 116 95 99 111 111 114 100 102 108 100
+ 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 98 110 100 95 115
+ 101 116 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 98 97 115
+ 101 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 110 117 109 95
+ 114 101 99 115 59 125 59 10 47 67 111 108 108 101 99 116 105 111 110
+ 32 68 65 84 65 83 69 84 32 115 116 114 117 99 116 32 67 111 108 108
+ 101 99 116 105 111 110 123 68 83 76 95 79 102 102 115 101 116 32 99
+ 111 110 116 97 105 110 105 110 103 95 115 101 116 95 105 100 59 68 83
+ 76 95 79 102 102 115 101 116 32 99 97 116 95 105 100 59 101 110 117
+ 109 32 67 101 108 108 84 121 112 101 123 86 66 84 95 67 69 76 76 95 84
+ 89 80 69 95 78 79 78 69 61 48 44 86 66 84 95 67 69 76 76 95 84 89 80
+ 69 95 80 79 73 78 84 61 49 44 86 66 84 95 67 69 76 76 95 84 89 80 69
+ 95 76 73 78 69 61 50 44 86 66 84 95 67 69 76 76 95 84 89 80 69 95 84
+ 82 73 61 51 44 86 66 84 95 67 69 76 76 95 84 89 80 69 95 81 85 65 68
+ 61 52 44 86 66 84 95 67 69 76 76 95 84 89 80 69 95 84 69 84 61 53 44
+ 86 66 84 95 67 69 76 76 95 84 89 80 69 95 80 89 82 65 77 73 68 61 54
+ 44 86 66 84 95 67 69 76 76 95 84 89 80 69 95 80 82 73 83 77 61 55 44
+ 86 66 84 95 67 69 76 76 95 84 89 80 69 95 72 69 88 61 56 44 86 66 84
+ 95 67 69 76 76 95 84 89 80 69 95 77 73 88 69 68 61 57 44 86 66 84 95
+ 67 69 76 76 95 84 89 80 69 95 65 82 66 61 49 48 44 86 66 84 95 67 69
+ 76 76 95 84 89 80 69 95 49 66 65 76 76 61 49 49 44 86 66 84 95 67 69
+ 76 76 95 84 89 80 69 95 50 66 65 76 76 61 49 50 44 86 66 84 95 67 69
+ 76 76 95 84 89 80 69 95 51 66 65 76 76 61 49 51 44 86 66 84 95 67 69
+ 76 76 95 84 89 80 69 95 49 83 72 69 76 76 61 49 52 44 86 66 84 95 67
+ 69 76 76 95 84 89 80 69 95 50 83 72 69 76 76 61 49 53 44 86 66 84 95
+ 67 69 76 76 95 84 89 80 69 95 65 78 89 61 45 49 44 86 66 84 95 67 69
+ 76 76 95 84 89 80 69 95 73 78 86 65 76 73 68 61 45 50 44 86 66 84 95
+ 67 69 76 76 95 84 89 80 69 95 78 65 61 45 51 44 86 66 84 95 67 69 76
+ 76 95 84 89 80 69 95 85 78 75 78 79 87 78 61 45 52 125 99 101 108 108
+ 95 116 121 112 101 59 105 110 116 32 99 111 117 110 116 59 68 83 76 95
+ 79 102 102 115 101 116 32 105 110 100 101 120 105 110 103 95 105 100
+ 59 105 110 116 32 105 115 95 100 101 99 111 109 112 59 68 83 76 95 79
+ 102 102 115 101 116 32 109 101 109 98 101 114 115 95 98 108 111 98 95
+ 105 100 59 68 83 76 95 79 102 102 115 101 116 32 98 97 115 101 95 105
+ 100 59 68 83 76 95 79 102 102 115 101 116 32 110 117 109 95 114 101 99
+ 115 59 125 59 10 47 82 101 108 97 116 105 111 110 32 68 65 84 65 83 69
+ 84 32 115 116 114 117 99 116 32 82 101 108 97 116 105 111 110 123 68
+ 83 76 95 79 102 102 115 101 116 32 115 117 98 95 105 100 59 68 83 76
+ 95 79 102 102 115 101 116 32 115 117 98 95 99 97 116 95 105 100 59 68
+ 83 76 95 79 102 102 115 101 116 32 115 117 98 95 100 101 99 111 109
+ 112 95 99 97 116 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32
+ 115 117 112 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 115
+ 117 112 95 99 97 116 95 105 100 59 68 83 76 95 79 102 102 115 101 116
+ 32 115 117 112 95 100 101 99 111 109 112 95 99 97 116 95 105 100 59
+ 101 110 117 109 32 82 101 108 75 105 110 100 123 86 66 84 95 82 69 76
+ 75 73 78 68 95 83 85 66 83 69 84 61 48 44 86 66 84 95 82 69 76 75 73
+ 78 68 95 83 85 80 83 69 84 61 49 44 86 66 84 95 82 69 76 75 73 78 68
+ 95 66 79 85 78 68 61 50 44 86 66 84 95 82 69 76 75 73 78 68 95 80 69
+ 82 77 85 84 69 61 51 44 86 66 84 95 82 69 76 75 73 78 68 95 78 69 73
+ 71 72 66 79 82 61 52 44 86 66 84 95 82 69 76 75 73 78 68 95 67 79 80
+ 89 61 53 44 86 66 84 95 82 69 76 75 73 78 68 95 69 81 85 65 76 61 54
+ 44 86 66 84 95 82 69 76 75 73 78 68 95 65 78 89 61 45 49 44 86 66 84
+ 95 82 69 76 75 73 78 68 95 73 78 86 65 76 73 68 61 45 50 44 86 66 84
+ 95 82 69 76 75 73 78 68 95 78 65 61 45 51 44 86 66 84 95 82 69 76 75
+ 73 78 68 95 85 78 75 78 79 87 78 61 45 52 125 107 105 110 100 59 101
+ 110 117 109 32 82 101 108 82 101 112 123 86 66 84 95 82 69 76 82 69 80
+ 95 73 68 69 78 84 73 84 89 61 48 44 86 66 84 95 82 69 76 82 69 80 95
+ 72 76 73 83 84 61 49 44 86 66 84 95 82 69 76 82 69 80 95 84 76 73 83
+ 84 61 50 44 86 66 84 95 82 69 76 82 69 80 95 84 76 73 83 84 95 49 61
+ 51 44 86 66 84 95 82 69 76 82 69 80 95 69 76 73 83 84 61 52 44 86 66
+ 84 95 82 69 76 82 69 80 95 83 84 82 85 67 84 85 82 69 68 61 53 44 86
+ 66 84 95 82 69 76 82 69 80 95 85 78 83 84 82 85 67 84 85 82 69 68 61
+ 54 44 86 66 84 95 82 69 76 82 69 80 95 65 82 66 73 84 82 65 82 89 95
+ 82 61 55 44 86 66 84 95 82 69 76 82 69 80 95 65 82 66 73 84 82 65 82
+ 89 95 68 82 61 56 44 86 66 84 95 82 69 76 82 69 80 95 65 78 89 61 45
+ 49 44 86 66 84 95 82 69 76 82 69 80 95 73 78 86 65 76 73 68 61 45 50
+ 44 86 66 84 95 82 69 76 82 69 80 95 78 65 61 45 51 44 86 66 84 95 82
+ 69 76 82 69 80 95 85 78 75 78 79 87 78 61 45 52 125 114 101 112 95 116
+ 121 112 101 59 68 83 76 95 79 102 102 115 101 116 32 100 95 98 108 111
+ 98 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 114 95 98 108
+ 111 98 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 98 97 115
+ 101 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 110 117 109 95
+ 114 101 99 115 59 125 59 10 47 70 105 101 108 100 84 109 112 108 32 68
+ 65 84 65 83 69 84 32 115 116 114 117 99 116 32 70 105 101 108 100 84
+ 109 112 108 123 99 104 97 114 32 110 97 109 101 91 54 52 93 59 68 83
+ 76 95 79 102 102 115 101 116 32 98 97 115 101 95 115 112 97 99 101 95
+ 105 100 59 101 110 117 109 32 65 108 103 101 98 114 97 105 99 84 121
+ 112 101 123 86 66 84 95 65 76 71 69 66 82 65 73 67 95 84 89 80 69 95
+ 67 79 78 83 84 65 78 84 61 48 44 86 66 84 95 65 76 71 69 66 82 65 73
+ 67 95 84 89 80 69 95 67 79 77 80 79 78 69 78 84 61 49 44 86 66 84 95
+ 65 76 71 69 66 82 65 73 67 95 84 89 80 69 95 83 67 65 76 65 82 61 50
+ 44 86 66 84 95 65 76 71 69 66 82 65 73 67 95 84 89 80 69 95 86 69 67
+ 84 79 82 61 51 44 86 66 84 95 65 76 71 69 66 82 65 73 67 95 84 89 80
+ 69 95 84 69 78 83 79 82 61 52 44 86 66 84 95 65 76 71 69 66 82 65 73
+ 67 95 84 89 80 69 95 83 89 77 95 84 69 78 83 79 82 61 53 44 86 66 84
+ 95 65 76 71 69 66 82 65 73 67 95 84 89 80 69 95 70 73 69 76 68 61 54
+ 44 86 66 84 95 65 76 71 69 66 82 65 73 67 95 84 89 80 69 95 83 84 65
+ 84 69 61 55 44 86 66 84 95 65 76 71 69 66 82 65 73 67 95 84 89 80 69
+ 95 84 85 80 76 69 61 56 44 86 66 84 95 65 76 71 69 66 82 65 73 67 95
+ 84 89 80 69 95 65 78 89 61 45 49 44 86 66 84 95 65 76 71 69 66 82 65
+ 73 67 95 84 89 80 69 95 73 78 86 65 76 73 68 61 45 50 44 86 66 84 95
+ 65 76 71 69 66 82 65 73 67 95 84 89 80 69 95 78 65 61 45 51 44 86 66
+ 84 95 65 76 71 69 66 82 65 73 67 95 84 89 80 69 95 85 78 75 78 79 87
+ 78 61 45 52 125 97 108 103 95 116 121 112 101 59 101 110 117 109 32 66
+ 97 115 105 115 84 121 112 101 123 86 66 84 95 66 65 83 73 83 95 84 89
+ 80 69 95 85 78 73 84 89 61 48 44 86 66 84 95 66 65 83 73 83 95 84 89
+ 80 69 95 67 65 82 84 69 83 73 65 78 61 49 44 86 66 84 95 66 65 83 73
+ 83 95 84 89 80 69 95 83 80 72 69 82 73 67 65 76 61 50 44 86 66 84 95
+ 66 65 83 73 83 95 84 89 80 69 95 67 89 76 73 78 68 82 73 67 65 76 61
+ 51 44 86 66 84 95 66 65 83 73 83 95 84 89 80 69 95 85 80 80 69 82 95
+ 84 82 73 61 52 44 86 66 84 95 66 65 83 73 83 95 84 89 80 69 95 70 79
+ 85 82 73 69 82 61 53 44 86 66 84 95 66 65 83 73 83 95 84 89 80 69 95
+ 86 65 82 73 65 66 76 69 61 54 44 86 66 84 95 66 65 83 73 83 95 84 89
+ 80 69 95 65 78 89 61 45 49 44 86 66 84 95 66 65 83 73 83 95 84 89 80
+ 69 95 73 78 86 65 76 73 68 61 45 50 44 86 66 84 95 66 65 83 73 83 95
+ 84 89 80 69 95 78 65 61 45 51 44 86 66 84 95 66 65 83 73 83 95 84 89
+ 80 69 95 85 78 75 78 79 87 78 61 45 52 125 98 97 115 105 115 59 68 83
+ 76 95 79 102 102 115 101 116 32 113 117 97 110 116 105 116 121 95 105
+ 100 59 105 110 116 32 110 117 109 95 99 111 109 112 115 59 68 83 76 95
+ 79 102 102 115 101 116 32 102 116 109 112 108 95 105 100 115 95 98 108
+ 111 98 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 98 97 115
+ 101 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 110 117 109 95
+ 114 101 99 115 59 125 59 10 47 70 105 101 108 100 32 68 65 84 65 83 69
+ 84 32 115 116 114 117 99 116 32 70 105 101 108 100 123 68 83 76 95 79
+ 102 102 115 101 116 32 102 116 109 112 108 95 105 100 59 99 104 97 114
+ 32 110 97 109 101 91 54 52 93 59 68 83 76 95 79 102 102 115 101 116 32
+ 117 110 105 116 115 95 105 100 59 68 83 76 95 79 102 102 115 101 116
+ 32 115 116 111 114 97 103 101 95 100 101 99 111 109 112 95 99 97 116
+ 95 105 100 59 101 110 117 109 32 73 110 116 101 114 108 101 97 118 101
+ 123 86 66 84 95 73 78 84 69 82 76 69 65 86 69 95 67 79 77 80 79 78 69
+ 78 84 61 48 44 86 66 84 95 73 78 84 69 82 76 69 65 86 69 95 86 69 67
+ 84 79 82 61 49 44 86 66 84 95 73 78 84 69 82 76 69 65 86 69 95 73 78
+ 68 69 80 69 78 68 69 78 84 61 50 44 86 66 84 95 73 78 84 69 82 76 69
+ 65 86 69 95 78 79 78 69 61 51 44 86 66 84 95 73 78 84 69 82 76 69 65
+ 86 69 95 65 78 89 61 45 49 44 86 66 84 95 73 78 84 69 82 76 69 65 86
+ 69 95 73 78 86 65 76 73 68 61 45 50 44 86 66 84 95 73 78 84 69 82 76
+ 69 65 86 69 95 78 65 61 45 51 44 86 66 84 95 73 78 84 69 82 76 69 65
+ 86 69 95 85 78 75 78 79 87 78 61 45 52 125 99 111 109 112 95 105 110
+ 116 108 118 59 68 83 76 95 79 102 102 115 101 116 32 105 110 100 101
+ 120 105 110 103 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32
+ 100 111 102 95 97 115 115 111 99 95 99 97 116 95 105 100 59 105 110
+ 116 32 97 115 115 111 99 95 114 97 116 105 111 59 68 83 76 95 79 102
+ 102 115 101 116 32 101 118 97 108 95 100 101 99 111 109 112 95 99 97
+ 116 95 105 100 59 101 110 117 109 32 69 118 97 108 70 117 110 99 123
+ 86 66 84 95 69 86 65 76 95 70 85 78 67 95 67 79 78 83 84 65 78 84 61
+ 48 44 86 66 84 95 69 86 65 76 95 70 85 78 67 95 85 78 73 70 79 82 77
+ 61 49 44 86 66 84 95 69 86 65 76 95 70 85 78 67 95 80 87 67 79 78 83
+ 84 61 50 44 86 66 84 95 69 86 65 76 95 70 85 78 67 95 80 87 76 73 78
+ 69 65 82 61 51 44 86 66 84 95 69 86 65 76 95 70 85 78 67 95 65 78 89
+ 61 45 49 44 86 66 84 95 69 86 65 76 95 70 85 78 67 95 73 78 86 65 76
+ 73 68 61 45 50 44 86 66 84 95 69 86 65 76 95 70 85 78 67 95 78 65 61
+ 45 51 44 86 66 84 95 69 86 65 76 95 70 85 78 67 95 85 78 75 78 79 87
+ 78 61 45 52 125 101 118 97 108 95 102 117 110 99 59 105 110 116 32 105
+ 115 95 104 111 109 111 103 101 110 101 111 117 115 59 105 110 116 32
+ 105 115 95 99 111 111 114 100 95 102 105 101 108 100 59 68 83 76 95 79
+ 102 102 115 101 116 32 99 111 109 112 95 105 100 115 95 98 108 111 98
+ 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 99 111 109 112 95
+ 111 114 100 101 114 95 98 108 111 98 95 105 100 59 68 83 76 95 79 102
+ 102 115 101 116 32 118 98 97 115 105 115 95 98 108 111 98 95 105 100
+ 59 68 83 76 95 79 102 102 115 101 116 32 100 111 102 95 98 108 111 98
+ 95 105 100 59 68 83 76 95 79 102 102 115 101 116 32 98 97 115 101 95
+ 105 100 59 68 83 76 95 79 102 102 115 101 116 32 110 117 109 95 114
+ 101 99 115 59 125 59 10 47 109 101 116 97 98 108 111 98 48 48 48 48 48
+ 46 105 110 100 101 120 32 68 65 84 65 83 69 84 32 115 116 114 117 99
+ 116 32 73 110 100 101 120 80 97 105 114 123 68 83 76 95 79 102 102 115
+ 101 116 32 105 110 100 101 120 59 105 110 116 32 108 101 110 103 116
+ 104 59 125 59 10 47 109 101 116 97 98 108 111 98 48 48 48 48 48 46 98
+ 108 111 98 32 68 65 84 65 83 69 84 32 68 83 76 95 79 102 102 115 101
+ 116 59 10
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Blob" OBJ-XID="/Blob" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="24" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="file_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="dataset_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="offset">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="stride">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="count">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_recs">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ -1 1 0 1 9 0 1 -1 2 0 1 4 1 1 -1 3 0 1 7 2 1 -1 4 0 1 4 3 1
+ -1 5 0 1 5 4 1 -1 6 0 1 3 5 1 -1 7 0 1 4 6 1 -1 8 0 1 1 7 1
+ -1 9 0 1 10 8 1 -1 10 0 1 4 9 1 -1 11 0 1 3 10 1 -1 12 0 1 5 11 1
+ -1 13 0 1 5 12 1 -1 14 0 1 16 13 1 -1 15 0 1 9 14 1 -1 16 0 1 4 15 1
+ -1 17 0 1 16 16 1 -1 18 0 1 36 17 1 -1 19 0 1 5 18 1 -1 20 0 1 5 19 1
+ -1 21 0 1 36 20 1 -1 22 0 1 12 21 1 -1 23 0 1 7 22 1 -1 24 0 1 2 23 1
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Cat" OBJ-XID="/Cat" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="name">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="64" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="role">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="13">
+ <EnumElement>
+ TOPO
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ BND
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ PROC
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ DOMN
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ BLOCK
+ </EnumElement>
+ <EnumValue>
+ 4
+ </EnumValue>
+ <EnumElement>
+ ASSY
+ </EnumElement>
+ <EnumValue>
+ 5
+ </EnumValue>
+ <EnumElement>
+ MAT
+ </EnumElement>
+ <EnumValue>
+ 6
+ </EnumValue>
+ <EnumElement>
+ XPROD
+ </EnumElement>
+ <EnumValue>
+ 7
+ </EnumValue>
+ <EnumElement>
+ USERD
+ </EnumElement>
+ <EnumValue>
+ 8
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="tdim">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_recs">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ "nodes" TOPO 0 0 1 "elems" TOPO 2 1 1 "edges" USERD 1 2 1
+ "blocks" BLOCK 2 3 1 "side_sets" USERD 1 4 1
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Collection" OBJ-XID="/Collection" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="24" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="containing_set_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="cat_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="cell_type">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="20">
+ <EnumElement>
+ NONE
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ POINT
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ LINE
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ TRI
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ QUAD
+ </EnumElement>
+ <EnumValue>
+ 4
+ </EnumValue>
+ <EnumElement>
+ TET
+ </EnumElement>
+ <EnumValue>
+ 5
+ </EnumValue>
+ <EnumElement>
+ PYRAMID
+ </EnumElement>
+ <EnumValue>
+ 6
+ </EnumValue>
+ <EnumElement>
+ PRISM
+ </EnumElement>
+ <EnumValue>
+ 7
+ </EnumValue>
+ <EnumElement>
+ HEX
+ </EnumElement>
+ <EnumValue>
+ 8
+ </EnumValue>
+ <EnumElement>
+ MIXED
+ </EnumElement>
+ <EnumValue>
+ 9
+ </EnumValue>
+ <EnumElement>
+ ARB
+ </EnumElement>
+ <EnumValue>
+ 10
+ </EnumValue>
+ <EnumElement>
+ 1BALL
+ </EnumElement>
+ <EnumValue>
+ 11
+ </EnumValue>
+ <EnumElement>
+ 2BALL
+ </EnumElement>
+ <EnumValue>
+ 12
+ </EnumValue>
+ <EnumElement>
+ 3BALL
+ </EnumElement>
+ <EnumValue>
+ 13
+ </EnumValue>
+ <EnumElement>
+ 1SHELL
+ </EnumElement>
+ <EnumValue>
+ 14
+ </EnumValue>
+ <EnumElement>
+ 2SHELL
+ </EnumElement>
+ <EnumValue>
+ 15
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="count">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="indexing_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="is_decomp">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="members_blob_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_recs">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 POINT 18 0 0 -2 0 1 0 1 MIXED 12 1 1 -2 1 1 0 3 NONE 4 2 1 0 2 1
+ 0 4 NONE 2 3 0 1 3 1 1 0 POINT 9 4 0 -2 4 1 1 1 QUAD 4 5 1 -2 5 1
+ 1 3 NONE 1 6 1 2 6 1 2 0 POINT 7 7 0 -2 7 1 2 1 MIXED 4 8 1 -2 8 1
+ 2 3 NONE 2 9 1 3 9 1 3 0 POINT 10 10 0 -2 10 1 3 1 QUAD 4 11 1 -2 11 1
+ 3 3 NONE 1 12 1 4 12 1 4 0 POINT 3 13 0 -2 13 1
+ 4 2 LINE 2 14 1 -2 14 1 5 0 POINT 5 15 0 -2 15 1
+ 5 2 LINE 4 16 1 -2 16 1 6 0 POINT 5 17 1 -2 17 1
+ 7 0 POINT 5 18 0 -2 18 1 7 1 TRI 3 19 1 -2 19 1 7 3 NONE 1 20 1 5 20 1
+ 8 0 POINT 4 21 0 -2 21 1 8 1 QUAD 1 22 1 -2 22 1 8 3 NONE 1 23 1 6 23 1
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Field" OBJ-XID="/Field" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="14" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="ftmpl_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="name">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="64" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="units_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="storage_decomp_cat_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="comp_intlv">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="8">
+ <EnumElement>
+ COMPONENT
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ VECTOR
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ INDEPENDENT
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ NONE
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="indexing_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="dof_assoc_cat_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="assoc_ratio">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="eval_decomp_cat_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="eval_func">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="8">
+ <EnumElement>
+ CONSTANT
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ UNIFORM
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ PWCONST
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ PWLINEAR
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="is_homogeneous">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="is_coord_field">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="comp_ids_blob_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="comp_order_blob_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="vbasis_blob_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="dof_blob_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_recs">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 1 "X" -7 -2 NONE -2 0 1 0 PWLINEAR 1 0 -2 -2 -2 -2 0 1
+ 1 "Y" -7 -2 NONE -2 0 1 0 PWLINEAR 1 0 -2 -2 -2 -2 1 1
+ 0 "coords" -7 -2 VECTOR -2 0 1 0 PWLINEAR 1 1 7 -2 -2 17 2 1
+ 2 "distribution factors" -5 -2 NONE -2 0 1 0 PWLINEAR 1 0 -2 -2 -2 18 3 1
+ 3 "temperature" -7 -2 NONE -2 0 1 0 PWLINEAR 1 0 -2 -2 -2 19 4 1
+ 1 "dX" -7 -2 NONE -2 0 1 0 PWLINEAR 1 0 -2 -2 -2 -2 5 1
+ 1 "dY" -7 -2 NONE -2 0 1 0 PWLINEAR 1 0 -2 -2 -2 -2 6 1
+ 0 "displacements" -7 -2 VECTOR -2 0 1 0 PWLINEAR 1 0 8 -2 -2 20 7 1
+ 5 "Sx" -7 -2 NONE -2 1 1 1 PWCONST 1 0 -2 -2 -2 -2 8 1
+ 5 "Sy" -7 -2 NONE -2 1 1 1 PWCONST 1 0 -2 -2 -2 -2 9 1
+ 5 "Sxy" -7 -2 NONE -2 1 1 1 PWCONST 1 0 -2 -2 -2 -2 10 1
+ 4 "stress" -7 -2 VECTOR -2 1 1 1 PWCONST 1 0 9 -2 -2 21 11 1
+ 6 "temperature" -7 -2 NONE -2 0 1 0 PWLINEAR 1 0 -2 -2 -2 22 12 1
+ 7 "pressure" -7 -2 NONE -2 2 1 2 PWCONST 1 0 -2 -2 -2 23 13 1
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="FieldTmpl" OBJ-XID="/FieldTmpl" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="8" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="name">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="64" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_space_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="alg_type">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="13">
+ <EnumElement>
+ CONSTANT
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ COMPONENT
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ SCALAR
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ VECTOR
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ TENSOR
+ </EnumElement>
+ <EnumValue>
+ 4
+ </EnumValue>
+ <EnumElement>
+ SYM_TENSOR
+ </EnumElement>
+ <EnumValue>
+ 5
+ </EnumValue>
+ <EnumElement>
+ FIELD
+ </EnumElement>
+ <EnumValue>
+ 6
+ </EnumValue>
+ <EnumElement>
+ STATE
+ </EnumElement>
+ <EnumValue>
+ 7
+ </EnumValue>
+ <EnumElement>
+ TUPLE
+ </EnumElement>
+ <EnumValue>
+ 8
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="basis">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="11">
+ <EnumElement>
+ UNITY
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ CARTESIAN
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ SPHERICAL
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ CYLINDRICAL
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ UPPER_TRI
+ </EnumElement>
+ <EnumValue>
+ 4
+ </EnumValue>
+ <EnumElement>
+ FOURIER
+ </EnumElement>
+ <EnumValue>
+ 5
+ </EnumValue>
+ <EnumElement>
+ VARIABLE
+ </EnumElement>
+ <EnumValue>
+ 6
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="quantity_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_comps">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="ftmpl_ids_blob_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_recs">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ "coordinate_tmpl" 0 VECTOR CARTESIAN 2 2 0 0 1
+ "coordinate_tmpl_component" 0 SCALAR CARTESIAN 2 1 0 1 1
+ "distrib_factors_tmpl" 5 SCALAR UNITY -5 1 0 2 1
+ "temp_on_ns1_tmpl" 6 SCALAR UNITY 0 1 0 3 1
+ "stress_on_cell_1_tmpl" 1 SYM_TENSOR UPPER_TRI 0 3 0 4 1
+ "stress_on_cell_1_tmpl_component" 1 SCALAR UPPER_TRI 0 1 0 5 1
+ "temp_on_cell_2_tmpl" 2 SCALAR UNITY 1 1 0 6 1
+ "pressure_on_ss1" 4 SCALAR UNITY 0 1 0 7 1
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="IndexSpec" OBJ-XID="/IndexSpec" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="24" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="ndims">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="origins">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="_">
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="8" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="sizes">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="_">
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="8" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="order">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="_">
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="8" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="index_type">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="6">
+ <EnumElement>
+ C_ORDER
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ F_ORDER
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_recs">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 1 0 0 0 0 0 0 0 0 18 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 0 1
+ 1 0 0 0 0 0 0 0 0 12 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 1 1
+ 1 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 2 1
+ 1 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 3 1
+ 1 0 0 0 0 0 0 0 0 9 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 4 1
+ 1 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 5 1
+ 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 6 1
+ 1 0 0 0 0 0 0 0 0 7 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 7 1
+ 1 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 8 1
+ 1 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 9 1
+ 1 0 0 0 0 0 0 0 0 10 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 10 1
+ 1 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 11 1
+ 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 12 1
+ 1 0 0 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 13 1
+ 1 0 0 0 0 0 0 0 0 2 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 14 1
+ 1 0 0 0 0 0 0 0 0 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 15 1
+ 1 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 16 1
+ 1 0 0 0 0 0 0 0 0 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 17 1
+ 1 0 0 0 0 0 0 0 0 5 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 18 1
+ 1 0 0 0 0 0 0 0 0 3 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 19 1
+ 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 20 1
+ 1 0 0 0 0 0 0 0 0 4 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 21 1
+ 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 22 1
+ 1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 C_ORDER 23 1
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Relation" OBJ-XID="/Relation" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="21" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="sub_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="sub_cat_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="sub_decomp_cat_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="sup_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="sup_cat_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="sup_decomp_cat_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="kind">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="11">
+ <EnumElement>
+ SUBSET
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ SUPSET
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ BOUND
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ PERMUTE
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ NEIGHBOR
+ </EnumElement>
+ <EnumValue>
+ 4
+ </EnumValue>
+ <EnumElement>
+ COPY
+ </EnumElement>
+ <EnumValue>
+ 5
+ </EnumValue>
+ <EnumElement>
+ EQUAL
+ </EnumElement>
+ <EnumValue>
+ 6
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="rep_type">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="13">
+ <EnumElement>
+ IDENTITY
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ HLIST
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ TLIST
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ TLIST_1
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ ELIST
+ </EnumElement>
+ <EnumValue>
+ 4
+ </EnumValue>
+ <EnumElement>
+ STRUCTURED
+ </EnumElement>
+ <EnumValue>
+ 5
+ </EnumValue>
+ <EnumElement>
+ UNSTRUCTURED
+ </EnumElement>
+ <EnumValue>
+ 6
+ </EnumValue>
+ <EnumElement>
+ ARBITRARY_R
+ </EnumElement>
+ <EnumValue>
+ 7
+ </EnumValue>
+ <EnumElement>
+ ARBITRARY_DR
+ </EnumElement>
+ <EnumValue>
+ 8
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="d_blob_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="r_blob_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_recs">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 1 0 -2 0 0 -2 EQUAL TLIST -2 0 0 1 1 1 -2 0 1 -2 EQUAL TLIST -2 1 1 1
+ 2 0 -2 0 0 -2 EQUAL TLIST -2 2 2 1 2 1 -2 0 1 -2 EQUAL TLIST -2 3 3 1
+ 7 0 -2 0 0 -2 EQUAL TLIST -2 4 4 1 7 1 -2 0 1 -2 EQUAL TLIST -2 5 5 1
+ 8 0 -2 0 0 -2 EQUAL TLIST -2 6 6 1 8 1 -2 0 1 -2 EQUAL TLIST -2 7 7 1
+ 3 0 -2 0 0 -2 EQUAL TLIST -2 8 8 1 3 1 -2 0 1 -2 EQUAL TLIST -2 9 9 1
+ 4 0 -2 0 0 -2 EQUAL TLIST -2 10 10 1
+ 5 0 -2 0 0 -2 EQUAL TLIST -2 11 11 1
+ 6 0 -2 0 0 -2 EQUAL TLIST -2 12 12 1
+ 1 3 -2 0 3 -2 EQUAL TLIST -2 -2 13 1
+ 7 3 -2 0 3 -2 EQUAL TLIST -2 -2 14 1
+ 8 3 -2 0 3 -2 EQUAL TLIST -2 -2 15 1
+ 3 3 -2 0 3 -2 EQUAL TLIST -2 -2 16 1
+ 1 1 -2 0 0 -2 SUBSET UNSTRUCTURED -2 13 17 1
+ 7 1 -2 0 0 -2 SUBSET UNSTRUCTURED -2 14 18 1
+ 8 1 -2 0 0 -2 SUBSET UNSTRUCTURED -2 15 19 1
+ 3 1 -2 0 0 -2 SUBSET UNSTRUCTURED -2 16 20 1
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Set" OBJ-XID="/Set" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="1024" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="9" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="user_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="name">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="64" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="tdim">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="srole">
+ <DataType>
+ <AtomicType>
+ <EnumType Nelems="11">
+ <EnumElement>
+ TIME
+ </EnumElement>
+ <EnumValue>
+ 0
+ </EnumValue>
+ <EnumElement>
+ SPACE
+ </EnumElement>
+ <EnumValue>
+ 1
+ </EnumValue>
+ <EnumElement>
+ STATE
+ </EnumElement>
+ <EnumValue>
+ 2
+ </EnumValue>
+ <EnumElement>
+ PARAM
+ </EnumElement>
+ <EnumValue>
+ 3
+ </EnumValue>
+ <EnumElement>
+ CTYPE
+ </EnumElement>
+ <EnumValue>
+ 4
+ </EnumValue>
+ <EnumElement>
+ ATYPE
+ </EnumElement>
+ <EnumValue>
+ 5
+ </EnumValue>
+ <EnumElement>
+ USERD
+ </EnumElement>
+ <EnumValue>
+ 6
+ </EnumValue>
+ <EnumElement>
+ ANY
+ </EnumElement>
+ <EnumValue>
+ -1
+ </EnumValue>
+ <EnumElement>
+ INVALID
+ </EnumElement>
+ <EnumValue>
+ -2
+ </EnumValue>
+ <EnumElement>
+ NA
+ </EnumElement>
+ <EnumValue>
+ -3
+ </EnumValue>
+ <EnumElement>
+ UNKNOWN
+ </EnumElement>
+ <EnumValue>
+ -4
+ </EnumValue>
+ </EnumType>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="coll_ids">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="_">
+ <DataType>
+ <ArrayType Ndims="1">
+ <ArrayDimension DimSize="16" DimPerm="0"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="is_top">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="is_extendible">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="dflt_coordfld_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="bnd_set_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="base_id">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="num_recs">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 "TOP_CELL" 2 SPACE 0 1 -2 2 3 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 1 0 -2 -2 0 1
+ 0 "CELL_1" 2 SPACE 4 5 -2 6 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 0 0 -2 -2 1 1
+ 0 "CELL_2" 2 SPACE 7 8 -2 9 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 0 0 -2 -2 2 1
+ 0 "CELL_3" 2 SPACE 10 11 -2 12 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 0 0 -2 -2 3 1
+ 0 "SIDE_SET_1" 1 SPACE 13 -2 14 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 0 0 -2 -2 4 1
+ 0 "SIDE_SET_2" 1 SPACE 15 -2 16 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 0 0 -2 -2 5 1
+ 0 "NODE_SET_1" 0 SPACE 17 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 0 0 -2 -2 6 1
+ 0 "CELL_2_TRIS" 2 SPACE 18 19 -2 20 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 0 0 -2 -2 7 1
+ 0 "CELL_2_QUADS" 2 SPACE 21 22 -2 23 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 0 0 -2 -2 8 1
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="field-coords_0002" OBJ-XID="/field-coords_0002" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="36" MaxDimSize="36"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0 4 1 4 2 4 2.5 4 0 3 1 3 2 3 2.5 3 0 2 1 2 2 2 2.5 2 0 1 2 1 2.5 1 0
+ 0 2 0 2.5 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="field-displacements_0007" OBJ-XID="/field-displacements_0007" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="36" MaxDimSize="36"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25
+ 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25
+ 0.25 0.25 0.25 0.25 0.25 0.25 0.25 0.25
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="field-distribution_factors_0003" OBJ-XID="/field-distribution_factors_0003" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 4 3 2 1 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="field-pressure_0013" OBJ-XID="/field-pressure_0013" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="2" MaxDimSize="2"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 45 55
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="field-stress_0011" OBJ-XID="/field-stress_0011" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="12" MaxDimSize="12"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 0.5 0.25 0.5 0.5 0.25 0.5 0.5 0.25 0.5 0.5 0.25 0.5
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="field-temperature_0004" OBJ-XID="/field-temperature_0004" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 100 150 150 100 75
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="field-temperature_0012" OBJ-XID="/field-temperature_0012" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="7" MaxDimSize="7"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="BE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 75 95 120 80 115 85 110
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="metablob00000.blob" OBJ-XID="/metablob00000.blob" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="32" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="19" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 1 7 8 3 -2 -2 -2 -2 -2 -2 -2 -2 0 1 5 6 8 9 10
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="metablob00000.index" OBJ-XID="/metablob00000.index" Parents="root">
+ <StorageLayout>
+ <ChunkedLayout Ndims="1">
+ <ChunkDimension DimSize="32" />
+ </ChunkedLayout>
+ </StorageLayout>
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="UNLIMITED"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="index">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="length">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 4 4 2 6 1 7 2 9 1 10 1 11 1 12 2 14 2 16 3
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0000" OBJ-XID="/ssrel-_0000" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="9" MaxDimSize="9"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 1 2 3 5 6 7 9 10 11
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0001" OBJ-XID="/ssrel-_0001" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 1 2 4 5
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0002" OBJ-XID="/ssrel-_0002" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="7" MaxDimSize="7"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 9 10 11 13 14 16 17
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0003" OBJ-XID="/ssrel-_0003" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 7 8 9 11
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0004" OBJ-XID="/ssrel-_0004" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 9 10 11 13 14
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0005" OBJ-XID="/ssrel-_0005" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 7 8 9
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0006" OBJ-XID="/ssrel-_0006" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 13 14 16 17
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0007" OBJ-XID="/ssrel-_0007" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 11
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0008" OBJ-XID="/ssrel-_0008" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 3 4 7 8 11 12 14 15 17 18
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0009" OBJ-XID="/ssrel-_0009" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 3 6 10 12
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0010" OBJ-XID="/ssrel-_0010" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 9 10 11
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0011" OBJ-XID="/ssrel-_0011" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 1 5 9 13 16
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="ssrel-_0012" OBJ-XID="/ssrel-_0012" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="5" MaxDimSize="5"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 4 8 12 15 18
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="toporel-_0017" OBJ-XID="/toporel-_0017" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="16" MaxDimSize="16"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 1 2 6 5 2 3 7 6 5 6 10 9 6 7 11 10
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="toporel-_0018" OBJ-XID="/toporel-_0018" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="9" MaxDimSize="9"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 9 10 13 10 14 13 10 11 14
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="toporel-_0019" OBJ-XID="/toporel-_0019" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 13 14 17 16
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="toporel-_0020" OBJ-XID="/toporel-_0020" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="16" MaxDimSize="16"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ 3 4 8 7 7 8 12 11 11 12 15 14 14 15 18 17
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Group Name=".attributes" OBJ-XID="/.attributes" Parents="/" >
+ <Group Name="database" OBJ-XID="/.attributes/database" Parents="/.attributes" >
+ <Dataset Name=".SAF_DbProps" OBJ-XID="/.attributes/database/.SAF_DbProps" Parents="/.attributes/database">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="1" MaxDimSize="1"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="magic">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="SFileDir">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="1024" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="ImportFile">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="1024" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="parallel">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="safapi">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="vmajor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="vminor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="rel">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="annot">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="10" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="saflib">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="vmajor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="vminor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="rel">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="annot">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="10" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="vbt">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="vmajor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="vminor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="rel">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="annot">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="10" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="dsl">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="vmajor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="vminor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="rel">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="annot">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="10" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="hdf5">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="vmajor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="vminor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="rel">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="annot">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="10" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="mpi">
+ <DataType>
+ <CompoundType>
+ <Field FieldName="vmajor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="vminor">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="rel">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="annot">
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="10" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ </Field>
+ <Field FieldName="DoToc">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="ReadOnly">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="Clobber">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="OSModes">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ -604320037 "." "don't import" 1 0 0 0 "none" 0 1 0 "devel" 1 3 0 "" 0 0 0 "none" 1 2 1 "" 1 2 0 "" 1 0 1 0
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tslink.h5.xml b/tools/testfiles/tslink.h5.xml
new file mode 100644
index 0000000..6dadce5
--- /dev/null
+++ b/tools/testfiles/tslink.h5.xml
@@ -0,0 +1,11 @@
+#############################
+Expected output for 'h5dump --xml tslink.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <SoftLink LinkName="slink1" Target="somevalue" TargetObj="/somevalue" OBJ-XID="/slink1" Source="root"/>
+ <SoftLink LinkName="slink2" Target="linkvalue" TargetObj="/linkvalue" OBJ-XID="/slink2" Source="root"/>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tstr.h5.xml b/tools/testfiles/tstr.h5.xml
new file mode 100644
index 0000000..b31aacc
--- /dev/null
+++ b/tools/testfiles/tstr.h5.xml
@@ -0,0 +1,359 @@
+#############################
+Expected output for 'h5dump --xml tstr.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="comp1" OBJ-XID="/comp1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ <Dimension DimSize="6" MaxDimSize="6"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="int_array">
+ <DataType>
+ <ArrayType Ndims="2">
+ <ArrayDimension DimSize="8" DimPerm="0"/>
+ <ArrayDimension DimSize="10" DimPerm="1"/>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="BE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ <Field FieldName="string">
+ <DataType>
+ <ArrayType Ndims="2">
+ <ArrayDimension DimSize="3" DimPerm="0"/>
+ <ArrayDimension DimSize="4" DimPerm="1"/>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="32" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ </ArrayType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 1 4 9 16 25 36 49 64 81
+ 1 4 9 16 25 36 49 64 81 100
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 1 4 9 16 25 36 49 64 81 100
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361
+ 121 144 169 196 225 256 289 324 361 400 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361
+ 121 144 169 196 225 256 289 324 361 400
+ 144 169 196 225 256 289 324 361 400 441 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 0 1 4 9 16 25 36 49 64 81
+ 1 4 9 16 25 36 49 64 81 100
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 1 4 9 16 25 36 49 64 81 100
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361
+ 121 144 169 196 225 256 289 324 361 400 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361
+ 121 144 169 196 225 256 289 324 361 400
+ 144 169 196 225 256 289 324 361 400 441 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 0 1 4 9 16 25 36 49 64 81
+ 1 4 9 16 25 36 49 64 81 100
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 1 4 9 16 25 36 49 64 81 100
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 4 9 16 25 36 49 64 81 100 121
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 9 16 25 36 49 64 81 100 121 144
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 16 25 36 49 64 81 100 121 144 169
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361
+ 121 144 169 196 225 256 289 324 361 400 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ 25 36 49 64 81 100 121 144 169 196
+ 36 49 64 81 100 121 144 169 196 225
+ 49 64 81 100 121 144 169 196 225 256
+ 64 81 100 121 144 169 196 225 256 289
+ 81 100 121 144 169 196 225 256 289 324
+ 100 121 144 169 196 225 256 289 324 361
+ 121 144 169 196 225 256 289 324 361 400
+ 144 169 196 225 256 289 324 361 400 441 "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678" "abcdefgh12345678abcdefgh12345678"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="string1" OBJ-XID="/string1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="2">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="5" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "s1"
+ "s2"
+ "s3"
+ "s4"
+ "s5"
+ "s6"
+ "s7"
+ "s8"
+ "s9"
+ "s0"
+ "s1"
+ "s2"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="string2" OBJ-XID="/string2" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="20" MaxDimSize="20"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="11" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "ab cd ef1 "
+ "ab cd ef2 "
+ "ab cd ef3 "
+ "ab cd ef4 "
+ "ab cd ef5 "
+ "ab cd ef6 "
+ "ab cd ef7 "
+ "ab cd ef8 "
+ "ab cd ef9 "
+ "ab cd ef0 "
+ "ab cd ef1 "
+ "ab cd ef2 "
+ "ab cd ef3 "
+ "ab cd ef4 "
+ "ab cd ef5 "
+ "ab cd ef6 "
+ "ab cd ef7 "
+ "ab cd ef8 "
+ "ab cd ef9 "
+ "ab cd ef0 "
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="string3" OBJ-XID="/string3" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="27" MaxDimSize="27"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="8" StrPad="H5T_STR_NULLPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "abcd0"
+ "abcd1"
+ "abcd2"
+ "abcd3"
+ "abcd4"
+ "abcd5"
+ "abcd6"
+ "abcd7"
+ "abcd8"
+ "abcd9"
+ "abcd0"
+ "abcd1"
+ "abcd2"
+ "abcd3"
+ "abcd4"
+ "abcd5"
+ "abcd6"
+ "abcd7"
+ "abcd8"
+ "abcd9"
+ "abcd0"
+ "abcd1"
+ "abcd2"
+ "abcd3"
+ "abcd4"
+ "abcd5"
+ "abcd6"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="string4" OBJ-XID="/string4" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="168" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "s1234567890123456789 "
+ "s1234567890123456789 "
+ "s1234567890123456789 "
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
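The four string datasets dumped above ("string1" through "string4") differ only in their fixed string length (StrSize) and padding convention (StrPad: H5T_STR_NULLTERM, H5T_STR_NULLPAD or H5T_STR_SPACEPAD). A minimal sketch of how such a fixed-length string dataset is declared with the HDF5 C library follows; it assumes the 1.8+ H5Dcreate signature, and the file name, dataset shape and values are illustrative rather than taken from the actual h5dumptst.c generator.

    /* Sketch: fixed-length string dataset with an explicit StrSize and StrPad,
     * assuming the HDF5 1.8+ C API.  Names and data are illustrative only. */
    #include "hdf5.h"

    int
    main(void)
    {
        hid_t   file, space, dtype, dset;
        hsize_t dims[1] = {3};
        /* Three 5-byte strings, matching a StrSize="5" element size. */
        const char buf[3][5] = {"s1", "s2", "s3"};

        file  = H5Fcreate("string_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        space = H5Screate_simple(1, dims, NULL);

        dtype = H5Tcopy(H5T_C_S1);
        H5Tset_size(dtype, 5);                    /* StrSize="5"               */
        H5Tset_strpad(dtype, H5T_STR_NULLTERM);   /* StrPad="H5T_STR_NULLTERM" */

        dset = H5Dcreate(file, "string1", dtype, space,
                         H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(dset, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

        H5Dclose(dset);
        H5Tclose(dtype);
        H5Sclose(space);
        H5Fclose(file);
        return 0;
    }

Swapping H5T_STR_NULLTERM for H5T_STR_NULLPAD or H5T_STR_SPACEPAD is the only change needed to reproduce the other StrPad variants seen in the dump.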
diff --git a/tools/testfiles/tstr2.h5.xml b/tools/testfiles/tstr2.h5.xml
new file mode 100644
index 0000000..046cfba
--- /dev/null
+++ b/tools/testfiles/tstr2.h5.xml
@@ -0,0 +1,196 @@
+#############################
+Expected output for 'h5dump --xml tstr2.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Group Name="g1" OBJ-XID="/g1" Parents="/" >
+ <Dataset Name="dset1" OBJ-XID="/g1/dset1" Parents="/g1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="50" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ <Attribute Name="attr1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="3" MaxDimSize="3"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="11" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "0123456789"
+ "abcdefghij"
+ "ABCDEFGHIJ"
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Data>
+ <DataFromFile>
+ "This is row 0 of type H5T_STR_NULLTERM of"
+ "This is row 1 of type H5T_STR_NULLTERM of"
+ "This is row 2 of type H5T_STR_NULLTERM of"
+ "This is row 3 of type H5T_STR_NULLTERM of"
+ "This is row 4 of type H5T_STR_NULLTERM of"
+ "This is row 5 of type H5T_STR_NULLTERM of"
+ "This is row 6 of type H5T_STR_NULLTERM of"
+ "This is row 7 of type H5T_STR_NULLTERM of"
+ "This is row 8 of type H5T_STR_NULLTERM of"
+ "This is row 9 of type H5T_STR_NULLTERM of"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="g2" OBJ-XID="/g2" Parents="/" >
+ <Dataset Name="dset2" OBJ-XID="/g2/dset2" Parents="/g2">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="50" StrPad="H5T_STR_NULLTERM"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "This is row 0 of type H5T_STR_NULLTERM of string "
+ "This is row 1 of type H5T_STR_NULLTERM of string "
+ "This is row 2 of type H5T_STR_NULLTERM of string "
+ "This is row 3 of type H5T_STR_NULLTERM of string "
+ "This is row 4 of type H5T_STR_NULLTERM of string "
+ "This is row 5 of type H5T_STR_NULLTERM of string "
+ "This is row 6 of type H5T_STR_NULLTERM of string "
+ "This is row 7 of type H5T_STR_NULLTERM of string "
+ "This is row 8 of type H5T_STR_NULLTERM of string "
+ "This is row 9 of type H5T_STR_NULLTERM of string "
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="g3" OBJ-XID="/g3" Parents="/" >
+ <Dataset Name="dset3" OBJ-XID="/g3/dset3" Parents="/g3">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="50" StrPad="H5T_STR_NULLPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "This is row 0 of type H5T_STR_NULLPAD of"
+ "This is row 1 of type H5T_STR_NULLPAD of"
+ "This is row 2 of type H5T_STR_NULLPAD of"
+ "This is row 3 of type H5T_STR_NULLPAD of"
+ "This is row 4 of type H5T_STR_NULLPAD of"
+ "This is row 5 of type H5T_STR_NULLPAD of"
+ "This is row 6 of type H5T_STR_NULLPAD of"
+ "This is row 7 of type H5T_STR_NULLPAD of"
+ "This is row 8 of type H5T_STR_NULLPAD of"
+ "This is row 9 of type H5T_STR_NULLPAD of"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="g4" OBJ-XID="/g4" Parents="/" >
+ <Dataset Name="dset4" OBJ-XID="/g4/dset4" Parents="/g4">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="50" StrPad="H5T_STR_NULLPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "This is row 0 of type H5T_STR_NULLPAD of string ar"
+ "This is row 1 of type H5T_STR_NULLPAD of string ar"
+ "This is row 2 of type H5T_STR_NULLPAD of string ar"
+ "This is row 3 of type H5T_STR_NULLPAD of string ar"
+ "This is row 4 of type H5T_STR_NULLPAD of string ar"
+ "This is row 5 of type H5T_STR_NULLPAD of string ar"
+ "This is row 6 of type H5T_STR_NULLPAD of string ar"
+ "This is row 7 of type H5T_STR_NULLPAD of string ar"
+ "This is row 8 of type H5T_STR_NULLPAD of string ar"
+ "This is row 9 of type H5T_STR_NULLPAD of string ar"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="g5" OBJ-XID="/g5" Parents="/" >
+ <Dataset Name="dset5" OBJ-XID="/g5/dset5" Parents="/g5">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="50" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "This is row 0 of type H5T_STR_SPACEPAD of "
+ "This is row 1 of type H5T_STR_SPACEPAD of "
+ "This is row 2 of type H5T_STR_SPACEPAD of "
+ "This is row 3 of type H5T_STR_SPACEPAD of "
+ "This is row 4 of type H5T_STR_SPACEPAD of "
+ "This is row 5 of type H5T_STR_SPACEPAD of "
+ "This is row 6 of type H5T_STR_SPACEPAD of "
+ "This is row 7 of type H5T_STR_SPACEPAD of "
+ "This is row 8 of type H5T_STR_SPACEPAD of "
+ "This is row 9 of type H5T_STR_SPACEPAD of "
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+ <Group Name="g6" OBJ-XID="/g6" Parents="/" >
+ <Dataset Name="dset6" OBJ-XID="/g6/dset6" Parents="/g6">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="10" MaxDimSize="10"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="50" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "This is row 0 of type H5T_STR_SPACEPAD of string a"
+ "This is row 1 of type H5T_STR_SPACEPAD of string a"
+ "This is row 2 of type H5T_STR_SPACEPAD of string a"
+ "This is row 3 of type H5T_STR_SPACEPAD of string a"
+ "This is row 4 of type H5T_STR_SPACEPAD of string a"
+ "This is row 5 of type H5T_STR_SPACEPAD of string a"
+ "This is row 6 of type H5T_STR_SPACEPAD of string a"
+ "This is row 7 of type H5T_STR_SPACEPAD of string a"
+ "This is row 8 of type H5T_STR_SPACEPAD of string a"
+ "This is row 9 of type H5T_STR_SPACEPAD of string a"
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ </Group>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tstring-at.h5 b/tools/testfiles/tstring-at.h5
new file mode 100644
index 0000000..c166dea
--- /dev/null
+++ b/tools/testfiles/tstring-at.h5
Binary files differ
diff --git a/tools/testfiles/tstring-at.h5.xml b/tools/testfiles/tstring-at.h5.xml
new file mode 100644
index 0000000..18e4809
--- /dev/null
+++ b/tools/testfiles/tstring-at.h5.xml
@@ -0,0 +1,55 @@
+#############################
+Expected output for 'h5dump --xml tstring-at.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="dset1" OBJ-XID="/dset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="7" MaxDimSize="7"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="16" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ <Attribute Name="attr1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="7" MaxDimSize="7"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="16" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ "String withsp "
+ "String\\withsl "
+ "String&amp;withamp "
+ "String&lt;withlt "
+ "String&gt;withgt "
+ "String&apos;withapos "
+ "String\"withquot "
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Data>
+ <DataFromFile>
+ ""
+ ""
+ ""
+ ""
+ ""
+ ""
+ ""
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tstring.h5 b/tools/testfiles/tstring.h5
new file mode 100644
index 0000000..41fb96d
--- /dev/null
+++ b/tools/testfiles/tstring.h5
Binary files differ
diff --git a/tools/testfiles/tstring.h5.xml b/tools/testfiles/tstring.h5.xml
new file mode 100644
index 0000000..3617aaf
--- /dev/null
+++ b/tools/testfiles/tstring.h5.xml
@@ -0,0 +1,55 @@
+#############################
+Expected output for 'h5dump --xml tstring.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="dset1" OBJ-XID="/dset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="7" MaxDimSize="7"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="16" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ <Attribute Name="attr1">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="7" MaxDimSize="7"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <AtomicType>
+ <StringType Cset="H5T_CSET_ASCII" StrSize="16" StrPad="H5T_STR_SPACEPAD"/>
+ </AtomicType>
+ </DataType>
+ <Data>
+ <DataFromFile>
+ " "
+ " "
+ " "
+ " "
+ " "
+ " "
+ " "
+ </DataFromFile>
+ </Data>
+ </Attribute>
+ <Data>
+ <DataFromFile>
+ "String withsp "
+ "String\\withsl "
+ "String&amp;withamp "
+ "String&lt;withlt "
+ "String&gt;withgt "
+ "String&apos;withapos "
+ "String\"withquot "
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
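The tstring-at.h5 and tstring.h5 dumps above also show how h5dump --xml escapes string content: '&', '<', '>' and the apostrophe become &amp;, &lt;, &gt; and &apos;, while backslash and double quote are emitted as \\ and \". A rough sketch of attaching such a space-padded string attribute with the HDF5 C API (assuming the 1.8+ H5Acreate signature; the output file name is illustrative, not the real test generator):

    /* Sketch: dataset plus string attribute with H5T_STR_SPACEPAD padding,
     * assuming the HDF5 1.8+ C API.  File name is illustrative. */
    #include <string.h>
    #include "hdf5.h"

    int
    main(void)
    {
        hid_t   file, space, dtype, dset, attr;
        hsize_t dims[1] = {7};
        char    buf[7][16];
        const char *vals[7] = {
            "String withsp",  "String\\withsl",  "String&withamp",
            "String<withlt",  "String>withgt",   "String'withapos",
            "String\"withquot"
        };
        int i;

        /* Space-pad each value into a 16-byte fixed-length slot (StrSize="16"). */
        for (i = 0; i < 7; i++) {
            memset(buf[i], ' ', 16);
            memcpy(buf[i], vals[i], strlen(vals[i]));
        }

        file  = H5Fcreate("string_attr_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        space = H5Screate_simple(1, dims, NULL);

        dtype = H5Tcopy(H5T_C_S1);
        H5Tset_size(dtype, 16);                   /* StrSize="16"              */
        H5Tset_strpad(dtype, H5T_STR_SPACEPAD);   /* StrPad="H5T_STR_SPACEPAD" */

        dset = H5Dcreate(file, "dset1", dtype, space,
                         H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(dset, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

        attr = H5Acreate(dset, "attr1", dtype, space, H5P_DEFAULT, H5P_DEFAULT);
        H5Awrite(attr, dtype, buf);

        H5Aclose(attr);
        H5Dclose(dset);
        H5Tclose(dtype);
        H5Sclose(space);
        H5Fclose(file);
        return 0;
    }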
diff --git a/tools/testfiles/tvldtypes1.h5.xml b/tools/testfiles/tvldtypes1.h5.xml
new file mode 100644
index 0000000..303e25d
--- /dev/null
+++ b/tools/testfiles/tvldtypes1.h5.xml
@@ -0,0 +1,73 @@
+#############################
+Expected output for 'h5dump --xml tvldtypes1.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1.0" OBJ-XID="/Dataset1.0" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <VLType>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </VLType>
+ </DataType>
+<!-- Note: format of VL data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 10 11 20 21 22 30 31 32 33
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset2.0" OBJ-XID="/Dataset2.0" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <VLType>
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="LE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </VLType>
+ </DataType>
+<!-- Note: format of VL data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 10 10.1 20 20.1 20.2 30 30.1 30.2 30.3
+ </DataFromFile>
+ </Data>
+ </Dataset>
+ <Dataset Name="Dataset3.0" OBJ-XID="/Dataset3.0" Parents="root">
+ <Dataspace>
+ <ScalarDataspace />
+ </Dataspace>
+ <DataType>
+ <VLType>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </VLType>
+ </DataType>
+<!-- Note: format of VL data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 2 4 6 8 10 12 14 16 18 20 22 24 26 28 30 32 34 36 38 40 42 44 46 48 50 52 54 56 58 60 62 64 66 68 70 72
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
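Each element of the variable-length datasets dumped above carries its own length, which is why the XML only notes that the format of VL data is not specified and prints the values flattened. A sketch of building a variable-length-of-int dataset like "Dataset1.0" with the HDF5 C API (assuming the 1.8+ H5Dcreate signature; the file name and buffer contents are reconstructed from the dumped values, not copied from the real generator):

    /* Sketch: VL-of-int dataset, assuming the HDF5 1.8+ C API.
     * Each element is an hvl_t holding its own length and pointer. */
    #include <stdlib.h>
    #include "hdf5.h"

    int
    main(void)
    {
        hid_t   file, space, vltype, dset;
        hsize_t dims[1] = {4};
        hvl_t   wdata[4];
        int     i, j;

        /* Element i holds i+1 integers: {0}, {10,11}, {20,21,22}, {30,...,33}. */
        for (i = 0; i < 4; i++) {
            wdata[i].len = (size_t)(i + 1);
            wdata[i].p   = malloc((size_t)(i + 1) * sizeof(int));
            for (j = 0; j <= i; j++)
                ((int *)wdata[i].p)[j] = i * 10 + j;
        }

        file   = H5Fcreate("vl_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        space  = H5Screate_simple(1, dims, NULL);
        vltype = H5Tvlen_create(H5T_NATIVE_INT);   /* <VLType> of IntegerType */

        dset = H5Dcreate(file, "Dataset1.0", vltype, space,
                         H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(dset, vltype, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);

        H5Dvlen_reclaim(vltype, space, H5P_DEFAULT, wdata);  /* free per-element buffers */
        H5Dclose(dset);
        H5Tclose(vltype);
        H5Sclose(space);
        H5Fclose(file);
        return 0;
    }

The nested case in tvldtypes2.h5 (a VL of VL of unsigned int) is built the same way, by passing one H5Tvlen_create result into another.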
diff --git a/tools/testfiles/tvldtypes2.h5.xml b/tools/testfiles/tvldtypes2.h5.xml
new file mode 100644
index 0000000..a8802ec
--- /dev/null
+++ b/tools/testfiles/tvldtypes2.h5.xml
@@ -0,0 +1,36 @@
+#############################
+Expected output for 'h5dump --xml tvldtypes2.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1" OBJ-XID="/Dataset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <VLType>
+ <DataType>
+ <VLType>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="false" Size="4" />
+ </AtomicType>
+ </DataType>
+ </VLType>
+ </DataType>
+ </VLType>
+ </DataType>
+<!-- Note: format of VL data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 100 110 111 200 210 211 220 221 222
+ 300 310 311 320 321 322 330 331 332 333
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
diff --git a/tools/testfiles/tvldtypes3.h5.xml b/tools/testfiles/tvldtypes3.h5.xml
new file mode 100644
index 0000000..dc6f717
--- /dev/null
+++ b/tools/testfiles/tvldtypes3.h5.xml
@@ -0,0 +1,51 @@
+#############################
+Expected output for 'h5dump --xml tvldtypes3.h5'
+#############################
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE HDF5-File PUBLIC "HDF5-File.dtd" "http://hdf.ncsa.uiuc.edu/DTDs/HDF5-File.dtd">
+<HDF5-File>
+<RootGroup OBJ-XID="root">
+ <Dataset Name="Dataset1" OBJ-XID="/Dataset1" Parents="root">
+ <Dataspace>
+ <SimpleDataspace Ndims="1">
+ <Dimension DimSize="4" MaxDimSize="4"/>
+ </SimpleDataspace>
+ </Dataspace>
+ <DataType>
+ <CompoundType>
+ <Field FieldName="i">
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="true" Size="4" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="f">
+ <DataType>
+ <AtomicType>
+ <FloatType ByteOrder="LE" Size="4" SignBitLocation="31" ExponentBits="8" ExponentLocation="23" MantissaBits="23" MantissaLocation="0" />
+ </AtomicType>
+ </DataType>
+ </Field>
+ <Field FieldName="v">
+ <DataType>
+ <VLType>
+ <DataType>
+ <AtomicType>
+ <IntegerType ByteOrder="LE" Sign="false" Size="4" />
+ </AtomicType>
+ </DataType>
+ </VLType>
+ </DataType>
+ </Field>
+ </CompoundType>
+ </DataType>
+ <!-- Note: format of compound data not specified -->
+ <Data>
+ <DataFromFile>
+ 0 0 0 10 6.66667 10 11 20 13.3333 20 21 22 30 20 30 31 32 33
+ </DataFromFile>
+ </Data>
+ </Dataset>
+</RootGroup>
+</HDF5-File>
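The tvldtypes3.h5 dump above combines both ideas: a compound type whose fields are an int ("i"), a float ("f") and a variable-length sequence of unsigned ints ("v"), which h5dump flattens into the single <DataFromFile> line shown. A sketch of declaring that compound type in C (assuming the HDF5 1.8+ API; the struct layout and values are inferred from the dump rather than copied from the original test generator):

    /* Sketch: compound datatype with a VL member, assuming the HDF5 1.8+ C API. */
    #include <stdlib.h>
    #include "hdf5.h"

    typedef struct {
        int    i;
        float  f;
        hvl_t  v;      /* variable-length field */
    } rec_t;

    int
    main(void)
    {
        hid_t   file, space, vltype, ctype, dset;
        hsize_t dims[1] = {4};
        rec_t   wdata[4];
        int     n, j;

        for (n = 0; n < 4; n++) {
            wdata[n].i     = n * 10;
            wdata[n].f     = (float)(n * 20) / 3.0F;
            wdata[n].v.len = (size_t)(n + 1);
            wdata[n].v.p   = malloc((size_t)(n + 1) * sizeof(unsigned));
            for (j = 0; j <= n; j++)
                ((unsigned *)wdata[n].v.p)[j] = (unsigned)(n * 10 + j);
        }

        vltype = H5Tvlen_create(H5T_NATIVE_UINT);

        ctype = H5Tcreate(H5T_COMPOUND, sizeof(rec_t));
        H5Tinsert(ctype, "i", HOFFSET(rec_t, i), H5T_NATIVE_INT);
        H5Tinsert(ctype, "f", HOFFSET(rec_t, f), H5T_NATIVE_FLOAT);
        H5Tinsert(ctype, "v", HOFFSET(rec_t, v), vltype);

        file  = H5Fcreate("cmpd_vl_sketch.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        space = H5Screate_simple(1, dims, NULL);
        dset  = H5Dcreate(file, "Dataset1", ctype, space,
                          H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        H5Dwrite(dset, ctype, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata);

        H5Dvlen_reclaim(ctype, space, H5P_DEFAULT, wdata);  /* frees the VL members */
        H5Dclose(dset);
        H5Sclose(space);
        H5Tclose(ctype);
        H5Tclose(vltype);
        H5Fclose(file);
        return 0;
    }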
diff --git a/tools/testfiles/vdata_test.hdf b/tools/testfiles/vdata_test.hdf
new file mode 100644
index 0000000..f44bad8
--- /dev/null
+++ b/tools/testfiles/vdata_test.hdf
Binary files differ
diff --git a/tools/testfiles/vdnameclash_test.hdf b/tools/testfiles/vdnameclash_test.hdf
new file mode 100644
index 0000000..5ec91fc
--- /dev/null
+++ b/tools/testfiles/vdnameclash_test.hdf
Binary files differ
diff --git a/tools/testfiles/vg_all_test.hdf b/tools/testfiles/vg_all_test.hdf
new file mode 100644
index 0000000..84bf4a2
--- /dev/null
+++ b/tools/testfiles/vg_all_test.hdf
Binary files differ
diff --git a/tools/testfiles/vg_hl_test.hdf b/tools/testfiles/vg_hl_test.hdf
new file mode 100644
index 0000000..367fcd6
--- /dev/null
+++ b/tools/testfiles/vg_hl_test.hdf
Binary files differ
diff --git a/tools/testfiles/vg_loop_test.hdf b/tools/testfiles/vg_loop_test.hdf
new file mode 100644
index 0000000..c2ce53b
--- /dev/null
+++ b/tools/testfiles/vg_loop_test.hdf
Binary files differ
diff --git a/tools/testfiles/vgnameclash_test.hdf b/tools/testfiles/vgnameclash_test.hdf
new file mode 100644
index 0000000..269f36e
--- /dev/null
+++ b/tools/testfiles/vgnameclash_test.hdf
Binary files differ