-rw-r--r--  MANIFEST                  6
-rw-r--r--  hl/src/H5LD.c           639
-rw-r--r--  hl/src/H5LDprivate.h     49
-rw-r--r--  hl/src/H5LDpublic.h      33
-rw-r--r--  hl/src/Makefile.am        4
-rw-r--r--  hl/src/hdf5_hl.h          1
-rw-r--r--  hl/test/Makefile.am       4
-rw-r--r--  hl/test/gen_test_ld.c   379
-rw-r--r--  hl/test/test_ld.c      1430
-rw-r--r--  hl/test/test_ld.h5      bin 0 -> 42931 bytes
10 files changed, 2541 insertions(+), 4 deletions(-)
diff --git a/MANIFEST b/MANIFEST
index e47b020..8d30ca3 100644
--- a/MANIFEST
+++ b/MANIFEST
@@ -2352,6 +2352,9 @@
./hl/src/H5IM.c
./hl/src/H5IMprivate.h
./hl/src/H5IMpublic.h
+./hl/src/H5LD.c
+./hl/src/H5LDprivate.h
+./hl/src/H5LDpublic.h
./hl/src/H5LT.c
./hl/src/H5LTanalyze.c
./hl/src/H5LTanalyze.l
@@ -2378,6 +2381,7 @@
./hl/test/dtype_file_readable.txt
./hl/test/earth.pal
./hl/test/gen_test_ds.c
+./hl/test/gen_test_ld.c
./hl/test/h5hltest.h
./hl/test/image24pixel.txt
./hl/test/image24plane.txt
@@ -2391,6 +2395,8 @@
./hl/test/test_dset_opt.c
./hl/test/test_file_image.c
./hl/test/test_image.c
+./hl/test/test_ld.c
+./hl/test/test_ld.h5
./hl/test/test_lite.c
./hl/test/test_packet.c
./hl/test/test_packet_vlen.c
diff --git a/hl/src/H5LD.c b/hl/src/H5LD.c
new file mode 100644
index 0000000..4abd740
--- /dev/null
+++ b/hl/src/H5LD.c
@@ -0,0 +1,639 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+* Copyright by The HDF Group. *
+* Copyright by the Board of Trustees of the University of Illinois. *
+* All rights reserved. *
+* *
+* This file is part of HDF5. The full HDF5 copyright notice, including *
+* terms governing use, modification, and redistribution, is contained in *
+* the files COPYING and Copyright.html. COPYING can be found at the root *
+* of the source code distribution tree; Copyright.html can be found at the *
+* root level of an installed copy of the electronic HDF5 document set and *
+* is linked from the top-level documents page. It can also be found at *
+* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+* access to either file, you may request a copy from help@hdfgroup.org. *
+* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <string.h>
+#include <stdlib.h>
+#include <assert.h>
+#include <stdio.h>
+#include "H5LDprivate.h"
+
+/*-------------------------------------------------------------------------
+ *
+ * internal functions
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t H5LD_construct_info(H5LD_memb_t *memb, hid_t par_tid);
+static herr_t H5LD_get_dset_dims(hid_t did, hsize_t *cur_dims);
+static size_t H5LD_get_dset_type_size(hid_t did, const char *fields);
+static herr_t H5LD_get_dset_elmts(hid_t did, const hsize_t *prev_dims,
+ const hsize_t *cur_dims, const char *fields, void *buf);
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LD_clean_vector
+ *
+ * Purpose: Process the vector of info:
+ * 1) free the array of pointers to member names in listv[n]
+ * 2) close the type id of the last member in listv[n]
+ * 3) free the H5LD_memb_t structure itself as pointed to by listv[n]
+ *
+ * Return: void
+ *
+ * Programmer: Vailin Choi; Aug 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+void
+H5LD_clean_vector(H5LD_memb_t *listv[])
+{
+ unsigned n; /* Local index variable */
+
+ HDassert(listv);
+
+ /* Go through info for each field stored in listv[] */
+ for(n = 0; listv[n] != NULL; n++) {
+ if(listv[n]->names) {
+ HDfree(listv[n]->names);
+ listv[n]->names = NULL;
+ } /* end if */
+
+ /* Close the type id of the last member in the field */
+ if(!(listv[n]->last_tid < 0)) {
+ H5Tclose(listv[n]->last_tid);
+ listv[n]->last_tid = -1;
+ } /* end if */
+
+ /* Free the H5LD_memb_t structure for the field */
+ HDfree(listv[n]);
+ listv[n] = NULL;
+ } /* end for */
+} /* H5LD_clean_vector() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LD_construct_info()
+ *
+ * Purpose: Get the remaining info for a field:
+ * 1) Get the type id of the last member in the field
+ * 2) Get the total offset of all the members in the field
+ * 3) Get the type size of the last member in the field
+ *
+ * Return: Success: 0
+ * Failure: negative
+ *
+ * Programmer: Vailin Choi; Aug 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+H5LD_construct_info(H5LD_memb_t *memb, hid_t par_tid)
+{
+ hid_t tmp_tid = -1; /* Dataset type id */
+ unsigned i; /* Local index variable */
+ herr_t ret_value = FAIL; /* Return value */
+
+ /* Make a copy of the incoming datatype */
+ tmp_tid = H5Tcopy(par_tid);
+
+ /* Validate all the members in a field */
+ for(i = 0; memb->names[i] != NULL; i++) {
+ hid_t memb_tid; /* Type id for a member in a field */
+ int idx; /* Index # of a member in a compound datatype */
+
+ /* Get the member index and member type id */
+ if((idx = H5Tget_member_index(tmp_tid, memb->names[i])) < 0)
+ goto done;
+ if((memb_tid = H5Tget_member_type(tmp_tid, (unsigned)idx)) < 0)
+ goto done;
+
+ /* Sum up the offset of all the members in the field */
+ memb->tot_offset += H5Tget_member_offset(tmp_tid, (unsigned)idx);
+ if(H5Tclose(tmp_tid) < 0)
+ goto done;
+ tmp_tid = memb_tid;
+ } /* end for */
+
+ /* Get the type size of the last member in the field */
+ memb->last_tsize = H5Tget_size(tmp_tid);
+
+ /* Save the type id of the last member in the field */
+ memb->last_tid = H5Tcopy(tmp_tid);
+
+ /* Indicate success */
+ ret_value = SUCCEED;
+
+done:
+ H5E_BEGIN_TRY
+ H5Tclose(tmp_tid);
+ H5E_END_TRY
+
+ return(ret_value);
+} /* H5LD_construct_info() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LD_construct_vector
+ *
+ * Purpose: Process the comma-separated list of fields in "fields" as follows:
+ * Example:
+ * "fields": "a.b.c,d"
+ * listv[0]->tot_offset = total offset of "a" & "b" & "c"
+ * listv[0]->last_tid = type id of "c"
+ * listv[0]->last_tsize = type size of "c"
+ * listv[0]->names[0] = "a"
+ * listv[0]->names[1] = "b"
+ * listv[0]->names[2] = "c"
+ * listv[0]->names[3] = NULL
+ *
+ * listv[1]->tot_offset = offset of "d"
+ * listv[1]->last_tid = type id of "d"
+ * listv[1]->last_tsize = type size of "d"
+ * listv[1]->names[0] = "d"
+ * listv[1]->names[1] = NULL
+ *
+ * Return: Success: # of comma-separated fields in "fields"
+ * Failure: negative value
+ *
+ * Programmer: Vailin Choi; Aug 2010
+ *
+*-------------------------------------------------------------------------
+*/
+int
+H5LD_construct_vector(char *fields, H5LD_memb_t *listv[]/*OUT*/, hid_t par_tid)
+{
+ int nfields; /* The # of comma-separated fields in "fields" */
+ hbool_t end_of_fields = FALSE; /* end of "fields" */
+ char *fields_ptr; /* Pointer to "fields" */
+ int ret_value = FAIL; /* Return value */
+
+ HDassert(listv);
+ HDassert(fields);
+
+ fields_ptr = fields;
+ nfields = 0;
+
+ /* Process till end of "fields" */
+ while(!end_of_fields) {
+ H5LD_memb_t *memb = NULL; /* Pointer to structure for storing a field's info */
+ char *cur; /* Pointer to a member in a field */
+ size_t len; /* Estimated # of members in a field */
+ hbool_t gotcomma = FALSE; /* A comma encountered */
+ hbool_t gotmember = FALSE; /* Getting member in a field */
+ hbool_t valid = TRUE; /* Whether a field being processed is valid or not */
+ int j = 0; /* The # of members in a field */
+
+ len = (HDstrlen(fields_ptr) / 2) + 2;
+
+ /* Allocate memory for an H5LD_memb_t for storing a field's info */
+ if(NULL == (memb = (H5LD_memb_t *)HDcalloc((size_t)1, sizeof(H5LD_memb_t))))
+ goto done;
+
+ /* Allocate memory for an array of pointers to member names */
+ if(NULL == (memb->names = (char **)HDcalloc(len, sizeof(char *))))
+ goto done;
+
+ memb->names[j] = fields_ptr;
+ memb->last_tid = -1;
+ cur = fields_ptr;
+
+ /* Continue processing till: not valid or comma encountered or "fields" ended */
+ while(valid && !gotcomma && !end_of_fields) {
+ switch(*fields_ptr) {
+ case '\0': /* end of list */
+ if(gotmember) { /* getting something and end of "fields" */
+                        *cur++ = '\0';
+ memb->names[++j] = NULL;
+ } /* end if */
+ else /* getting nothing but end of list */
+ valid = FALSE;
+ end_of_fields = TRUE;
+ break;
+
+ case '\\': /* escape character */
+ ++fields_ptr; /* skip it */
+ if(*fields_ptr == '\0')
+ valid = FALSE;
+ else {
+ *cur++ = *fields_ptr++;
+ gotmember = TRUE;
+ } /* end else */
+ break;
+
+ case '.': /* nested field separator */
+                    *fields_ptr++ = *cur++ = '\0';
+ if(gotmember) {
+ memb->names[++j] = cur;
+ gotmember = FALSE;
+ } /* end if */
+ else
+ valid = FALSE;
+ break;
+
+ case ',': /* field separator */
+                    *fields_ptr++ = *cur++ = '\0';
+ if(gotmember) {
+ memb->names[++j] = NULL;
+ gotmember = FALSE;
+ } /* end if */
+ else
+ valid = FALSE;
+ gotcomma = TRUE;
+ break;
+
+ default:
+ *cur++ = *fields_ptr++;
+ gotmember = TRUE;
+ break;
+ } /* end switch */
+ } /* while (valid && !gotcomma && !end_of_fields) */
+
+ /* If valid, put into listv and continue processing further info */
+ if(valid) {
+ listv[nfields++] = memb;
+ if(H5LD_construct_info(memb, par_tid) < 0)
+ goto done;
+ } /* end if */
+ else {
+ if(memb)
+ HDfree(memb);
+ goto done;
+ } /* end else */
+ } /* while !end_of_fields */
+
+ /* Indicate success */
+ ret_value = nfields;
+
+done:
+ listv[nfields] = NULL;
+ if(ret_value == FAIL)
+ H5LD_clean_vector(listv);
+
+ return(ret_value);
+} /* H5LD_construct_vector() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LD_get_dset_dims
+ *
+ * Purpose: To return the current size for each dimension of the
+ * dataset's dataspace
+ *
+ * Return: Success: 0
+ * Failure: negative value
+ *
+ * Programmer: Vailin Choi; March 2010
+ *
+*-------------------------------------------------------------------------
+*/
+static herr_t
+H5LD_get_dset_dims(hid_t did, hsize_t *cur_dims)
+{
+ hid_t sid = -1; /* Dataspace ID */
+ herr_t ret_value = FAIL; /* Return Value */
+
+ /* Verify parameter */
+ if(cur_dims == NULL)
+ goto done;
+
+ /* Get the dataset's dataspace */
+ if((sid = H5Dget_space(did)) < 0)
+ goto done;
+
+ /* Get the current dimension size */
+ if(H5Sget_simple_extent_dims(sid, cur_dims, NULL) < 0)
+ goto done;
+
+ /* Indicate success */
+ ret_value = SUCCEED;
+
+done:
+ H5E_BEGIN_TRY {
+ H5Sclose(sid);
+ } H5E_END_TRY;
+
+ return(ret_value);
+} /* H5LD_get_dset_dims() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LD_get_dset_type_size
+ *
+ * Purpose: To return the size of the dataset's datatype in bytes
+ * null "fields": return the size of the dataset's datatype
+ * non-null "fields": return the size of the dataset's datatype
+ * with respect to the selection in "fields"
+ *
+ * Return: Success: size of the dataset's datatype
+ * Failure: 0 (valid datatypes are never zero size)
+ *
+ * Programmer: Vailin Choi; March 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+static size_t
+H5LD_get_dset_type_size(hid_t did, const char *fields)
+{
+ hid_t dset_tid = -1; /* Dataset's type identifier */
+ hid_t tid = -1; /* Native Type identifier */
+ H5LD_memb_t **listv = NULL; /* Vector for storing information in "fields" */
+ char *dup_fields = NULL; /* A copy of "fields" */
+ size_t ret_value = 0; /* Return value */
+
+ /* Get the datatype of the dataset */
+ if((dset_tid = H5Dget_type(did)) < 0)
+ goto done;
+ if((tid = H5Tget_native_type(dset_tid, H5T_DIR_DEFAULT)) < 0)
+ goto done;
+
+ if(fields == NULL) /* If no "fields" is specified */
+ ret_value = H5Tget_size(tid);
+ else { /* "fields" are specified */
+ size_t len; /* Estimate the number of comma-separated fields in "fields" */
+ size_t tot = 0; /* Data type size of all the fields in "fields" */
+ int n = 0, num = 0; /* Local index variables */
+
+ HDassert(fields && *fields);
+
+ /* Should be a compound datatype if "fields" exists */
+ if(H5Tget_class(dset_tid) != H5T_COMPOUND)
+ goto done;
+
+ /* Get a copy of "fields" */
+ if(NULL == (dup_fields = HDstrdup(fields)))
+ goto done;
+
+ /* Allocate memory for a list of H5LD_memb_t pointers to store "fields" info */
+ len = (HDstrlen(fields) / 2) + 2;
+ if(NULL == (listv = (H5LD_memb_t **)HDcalloc(len, sizeof(H5LD_memb_t *))))
+ goto done;
+
+ /* Process and store info for "fields" */
+ if((num = H5LD_construct_vector(dup_fields, listv/*OUT*/, tid)) < 0)
+ goto done;
+
+ /* Sum up the size of all the datatypes in "fields" */
+ for(n = 0; n < num; n++)
+ tot += listv[n]->last_tsize;
+
+ /* Clean up the vector of H5LD_memb_t structures */
+ H5LD_clean_vector(listv);
+
+ /* Return the total size */
+ ret_value = tot;
+ } /* end else */
+
+done:
+ H5E_BEGIN_TRY
+ H5Tclose(tid);
+ H5Tclose(dset_tid);
+ H5E_END_TRY
+
+ /* Free the array of H5LD_memb_t pointers */
+ if(listv)
+ HDfree(listv);
+
+ /* Free memory */
+ if(dup_fields)
+ HDfree(dup_fields);
+
+ return(ret_value);
+} /* H5LD_get_dset_type_size() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LD_get_dset_elmts
+ *
+ * Purpose: To retrieve selected data from the dataset
+ *
+ * Return: Success: 0
+ * Failure: negative
+ *
+ * Programmer: Vailin Choi; August 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+H5LD_get_dset_elmts(hid_t did, const hsize_t *prev_dims, const hsize_t *cur_dims,
+ const char *fields, void *buf)
+{
+ hid_t dtid = -1, tid = -1; /* Dataset type id */
+ hid_t sid = -1, mid = -1; /* Dataspace and memory space id */
+ hssize_t snum_elmts; /* Number of dataset elements in the selection (signed) */
+ hsize_t num_elmts; /* Number of dataset elements in the selection */
+ hsize_t start[H5S_MAX_RANK];/* Starting offset */
+    hsize_t count[H5S_MAX_RANK];/* Block count for the selection */
+ H5LD_memb_t **listv = NULL; /* Vector for storing information in "fields" */
+ char *dup_fields = NULL; /* A copy of "fields" */
+    char *sav_buf = NULL;       /* Saved pointer to the temporary buffer */
+    unsigned ctr;               /* Counter for # of dimensions with cur_dims[] > prev_dims[] */
+ int ndims; /* Number of dimensions for the dataset */
+ int i; /* Local index variable */
+ herr_t ret_value = FAIL; /* Return value */
+
+ /* Verify parameters */
+ if(prev_dims == NULL || cur_dims == NULL || buf == NULL)
+ goto done;
+
+ /* Get dataset's dataspace */
+ if((sid = H5Dget_space(did)) < 0)
+ goto done;
+
+ /* Get the number of dimensions */
+ if((ndims = H5Sget_simple_extent_ndims(sid)) < 0)
+ goto done;
+
+    /* Determine which dimensions in cur_dims[] are larger than prev_dims[]; fail if none are */
+ HDmemset(start, 0, sizeof start);
+ HDmemset(count, 0, sizeof count);
+ ctr = 0;
+ for(i = 0; i < ndims; i++)
+ if(cur_dims[i] > prev_dims[i]) {
+ ++ctr;
+ count[i] = cur_dims[i] - prev_dims[i];
+ start[i] = prev_dims[i];
+ } /* end if */
+ else { /* < or = */
+ start[i] = 0;
+ count[i] = MIN(prev_dims[i], cur_dims[i]);
+ } /* end else */
+ if(!ctr)
+ goto done;
+
+ if(ctr == 1) { /* changes for only one dimension */
+ /* Make the selection in the dataset based on "cur_dims" and "prev_dims" */
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, count, NULL) < 0)
+ goto done;
+ } /* end if */
+    else { /* changes in more than one dimension */
+ HDmemset(start, 0, sizeof start);
+
+ /* Make the selection in the dataset based on "cur_dims" and "prev_dims" */
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_SET, start, NULL, cur_dims, NULL) < 0)
+ goto done;
+ if(H5Sselect_hyperslab(sid, H5S_SELECT_NOTB, start, NULL, prev_dims, NULL) < 0)
+ goto done;
+ } /* end else */
+
+ /* Get the number of elements in the selection */
+ if(0 == (snum_elmts = H5Sget_select_npoints(sid)))
+ goto done;
+ num_elmts = (hsize_t)snum_elmts;
+
+ /* Create the memory space for the selection */
+ if((mid = H5Screate_simple(1, &num_elmts, NULL)) < 0)
+ goto done;
+
+ /* Get the native datatype size */
+ if((dtid = H5Dget_type(did)) < 0)
+ goto done;
+ if((tid = H5Tget_native_type(dtid, H5T_DIR_DEFAULT)) < 0)
+ goto done;
+
+ if(fields == NULL) { /* nothing in "fields" */
+ /* Read and store all the elements in "buf" */
+ if(H5Dread(did, tid, mid, sid, H5P_DEFAULT, buf) < 0)
+ goto done;
+ } /* end if */
+ else { /* "fields" is specified */
+ unsigned char *buf_p = (unsigned char *)buf; /* Pointer to the destination buffer */
+ char *tmp_buf; /* Temporary buffer for data read */
+ size_t tot_tsize; /* Total datatype size */
+ size_t len; /* Estimate the number of comma-separated fields in "fields" */
+
+ /* should be a compound datatype if "fields" exists */
+ if(H5Tget_class(tid) != H5T_COMPOUND)
+ goto done;
+
+ /* Get the total size of the dataset's datatypes */
+ if(0 == (tot_tsize = H5LD_get_dset_type_size(did, NULL)))
+ goto done;
+
+ /* Allocate memory for reading in the elements in the dataset selection */
+ if(NULL == (sav_buf = tmp_buf = (char *)HDcalloc((size_t)num_elmts, tot_tsize)))
+ goto done;
+
+ /* Read the dataset elements in the selection */
+ if(H5Dread(did, tid, mid, sid, H5P_DEFAULT, tmp_buf) < 0)
+ goto done;
+
+ /* Make a copy of "fields" */
+ if(NULL == (dup_fields = HDstrdup(fields)))
+ goto done;
+
+ /* Allocate memory for the vector of H5LD_memb_t pointers */
+ len = (HDstrlen(fields) / 2) + 2;
+ if(NULL == (listv = (H5LD_memb_t **)HDcalloc(len, sizeof(H5LD_memb_t *))))
+ goto done;
+
+ /* Process and store information for "fields" */
+ if(H5LD_construct_vector(dup_fields, listv, tid) < 0)
+ goto done;
+
+ /* Copy data for each dataset element in the selection */
+ for(i = 0; i < (int)num_elmts; i++) {
+ int j; /* Local index variable */
+
+ /* Copy data for "fields" to the input buffer */
+ for(j = 0; listv[j] != NULL; j++) {
+ HDmemcpy(buf_p, tmp_buf + listv[j]->tot_offset, listv[j]->last_tsize);
+ buf_p += listv[j]->last_tsize;
+ } /* end for */
+ tmp_buf += tot_tsize;
+ } /* end for */
+
+ /* Clean up the vector of H5LD_memb_t structures */
+ H5LD_clean_vector(listv);
+ } /* end else */
+
+ /* Indicate success */
+ ret_value = SUCCEED;
+
+done:
+ H5E_BEGIN_TRY
+ H5Tclose(dtid);
+ H5Tclose(tid);
+ H5Sclose(sid);
+ H5Sclose(mid);
+ H5E_END_TRY
+
+ /* Free the array of H5LD_memb_t pointers */
+ if(listv)
+ HDfree(listv);
+
+ /* Free memory */
+ if(dup_fields)
+ HDfree(dup_fields);
+ if(sav_buf)
+ HDfree(sav_buf);
+
+ return(ret_value);
+} /* H5LD_get_dset_elmts() */
+
+/*-------------------------------------------------------------------------
+ *
+ * Public functions
+ *
+ *-------------------------------------------------------------------------
+ */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LDget_dset_dims
+ *
+ * Purpose: To retrieve the current dimension sizes for a dataset
+ *
+ * Return: Success: 0
+ * Failure: negative value
+ *
+ * Programmer: Vailin Choi; March 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+herr_t
+H5LDget_dset_dims(hid_t did, hsize_t *cur_dims)
+{
+ return(H5LD_get_dset_dims(did, cur_dims));
+} /* H5LDget_dset_dims() */
+
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LDget_dset_type_size
+ *
+ * Purpose: To return the size in bytes of the datatype for the dataset
+ *
+ * Return: Success: size in bytes of the dataset's datatype
+ * Failure: 0 (valid datatypes are never zero size)
+ *
+ * Programmer: Vailin Choi; March 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+size_t
+H5LDget_dset_type_size(hid_t did, const char *fields)
+{
+ return(H5LD_get_dset_type_size(did, fields));
+} /* H5LDget_dset_type_size() */
+
+
+/*-------------------------------------------------------------------------
+ * Function: H5LDget_dset_elmts
+ *
+ * Purpose: To retrieve selected data from the dataset
+ *
+ * Return: Success: 0
+ * Failure: negative value
+ *
+ * Programmer: Vailin Choi; March 2010
+ *
+ *-------------------------------------------------------------------------
+ */
+herr_t
+H5LDget_dset_elmts(hid_t did, const hsize_t *prev_dims, const hsize_t *cur_dims, const char *fields, void *buf)
+{
+ return(H5LD_get_dset_elmts(did, prev_dims, cur_dims, fields, buf) );
+} /* H5LDget_dset_elmts() */
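[Editorial note, not part of this patch] The "fields" string handled by H5LD_construct_vector() above is parsed in place, so a caller passes a writable copy and a pointer vector sized with the same (strlen/2 + 2) upper bound used in H5LD_get_dset_type_size(). A minimal sketch of that calling pattern, assuming "par_tid" is a compound datatype that actually contains a nested member "a.b.c" and a member "d", and that the caller may use the private H5LDprivate.h API (as hl/tools/h5watch does):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include "H5LDprivate.h"   /* private API: only usable inside the library/tools */

    static herr_t
    show_fields(hid_t par_tid)
    {
        char          fields[] = "a.b.c,d";            /* parsed in place: must be writable */
        size_t        len = (strlen(fields) / 2) + 2;  /* upper bound on # of fields */
        H5LD_memb_t **listv;
        int           nfields, i;

        if(NULL == (listv = (H5LD_memb_t **)calloc(len, sizeof(H5LD_memb_t *))))
            return -1;

        /* listv[0] ends up describing "a.b.c" (tot_offset = offsets of a + b + c,
         * last_tid/last_tsize describe "c"); listv[1] describes "d" */
        if((nfields = H5LD_construct_vector(fields, listv, par_tid)) < 0) {
            free(listv);
            return -1;
        }

        for(i = 0; i < nfields; i++)
            printf("field %d: total offset = %zu, last member size = %zu\n",
                   i, listv[i]->tot_offset, listv[i]->last_tsize);

        H5LD_clean_vector(listv);   /* frees names[], closes last_tid, frees each entry */
        free(listv);
        return 0;
    }

The same allocate/construct/clean pattern appears in H5LD_get_dset_type_size() and H5LD_get_dset_elmts() above.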
+
diff --git a/hl/src/H5LDprivate.h b/hl/src/H5LDprivate.h
new file mode 100644
index 0000000..13e0710
--- /dev/null
+++ b/hl/src/H5LDprivate.h
@@ -0,0 +1,49 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef _H5LDprivate_H
+#define _H5LDprivate_H
+
+/* High-level library internal header file */
+#include "H5HLprivate2.h"
+#include "H5LDpublic.h"
+
+/* Store information for a field in <list_of_fields> for a compound data type */
+/*
+ * Note: This data structure is used by both H5LD.c and hl/tools/h5watch
+ * This declaration is repeated in tools/lib/h5tools_str.c
+ */
+typedef struct H5LD_memb_t {
+ size_t tot_offset;
+ size_t last_tsize;
+ hid_t last_tid;
+ char **names;
+} H5LD_memb_t;
+
+/*
+ * Note that these two private routines are called by hl/tools/h5watch.
+ * The following options were considered:
+ *	1) Duplicate the code in both H5LD.c and h5watch
+ *	2) Make these two routines public
+ *	3) Break the rule "tools should not call private library routines"
+ * #1 is bad for maintenance, and #2 would make public two routines that are
+ * too specific for the public API, so #3 was chosen for now after some
+ * discussion.
+ */
+void H5LD_clean_vector(H5LD_memb_t *listv[]);
+int H5LD_construct_vector(char *fields, H5LD_memb_t *listv[], hid_t par_tid);
+
+#endif /* end _H5LDprivate_H */
+
diff --git a/hl/src/H5LDpublic.h b/hl/src/H5LDpublic.h
new file mode 100644
index 0000000..4844d42
--- /dev/null
+++ b/hl/src/H5LDpublic.h
@@ -0,0 +1,33 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#ifndef _H5LDpublic_H
+#define _H5LDpublic_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+H5_HLDLL herr_t H5LDget_dset_dims(hid_t did, hsize_t *cur_dims);
+H5_HLDLL size_t H5LDget_dset_type_size(hid_t did, const char *fields);
+H5_HLDLL herr_t H5LDget_dset_elmts(hid_t did, const hsize_t *prev_dims,
+ const hsize_t *cur_dims, const char *fields, void *buf);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* _H5LDpublic_H */
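[Editorial note, not part of this patch] A minimal end-to-end sketch of the three public routines declared here, assuming the test_ld.h5 file produced by hl/test/gen_test_ld.c later in this patch (a one-dimensional compound dataset DSET_CMPD with members field1..field4):

    #include <stdio.h>
    #include <stdlib.h>
    #include "hdf5.h"
    #include "hdf5_hl.h"

    int
    main(void)
    {
        hid_t   fid, did;
        hsize_t cur_dims[1];
        hsize_t prev_dims[1] = {0};   /* dimension sizes seen at the previous check */
        size_t  elmt_size;
        void   *buf;

        if((fid = H5Fopen("test_ld.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) return 1;
        if((did = H5Dopen2(fid, "DSET_CMPD", H5P_DEFAULT)) < 0) return 1;

        /* Current dimension sizes and the per-element size of the two selected fields */
        if(H5LDget_dset_dims(did, cur_dims) < 0) return 1;
        if(0 == (elmt_size = H5LDget_dset_type_size(did, "field1,field4.b"))) return 1;

        /* Read only the elements added since prev_dims, and only the selected fields */
        if(NULL == (buf = malloc((size_t)cur_dims[0] * elmt_size))) return 1;
        if(H5LDget_dset_elmts(did, prev_dims, cur_dims, "field1,field4.b", buf) < 0) return 1;

        free(buf);
        H5Dclose(did);
        H5Fclose(fid);
        return 0;
    }

In a monitoring loop, prev_dims would be the sizes returned by the previous H5LDget_dset_dims() call, so each pass reads only the newly appended elements.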
+
diff --git a/hl/src/Makefile.am b/hl/src/Makefile.am
index e772233..fef3450 100644
--- a/hl/src/Makefile.am
+++ b/hl/src/Makefile.am
@@ -31,12 +31,12 @@ lib_LTLIBRARIES=libhdf5_hl.la
libhdf5_hl_la_LDFLAGS= -version-info $(LT_HL_VERS_INTERFACE):$(LT_HL_VERS_REVISION):$(LT_HL_VERS_AGE) $(AM_LDFLAGS)
# List sources to include in the HDF5 HL Library.
-libhdf5_hl_la_SOURCES=H5DO.c H5DS.c H5IM.c H5LT.c H5LTanalyze.c H5LTparse.c H5PT.c H5TB.c
+libhdf5_hl_la_SOURCES=H5DO.c H5DS.c H5IM.c H5LT.c H5LTanalyze.c H5LTparse.c H5PT.c H5TB.c H5LD.c
# HDF5 HL library depends on HDF5 Library.
libhdf5_hl_la_LIBADD=$(LIBHDF5)
# Public header files (to be installed)
-include_HEADERS=hdf5_hl.h H5DOpublic.h H5IMpublic.h H5LTpublic.h H5TBpublic.h H5DSpublic.h H5PTpublic.h
+include_HEADERS=hdf5_hl.h H5DOpublic.h H5IMpublic.h H5LTpublic.h H5TBpublic.h H5DSpublic.h H5PTpublic.h H5LDpublic.h
include $(top_srcdir)/config/conclude.am
diff --git a/hl/src/hdf5_hl.h b/hl/src/hdf5_hl.h
index 6c363f1..f55aa04 100644
--- a/hl/src/hdf5_hl.h
+++ b/hl/src/hdf5_hl.h
@@ -28,6 +28,7 @@
#include "H5IMpublic.h" /* image */
#include "H5TBpublic.h" /* table */
#include "H5PTpublic.h" /* packet table */
+#include "H5LDpublic.h" /* lite dataset */
#endif /*H5_INCLUDE_HL*/
diff --git a/hl/test/Makefile.am b/hl/test/Makefile.am
index 7bfd6b2..3f21218 100644
--- a/hl/test/Makefile.am
+++ b/hl/test/Makefile.am
@@ -29,7 +29,7 @@ LDADD=$(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
# Test programs. These are our main targets. They should be listed in the
# order to be executed, generally most specific tests to least specific tests.
TEST_PROG=test_lite test_image test_file_image test_table test_ds test_packet test_dset_opt \
- test_dset_append
+ test_ld test_dset_append
check_PROGRAMS=$(TEST_PROG)
# These programs generate test files for the tests. They don't need to be
@@ -37,7 +37,7 @@ check_PROGRAMS=$(TEST_PROG)
# them in a conditional causes automake to generate rules so that they
# can be built by hand. They can also be built by specifying
# --enable-build-all at configure time.
-BUILD_ALL_PROGS=gen_test_ds
+BUILD_ALL_PROGS=gen_test_ds gen_test_ld
if BUILD_ALL_CONDITIONAL
noinst_PROGRAMS=$(BUILD_ALL_PROGS)
diff --git a/hl/test/gen_test_ld.c b/hl/test/gen_test_ld.c
new file mode 100644
index 0000000..1313d2a
--- /dev/null
+++ b/hl/test/gen_test_ld.c
@@ -0,0 +1,379 @@
+#include "hdf5.h"
+#include "H5LDprivate.h"
+#include <time.h>
+#include <sys/time.h>
+#include <sys/resource.h>
+#include <stdlib.h>
+
+/*
+ * test_ld.h5: file with various types of datasets for testing--
+ *
+ * The following datasets are chunked, H5D_ALLOC_TIME_INCR, max. dimension setting:
+ * DSET_ONE: one-dimensional dataset
+ * DSET_TWO: two-dimensional dataset
+ * DSET_CMPD: one-dimensional dataset with compound type
+ * DSET_CMPD_ESC: one-dimensional dataset with compound type and member names with
+ * escape/separator characters
+ * DSET_CMPD_TWO: two-dimensional dataset with compound type
+ *
+ * The following datasets are one-dimensional, chunked, max. dimension setting:
+ * DSET_ALLOC_EARLY: dataset with H5D_ALLOC_TIME_EARLY
+ *	DSET_ALLOC_LATE: dataset with H5D_ALLOC_TIME_LATE
+ *
+ * The following datasets are one-dimensional:
+ * DSET_NONE: fixed dimension setting, contiguous, H5D_ALLOC_TIME_LATE
+ * DSET_NOMAX: fixed dimension setting, chunked, H5D_ALLOC_TIME_INCR
+ */
+#define ONE_DIMS0 10
+#define MAX_ONE_DIMS0 100
+
+#define DSET_ONE "DSET_ONE"
+#define DSET_NONE "DSET_NONE"
+#define DSET_NOMAX "DSET_NOMAX"
+#define DSET_ALLOC_LATE "DSET_ALLOC_LATE"
+#define DSET_ALLOC_EARLY "DSET_ALLOC_EARLY"
+#define DSET_CMPD "DSET_CMPD"
+#define DSET_CMPD_ESC "DSET_CMPD_ESC"
+#define DSET_NULL "DSET_NULL"
+#define DSET_SCALAR "DSET_SCALAR"
+
+#define TWO_DIMS0 4
+#define TWO_DIMS1 10
+#define MAX_TWO_DIMS0 60
+#define MAX_TWO_DIMS1 100
+
+#define DSET_TWO "DSET_TWO"
+#define DSET_CMPD_TWO "DSET_CMPD_TWO"
+
+#define CHUNK_SIZE 2
+
+#define FILE "test_ld.h5"
+
+/* Data structures for datasets with compound types */
+typedef struct sub22_t {
+ unsigned int a;
+ unsigned int b;
+ unsigned int c;
+} sub22_t;
+
+typedef struct sub2_t {
+ unsigned int a;
+ sub22_t b;
+ unsigned int c;
+} sub2_t;
+
+typedef struct sub4_t {
+ unsigned int a;
+ unsigned int b;
+} sub4_t;
+
+typedef struct set_t {
+ unsigned int field1;
+ sub2_t field2;
+ double field3;
+ sub4_t field4;
+} set_t;
+
+/*
+ **************************************************************************************
+ *
+ * Create a dataset with the given input parameters
+ * Write to the dataset with the given "data"
+ *
+ **************************************************************************************
+ */
+static int
+generate_dset(hid_t fid, const char *dname, int ndims, hsize_t *dims,
+ hsize_t *maxdims, hid_t dtid, void *data)
+{
+ hid_t dcpl = -1; /* Dataset creation property */
+ hid_t did = -1; /* Dataset id */
+ hid_t sid = -1; /* Dataspace id */
+ int i; /* Local index variable */
+
+ /* Create the dataspace */
+ if((sid = H5Screate_simple(ndims, dims, maxdims)) < 0)
+ goto done;
+
+ /* Set up dataset's creation properties */
+ if(!HDstrcmp(dname, DSET_NONE))
+ dcpl = H5P_DEFAULT;
+ else {
+ hsize_t chunk_dims[H5S_MAX_RANK]; /* Dimension sizes for chunks */
+
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ goto done;
+ for(i = 0; i < ndims; i++)
+ chunk_dims[i] = CHUNK_SIZE;
+ if(H5Pset_chunk(dcpl, ndims, chunk_dims) < 0)
+ goto done;
+ } /* end else */
+
+ if(!HDstrcmp(dname, DSET_ALLOC_LATE)) {
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_LATE) < 0)
+ goto done;
+ } /* end if */
+ else if(!HDstrcmp(dname, DSET_ALLOC_EARLY)) {
+ if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0)
+ goto done;
+ } /* end if */
+
+ /* Create the dataset */
+ if((did = H5Dcreate2(fid, dname, dtid, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Write to the dataset */
+ if(H5Dwrite(did, dtid, H5S_ALL, H5S_ALL, H5P_DEFAULT, data) < 0)
+ goto done;
+
+ /* Closing */
+ if(H5Pclose(dcpl) < 0)
+ goto done;
+ if(H5Sclose(sid) < 0)
+ goto done;
+ if(H5Dclose(did) < 0)
+ goto done;
+
+ return(SUCCEED);
+
+done:
+ H5E_BEGIN_TRY
+ H5Sclose(sid);
+ H5Pclose(dcpl);
+ H5Dclose(did);
+ H5E_END_TRY
+
+ return(FAIL);
+} /* generate_dset() */
+
+int
+main(void)
+{
+ hid_t fid; /* File id */
+ hid_t fapl; /* File access property list */
+ hsize_t cur_dims[1]; /* Dimension sizes */
+ hsize_t max_dims[1]; /* Maximum dimension sizes */
+ hsize_t cur2_dims[2]; /* Current dimension sizes */
+ hsize_t max2_dims[2]; /* Maximum dimension sizes */
+ hid_t set_tid, esc_set_tid; /* Compound type id */
+ hid_t sub22_tid; /* Compound type id */
+ hid_t sub2_tid, esc_sub2_tid; /* Compound type id */
+ hid_t sub4_tid, esc_sub4_tid; /* Compound type id */
+ hid_t null_did, null_sid; /* H5S_NULL dataset & dataspace ids */
+ hid_t scalar_did, scalar_sid; /* H5S_SCALAR dataset & dataspace ids */
+ int one_data[ONE_DIMS0]; /* Buffer for data */
+ int two_data[TWO_DIMS0*TWO_DIMS1]; /* Buffer for data */
+ set_t one_cbuf[ONE_DIMS0]; /* Buffer for data with compound type */
+ set_t two_cbuf[TWO_DIMS0*TWO_DIMS1]; /* Buffer for data with compound type */
+ int i; /* Local index variable */
+
+ /* Create a file access property list */
+ if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
+ goto done;
+
+ /* Set to use latest library format */
+ if((H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST)) < 0)
+ goto done;
+
+ /* Create a file */
+ if((fid = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
+ goto done;
+
+ /* Initialization for one-dimensional dataset */
+ cur_dims[0] = ONE_DIMS0;
+ max_dims[0] = MAX_ONE_DIMS0;
+ for(i = 0; i < ONE_DIMS0; i++)
+ one_data[i] = i;
+
+    /* Generate DSET_ONE, DSET_NONE, DSET_NOMAX, DSET_ALLOC_LATE, DSET_ALLOC_EARLY */
+ if(generate_dset(fid, DSET_ONE, 1, cur_dims, max_dims, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_NONE, 1, cur_dims, NULL, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_NOMAX, 1, cur_dims, NULL, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_ALLOC_LATE, 1, cur_dims, max_dims, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_ALLOC_EARLY, 1, cur_dims, max_dims, H5T_NATIVE_INT, one_data) < 0)
+ goto done;
+
+ /* Initialization for two-dimensional dataset */
+ cur2_dims[0] = TWO_DIMS0;
+ cur2_dims[1] = TWO_DIMS1;
+ max2_dims[0] = MAX_TWO_DIMS0;
+ max2_dims[1] = MAX_TWO_DIMS1;
+
+ for(i = 0; i < (TWO_DIMS0 * TWO_DIMS1); i++)
+ two_data[i] = i;
+
+ /* Generate DSET_TWO */
+ if(generate_dset(fid, DSET_TWO, 2, cur2_dims, max2_dims, H5T_NATIVE_INT, two_data) < 0)
+ goto done;
+
+ /* Initialization for one-dimensional compound typed dataset */
+ cur_dims[0] = ONE_DIMS0;
+ max_dims[0] = MAX_ONE_DIMS0;
+
+ for (i = 0; i < ONE_DIMS0; i++) {
+ one_cbuf[i].field1 = 1;
+ one_cbuf[i].field2.a = 2;
+ one_cbuf[i].field2.c = 4;
+ one_cbuf[i].field2.b.a = 20;
+ one_cbuf[i].field2.b.b = 40;
+ one_cbuf[i].field2.b.c = 80;
+ one_cbuf[i].field3 = 3.0f;
+ one_cbuf[i].field4.a = 4;
+ one_cbuf[i].field4.b = 8;
+ } /* end for */
+
+ /* Create the compound type */
+ if((sub22_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub22_t))) < 0)
+ goto done;
+ if(H5Tinsert(sub22_tid, "a", HOFFSET(sub22_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(sub22_tid, "b", HOFFSET(sub22_t, b), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(sub22_tid, "c", HOFFSET(sub22_t, c), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((sub2_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub2_t))) < 0)
+ goto done;
+ if(H5Tinsert(sub2_tid, "a", HOFFSET(sub2_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(sub2_tid, "b", HOFFSET(sub2_t, b), sub22_tid) < 0)
+ goto done;
+ if(H5Tinsert(sub2_tid, "c", HOFFSET(sub2_t, c), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((sub4_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub4_t))) < 0)
+ goto done;
+ if(H5Tinsert(sub4_tid, "a", HOFFSET(sub4_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(sub4_tid, "b", HOFFSET(sub4_t, b), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((set_tid = H5Tcreate(H5T_COMPOUND, sizeof(set_t))) < 0)
+ goto done;
+ if(H5Tinsert(set_tid, "field1", HOFFSET(set_t, field1), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(set_tid, "field2", HOFFSET(set_t, field2), sub2_tid) < 0)
+ goto done;
+ if(H5Tinsert(set_tid, "field3", HOFFSET(set_t, field3), H5T_NATIVE_DOUBLE) < 0)
+ goto done;
+ if(H5Tinsert(set_tid, "field4", HOFFSET(set_t, field4), sub4_tid) < 0)
+ goto done;
+
+ /* Create the compound type with escape/separator characters */
+ if((esc_sub2_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub2_t))) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub2_tid, ".a", HOFFSET(sub2_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub2_tid, ",b", HOFFSET(sub2_t, b), sub22_tid) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub2_tid, "\\c", HOFFSET(sub2_t, c), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((esc_sub4_tid = H5Tcreate(H5T_COMPOUND, sizeof(sub4_t))) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub4_tid, "a.", HOFFSET(sub4_t, a), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(esc_sub4_tid, "b,", HOFFSET(sub4_t, b), H5T_NATIVE_INT) < 0)
+ goto done;
+
+ if((esc_set_tid = H5Tcreate(H5T_COMPOUND, sizeof(set_t))) < 0)
+ goto done;
+ if(H5Tinsert(esc_set_tid, "field,1", HOFFSET(set_t, field1), H5T_NATIVE_INT) < 0)
+ goto done;
+ if(H5Tinsert(esc_set_tid, "field2.", HOFFSET(set_t, field2), esc_sub2_tid) < 0)
+ goto done;
+ if(H5Tinsert(esc_set_tid, "field\\3", HOFFSET(set_t, field3), H5T_NATIVE_DOUBLE) < 0)
+ goto done;
+ if(H5Tinsert(esc_set_tid, "field4,", HOFFSET(set_t, field4), esc_sub4_tid) < 0)
+ goto done;
+
+ /* Generate DSET_CMPD, DSET_CMPD_ESC */
+ if(generate_dset(fid, DSET_CMPD, 1, cur_dims, max_dims, set_tid, one_cbuf) < 0)
+ goto done;
+ if(generate_dset(fid, DSET_CMPD_ESC, 1, cur_dims, max_dims, esc_set_tid, one_cbuf) < 0)
+ goto done;
+
+ /* Initialization for two-dimensional compound typed dataset */
+ cur2_dims[0] = TWO_DIMS0;
+ cur2_dims[1] = TWO_DIMS1;
+ max2_dims[0] = MAX_TWO_DIMS0;
+    max2_dims[1] = MAX_TWO_DIMS1;
+
+ for (i = 0; i < (TWO_DIMS0 * TWO_DIMS1); i++) {
+ two_cbuf[i].field1 = 1;
+ two_cbuf[i].field2.a = 2;
+ two_cbuf[i].field2.c = 4;
+ two_cbuf[i].field2.b.a = 20;
+ two_cbuf[i].field2.b.b = 40;
+ two_cbuf[i].field2.b.c = 80;
+ two_cbuf[i].field3 = 3.0f;
+ two_cbuf[i].field4.a = 4;
+ two_cbuf[i].field4.b = 8;
+ } /* end for */
+
+ /* Generate DSET_CMPD_TWO */
+ if(generate_dset(fid, DSET_CMPD_TWO, 2, cur2_dims, max2_dims, set_tid, two_cbuf) < 0)
+ goto done;
+
+ /* Create NULL dataspace */
+ if((null_sid = H5Screate(H5S_NULL)) < 0)
+ goto done;
+
+ /* Create the NULL dataset */
+ if((null_did = H5Dcreate2(fid, DSET_NULL, H5T_NATIVE_UINT, null_sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Create SCALAR dataspace */
+ if((scalar_sid = H5Screate(H5S_SCALAR)) < 0)
+ goto done;
+
+ /* Create the SCALAR dataset */
+ if((scalar_did = H5Dcreate2(fid, DSET_SCALAR, H5T_NATIVE_INT, scalar_sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ goto done;
+
+ /* Closing */
+ if(H5Dclose(scalar_did) < 0) goto done;
+ if(H5Sclose(scalar_sid) < 0) goto done;
+
+ if(H5Dclose(null_did) < 0) goto done;
+ if(H5Sclose(null_sid) < 0) goto done;
+
+ if(H5Tclose(sub22_tid) < 0) goto done;
+ if(H5Tclose(sub2_tid) < 0) goto done;
+ if(H5Tclose(sub4_tid) < 0) goto done;
+ if(H5Tclose(set_tid) < 0) goto done;
+ if(H5Tclose(esc_sub2_tid) < 0) goto done;
+ if(H5Tclose(esc_sub4_tid) < 0) goto done;
+ if(H5Tclose(esc_set_tid) < 0) goto done;
+
+ if(H5Pclose(fapl) < 0) goto done;
+ if(H5Fclose(fid) < 0) goto done;
+
+ exit(EXIT_SUCCESS);
+
+done:
+ H5E_BEGIN_TRY
+ H5Tclose(sub22_tid);
+ H5Tclose(sub2_tid);
+ H5Tclose(sub4_tid);
+ H5Tclose(set_tid);
+ H5Tclose(esc_sub2_tid);
+ H5Tclose(esc_sub4_tid);
+ H5Tclose(esc_set_tid);
+
+ H5Dclose(null_did);
+ H5Sclose(null_sid);
+ H5Dclose(scalar_did);
+ H5Sclose(scalar_sid);
+
+ H5Pclose(fapl);
+ H5Fclose(fid);
+ H5E_END_TRY
+
+ exit(EXIT_FAILURE);
+} /* main() */
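[Editorial note, not part of this patch] The chunked datasets above are created with larger maximum dimensions precisely so that a writer can extend them later, which is what the H5LD routines are intended to observe. A minimal append sketch for the one-dimensional DSET_ONE, assuming the FILE and DSET_ONE names defined in this generator (e.g. append_to_one_dim(FILE, DSET_ONE, new_data, ONE_DIMS0)):

    #include "hdf5.h"
    #include "hdf5_hl.h"

    /* Append "nelmts" integers to the 1-D dataset "dname"; afterwards
     * H5LDget_dset_dims() reports the grown size. */
    static herr_t
    append_to_one_dim(const char *fname, const char *dname, const int *data, hsize_t nelmts)
    {
        hid_t   fid = -1, did = -1, fsid = -1, msid = -1;
        hsize_t old_dims[1], new_dims[1], start[1];
        herr_t  ret = -1;

        if((fid = H5Fopen(fname, H5F_ACC_RDWR, H5P_DEFAULT)) < 0) goto done;
        if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0) goto done;

        /* Grow the dataset by nelmts (requires chunked storage with room in maxdims) */
        if(H5LDget_dset_dims(did, old_dims) < 0) goto done;
        new_dims[0] = old_dims[0] + nelmts;
        if(H5Dset_extent(did, new_dims) < 0) goto done;

        /* Write the new elements into the newly added region */
        start[0] = old_dims[0];
        if((fsid = H5Dget_space(did)) < 0) goto done;
        if(H5Sselect_hyperslab(fsid, H5S_SELECT_SET, start, NULL, &nelmts, NULL) < 0) goto done;
        if((msid = H5Screate_simple(1, &nelmts, NULL)) < 0) goto done;
        if(H5Dwrite(did, H5T_NATIVE_INT, msid, fsid, H5P_DEFAULT, data) < 0) goto done;

        ret = 0;
    done:
        H5E_BEGIN_TRY {
            H5Sclose(msid);
            H5Sclose(fsid);
            H5Dclose(did);
            H5Fclose(fid);
        } H5E_END_TRY;
        return ret;
    }

After this runs, H5LDget_dset_dims() on DSET_ONE reports old_dims[0] + nelmts, and H5LDget_dset_elmts() called with the old sizes as prev_dims returns just the appended integers.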
+
diff --git a/hl/test/test_ld.c b/hl/test/test_ld.c
new file mode 100644
index 0000000..df721e6
--- /dev/null
+++ b/hl/test/test_ld.c
@@ -0,0 +1,1430 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+* Copyright by The HDF Group. *
+* Copyright by the Board of Trustees of the University of Illinois. *
+* All rights reserved. *
+* *
+* This file is part of HDF5. The full HDF5 copyright notice, including *
+* terms governing use, modification, and redistribution, is contained in *
+* the files COPYING and Copyright.html. COPYING can be found at the root *
+* of the source code distribution tree; Copyright.html can be found at the *
+* root level of an installed copy of the electronic HDF5 document set and *
+* is linked from the top-level documents page. It can also be found at *
+* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+* access to either file, you may request a copy from help@hdfgroup.org. *
+* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <stdlib.h>
+#include <string.h>
+#include <stdio.h>
+#include <errno.h>
+#include <setjmp.h>
+#include "h5hltest.h"
+#include "H5srcdir.h"
+#include "H5LDpublic.h"
+
+/* File name */
+#define FILE "test_ld.h5"
+/* Copied file name */
+#define COPY_FILENAME "COPY_test_ld.h5"
+
+/* Dataset names */
+#define DSET_ONE "DSET_ONE"
+#define DSET_ALLOC_LATE "DSET_ALLOC_LATE"
+#define DSET_ALLOC_EARLY "DSET_ALLOC_EARLY"
+#define DSET_TWO "DSET_TWO"
+#define TWO_DIM_1 4
+#define TWO_DIM_2 10
+#define DSET_CMPD "DSET_CMPD"
+#define DSET_CMPD_ESC "DSET_CMPD_ESC"
+#define DSET_CMPD_TWO "DSET_CMPD_TWO"
+#define DSET_NULL "DSET_NULL"
+#define DSET_SCALAR "DSET_SCALAR"
+
+/* Selected compound field members for testing */
+#define VALID_FIELDS1 "field1,field2.a,field3,field4" /* TEMPORARY */
+#define VALID_FIELDS2 "field2.b.a,field2.c,field4.b"
+
+#define INVALID_FIELDS1 "field2.k.a,field2.c,field4.k"
+#define INVALID_FIELDS2 "field2.b.a,field2.c,field4.b."
+#define INVALID_FIELDS3 "field2.b.a,,field2.c,field4.b"
+
+#define VALID_ESC_FIELDS1 "field\\,1,field2\\..\\.a,field\\\\3,field4\\,"
+#define VALID_ESC_FIELDS2 "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\,"
+
+#define INVALID_ESC_FIELDS1 "field2\\..\\,k.a,field2\\..\\\\c,field4\\,.k\\,"
+#define INVALID_ESC_FIELDS2 "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\,."
+#define INVALID_ESC_FIELDS3 "field2\\..\\,,b.a,field2\\..\\\\c,field4\\,.b\\,"
+
+/*
+ * Test variations (retained original) for one-dimensional dataset:
+ * Varies from 10->13; 10->9, 10->10, 10->1, 10->11
+ */
+#define ONE_NTESTS 5
+int one_tests[ONE_NTESTS] = {3, -1, 0, -9, 1};
+
+/*
+ * Test variations (retained original) for two-dimensional dataset:
+ * Varies from {4,10}->{6,12}; {4,10}->{6,9}; {4,10}->{6,10};
+ * {4,10}->{3,12}; {4,10}->{3,9}; {4,10}->{3,10};
+ * {4,10}->{4,12}; {4,10}->{4,9}; {4,10}->{4,10}
+ */
+#define TWO_NTESTS 9
+int two_tests[TWO_NTESTS][2] = { {2,2}, {2,-1}, {2,0},
+ {-1,2}, {-1,-1}, {-1,0},
+ {0,2}, {0,-1}, {0,0} };
+
+
+/* Verify that the two input values are the same */
+#define VERIFY_EQUAL(_x, _y) \
+{ \
+ long __x = (long)_x, __y = (long)_y; \
+ if(__x != __y) TEST_ERROR \
+}
+
+/* Temporary buffer for reading in the test file */
+#define TMP_BUF_SIZE 2048
+char g_tmp_buf[TMP_BUF_SIZE];
+
+/* Macros for verifying compound fields */
+/* Verify all fields */
+#define VERIFY_ELMTS_ALL(ent1, ent2) { \
+ VERIFY_EQUAL(ent1.field1, ent2.field1); \
+ VERIFY_EQUAL(ent1.field2.a, ent2.field2.a); \
+ VERIFY_EQUAL(ent1.field2.b.a, ent2.field2.b.a); \
+ VERIFY_EQUAL(ent1.field2.b.b, ent2.field2.b.b); \
+ VERIFY_EQUAL(ent1.field2.b.c, ent2.field2.b.c); \
+ VERIFY_EQUAL(ent1.field2.c, ent2.field2.c); \
+ VERIFY_EQUAL(ent1.field3, ent2.field3); \
+ VERIFY_EQUAL(ent1.field4.a, ent2.field4.a); \
+}
+
+/* Verify fields selected in VALID_FIELDS1 */
+#define VERIFY_ELMTS_VALID1(ent1, ent2) { \
+ VERIFY_EQUAL(ent1.field1, ent2.field1); \
+ VERIFY_EQUAL(ent1.field2_a, ent2.field2.a); \
+ VERIFY_EQUAL(ent1.field3, ent2.field3); \
+ VERIFY_EQUAL(ent1.field4.a, ent2.field4.a); \
+ VERIFY_EQUAL(ent1.field4.b, ent2.field4.b); \
+}
+
+/* Verify fields selected in VALID_FIELDS2 */
+#define VERIFY_ELMTS_VALID2(ent1, ent2) { \
+ VERIFY_EQUAL(ent1.field2_b_a, ent2.field2.b.a); \
+ VERIFY_EQUAL(ent1.field2_c, ent2.field2.c); \
+ VERIFY_EQUAL(ent1.field4_b, ent2.field4.b); \
+}
+
+/* The types of 2-dimensional dataset: DSET_TWO or DSET_CMPD_TWO */
+#define TWO_NONE 0 /* DSET_TWO */
+#define TWO_CMPD_NULL 1 /* DSET_CMPD_TWO with NULL fields */
+#define TWO_CMPD_VALID1 2 /* DSET_CMPD_TWO with VALID_FIELDS1 or VALID_ESC_FIELDS1 */
+#define TWO_CMPD_VALID2 3 /* DSET_CMPD_TWO with VALID_FIELDS2 or VALID_ESC_FIELDS2 */
+
+#define VERIFY_ELMTS(type, k, ind, _ldbuf, _buf) { \
+ if(type == TWO_NONE) { \
+ int *iib = (int *)_ldbuf; \
+ int *ib = (int *)_buf; \
+ \
+ VERIFY_EQUAL(iib[k], ib[ind + n]) \
+ } else if(type == TWO_CMPD_NULL) { \
+ set_t *ccb = (set_t *)_ldbuf; \
+ set_t *cb = (set_t *)_buf; \
+ \
+ VERIFY_ELMTS_ALL(ccb[k], cb[ind + n]) \
+ } else if(type == TWO_CMPD_VALID1) { \
+ test_valid_fields1 *vb1 = (test_valid_fields1 *)_ldbuf; \
+ set_t *cb = (set_t *)_buf; \
+ \
+ VERIFY_ELMTS_VALID1(vb1[k], cb[ind + n]) \
+ } else if(type == TWO_CMPD_VALID2) { \
+ test_valid_fields2 *vb2 = (test_valid_fields2 *)_ldbuf; \
+ set_t *cb = (set_t *)_buf; \
+ \
+ VERIFY_ELMTS_VALID2(vb2[k], cb[ind + n]) \
+ } \
+}
+
+/* Tests for test_LD_elmts_pipe() */
+#define ONE_TESTS 3
+int onetests[ONE_TESTS] = {3, 9, 1};
+#define TWO_TESTS 5
+int twotests[TWO_TESTS][2] = { {2,2}, {2,-1}, {2,0}, {-1,2}, {0,2} };
+
+
+static herr_t test_LD_dims_params(const char *file);
+static herr_t test_LD_dims(const char *file);
+
+static herr_t test_LD_size(const char *file);
+
+static herr_t test_LD_elmts_invalid(const char *file);
+static herr_t test_LD_elmts_one(const char *file, const char *dname, const char *fields);
+static herr_t test_LD_elmts_two(const char *file, const char *dname, const char *fields);
+
+static herr_t verify_elmts_two(int type, hsize_t *ext_dims, hsize_t *prev_dims, void *_ldbuf, void *_buf);
+
+/* data structures for compound data type */
+typedef struct sub22_t {
+ int a;
+ int b;
+ int c;
+} sub22_t;
+
+typedef struct sub2_t {
+ int a;
+ sub22_t b;
+ int c;
+} sub2_t;
+
+typedef struct sub4_t {
+ int a;
+ int b;
+} sub4_t;
+
+typedef struct set_t {
+ int field1;
+ sub2_t field2;
+ double field3;
+ sub4_t field4;
+} set_t;
+
+/* NOTE:
+ * This will fail on heiwa and amani when VALID_FIELDS1 is "field1,field3,field4"
+ * because of alignment problems:
+ * amani and heiwa - 8 byte alignment
+ *	jam - 4 byte alignment
+ * This will need to be fixed in the library for H5Tget_native_type().
+ */
+/* VALID_FIELDS1 "field1,field2.a,field3,field4" */
+/* VALID_ESC_FIELDS1 "field\\,1,field2\\..\\.a,field\\\\3,field4\\," */
+typedef struct test_valid_fields1 {
+ int field1;
+ int field2_a;
+ double field3;
+ sub4_t field4;
+} test_valid_fields1;
+
+/* VALID_FIELDS2 "field2.b.a,field2.c,field4.b" */
+/* VALID_ESC_FIELDS2 "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\," */
+typedef struct test_valid_fields2 {
+ int field2_b_a;
+ int field2_c;
+ int field4_b;
+} test_valid_fields2;
+
+
+/* Temporary buffers for tests: test_LD_elmts_one() & test_LD_elmts_two() */
+#define TEST_BUF_SIZE 100
+int *iibuf; /* buffer for storing retrieved elements */
+int *ibuf; /* buffer for storing retrieved elements (integer) */
+set_t *cbuf; 		/* buffer for storing retrieved elements (compound) */
+set_t *ccbuf; 		/* buffer for storing retrieved elements (compound) */
+test_valid_fields1 *vbuf1; /* buffer for storing retrieved elements (FIELDS1) */
+test_valid_fields2 *vbuf2; /* buffer for storing retrieved elements (FIELDS2) */
+
+
+/*
+ *********************************************************************************
+ *
+ * Testing for the High Level public routine: H5LDget_dset_dims()
+ * 1) An invalid dataset id
+ * 2) "DSET_ALLOC_EARLY": NULL cur_dims
+ * 3) "DSET_ALLOC_LATE": nonNULL cur_dims
+ * 4) "DSET_CMPD_TWO": nonNULL cur_dims
+ * 5) "DSET_NULL": nonNULL cur_dims
+ * 6) "DSET_SCALAR": nonNULL cur_dims
+ *
+ *********************************************************************************
+ */
+static herr_t
+test_LD_dims_params(const char *file)
+{
+ hid_t fid=-1; /* file identifier */
+ hid_t did=-1; /* dataset identifier */
+    hsize_t one_cur_dims[1]; 	/* current dimension sizes for 1-dimensional dataset */
+ hsize_t two_cur_dims[2]; /* current dimension sizes for 2-dimensional dataset */
+ hid_t invalid_id = -1;
+ herr_t ret; /* return value */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_dims");
+
+    /* Open the test file */
+ if((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * 1. Verify failure with negative dataset id
+ */
+ H5E_BEGIN_TRY {
+ ret = H5LDget_dset_dims(invalid_id, one_cur_dims);
+ } H5E_END_TRY;
+ VERIFY_EQUAL(ret, FAIL)
+
+ /*
+ * 2. Verify failure for NULL cur_dims
+ */
+ if((did = H5Dopen2(fid, DSET_ALLOC_EARLY, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ H5E_BEGIN_TRY {
+ ret = H5LDget_dset_dims(did, NULL);
+ } H5E_END_TRY;
+ VERIFY_EQUAL(ret, FAIL)
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * 3. Verify for nonNULL cur_dims
+ */
+ if((did = H5Dopen2(fid, DSET_ALLOC_LATE, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ if(H5LDget_dset_dims(did, one_cur_dims) < 0)
+ FAIL_STACK_ERROR
+ VERIFY_EQUAL(one_cur_dims[0], 10)
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * 4. Verify nonNULL cur_dims for a 2-dimensional dataset
+ */
+ if((did = H5Dopen2(fid, DSET_CMPD_TWO, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ if(H5LDget_dset_dims(did, two_cur_dims) < 0)
+ FAIL_STACK_ERROR
+ VERIFY_EQUAL(two_cur_dims[0], TWO_DIM_1)
+ VERIFY_EQUAL(two_cur_dims[1], TWO_DIM_2)
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * 5. Verify nonNULL cur_dims for dataset with H5S_NULL dataspace
+ */
+ one_cur_dims[0] = 0;
+
+ if((did = H5Dopen2(fid, DSET_NULL, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ if(H5LDget_dset_dims(did, one_cur_dims) < 0)
+ FAIL_STACK_ERROR
+ VERIFY_EQUAL(one_cur_dims[0], 0)
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * 6. Verify nonNULL cur_dims for dataset with H5S_SCALAR dataspace
+ */
+ one_cur_dims[0] = 0;
+
+ if((did = H5Dopen2(fid, DSET_SCALAR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ if(H5LDget_dset_dims(did, one_cur_dims) < 0)
+ FAIL_STACK_ERROR
+ VERIFY_EQUAL(one_cur_dims[0], 0)
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /* Close the file */
+ if(H5Fclose(fid) < 0)
+ FAIL_STACK_ERROR
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+
+ return(-1);
+} /* test_LD_dims_params() */
+
+
+/*
+ *********************************************************************************
+ *
+ * Testing for the High Level public routine: H5LDget_dset_dims()
+ * Verify that the dimension sizes retrieved via H5LDget_dset_dims() are correct
+ * for the following cases:
+ *
+ * DSET_ONE: one-dimensional dataset
+ * 1. Increase dims[0]
+ * 2. Decrease dims[0]
+ * 3. same dims[0]
+ * 4. Decrease dims[0]
+ * 5. Increase dims[0]
+ *
+ * one_tests[ONE_NTESTS] = {3, -1, 0, -9, 1}
+ *		Varies from 10->13; 10->9, 10->10, 10->1, 10->11
+ *
+ * DSET_TWO: two-dimensional dataset
+ * 1. Increase dims[0], increase dims[1]
+ * 2. Increase dims[0], decrease dims[1]
+ * 3. Increase dims[0], same dims[1]
+ * 4. Decrease dims[0], increase dims[1]
+ * 5. Decrease dims[0], decrease dims[1]
+ * 6. Decrease dims[0], same dims[1]
+ * 7. same dims[0], increase dims[1]
+ * 8. same dims[0], decrease dims[1]
+ * 9. same dims[0], same dims[1]
+ *
+ * two_tests[TWO_NTESTS][2] = { {2,2}, {2,-1}, {2,0},
+ * {-1,2}, {-1,-1}, {-1,0},
+ * {0,2}, {0,-1}, {0,0} }
+ * Varies from {4,10}->{6,12}; {4,10}->{6,9}; {4,10}->{6,10};
+ * {4,10}->{3,12}; {4,10}->{3,9}; {4,10}->{3,10};
+ * {4,10}->{4,12}; {4,10}->{4,9}; {4,10}->{4,10}
+ *
+ *********************************************************************************
+ */
+static herr_t
+test_LD_dims(const char *file)
+{
+ hid_t fid=-1; /* file identifier */
+ hid_t did=-1; /* dataset identifier */
+    hsize_t one_prev_dims[1]; 	/* original dimension sizes for 1-dimensional dataset */
+    hsize_t one_cur_dims[1]; 	/* current dimension sizes for 1-dimensional dataset */
+    hsize_t one_ext_dims[1]; 	/* extended dimension sizes for 1-dimensional dataset */
+ hsize_t two_prev_dims[2]; /* original dimension sizes for 2-dimensional dataset */
+ hsize_t two_cur_dims[2]; /* current dimension sizes for 2-dimensional dataset */
+ hsize_t two_ext_dims[2]; /* extended dimension sizes for 2-dimensional dataset*/
+ int i; /* local index variable */
+
+ TESTING("H5LDget_dset_dims with H5Dset_extent");
+
+ /* Make a copy of the test file */
+ if(h5_make_local_copy(file, COPY_FILENAME) < 0)
+ TEST_ERROR
+
+ /* Open the copied file */
+ if((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Testing with one-dimensional dataset: DSET_ONE
+ */
+ if((did = H5Dopen2(fid, DSET_ONE, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve dimension sizes */
+ if(H5LDget_dset_dims(did, one_prev_dims) < 0)
+ FAIL_STACK_ERROR
+
+ for(i = 0; i < ONE_NTESTS; i++) {
+
+ /* Set up the extended dimension sizes */
+ one_ext_dims[0] = (hsize_t)((int)one_prev_dims[0] + one_tests[i]);
+
+ /* Change the dimension size */
+ if(H5Dset_extent(did, one_ext_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve the dimension size */
+ if(H5LDget_dset_dims(did, one_cur_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify that the retrieved dimension size is correct as expected */
+ VERIFY_EQUAL(one_cur_dims[0], one_ext_dims[0])
+ }
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Testing with two-dimensional dataset: DSET_TWO
+ */
+ if((did = H5Dopen2(fid, DSET_TWO, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve the dimension sizes */
+ if(H5LDget_dset_dims(did, two_prev_dims) < 0)
+ FAIL_STACK_ERROR
+
+ for(i = 0; i < TWO_NTESTS; i++) {
+
+ /* Set up the extended dimension sizes */
+ two_ext_dims[0] = (hsize_t)((int)two_prev_dims[0] + two_tests[i][0]);
+ two_ext_dims[1] = (hsize_t) ((int)two_prev_dims[1] + two_tests[i][1]);
+
+ /* Change the dimension sizes */
+ if(H5Dset_extent(did, two_ext_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve the dimension sizes */
+ if(H5LDget_dset_dims(did, two_cur_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify that the retrieved dimension sizes are correct as expected */
+ VERIFY_EQUAL(two_cur_dims[0], two_ext_dims[0])
+ VERIFY_EQUAL(two_cur_dims[1], two_ext_dims[1])
+ } /* end TWO_NTESTS */
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /* Close the file */
+ if(H5Fclose(fid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+
+} /* test_LD_dims() */
+
+
+/*
+ **********************************************************************************
+ *
+ * Testing for the High Level public routine: H5LDget_dset_type_size()
+ * Verify that the data type size returned via H5LDget_dset_type_size()
+ * are correct for the following cases:
+ *
+ * Verify failure for an invalid dataset id
+ *
+ * DSET_CMPD: one-dimensional dataset with compound type
+ * 1. The whole element
+ * 2. VALID_FIELDS1: "field1,field2.a,field3,field4"
+ * 3. VALID_FIELDS2: "field2.b.a,field2.c,field4.b"
+ * 4. INVALID_FIELDS1: "field2.k.a,field2.c,field4.k"
+ * 5. INVALID_FIELDS2: "field2.b.a,field2.c,field4.b."
+ * 6. INVALID_FIELDS3: "field2.b.a,,field2.c,field4.b"
+ *
+ * DSET_CMPD_ESC: one-dimensional dataset with compound type and
+ * member names with escape/separator characters
+ * 1. The whole element
+ * 2. VALID_ESC_FIELDS1: "field\\,1,field2\\..\\.a,field\\\\3,field4\\,"
+ * 3. VALID_ESC_FIELDS2: "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\,"
+ * 4. INVALID_ESC_FIELDS1: "field2\\..\\,k.a,field2\\..\\\\c,field4\\,.k\\,"
+ * 5. INVALID_ESC_FIELDS2: "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\,."
+ * 6. INVALID_ESC_FIELDS3: "field2\\..\\,,b.a,field2\\..\\\\c,field4\\,.b\\,"
+ *
+ **********************************************************************************
+ */
+static herr_t
+test_LD_size(const char *file)
+{
+ hid_t fid=-1; /* file identifier */
+ hid_t did=-1; /* dataset identifier */
+ hid_t dtid=-1; /* dataset's datatype identifier */
+ hid_t invalid_id=-1;
+ hid_t memb0_tid=-1; /* type identifier for a member in the compound type */
+ hid_t memb1_tid=-1; /* type identifier for a member in the compound type */
+ hid_t memb2_tid=-1; /* type identifier for a member in the compound type */
+ hid_t memb3_tid=-1; /* type identifier for a member in the compound type */
+ hid_t memb_tid=-1; /* type identifier for a member in the compound type */
+ hid_t memb_tid2=-1; /* type identifier for a member in the compound type */
+ size_t dsize; /* size of the dataset's datatype */
+ size_t ck_dsize; /* size of the dataset's datatype to be checked against */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_type_size");
+
+ /* Open the file */
+ if((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Verify failure with an invalid dataset id
+ */
+ H5E_BEGIN_TRY {
+ dsize = H5LDget_dset_type_size(invalid_id, NULL);
+ } H5E_END_TRY;
+ VERIFY_EQUAL(dsize, 0)
+
+ /*
+ * Testing one-dimensional dataset with compound datatype:
+ * DSET_CMPD
+ */
+
+ /* Open dataset DSET_CMPD */
+ if((did = H5Dopen2(fid, DSET_CMPD, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Return size of the whole element */
+ if((dsize = H5LDget_dset_type_size(did, NULL)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataset's datatype and then its datatype size */
+ if((dtid = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ if((ck_dsize = H5Tget_size(dtid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #1 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /* Get datatype id for each member */
+ if((memb0_tid = H5Tget_member_type(dtid, 0)) < 0) /* "field1" */
+ FAIL_STACK_ERROR
+ if((memb1_tid = H5Tget_member_type(dtid, 1)) < 0) /* "field2" */
+ FAIL_STACK_ERROR
+ if((memb2_tid = H5Tget_member_type(dtid, 2)) < 0) /* "field3" */
+ FAIL_STACK_ERROR
+ if((memb3_tid = H5Tget_member_type(dtid, 3)) < 0) /* "field4" */
+ FAIL_STACK_ERROR
+
+ /* Obtain size for VALID_FIELDS1: "field1,field2.a,field3,field4" */
+ if((dsize = H5LDget_dset_type_size(did, VALID_FIELDS1)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the datatype size for "field1" */
+ if((ck_dsize = H5Tget_size(memb0_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field2.a" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 0)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field3" */
+ if((ck_dsize += H5Tget_size(memb2_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field4" */
+ if((ck_dsize += H5Tget_size(memb3_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #2 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /* Obtain datatype size for VALID_FIELDS2: "field2.b.a,field2.c,field4.b" */
+ if((dsize = H5LDget_dset_type_size(did, VALID_FIELDS2)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the datatype size for "field2.b.a" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 1)) < 0)
+ FAIL_STACK_ERROR
+ if((memb_tid2 = H5Tget_member_type(memb_tid, 0)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize = H5Tget_size(memb_tid2)) == 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid2) < 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field2.c" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 2)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field4.b" */
+ if((memb_tid = H5Tget_member_type(memb3_tid, 1)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #3 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /*
+ * Verify failure for the following invalid nested fields:
+ * INVALID_FIELDS1: "field2.k.a,field2.c,field4.k"
+ * INVALID_FIELDS2: "field2.b.a,field2.c,field4.b."
+ * INVALID_FIELDS3: "field2.b.a,,field2.c,field4.b"
+ */
+ /* Verify failure for case #4 */
+ dsize = H5LDget_dset_type_size(did, INVALID_FIELDS1);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Verify failure for case #5 */
+ dsize = H5LDget_dset_type_size(did, INVALID_FIELDS2);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Verify failure for case #6 */
+ dsize = H5LDget_dset_type_size(did, INVALID_FIELDS3);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Closing */
+ if(H5Tclose(memb0_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb1_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb2_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb3_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(dtid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Testing one-dimensional dataset with compound datatype and
+ * member names containing escape/separator characters:
+ * DSET_CMPD_ESC
+ */
+
+ /* Open dataset DSET_CMPD_ESC */
+ if((did = H5Dopen2(fid, DSET_CMPD_ESC, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Return size of the whole element */
+ if((dsize = H5LDget_dset_type_size(did, NULL)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataset's datatype and then its datatype size */
+ if((dtid = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize = H5Tget_size(dtid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #1 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /* Get datatype id for each member */
+ if((memb0_tid = H5Tget_member_type(dtid, 0)) < 0) /* "field,1" */
+ FAIL_STACK_ERROR
+ if((memb1_tid = H5Tget_member_type(dtid, 1)) < 0) /* "field2." */
+ FAIL_STACK_ERROR
+ if((memb2_tid = H5Tget_member_type(dtid, 2)) < 0) /* "field\3" */
+ FAIL_STACK_ERROR
+ if((memb3_tid = H5Tget_member_type(dtid, 3)) < 0) /* "field4," */
+ FAIL_STACK_ERROR
+
+ /* Obtain size for VALID_ESC_FIELDS1: "field\\,1,field2\\..\\.a,field\\\\3,field4\\," */
+ if((dsize = H5LDget_dset_type_size(did, VALID_ESC_FIELDS1)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the datatype size for "field\\,1" */
+ if((ck_dsize = H5Tget_size(memb0_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field2\\..\\.a" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 0)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field\\\\3" */
+ if((ck_dsize += H5Tget_size(memb2_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field4\\," */
+ if((ck_dsize += H5Tget_size(memb3_tid)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #2 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /* Obtain datatype size for VALID_ESC_FIELDS2:
+ "field2\\..\\,b.a,field2\\..\\\\c,field4\\,.b\\," */
+ if((dsize = H5LDget_dset_type_size(did, VALID_ESC_FIELDS2)) == 0)
+ FAIL_STACK_ERROR
+
+ /* Get the datatype size for "field2\..,b.a" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 1)) < 0)
+ FAIL_STACK_ERROR
+ if((memb_tid2 = H5Tget_member_type(memb_tid, 0)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize = H5Tget_size(memb_tid2)) == 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid2) < 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field2\..\\c" */
+ if((memb_tid = H5Tget_member_type(memb1_tid, 2)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Add the datatype size for "field4\,.b\," */
+ if((memb_tid = H5Tget_member_type(memb3_tid, 1)) < 0)
+ FAIL_STACK_ERROR
+ if((ck_dsize += H5Tget_size(memb_tid)) == 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb_tid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify case #3 */
+ VERIFY_EQUAL(dsize, ck_dsize)
+
+ /*
+ * Verify failure for the following invalid nested fields:
+ * INVALID_ESC_FIELDS1: "field2\..\,k.a,field2\..\\c,field4\,.k\,"
+ * INVALID_ESC_FIELDS2: "field2\..\,b.a,field2\..\\c,field4\,.b\,."
+ * INVALID_ESC_FIELDS3: "field2\..\,,b.a,field2\..\\c,field4\,.b\,"
+ */
+ /* Verify failure for case #4 */
+ dsize = H5LDget_dset_type_size(did, INVALID_ESC_FIELDS1);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Verify failure for case #5 */
+ dsize = H5LDget_dset_type_size(did, INVALID_ESC_FIELDS2);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Verify failure for case #6 */
+ dsize = H5LDget_dset_type_size(did, INVALID_ESC_FIELDS3);
+ VERIFY_EQUAL(dsize, 0)
+
+ /* Closing */
+ if(H5Tclose(memb0_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb1_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb2_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(memb3_tid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Tclose(dtid) < 0)
+ FAIL_STACK_ERROR
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ if(H5Fclose(fid) < 0)
+ FAIL_STACK_ERROR
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Tclose(memb0_tid);
+ H5Tclose(memb1_tid);
+ H5Tclose(memb2_tid);
+ H5Tclose(memb3_tid);
+ H5Tclose(memb_tid);
+ H5Tclose(memb_tid2);
+ H5Tclose(dtid);
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+
+} /* test_LD_size() */
+
+
+/*
+ **************************************************************************************
+ * Testing for the High Level public routine: H5LDget_dset_elmts()
+ * Verify failures when calling H5LDget_dset_elmts() with the following
+ * invalid conditions:
+ *
+ * A. DSET_TWO: two-dimensional dataset
+ * 1. CUR_DIMS and PREV_DIMS are NULL
+ * 2. PREV_DIMS is NULL
+ * 3. CUR_DIMS is NULL
+ * 4. FIELDS is non-NULL but the dataset does not have a compound datatype
+ * 5. BUF is NULL
+ * 6. CUR_DIMS is not greater than PREV_DIMS
+ *
+ * B. DSET_CMPD: one-dimensional dataset with compound type
+ * 1. Invalid dataset id
+ * 2. FIELDS does not contain valid members of the compound type
+ *
+ **************************************************************************************
+ */
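+
+/*
+ * For reference, a summary of the conditions exercised below (not an
+ * authoritative specification): a call of the form
+ *
+ *     H5LDget_dset_elmts(did, prev_dims, cur_dims, fields, buf)
+ *
+ * is expected to fail unless all of the following hold:
+ *     - did is a valid dataset identifier
+ *     - prev_dims, cur_dims and buf are non-NULL
+ *     - fields is NULL, or names valid members of a compound datatype
+ *     - cur_dims[i] > prev_dims[i] for at least one dimension i
+ */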
+static int
+test_LD_elmts_invalid(const char *file)
+{
+ hid_t fid=-1; /* file identifier */
+ hid_t did=-1; /* dataset identifier */
+ hid_t sid=-1; /* dataspace identifier */
+ hid_t invalid_id=-1;
+ int ret; /* return value */
+ hsize_t cur_dims[2]; /* current dimension sizes of the dataset */
+ hsize_t prev_dims[2]; /* previous dimension sizes of the dataset */
+ char tbuf[2]; /* temporary buffer for testing */
+ int ndims; /* # of dimension sizes */
+ int i; /* local index variable */
+
+ const char *filename = H5_get_srcdir_filename(file);
+
+ TESTING("H5LDget_dset_elmts on invalid conditions");
+
+ /* Open the file */
+ if((fid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Testing two-dimensional dataset: DSET_TWO
+ */
+
+ /* Open dataset: DSET_TWO */
+ if((did = H5Dopen2(fid, DSET_TWO, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Verify failure from case #1: cur_dims and prev_dims are NULL */
+ ret = H5LDget_dset_elmts(did, NULL, NULL, NULL, NULL);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Verify failure from case #2: prev_dims is NULL */
+ ret = H5LDget_dset_elmts(did, cur_dims, NULL, NULL, NULL);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Verify failure from case #3: cur_dims is NULL */
+ ret = H5LDget_dset_elmts(did, NULL, prev_dims, NULL, NULL);
+ VERIFY_EQUAL(ret, FAIL)
+
+ if((sid = H5Dget_space(did)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the # of dimensions and current dimension sizes */
+ if((ndims = H5Sget_simple_extent_dims(sid, cur_dims, NULL)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Set up valid cur_dims and prev_dims */
+ for(i = 0; i < ndims; i++)
+ prev_dims[i] = cur_dims[i] - 1;
+
+ /* Verify failure from case #4: FIELDS is non-NULL but the dataset does not have a compound datatype */
+ ret = H5LDget_dset_elmts(did, prev_dims, cur_dims, "field1", tbuf);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Verify failure from case #5: BUF is NULL */
+ ret = H5LDget_dset_elmts(did, prev_dims, cur_dims, NULL, NULL);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Verify failure from case #6: cur_dims is not greater than prev_dims */
+ cur_dims[0] = prev_dims[0] - 1;
+ cur_dims[1] = prev_dims[1] - 1;
+ ret = H5LDget_dset_elmts(did, prev_dims, cur_dims, NULL, tbuf);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Close DSET_TWO */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /*
+ * Testing one-dimensional dataset with compound datatype:
+ * DSET_CMPD
+ */
+
+ /* Verify failure from case #1: an invalid dataset id */
+ H5E_BEGIN_TRY {
+ ret = H5LDget_dset_elmts(invalid_id, prev_dims, cur_dims, NULL, tbuf);
+ } H5E_END_TRY;
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Open dataset: DSET_CMPD */
+ if((did = H5Dopen2(fid, DSET_CMPD, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Retrieve the current dimension sizes */
+ if(H5LDget_dset_dims(did, cur_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Set up valid cur_dims, prev_dims */
+ prev_dims[0] = cur_dims[0] - 1;
+
+ /* Verify failure from case #2: invalid FIELDS */
+ ret = H5LDget_dset_elmts(did, prev_dims, cur_dims, "field2.k.a,field2.c,field4.k", tbuf);
+ VERIFY_EQUAL(ret, FAIL)
+
+ /* Close DSET_CMPD */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+
+ /* Close the file */
+ if(H5Fclose(fid) < 0)
+ FAIL_STACK_ERROR
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Sclose(sid);
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+
+} /* test_LD_elmts_invalid() */
+
+
+/*
+ **************************************************************************************
+ * Testing for the High Level public routine: H5LDget_dset_elmts()
+ * Verify that elements retrieved via H5LDget_dset_elmts() are as expected
+ * when the dataset's dimension sizes are changed according to one_tests[]:
+ *
+ * one-dimensional dataset :
+ * DSET_ONE with NULL fields
+ * DSET_CMPD with fields: NULL, VALID_FIELDS1, VALID_FIELDS2
+ * DSET_CMPD_ESC with fields: NULL, VALID_ESC_FIELDS1, VALID_ESC_FIELDS2
+ *
+ * case #1. increase dims[0]
+ * case #2. decrease dims[0] (failure)
+ * case #3. same dims[0] (failure)
+ * case #4. decrease dims[0] (failure)
+ * case #5. increase dims[0]
+ *
+ **************************************************************************************
+ */
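+
+/*
+ * Illustrative sketch of the usage pattern exercised below; `new_elmts' is a
+ * hypothetical, suitably sized buffer and the producer extending the dataset
+ * is not shown.  Only the elements appended since the last check are read:
+ *
+ *     hsize_t prev_dims[1], cur_dims[1];
+ *
+ *     H5LDget_dset_dims(did, prev_dims);      (remember the current size)
+ *         ...the dataset is extended and new data written...
+ *     H5LDget_dset_dims(did, cur_dims);       (get the new size)
+ *     if(cur_dims[0] > prev_dims[0])
+ *         H5LDget_dset_elmts(did, prev_dims, cur_dims, NULL, new_elmts);
+ */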
+static herr_t
+test_LD_elmts_one(const char *file, const char *dname, const char *fields)
+{
+ hid_t fid=-1; /* file identifier */
+ hid_t did=-1; /* dataset identifier */
+ hid_t dtype=-1; /* dataset's data type */
+ hsize_t ext_dims[1]; /* extended dimension sizes of the dataset */
+ hsize_t prev_dims[1]; /* previous dimension sizes of the dataset */
+ int i, j; /* local index variables */
+ int ret = 0; /* return value */
+
+ TESTING("H5LDget_dset_elmts: one-dimensional dataset");
+
+ /* Copy the test file */
+ if(h5_make_local_copy(file, COPY_FILENAME) < 0)
+ TEST_ERROR
+
+ for(i = 0; i < TEST_BUF_SIZE; i++) {
+ cbuf[i].field1 = i;
+ cbuf[i].field2.a = i;
+ cbuf[i].field2.b.a = i;
+ cbuf[i].field2.b.b = i;
+ cbuf[i].field2.b.c = i;
+ cbuf[i].field2.c = i;
+ cbuf[i].field3 = (double)i;
+ cbuf[i].field4.a = i;
+ cbuf[i].field4.b = i;
+ ibuf[i] = i;
+ } /* end for */
+
+ /* Open the copied file */
+ if((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Open the dataset */
+ if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataset's data type */
+ if((dtype = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get current dimension sizes before extending the dataset's dimension sizes */
+ if(H5LDget_dset_dims(did, prev_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Loop through different variations of extending the dataset */
+ for(i = 0; i < ONE_NTESTS; i++) {
+ HDmemset(vbuf1, 0, TEST_BUF_SIZE * sizeof(test_valid_fields1));
+ HDmemset(vbuf2, 0, TEST_BUF_SIZE * sizeof(test_valid_fields2));
+ HDmemset(ccbuf, 0, TEST_BUF_SIZE * sizeof(set_t));
+ HDmemset(iibuf, 0, TEST_BUF_SIZE * sizeof(int));
+
+ ext_dims[0] = (hsize_t)((int)prev_dims[0] + one_tests[i]);
+
+ /* Change the dimension sizes of the dataset */
+ if(H5Dset_extent(did, ext_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Initialize data */
+ if(!HDstrcmp(dname, DSET_CMPD) || !HDstrcmp(dname, DSET_CMPD_ESC)) {
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, cbuf) < 0)
+ FAIL_STACK_ERROR
+ } /* end if */
+ else if(!HDstrcmp(dname, DSET_ONE)) {
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf) < 0)
+ FAIL_STACK_ERROR
+ } /* end else-if */
+
+ /* There are changes in dimension sizes */
+ if(one_tests[i] > 0) {
+ if(!HDstrcmp(dname, DSET_CMPD) || !HDstrcmp(dname, DSET_CMPD_ESC)) {
+ if(fields) {
+ if(!HDstrcmp(fields, VALID_FIELDS1) || !HDstrcmp(fields, VALID_ESC_FIELDS1)) {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf1) < 0)
+ TEST_ERROR
+ for(j = 0; j < one_tests[i]; j++)
+ VERIFY_ELMTS_VALID1(vbuf1[j], cbuf[prev_dims[0] + (hsize_t)j])
+ } /* end if */
+ else if(!HDstrcmp(fields, VALID_FIELDS2) || !HDstrcmp(fields, VALID_ESC_FIELDS2)) {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf2) < 0)
+ TEST_ERROR
+ for(j = 0; j < one_tests[i]; j++)
+ VERIFY_ELMTS_VALID2(vbuf2[j], cbuf[prev_dims[0] + (hsize_t)j])
+ } /* end else-if */
+ else
+ TEST_ERROR
+ } /* end if */
+ else {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, ccbuf) < 0)
+ TEST_ERROR
+ for(j = 0; j < one_tests[i]; j++)
+ VERIFY_ELMTS_ALL(ccbuf[j], cbuf[prev_dims[0] + (hsize_t)j])
+ } /* end else */
+ } /* end if */
+ else {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf) < 0)
+ TEST_ERROR
+ for(j = 0; j < one_tests[i]; j++)
+ VERIFY_EQUAL(iibuf[j], ibuf[prev_dims[0] + (hsize_t)j])
+ } /* end else */
+ } /* end if */
+ else {
+ /* Verify failure when the dimension size stays the same or decreases */
+ ret = H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf);
+ VERIFY_EQUAL(ret, FAIL)
+ } /* end else */
+ } /* end for */
+
+ /* Closing */
+ if(H5Tclose(dtype) < 0)
+ FAIL_STACK_ERROR
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR
+ if(H5Fclose(fid) < 0)
+ FAIL_STACK_ERROR
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Tclose(dtype);
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+} /* test_LD_elmts_one() */
+
+
+/*
+ **************************************************************************************
+ *
+ * Helper routine to verify elements of a 2-dimensional dataset
+ * _ldbuf contains the elements retrieved via H5LDget_dset_elmts()
+ * _buf contains the data written to the dataset
+ *
+ * e.g. prev_dims[2] = {4, 6}; ext_dims[2] = {6, 10}
+ * elements marked in 'v' in _buf are compared to elements in _ldbuf
+ * 0 1 2 3 4 5 | 6 7 8 9
+ * 0 | v v v v
+ * 1 | v v v v
+ * 2 | v v v v
+ * 3 | v v v v
+ * ---------------------
+ * 4 v v v v v v v v v v
+ * 5 v v v v v v v v v v
+ *
+ **************************************************************************************
+ */
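+
+/*
+ * For the example above (prev_dims = {4, 6}, ext_dims = {6, 10}), the number
+ * of elements expected in _ldbuf works out to:
+ *     4 old rows * (10 - 6) new columns = 16
+ *     (6 - 4) new rows * 10 columns     = 20
+ * i.e. 36 elements in total, in the row-by-row order shown above.
+ */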
+static herr_t
+verify_elmts_two(int type, hsize_t *ext_dims, hsize_t *prev_dims, void *_ldbuf, void *_buf)
+{
+ int k, m; /* Local index variables */
+
+ k = 0;
+ for(m = 0; m < (int)ext_dims[0]; m++) {
+ int n, ind; /* Local index variables */
+
+ ind = m * (int)ext_dims[1];
+ if(m < (int)prev_dims[0]) {
+ for(n = (int)prev_dims[1]; n < (int)ext_dims[1]; n++) {
+ VERIFY_ELMTS(type, k, ind, _ldbuf, _buf)
+ ++k;
+ } /* end for */
+ } /* end if */
+ else {
+ for(n = 0; n < (int)ext_dims[1]; n++) {
+ VERIFY_ELMTS(type, k, ind, _ldbuf, _buf)
+ ++k;
+ } /* end for */
+ } /* end else */
+ } /* end for */
+
+ return(0);
+
+error:
+ return(-1);
+} /* verify_elmts_two() */
+
+
+/*
+ **************************************************************************************
+ * Testing for the High Level public routine: H5LDget_dset_elmts()
+ * Verify that elements retrieved via H5LDget_dset_elmts() are as expected when
+ * the dataset's dimension sizes are changed according to two_tests[]:
+ *
+ * two-dimensional dataset: DSET_TWO with NULL fields
+ * DSET_CMPD_TWO with fields: NULL, VALID_FIELDS1, VALID_FIELDS2
+ *
+ * dims[0] dims[1]
+ * ------- -------
+ * case #1: increase increase
+ * case #2: increase decrease
+ * case #3: increase same
+ * case #4: decrease increase
+ * case #5: decrease decrease (failure)
+ * case #6: decrease same (failure)
+ * case #7: same increase
+ * case #8: same decrease (failure)
+ * case #9: same same (failure)
+ *
+ **************************************************************************************
+ */
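+
+/*
+ * For illustration (values chosen arbitrarily), starting from
+ * prev_dims = {4, 6}:
+ *     ext_dims = {6, 10}  -- case #1, succeeds (both dimensions grow)
+ *     ext_dims = {3, 10}  -- case #4, succeeds (dims[1] grows)
+ *     ext_dims = {4, 6}   -- case #9, fails    (neither dimension grows)
+ * i.e. as exercised below, H5LDget_dset_elmts() succeeds as long as at
+ * least one dimension size increases.
+ */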
+static herr_t
+test_LD_elmts_two(const char *file, const char *dname, const char *fields)
+{
+ hid_t fid=-1; /* file identifier */
+ hid_t did=-1; /* dataset identifier */
+ hid_t dtype=-1; /* dataset's data type */
+ hsize_t ext_dims[2]; /* extended dimension sizes of the dataset */
+ hsize_t prev_dims[2]; /* previous dimension sizes of the dataset */
+ int i; /* local index variable */
+ int ret = 0; /* return value */
+
+ TESTING("H5LDget_dset_elmts: two-dimensional dataset");
+
+ /* Copy the test file */
+ if(h5_make_local_copy(file, COPY_FILENAME) < 0)
+ TEST_ERROR
+
+ for(i = 0; i < TEST_BUF_SIZE; i++) {
+ cbuf[i].field1 = i;
+ cbuf[i].field2.a = i;
+ cbuf[i].field2.b.a = i;
+ cbuf[i].field2.b.b = i;
+ cbuf[i].field2.b.c = i;
+ cbuf[i].field2.c = i;
+ cbuf[i].field3 = (double)i;
+ cbuf[i].field4.a = i;
+ cbuf[i].field4.b = i;
+ ibuf[i] = i;
+ } /* end for */
+
+ /* Open the copied file */
+ if((fid = H5Fopen(COPY_FILENAME, H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Open the dataset */
+ if((did = H5Dopen2(fid, dname, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get the dataset's data type */
+ if((dtype = H5Tget_native_type(H5Dget_type(did), H5T_DIR_DEFAULT)) < 0)
+ FAIL_STACK_ERROR
+
+ /* Get current dimension sizes before extending the dataset's dimension sizes */
+ if(H5LDget_dset_dims(did, prev_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Loop through different variations of extending the dataset */
+ for(i = 0; i < TWO_NTESTS; i++) {
+ HDmemset(vbuf1, 0, TEST_BUF_SIZE * sizeof(test_valid_fields1));
+ HDmemset(vbuf2, 0, TEST_BUF_SIZE * sizeof(test_valid_fields2));
+ HDmemset(ccbuf, 0, TEST_BUF_SIZE * sizeof(set_t));
+ HDmemset(iibuf, 0, TEST_BUF_SIZE * sizeof(int));
+
+ ext_dims[0] = (hsize_t)((int)prev_dims[0] + two_tests[i][0]);
+ ext_dims[1] = (hsize_t)((int)prev_dims[1] + two_tests[i][1]);
+
+ /* Change the dimension sizes of the dataset */
+ if(H5Dset_extent(did, ext_dims) < 0)
+ FAIL_STACK_ERROR
+
+ /* Initialize data */
+ if(!HDstrcmp(dname, DSET_CMPD_TWO)) {
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, cbuf) < 0)
+ FAIL_STACK_ERROR
+ } /* end if */
+ else if(!HDstrcmp(dname, DSET_TWO)) {
+ if(H5Dwrite(did, dtype, H5S_ALL, H5S_ALL, H5P_DEFAULT, ibuf) < 0)
+ FAIL_STACK_ERROR
+ } /* end else-if */
+ else
+ TEST_ERROR
+
+ /* There are changes in dimension sizes */
+ if(two_tests[i][0] > 0 || two_tests[i][1] > 0) {
+ if(!HDstrcmp(dname, DSET_CMPD_TWO)) {
+ if(fields) {
+ if(!HDstrcmp(fields, VALID_FIELDS1) || !HDstrcmp(fields, VALID_ESC_FIELDS1)) {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf1) < 0)
+ TEST_ERROR
+ if(verify_elmts_two(TWO_CMPD_VALID1, ext_dims, prev_dims, vbuf1, cbuf) < 0)
+ TEST_ERROR
+ } /* end if */
+ else if(!HDstrcmp(fields, VALID_FIELDS2) || !HDstrcmp(fields, VALID_ESC_FIELDS2)) {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, vbuf2) < 0)
+ TEST_ERROR
+ if(verify_elmts_two(TWO_CMPD_VALID2, ext_dims, prev_dims, vbuf2, cbuf) < 0)
+ TEST_ERROR
+ } /* end else-if */
+ else
+ TEST_ERROR
+ } /* end if */
+ else {
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, ccbuf) < 0)
+ TEST_ERROR
+ if(verify_elmts_two(TWO_CMPD_NULL, ext_dims, prev_dims, ccbuf, cbuf) < 0)
+ TEST_ERROR
+ } /* end else */
+ } /* end if */
+ else { /* DSET_TWO */
+ /* Retrieve the elements in BUF */
+ if(H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf) < 0)
+ TEST_ERROR
+ if(verify_elmts_two(TWO_NONE, ext_dims, prev_dims, iibuf, ibuf) < 0)
+ TEST_ERROR
+ } /* end else */
+ } /* end if */
+ else {
+ /* Verify failure when neither dimension size increases */
+ ret = H5LDget_dset_elmts(did, prev_dims, ext_dims, fields, iibuf);
+ VERIFY_EQUAL(ret, FAIL)
+ } /* end else */
+ } /* end for */
+
+ /* Closing */
+ if(H5Tclose(dtype) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Fclose(fid) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Remove the copied file */
+ HDremove(COPY_FILENAME);
+
+ PASSED();
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Tclose(dtype);
+ H5Dclose(did);
+ H5Fclose(fid);
+ } H5E_END_TRY;
+ return(-1);
+} /* test_LD_elmts_two() */
+
+/*
+ * Tests for High Level routines:
+ * H5LDget_dset_dims(), H5LDget_dset_elmts(), H5LDget_dset_type_size()
+ */
+int main(void)
+{
+ int nerrors = 0;
+
+ /* Set up temporary buffers for tests: test_LD_elmts_one() & test_LD_elmts_two() */
+ if(NULL == (ibuf = (int *)HDmalloc(sizeof(int) * TEST_BUF_SIZE)))
+ FAIL_STACK_ERROR;
+ if(NULL == (iibuf = (int *)HDmalloc(sizeof(int) * TEST_BUF_SIZE)))
+ FAIL_STACK_ERROR;
+
+ if(NULL == (cbuf = (set_t *)HDmalloc(sizeof(set_t) * TEST_BUF_SIZE)))
+ FAIL_STACK_ERROR;
+ if(NULL == (ccbuf = (set_t *)HDmalloc(sizeof(set_t) * TEST_BUF_SIZE)))
+ FAIL_STACK_ERROR;
+
+ if(NULL == (vbuf1 = (test_valid_fields1 *)HDmalloc(sizeof(test_valid_fields1) * TEST_BUF_SIZE)))
+ FAIL_STACK_ERROR;
+ if(NULL == (vbuf2 = (test_valid_fields2 *)HDmalloc(sizeof(test_valid_fields2) * TEST_BUF_SIZE)))
+ FAIL_STACK_ERROR;
+
+ /*
+ * Testing H5LDget_dset_dims()
+ */
+ nerrors += test_LD_dims_params(FILE);
+ nerrors += test_LD_dims(FILE);
+
+ /*
+ * Testing H5LDget_dset_type_size()
+ */
+ nerrors += test_LD_size(FILE);
+
+ /*
+ * Testing invalid conditions for H5LDget_dset_elmts()
+ */
+ nerrors += test_LD_elmts_invalid(FILE);
+
+ /*
+ * Testing H5LDget_dset_elmts():
+ * 1-dimensional dataset
+ */
+ nerrors += test_LD_elmts_one(FILE, DSET_ONE, NULL);
+
+ /*
+ * Testing H5LDget_dset_elmts():
+ * 1-dimensional dataset w/ compound datatype
+ */
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD, NULL);
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD, VALID_FIELDS1);
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD, VALID_FIELDS2);
+
+ /*
+ * Testing H5LDget_dset_elmts():
+ * 1-dimensional dataset with compound datatype and
+ * member names with escape/separator characters
+ */
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD_ESC, NULL);
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD_ESC, VALID_ESC_FIELDS1);
+ nerrors += test_LD_elmts_one(FILE, DSET_CMPD_ESC, VALID_ESC_FIELDS2);
+
+ /*
+ * Testing H5LDget_dset_elmts() for 2-dimensional datasets
+ */
+ nerrors += test_LD_elmts_two(FILE, DSET_TWO, NULL);
+ nerrors += test_LD_elmts_two(FILE, DSET_CMPD_TWO, NULL);
+ nerrors += test_LD_elmts_two(FILE, DSET_CMPD_TWO, VALID_FIELDS1);
+ nerrors += test_LD_elmts_two(FILE, DSET_CMPD_TWO, VALID_FIELDS2);
+
+ /* Free temporary buffers */
+ if(ibuf)
+ HDfree(ibuf);
+ if(iibuf)
+ HDfree(iibuf);
+ if(cbuf)
+ HDfree(cbuf);
+ if(ccbuf)
+ HDfree(ccbuf);
+ if(vbuf1)
+ HDfree(vbuf1);
+ if(vbuf2)
+ HDfree(vbuf2);
+
+ /* check for errors */
+ if(nerrors)
+ goto error;
+
+ puts("All tests for H5LD high level routines passed.");
+
+ return(0);
+
+error:
+ return(1);
+} /* main() */
+
diff --git a/hl/test/test_ld.h5 b/hl/test/test_ld.h5
new file mode 100644
index 0000000..bd5730c
--- /dev/null
+++ b/hl/test/test_ld.h5
Binary files differ