summaryrefslogtreecommitdiffstats
path: root/hl
diff options
context:
space:
mode:
authorQuincey Koziol <koziol@hdfgroup.org>2016-05-01 10:24:56 (GMT)
committerQuincey Koziol <koziol@hdfgroup.org>2016-05-01 10:24:56 (GMT)
commita6ce3d4e45faab4691a6181a8ce6197157aea21a (patch)
tree35fa8c0766f7408609d5d55731c52d8ca84288a9 /hl
parentac72823bc2a538be8365854a2d3c6f42cf1d5b62 (diff)
downloadhdf5-a6ce3d4e45faab4691a6181a8ce6197157aea21a.zip
hdf5-a6ce3d4e45faab4691a6181a8ce6197157aea21a.tar.gz
hdf5-a6ce3d4e45faab4691a6181a8ce6197157aea21a.tar.bz2
[svn-r29850] Description:
Bring H5DOappend(), H5P[s|g]et_object_flush_cb, and H5P[s|g]et_append_flush from revise_chunks branch to trunk. Brings along updated metadata cache entry tagging, and the internal object flush routine. Tested on: MacOSX/64 10.11.4 (amazon) w/serial, parallel & production (h5committest forthcoming)
Diffstat (limited to 'hl')
-rw-r--r--hl/src/H5DO.c192
-rw-r--r--hl/src/H5DOpublic.h3
-rw-r--r--hl/test/Makefile.am5
-rw-r--r--hl/test/test_dset_append.c1196
4 files changed, 1394 insertions, 2 deletions
diff --git a/hl/src/H5DO.c b/hl/src/H5DO.c
index 99dbd93..bfadfaa 100644
--- a/hl/src/H5DO.c
+++ b/hl/src/H5DO.c
@@ -97,3 +97,195 @@ done:
return(ret_value);
} /* end H5DOwrite_chunk() */
+
+/*
+ * Function: H5DOappend()
+ *
+ * Purpose: To append elements to a dataset.
+ * axis: the dataset dimension (zero-based) for the append
+ * extension: the # of elements to append for the axis-th dimension
+ * memtype: the datatype
+ * buf: buffer with data for the append
+ *
+ * Return: Non-negative on success/Negative on failure
+ *
+ * Programmer: Vailin Choi; Jan 2014
+ *
+ * Note:
+ * This routine is copied from the fast forward feature branch: features/hdf5_ff
+ * src/H5FF.c:H5DOappend() with the following modifications:
+ * 1) Remove and replace macro calls such as
+ * FUNC_ENTER_API, H5TRACE, HGOTO_ERROR
+ * accordingly because hl does not have these macros
+ * 2) Replace H5I_get_type() by H5Iget_type()
+ * 3) Replace H5P_isa_class() by H5Pisa_class()
+ * 4) Fix a bug in the following: replace extension by size[axis]
+ * if(extension < old_size) {
+ * ret_value = FAIL;
+ * goto done;
+ * }
+ */
+herr_t
+H5DOappend(hid_t dset_id, hid_t dxpl_id, unsigned axis, size_t extension,
+ hid_t memtype, const void *buf)
+{
+ hbool_t created_dxpl = FALSE; /* Whether we created a DXPL */
+    hsize_t size[H5S_MAX_RANK];	/* The new size (after extension) */
+ hsize_t old_size = 0; /* The size of the dimension to be extended */
+ int sndims; /* Number of dimensions in dataspace (signed) */
+ unsigned ndims; /* Number of dimensions in dataspace */
+ hid_t space_id = FAIL; /* Old file space */
+ hid_t new_space_id = FAIL; /* New file space (after extension) */
+ hid_t mem_space_id = FAIL; /* Memory space for data buffer */
+ hssize_t snelmts; /* Number of elements in selection (signed) */
+ hsize_t nelmts; /* Number of elements in selection */
+ hid_t dapl = FAIL; /* Dataset access property list */
+
+    hsize_t start[H5S_MAX_RANK];	/* H5Sselect_hyperslab: starting offset */
+ hsize_t count[H5S_MAX_RANK]; /* H5Sselect_hyperslab: # of blocks to select */
+ hsize_t stride[H5S_MAX_RANK]; /* H5Sselect_hyperslab: # of elements to move when selecting */
+ hsize_t block[H5S_MAX_RANK]; /* H5Sselect_hyperslab: # of elements in a block */
+
+ hsize_t *boundary = NULL; /* Boundary set in append flush property */
+ H5D_append_cb_t append_cb; /* Callback function set in append flush property */
+ void *udata; /* User data set in append flush property */
+ hbool_t hit = FALSE; /* Boundary is hit or not */
+ hsize_t k; /* Local index variable */
+ unsigned u; /* Local index variable */
+ herr_t ret_value = FAIL; /* Return value */
+
+ /* check arguments */
+ if(H5I_DATASET != H5Iget_type(dset_id))
+ goto done;
+
+ /* If the user passed in a default DXPL, create one to pass to H5Dwrite() */
+ if(H5P_DEFAULT == dxpl_id) {
+ if((dxpl_id = H5Pcreate(H5P_DATASET_XFER)) < 0)
+ goto done;
+ created_dxpl = TRUE;
+ } /* end if */
+ else if(TRUE != H5Pisa_class(dxpl_id, H5P_DATASET_XFER))
+ goto done;
+
+ /* Get the dataspace of the dataset */
+ if(FAIL == (space_id = H5Dget_space(dset_id)))
+ goto done;
+
+ /* Get the rank of this dataspace */
+ if((sndims = H5Sget_simple_extent_ndims(space_id)) < 0)
+ goto done;
+ ndims = (unsigned)sndims;
+
+ /* Verify correct axis */
+ if(axis >= ndims)
+ goto done;
+
+ /* Get the dimensions sizes of the dataspace */
+ if(H5Sget_simple_extent_dims(space_id, size, NULL) < 0)
+ goto done;
+
+ /* Adjust the dimension size of the requested dimension,
+ but first record the old dimension size */
+ old_size = size[axis];
+ size[axis] += extension;
+ if(size[axis] < old_size)
+ goto done;
+
+ /* Set the extent of the dataset to the new dimension */
+ if(H5Dset_extent(dset_id, size) < 0)
+ goto done;
+
+ /* Get the new dataspace of the dataset */
+ if(FAIL == (new_space_id = H5Dget_space(dset_id)))
+ goto done;
+
+ /* Select a hyperslab corresponding to the append operation */
+ for(u = 0 ; u < ndims ; u++) {
+ start[u] = 0;
+ stride[u] = 1;
+ count[u] = size[u];
+ block[u] = 1;
+ if(u == axis) {
+ count[u] = extension;
+ start[u] = old_size;
+ } /* end if */
+ } /* end for */
+ if(FAIL == H5Sselect_hyperslab(new_space_id, H5S_SELECT_SET, start, stride, count, block))
+ goto done;
+
+    /* The # of elements in the new extended dataspace */
+ if((snelmts = H5Sget_select_npoints(new_space_id)) < 0)
+ goto done;
+ nelmts = (hsize_t)snelmts;
+
+ /* create a memory space */
+ mem_space_id = H5Screate_simple(1, &nelmts, NULL);
+
+ /* Write the data */
+ if(H5Dwrite(dset_id, memtype, mem_space_id, new_space_id, dxpl_id, buf) < 0)
+ goto done;
+
+ /* Obtain the dataset's access property list */
+ if((dapl = H5Dget_access_plist(dset_id)) < 0)
+ goto done;
+
+ /* Allocate the boundary array */
+ boundary = (hsize_t *)HDmalloc(ndims * sizeof(hsize_t));
+
+ /* Retrieve the append flush property */
+ if(H5Pget_append_flush(dapl, ndims, boundary, &append_cb, &udata) < 0)
+ goto done;
+
+ /* No boundary for this axis */
+ if(boundary[axis] == 0)
+ goto done;
+
+ /* Determine whether a boundary is hit or not */
+ for(k = start[axis]; k < size[axis]; k++)
+ if(!((k + 1) % boundary[axis])) {
+ hit = TRUE;
+ break;
+ }
+
+ if(hit) { /* Hit the boundary */
+ /* Invoke callback if there is one */
+ if(append_cb && append_cb(dset_id, size, udata) < 0)
+ goto done;
+
+ /* Do a dataset flush */
+ if(H5Dflush(dset_id) < 0)
+ goto done;
+ } /* end if */
+
+ /* Indicate success */
+ ret_value = SUCCEED;
+
+done:
+ /* Close dxpl if we created it vs. one was passed in */
+ if(created_dxpl) {
+ if(H5Pclose(dxpl_id) < 0)
+ ret_value = FAIL;
+ } /* end if */
+
+ /* Close old dataspace */
+ if(space_id != FAIL && H5Sclose(space_id) < 0)
+ ret_value = FAIL;
+
+ /* Close new dataspace */
+ if(new_space_id != FAIL && H5Sclose(new_space_id) < 0)
+ ret_value = FAIL;
+
+ /* Close memory dataspace */
+ if(mem_space_id != FAIL && H5Sclose(mem_space_id) < 0)
+ ret_value = FAIL;
+
+ /* Close the dataset access property list */
+ if(dapl != FAIL && H5Pclose(dapl) < 0)
+ ret_value = FAIL;
+
+ if(boundary)
+ HDfree(boundary);
+
+ return ret_value;
+} /* H5DOappend() */
+
diff --git a/hl/src/H5DOpublic.h b/hl/src/H5DOpublic.h
index 9cf6921..1e5eb7a 100644
--- a/hl/src/H5DOpublic.h
+++ b/hl/src/H5DOpublic.h
@@ -30,6 +30,9 @@ extern "C" {
H5_HLDLL herr_t H5DOwrite_chunk(hid_t dset_id, hid_t dxpl_id, uint32_t filters,
const hsize_t *offset, size_t data_size, const void *buf);
+H5_HLDLL herr_t H5DOappend(hid_t dset_id, hid_t dxpl_id, unsigned axis,
+ size_t extension, hid_t memtype, const void *buf);
+
#ifdef __cplusplus
}
#endif
diff --git a/hl/test/Makefile.am b/hl/test/Makefile.am
index bea760e..7bfd6b2 100644
--- a/hl/test/Makefile.am
+++ b/hl/test/Makefile.am
@@ -28,7 +28,8 @@ LDADD=$(LIBH5_HL) $(LIBH5TEST) $(LIBHDF5)
# Test programs. These are our main targets. They should be listed in the
# order to be executed, generally most specific tests to least specific tests.
-TEST_PROG=test_lite test_image test_file_image test_table test_ds test_packet test_dset_opt
+TEST_PROG=test_lite test_image test_file_image test_table test_ds test_packet test_dset_opt \
+ test_dset_append
check_PROGRAMS=$(TEST_PROG)
# These programs generate test files for the tests. They don't need to be
@@ -46,7 +47,7 @@ endif
CHECK_CLEANFILES+=combine_tables[1-2].h5 test_ds[1-9].h5 test_ds10.h5 \
test_image[1-3].h5 file_img[1-2].h5 test_lite[1-4].h5 test_table.h5 \
test_packet_table.h5 test_packet_compress.h5 test_detach.h5 \
- test_dectris.h5
+ test_dectris.h5 test_append.h5
# Sources for test_packet executable
test_packet_SOURCES=test_packet.c test_packet_vlen.c
diff --git a/hl/test/test_dset_append.c b/hl/test/test_dset_append.c
new file mode 100644
index 0000000..0f193d9
--- /dev/null
+++ b/hl/test/test_dset_append.c
@@ -0,0 +1,1196 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+* Copyright by The HDF Group. *
+* Copyright by the Board of Trustees of the University of Illinois. *
+* All rights reserved. *
+* *
+* This file is part of HDF5. The full HDF5 copyright notice, including *
+* terms governing use, modification, and redistribution, is contained in *
+* the files COPYING and Copyright.html. COPYING can be found at the root *
+* of the source code distribution tree; Copyright.html can be found at the *
+* root level of an installed copy of the electronic HDF5 document set and *
+* is linked from the top-level documents page. It can also be found at *
+* http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+* access to either file, you may request a copy from help@hdfgroup.org. *
+* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+#include <stdlib.h>
+#include <string.h>
+#include "h5hltest.h"
+#include "H5srcdir.h"
+#include "H5DOpublic.h"
+#include <math.h>
+
+#if defined(H5_HAVE_ZLIB_H) && !defined(H5_ZLIB_HEADER)
+# define H5_ZLIB_HEADER "zlib.h"
+#endif
+#if defined(H5_ZLIB_HEADER)
+# include H5_ZLIB_HEADER /* "zlib.h" */
+#endif
+
+#define FILE "test_append.h5"
+#define DNAME_UNLIM "dataset_unlim"
+#define DNAME_LESS "dataset_less"
+#define DNAME_VARY "dataset_vary"
+#define DNAME_ROW "dataset_row"
+#define DNAME_COLUMN "dataset_column"
+#define DBUGNAME1 "dataset_bug1"
+#define DBUGNAME2 "dataset_bug2"
+
+/* The callback function for the object flush property */
+static herr_t
+flush_func(hid_t H5_ATTR_UNUSED obj_id, void *_udata)
+{
+ unsigned *flush_ct = (unsigned*)_udata;
+ ++(*flush_ct);
+ return 0;
+}
+
+/* The callback function for the append flush property */
+static herr_t
+append_func(hid_t H5_ATTR_UNUSED dset_id, hsize_t H5_ATTR_UNUSED *cur_dims, void *_udata)
+{
+ unsigned *append_ct = (unsigned *)_udata;
+ ++(*append_ct);
+ return 0;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: test_dataset_append_rows_columns
+ *
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending rows and columns to a dataset
+ * with 2 extendible dimensions.
+ *
+ * Return: Success: 0
+ * Failure: 1
+ *
+ * Programmer: Vailin Choi; Jan 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_dataset_append_rows_columns(hid_t fid)
+{
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t dapl = -1; /* A copy of dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10], cbuf[6]; /* The data buffers */
+ int buf[6][13], rbuf[6][13]; /* The data buffers */
+
+ hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
+
+ int i, j; /* Local index variables */
+
+ TESTING("Append flush with H5DOappend()--append rows & columns");
+
+ /* Get the file's file access property list */
+ if((ffapl = H5Fget_access_plist(fid)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set to create a chunked dataset with 2 extendible dimensions */
+ if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
+ FAIL_STACK_ERROR;
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set append flush property */
+ if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Create the dataset */
+ if((did = H5Dcreate2(fid, DNAME_UNLIM, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
+ TEST_ERROR;
+
+ /* Append 6 rows to the dataset */
+ for(i = 0; i < 6; i++) {
+ for(j = 0; j < 10; j++)
+ lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
+ } /* end for */
+
+ /* Verify the # of appends */
+ if(append_ct != 6)
+ TEST_ERROR;
+
+ /* Retrieve and verify object flush counts */
+ if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+ FAIL_STACK_ERROR;
+ if(*flush_ptr != 6)
+ TEST_ERROR;
+
+ /* Append 3 columns to the dataset */
+ for(i = 0; i < 3; i++) {
+ for(j = 0; j < 6; j++)
+ cbuf[j] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
+ if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
+ TEST_ERROR;
+ } /* end for */
+
+ /* Verify the # of appends */
+ if(append_ct != 9)
+ TEST_ERROR;
+
+ /* Retrieve and verify object flush counts */
+ if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+ FAIL_STACK_ERROR;
+ if(*flush_ptr != 9)
+ TEST_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 13; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+
+ /* Clear the buffer */
+ HDmemset(rbuf, 0, sizeof(rbuf));
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Open the dataset again */
+ if((did = H5Dopen2(fid, DNAME_UNLIM, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 13; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+
+ /* Closing */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Sclose(sid) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dapl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dcpl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(ffapl) < 0)
+ FAIL_STACK_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
+ } H5E_END_TRY;
+
+ return 1;
+} /* test_dataset_append_rows_columns() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_dataset_append_rows
+ *
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending rows to a dataset with
+ * one extendible dimension (row).
+ *
+ * Return: Success: 0
+ * Failure: 1
+ *
+ * Programmer: Vailin Choi; Jan 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_dataset_append_rows(hid_t fid)
+{
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t dapl = -1; /* A copy of dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, 10}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10]; /* The data buffer */
+ int buf[6][10], rbuf[6][10]; /* The data buffers */
+ int i, j; /* Local index variables */
+
+ hsize_t boundary[2] = {1, 0}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
+
+ TESTING("Append flush with H5DOappend()--append rows");
+
+ /* Get the file's file access property list */
+ if((ffapl = H5Fget_access_plist(fid)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set to create a chunked dataset with 1 extendible dimension */
+ if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
+ FAIL_STACK_ERROR;
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set append flush property */
+ if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Create the dataset */
+ if((did = H5Dcreate2(fid, DNAME_ROW, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
+ TEST_ERROR;
+
+ /* Append 6 rows to the dataset */
+ for(i = 0; i < 6; i++) {
+ for(j = 0; j < 10; j++)
+ lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
+ } /* end for */
+
+ /* Verify the # of appends */
+ if(append_ct != 6)
+ TEST_ERROR;
+
+ if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+ FAIL_STACK_ERROR;
+ if(*flush_ptr != 6)
+ TEST_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 10; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+
+ /* Clear the buffer */
+ HDmemset(rbuf, 0, sizeof(rbuf));
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Open the dataset again */
+ if((did = H5Dopen2(fid, DNAME_ROW, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 10; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+
+ /* Closing */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Sclose(sid) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dapl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dcpl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(ffapl) < 0)
+ FAIL_STACK_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
+ } H5E_END_TRY;
+
+ return 1;
+} /* test_dataset_append_rows() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_dataset_append_columns
+ *
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending columns to a dataset
+ * with one extendible dimension (column).
+ *
+ * Return: Success: 0
+ * Failure: 1
+ *
+ * Programmer: Vailin Choi; Jan 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_dataset_append_columns(hid_t fid)
+{
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* A copy of dataset creation property */
+ hid_t dapl = -1; /* A copy of dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {6, 0}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {6, H5S_UNLIMITED}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int cbuf[6]; /* The data buffer */
+ int buf[6][3], rbuf[6][3]; /* The data buffers */
+    int i, j;		/* Local index variables */
+
+ hsize_t boundary[2] = {0, 1}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
+
+ TESTING("Append flush with H5DOappend()--append columns");
+
+ /* Get the file's file access property list */
+ if((ffapl = H5Fget_access_plist(fid)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set to create a chunked dataset with 1 extendible dimension */
+ if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
+ FAIL_STACK_ERROR;
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set append flush property */
+ if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Create the dataset */
+ if((did = H5Dcreate2(fid, DNAME_COLUMN, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
+ TEST_ERROR;
+
+ /* Append 3 columns to the dataset */
+ for(i = 0; i < 3; i++) {
+ for(j = 0; j < 6; j++)
+ cbuf[j] = buf[j][i] = ((i * 6) + (j + 1)) * -1;
+ if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
+ TEST_ERROR;
+ } /* end for */
+
+ /* Verify the # of appends */
+ if(append_ct != 3)
+ TEST_ERROR;
+
+ /* Retrieve and verify object flush counts */
+ if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+ FAIL_STACK_ERROR;
+ if(*flush_ptr != 3)
+ TEST_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 3; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+
+ /* Clear the buffer */
+ HDmemset(rbuf, 0, sizeof(rbuf));
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Open the dataset again */
+ if((did = H5Dopen2(fid, DNAME_COLUMN, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 3; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+
+ /* Closing */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Sclose(sid) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dapl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dcpl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(ffapl) < 0)
+ FAIL_STACK_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dapl);
+ H5Pclose(dcpl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
+ } H5E_END_TRY;
+
+ return 1;
+} /* test_dataset_append_columns() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_dataset_append_BUG1
+ *
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending rows and columns to an
+ * extendible dataset.
+ * A BUG occurs:
+ * when the extendible dataset is set up as follows:
+ * hsize_t dims[2] = {0, 10};
+ * hsize_t maxdims[2] = {H5S_UNLIMITED, 50};
+ * when append 6 rows and 3 columns to the dataset;
+ * The data is correct when the dataset is read at this point;
+ * The data is incorrect when the dataset is closed, opened again, and read at this point;
+ * NOTE: the problem does not occur when H5Dflush() is not performed for each row/column.
+ *
+ * Return: Success: 0
+ * Failure: 1
+ *
+ * Programmer: Vailin Choi; Jan 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_dataset_append_BUG1(hid_t fid)
+{
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* Dataset creation property */
+ hid_t dapl = -1; /* Dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, 50}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10], cbuf[6]; /* The data buffers */
+ int buf[6][13], rbuf[6][13]; /* The data buffers */
+ int i, j; /* Local index variables */
+
+ hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
+
+ TESTING("Append flush with H5DOappend()--append rows & columns--BUG1");
+
+ /* Get the file's file access property list */
+ if((ffapl = H5Fget_access_plist(fid)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set to create a chunked dataset with 2 extendible dimensions */
+ if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
+ FAIL_STACK_ERROR;
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set append flush property */
+ if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Create the dataset */
+ if((did = H5Dcreate2(fid, DBUGNAME1, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
+ TEST_ERROR;
+
+ /* Append 6 rows to the dataset */
+ for(i = 0; i < 6; i++) {
+ for(j = 0; j < 10; j++)
+ lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
+ } /* end for */
+
+ /* Verify the # of appends */
+ if(append_ct != 6)
+ TEST_ERROR;
+
+ /* Retrieve and verify object flush counts */
+ if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+ FAIL_STACK_ERROR;
+ if(*flush_ptr != 6)
+ TEST_ERROR;
+
+ /* Append 3 columns to the dataset */
+ for(i = 0; i < 3; i++) {
+ for(j = 0; j < 6; j++)
+ cbuf[j] = buf[j][i+10] = ((i * 6) + (j + 1)) * -1;
+ if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
+ TEST_ERROR;
+ } /* end for */
+
+ /* Verify the # of appends */
+ if(append_ct != 9)
+ TEST_ERROR;
+
+ /* Retrieve and verify object flush counts */
+ if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+ FAIL_STACK_ERROR;
+ if(*flush_ptr != 9)
+ TEST_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 13; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+
+#ifdef BUG1
+ HDmemset(rbuf, 0, sizeof(rbuf));
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Open the dataset again */
+ if((did = H5Dopen(fid, DBUGNAME1, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 13; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+#endif
+
+ /* Closing */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Sclose(sid) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dapl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dcpl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(ffapl) < 0)
+ FAIL_STACK_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Pclose(dapl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
+ } H5E_END_TRY;
+
+ return 1;
+} /* test_dataset_append_BUG1() */
+
+/*-------------------------------------------------------------------------
+ * Function: test_dataset_append_BUG2
+ *
+ * Purpose: Verify that the object flush property and the append flush property
+ * are working properly when appending rows and columns to an
+ * extendible dataset.
+ * A BUG occurs:
+ * when the extendible dataset is set up as follows:
+ * hsize_t dims[2] = {0, 10};
+ * hsize_t maxdims[2] = {50, H5S_UNLIMITED};
+ * when append 6 rows and 3 columns to the dataset;
+ * The data is correct when the dataset is read at this point;
+ * The data is incorrect when the dataset is closed, opened again, and read at this point;
+ * NOTE: the problem does not occur when H5Dflush() is not performed for each row/column.
+ *
+ * Return: Success: 0
+ * Failure: 1
+ *
+ * Programmer: Vailin Choi; Jan 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_dataset_append_BUG2(hid_t fid)
+{
+ hid_t did = -1; /* Dataset ID */
+ hid_t sid = -1; /* Dataspace ID */
+ hid_t dcpl = -1; /* Dataset creation property */
+ hid_t dapl = -1; /* Dataset access property */
+ hid_t ffapl = -1; /* The file's file access property list */
+
+ hsize_t dims[2] = {0, 10}; /* Current dimension sizes */
+ hsize_t maxdims[2] = {50, H5S_UNLIMITED}; /* Maximum dimension sizes */
+ hsize_t chunk_dims[2] = {2,5}; /* Chunk dimension sizes */
+ int lbuf[10], cbuf[6]; /* Data buffers */
+ int buf[6][13], rbuf[6][13]; /* Data buffers */
+ int i, j; /* Local index variables */
+
+ hsize_t boundary[2] = {1, 1}; /* Boundary sizes */
+ unsigned append_ct = 0; /* The # of appends */
+ unsigned *flush_ptr; /* Points to the flush counter */
+
+ TESTING("Append flush with H5DOappend()--append rows & columns--BUG2");
+
+ /* Get the file's file access property list */
+ if((ffapl = H5Fget_access_plist(fid)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set to create a chunked dataset with 2 extendible dimensions */
+ if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
+ FAIL_STACK_ERROR;
+ if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Set append flush property */
+ if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Create the dataset */
+ if((did = H5Dcreate2(fid, DBUGNAME2, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
+ TEST_ERROR;
+
+ /* Append 6 rows to the dataset */
+ for(i = 0; i < 6; i++) {
+ for(j = 0; j < 10; j++)
+ lbuf[j] = buf[i][j] = (i * 10) + (j + 1);
+ if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)1, H5T_NATIVE_INT, lbuf) < 0)
+ TEST_ERROR;
+ } /* end for */
+
+ /* Verify the # of appends */
+ if(append_ct != 6)
+ TEST_ERROR;
+
+ /* Retrieve and verify object flush counts */
+ if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+ FAIL_STACK_ERROR;
+ if(*flush_ptr != 6)
+ TEST_ERROR;
+
+
+ /* Append 3 columns to the dataset */
+ for(i = 0; i < 3; i++) {
+ for(j = 0; j < 6; j++)
+ cbuf[j] = buf[j][i+10] = ((i * 6) + (j + 1)) * -1;
+ if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)1, H5T_NATIVE_INT, cbuf) < 0)
+ TEST_ERROR;
+ } /* end for */
+
+ /* Verify the # of appends */
+ if(append_ct != 9)
+ TEST_ERROR;
+
+ /* Retrieve and verify object flush counts */
+ if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+ FAIL_STACK_ERROR;
+ if(*flush_ptr != 9)
+ TEST_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 13; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+
+#ifdef BUG2
+ HDmemset(rbuf, 0, sizeof(rbuf));
+
+ /* Close the dataset */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Open the dataset again */
+ if((did = H5Dopen(fid, DBUGNAME2, H5P_DEFAULT)) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Read the dataset */
+ if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+ FAIL_STACK_ERROR;
+
+ /* Verify the data */
+ for(i = 0; i < 6; i++)
+ for(j = 0; j < 13; j++)
+ if(buf[i][j] != rbuf[i][j])
+ TEST_ERROR;
+#endif
+
+ /* Closing */
+ if(H5Dclose(did) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Sclose(sid) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dapl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(dcpl) < 0)
+ FAIL_STACK_ERROR;
+ if(H5Pclose(ffapl) < 0)
+ FAIL_STACK_ERROR;
+
+ PASSED();
+
+ return 0;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Pclose(dcpl);
+ H5Pclose(dapl);
+ H5Pclose(sid);
+ H5Dclose(did);
+ H5Pclose(ffapl);
+ } H5E_END_TRY;
+
+ return 1;
+} /* test_dataset_append_BUG2() */
+
+
+/*-------------------------------------------------------------------------
+ * Function:	test_dataset_append_less
+ *
+ * Purpose:	Verify that the object flush property and the append flush property
+ *		are working properly when appending rows and columns to an
+ *		extendible dataset where the append size is less than the boundary
+ *		size.
+ *
+ * Return:	Success:	0
+ *		Failure:	1
+ *
+ * Programmer:	Vailin Choi; Jan 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_dataset_append_less(hid_t fid)
+{
+    hid_t did = -1;			/* Dataset ID */
+    hid_t sid = -1;			/* Dataspace ID */
+    hid_t dcpl = -1;			/* A copy of dataset creation property */
+    hid_t dapl = -1;			/* A copy of dataset access property */
+    hid_t ffapl = -1;			/* The file's file access property list */
+
+    hsize_t dims[2] = {0, 10};		/* Current dimension sizes */
+    hsize_t maxdims[2] = {100, 100};	/* Maximum dimension sizes */
+    hsize_t chunk_dims[2] = {2,5};	/* Chunk dimension sizes */
+    int lbuf[20], cbuf[6][3];		/* Data buffers */
+    int buf[6][13], rbuf[6][13];	/* Data buffers */
+    int i, j, k;			/* Local index variables */
+
+    hsize_t boundary[2] = {3, 3};	/* Boundary sizes */
+    unsigned append_ct = 0;		/* The # of appends */
+    unsigned *flush_ptr;		/* Points to the flush counter */
+
+    TESTING("Append flush with H5DOappend()--append size < boundary size");
+
+    /* Get the file's file access property list */
+    if((ffapl = H5Fget_access_plist(fid)) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Set to create a chunked dataset with 2 extendible dimensions */
+    if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
+	FAIL_STACK_ERROR;
+    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Set append flush property: callback fires every 3 rows / 3 columns */
+    if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Create the dataset */
+    if((did = H5Dcreate2(fid, DNAME_LESS, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
+	TEST_ERROR;
+
+    /* Append to the dataset 2 rows at a time for 3 times */
+    for(i = 0, k = 0; i < 6; i++) {
+	for(j = 0; j < 10; j++, k++)
+	    buf[i][j] = lbuf[k] = (i * 10) + (j + 1);
+
+	if((i + 1) % 2 == 0) {
+	    if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)2, H5T_NATIVE_INT, lbuf) < 0)
+		TEST_ERROR;
+	    k = 0;
+	} /* end if */
+    } /* end for */
+
+    /* Verify the # of appends: 6 rows cross the 3-row boundary twice */
+    if(append_ct != 2)
+	TEST_ERROR;
+
+    /* Retrieve and verify object flush counts */
+    if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+	FAIL_STACK_ERROR;
+    if(*flush_ptr != 2)
+	TEST_ERROR;
+
+    /* Append 3 columns to the dataset, once */
+    /* (Note: removed a leftover "k++" here; k is not read after the row loop) */
+    for(i = 0; i < 3; i++)
+	for(j = 0; j < 6; j++)
+	    cbuf[j][i] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
+    if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)3, H5T_NATIVE_INT, cbuf) < 0)
+	TEST_ERROR;
+
+    /* Verify the # of appends: the 3-column append hits the boundary once more */
+    if(append_ct != 3)
+	TEST_ERROR;
+
+    /* Retrieve and verify object flush counts */
+    if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+	FAIL_STACK_ERROR;
+    if(*flush_ptr != 3)
+	TEST_ERROR;
+
+    /* Read the dataset */
+    if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Verify the data */
+    for(i = 0; i < 6; i++)
+	for(j = 0; j < 13; j++)
+	    if(buf[i][j] != rbuf[i][j])
+		TEST_ERROR;
+
+    /* Clear the buffer */
+    HDmemset(rbuf, 0, sizeof(rbuf));
+
+    /* Close the dataset */
+    if(H5Dclose(did) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Open the dataset again */
+    if((did = H5Dopen2(fid, DNAME_LESS, H5P_DEFAULT)) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Read the dataset */
+    if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Verify the data */
+    for(i = 0; i < 6; i++)
+	for(j = 0; j < 13; j++)
+	    if(buf[i][j] != rbuf[i][j])
+		TEST_ERROR;
+
+    /* Closing */
+    if(H5Dclose(did) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Sclose(sid) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pclose(dapl) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pclose(dcpl) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pclose(ffapl) < 0)
+	FAIL_STACK_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY {
+	H5Pclose(dapl);
+	H5Pclose(dcpl);
+	H5Sclose(sid);	/* was H5Pclose(sid): sid is a dataspace ID, not a property list */
+	H5Dclose(did);
+	H5Pclose(ffapl);
+    } H5E_END_TRY;
+
+    return 1;
+} /* test_dataset_append_less() */
+
+/*-------------------------------------------------------------------------
+ * Function:	test_dataset_append_vary
+ *
+ * Purpose:	Verify that the object flush property and the append flush property
+ *		are working properly when appending rows and columns to an
+ *		extendible dataset where
+ *		row: the append size is 3 times of the boundary size
+ *		     the append callback/flush is performed on the 1st boundary hit
+ *		column: the boundary is greater than the append size
+ *			the boundary is not hit at all
+ *
+ * Return:	Success:	0
+ *		Failure:	1
+ *
+ * Programmer:	Vailin Choi; Jan 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_dataset_append_vary(hid_t fid)
+{
+    hid_t did = -1;			/* Dataset ID */
+    hid_t sid = -1;			/* Dataspace ID */
+    hid_t dcpl = -1;			/* A copy of dataset creation property */
+    hid_t dapl = -1;			/* A copy of dataset access property */
+    hid_t ffapl = -1;			/* The file's file access property list */
+
+    hsize_t dims[2] = {0, 10};		/* Current dimension sizes */
+    hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};	/* Maximum dimension sizes */
+    hsize_t chunk_dims[2] = {2,5};	/* Chunk dimension sizes */
+    int lbuf[60], cbuf[6][3];		/* Data buffers */
+    int buf[6][13], rbuf[6][13];	/* Data buffers */
+    int i, j, k;			/* Local index variables */
+
+    hsize_t boundary[2] = {3, 7};	/* Boundary sizes */
+    unsigned append_ct = 0;		/* The # of appends */
+    unsigned *flush_ptr;		/* Points to the flush counter */
+
+    TESTING("Append flush with H5DOappend()--append & boundary size vary");
+
+    /* Get the file's file access property list */
+    if((ffapl = H5Fget_access_plist(fid)) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Set to create a chunked dataset with 2 extendible dimensions */
+    if((sid = H5Screate_simple(2, dims, maxdims)) < 0)
+	FAIL_STACK_ERROR;
+    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pset_chunk(dcpl, 2, chunk_dims) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Set append flush property: row boundary 3, column boundary 7 */
+    if((dapl = H5Pcreate(H5P_DATASET_ACCESS)) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pset_append_flush(dapl, 2, boundary, append_func, &append_ct) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Create the dataset */
+    if((did = H5Dcreate2(fid, DNAME_VARY, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, dapl)) < 0)
+	TEST_ERROR;
+
+    /* Append 6 rows to the dataset, once (6 = 2 * row boundary of 3) */
+    for(i = 0, k = 0; i < 6; i++)
+	for(j = 0; j < 10; j++, k++)
+	    buf[i][j] = lbuf[k] = (i * 10) + (j + 1);
+    if(H5DOappend(did, H5P_DEFAULT, 0, (size_t)6, H5T_NATIVE_INT, lbuf) < 0)
+	TEST_ERROR;
+
+    /* Verify the # of appends: callback fires once on the first boundary hit */
+    if(append_ct != 1)
+	TEST_ERROR;
+
+    /* Retrieve and verify object flush counts */
+    if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+	FAIL_STACK_ERROR;
+    if(*flush_ptr != 1)
+	TEST_ERROR;
+
+    /* Append 3 columns to the dataset, once */
+    /* (Note: removed a leftover "k++" here; k is not read after the row append) */
+    for(i = 0; i < 3; i++)
+	for(j = 0; j < 6; j++)
+	    cbuf[j][i] = buf[j][i + 10] = ((i * 6) + (j + 1)) * -1;
+    if(H5DOappend(did, H5P_DEFAULT, 1, (size_t)3, H5T_NATIVE_INT, cbuf) < 0)
+	TEST_ERROR;
+
+    /* Verify the # of appends: 3 columns never reach the 7-column boundary */
+    if(append_ct != 1)
+	TEST_ERROR;
+
+    /* Retrieve and verify object flush counts */
+    if(H5Pget_object_flush_cb(ffapl, NULL, (void **)&flush_ptr) < 0)
+	FAIL_STACK_ERROR;
+    if(*flush_ptr != 1)
+	TEST_ERROR;
+
+    /* Read the dataset */
+    if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Verify the data */
+    for(i = 0; i < 6; i++)
+	for(j = 0; j < 13; j++)
+	    if(buf[i][j] != rbuf[i][j])
+		TEST_ERROR;
+
+    /* Clear the read buffer */
+    HDmemset(rbuf, 0, sizeof(rbuf));
+
+    /* Close the dataset */
+    if(H5Dclose(did) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Open the dataset again */
+    if((did = H5Dopen2(fid, DNAME_VARY, H5P_DEFAULT)) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Read the dataset */
+    if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Verify the data */
+    for(i = 0; i < 6; i++)
+	for(j = 0; j < 13; j++)
+	    if(buf[i][j] != rbuf[i][j])
+		TEST_ERROR;
+
+    /* Closing */
+    if(H5Dclose(did) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Sclose(sid) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pclose(dapl) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pclose(dcpl) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Pclose(ffapl) < 0)
+	FAIL_STACK_ERROR;
+
+    PASSED();
+
+    return 0;
+
+error:
+    H5E_BEGIN_TRY {
+	H5Pclose(dapl);
+	H5Pclose(dcpl);
+	H5Sclose(sid);	/* was H5Pclose(sid): sid is a dataspace ID, not a property list */
+	H5Dclose(did);
+	H5Pclose(ffapl);
+    } H5E_END_TRY;
+
+    return 1;
+} /* test_dataset_append_vary() */
+
+/*-------------------------------------------------------------------------
+ * Function:	Main function
+ *
+ * Purpose:	Test H5Pset/get_object_flush_cb() and H5Pset/get_append_flush()
+ *		along with H5DOappend().
+ *
+ * Return:	Success:	0
+ *		Failure:	1
+ *
+ * Programmer:	Vailin Choi; Jan 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+int main(void)
+{
+    hid_t fid = -1;		/* File ID */
+    hid_t fapl = -1;		/* File access property list */
+    unsigned flush_ct = 0;	/* The # of flushes */
+    int nerrors = 0;		/* The # of errors encountered */
+
+    /* Get a copy of file access property list */
+    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Set to use the latest library format */
+    if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Set object flush property; flush_ct is shared with all the tests below */
+    if(H5Pset_object_flush_cb(fapl, flush_func, &flush_ct) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Create the test file */
+    if((fid = H5Fcreate(FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
+	FAIL_STACK_ERROR;
+
+    nerrors += test_dataset_append_rows(fid);
+
+    flush_ct = 0;	/* Reset flush counter */
+    nerrors += test_dataset_append_columns(fid);
+
+    flush_ct = 0;	/* Reset flush counter */
+    nerrors += test_dataset_append_rows_columns(fid);
+
+#ifdef BUG1_BUG2
+/*
+ * The following tests illustrate the scenarios when H5DOappend does not work with extensible array indexing:
+ * - when the dataset has 1 unlimited dimension and the other dimension is fixed but extendible
+ * - the dataset expands along 1 dimension and then expands along the other dimension
+ */
+    flush_ct = 0;	/* Reset flush counter */
+    nerrors += test_dataset_append_BUG1(fid);
+
+    flush_ct = 0;	/* Reset flush counter */
+    nerrors += test_dataset_append_BUG2(fid);
+#endif
+
+    flush_ct = 0;	/* Reset flush counter */
+    nerrors += test_dataset_append_less(fid);
+
+    flush_ct = 0;	/* Reset flush counter */
+    nerrors += test_dataset_append_vary(fid);
+
+    /* Closing */
+    if(H5Pclose(fapl) < 0)
+	FAIL_STACK_ERROR;
+    if(H5Fclose(fid) < 0)
+	FAIL_STACK_ERROR;
+
+    /* Check for errors */
+    if(nerrors)
+	goto error;
+
+    return 0;
+
+error:
+    /* Release any IDs still open, consistent with the other tests' cleanup */
+    H5E_BEGIN_TRY {
+	H5Pclose(fapl);
+	H5Fclose(fid);
+    } H5E_END_TRY;
+
+    return 1;
+}
+