-rw-r--r--  src/H5D.c                |    4
-rw-r--r--  src/H5Dchunk.c           |    5
-rw-r--r--  src/H5VLnative_dataset.c |    7
-rw-r--r--  test/chunk_info.c        | 1578
4 files changed, 815 insertions, 779 deletions
diff --git a/src/H5D.c b/src/H5D.c
index 9c3a096..a22f173 100644
--- a/src/H5D.c
+++ b/src/H5D.c
@@ -1167,7 +1167,7 @@ herr_t
H5Dget_chunk_info(hid_t dset_id, hid_t fspace_id, hsize_t chk_index, hsize_t *offset, unsigned *filter_mask, haddr_t *addr, hsize_t *size)
{
H5VL_object_t *vol_obj = NULL; /* Dataset for this operation */
- hsize_t nchunks= 0;
+ hsize_t nchunks = 0;
herr_t ret_value = SUCCEED;
FUNC_ENTER_API(FAIL)
@@ -1186,7 +1186,7 @@ H5Dget_chunk_info(hid_t dset_id, hid_t fspace_id, hsize_t chk_index, hsize_t *of
/* Check range for chunk index */
if(chk_index >= nchunks)
- HGOTO_ERROR(H5E_IO, H5E_BADRANGE, FAIL, "chunk index is out of range")
+ HGOTO_ERROR(H5E_IO, H5E_DATASET, FAIL, "chunk index is out of range")
/* Call private function to get the chunk info given the chunk's index */
if(H5VL_dataset_optional(vol_obj, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL, H5VL_NATIVE_DATASET_GET_CHUNK_INFO_BY_IDX, fspace_id, chk_index, offset, filter_mask, addr, size) < 0)
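For reference, a minimal caller-side sketch of the query path touched above: the index passed to H5Dget_chunk_info must be below the count reported by H5Dget_num_chunks, otherwise the call fails with the range error raised here. The rank-2 shape, the function name, and the print statements are illustrative assumptions; only the public H5Dget_num_chunks/H5Dget_chunk_info signatures are taken from the library.

#include <stdio.h>
#include "hdf5.h"

/* Sketch: list every written chunk of an already-open, rank-2 chunked dataset */
static herr_t
list_written_chunks(hid_t dset_id)
{
    hsize_t  nchunks = 0;            /* number of written chunks */
    hsize_t  offset[2];              /* logical offset of a chunk */
    unsigned filter_mask = 0;        /* filters skipped for the chunk */
    haddr_t  addr = HADDR_UNDEF;     /* file address of the chunk */
    hsize_t  size = 0;               /* size of the chunk in the file */
    hsize_t  idx;

    /* H5S_ALL means "use the dataset's own dataspace" */
    if(H5Dget_num_chunks(dset_id, H5S_ALL, &nchunks) < 0)
        return -1;

    /* Any idx >= nchunks is rejected by the range check above */
    for(idx = 0; idx < nchunks; idx++) {
        if(H5Dget_chunk_info(dset_id, H5S_ALL, idx, offset, &filter_mask, &addr, &size) < 0)
            return -1;
        printf("chunk %llu at (%llu,%llu): %llu bytes\n",
               (unsigned long long)idx, (unsigned long long)offset[0],
               (unsigned long long)offset[1], (unsigned long long)size);
    }
    return 0;
}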
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index 86639e6..e3bbd59 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -7124,8 +7124,8 @@ H5D__get_chunk_info_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata)
* Purpose: Iterate over the chunks in the dataset to get the info
* of the desired chunk.
*
- * Note: Currently, this function only gets the number of all written
- * chunks, regardless the dataspace.
+ * Note: Currently, the domain of the index in this function is all
+ * the written chunks, regardless of the dataspace.
*
* Return: Success: SUCCEED
* Failure: FAIL
@@ -7293,7 +7293,6 @@ H5D__get_chunk_info_by_coord(const H5D_t *dset, const hsize_t *offset, unsigned*
HDassert(rdcc);
HDassert(H5D_CHUNKED == layout->type);
-/* Is this expensive? */
/* Search for cached chunks that haven't been written out */
for(ent = rdcc->head; ent; ent = ent->next)
/* Flush the chunk out to disk, to make certain the size is correct later */
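The note above clarifies that the chunk index ranges over all written chunks regardless of the dataspace passed in; when querying by coordinates, an unwritten chunk is still queryable and is reported with an undefined address and zero size. A small caller-side sketch of that behavior, assuming a rank-2 dataset already open as dset_id and the same includes as the sketch above; only the public H5Dget_chunk_info_by_coord signature comes from the library.

/* Sketch: query one chunk by its logical coordinates; a chunk that was never
 * written comes back with addr == HADDR_UNDEF and size == 0. */
static herr_t
check_chunk_at_origin(hid_t dset_id)
{
    hsize_t  coord[2] = {0, 0};      /* logical coordinates of the chunk's first element */
    unsigned filter_mask = 0;
    haddr_t  addr = HADDR_UNDEF;
    hsize_t  size = 0;

    if(H5Dget_chunk_info_by_coord(dset_id, coord, &filter_mask, &addr, &size) < 0)
        return -1;
    if(addr == HADDR_UNDEF && size == 0)
        printf("chunk at (0,0) has not been written\n");
    return 0;
}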
diff --git a/src/H5VLnative_dataset.c b/src/H5VLnative_dataset.c
index 631eb19..4f7f2b5 100644
--- a/src/H5VLnative_dataset.c
+++ b/src/H5VLnative_dataset.c
@@ -509,15 +509,12 @@ H5VL__native_dataset_optional(void *obj, hid_t H5_ATTR_UNUSED dxpl_id,
dset = (H5D_t *)obj;
HDassert(dset);
HDassert(dset->shared);
+ HDassert(dset->shared->space);
/* When default dataspace is given, use the dataset's dataspace */
if(space_id == H5S_ALL)
- {
space = dset->shared->space;
- if(NULL == space)
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "unable to obtain a dataspace")
- } /* otherwise, use the given space ID */
- else
+ else /* otherwise, use the given space ID */
if(NULL == (space = (const H5S_t *)H5I_object_verify(space_id, H5I_DATASPACE)))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a valid dataspace ID")
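With the change above, a missing dataset dataspace becomes an internal invariant (assertion) rather than a runtime error, and H5S_ALL keeps meaning "use the dataset's own dataspace". From the caller's side, passing H5S_ALL or the explicit dataspace ID should therefore report the same chunk count; the assert-based check below is an illustration under that assumption, not part of the library.

/* Sketch: H5S_ALL and the dataset's own dataspace ID are interchangeable for
 * the chunk-query calls (the dataspace does not currently restrict the count). */
#include <assert.h>
#include "hdf5.h"

static herr_t
compare_chunk_counts(hid_t dset_id)
{
    hid_t   space_id = H5I_INVALID_HID;
    hsize_t n_all = 0, n_space = 0;

    if((space_id = H5Dget_space(dset_id)) < 0)
        return -1;
    if(H5Dget_num_chunks(dset_id, H5S_ALL, &n_all) < 0 ||
       H5Dget_num_chunks(dset_id, space_id, &n_space) < 0) {
        H5Sclose(space_id);
        return -1;
    }
    assert(n_all == n_space);   /* same count either way */
    H5Sclose(space_id);
    return 0;
}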
diff --git a/test/chunk_info.c b/test/chunk_info.c
index bdc9bbc..081c833 100644
--- a/test/chunk_info.c
+++ b/test/chunk_info.c
@@ -12,8 +12,6 @@
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
/*
- * Programmer: Pedro Vicente <pvn@hdfgroup.edu>
- * April 7, 2008
*
* Purpose: Tests chunk query API functions
*
@@ -22,15 +20,17 @@
*
* Test structure:
* main()
- * test_get_chunk_info_highest18()
- * test_get_chunk_info_110()
+ * test_get_chunk_info_highest_v18()
+ * test_get_chunk_info_v110()
* test_chunk_info_single_chunk()
* test_chunk_info_implicit()
* test_chunk_info_fixed_array()
* test_chunk_info_extensible_array()
* test_chunk_info_version2_btrees()
* test_failed_attempts()
- * test_filter_mask_with_skip_compress()
+ * test_flt_msk_with_skip_compress()
+ *
+ * Helper functions:
+ *       verify_get_chunk_info(), verify_get_chunk_info_by_coord(),
+ *       verify_empty_chunk_info(), verify_and_write(), index_type_str(),
+ *       read_each_chunk(), and reinit_vars()
*
*/
#define H5D_FRIEND
@@ -43,47 +43,95 @@
/* Test file names, using H5F_libver_t as indices */
const char *FILENAME[] = {
"tchunk_info_earliest",
- "tchunk_info_18",
- "tchunk_info_110",
- "tchunk_info_112",
+ "tchunk_info_v18",
+ "tchunk_info_v110",
+ "tchunk_info_v112",
NULL
};
/* File to be used in test_failed_attempts */
-#define FILTERMASK_FILE "tfilter_mask.h5"
+#define FILTERMASK_FILE "tflt_msk"
+#define BASIC_FILE "basic_query"
/* Parameters for testing chunk querying */
-#define RANK 2
-#define FILENAME_BUF_SIZE 1024
-#define DSET_SIMPLE_CHUNKED "Chunked Dataset"
-#define DSET_CONTIGUOUS "Contiguous Dataset"
-#define DSET_EMPTY "Empty Dataset"
-#define DSET_EMPTY_ALLOC "Empty Dataset with ALLOC_TIME_EARLY"
-#define DSET_SINGLE_CHUNK "Single Chunk Index Dataset"
-#define DSET_IMPLICIT_INDEX "Implicit Index Dataset"
-#define DSET_FIXED_ARR_INDEX "Fixed Array Index Dataset"
-#define DSET_EXT_ARR_INDEX "Extensible Array Index Dataset"
-#define DSET_V2_BTREE_INDEX "Version 2 B-Tree Index Dataset"
-#define DATASETNAME2 "skip_one_filter"
+#define SIMPLE_CHUNKED_DSET_NAME "Chunked Dataset"
+#define CONTIGUOUS_DSET_NAME "Contiguous Dataset"
+#define EMPTY_DSET_NAME "Empty Dataset"
+#define EMPTY_EARLY_ALLOC_DSET_NAME "Empty Dataset with ALLOC_TIME_EARLY"
+#define SINGLE_CHUNK_DSET_NAME "Single Chunk Index Dataset"
+#define IMPLICIT_INDEX_DSET_NAME "Implicit Index Dataset"
+#define FIXED_ARR_INDEX_DSET_NAME "Fixed Array Index Dataset"
+#define EXT_ARR_INDEX_DSET_NAME "Extensible Array Index Dataset"
+#define V2_BTREE_INDEX_DSET_NAME "Version 2 B-Tree Index Dataset"
+#define SKIP_FILTER_DSET_NAME "Dataset with Skipping One Filter"
+#define FILENAME_BUF_SIZE 256 /* Size for file names */
+#define RANK 2 /* Rank for datasets */
+
+/* Dimension of the dataset */
#define NX 24
#define NY 16
+
+/* Dimension of the chunk */
#define CHUNK_NX 6
#define CHUNK_NY 4
-#define SINGLE_CHUNK_SIZE (NX*NY*sizeof(int))
-#define CHUNK_SIZE 96
-#define NUM_CHUNKS 16
+
+/* X/Y coords of first chunk written */
+#define START_CHK_X 0
+#define START_CHK_Y 2
+
+/* X and Y coords one past the last chunk written (exclusive loop bounds) */
+#define END_CHK_X 2
+#define END_CHK_Y 4
+
+/* X and Y coords of an empty chunk */
+#define EMPTY_CHK_X 0
+#define EMPTY_CHK_Y 0
+
+/* Size of a chunk when the entire dataset is a single chunk */
+#define SINGLE_CHK_SIZE (NX*NY*sizeof(int))
+
+/* Size of a chunk */
+#define CHK_SIZE (CHUNK_NX*CHUNK_NY*sizeof(int))
+
+/* Size of an empty chunk */
+#define EMPTY_CHK_SIZE 0
+
+/* Maximum number of chunks without extending the dataset */
+#define NUM_CHUNKS ((NX/CHUNK_NX)*(NY/CHUNK_NY))
+
+/* Number of chunks that have been written */
#define NUM_CHUNKS_WRITTEN 4
+#define ONE_CHUNK_WRITTEN 1
+#define TWO_CHUNKS_WRITTEN 2
+#define NO_CHUNK_WRITTEN 0
+
+/* For testing invalid arguments */
+#define NONEXIST_CHK_INDEX 3
+#define OUTOFRANGE_CHK_INDEX 5
+#define INVALID_CHK_INDEX 5
+
+/* For compressed data */
#define DEFLATE_SIZE_ADJUST(s) (ceil(((double)(s))*1.001)+12)
+/* For use in error reporting */
+#define MSG_CHK_ADDR "Chunk address should not be HADDR_UNDEF because of H5D_ALLOC_TIME_EARLY."
+#define MSG_CHK_SIZE "Chunk size should not be 0 because of H5D_ALLOC_TIME_EARLY."
+
/* Utility function to initialize arguments */
void reinit_vars(unsigned *read_flt_msk, haddr_t *addr, hsize_t *size);
+/* Helper functions containing common code that verify the chunk indexing
+   type, the number of chunks, and the information of individual chunks */
+static int verify_and_write(hid_t chunkfile, const char* dset_name, hid_t dspace, H5D_chunk_index_t exp_idx_type, hsize_t exp_num_chunks, unsigned flt_msk);
+static int verify_get_chunk_info(hid_t dset, hid_t dspace, hsize_t chk_index, hsize_t exp_chk_size, hsize_t *exp_offset, unsigned exp_flt_msk);
+static int verify_get_chunk_info_by_coord(hid_t dset, hsize_t *offset, hsize_t exp_chk_size, unsigned exp_flt_msk);
+
/*-------------------------------------------------------------------------
* Function: read_each_chunk (helper function)
*
* Purpose: Reads the chunk specified by its offset and verifies that
* it contains the same data as what was written. This function
- * is used in test_get_chunk_info.
+ * is used in various test_get_chunk_info... functions.
*
* Return: Success: SUCCEED
* Failure: FAIL
@@ -92,36 +140,31 @@ void reinit_vars(unsigned *read_flt_msk, haddr_t *addr, hsize_t *size);
*
*-------------------------------------------------------------------------
*/
-//EIP - May be this function should take a pointer to an array of chunk dimensions
-//EIP and its size, so it is not restricted to 2 dims only?
-static herr_t read_each_chunk(hid_t dset_id, hsize_t offset1, hsize_t offset2, void *direct_buf)
+static herr_t read_each_chunk(hid_t dset_id, hsize_t *offset, void *direct_buf)
{
int read_buf[CHUNK_NX][CHUNK_NY];
- hsize_t offset[2] = {offset1, offset2};
unsigned read_flt_msk = 0;
- herr_t ret; /* Return value */
HDmemset(&read_buf, 0, sizeof(read_buf));
/* Read the chunk specified by its offset */
- ret = H5Dread_chunk(dset_id, H5P_DEFAULT, offset, &read_flt_msk, read_buf);
- if(ret < 0) return(FAIL);
+ if(H5Dread_chunk(dset_id, H5P_DEFAULT, offset, &read_flt_msk, read_buf) < 0)
+ return FAIL;
/* Verify that read chunk is the same as the corresponding written one */
if(HDmemcmp(direct_buf, read_buf, CHUNK_NX*CHUNK_NY) != 0)
{
- HDfprintf(stderr, "Read chunk differs from written chunk at offset (%d,%d)\n", offset1, offset2);
- return(FAIL);
+        HDfprintf(stderr, "Read chunk differs from written chunk at offset (%llu,%llu)\n", (unsigned long long)offset[0], (unsigned long long)offset[1]);
+ return FAIL;
}
- return(SUCCEED);
+ return SUCCEED;
}
/*-------------------------------------------------------------------------
* Function: reinit_vars (helper function)
*
- * Purpose: Helper function to wipe out variables for the next use,
- * used in test_get_chunk_info.
+ * Purpose: Wipes out variables for the next use, used in various tests.
*
* Return: Won't fail
*
@@ -140,7 +183,218 @@ void reinit_vars(unsigned *read_flt_msk, haddr_t *addr, hsize_t *size)
}
/*-------------------------------------------------------------------------
- * Function: test_get_chunk_info_highest18
+ * Function: verify_get_chunk_info (helper function)
+ *
+ * Purpose: Verifies that H5Dget_chunk_info returns correct
+ * values for a chunk.
+ *
+ * Return: Success: SUCCEED
+ * Failure: FAIL
+ *
+ * Date: August 2019
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+verify_get_chunk_info(hid_t dset, hid_t dspace, hsize_t chk_index, hsize_t exp_chk_size, hsize_t *exp_offset, unsigned exp_flt_msk)
+{
+ unsigned read_flt_msk = 0; /* Read filter mask */
+ hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
+ hsize_t size = 0; /* Size of an allocated/written chunk */
+ haddr_t addr = 0; /* Address of an allocated/written chunk */
+
+ if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
+ TEST_ERROR
+ CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
+ VERIFY(size, exp_chk_size, "H5Dget_chunk_info, chunk size");
+ VERIFY(read_flt_msk, exp_flt_msk, "H5Dget_chunk_info, filter mask");
+ VERIFY(out_offset[0], exp_offset[0], "H5Dget_chunk_info, offset[0]");
+ VERIFY(out_offset[1], exp_offset[1], "H5Dget_chunk_info, offset[1]");
+ return SUCCEED;
+
+error:
+ return FAIL;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: verify_get_chunk_info_by_coord (helper function)
+ *
+ * Purpose: Verifies that H5Dget_chunk_info_by_coord returns correct
+ * values for a chunk.
+ *
+ * Return: Success: SUCCEED
+ * Failure: FAIL
+ *
+ * Date: August 2019
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+verify_get_chunk_info_by_coord(hid_t dset, hsize_t *offset, hsize_t exp_chk_size, unsigned exp_flt_msk)
+{
+ unsigned read_flt_msk = 0; /* Read filter mask */
+ hsize_t size = 0; /* Size of an allocated/written chunk */
+ haddr_t addr = 0; /* Address of an allocated/written chunk */
+
+ /* Get info of the chunk at logical coordinates specified by offset */
+ if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ TEST_ERROR
+ CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
+ VERIFY(size, exp_chk_size, "H5Dget_chunk_info_by_coord, chunk size");
+ VERIFY(read_flt_msk, exp_flt_msk, "H5Dget_chunk_info_by_coord, filter mask");
+ return SUCCEED;
+
+error:
+ return FAIL;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: verify_empty_chunk_info (helper function)
+ *
+ * Purpose: Verifies that H5Dget_chunk_info_by_coord returns correct
+ * values for an empty chunk.
+ *
+ * Return: Success: SUCCEED
+ * Failure: FAIL
+ *
+ * Date: August 2018
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+verify_empty_chunk_info(hid_t dset, hsize_t *offset)
+{
+ unsigned read_flt_msk = 0; /* Read filter mask */
+ hsize_t size = 0; /* Size of an allocated/written chunk */
+ haddr_t addr = 0; /* Address of an allocated/written chunk */
+
+ /* Get info of the chunk at logical coordinates specified by offset */
+ if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ TEST_ERROR
+ VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
+ VERIFY(size, EMPTY_CHK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
+ return SUCCEED;
+
+error:
+ return FAIL;
+}
+
+/*-------------------------------------------------------------------------
+ * Function: index_type_str (helper function)
+ *
+ * Purpose: Returns the string containing the text associated with the
+ * given indexing scheme. For use in error messages.
+ *
+ * Return: Success: a valid indexing scheme string
+ * Failure: a note indicating the indexing type is invalid
+ *
+ * Date: August 2019
+ *
+ *-------------------------------------------------------------------------
+ */
+static const char*
+index_type_str(H5D_chunk_index_t idx_type)
+{
+ switch (idx_type) {
+ case H5D_CHUNK_IDX_SINGLE:
+ return("Single Chunk index type");
+ case H5D_CHUNK_IDX_NONE:
+ return("Implicit index type");
+ case H5D_CHUNK_IDX_FARRAY:
+ return("Fixed Array index type");
+ case H5D_CHUNK_IDX_EARRAY:
+ return("Extensible Array index type");
+ case H5D_CHUNK_IDX_BT2:
+ return("Version 2 B-tree index type");
+ case H5D_CHUNK_IDX_BTREE:
+ return("Version 1 B-tree index type (default)");
+ case H5D_CHUNK_IDX_NTYPES:
+ default:
+ return("invalid index type");
+ }
+} /* index_type_str */
+
+/*-------------------------------------------------------------------------
+ * Function: verify_and_write (helper function)
+ *
+ * Purpose:     Verifies that the chunk indexing scheme and the number of
+ *              chunks of the dataset match the expected values, then writes
+ *              data to a subset of chunks.  This function opens the dataset
+ *              and closes it after writing.
+ *
+ * Return: Success: SUCCEED
+ * Failure: FAIL
+ *
+ * Date: August 2019
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+verify_and_write(hid_t chunkfile, const char* dset_name, hid_t dspace, H5D_chunk_index_t exp_idx_type, hsize_t exp_num_chunks, unsigned flt_msk)
+{
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ H5D_chunk_index_t idx_type; /* Dataset chunk index type */
+ hsize_t offset[2] = {0, 0}; /* Offset coordinates of a chunk */
+ hsize_t nchunks = 0; /* Number of chunks */
+ hsize_t ii, jj; /* Array indices */
+ int n; /* Used as chunk index, but int to avoid conversion warning */
+ int direct_buf[NUM_CHUNKS][CHUNK_NX][CHUNK_NY];/* Data in chunks */
+
+ /* Open the dataset */
+ if((dset = H5Dopen2(chunkfile, dset_name, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /* Get the chunk indexing type of the dataset */
+ if(H5Dget_chunk_index_type(dset, &idx_type) < 0)
+ TEST_ERROR
+
+ /* Ensure the correct chunk indexing scheme is used */
+ if(idx_type != exp_idx_type)
+ {
+ char msg[256];
+        sprintf(msg, "Should be using %s.\n", index_type_str(exp_idx_type));
+ FAIL_PUTS_ERROR(msg);
+ }
+
+ /* Get and verify the number of chunks */
+ if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
+ VERIFY(nchunks, exp_num_chunks, "H5Dget_num_chunks, number of chunks");
+
+ /* Get and verify the number of chunks again, passing in H5S_ALL */
+ if(H5Dget_num_chunks(dset, H5S_ALL, &nchunks) < 0) TEST_ERROR
+ VERIFY(nchunks, exp_num_chunks, "H5Dget_num_chunks, number of chunks");
+
+ /* Initialize the array of chunk data for all NUM_CHUNKS chunks */
+ for(n = 0; n < NUM_CHUNKS; n++)
+ for(ii = 0; ii < CHUNK_NX; ii++)
+ for(jj = 0; jj < CHUNK_NY; jj++)
+ direct_buf[n][ii][jj] = n + 1;
+
+ /* Write only NUM_CHUNKS_WRITTEN chunks at the following logical coords:
+ (0,2) (0,3) (1,2) (1,3) */
+ n = 0;
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, n++) {
+ offset[0] = ii * CHUNK_NX;
+ offset[1] = jj * CHUNK_NY;
+ if(H5Dwrite_chunk(dset, H5P_DEFAULT, flt_msk, offset, CHK_SIZE, (void*)direct_buf[n]) < 0)
+ TEST_ERROR
+ }
+
+ /* Close the dataset */
+ if(H5Dclose(dset) < 0) TEST_ERROR
+
+ return SUCCEED;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(dset);
+ } H5E_END_TRY;
+ return FAIL;
+} /* verify_and_write */
+
+/*-------------------------------------------------------------------------
+ * Function: test_get_chunk_info_highest_v18
*
* Purpose: Test getting various chunk information
*
@@ -161,61 +415,51 @@ void reinit_vars(unsigned *read_flt_msk, haddr_t *addr, hsize_t *size)
*-------------------------------------------------------------------------
*/
static herr_t
-test_get_chunk_info_highest18(hid_t fapl)
+test_get_chunk_info_highest_v18(hid_t fapl)
{
- char filename[FILENAME_BUF_SIZE];
- hid_t chunkfile = H5I_INVALID_HID; /* File ID */
- hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
- hid_t dset = H5I_INVALID_HID; /* Dataset ID */
- hid_t cparms = H5I_INVALID_HID; /* Creation plist */
+ char filename[FILENAME_BUF_SIZE]; /* File name */
+ hid_t chunkfile = H5I_INVALID_HID; /* File ID */
+ hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ hid_t cparms = H5I_INVALID_HID; /* Creation plist */
+ hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
+ int direct_buf[NUM_CHUNKS][CHUNK_NX][CHUNK_NY]; /* Data in chunks */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* 2 unlimited dims */
hsize_t out_offset[2]; /* Buffer to get offset coordinates */
hsize_t size = 0; /* Size of an allocated/written chunk */
hsize_t nchunks = 0; /* Number of chunks */
haddr_t addr = 0; /* Address of an allocated/written chunk */
hsize_t chk_index = 0; /* Index of a chunk */
hsize_t dims[2] = {NX, NY};/* Dataset dimensions */
- hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
- hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
- int direct_buf[NUM_CHUNKS][CHUNK_NX][CHUNK_NY];/* Data in chunks */
- size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int); /* Buffer size of a chk */
- unsigned filter_mask = 0; /* Filter mask */
+ unsigned flt_msk = 0; /* Filter mask */
unsigned read_flt_msk = 0; /* Filter mask after direct read */
int fillvalue = -1; /* Fill value */
int aggression = 9; /* Compression aggression setting */
- H5F_libver_t low, high; /* File format bounds */
- hsize_t offset[2] = {0, 0}; /* Offset coordinates of a chunk */
- int n; /* Used on buffer, to avoid conversion warning */
- hsize_t ii, jj;
+ hsize_t offset[2] = {0, 0}; /* Offset coordinates of a chunk */
const Bytef *z_src = (const Bytef*)(direct_buf);
Bytef *z_dst; /*destination buffer */
- uLongf z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(buf_size);
- uLong z_src_nbytes = (uLong)buf_size;
+ uLongf z_dst_nbytes = (uLongf)DEFLATE_SIZE_ADJUST(CHK_SIZE);
+ uLong z_src_nbytes = (uLong)CHK_SIZE;
void *outbuf = NULL; /* Pointer to new buffer */
-
- herr_t ret;
+ hsize_t ii, jj; /* Array indices */
+ int n; /* Used as chunk index, but int to avoid conversion warning */
+ herr_t ret; /* Temporary returned value for verifying failure */
TESTING("getting chunk information in file with version prior to 1.10");
/* Create the file */
h5_fixname(FILENAME[H5F_LIBVER_V18], fapl, filename, sizeof filename);
- /* Set high bound to V18 to test chunked dataset that use B-tree v1
- structures to index chunks */
- high = H5F_LIBVER_V18;
-
- /* Low bound can be anything below 1.10, which was when the new chunk storage
- was introduced */
- low = H5F_LIBVER_EARLIEST;
-
- /* Set version bounds for creating the file */
- if(H5Pset_libver_bounds(fapl, low, high) < 0)
+    /* Set version bounds for creating the file.  The high bound is set to V18
+       to test chunked datasets that use B-tree v1 structures to index chunks. */
+ if(H5Pset_libver_bounds(fapl, H5F_LIBVER_EARLIEST, H5F_LIBVER_V18) < 0)
TEST_ERROR
chunkfile = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
if(chunkfile < 0)
TEST_ERROR
- /* Create the file and memory dataspaces */
+ /* Create the file and memory dataspace */
if((dspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
TEST_ERROR
@@ -231,7 +475,7 @@ test_get_chunk_info_highest18(hid_t fapl)
if(H5Pset_fill_value(cparms, H5T_NATIVE_INT, &fillvalue) < 0) TEST_ERROR
/* Create a new dataset using cparms creation properties */
- dset = H5Dcreate2(chunkfile, DSET_SIMPLE_CHUNKED, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, SIMPLE_CHUNKED_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
/* Initialize the array of chunk data for all NUM_CHUNKS chunks */
@@ -262,21 +506,19 @@ test_get_chunk_info_highest18(hid_t fapl)
/* Write only NUM_CHUNKS_WRITTEN chunks at the following logical coords:
(0,2) (0,3) (1,2) (1,3) */
n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- {
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, n++) {
offset[0] = ii * CHUNK_NX;
offset[1] = jj * CHUNK_NY;
- ret = H5Dwrite_chunk(dset, H5P_DEFAULT, filter_mask, offset, buf_size, (void*)direct_buf[n]);
+ ret = H5Dwrite_chunk(dset, H5P_DEFAULT, flt_msk, offset, CHK_SIZE, (void*)direct_buf[n]);
if(ret < 0) TEST_ERROR
}
- /* Read each chunk and verify the values */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- if(read_each_chunk(dset, ii*CHUNK_NX, jj*CHUNK_NY, (void*)direct_buf[n]) < 0)
- TEST_ERROR
+    /* Read each chunk in the subset of chunks and verify the values */
+    chk_index = 0;
+    for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+        for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, chk_index++) {
+            offset[0] = ii * CHUNK_NX;
+            offset[1] = jj * CHUNK_NY;
+            if(read_each_chunk(dset, offset, (void*)direct_buf[chk_index]) < 0)
+                TEST_ERROR
+        }
/* Free the read buffer */
if(outbuf)
@@ -285,122 +527,88 @@ test_get_chunk_info_highest18(hid_t fapl)
if(H5Fflush(dset, H5F_SCOPE_LOCAL) < 0)
TEST_ERROR
- /* Close the dataset then... */
+ /* Close the dataset */
if(H5Dclose(dset) < 0) TEST_ERROR
/* ...open it again to test the chunk query functions */
- if((dset = H5Dopen2(chunkfile, DSET_SIMPLE_CHUNKED, H5P_DEFAULT)) < 0)
+ if((dset = H5Dopen2(chunkfile, SIMPLE_CHUNKED_DSET_NAME, H5P_DEFAULT)) < 0)
TEST_ERROR
/* Get and verify the number of chunks written */
if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != NUM_CHUNKS_WRITTEN) TEST_ERROR
+ VERIFY(nchunks, NUM_CHUNKS_WRITTEN, "H5Dget_num_chunks, number of chunks");
- /* Go through all written chunks, get their info and verify the values */
- chk_index = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, chk_index++) {
- int kk;
-
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], ii * CHUNK_NX, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], jj * CHUNK_NY, "H5Dget_chunk_info, offset");
-
- /* Reset variables to pass in to the next call */
- reinit_vars(&read_flt_msk, &addr, &size);
-
- /* Copy offsets to pass in to the next call */
- for(kk = 0; kk < RANK; kk++)
- offset[kk] = out_offset[kk];
-
- /* Get info of the chunk at the specified offsets and verify its info */
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
- }
-
- /* Get and verify info of the last chunk, passing in H5S_ALL */
- chk_index = 3;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- ret = H5Dget_chunk_info(dset, H5S_ALL, chk_index, out_offset, &read_flt_msk, &addr, &size);
- if(ret < 0) TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 6, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 12, "H5Dget_chunk_info, offset");
+    /* Get and verify info of the last written chunk, passing in H5S_ALL for
+       the dataspace */
+ offset[0] = 6;
+ offset[1] = 12;
+ if(verify_get_chunk_info(dset, H5S_ALL, NUM_CHUNKS_WRITTEN-1, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info failed\n");
/* Attempt to get info of a non-existing chunk, should fail */
- chk_index = 5;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
+ chk_index = OUTOFRANGE_CHK_INDEX;
H5E_BEGIN_TRY {
ret = H5Dget_chunk_info(dset, H5S_ALL, chk_index, out_offset, &read_flt_msk, &addr, &size);
} H5E_END_TRY;
if(ret != FAIL)
- FAIL_PUTS_ERROR(" Attempt to get info of a non-existing chunk.")
+ FAIL_PUTS_ERROR(" Attempt to get info of a non-existing chunk.");
/* Attempt to get info of empty chunks, verify the returned addr and size */
offset[0] = 0;
offset[1] = 0;
- ret = H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size);
- if(ret < 0) TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
+ TEST_ERROR
offset[0] = 3 * CHUNK_NX;
offset[1] = 3 * CHUNK_NY;
- ret = H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size);
- if(ret < 0) TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
+ TEST_ERROR
- /* Read each chunk and verify the values */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- if(read_each_chunk(dset, ii*CHUNK_NX, jj*CHUNK_NY, (void*)direct_buf[n]) < 0)
- TEST_ERROR
+ /* Go through all written chunks, get their info and verify the values */
+ chk_index = 0;
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, chk_index++) {
+ offset[0] = ii * CHUNK_NX;
+ offset[1] = jj * CHUNK_NY;
+
+ if(verify_get_chunk_info(dset, dspace, chk_index, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info failed\n");
+
+ /* Use the same offset to pass into the next ...by_coord function */
+ if(verify_get_chunk_info_by_coord(dset, offset, CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
+ }
/* Close the first dataset */
if(H5Dclose(dset) < 0) TEST_ERROR
/* Create an empty dataset and close it */
- dset = H5Dcreate2(chunkfile, DSET_EMPTY, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, EMPTY_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
if(H5Dclose(dset) < 0) TEST_ERROR
/* Reopen the empty dataset to verify the chunk query functions on it */
- if((dset = H5Dopen2(chunkfile, DSET_EMPTY, H5P_DEFAULT)) < 0)
+ if((dset = H5Dopen2(chunkfile, EMPTY_DSET_NAME, H5P_DEFAULT)) < 0)
TEST_ERROR
/* Verify that the number of chunks is 0 */
if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != 0) TEST_ERROR
+ VERIFY(nchunks, NO_CHUNK_WRITTEN, "H5Dget_num_chunks, number of chunks");
/* Attempt to get info of a chunk from an empty dataset, should fail */
- chk_index = 0;
- reinit_vars(&read_flt_msk, &addr, &size);
+ chk_index = OUTOFRANGE_CHK_INDEX;
H5E_BEGIN_TRY {
ret = H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size);
} H5E_END_TRY;
if(ret != FAIL)
- FAIL_PUTS_ERROR(" Attempt to get info of a non-existing chunk.")
+ FAIL_PUTS_ERROR(" Attempt to get info of a non-existing chunk.");
/* Attempt to get info of a chunk given its coords from an empty dataset,
should succeed with the returned address as HADDR_UNDEF and size as 0 */
- offset[0] = 0;
- offset[1] = 0;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ offset[0] = EMPTY_CHK_X;
+ offset[1] = EMPTY_CHK_Y;
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
if(H5Dclose(dset) < 0) TEST_ERROR
@@ -414,29 +622,29 @@ test_get_chunk_info_highest18(hid_t fapl)
TEST_ERROR
/* Create an empty dataset and close it */
- dset = H5Dcreate2(chunkfile, DSET_EMPTY_ALLOC, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, EMPTY_EARLY_ALLOC_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
if(H5Dclose(dset) < 0) TEST_ERROR
/* Reopen the empty dataset to verify the chunk query functions on it */
- if((dset = H5Dopen2(chunkfile, DSET_EMPTY_ALLOC, H5P_DEFAULT)) < 0)
+ if((dset = H5Dopen2(chunkfile, EMPTY_EARLY_ALLOC_DSET_NAME, H5P_DEFAULT)) < 0)
TEST_ERROR
/* Verify that the number of chunks is NUM_CHUNKS */
if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != NUM_CHUNKS) TEST_ERROR
+ VERIFY(nchunks, NUM_CHUNKS, "H5Dget_num_chunks, number of chunks");
/* Attempt to get info of a chunk from an empty dataset, verify the
returned address and size in the case of H5D_ALLOC_TIME_EARLY */
- chk_index = 0;
+ chk_index = NONEXIST_CHK_INDEX;
reinit_vars(&read_flt_msk, &addr, &size);
ret = H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size);
if(ret < 0) TEST_ERROR
/* Because of H5D_ALLOC_TIME_EARLY, addr cannot be HADDR_UNDEF and size not 0 */
if(addr == HADDR_UNDEF)
- FAIL_PUTS_ERROR("Chunk address should not be HADDR_UNDEF because of H5D_ALLOC_TIME_EARLY.");
- if(size == 0)
- FAIL_PUTS_ERROR("Chunk size should not be 0 because of H5D_ALLOC_TIME_EARLY.");
+ FAIL_PUTS_ERROR(MSG_CHK_ADDR);
+ if(size == EMPTY_CHK_SIZE)
+ FAIL_PUTS_ERROR(MSG_CHK_SIZE);
chk_index = 10;
reinit_vars(&read_flt_msk, &addr, &size);
@@ -444,9 +652,9 @@ test_get_chunk_info_highest18(hid_t fapl)
if(ret < 0) TEST_ERROR
/* Because of H5D_ALLOC_TIME_EARLY, addr cannot be HADDR_UNDEF and size not 0 */
if(addr == HADDR_UNDEF)
- FAIL_PUTS_ERROR("Chunk address should not be HADDR_UNDEF because of H5D_ALLOC_TIME_EARLY.");
- if(size == 0)
- FAIL_PUTS_ERROR("Chunk size should not be 0 because of H5D_ALLOC_TIME_EARLY.");
+ FAIL_PUTS_ERROR(MSG_CHK_ADDR);
+ if(size == EMPTY_CHK_SIZE)
+ FAIL_PUTS_ERROR(MSG_CHK_SIZE);
/* Attempt to get info of a chunk given its coords from an empty dataset,
verify the returned address and size */
@@ -456,9 +664,9 @@ test_get_chunk_info_highest18(hid_t fapl)
TEST_ERROR
/* Because of H5D_ALLOC_TIME_EARLY, addr cannot be HADDR_UNDEF and size not 0 */
if(addr == HADDR_UNDEF)
- FAIL_PUTS_ERROR("Chunk address should not be HADDR_UNDEF because of H5D_ALLOC_TIME_EARLY.");
+ FAIL_PUTS_ERROR(MSG_CHK_ADDR);
if(size == 0)
- FAIL_PUTS_ERROR("Chunk size should not be 0 because of H5D_ALLOC_TIME_EARLY.");
+ FAIL_PUTS_ERROR(MSG_CHK_SIZE);
if(H5Dclose(dset) < 0) TEST_ERROR
@@ -480,39 +688,7 @@ error:
H5_FAILED();
return FAIL;
-} /* test_get_chunk_info_highest18() */
-
-
-/*-------------------------------------------------------------------------
- * Function: verify_idx_type
- *
- * Purpose: Helper function to ensure that the correct chunk indexing
- * scheme is being used.
- *
- * Return: Success: TRUE/FALSE
- * Failure: FAIL
- *-------------------------------------------------------------------------
- */
-static htri_t
-verify_idx_type(hid_t dset, H5D_chunk_index_t expected_idx_type)
-{
- H5D_chunk_index_t idx_type;
-
- /* Get the chunk indexing type of the dataset */
- if(H5Dget_chunk_index_type(dset, &idx_type) < 0)
- TEST_ERROR
-
- /* Simply return FALSE, not FAIL, if the dataset's indexing type is
- not as expected */
- if(idx_type != expected_idx_type)
- return FALSE;
-
- /* Indicating that the correct chunk indexing type is used */
- return TRUE;
-
-error:
- return FAIL;
-}
+} /* test_get_chunk_info_highest_v18() */
/*-------------------------------------------------------------------------
* Function: test_chunk_info_single_chunk
@@ -532,7 +708,7 @@ error:
*-------------------------------------------------------------------------
*/
static herr_t
-test_chunk_info_single_chunk(char *filename, hid_t fapl)
+test_chunk_info_single_chunk(const char *filename, hid_t fapl)
{
hid_t chunkfile = H5I_INVALID_HID; /* File ID */
hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
@@ -540,8 +716,9 @@ test_chunk_info_single_chunk(char *filename, hid_t fapl)
hid_t cparms = H5I_INVALID_HID; /* Creation plist */
hsize_t dims[2] = {NX, NY}; /* Dataset dimensions */
hsize_t chunk_dims[2] = {NX, NY}; /* Chunk dimensions */
- int in_buf[NX][NY]; /* Input buffer */
- unsigned filter_mask = 0; /* Filter mask */
+ int data_buf[NX][NY]; /* Input buffer */
+ H5D_chunk_index_t idx_type; /* Dataset chunk index type */
+ unsigned flt_msk = 0; /* Filter mask */
unsigned read_flt_msk = 0; /* Filter mask after direct read */
hsize_t offset[2]; /* Offset coordinates of a chunk */
hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
@@ -549,8 +726,8 @@ test_chunk_info_single_chunk(char *filename, hid_t fapl)
hsize_t nchunks = 0; /* Number of chunks */
haddr_t addr = 0; /* Address of an allocated/written chunk */
hsize_t chk_index = 0; /* Index of a chunk */
- int ii, jj;
- herr_t ret = 0;
+ hsize_t ii, jj; /* Array indices */
+ herr_t ret; /* Temporary returned value for verifying failure */
TESTING(" Single Chunk index");
@@ -570,61 +747,55 @@ test_chunk_info_single_chunk(char *filename, hid_t fapl)
TEST_ERROR
/* Create a new dataset using cparms creation properties */
- dset = H5Dcreate2(chunkfile, DSET_SINGLE_CHUNK, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, SINGLE_CHUNK_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
- /* Ensure we're using the correct chunk indexing scheme */
- if(verify_idx_type(dset, H5D_CHUNK_IDX_SINGLE) == FALSE)
- FAIL_PUTS_ERROR("Should be using Single Chunk index type");
-
- /* Close the dataset then... */
+ /* Close the dataset */
if(H5Dclose(dset) < 0) TEST_ERROR
/* ...open it again to test the chunk query functions on a single empty
chunk */
- if((dset = H5Dopen2(chunkfile, DSET_SINGLE_CHUNK, H5P_DEFAULT)) < 0)
+ if((dset = H5Dopen2(chunkfile, SINGLE_CHUNK_DSET_NAME, H5P_DEFAULT)) < 0)
+ TEST_ERROR
+
+ /* Ensure the correct chunk indexing scheme is used */
+ if(H5Dget_chunk_index_type(dset, &idx_type) < 0)
TEST_ERROR
+ if(idx_type != H5D_CHUNK_IDX_SINGLE)
+ FAIL_PUTS_ERROR("Should be using Single Chunk index type");
/* Get the number of chunks and verify that no chunk has been written */
if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != 0) TEST_ERROR
+ VERIFY(nchunks, NO_CHUNK_WRITTEN, "H5Dget_num_chunks, number of chunks");
/* Initialize the array of chunk data for the single chunk */
for(ii = 0; ii < NX; ii++)
for(jj = 0; jj < NY; jj++)
- in_buf[ii][jj] = (ii*jj);
+ data_buf[ii][jj] = (int)(ii*jj);
/* Write the chunk */
- if(H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, in_buf) < 0)
+ if(H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_buf) < 0)
TEST_ERROR
/* Get and verify that one chunk had been written */
if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != 1) TEST_ERROR
-
- /* Get and verify info of the first and only chunk */
- chk_index = 0;
- reinit_vars(&read_flt_msk, &addr, &size);
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, SINGLE_CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 0, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 0, "H5Dget_chunk_info, offset");
+ VERIFY(nchunks, ONE_CHUNK_WRITTEN, "H5Dget_num_chunks, number of chunks");
- /* Get info of the chunk at logical coordinates (0,0) */
+ /* Offset of the only chunk */
offset[0] = 0;
offset[1] = 0;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, SINGLE_CHUNK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info_by_coord, filter mask");
+
+ /* Get and verify info of the first and only chunk */
+ if(verify_get_chunk_info(dset, H5S_ALL, 0, SINGLE_CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification H5Dget_chunk_info failed\n");
+
+ /* Get and verify info of the chunk at logical coordinates (0,0) */
+ if(verify_get_chunk_info_by_coord(dset, offset, SINGLE_CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
/* Attempt to get chunk info given an invalid chunk index and verify
* that failure occurs */
- chk_index = 3;
+ chk_index = INVALID_CHK_INDEX;
reinit_vars(&read_flt_msk, &addr, &size);
H5E_BEGIN_TRY {
ret = H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size);
@@ -651,8 +822,6 @@ error:
H5_FAILED();
return FAIL;
} /* test_chunk_info_single_chunk() */
-
-
/*-------------------------------------------------------------------------
* Function: test_chunk_info_implicit
@@ -674,25 +843,15 @@ error:
static herr_t
test_chunk_info_implicit(char *filename, hid_t fapl)
{
- hid_t chunkfile = H5I_INVALID_HID; /* File ID */
- hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
- hid_t dset = H5I_INVALID_HID; /* Dataset ID */
- hid_t cparms = H5I_INVALID_HID; /* Creation plist */
- hsize_t dims[2] = {NX, NY};/* Dataset dimensions */
- hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
- int direct_buf[NUM_CHUNKS][CHUNK_NX][CHUNK_NY];/* Data in chunks */
- size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int); /* Buffer size of a chk */
- unsigned filter_mask = 0; /* Filter mask */
- unsigned read_flt_msk = 0; /* Filter mask after direct read */
- hsize_t offset[2]; /* Offset coordinates of a chunk */
- hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
- hsize_t size = 0; /* Size of an allocated/written chunk */
- hsize_t nchunks = 0; /* Number of chunks */
- haddr_t addr = 0; /* Address of an allocated/written chunk */
- hsize_t chk_index = 0; /* Index of a chunk */
- int n; /* Used on buffer, to avoid conversion warning*/
- hsize_t ii, jj;
- herr_t ret;
+ hid_t chunkfile = H5I_INVALID_HID; /* File ID */
+ hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ hid_t cparms = H5I_INVALID_HID; /* Creation plist */
+ hsize_t dims[2] = {NX, NY}; /* Dataset dimensions */
+ hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
+ unsigned flt_msk = 0; /* Filter mask */
+ hsize_t chk_index = 0; /* Index of a chunk */
+ hsize_t ii, jj; /* Array indices */
TESTING(" Implicit index");
@@ -711,91 +870,40 @@ test_chunk_info_implicit(char *filename, hid_t fapl)
if(H5Pset_chunk(cparms, RANK, chunk_dims) < 0)
TEST_ERROR
+ /* Set allocation time to early */
if(H5Pset_alloc_time(cparms, H5D_ALLOC_TIME_EARLY) < 0)
TEST_ERROR
/* Create a new dataset using cparms creation properties */
- dset = H5Dcreate2(chunkfile, DSET_IMPLICIT_INDEX, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, IMPLICIT_INDEX_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
- /* Ensure we're using the correct chunk indexing scheme */
- if(verify_idx_type(dset, H5D_CHUNK_IDX_NONE) == FALSE)
- FAIL_PUTS_ERROR("Should be using Implicit index type");
-
- /* Close the dataset then... */
+ /* Close the dataset */
if(H5Dclose(dset) < 0) TEST_ERROR
- /* ...open it again to test the chunk query functions */
- if((dset = H5Dopen2(chunkfile, DSET_IMPLICIT_INDEX, H5P_DEFAULT)) < 0)
- TEST_ERROR
-
- /* Get and verify the number of chunks */
- if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
-
- /* All chunks because of H5D_ALLOC_TIME_EARLY */
- if(nchunks != NUM_CHUNKS) TEST_ERROR
-
- /* Get and verify the number of chunks again, passing in H5S_ALL */
- if(H5Dget_num_chunks(dset, H5S_ALL, &nchunks) < 0) TEST_ERROR
- if(nchunks != NUM_CHUNKS) TEST_ERROR
-
- /* Initialize the array of chunk data for all NUM_CHUNKS chunks */
- for(n = 0; n < NUM_CHUNKS; n++)
- for(ii = 0; ii < CHUNK_NX; ii++)
- for(jj = 0; jj < CHUNK_NY; jj++)
- direct_buf[n][ii][jj] = n + 1;
-
- /* Write only NUM_CHUNKS_WRITTEN chunks at the following logical coords:
- (0,2) (0,3) (1,2) (1,3) */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- {
- offset[0] = ii * CHUNK_NX;
- offset[1] = jj * CHUNK_NY;
- ret = H5Dwrite_chunk(dset, H5P_DEFAULT, filter_mask, offset, buf_size, (void*)direct_buf[n]);
- if(ret < 0) TEST_ERROR
- }
-
- if(H5Fflush(dset, H5F_SCOPE_LOCAL) < 0) TEST_ERROR
-
- /* Close the dataset then... */
- if(H5Dclose(dset) < 0) TEST_ERROR
+ /* Verify chunk indexing scheme and number of chunks, and write data to a
+ subset of chunks. */
+ if(verify_and_write(chunkfile, IMPLICIT_INDEX_DSET_NAME, dspace, H5D_CHUNK_IDX_NONE, NUM_CHUNKS, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification and write failed\n");
- /* ...open it again to test the chunk query functions */
- if((dset = H5Dopen2(chunkfile, DSET_IMPLICIT_INDEX, H5P_DEFAULT)) < 0)
+ /* Open the dataset again to test getting chunk info */
+ if((dset = H5Dopen2(chunkfile, IMPLICIT_INDEX_DSET_NAME, H5P_DEFAULT)) < 0)
TEST_ERROR
- if(H5Dget_num_chunks(dset, H5S_ALL, &nchunks) < 0) TEST_ERROR
-
/* Go through all chunks, and get their info and verify the values */
chk_index = 0;
for(ii = 0; ii < NX/CHUNK_NX; ii++)
for(jj = 0; jj < NY/CHUNK_NY; jj++, chk_index++) {
- int kk;
-
- if(H5Dget_chunk_info(dset, H5S_ALL, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], ii * CHUNK_NX, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], jj * CHUNK_NY, "H5Dget_chunk_info, offset");
+ hsize_t offset[2] = {ii * CHUNK_NX, jj * CHUNK_NY};
- /* Reset variables to pass in to the next call */
- reinit_vars(&read_flt_msk, &addr, &size);
-
- /* Copy offsets to pass in to the next call */
- for(kk = 0; kk < RANK; kk++)
- offset[kk] = out_offset[kk];
+ if(verify_get_chunk_info(dset, H5S_ALL, chk_index, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info failed\n");
/* Get info of a chunk and verify its information. Note that
all chunks in this dataset are allocated because of the property
H5D_ALLOC_TIME_EARLY */
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
+ if(verify_get_chunk_info_by_coord(dset, offset, CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
}
/* Release resourse */
@@ -837,18 +945,16 @@ error:
*-------------------------------------------------------------------------
*/
static herr_t
-test_chunk_info_fixed_array(char *filename, hid_t fapl)
+test_chunk_info_fixed_array(const char *filename, hid_t fapl)
{
- hid_t chunkfile = H5I_INVALID_HID; /* File ID */
- hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
- hid_t dset = H5I_INVALID_HID; /* Dataset ID */
- hid_t cparms = H5I_INVALID_HID; /* Creation plist */
- hsize_t dims[2] = {NX, NY};/* Dataset dimensions */
+ hid_t chunkfile = H5I_INVALID_HID; /* File ID */
+ hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ hid_t cparms = H5I_INVALID_HID; /* Creation plist */
+ hsize_t dims[2] = {NX, NY}; /* Dataset dimensions */
hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
int direct_buf[NUM_CHUNKS][CHUNK_NX][CHUNK_NY];/* Data in chunks */
- int out_buf[NX][NY]; /* Buffer to read data in */
- size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int); /* Buffer size of a chk */
- unsigned filter_mask = 0; /* Filter mask */
+ unsigned flt_msk = 0; /* Filter mask */
unsigned read_flt_msk = 0; /* Filter mask after direct read */
hsize_t offset[2]; /* Offset coordinates of a chunk */
hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
@@ -856,9 +962,9 @@ test_chunk_info_fixed_array(char *filename, hid_t fapl)
hsize_t nchunks = 0; /* Number of chunks */
haddr_t addr = 0; /* Address of an allocated/written chunk */
hsize_t chk_index = 0; /* Index of a chunk */
- int n; /* Used on buffer, to avoid conversion warning */
- hsize_t ii, jj;
- herr_t ret;
+ hsize_t ii, jj; /* Array indices */
+ int n; /* Used as chunk index, but int to avoid conversion warning */
+ herr_t ret; /* Temporary returned value for verifying failure */
TESTING(" Fixed Array index");
@@ -878,139 +984,69 @@ test_chunk_info_fixed_array(char *filename, hid_t fapl)
TEST_ERROR
/* Create a new dataset using cparms creation properties */
- dset = H5Dcreate2(chunkfile, DSET_FIXED_ARR_INDEX, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, FIXED_ARR_INDEX_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
- /* Ensure we're using the correct chunk indexing scheme */
- if(verify_idx_type(dset, H5D_CHUNK_IDX_FARRAY) == FALSE)
- FAIL_PUTS_ERROR("Should be using Fixed Array index type");
-
- /* Close the dataset then... */
+ /* Close the dataset */
if(H5Dclose(dset) < 0) TEST_ERROR
- /* ...open it again to test the chunk query functions */
- if((dset = H5Dopen2(chunkfile, DSET_FIXED_ARR_INDEX, H5P_DEFAULT)) < 0)
- TEST_ERROR
-
- /* Get the number of chunks and verify that no chunk has been written */
- if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != 0) TEST_ERROR
-
- /* Initialize the array of chunk data for all NUM_CHUNKS chunks */
- for(n = 0; n < NUM_CHUNKS; n++)
- for(ii = 0; ii < CHUNK_NX; ii++)
- for(jj = 0; jj < CHUNK_NY; jj++)
- direct_buf[n][ii][jj] = n + 1;
-
- /* Write only NUM_CHUNKS_WRITTEN chunks at the following logical coords:
- (0,2) (0,3) (1,2) (1,3) */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- {
- offset[0] = ii * CHUNK_NX;
- offset[1] = jj * CHUNK_NY;
- ret = H5Dwrite_chunk(dset, H5P_DEFAULT, filter_mask, offset, buf_size, (void*)direct_buf[n]);
- if(ret < 0) TEST_ERROR
- }
+ /* Verify chunk indexing scheme and number of chunks, and write data
+ to a subset of chunks */
+ if(verify_and_write(chunkfile, FIXED_ARR_INDEX_DSET_NAME, dspace, H5D_CHUNK_IDX_FARRAY, NO_CHUNK_WRITTEN, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification and write failed\n");
- /* Read the entire dataset back */
- if(H5Dread(dset, H5T_NATIVE_INT, dspace, dspace, H5P_DEFAULT, out_buf) < 0)
+ /* Open the dataset again to test getting chunk info */
+ if((dset = H5Dopen2(chunkfile, FIXED_ARR_INDEX_DSET_NAME, H5P_DEFAULT)) < 0)
TEST_ERROR
/* Get and verify the number of chunks written */
if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != NUM_CHUNKS_WRITTEN) TEST_ERROR
+ VERIFY(nchunks, NUM_CHUNKS_WRITTEN, "H5Dget_num_chunks, number of chunks");
- /* Get and verify info of the first chunk */
+ /* Get and verify info of each written chunk */
chk_index = 0;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
-
- /* Get and verify info of the second chunk */
- chk_index = 1;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
-
- /* Get and verify info of the third chunk */
- chk_index = 2;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
-
- /* Get and verify info of the last chunk */
- chk_index = 3;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, chk_index++) {
+ offset[0] = ii * CHUNK_NX;
+ offset[1] = jj * CHUNK_NY;
+ if(verify_get_chunk_info(dset, dspace, chk_index, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info failed\n");
+ }
- /* Attempt to get info of empty chunk, should fail */
- chk_index = 5;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
+    /* Attempt to get info using an out-of-range index; chk_index now equals
+       NUM_CHUNKS_WRITTEN, which is out of range, so this should fail */
H5E_BEGIN_TRY {
ret = H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size);
} H5E_END_TRY;
if(ret != FAIL)
FAIL_PUTS_ERROR(" Attempted to get info of a chunk using an out-of-range index.");
- /* Get info of the chunk at logical coordinates (0,2) */
- offset[0] = 0;
- offset[1] = 2 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info_by_coord, filter mask");
-
- /* Get info of the chunk at logical coordinates (1,3) */
- offset[0] = 1 * CHUNK_NX;
- offset[1] = 3 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info_by_coord, filter mask");
-
/* Attempt to get info of empty chunks, verify the returned address and size */
offset[0] = 0;
offset[1] = 0;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
offset[0] = 3 * CHUNK_NX;
offset[1] = 3 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
+
+ /* Initialize the array of chunk data for all NUM_CHUNKS chunks */
+ for(n = 0; n < NUM_CHUNKS; n++)
+ for(ii = 0; ii < CHUNK_NX; ii++)
+ for(jj = 0; jj < CHUNK_NY; jj++)
+ direct_buf[n][ii][jj] = n + 1;
/* Read each chunk and verify the values */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- if(read_each_chunk(dset, ii*CHUNK_NX, jj*CHUNK_NY, (void*)direct_buf[n]) < 0)
+ chk_index = 0;
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, chk_index++) {
+ offset[0] = ii * CHUNK_NX;
+ offset[1] = jj * CHUNK_NY;
+
+ if(read_each_chunk(dset, offset, (void*)direct_buf[chk_index]) < 0)
TEST_ERROR
+ }
/* Release resourse */
if(H5Dclose(dset) < 0) TEST_ERROR
@@ -1050,18 +1086,17 @@ error:
*-------------------------------------------------------------------------
*/
static herr_t
-test_chunk_info_extensible_array(char *filename, hid_t fapl)
+test_chunk_info_extensible_array(const char *filename, hid_t fapl)
{
- hid_t chunkfile = H5I_INVALID_HID; /* File ID */
- hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
- hid_t dset = H5I_INVALID_HID; /* Dataset ID */
- hid_t cparms = H5I_INVALID_HID; /* Creation plist */
- hsize_t dims[2] = {NX, NY};/* Dataset dimensions */
- hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
- hsize_t maxdims[2] = {H5S_UNLIMITED, NY}; /* One unlimited dimension */
+ hid_t chunkfile = H5I_INVALID_HID; /* File ID */
+ hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ hid_t cparms = H5I_INVALID_HID; /* Creation plist */
+ hsize_t dims[2] = {NX, NY}; /* Dataset dimensions */
+ hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, NY}; /* One unlimited dimension */
int direct_buf[NUM_CHUNKS][CHUNK_NX][CHUNK_NY];/* Data in chunks */
- size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int); /* Buffer size of a chk */
- unsigned filter_mask = 0; /* Filter mask */
+ unsigned flt_msk = 0; /* Filter mask */
unsigned read_flt_msk = 0; /* Filter mask after direct read */
hsize_t offset[2]; /* Offset coordinates of a chunk */
hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
@@ -1069,9 +1104,9 @@ test_chunk_info_extensible_array(char *filename, hid_t fapl)
hsize_t nchunks = 0; /* Number of chunks */
haddr_t addr = 0; /* Address of an allocated/written chunk */
hsize_t chk_index = 0; /* Index of a chunk */
- int n; /* Used on buffer, to avoid conversion warning */
- hsize_t ii, jj;
- herr_t ret;
+ hsize_t ii, jj; /* Array indices */
+ int n; /* Used as chunk index, but int to avoid conversion warning */
+ herr_t ret; /* Temporary returned value for verifying failure */
TESTING(" Extensible Array index");
@@ -1091,143 +1126,74 @@ test_chunk_info_extensible_array(char *filename, hid_t fapl)
TEST_ERROR
/* Create a new dataset using cparms creation properties */
- dset = H5Dcreate2(chunkfile, DSET_EXT_ARR_INDEX, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, EXT_ARR_INDEX_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
- /* Ensure we're using the correct chunk indexing scheme */
- if(verify_idx_type(dset, H5D_CHUNK_IDX_EARRAY) == FALSE)
- FAIL_PUTS_ERROR("Should be using Extensible Array index type");
-
- /* Close the dataset then... */
+ /* Close the dataset */
if(H5Dclose(dset) < 0) TEST_ERROR
- /* ...open it again to test the chunk query functions */
- if((dset = H5Dopen2(chunkfile, DSET_EXT_ARR_INDEX, H5P_DEFAULT)) < 0)
- TEST_ERROR
-
- /* Get the number of chunks and verify that no chunk has been written */
- if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != 0) TEST_ERROR
-
- /* Initialize the array of chunk data for all NUM_CHUNKS chunks */
- for(n = 0; n < NUM_CHUNKS; n++)
- for(ii = 0; ii < CHUNK_NX; ii++)
- for(jj = 0; jj < CHUNK_NY; jj++)
- direct_buf[n][ii][jj] = n + 1;
+ /* Verify chunk indexing scheme and number of chunks, and write data
+ to a subset of chunks */
+ if(verify_and_write(chunkfile, EXT_ARR_INDEX_DSET_NAME, dspace, H5D_CHUNK_IDX_EARRAY, NO_CHUNK_WRITTEN, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification and write failed\n");
- /* Write only NUM_CHUNKS_WRITTEN chunks at the following logical coords:
- (0,2) (0,3) (1,2) (1,3) */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- {
- offset[0] = ii * CHUNK_NX;
- offset[1] = jj * CHUNK_NY;
- ret = H5Dwrite_chunk(dset, H5P_DEFAULT, filter_mask, offset, buf_size, (void*)direct_buf[n]);
- if(ret < 0) TEST_ERROR
- }
+ /* Open the dataset again to test getting chunk info */
+ if((dset = H5Dopen2(chunkfile, EXT_ARR_INDEX_DSET_NAME, H5P_DEFAULT)) < 0)
+ TEST_ERROR
/* Get and verify the number of chunks written */
if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != NUM_CHUNKS_WRITTEN) TEST_ERROR
+ VERIFY(nchunks, NUM_CHUNKS_WRITTEN, "H5Dget_num_chunks, number of chunks");
- /* Get and verify info of the first chunk */
+ /* Get and verify info of each written chunk */
chk_index = 0;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 0, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 8, "H5Dget_chunk_info, offset");
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, chk_index++) {
+ offset[0] = ii * CHUNK_NX;
+ offset[1] = jj * CHUNK_NY;
- /* Get and verify info of the second chunk */
- chk_index = 1;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 0, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 12, "H5Dget_chunk_info, offset");
+ if(verify_get_chunk_info(dset, dspace, chk_index, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info failed\n");
- /* Get and verify info of the third chunk */
- chk_index = 2;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 6, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 8, "H5Dget_chunk_info, offset");
-
- /* Get and verify info of the last chunk */
- chk_index = 3;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 6, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 12, "H5Dget_chunk_info, offset");
+ if(verify_get_chunk_info_by_coord(dset, offset, CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
+ }
/* Attempt to get info using an out-of-range index, should fail */
- chk_index = 5;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
+ chk_index = OUTOFRANGE_CHK_INDEX;
H5E_BEGIN_TRY {
ret = H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size);
} H5E_END_TRY;
if(ret != FAIL)
FAIL_PUTS_ERROR(" Attempted to get info of a chunk using an out-of-range index.");
- /* Get info of the chunk at logical coordinates (0,2) */
- offset[0] = 0;
- offset[1] = 2 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info_by_coord, filter mask");
-
- /* Get info of the chunk at logical coordinates (1,3) */
- offset[0] = 1 * CHUNK_NX;
- offset[1] = 3 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info_by_coord, filter mask");
-
/* Attempt to get info of empty chunks, verify the returned address and size */
offset[0] = 0;
offset[1] = 0;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
offset[0] = 3 * CHUNK_NX;
offset[1] = 3 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
+
+ /* Initialize the array of chunk data for all NUM_CHUNKS chunks */
+ for(n = 0; n < NUM_CHUNKS; n++)
+ for(ii = 0; ii < CHUNK_NX; ii++)
+ for(jj = 0; jj < CHUNK_NY; jj++)
+ direct_buf[n][ii][jj] = n + 1;
/* Read each chunk and verify the values */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- if(read_each_chunk(dset, ii*CHUNK_NX, jj*CHUNK_NY, (void*)direct_buf[n]) < 0)
+ chk_index = 0;
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, chk_index++) {
+ offset[0] = ii * CHUNK_NX;
+ offset[1] = jj * CHUNK_NY;
+
+ if(read_each_chunk(dset, offset, (void*)direct_buf[chk_index]) < 0)
TEST_ERROR
+ }
/* Release resourse */
if(H5Dclose(dset) < 0) TEST_ERROR
@@ -1267,18 +1233,17 @@ error:
*-------------------------------------------------------------------------
*/
static herr_t
-test_chunk_info_version2_btrees(char *filename, hid_t fapl)
+test_chunk_info_version2_btrees(const char *filename, hid_t fapl)
{
- hid_t chunkfile = H5I_INVALID_HID; /* File ID */
- hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
- hid_t dset = H5I_INVALID_HID; /* Dataset ID */
- hid_t cparms = H5I_INVALID_HID; /* Creation plist */
+ hid_t chunkfile = H5I_INVALID_HID; /* File ID */
+ hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ hid_t cparms = H5I_INVALID_HID; /* Creation plist */
hsize_t dims[2] = {NX, NY};/* Dataset dimensions */
hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* Two unlimited dims */
int direct_buf[NUM_CHUNKS][CHUNK_NX][CHUNK_NY];/* Data in chunks */
- size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int); /* Buffer size of a chk */
- unsigned filter_mask = 0; /* Filter mask */
+ unsigned flt_msk = 0; /* Filter mask */
unsigned read_flt_msk = 0; /* Filter mask after direct read */
hsize_t offset[2]; /* Offset coordinates of a chunk */
hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
@@ -1286,9 +1251,8 @@ test_chunk_info_version2_btrees(char *filename, hid_t fapl)
hsize_t nchunks = 0; /* Number of chunks */
haddr_t addr = 0; /* Address of an allocated/written chunk */
hsize_t chk_index = 0; /* Index of a chunk */
- int n; /* Used on buffer, to avoid conversion warning */
- hsize_t ii, jj;
- herr_t ret;
+ hsize_t ii, jj; /* Array indices */
+ herr_t ret; /* Temporary returned value for verifying failure */
TESTING(" Version 2 B-trees index");
@@ -1308,143 +1272,68 @@ test_chunk_info_version2_btrees(char *filename, hid_t fapl)
TEST_ERROR
/* Create a new dataset using cparms creation properties */
- dset = H5Dcreate2(chunkfile, DSET_V2_BTREE_INDEX, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, V2_BTREE_INDEX_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
- /* Ensure we're using the correct chunk indexing scheme */
- if(verify_idx_type(dset, H5D_CHUNK_IDX_BT2) == FALSE)
- FAIL_PUTS_ERROR("Should be using Version 2 B-tree index type");
-
- /* Close the dataset then... */
+ /* Close the dataset */
if(H5Dclose(dset) < 0) TEST_ERROR
- /* ...open it again to test the chunk query functions */
- if((dset = H5Dopen2(chunkfile, DSET_V2_BTREE_INDEX, H5P_DEFAULT)) < 0)
- TEST_ERROR
-
- /* Get the number of chunks and verify that no chunk has been written */
- if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != 0) TEST_ERROR
-
- /* Initialize the array of chunk data for all NUM_CHUNKS chunks */
- for(n = 0; n < NUM_CHUNKS; n++)
- for(ii = 0; ii < CHUNK_NX; ii++)
- for(jj = 0; jj < CHUNK_NY; jj++)
- direct_buf[n][ii][jj] = n + 1;
+ /* Verify chunk indexing scheme and number of chunks, and write data
+ to a subset of chunks */
+ if(verify_and_write(chunkfile, V2_BTREE_INDEX_DSET_NAME, dspace, H5D_CHUNK_IDX_BT2, NO_CHUNK_WRITTEN, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification and write failed\n");
- /* Write only NUM_CHUNKS_WRITTEN chunks at the following logical coords:
- (0,2) (0,3) (1,2) (1,3) */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- {
- offset[0] = ii * CHUNK_NX;
- offset[1] = jj * CHUNK_NY;
- ret = H5Dwrite_chunk(dset, H5P_DEFAULT, filter_mask, offset, buf_size, (void*)direct_buf[n]);
- if(ret < 0) TEST_ERROR
- }
+ /* Open the dataset again to test getting chunk info */
+ if((dset = H5Dopen2(chunkfile, V2_BTREE_INDEX_DSET_NAME, H5P_DEFAULT)) < 0)
+ TEST_ERROR
/* Get and verify the number of chunks written */
if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
- if(nchunks != NUM_CHUNKS_WRITTEN) TEST_ERROR
+ VERIFY(nchunks, NUM_CHUNKS_WRITTEN, "H5Dget_num_chunks, number of chunks");
- /* Get and verify info of the first chunk */
+ /* Go through all written chunks, get their info and verify the values */
chk_index = 0;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 0, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 8, "H5Dget_chunk_info, offset");
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, chk_index++) {
+ offset[0] = ii * CHUNK_NX;
+ offset[1] = jj * CHUNK_NY;
- /* Get and verify info of the second chunk */
- chk_index = 1;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 0, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 12, "H5Dget_chunk_info, offset");
+ if(verify_get_chunk_info(dset, dspace, chk_index, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info failed\n");
- /* Get and verify info of the third chunk */
- chk_index = 2;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 6, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 8, "H5Dget_chunk_info, offset");
-
- /* Get and verify info of the last chunk */
- chk_index = 3;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
- if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], 6, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], 12, "H5Dget_chunk_info, offset");
+ if(verify_get_chunk_info_by_coord(dset, offset, CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
+ }
/* Attempt to provide out-of-range offsets, should fail */
- chk_index = 5;
- reinit_vars(&read_flt_msk, &addr, &size);
- out_offset[0] = out_offset[1] = 0;
+ chk_index = OUTOFRANGE_CHK_INDEX;
H5E_BEGIN_TRY {
ret = H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size);
} H5E_END_TRY;
if(ret != FAIL)
FAIL_PUTS_ERROR(" Attempted to get info of a chunk using an out-of-range index.");
- /* Get info of the chunk at logical coordinates (0,2) */
- offset[0] = 0;
- offset[1] = 2 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info_by_coord, filter mask");
-
- /* Get info of the chunk at logical coordinates (1,3) */
- offset[0] = 1 * CHUNK_NX;
- offset[1] = 3 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, CHUNK_SIZE, "H5Dget_chunk_info_by_coord, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info_by_coord, filter mask");
-
/* Attempt to get info of empty chunks, verify the returned address and size */
offset[0] = 0;
offset[1] = 0;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
offset[0] = 3 * CHUNK_NX;
offset[1] = 3 * CHUNK_NY;
- if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
TEST_ERROR
- VERIFY(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord, chunk address");
- VERIFY(size, 0, "H5Dget_chunk_info_by_coord, chunk size");
/* Read each chunk and verify the values */
- n = 0;
- for(ii = 0; ii < 2; ii++)
- for(jj = 2; jj < 4; jj++, n++)
- if(read_each_chunk(dset, ii*CHUNK_NX, jj*CHUNK_NY, (void*)direct_buf[n]) < 0)
+ chk_index = 0;
+ for(ii = START_CHK_X; ii < END_CHK_X; ii++)
+ for(jj = START_CHK_Y; jj < END_CHK_Y; jj++, chk_index++) {
+ offset[0] = ii * CHUNK_NX;
+ offset[1] = jj * CHUNK_NY;
+
+ if(read_each_chunk(dset, offset, (void*)direct_buf[chk_index]) < 0)
TEST_ERROR
+ }
/* Release resourse */
if(H5Dclose(dset) < 0) TEST_ERROR
@@ -1467,6 +1356,168 @@ error:
} /* test_chunk_info_version2_btrees() */
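The refactored loops in the index tests above funnel their per-chunk checks through verify_get_chunk_info(), whose definition lies outside this hunk. A minimal sketch of what such a helper could look like, assuming the H5Dget_chunk_info() call, HADDR_UNDEF, and the SUCCEED/FAIL conventions already used in this file (the real helper in chunk_info.c may differ in detail):

/* Hypothetical sketch of a per-chunk verification helper; the actual
 * verify_get_chunk_info() defined elsewhere in chunk_info.c may differ. */
static herr_t
verify_chunk_info_sketch(hid_t dset, hid_t dspace, hsize_t chk_index,
                         hsize_t exp_size, const hsize_t *exp_offset,
                         unsigned exp_flt_msk)
{
    hsize_t  out_offset[2] = {0, 0}; /* Offset returned by the query */
    unsigned read_flt_msk = 0;       /* Filter mask returned by the query */
    haddr_t  addr = 0;               /* Chunk address returned by the query */
    hsize_t  size = 0;               /* Chunk size returned by the query */

    /* Query the chunk by its index among the written chunks */
    if(H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
        return FAIL;

    /* A written chunk must have a defined address and the expected size,
     * filter mask, and logical offset */
    if(addr == HADDR_UNDEF || size != exp_size || read_flt_msk != exp_flt_msk)
        return FAIL;
    if(out_offset[0] != exp_offset[0] || out_offset[1] != exp_offset[1])
        return FAIL;

    return SUCCEED;
}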
/*-------------------------------------------------------------------------
+ * Function: test_basic_query
+ *
+ * Purpose: Tests basic operations to ensure the chunk query functions
+ * work properly.
+ *
+ * Return: Success: SUCCEED
+ * Failure: FAIL
+ *
+ * Note: The dataspace argument in these new functions is currently
+ * not used. The functionality involving the dataspace will be
+ * implemented in the next version.
+ *
+ * Date: August 2019
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+test_basic_query(hid_t fapl)
+{
+ char filename[FILENAME_BUF_SIZE]; /* File name */
+ hid_t basicfile = H5I_INVALID_HID; /* File ID */
+ hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ hid_t cparms = H5I_INVALID_HID; /* Creation plist */
+ hsize_t dims[2] = {NX, NY}; /* Dataset dimensions */
+ hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
+ int direct_buf[CHUNK_NX][CHUNK_NY]; /* Data in chunks */
+ unsigned flt_msk = 0; /* Filter mask */
+ unsigned read_flt_msk = 0; /* Filter mask after direct read */
+ hsize_t offset[2]; /* Offset coordinates of a chunk */
+ hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
+ hsize_t size = 0; /* Size of an allocated/written chunk */
+ hsize_t nchunks = 0; /* Number of chunks */
+ haddr_t addr = 0; /* Address of an allocated/written chunk */
+ hsize_t chk_index = 0; /* Index of a chunk */
+ hsize_t ii, jj; /* Array indices */
+ herr_t ret; /* Temporary returned value for verifying failure */
+
+ TESTING("basic operations");
+
+ /* Create the file */
+ h5_fixname(BASIC_FILE, fapl, filename, sizeof filename);
+
+ /* Create a new file. */
+ if((basicfile = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
+ TEST_ERROR;
+
+ /* Create dataspace */
+ if((dspace = H5Screate_simple(RANK, dims, NULL)) < 0)
+ TEST_ERROR
+
+ /* Enable chunking */
+ if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
+ TEST_ERROR
+
+ if(H5Pset_chunk(cparms, RANK, chunk_dims) < 0)
+ TEST_ERROR
+
+ /* Create a new dataset using cparms creation properties */
+ dset = H5Dcreate2(basicfile, SIMPLE_CHUNKED_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT);
+ if(dset < 0) TEST_ERROR
+
+ /* Get the number of chunks and verify that no chunk has been written */
+ if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
+ VERIFY(nchunks, NO_CHUNK_WRITTEN, "H5Dget_num_chunks, number of chunks");
+
+ /* Initialize the array of chunk data for the single chunk */
+ for(ii = 0; ii < CHUNK_NX; ii++)
+ for(jj = 0; jj < CHUNK_NY; jj++)
+ direct_buf[ii][jj] = (int)(ii*jj);
+
+ /* Write the chunk of data */
+ offset[0] = CHUNK_NX;
+ offset[1] = CHUNK_NY;
+ if(H5Dwrite_chunk(dset, H5P_DEFAULT, flt_msk, offset, CHK_SIZE, direct_buf) < 0)
+ TEST_ERROR;
+
+ /* Get and verify that one chunk has been written */
+ if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
+ VERIFY(nchunks, ONE_CHUNK_WRITTEN, "H5Dget_num_chunks, number of chunks");
+
+ /* Get and verify info of the first and only chunk */
+ if(verify_get_chunk_info(dset, H5S_ALL, 0, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification H5Dget_chunk_info failed\n");
+
+ /* Get and verify info of the chunk at the offset (CHUNK_NX,CHUNK_NY) */
+ if(verify_get_chunk_info_by_coord(dset, offset, CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
+
+ /* Attempt to get chunk info given an invalid chunk index and verify
+ * that failure occurs */
+ chk_index = INVALID_CHK_INDEX;
+ reinit_vars(&read_flt_msk, &addr, &size);
+ H5E_BEGIN_TRY {
+ ret = H5Dget_chunk_info(dset, dspace, chk_index, out_offset, &read_flt_msk, &addr, &size);
+ } H5E_END_TRY;
+ if(ret != FAIL)
+ TEST_ERROR
+
+ /* Write the chunk of data to another location */
+ offset[0] = 0;
+ offset[1] = 0;
+ if(H5Dwrite_chunk(dset, H5P_DEFAULT, flt_msk, offset, CHK_SIZE, direct_buf) < 0)
+ TEST_ERROR;
+
+ /* Get and verify that two chunks have been written */
+ if(H5Dget_num_chunks(dset, dspace, &nchunks) < 0) TEST_ERROR
+ VERIFY(nchunks, TWO_CHUNKS_WRITTEN, "H5Dget_num_chunks, number of chunks");
+
+ /* Get and verify info of the first written chunk in the dataset, its
+ offset should be (0,0) */
+ if(verify_get_chunk_info(dset, H5S_ALL, 0, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification H5Dget_chunk_info failed\n");
+
+ /* Get and verify info of the chunk at the offset (0,0) */
+ if(verify_get_chunk_info_by_coord(dset, offset, CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
+
+ /* Get and verify info of the second written chunk in the dataset, its
+ offset should be (CHUNK_NX, CHUNK_NY) */
+ offset[0] = CHUNK_NX;
+ offset[1] = CHUNK_NY;
+ if(verify_get_chunk_info(dset, H5S_ALL, 1, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification H5Dget_chunk_info failed\n");
+
+ /* Get and verify info of the chunk at the offset (CHUNK_NX, CHUNK_NY) */
+ if(verify_get_chunk_info_by_coord(dset, offset, CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
+
+ /* Get and verify info of an empty chunk, at offset
+ (2*CHUNK_NX, 2*CHUNK_NY) */
+ offset[0] = 2*CHUNK_NX;
+ offset[1] = 2*CHUNK_NY;
+ if(verify_empty_chunk_info(dset, offset) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_empty_chunk_info failed\n");
+
+ /* Release resource */
+ if(H5Dclose(dset) < 0) TEST_ERROR
+ if(H5Sclose(dspace) < 0) TEST_ERROR
+ if(H5Pclose(cparms) < 0) TEST_ERROR
+ if(H5Fclose(basicfile) < 0) TEST_ERROR
+
+ /* Remove the test file */
+ remove(filename);
+
+ PASSED();
+ return SUCCEED;
+
+error:
+ H5E_BEGIN_TRY {
+ H5Dclose(dset);
+ H5Sclose(dspace);
+ H5Pclose(cparms);
+ H5Fclose(basicfile);
+ } H5E_END_TRY;
+
+ H5_FAILED();
+ return FAIL;
+} /* test_basic_query() */
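For reference, the query pattern exercised by test_basic_query() reduces to the application-level sketch below. It uses only calls that appear in this patch (H5Dget_num_chunks, H5Dget_chunk_info, H5Dget_chunk_info_by_coord); dset is assumed to be an open, rank-2 chunked dataset like the ones in these tests:

/* Sketch: enumerate the written chunks of an open chunked dataset.
 * 'dset' is assumed to be a valid rank-2 chunked dataset ID. */
static herr_t
list_written_chunks_sketch(hid_t dset)
{
    hsize_t  nchunks = 0;        /* Number of written chunks */
    hsize_t  chk_index;          /* Chunk index */
    hsize_t  offset[2] = {0, 0}; /* Logical offset of a chunk */
    unsigned flt_msk = 0;        /* Filter mask applied to a chunk */
    haddr_t  addr = HADDR_UNDEF; /* File address of a chunk */
    hsize_t  size = 0;           /* Storage size of a chunk */

    /* The dataspace argument is currently unused, so H5S_ALL is passed */
    if(H5Dget_num_chunks(dset, H5S_ALL, &nchunks) < 0)
        return FAIL;

    /* Walk the written chunks by index */
    for(chk_index = 0; chk_index < nchunks; chk_index++) {
        if(H5Dget_chunk_info(dset, H5S_ALL, chk_index, offset, &flt_msk, &addr, &size) < 0)
            return FAIL;
        HDprintf("chunk %llu at (%llu, %llu): %llu bytes\n",
                 (unsigned long long)chk_index,
                 (unsigned long long)offset[0], (unsigned long long)offset[1],
                 (unsigned long long)size);
    }

    /* A specific chunk can also be queried by its logical coordinates */
    offset[0] = 0;
    offset[1] = 0;
    if(H5Dget_chunk_info_by_coord(dset, offset, &flt_msk, &addr, &size) < 0)
        return FAIL;

    return SUCCEED;
}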
+
+/*-------------------------------------------------------------------------
* Function: test_failed_attempts
*
* Purpose: Test attempting to use chunk query functions incorrectly.
@@ -1483,13 +1534,13 @@ error:
*-------------------------------------------------------------------------
*/
static herr_t
-test_failed_attempts(char *filename, hid_t fapl)
+test_failed_attempts(const char *filename, hid_t fapl)
{
- hid_t chunkfile = H5I_INVALID_HID; /* File ID */
- hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
- hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ hid_t chunkfile = H5I_INVALID_HID; /* File ID */
+ hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
hsize_t dims[2] = {NX, NY};/* Dataset dimensions */
- int in_buf[NX][NY]; /* Input buffer */
+ int data_buf[NX][NY]; /* Input buffer */
unsigned read_flt_msk = 0; /* Filter mask after direct read */
hsize_t offset[2]; /* Offset coordinates of a chunk */
hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
@@ -1497,8 +1548,8 @@ test_failed_attempts(char *filename, hid_t fapl)
hsize_t nchunks = 0; /* Number of chunks */
haddr_t addr = 0; /* Address of an allocated/written chunk */
hsize_t chk_index = 0; /* Index of a chunk */
- int ii, jj;
- herr_t ret = 0;
+ hsize_t ii, jj; /* Array indices */
+ herr_t ret; /* Temporary returned value for verifying failure */
TESTING(" Invalid Operations");
@@ -1511,23 +1562,23 @@ test_failed_attempts(char *filename, hid_t fapl)
TEST_ERROR
/* Create a contiguous dataset */
- dset = H5Dcreate2(chunkfile, DSET_CONTIGUOUS, H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
+ dset = H5Dcreate2(chunkfile, CONTIGUOUS_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
if(dset < 0) TEST_ERROR
/* Initialize the array of data */
for(ii = 0; ii < NX; ii++)
for(jj = 0; jj < NY; jj++)
- in_buf[ii][jj] = (ii*jj);
+ data_buf[ii][jj] = (int)(ii*jj);
/* Write the data */
- if(H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, in_buf) < 0)
+ if(H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data_buf) < 0)
TEST_ERROR
- /* Close the dataset then... */
+ /* Close the dataset */
if(H5Dclose(dset) < 0) TEST_ERROR
- /* ...open it again to test the chunk query functions on contiguous dataset */
- if((dset = H5Dopen2(chunkfile, DSET_CONTIGUOUS, H5P_DEFAULT)) < 0)
+ /* Open it again to test the chunk query functions on contiguous dataset */
+ if((dset = H5Dopen2(chunkfile, CONTIGUOUS_DSET_NAME, H5P_DEFAULT)) < 0)
TEST_ERROR
/* Attempt to get the number of chunks on contiguous dataset, should fail */
@@ -1576,7 +1627,7 @@ error:
} /* test_failed_attempts() */
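test_failed_attempts() relies on the H5E_BEGIN_TRY/H5E_END_TRY idiom to exercise calls that must fail without polluting the error stack. A condensed sketch of that idiom, assuming dset identifies a contiguous (non-chunked) dataset as in the test above:

/* Sketch: verify that a chunk query fails on a contiguous dataset.
 * 'dset' is assumed to be a contiguous dataset, as in test_failed_attempts(). */
static herr_t
expect_query_failure_sketch(hid_t dset)
{
    hsize_t nchunks = 0;
    herr_t  ret;

    /* Suppress error output while making a call that is expected to fail */
    H5E_BEGIN_TRY {
        ret = H5Dget_num_chunks(dset, H5S_ALL, &nchunks);
    } H5E_END_TRY;

    /* Success here would indicate a bug: chunk queries are only defined
     * for chunked datasets */
    return (ret == FAIL) ? SUCCEED : FAIL;
}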
/*-------------------------------------------------------------------------
- * Function: test_get_chunk_info_110
+ * Function: test_get_chunk_info_v110
*
* Purpose: Test getting various chunk information in version 1.10.
*
@@ -1588,20 +1639,20 @@ error:
* will be implemented in the next version.
*
* Description:
- * This function tests the new API functions added for EED-343:
- * H5Dget_num_chunks, H5Dget_chunk_info, and H5Dget_chunk_info_by_coord
- * for low bound beyond 1.8.
+ * This function tests the new API functions added for HDFFV-10677:
+ * H5Dget_num_chunks, H5Dget_chunk_info, and
+ * H5Dget_chunk_info_by_coord for low bound beyond 1.8.
*
* Date: October 2018
*
*-------------------------------------------------------------------------
*/
static herr_t
-test_get_chunk_info_110(hid_t fapl)
+test_get_chunk_info_v110(hid_t fapl)
{
- hid_t chunkfile = H5I_INVALID_HID; /* File ID */
- char filename[FILENAME_BUF_SIZE];
- H5F_libver_t low, high; /* File format bounds */
+ char filename[FILENAME_BUF_SIZE]; /* File name */
+ hid_t chunkfile = H5I_INVALID_HID; /* File ID */
+ H5F_libver_t low, high; /* File format bounds */
TESTING("getting chunk information in file with versions 1.10 and later");
HDprintf("\n"); /* to list sub-tests */
@@ -1655,10 +1706,10 @@ test_get_chunk_info_110(hid_t fapl)
error:
H5_FAILED();
return FAIL;
-} /* test_get_chunk_info_110() */
+} /* test_get_chunk_info_v110() */
/*-------------------------------------------------------------------------
- * Function: test_filter_mask_with_skip_compress
+ * Function: test_flt_msk_with_skip_compress
*
* Purpose: Test getting chunk info when compression filter is skipped.
*
@@ -1670,37 +1721,33 @@ error:
*-------------------------------------------------------------------------
*/
static herr_t
-test_filter_mask_with_skip_compress(hid_t fapl)
+test_flt_msk_with_skip_compress(hid_t fapl)
{
- hid_t filter_file = H5I_INVALID_HID; /* File for filter mask */
- char filename[FILENAME_BUF_SIZE];
- hid_t dataspace = -1, dataset = -1;
- hid_t mem_space = -1;
- hid_t cparms = -1, dxpl = -1;
- hsize_t dims[2] = {NX, NY};
- hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
- hsize_t chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
- unsigned filter_mask = 0;
- unsigned read_flt_msk = 0;
- int direct_buf[CHUNK_NX][CHUNK_NY];
- int check_chunk[CHUNK_NX][CHUNK_NY];
- hsize_t offset[2] = {0, 0};
- hsize_t out_offset[2] = {0, 0}; /* Buffer to get offset coordinates */
- hsize_t size = 0; /* Size of an allocated/written chunk */
- hsize_t nchunks = 0; /* Number of chunks */
- haddr_t addr = 0; /* Address of an allocated/written chunk */
- hsize_t chk_index = 0; /* Index of a chunk */
- size_t buf_size = CHUNK_NX*CHUNK_NY*sizeof(int);
- int aggression = 9; /* Compression aggression setting */
- unsigned read_filter_mask = 0; /* filter mask after direct read */
- int read_direct_buf[CHUNK_NX][CHUNK_NY];
- hsize_t read_buf_size = 0; /* buf size */
- hsize_t start[2]; /* Start of hyperslab */
- hsize_t stride[2]; /* Stride of hyperslab */
- hsize_t count[2]; /* Block count */
- hsize_t block[2]; /* Block sizes */
- int ii, jj, n;
- herr_t status;
+ char filename[FILENAME_BUF_SIZE]; /* File name */
+ hid_t filter_file = H5I_INVALID_HID; /* File ID for filter mask */
+ hid_t dspace = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t mem_space = H5I_INVALID_HID; /* Dataspace ID */
+ hid_t dset = H5I_INVALID_HID; /* Dataset ID */
+ hid_t cparms = H5I_INVALID_HID; /* Creation plist */
+ hid_t dxpl = H5I_INVALID_HID; /* Transfer plist */
+ hsize_t dims[2] = {NX, NY}; /* Dataset dimensions */
+ hsize_t maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED}; /* 2 unlimited dims */
+ hsize_t chunk_dims[2] = {CHUNK_NX, CHUNK_NY}; /* Chunk dimensions */
+ int direct_buf[CHUNK_NX][CHUNK_NY]; /* One chunk of data */
+ int check_chunk[CHUNK_NX][CHUNK_NY]; /* Buffer to read data in */
+ int read_direct_buf[CHUNK_NX][CHUNK_NY]; /* Buffer to read a chunk */
+ hsize_t read_buf_size = 0; /* Storage size of the written chunk */
+ unsigned flt_msk = 0; /* Filter mask */
+ unsigned read_flt_msk = 0; /* Filter mask after direct read */
+ hsize_t offset[2] = {0, 0}; /* Offset coordinates of a chunk */
+ hsize_t nchunks = 0; /* Number of chunks */
+ hsize_t chk_index = 0; /* Index of a chunk */
+ int aggression = 9; /* Compression aggression setting */
+ hsize_t start[2]; /* Start of hyperslab */
+ hsize_t stride[2]; /* Stride of hyperslab */
+ hsize_t count[2]; /* Block count */
+ hsize_t block[2]; /* Block sizes */
+ int ii, jj; /* Array indices */
TESTING("getting filter mask when compression filter is skipped");
@@ -1712,7 +1759,7 @@ test_filter_mask_with_skip_compress(hid_t fapl)
TEST_ERROR;
/* Create file data space with unlimited dimensions. */
- if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
+ if((dspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
TEST_ERROR;
/* Create memory data space. */
@@ -1724,14 +1771,14 @@ test_filter_mask_with_skip_compress(hid_t fapl)
if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
TEST_ERROR;
- if((status = H5Pset_chunk( cparms, RANK, chunk_dims)) < 0)
+ if(H5Pset_chunk(cparms, RANK, chunk_dims) < 0)
TEST_ERROR;
- if((status = H5Pset_deflate( cparms, (unsigned ) aggression)) < 0)
+ if(H5Pset_deflate(cparms, (unsigned)aggression) < 0)
TEST_ERROR;
/* Create a new dataset using cparms creation properties. */
- if((dataset = H5Dcreate2(filter_file, DATASETNAME2, H5T_NATIVE_INT, dataspace, H5P_DEFAULT, cparms, H5P_DEFAULT)) < 0)
+ if((dset = H5Dcreate2(filter_file, SKIP_FILTER_DSET_NAME, H5T_NATIVE_INT, dspace, H5P_DEFAULT, cparms, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Create transfer property list for writing */
@@ -1740,26 +1787,25 @@ test_filter_mask_with_skip_compress(hid_t fapl)
/* Initialize data for one chunk */
for(ii = 0; ii < CHUNK_NX; ii++)
- for(jj = 0; jj < CHUNK_NY; jj++) {
- direct_buf[ii][jj] = n++;
- }
+ for(jj = 0; jj < CHUNK_NY; jj++)
+ direct_buf[ii][jj] = (int)(ii*jj);
/* Indicate the compression filter is to be skipped. */
- filter_mask = 0x00000001;
+ flt_msk = 0x00000001;
/* Write a chunk of uncompressed data */
offset[0] = CHUNK_NX;
offset[1] = CHUNK_NY;
- if((status = H5Dwrite_chunk(dataset, H5P_DEFAULT, filter_mask, offset, buf_size, direct_buf)) < 0)
+ if(H5Dwrite_chunk(dset, H5P_DEFAULT, flt_msk, offset, CHK_SIZE, direct_buf) < 0)
TEST_ERROR;
- if(H5Fflush(dataset, H5F_SCOPE_LOCAL) < 0)
+ if(H5Fflush(dset, H5F_SCOPE_LOCAL) < 0)
TEST_ERROR;
/* Close and re-open the dataset */
- if(H5Dclose(dataset) < 0)
+ if(H5Dclose(dset) < 0)
TEST_ERROR;
- if((dataset = H5Dopen2(filter_file, DATASETNAME2, H5P_DEFAULT)) < 0)
+ if((dset = H5Dopen2(filter_file, SKIP_FILTER_DSET_NAME, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Select hyperslab for the chunk just written in the file */
@@ -1767,88 +1813,80 @@ test_filter_mask_with_skip_compress(hid_t fapl)
stride[0] = 1; stride[1] = 1;
count[0] = 1; count[1] = 1;
block[0] = CHUNK_NX; block[1] = CHUNK_NY;
- if((status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, start, stride, count, block)) < 0)
+ if(H5Sselect_hyperslab(dspace, H5S_SELECT_SET, start, stride, count, block) < 0)
TEST_ERROR;
/* Read the chunk back */
- if((status = H5Dread(dataset, H5T_NATIVE_INT, mem_space, dataspace, H5P_DEFAULT, check_chunk)) < 0)
+ if(H5Dread(dset, H5T_NATIVE_INT, mem_space, dspace, H5P_DEFAULT, check_chunk) < 0)
TEST_ERROR;
/* Check that the values read are the same as the values written */
- for(ii = 0; ii < CHUNK_NX; ii++) {
- for(jj = 0; jj < CHUNK_NY; jj++) {
+ for(ii = 0; ii < CHUNK_NX; ii++)
+ for(jj = 0; jj < CHUNK_NY; jj++)
if(direct_buf[ii][jj] != check_chunk[ii][jj]) {
HDprintf(" 1. Read different values than written.");
HDprintf(" At index %d,%d\n", ii, jj);
HDprintf(" direct_buf=%d, check_chunk=%d\n", direct_buf[ii][jj], check_chunk[ii][jj]);
TEST_ERROR;
}
- }
- }
/* Query chunk storage size */
- if((status = H5Dget_chunk_storage_size(dataset, offset, &read_buf_size)) < 0)
+ if(H5Dget_chunk_storage_size(dset, offset, &read_buf_size) < 0)
TEST_ERROR;
- if(read_buf_size != buf_size)
+ if(read_buf_size != CHK_SIZE)
TEST_ERROR;
/* Read the raw chunk back with H5Dread_chunk */
HDmemset(&read_direct_buf, 0, sizeof(read_direct_buf));
- if((status = H5Dread_chunk(dataset, H5P_DEFAULT, offset, &read_filter_mask, read_direct_buf)) < 0)
+ if(H5Dread_chunk(dset, H5P_DEFAULT, offset, &read_flt_msk, read_direct_buf) < 0)
TEST_ERROR;
- if(read_filter_mask != filter_mask)
+ if(read_flt_msk != flt_msk)
TEST_ERROR;
/* Check that the direct chunk read is the same as the chunk written */
- for(ii = 0; ii < CHUNK_NX; ii++) {
- for(jj = 0; jj < CHUNK_NY; jj++) {
+ for(ii = 0; ii < CHUNK_NX; ii++)
+ for(jj = 0; jj < CHUNK_NY; jj++)
if(direct_buf[ii][jj] != read_direct_buf[ii][jj]) {
HDprintf(" 1. Read different values than written.");
HDprintf(" At index %d,%d\n", ii, jj);
HDprintf(" direct_buf=%d, read_direct_buf=%d\n", direct_buf[ii][jj], read_direct_buf[ii][jj]);
TEST_ERROR;
}
- }
- }
/* Get and verify the number of chunks written */
- if(H5Dget_num_chunks(dataset, H5S_ALL, &nchunks) < 0) TEST_ERROR
- if(nchunks != 1) TEST_ERROR
+ if(H5Dget_num_chunks(dset, H5S_ALL, &nchunks) < 0) TEST_ERROR
+ VERIFY(nchunks, ONE_CHUNK_WRITTEN, "H5Dget_num_chunks, number of chunks");
/* Get and verify info of the first and only chunk */
chk_index = 0;
- reinit_vars(&read_flt_msk, &addr, &size);
- if(H5Dget_chunk_info(dataset, H5S_ALL, chk_index, out_offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info");
- VERIFY(size, buf_size, "H5Dget_chunk_info, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info, filter mask");
- VERIFY(out_offset[0], CHUNK_NX, "H5Dget_chunk_info, offset");
- VERIFY(out_offset[1], CHUNK_NY, "H5Dget_chunk_info, offset");
+ offset[0] = CHUNK_NX;
+ offset[1] = CHUNK_NY;
+ if(verify_get_chunk_info(dset, H5S_ALL, chk_index, CHK_SIZE, offset, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info failed\n");
/* Get info of the chunk at the specified offsets and verify its info */
- if(H5Dget_chunk_info_by_coord(dataset, offset, &read_flt_msk, &addr, &size) < 0)
- TEST_ERROR
- CHECK(addr, HADDR_UNDEF, "H5Dget_chunk_info_by_coord");
- VERIFY(size, buf_size, "H5Dget_chunk_info_by_coord, chunk size");
- VERIFY(read_flt_msk, filter_mask, "H5Dget_chunk_info_by_coord, filter mask");
+ if(verify_get_chunk_info_by_coord(dset, offset, CHK_SIZE, flt_msk) == FAIL)
+ FAIL_PUTS_ERROR("Verification verify_get_chunk_info_by_coord failed\n");
/* Release resourse */
- if(H5Dclose(dataset) < 0) TEST_ERROR
+ if(H5Dclose(dset) < 0) TEST_ERROR
if(H5Sclose(mem_space) < 0) TEST_ERROR
- if(H5Sclose(dataspace) < 0) TEST_ERROR
+ if(H5Sclose(dspace) < 0) TEST_ERROR
if(H5Pclose(cparms) < 0) TEST_ERROR
if(H5Pclose(dxpl) < 0) TEST_ERROR
if(H5Fclose(filter_file) < 0) TEST_ERROR
+ /* Remove the test file */
+ remove(filename);
+
PASSED();
return SUCCEED;
error:
H5E_BEGIN_TRY {
- H5Dclose(dataset);
+ H5Dclose(dset);
H5Sclose(mem_space);
- H5Sclose(dataspace);
+ H5Sclose(dspace);
H5Pclose(cparms);
H5Pclose(dxpl);
H5Fclose(filter_file);
@@ -1856,7 +1894,7 @@ error:
H5_FAILED();
return FAIL;
-} /* test_filter_mask_with_skip_compress() */
+} /* test_flt_msk_with_skip_compress() */
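The skip-compress test hinges on the filter mask passed to H5Dwrite_chunk(): bit 0 set means the first filter (deflate here) is skipped for that chunk, and the same mask must come back from the query functions. A condensed sketch of that round trip, assuming a deflate-enabled chunked dataset dset, a chunk-sized buffer buf of CHK_SIZE bytes, and the rank-2 chunk layout used in this test:

/* Sketch: write one chunk with the deflate filter skipped and confirm that
 * the chunk queries report the same filter mask.  'dset', 'buf', CHK_SIZE,
 * CHUNK_NX, and CHUNK_NY are assumed from the surrounding test. */
static herr_t
skip_compress_round_trip_sketch(hid_t dset, const void *buf)
{
    unsigned flt_msk = 0x00000001;             /* Bit 0 set: skip the first (deflate) filter */
    unsigned read_flt_msk = 0;                 /* Filter mask reported by the query */
    hsize_t  offset[2] = {CHUNK_NX, CHUNK_NY}; /* Logical offset of the chunk */
    haddr_t  addr = HADDR_UNDEF;               /* File address of the chunk */
    hsize_t  size = 0;                         /* Storage size of the chunk */

    /* Write the chunk directly, bypassing the deflate filter */
    if(H5Dwrite_chunk(dset, H5P_DEFAULT, flt_msk, offset, CHK_SIZE, buf) < 0)
        return FAIL;

    /* Query the chunk by coordinates; the stored mask must match */
    if(H5Dget_chunk_info_by_coord(dset, offset, &read_flt_msk, &addr, &size) < 0)
        return FAIL;

    return (read_flt_msk == flt_msk) ? SUCCEED : FAIL;
}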
/*-------------------------------------------------------------------------
* Function: main
@@ -1882,14 +1920,17 @@ main(void)
/* Create a copy of file access property list */
if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) TEST_ERROR
+ /* Test basic operations on the chunk query functions */
+ nerrors += test_basic_query(fapl) < 0 ? 1 : 0;
+
/* Tests getting chunk information of version 1.8 and prior */
- nerrors += test_get_chunk_info_highest18(fapl) < 0 ? 1 : 0;
+ nerrors += test_get_chunk_info_highest_v18(fapl) < 0 ? 1 : 0;
/* Tests getting chunk information of version 1.10 */
- nerrors += test_get_chunk_info_110(fapl) < 0 ? 1 : 0;
+ nerrors += test_get_chunk_info_v110(fapl) < 0 ? 1 : 0;
/* Tests getting filter mask when compression filter is skipped */
- nerrors += test_filter_mask_with_skip_compress(fapl) < 0 ? 1 : 0;
+ nerrors += test_flt_msk_with_skip_compress(fapl) < 0 ? 1 : 0;
if(nerrors)
TEST_ERROR
@@ -1909,7 +1950,6 @@ error:
/****************************************************************************
Additional tests to be added:
-- create/write to a dataset, do the query before closing the dataset
- do the query when extending the dataset (shrink or expand)
- verify that invalid input parameters are handled properly