author Vailin Choi <vchoi@hdfgroup.org> 2010-08-20 19:25:49 (GMT)
committer Vailin Choi <vchoi@hdfgroup.org> 2010-08-20 19:25:49 (GMT)
commit 8b07a4a214c0fc8c5d6e3a859f32ee5127cdf473 (patch)
tree 633586c55f1316ce7545f9d5f98844d678f1adcb
parent 9c218ea879de3f13464aed3c96d5298d1c934774 (diff)
[svn-r19267] Modifications to use Fixed Array Indexing for extendible chunked datasets
(fixed max. dim. setting but excluding H5S_UNLIMITED)
-rw-r--r-- src/H5Dchunk.c 35
-rw-r--r-- src/H5Dfarray.c 43
-rw-r--r-- src/H5Dlayout.c 70
-rw-r--r-- src/H5Oprivate.h 7
-rw-r--r-- src/H5Pdcpl.c 2
-rw-r--r-- test/dsets.c 147
6 files changed, 197 insertions, 107 deletions
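The change targets chunked datasets whose maximum dimensions are set to fixed values (no H5S_UNLIMITED) that are larger than the current dimensions. Below is a minimal user-level sketch, not part of the patch, of creating such a dataset through the public HDF5 API; the file name, dataset name, and sizes are illustrative and error checking is omitted. Requesting the latest file-format bounds is what allows the library to pick a non-default chunk index.

/* Illustrative sketch (not part of the patch): an extendible chunked dataset
 * with fixed (non-H5S_UNLIMITED) maximum dimensions, created with the latest
 * file-format bounds so a Fixed Array chunk index can be selected.
 */
#include "hdf5.h"

int main(void)
{
    hsize_t cur_dims[2]   = {10, 10};    /* current dimensions */
    hsize_t max_dims[2]   = {100, 100};  /* fixed maximum dimensions */
    hsize_t chunk_dims[2] = {5, 5};
    hid_t fapl, fid, sid, dcpl, did;

    fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);

    fid  = H5Fcreate("farray_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    sid  = H5Screate_simple(2, cur_dims, max_dims);
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 2, chunk_dims);

    did  = H5Dcreate2(fid, "extendible_fixed_max", H5T_NATIVE_INT, sid,
                      H5P_DEFAULT, dcpl, H5P_DEFAULT);

    /* The dataset can later be grown up to max_dims with H5Dset_extent(). */

    H5Dclose(did);
    H5Pclose(dcpl);
    H5Sclose(sid);
    H5Fclose(fid);
    H5Pclose(fapl);
    return 0;
}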
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index 98dfff9..fa4b62c 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -190,7 +190,7 @@ H5D_nonexistent_readvv(const H5D_io_info_t *io_info,
/* Helper routines */
static herr_t H5D_chunk_set_info_real(H5O_layout_chunk_t *layout, unsigned ndims,
- const hsize_t *curr_dims);
+ const hsize_t *curr_dims, const hsize_t *max_dims);
static void *H5D_chunk_alloc(size_t size, const H5O_pline_t *pline);
static void *H5D_chunk_xfree(void *chk, const H5O_pline_t *pline);
static void *H5D_chunk_realloc(void *chk, size_t size,
@@ -283,10 +283,15 @@ H5FL_BLK_DEFINE_STATIC(chunk);
* Programmer: Quincey Koziol
* Tuesday, June 30, 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to handle extendible datasets for Fixed Array Indexing.
+ * (fixed max. dim. setting but not H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
static herr_t
-H5D_chunk_set_info_real(H5O_layout_chunk_t *layout, unsigned ndims, const hsize_t *curr_dims)
+H5D_chunk_set_info_real(H5O_layout_chunk_t *layout, unsigned ndims, const hsize_t *curr_dims, const hsize_t *max_dims)
{
unsigned u; /* Local index variable */
herr_t ret_value = SUCCEED; /* Return value */
@@ -299,17 +304,21 @@ H5D_chunk_set_info_real(H5O_layout_chunk_t *layout, unsigned ndims, const hsize_
HDassert(curr_dims);
/* Compute the # of chunks in dataset dimensions */
- for(u = 0, layout->nchunks = 1; u < ndims; u++) {
+ for(u = 0, layout->nchunks = 1, layout->max_nchunks = 1; u < ndims; u++) {
/* Round up to the next integer # of chunks, to accomodate partial chunks */
layout->chunks[u] = ((curr_dims[u] + layout->dim[u]) - 1) / layout->dim[u];
+ layout->max_chunks[u] = ((max_dims[u] + layout->dim[u]) - 1) / layout->dim[u];
/* Accumulate the # of chunks */
layout->nchunks *= layout->chunks[u];
+ layout->max_nchunks *= layout->max_chunks[u];
} /* end for */
/* Get the "down" sizes for each dimension */
if(H5V_array_down(ndims, layout->chunks, layout->down_chunks) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't compute 'down' chunk size value")
+ if(H5V_array_down(ndims, layout->max_chunks, layout->max_down_chunks) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't compute 'down' chunk size value")
done:
FUNC_LEAVE_NOAPI(ret_value)
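The loop above computes, via ceiling division, both the number of chunks covering the current extent and the number covering the maximum extent; the latter is what sizes the Fixed Array. A small standalone sketch of the same arithmetic follows; the concrete dimensions are made up for illustration only.

#include <stdio.h>

int main(void)
{
    /* Stand-ins for hsize_t; illustrative sizes:
     * curr_dims = {100, 60}, max_dims = {250, 60}, chunk dims = {30, 20} */
    unsigned long long curr_dims[2] = {100, 60}, max_dims[2] = {250, 60}, dim[2] = {30, 20};
    unsigned long long chunks[2], max_chunks[2], nchunks = 1, max_nchunks = 1;
    unsigned u;

    for(u = 0; u < 2; u++) {
        chunks[u]     = ((curr_dims[u] + dim[u]) - 1) / dim[u];  /* ceil(curr/chunk): 4, 3 */
        max_chunks[u] = ((max_dims[u]  + dim[u]) - 1) / dim[u];  /* ceil(max/chunk):  9, 3 */
        nchunks     *= chunks[u];      /* chunks currently addressable */
        max_nchunks *= max_chunks[u];  /* entries the Fixed Array must hold */
    }
    printf("nchunks=%llu max_nchunks=%llu\n", nchunks, max_nchunks);  /* 12 and 27 */
    return 0;
}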
@@ -326,12 +335,18 @@ done:
* Programmer: Quincey Koziol
* Tuesday, June 30, 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to handle extendible datasets for Fixed Array Indexing.
+ * (fixed max. dim. setting but not H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
herr_t
H5D_chunk_set_info(const H5D_t *dset)
{
hsize_t curr_dims[H5O_LAYOUT_NDIMS]; /* Curr. size of dataset dimensions */
+ hsize_t max_dims[H5O_LAYOUT_NDIMS]; /* Max. size of dataset dimensions */
int sndims; /* Rank of dataspace */
unsigned ndims; /* Rank of dataspace */
herr_t ret_value = SUCCEED; /* Return value */
@@ -342,12 +357,12 @@ H5D_chunk_set_info(const H5D_t *dset)
HDassert(dset);
/* Get the dim info for dataset */
- if((sndims = H5S_get_simple_extent_dims(dset->shared->space, curr_dims, NULL)) < 0)
+ if((sndims = H5S_get_simple_extent_dims(dset->shared->space, curr_dims, max_dims)) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataspace dimensions")
H5_ASSIGN_OVERFLOW(ndims, sndims, int, unsigned);
/* Set the base layout information */
- if(H5D_chunk_set_info_real(&dset->shared->layout.u.chunk, ndims, curr_dims) < 0)
+ if(H5D_chunk_set_info_real(&dset->shared->layout.u.chunk, ndims, curr_dims, max_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set layout's chunk info")
/* Call the index's "resize" callback */
@@ -5062,6 +5077,11 @@ done:
* Programmer: Peter Cao
* August 20, 2005
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to handle extendible datasets for Fixed Array Indexing.
+ * (fixed max. dim. setting but not H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
herr_t
@@ -5075,6 +5095,7 @@ H5D_chunk_copy(H5F_t *f_src, H5O_storage_chunk_t *storage_src,
H5D_chk_idx_info_t idx_info_src; /* Source chunked index info */
int sndims; /* Rank of dataspace */
hsize_t curr_dims[H5O_LAYOUT_NDIMS]; /* Curr. size of dataset dimensions */
+ hsize_t max_dims[H5O_LAYOUT_NDIMS]; /* Max. size of dataset dimensions */
H5O_pline_t _pline; /* Temporary pipeline info */
const H5O_pline_t *pline; /* Pointer to pipeline info to use */
H5T_path_t *tpath_src_mem = NULL, *tpath_mem_dst = NULL; /* Datatype conversion paths */
@@ -5133,12 +5154,12 @@ H5D_chunk_copy(H5F_t *f_src, H5O_storage_chunk_t *storage_src,
unsigned ndims; /* Rank of dataspace */
/* Get the dim info for dataset */
- if((sndims = H5S_extent_get_dims(ds_extent_src, curr_dims, NULL)) < 0)
+ if((sndims = H5S_extent_get_dims(ds_extent_src, curr_dims, max_dims)) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataspace dimensions")
H5_ASSIGN_OVERFLOW(ndims, sndims, int, unsigned);
/* Set the source layout chunk information */
- if(H5D_chunk_set_info_real(layout_src, ndims, curr_dims) < 0)
+ if(H5D_chunk_set_info_real(layout_src, ndims, curr_dims, max_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set layout's chunk info")
} /* end block */
diff --git a/src/H5Dfarray.c b/src/H5Dfarray.c
index 1c910d5..f260bcc 100644
--- a/src/H5Dfarray.c
+++ b/src/H5Dfarray.c
@@ -78,7 +78,7 @@ typedef struct H5D_farray_del_ud_t {
typedef struct H5D_farray_it_ud_t {
H5D_chunk_common_ud_t common; /* Common info for Fixed Array user data (must be first) */
H5D_chunk_rec_t chunk_rec; /* Generic chunk record for callback */
- hsize_t chunk_offset[H5O_LAYOUT_NDIMS]; /* Chunk offset */
+ hsize_t chunk_offset[H5O_LAYOUT_NDIMS]; /* Chunk offset for max dim */
hbool_t filtered; /* Whether the chunks are filtered */
H5D_chunk_cb_func_t cb; /* Chunk callback routine */
void *udata; /* User data for chunk callback routine */
@@ -776,6 +776,11 @@ done:
* Programmer: Vailin Choi
* Thursday, April 30, 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to handle extendible datasets.
+ * (fixed max. dim. setting but not H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
static herr_t
@@ -817,7 +822,7 @@ H5D_farray_idx_create(const H5D_chk_idx_info_t *idx_info)
} /* end else */
cparam.max_dblk_page_nelmts_bits = idx_info->layout->u.farray.cparam.max_dblk_page_nelmts_bits;
HDassert(cparam.max_dblk_page_nelmts_bits > 0);
- cparam.nelmts = idx_info->layout->nchunks;
+ cparam.nelmts = idx_info->layout->max_nchunks;
/* Set up the user data */
udata.f = idx_info->f;
@@ -876,6 +881,11 @@ H5D_farray_idx_is_space_alloc(const H5O_storage_chunk_t *storage)
* Programmer: Vailin Choi
* Thursday, April 30, 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to handle extendible datasets.
+ * (fixed max. dim. setting but not H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
static herr_t
@@ -906,7 +916,7 @@ H5D_farray_idx_insert(const H5D_chk_idx_info_t *idx_info, H5D_chunk_ud_t *udata)
fa = idx_info->storage->u.farray.fa;
/* Calculate the index of this chunk */
- if(H5V_chunk_index((idx_info->layout->ndims - 1), udata->common.offset, idx_info->layout->dim, idx_info->layout->down_chunks, &idx) < 0)
+ if(H5V_chunk_index((idx_info->layout->ndims - 1), udata->common.offset, idx_info->layout->dim, idx_info->layout->max_down_chunks, &idx) < 0)
HGOTO_ERROR(H5E_DATASPACE, H5E_BADRANGE, FAIL, "can't get chunk index")
/* Check for filters on chunks */
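Because H5D_farray_idx_create (above) now sizes the array with max_nchunks elements, a chunk's slot must be computed against the chunk grid of the maximum dimensions, hence max_down_chunks. Below is a simplified, self-contained illustration of the row-major linearization that H5V_chunk_index performs; it is not the library routine, and the numbers are made up.

/* Simplified illustration (not the actual H5V_chunk_index routine): scale a
 * chunk's element offset down to chunk coordinates, then linearize row-major
 * with the "down" products of the per-dimension chunk counts.  Using
 * max_down_chunks keeps the mapping stable as the dataset grows toward its
 * fixed maximum dimensions.
 */
#include <stdio.h>

int main(void)
{
    /* Illustrative 2-D case: chunk dims {30, 20}, max_chunks {9, 3} */
    unsigned long long chunk_dim[2]       = {30, 20};
    unsigned long long max_down_chunks[2] = {3, 1};    /* down products of {9, 3} */
    unsigned long long offset[2]          = {60, 40};  /* element offset of a chunk */
    unsigned long long idx = 0;
    unsigned u;

    for(u = 0; u < 2; u++)
        idx += (offset[u] / chunk_dim[u]) * max_down_chunks[u];

    printf("fixed array slot = %llu\n", idx);  /* (60/30)*3 + (40/20)*1 = 8 */
    return 0;
}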
@@ -1022,6 +1032,11 @@ done:
* Programmer: Vailin Choi
* Thursday, April 30, 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to handle extendible datasets.
+ * (fixed max. dim. setting but not H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
static herr_t
@@ -1052,7 +1067,7 @@ H5D_farray_idx_get_addr(const H5D_chk_idx_info_t *idx_info, H5D_chunk_ud_t *udat
fa = idx_info->storage->u.farray.fa;
/* Calculate the index of this chunk */
- if(H5V_chunk_index((idx_info->layout->ndims - 1), udata->common.offset, idx_info->layout->dim, idx_info->layout->down_chunks, &idx) < 0)
+ if(H5V_chunk_index((idx_info->layout->ndims - 1), udata->common.offset, idx_info->layout->dim, idx_info->layout->max_down_chunks, &idx) < 0)
HGOTO_ERROR(H5E_DATASPACE, H5E_BADRANGE, FAIL, "can't get chunk index")
/* Check for filters on chunks */
@@ -1093,6 +1108,11 @@ done:
* Programmer: Vailin Choi
* Thursday, April 30, 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to handle extendible datasets.
+ * (fixed max. dim. setting but not H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
static int
@@ -1117,8 +1137,10 @@ H5D_farray_idx_iterate_cb(hsize_t UNUSED idx, const void *_elmt, void *_udata)
udata->chunk_rec.chunk_addr = *(const haddr_t *)_elmt;
/* Make "generic chunk" callback */
- if((ret_value = (udata->cb)(&udata->chunk_rec, udata->udata)) < 0)
- HERROR(H5E_DATASET, H5E_CALLBACK, "failure in generic chunk iterator callback");
+ if(H5F_addr_defined(udata->chunk_rec.chunk_addr)) {
+ if((ret_value = (udata->cb)(&udata->chunk_rec, udata->udata)) < 0)
+ HERROR(H5E_DATASET, H5E_CALLBACK, "failure in generic chunk iterator callback");
+ }
/* Update coordinates of chunk in dataset */
ndims = udata->common.layout->ndims - 1;
@@ -1130,7 +1152,7 @@ H5D_farray_idx_iterate_cb(hsize_t UNUSED idx, const void *_elmt, void *_udata)
udata->chunk_rec.offset[curr_dim] += udata->common.layout->dim[curr_dim];
/* Check if we went off the end of the current dimension */
- if(udata->chunk_offset[curr_dim] >= udata->common.layout->chunks[curr_dim]) {
+ if(udata->chunk_offset[curr_dim] >= udata->common.layout->max_chunks[curr_dim]) {
/* Reset coordinate & move to next faster dimension */
udata->chunk_offset[curr_dim] = 0;
udata->chunk_rec.offset[curr_dim] = 0;
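Since the Fixed Array now holds one slot per chunk of the maximum extent, slots for chunks that were never allocated carry an undefined address; the callback guard and the wrap on max_chunks above account for that. A compact sketch of the skip-undefined iteration pattern, with stand-in types (an illustration only, not the HDF5 code):

/* Illustration only: invoke a callback for every defined slot of an array
 * sized by the maximum number of chunks, skipping unallocated entries
 * (analogous to the H5F_addr_defined() check above).
 */
#define MY_ADDR_UNDEF ((unsigned long long)-1)   /* stand-in for HADDR_UNDEF */

static int visit_defined_chunks(const unsigned long long *slots,
                                unsigned long long max_nchunks,
                                int (*cb)(unsigned long long addr, void *ctx),
                                void *ctx)
{
    unsigned long long i;

    for(i = 0; i < max_nchunks; i++)
        if(slots[i] != MY_ADDR_UNDEF)
            if(cb(slots[i], ctx) < 0)
                return -1;                       /* propagate callback failure */
    return 0;
}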
@@ -1228,6 +1250,11 @@ done:
* Programmer: Vailin Choi
* Thursday, April 30, 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to handle extendible datasets.
+ * (fixed max. dim. setting but not H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
static herr_t
@@ -1258,7 +1285,7 @@ H5D_farray_idx_remove(const H5D_chk_idx_info_t *idx_info, H5D_chunk_common_ud_t
fa = idx_info->storage->u.farray.fa;
/* Calculate the index of this chunk */
- if(H5V_chunk_index((idx_info->layout->ndims - 1), udata->offset, idx_info->layout->dim, idx_info->layout->down_chunks, &idx) < 0)
+ if(H5V_chunk_index((idx_info->layout->ndims - 1), udata->offset, idx_info->layout->dim, idx_info->layout->max_down_chunks, &idx) < 0)
HGOTO_ERROR(H5E_DATASPACE, H5E_BADRANGE, FAIL, "can't get chunk index")
/* Check for filters on chunks */
diff --git a/src/H5Dlayout.c b/src/H5Dlayout.c
index fe3437b..bbcdcdd 100644
--- a/src/H5Dlayout.c
+++ b/src/H5Dlayout.c
@@ -250,6 +250,11 @@ done:
* Programmer: Quincey Koziol
* Thursday, January 15, 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Modified to use Fixed Array indexing for extendible chunked datasets.
+ * (fixed max. dim. setting but excluding H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
herr_t
@@ -275,52 +280,42 @@ H5D_layout_set_latest_version(H5O_layout_t *layout, const H5S_t *space)
/* Avoid scalar/null dataspace */
if(ndims > 0) {
hsize_t max_dims[H5O_LAYOUT_NDIMS]; /* Maximum dimension sizes */
- hsize_t curr_dims[H5O_LAYOUT_NDIMS]; /* Current dimension sizes */
- unsigned unlim_count; /* Count of unlimited max. dimensions */
- hbool_t fixed = FALSE; /* Fixed dimension or not */
+ unsigned unlim_count = 0; /* Count of unlimited max. dimensions */
unsigned u; /* Local index variable */
/* Query the dataspace's dimensions */
- if(H5S_get_simple_extent_dims(space, curr_dims, max_dims) < 0)
+ if(H5S_get_simple_extent_dims(space, NULL, max_dims) < 0)
HGOTO_ERROR(H5E_OHDR, H5E_CANTGET, FAIL, "can't get dataspace max. dimensions")
/* Spin through the max. dimensions, looking for unlimited dimensions */
- unlim_count = 0;
for(u = 0; u < ndims; u++)
if(max_dims[u] == H5S_UNLIMITED)
unlim_count++;
- /* Check if it is fixed dimension */
- if(0 == unlim_count) {
- fixed = TRUE;
- for(u = 0; u < ndims; u++)
- if(curr_dims[u] != max_dims[u]) {
- fixed = FALSE;
- break;
- } /* end if */
- } /* end if */
-
- /* If we have only 1 unlimited dimension, we can use extensible array index */
- if(1 == unlim_count) {
- /* Set the chunk index type to an extensible array */
- layout->u.chunk.idx_type = H5D_CHUNK_IDX_EARRAY;
- layout->storage.u.chunk.idx_type = H5D_CHUNK_IDX_EARRAY;
- layout->storage.u.chunk.ops = H5D_COPS_EARRAY;
-
- /* Set the extensible array creation parameters */
- /* (use hard-coded defaults for now, until we give applications
- * control over this with a property list - QAK)
- */
- layout->u.chunk.u.earray.cparam.max_nelmts_bits = H5D_EARRAY_MAX_NELMTS_BITS;
- layout->u.chunk.u.earray.cparam.idx_blk_elmts = H5D_EARRAY_IDX_BLK_ELMTS;
- layout->u.chunk.u.earray.cparam.sup_blk_min_data_ptrs = H5D_EARRAY_SUP_BLK_MIN_DATA_PTRS;
- layout->u.chunk.u.earray.cparam.data_blk_min_elmts = H5D_EARRAY_DATA_BLK_MIN_ELMTS;
- layout->u.chunk.u.earray.cparam.max_dblk_page_nelmts_bits = H5D_EARRAY_MAX_DBLOCK_PAGE_NELMTS_BITS;
- } /* end if */
- /* Chunked datasets with fixed dimensions */
- else if(layout->type == H5D_CHUNKED && fixed) {
- /* Set the chunk index type to a fixed array */
- layout->u.chunk.idx_type = H5D_CHUNK_IDX_FARRAY;
+ /* Chunked datasets with unlimited dimension(s) */
+ if(unlim_count) { /* dataset with unlimited dimension(s) must be chunked */
+ HDassert(layout->type == H5D_CHUNKED);
+ if(1 == unlim_count) { /* Chunked dataset with only 1 unlimited dimension */
+ layout->u.chunk.idx_type = H5D_CHUNK_IDX_EARRAY;
+ layout->storage.u.chunk.idx_type = H5D_CHUNK_IDX_EARRAY;
+ layout->storage.u.chunk.ops = H5D_COPS_EARRAY;
+
+ /* Set the extensible array creation parameters */
+ /* (use hard-coded defaults for now, until we give applications
+ * control over this with a property list - QAK)
+ */
+ layout->u.chunk.u.earray.cparam.max_nelmts_bits = H5D_EARRAY_MAX_NELMTS_BITS;
+ layout->u.chunk.u.earray.cparam.idx_blk_elmts = H5D_EARRAY_IDX_BLK_ELMTS;
+ layout->u.chunk.u.earray.cparam.sup_blk_min_data_ptrs = H5D_EARRAY_SUP_BLK_MIN_DATA_PTRS;
+ layout->u.chunk.u.earray.cparam.data_blk_min_elmts = H5D_EARRAY_DATA_BLK_MIN_ELMTS;
+ layout->u.chunk.u.earray.cparam.max_dblk_page_nelmts_bits = H5D_EARRAY_MAX_DBLOCK_PAGE_NELMTS_BITS;
+ } else { /* Chunked dataset with > 1 unlimited dimensions */
+ /* Add setup to use v2 B-tree chunk indices here */
+ }
+ } else if(layout->type == H5D_CHUNKED) {
+ /* Chunked dataset with fixed dimensions (with or without max. dimension setting) */
+ /* Set the chunk index type to Fixed Array */
+ layout->u.chunk.idx_type = H5D_CHUNK_IDX_FARRAY;
layout->storage.u.chunk.idx_type = H5D_CHUNK_IDX_FARRAY;
layout->storage.u.chunk.ops = H5D_COPS_FARRAY;
@@ -330,9 +325,6 @@ H5D_layout_set_latest_version(H5O_layout_t *layout, const H5S_t *space)
*/
layout->u.chunk.u.farray.cparam.max_dblk_page_nelmts_bits = H5D_FARRAY_MAX_DBLK_PAGE_NELMTS_BITS;
} /* end if */
- else {
- /* Add setup for v2 B-tree indices here */
- } /* end else */
} /* end if */
done:
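To summarize the restructured selection in H5D_layout_set_latest_version: the chunk index is chosen purely from the count of H5S_UNLIMITED maximum dimensions. The sketch below shows, for three illustrative dataspaces, which index a chunked dataset would get under the latest format bounds; it is a reading of the diff above, not library code.

#include "hdf5.h"

int main(void)
{
    hsize_t cur[2]   = {10, 10};
    hsize_t max_a[2] = {100, 100};                      /* fixed max. dims           -> Fixed Array index      */
    hsize_t max_b[2] = {H5S_UNLIMITED, 100};            /* exactly one unlimited dim -> Extensible Array index */
    hsize_t max_c[2] = {H5S_UNLIMITED, H5S_UNLIMITED};  /* >1 unlimited dims: v2 B-tree planned, index unchanged for now */

    hid_t sid_a = H5Screate_simple(2, cur, max_a);
    hid_t sid_b = H5Screate_simple(2, cur, max_b);
    hid_t sid_c = H5Screate_simple(2, cur, max_c);

    /* Passing NULL maximum dims (max == current) also selects the Fixed Array index. */

    H5Sclose(sid_a);
    H5Sclose(sid_b);
    H5Sclose(sid_c);
    return 0;
}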
diff --git a/src/H5Oprivate.h b/src/H5Oprivate.h
index 0195561..4bef827 100644
--- a/src/H5Oprivate.h
+++ b/src/H5Oprivate.h
@@ -467,8 +467,11 @@ typedef struct H5O_layout_chunk_t {
unsigned enc_bytes_per_dim; /* Encoded # of bytes for storing each chunk dimension */
uint32_t size; /* Size of chunk in bytes */
hsize_t nchunks; /* Number of chunks in dataset */
- hsize_t chunks[H5O_LAYOUT_NDIMS]; /* # of chunks in dataset dimensions */
- hsize_t down_chunks[H5O_LAYOUT_NDIMS]; /* "down" size of number of chunks in each dimension */
+ hsize_t max_nchunks; /* Max. number of chunks in dataset */
+ hsize_t chunks[H5O_LAYOUT_NDIMS]; /* # of chunks in each dataset dimension */
+ hsize_t max_chunks[H5O_LAYOUT_NDIMS]; /* # of chunks in each dataset's max. dimension */
+ hsize_t down_chunks[H5O_LAYOUT_NDIMS]; /* "down" size of number of chunks in each dimension */
+ hsize_t max_down_chunks[H5O_LAYOUT_NDIMS]; /* "down" size of number of chunks in each max dim */
union {
H5O_layout_chunk_farray_t farray; /* Information for fixed array index */
H5O_layout_chunk_earray_t earray; /* Information for extensible array index */
diff --git a/src/H5Pdcpl.c b/src/H5Pdcpl.c
index e672ad0..5bc2783 100644
--- a/src/H5Pdcpl.c
+++ b/src/H5Pdcpl.c
@@ -54,7 +54,7 @@
#define H5D_DEF_STORAGE_COMPACT_INIT {(hbool_t)FALSE, (size_t)0, NULL}
#define H5D_DEF_STORAGE_CONTIG_INIT {HADDR_UNDEF, (hsize_t)0}
#define H5D_DEF_STORAGE_CHUNK_INIT {H5D_CHUNK_IDX_BTREE, HADDR_UNDEF, H5D_COPS_BTREE, {{NULL}}}
-#define H5D_DEF_LAYOUT_CHUNK_INIT {H5D_CHUNK_IDX_BTREE, (uint8_t)0, (unsigned)1, {1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}, (unsigned)0, (uint32_t)0, (hsize_t)0, {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}, {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}, {{{(uint8_t)0}}}}
+#define H5D_DEF_LAYOUT_CHUNK_INIT {H5D_CHUNK_IDX_BTREE, (uint8_t)0, (unsigned)1, {1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1}, (unsigned)0, (uint32_t)0, (hsize_t)0, (hsize_t)0, {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}, {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}, {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}, {0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}, {{{(uint8_t)0}}}}
#ifdef H5_HAVE_C99_DESIGNATED_INITIALIZER
#define H5D_DEF_STORAGE_COMPACT {H5D_COMPACT, { .compact = H5D_DEF_STORAGE_COMPACT_INIT }}
#define H5D_DEF_STORAGE_CONTIG {H5D_CONTIGUOUS, { .contig = H5D_DEF_STORAGE_CONTIG_INIT }}
diff --git a/test/dsets.c b/test/dsets.c
index 57b9989..a0a1f39 100644
--- a/test/dsets.c
+++ b/test/dsets.c
@@ -119,9 +119,10 @@ const char *FILENAME[] = {
#define DSET_DEPREC_NAME_FILTER "deprecated_filter"
/* Dataset names for testing Fixed Array Indexing */
-#define DSET_FIXED_NAME "DSET_FIXED"
-#define DSET_BTREE_NAME "DSET_BTREE"
-#define DSET_BIG_FIXED_NAME "DSET_FIXED_BIG"
+#define DSET_FIXED_MAX "DSET_FIXED_MAX"
+#define DSET_FIXED_NOMAX "DSET_FIXED_NOMAX"
+#define DSET_FIXED_BIG "DSET_FIXED_BIG"
+
#define POINTS 72
#define POINTS_BIG 2500
@@ -7974,25 +7975,31 @@ error:
* Function: test_fixed_array
*
* Purpose: Tests support for Fixed Array Indexing
- * Repeat the following test with/without compression filter
- * Repeat the following test with H5D_ALLOC_TIME_EARLY/H5D_ALLOC_TIME_LATE/H5D_ALLOC_TIME_INCR
- * Verify that the btree indexing type is used when the dataset
- * is created with a current and maximum dimensions
- * Create the first dataset with fixed dimensions
- * Write elements to the first dataset
- *
- * Create the second dataset with fixed dimensions but with bigger size so
- * that Fixed Array Indexing with paging is involved
- * Write elements to the second dataset
- *
- * Read from the first dataset and verify the elements read are correct
- * Read from the second dataset and verify the elements read are correct
+ * Repeat the following test with/without compression filter
+ * Repeat the following test with H5D_ALLOC_TIME_EARLY/H5D_ALLOC_TIME_LATE/H5D_ALLOC_TIME_INCR
+ * For the old format, verify that the v1 B-tree indexing type is used for all
+ * the following datasets
+ * For the new format, verify that Fixed Array indexing type is used when
+ * the dataset is created with the following dimension setting:
+ * (cur dim, max dim) (but not H5S_UNLIMITED)
+ * (cur dim, max dim = NULL)
+ * (cur dim, max dim = curr dim)
+ * Write elements to each dataset
+ * Read from the dataset and verify the elements read are correct
+ * Note that the third dataset is created with fixed dimensions but with a bigger size
+ * so that Fixed Array Indexing with paging is involved
*
* Return: Success: 0
* Failure: -1
*
* Programmer: Vailin Choi; 2009
*
+ * Modifications:
+ * Vailin Choi; June 2010
+ * Made changes to verify that Fixed Array indexing is also used for
+ * extendible chunked datasets.
+ * (fixed max. dim. setting but excluding H5S_UNLIMITED)
+ *
*-------------------------------------------------------------------------
*/
static herr_t
@@ -8094,31 +8101,61 @@ test_fixed_array(hid_t fapl)
/* Set allocation time */
if(H5Pset_alloc_time(dcpl, alloc_time) < 0) FAIL_STACK_ERROR
- /*
- * Verify the correct indexing type is used
- */
+ /* Initialization of chunk array for repeated coordinates */
+ for(i = 0; i < dim2[0]/chunk_dim2[0]; i++)
+ for(j = 0; j < dim2[1]/chunk_dim2[1]; j++)
+ chunks[i][j] = 0;
+
+ /* Generate random point coordinates. Only one point is selected per chunk */
+ for(i = 0; i < POINTS; i++){
+ do {
+ chunk_row = (int)HDrandom () % (int)(dim2[0]/chunk_dim2[0]);
+ chunk_col = (int)HDrandom () % (int)(dim2[1]/chunk_dim2[1]);
+ } while (chunks[chunk_row][chunk_col]);
+
+ wbuf[i] = chunks[chunk_row][chunk_col] = chunk_row+chunk_col+1;
+ coord[i][0] = (hsize_t)chunk_row * chunk_dim2[0];
+ coord[i][1] = (hsize_t)chunk_col * chunk_dim2[1];
+ } /* end for */
- /* Create a dataset with current and maximum dimensions */
+ /* Create first dataset with cur and max dimensions */
if((sid_max = H5Screate_simple(2, dim2, dim2_max)) < 0) FAIL_STACK_ERROR
- dsid_max = H5Dcreate2(fid, DSET_BTREE_NAME, H5T_NATIVE_INT, sid_max, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ dsid_max = H5Dcreate2(fid, DSET_FIXED_MAX, H5T_NATIVE_INT, sid_max, H5P_DEFAULT, dcpl, H5P_DEFAULT);
if(dsid_max < 0)
FAIL_PUTS_ERROR(" Creating Chunked Dataset with maximum dimensions.")
/* Get the chunk index type */
if(H5D_layout_idx_type_test(dsid_max, &idx_type) < 0) FAIL_STACK_ERROR
- /* Should be using btree indexing type */
- if(idx_type != H5D_CHUNK_IDX_BTREE)
- FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ /* Chunk index type depends on whether we are using the latest version of the format */
+ if(low == H5F_LIBVER_LATEST) {
+ if(idx_type != H5D_CHUNK_IDX_FARRAY)
+ FAIL_PUTS_ERROR("should be using Fixed Array as index");
+ } /* end if */
+ else {
+ if(idx_type != H5D_CHUNK_IDX_BTREE)
+ FAIL_PUTS_ERROR("should be using v1 B-tree as index");
+ } /* end else */
+
+ /* Create dataspace for write buffer */
+ if((mem_id = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
+
+ /* Select the random points for writing */
+ if(H5Sselect_elements(sid_max, H5S_SELECT_SET, POINTS, (const hsize_t *)coord) < 0)
+ TEST_ERROR;
+
+ /* Write into dataset */
+ if(H5Dwrite(dsid_max, H5T_NATIVE_INT, mem_id, sid_max, H5P_DEFAULT, wbuf) < 0) TEST_ERROR;
/* Closing */
if(H5Dclose(dsid_max) < 0) FAIL_STACK_ERROR
if(H5Sclose(sid_max) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(mem_id) < 0) FAIL_STACK_ERROR
- /* Create the first dataset with fixed dimensions */
+ /* Create second dataset with curr dim but NULL max dim */
if((sid = H5Screate_simple(2, dim2, NULL)) < 0) FAIL_STACK_ERROR
- dsid = H5Dcreate2(fid, DSET_FIXED_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ dsid = H5Dcreate2(fid, DSET_FIXED_NOMAX, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT);
if(dsid < 0)
FAIL_PUTS_ERROR(" Creating Chunked Dataset.")
@@ -8135,23 +8172,6 @@ test_fixed_array(hid_t fapl)
FAIL_PUTS_ERROR("should be using v1 B-tree as index");
} /* end else */
- /* Initialization of chunk array for repeated coordinates */
- for(i = 0; i < dim2[0]/chunk_dim2[0]; i++)
- for(j = 0; j < dim2[1]/chunk_dim2[1]; j++)
- chunks[i][j] = 0;
-
- /* Generate random point coordinates. Only one point is selected per chunk */
- for(i = 0; i < POINTS; i++){
- do {
- chunk_row = (int)HDrandom () % (int)(dim2[0]/chunk_dim2[0]);
- chunk_col = (int)HDrandom () % (int)(dim2[1]/chunk_dim2[1]);
- } while (chunks[chunk_row][chunk_col]);
-
- wbuf[i] = chunks[chunk_row][chunk_col] = chunk_row+chunk_col+1;
- coord[i][0] = (hsize_t)chunk_row * chunk_dim2[0];
- coord[i][1] = (hsize_t)chunk_col * chunk_dim2[1];
- } /* end for */
-
/* Create dataspace for write buffer */
if((mem_id = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
@@ -8167,9 +8187,9 @@ test_fixed_array(hid_t fapl)
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
if(H5Sclose(mem_id) < 0) FAIL_STACK_ERROR
- /* Create the second dataset with bigger size and with fixed dimensions */
+ /* Create the third dataset with bigger size and both curr & max dimensions are the same */
if((sid_big = H5Screate_simple(2, dim2_big, dim2_big)) < 0) FAIL_STACK_ERROR
- dsid_big = H5Dcreate2(fid, DSET_BIG_FIXED_NAME, H5T_NATIVE_INT, sid_big, H5P_DEFAULT, dcpl, H5P_DEFAULT);
+ dsid_big = H5Dcreate2(fid, DSET_FIXED_BIG, H5T_NATIVE_INT, sid_big, H5P_DEFAULT, dcpl, H5P_DEFAULT);
if(dsid_big < 0)
FAIL_PUTS_ERROR(" Creating Big Chunked Dataset.")
@@ -8220,7 +8240,7 @@ test_fixed_array(hid_t fapl)
if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
/* Open the first dataset */
- if((dsid = H5Dopen2(fid, DSET_FIXED_NAME, H5P_DEFAULT)) < 0) TEST_ERROR;
+ if((dsid = H5Dopen2(fid, DSET_FIXED_MAX, H5P_DEFAULT)) < 0) TEST_ERROR;
/* Get dataset dataspace */
if((sid = H5Dget_space(dsid)) < 0) TEST_ERROR;
@@ -8248,8 +8268,35 @@ test_fixed_array(hid_t fapl)
if(H5Sclose(mem_id) < 0) FAIL_STACK_ERROR
/* Open the second dataset */
- if((dsid_big = H5Dopen2(fid, DSET_BIG_FIXED_NAME, H5P_DEFAULT)) < 0) TEST_ERROR;
+ if((dsid = H5Dopen2(fid, DSET_FIXED_NOMAX, H5P_DEFAULT)) < 0) TEST_ERROR;
+
+ /* Get dataset dataspace */
+ if((sid = H5Dget_space(dsid)) < 0) TEST_ERROR;
+
+ /* Create dataspace for read buffer */
+ if((mem_id = H5Screate_simple(1, msize, NULL)) < 0) TEST_ERROR;
+
+ /* Select the random points for reading */
+ if(H5Sselect_elements (sid, H5S_SELECT_SET, POINTS, (const hsize_t *)coord) < 0) TEST_ERROR;
+
+ /* Read from dataset */
+ if(H5Dread(dsid, H5T_NATIVE_INT, mem_id, sid, H5P_DEFAULT, rbuf) < 0) TEST_ERROR;
+
+ /* Verify that written and read data are the same */
+ for(i = 0; i < POINTS; i++)
+ if(rbuf[i] != wbuf[i]){
+ printf(" Line %d: Incorrect value, wbuf[%u]=%d, rbuf[%u]=%d\n",
+ __LINE__,(unsigned)i,wbuf[i],(unsigned)i,rbuf[i]);
+ TEST_ERROR;
+ } /* end if */
+
+ /* Closing */
+ if(H5Dclose(dsid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
+ if(H5Sclose(mem_id) < 0) FAIL_STACK_ERROR
+ /* Open the third dataset */
+ if((dsid_big = H5Dopen2(fid, DSET_FIXED_BIG, H5P_DEFAULT)) < 0) TEST_ERROR;
/* Get dataset dataspace */
if((sid_big = H5Dget_space(dsid_big)) < 0) TEST_ERROR;
@@ -8275,9 +8322,9 @@ test_fixed_array(hid_t fapl)
if(H5Sclose(big_mem_id) < 0) FAIL_STACK_ERROR
/* Delete datasets */
- if(H5Ldelete(fid, DSET_BIG_FIXED_NAME, H5P_DEFAULT) < 0) FAIL_STACK_ERROR
- if(H5Ldelete(fid, DSET_FIXED_NAME, H5P_DEFAULT) < 0) FAIL_STACK_ERROR
- if(H5Ldelete(fid, DSET_BTREE_NAME, H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+ if(H5Ldelete(fid, DSET_FIXED_BIG, H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+ if(H5Ldelete(fid, DSET_FIXED_NOMAX, H5P_DEFAULT) < 0) FAIL_STACK_ERROR
+ if(H5Ldelete(fid, DSET_FIXED_MAX, H5P_DEFAULT) < 0) FAIL_STACK_ERROR
/* Close everything */
if(H5Fclose(fid) < 0) FAIL_STACK_ERROR