author     Quincey Koziol <koziol@hdfgroup.org>    2015-04-18 06:39:34 (GMT)
committer  Quincey Koziol <koziol@hdfgroup.org>    2015-04-18 06:39:34 (GMT)
commit     1eaaae98214d9042e979209e93e31c490efa4d79 (patch)
tree       45b2258e7edc6ef8040b6e5339cf2e313d883b8f /src
parent     524bfed32ef710dd28a3ff400965f01893a3f66b (diff)
[svn-r26842] Description:
Cache the dataset's rank & dimension sizes, instead of querying them
frequently, to speed up various checks & algorithms.

Also, a few minor cleanups.

Tested on:
    MacOSX/64 10.10.2 (amazon) w/serial & parallel
    Linux/32 2.6.18 (jam) w/serial & parallel
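For readers skimming the diff below, the core idea is small: instead of calling H5S_get_simple_extent_dims() in every chunk/storage routine, the dataset's shared struct gains cached ndims/curr_dims/max_dims fields that are filled once at create/open time and refreshed on extend/set-extent. The standalone C sketch below mirrors that pattern with simplified, hypothetical names (cached dset_shared_t, cache_dataspace_info(), query_dims_slow()); it is not HDF5 code, just an illustration of the caching approach under those assumptions.

/*
 * Minimal sketch of the caching pattern from this commit, using
 * hypothetical simplified types -- not the real HDF5 internals.
 */
#include <stdio.h>
#include <string.h>

#define MAX_RANK 32     /* stand-in for H5S_MAX_RANK */

/* Stand-in for the dataspace object that must otherwise be queried each time */
typedef struct {
    unsigned      rank;
    unsigned long dims[MAX_RANK];
    unsigned long max_dims[MAX_RANK];
} space_t;

/* Stand-in for H5D_shared_t: dataspace info is cached alongside the space */
typedef struct {
    space_t       space;                 /* authoritative extent            */
    unsigned      ndims;                 /* cached rank                     */
    unsigned long curr_dims[MAX_RANK];   /* cached current dimension sizes  */
    unsigned long max_dims[MAX_RANK];    /* cached maximum dimension sizes  */
} dset_shared_t;

/* Analogue of H5S_get_simple_extent_dims(): the per-call query being avoided */
static int
query_dims_slow(const space_t *space, unsigned long *dims, unsigned long *maxdims)
{
    if (dims)
        memcpy(dims, space->dims, space->rank * sizeof(dims[0]));
    if (maxdims)
        memcpy(maxdims, space->max_dims, space->rank * sizeof(maxdims[0]));
    return (int)space->rank;
}

/* Analogue of the new H5D__cache_dataspace_info(): query once, cache results */
static int
cache_dataspace_info(dset_shared_t *shared)
{
    int sndims = query_dims_slow(&shared->space, shared->curr_dims, shared->max_dims);

    if (sndims < 0)
        return -1;
    shared->ndims = (unsigned)sndims;
    return 0;
}

/* After an extend, the cache is refreshed instead of re-queried everywhere */
static void
extend_dims(dset_shared_t *shared, const unsigned long *new_size)
{
    for (unsigned u = 0; u < shared->space.rank; u++)
        if (new_size[u] > shared->space.dims[u])
            shared->space.dims[u] = new_size[u];
    (void)cache_dataspace_info(shared);  /* keep the cached copy in sync */
}

int
main(void)
{
    dset_shared_t d = { .space = { .rank     = 2,
                                   .dims     = { 10, 20 },
                                   .max_dims = { 100, 200 } } };
    unsigned long bigger[2] = { 15, 20 };

    cache_dataspace_info(&d);            /* done once at create/open time */
    extend_dims(&d, bigger);

    /* Hot paths now read d.ndims / d.curr_dims directly, with no query call */
    printf("rank=%u dims=%lux%lu\n", d.ndims, d.curr_dims[0], d.curr_dims[1]);
    return 0;
}

In the actual patch, the cache lives in H5D_shared_t (see the H5Dpkg.h hunk) and is refreshed in H5D__extend() and H5D__set_extent(), as the H5Ddeprec.c and H5Dint.c hunks show.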
Diffstat (limited to 'src')
-rw-r--r--  src/H5Dchunk.c    79
-rw-r--r--  src/H5Dcompact.c  15
-rw-r--r--  src/H5Dcontig.c   13
-rw-r--r--  src/H5Ddeprec.c   27
-rw-r--r--  src/H5Defl.c      13
-rw-r--r--  src/H5Dint.c      75
-rw-r--r--  src/H5Dio.c       11
-rw-r--r--  src/H5Dmpio.c      3
-rw-r--r--  src/H5Dpkg.h       9
-rw-r--r--  src/H5S.c          9
10 files changed, 131 insertions, 123 deletions
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index a5cf5b6..e99f00f 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -326,8 +326,6 @@ H5D__chunk_direct_write(const H5D_t *dset, hid_t dxpl_id, uint32_t filters, hsiz
H5F_block_t old_chunk; /* Offset/length of old chunk */
H5D_chk_idx_info_t idx_info; /* Chunked index info */
hbool_t need_insert = FALSE; /* Whether the chunk needs to be inserted into the index */
- int space_ndims; /* Dataset's space rank */
- hsize_t space_dim[H5O_LAYOUT_NDIMS]; /* Dataset's dataspace dimensions */
herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_STATIC_TAG(dxpl_id, dset->oloc.addr, FAIL)
@@ -338,12 +336,8 @@ H5D__chunk_direct_write(const H5D_t *dset, hid_t dxpl_id, uint32_t filters, hsiz
if(H5D__alloc_storage(dset, dxpl_id, H5D_ALLOC_WRITE, FALSE, NULL) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize storage")
- /* Retrieve the dataset dimensions */
- if((space_ndims = H5S_get_simple_extent_dims(dset->shared->space, space_dim, NULL)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to get simple dataspace info")
-
/* Calculate the index of this chunk */
- if(H5VM_chunk_index((unsigned)space_ndims, offset,
+ if(H5VM_chunk_index(dset->shared->ndims, offset,
layout->u.chunk.dim, layout->u.chunk.down_chunks, &chunk_idx) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get chunk index")
@@ -475,23 +469,15 @@ done:
herr_t
H5D__chunk_set_info(const H5D_t *dset)
{
- hsize_t curr_dims[H5O_LAYOUT_NDIMS]; /* Curr. size of dataset dimensions */
- int sndims; /* Rank of dataspace */
- unsigned ndims; /* Rank of dataspace */
- herr_t ret_value = SUCCEED; /* Return value */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_PACKAGE
/* Sanity checks */
HDassert(dset);
- /* Get the dim info for dataset */
- if((sndims = H5S_get_simple_extent_dims(dset->shared->space, curr_dims, NULL)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataspace dimensions")
- H5_ASSIGN_OVERFLOW(ndims, sndims, int, unsigned);
-
/* Set the base layout information */
- if(H5D__chunk_set_info_real(&dset->shared->layout.u.chunk, ndims, curr_dims) < 0)
+ if(H5D__chunk_set_info_real(&dset->shared->layout.u.chunk, dset->shared->ndims, dset->shared->curr_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set layout's chunk info")
/* Call the index's "resize" callback */
@@ -519,10 +505,7 @@ static herr_t
H5D__chunk_construct(H5F_t UNUSED *f, H5D_t *dset)
{
const H5T_t *type = dset->shared->type; /* Convenience pointer to dataset's datatype */
- hsize_t max_dims[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */
- hsize_t dims[H5O_LAYOUT_NDIMS]; /* Dimension size of data in elements */
uint64_t chunk_size; /* Size of chunk in bytes */
- int ndims; /* Rank of dataspace */
unsigned u; /* Local index variable */
herr_t ret_value = SUCCEED; /* Return value */
@@ -537,9 +520,7 @@ H5D__chunk_construct(H5F_t UNUSED *f, H5D_t *dset)
HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "no chunk information set?")
/* Set up layout information */
- if((ndims = H5S_GET_EXTENT_NDIMS(dset->shared->space)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get rank")
- if(dset->shared->layout.u.chunk.ndims != (unsigned)ndims)
+ if(dset->shared->layout.u.chunk.ndims != dset->shared->ndims)
HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "dimensionality of chunks doesn't match the dataspace")
/* Increment # of chunk dimensions, to account for datatype size as last element */
@@ -553,10 +534,6 @@ H5D__chunk_construct(H5F_t UNUSED *f, H5D_t *dset)
/* Set the last dimension of the chunk size to the size of the datatype */
dset->shared->layout.u.chunk.dim[dset->shared->layout.u.chunk.ndims - 1] = (uint32_t)H5T_GET_SIZE(type);
- /* Get local copy of dataset dimensions (for sanity checking) */
- if(H5S_get_simple_extent_dims(dset->shared->space, dims, max_dims) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to query maximum dimensions")
-
/* Sanity check dimensions */
for(u = 0; u < dset->shared->layout.u.chunk.ndims - 1; u++) {
/* Don't allow zero-sized chunk dimensions */
@@ -568,7 +545,7 @@ H5D__chunk_construct(H5F_t UNUSED *f, H5D_t *dset)
* the maximum dimension size. If any dimension size is zero, there
* will be no such restriction.
*/
- if(dims[u] && max_dims[u] != H5S_UNLIMITED && max_dims[u] < dset->shared->layout.u.chunk.dim[u])
+ if(dset->shared->curr_dims[u] && dset->shared->max_dims[u] != H5S_UNLIMITED && dset->shared->max_dims[u] < dset->shared->layout.u.chunk.dim[u])
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "chunk size must be <= maximum dimension size for fixed-sized dimensions")
} /* end for */
@@ -3297,8 +3274,8 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite,
H5D_chunk_coll_info_t chunk_info; /* chunk address information for doing I/O */
#endif /* H5_HAVE_PARALLEL */
hbool_t carry; /* Flag to indicate that chunk increment carrys to higher dimension (sorta) */
- int space_ndims; /* Dataset's space rank */
- hsize_t space_dim[H5O_LAYOUT_NDIMS]; /* Dataset's dataspace dimensions */
+ unsigned space_ndims; /* Dataset's space rank */
+ const hsize_t *space_dim; /* Dataset's dataspace dimensions */
const uint32_t *chunk_dim = layout->u.chunk.dim; /* Convenience pointer to chunk dimensions */
unsigned op_dim; /* Current operating dimension */
H5D_fill_buf_info_t fb_info; /* Dataset's fill buffer info */
@@ -3313,9 +3290,8 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite,
HDassert(TRUE == H5P_isa_class(dxpl_id, H5P_DATASET_XFER));
/* Retrieve the dataset dimensions */
- if((space_ndims = H5S_get_simple_extent_dims(dset->shared->space, space_dim, NULL)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to get simple dataspace info")
- space_dim[space_ndims] = layout->u.chunk.dim[space_ndims];
+ space_dim = dset->shared->curr_dims;
+ space_ndims = dset->shared->ndims;
/* The last dimension in chunk_offset is always 0 */
chunk_offset[space_ndims] = (hsize_t)0;
@@ -3429,7 +3405,7 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite,
* certain dimension, max_unalloc is updated in order to avoid allocating
* those chunks again.
*/
- for(op_dim = 0; op_dim < (unsigned)space_ndims; op_dim++) {
+ for(op_dim = 0; op_dim < space_ndims; op_dim++) {
H5D_chunk_ud_t udata; /* User data for querying chunk info */
int i; /* Local index variable */
@@ -3438,7 +3414,7 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite,
continue;
else {
/* Reset the chunk offset indices */
- HDmemset(chunk_offset, 0, ((unsigned)space_ndims * sizeof(chunk_offset[0])));
+ HDmemset(chunk_offset, 0, (space_ndims * sizeof(chunk_offset[0])));
chunk_offset[op_dim] = min_unalloc[op_dim];
carry = FALSE;
@@ -3456,7 +3432,7 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite,
hsize_t chunk_idx;
/* Calculate the index of this chunk */
- if(H5VM_chunk_index((unsigned)space_ndims, chunk_offset,
+ if(H5VM_chunk_index(space_ndims, chunk_offset,
layout->u.chunk.dim, layout->u.chunk.down_chunks,
&chunk_idx) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get chunk index")
@@ -3473,7 +3449,7 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite,
unsigned u; /* Local index variable */
hbool_t outside_orig = FALSE;
- for(u = 0; u < (unsigned)space_ndims; u++) {
+ for(u = 0; u < space_ndims; u++) {
HDassert(chunk_offset[u] < space_dim[u]);
if(chunk_offset[u] >= old_dim[u])
outside_orig = TRUE;
@@ -4018,14 +3994,13 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim)
const H5O_layout_t *layout = &(dset->shared->layout); /* Dataset's layout */
const H5D_rdcc_t *rdcc = &(dset->shared->cache.chunk); /*raw data chunk cache */
H5D_rdcc_ent_t *ent = NULL; /* Cache entry */
- int space_ndims; /* Dataset's space rank */
- hsize_t space_dim[H5O_LAYOUT_NDIMS]; /* Current dataspace dimensions */
+ unsigned space_ndims; /* Dataset's space rank */
+ const hsize_t *space_dim; /* Current dataspace dimensions */
unsigned op_dim; /* Current operating dimension */
hbool_t shrunk_dim[H5O_LAYOUT_NDIMS]; /* Dimensions which have shrunk */
H5D_chunk_it_ud1_t udata; /* Chunk index iterator user data */
hbool_t udata_init = FALSE; /* Whether the chunk index iterator user data has been initialized */
H5D_chunk_common_ud_t idx_udata; /* User data for index removal routine */
- H5D_chunk_ud_t chk_udata; /* User data for getting chunk info */
H5S_t *chunk_space = NULL; /* Dataspace for a chunk */
hsize_t chunk_dim[H5O_LAYOUT_NDIMS]; /* Chunk dimensions */
hsize_t chunk_offset[H5O_LAYOUT_NDIMS]; /* Offset of current chunk */
@@ -4047,10 +4022,8 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't fill dxpl cache")
/* Go get the rank & dimensions (including the element size) */
- if((space_ndims = H5S_get_simple_extent_dims(dset->shared->space, space_dim,
- NULL)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
- space_dim[space_ndims] = layout->u.chunk.dim[space_ndims];
+ space_dim = dset->shared->curr_dims;
+ space_ndims = dset->shared->ndims;
/* The last dimension in chunk_offset is always 0 */
chunk_offset[space_ndims] = (hsize_t)0;
@@ -4069,14 +4042,14 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim)
/* (also copy the chunk dimensions into 'hsize_t' array for creating dataspace) */
/* (also compute the dimensions which have been shrunk) */
elmts_per_chunk = 1;
- for(u = 0; u < (unsigned)space_ndims; u++) {
+ for(u = 0; u < space_ndims; u++) {
elmts_per_chunk *= layout->u.chunk.dim[u];
chunk_dim[u] = layout->u.chunk.dim[u];
shrunk_dim[u] = space_dim[u] < old_dim[u];
} /* end for */
/* Create a dataspace for a chunk & set the extent */
- if(NULL == (chunk_space = H5S_create_simple((unsigned)space_ndims, chunk_dim, NULL)))
+ if(NULL == (chunk_space = H5S_create_simple(space_ndims, chunk_dim, NULL)))
HGOTO_ERROR(H5E_DATASPACE, H5E_CANTCREATE, FAIL, "can't create simple dataspace")
/* Reset hyperslab start array */
@@ -4160,7 +4133,7 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim)
if(has_fill)
for(ent = rdcc->head; ent; ent = ent->next)
/* Check for chunk offset outside of new dimensions */
- for(u = 0; u < (unsigned)space_ndims; u++)
+ for(u = 0; u < space_ndims; u++)
if((hsize_t)ent->offset[u] >= space_dim[u]) {
/* Mark the entry as "deleted" */
ent->deleted = TRUE;
@@ -4176,12 +4149,12 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim)
HDassert((hsize_t) max_mod_chunk_off[op_dim] >= min_mod_chunk_off[op_dim]);
/* Reset the chunk offset indices */
- HDmemset(chunk_offset, 0, ((unsigned)space_ndims * sizeof(chunk_offset[0])));
+ HDmemset(chunk_offset, 0, (space_ndims * sizeof(chunk_offset[0])));
chunk_offset[op_dim] = min_mod_chunk_off[op_dim];
/* Initialize "dims_outside_fill" array */
ndims_outside_fill = 0;
- for(u = 0; u < (unsigned)space_ndims; u++)
+ for(u = 0; u < space_ndims; u++)
if((hssize_t)chunk_offset[u] > max_fill_chunk_off[u]) {
dims_outside_fill[u] = TRUE;
ndims_outside_fill++;
@@ -4196,7 +4169,7 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim)
int i; /* Local index variable */
/* Calculate the index of this chunk */
- if(H5VM_chunk_index((unsigned)space_ndims, chunk_offset,
+ if(H5VM_chunk_index(space_ndims, chunk_offset,
layout->u.chunk.dim, layout->u.chunk.down_chunks,
&(chk_io_info.store->chunk.index)) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get chunk index")
@@ -4210,12 +4183,14 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim)
HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to write fill value")
} /* end if */
else {
+ H5D_chunk_ud_t chk_udata; /* User data for getting chunk info */
+
#ifndef NDEBUG
/* Make sure this chunk is really outside the new dimensions */
{
hbool_t outside_dim = FALSE;
- for(u = 0; u < (unsigned)space_ndims; u++)
+ for(u = 0; u < space_ndims; u++)
if(chunk_offset[u] >= space_dim[u]) {
outside_dim = TRUE;
break;
@@ -4501,7 +4476,7 @@ H5D__chunk_update_cache(H5D_t *dset, hid_t dxpl_id)
HDassert(dset->shared->layout.u.chunk.ndims > 0 && dset->shared->layout.u.chunk.ndims <= H5O_LAYOUT_NDIMS);
/* Get the rank */
- rank = dset->shared->layout.u.chunk.ndims-1;
+ rank = dset->shared->layout.u.chunk.ndims - 1;
HDassert(rank > 0);
/* 1-D dataset's chunks can't have their index change */
diff --git a/src/H5Dcompact.c b/src/H5Dcompact.c
index 789beab..1d8b97c 100644
--- a/src/H5Dcompact.c
+++ b/src/H5Dcompact.c
@@ -174,11 +174,8 @@ H5D__compact_construct(H5F_t *f, H5D_t *dset)
hssize_t stmp_size; /* Temporary holder for raw data size */
hsize_t tmp_size; /* Temporary holder for raw data size */
hsize_t max_comp_data_size; /* Max. allowed size of compact data */
- hsize_t dim[H5O_LAYOUT_NDIMS]; /* Current size of data in elements */
- hsize_t max_dim[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */
- int ndims; /* Rank of dataspace */
- int i; /* Local index variable */
- herr_t ret_value = SUCCEED; /* Return value */
+ unsigned u; /* Local index variable */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_STATIC
@@ -187,11 +184,9 @@ H5D__compact_construct(H5F_t *f, H5D_t *dset)
HDassert(dset);
/* Check for invalid dataset dimensions */
- if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dim, max_dim)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataspace dimensions")
- for(i = 0; i < ndims; i++)
- if(max_dim[i] > dim[i])
- HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible compact dataset")
+ for(u = 0; u < dset->shared->ndims; u++)
+ if(dset->shared->max_dims[u] > dset->shared->curr_dims[u])
+ HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible compact dataset not allowed")
/*
* Compact dataset is stored in dataset object header message of
diff --git a/src/H5Dcontig.c b/src/H5Dcontig.c
index dc09768..e913a3f 100644
--- a/src/H5Dcontig.c
+++ b/src/H5Dcontig.c
@@ -396,10 +396,7 @@ H5D__contig_construct(H5F_t *f, H5D_t *dset)
size_t dt_size; /* Size of datatype */
hsize_t tmp_size; /* Temporary holder for raw data size */
size_t tmp_sieve_buf_size; /* Temporary holder for sieve buffer size */
- hsize_t dim[H5O_LAYOUT_NDIMS]; /* Current size of data in elements */
- hsize_t max_dim[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */
- int ndims; /* Rank of dataspace */
- int i; /* Local index variable */
+ unsigned u; /* Local index variable */
herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_STATIC
@@ -415,11 +412,9 @@ H5D__contig_construct(H5F_t *f, H5D_t *dset)
*/
/* Check for invalid dataset dimensions */
- if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dim, max_dim)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize contiguous storage")
- for(i = 0; i < ndims; i++)
- if(max_dim[i] > dim[i])
- HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible contiguous non-external dataset")
+ for(u = 0; u < dset->shared->ndims; u++)
+ if(dset->shared->max_dims[u] > dset->shared->curr_dims[u])
+ HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible contiguous non-external dataset not allowed")
/* Retrieve the number of elements in the dataspace */
if((snelmts = H5S_GET_EXTENT_NPOINTS(dset->shared->space)) < 0)
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index 0b2fee6..b3dae7b 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -341,9 +341,7 @@ static herr_t
H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
{
htri_t changed; /* Flag to indicate that the dataspace was successfully extended */
- H5S_t *space; /* Dataset's dataspace */
- int rank; /* Dataspace # of dimensions */
- hsize_t curr_dims[H5O_LAYOUT_NDIMS];/* Current dimension sizes */
+ hsize_t old_dims[H5S_MAX_RANK]; /* Current (i.e. old, if changed) dimension sizes */
H5O_fill_t *fill; /* Dataset's fill value */
herr_t ret_value = SUCCEED; /* Return value */
@@ -364,20 +362,30 @@ H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
*/
/* Retrieve the current dimensions */
- space = dataset->shared->space;
- if((rank = H5S_get_simple_extent_dims(space, curr_dims, NULL)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+ HDcompile_assert(sizeof(old_dims) == sizeof(dataset->shared->curr_dims));
+ HDmemcpy(old_dims, dataset->shared->curr_dims, H5S_MAX_RANK * sizeof(old_dims[0]));
/* Increase the size of the data space */
- if((changed = H5S_extend(space, size)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of data space")
+ if((changed = H5S_extend(dataset->shared->space, size)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of dataspace")
/* Updated the dataset's info if the dataspace was successfully extended */
if(changed) {
+ /* Get the extended dimension sizes */
+ /* (Need to retrieve this here, since the 'size' dimensions could
+ * extend one dimension but be smaller in a different dimension,
+ * and the dataspace's extent is the larger of the current and
+ * 'size' dimension values. - QAK)
+ */
+ if(H5S_get_simple_extent_dims(dataset->shared->space, dataset->shared->curr_dims, NULL) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+
/* Update the index values for the cached chunks for this dataset */
if(H5D_CHUNKED == dataset->shared->layout.type) {
+ /* Update general information for chunks */
if(H5D__chunk_set_info(dataset) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks")
+ /* Update the chunk cache indices */
if(H5D__chunk_update_cache(dataset, dxpl_id) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
} /* end if */
@@ -385,8 +393,7 @@ H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
/* Allocate space for the new parts of the dataset, if appropriate */
fill = &dataset->shared->dcpl_cache.fill;
if(fill->alloc_time == H5D_ALLOC_TIME_EARLY)
- if(H5D__alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE,
- curr_dims) < 0)
+ if(H5D__alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE, old_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value")
/* Mark the dataspace as dirty, for later writing to the file */
diff --git a/src/H5Defl.c b/src/H5Defl.c
index 38c8ccd..355492f 100644
--- a/src/H5Defl.c
+++ b/src/H5Defl.c
@@ -126,14 +126,11 @@ static herr_t
H5D__efl_construct(H5F_t *f, H5D_t *dset)
{
size_t dt_size; /* Size of datatype */
- hsize_t dim[H5O_LAYOUT_NDIMS]; /* Current size of data in elements */
- hsize_t max_dim[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */
hssize_t stmp_size; /* Temporary holder for raw data size */
hsize_t tmp_size; /* Temporary holder for raw data size */
hsize_t max_points; /* Maximum elements */
hsize_t max_storage; /* Maximum storage size */
- int ndims; /* Rank of dataspace */
- int i; /* Local index variable */
+ unsigned u; /* Local index variable */
herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_STATIC
@@ -149,11 +146,9 @@ H5D__efl_construct(H5F_t *f, H5D_t *dset)
*/
/* Check for invalid dataset dimensions */
- if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dim, max_dim)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize contiguous storage")
- for(i = 1; i < ndims; i++)
- if(max_dim[i] > dim[i])
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "only the first dimension can be extendible")
+ for(u = 1; u < dset->shared->ndims; u++)
+ if(dset->shared->max_dims[u] > dset->shared->curr_dims[u])
+ HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "only the first dimension can be extendible")
/* Retrieve the size of the dataset's datatype */
if(0 == (dt_size = H5T_get_size(dset->shared->type)))
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 8e1fcec..c626475 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -62,6 +62,7 @@ static H5D_shared_t *H5D__new(hid_t dcpl_id, hbool_t creating,
hbool_t vl_type);
static herr_t H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id,
const H5T_t *type);
+static herr_t H5D__cache_dataspace_info(const H5D_t *dset);
static herr_t H5D__init_space(H5F_t *file, const H5D_t *dset, const H5S_t *space);
static herr_t H5D__update_oh_info(H5F_t *file, hid_t dxpl_id, H5D_t *dset,
hid_t dapl_id);
@@ -665,6 +666,41 @@ done:
/*-------------------------------------------------------------------------
+ * Function: H5D__cache_dataspace_info
+ *
+ * Purpose: Cache dataspace info for a dataset
+ *
+ * Return: Success: SUCCEED
+ * Failure: FAIL
+ *
+ * Programmer: Quincey Koziol
+ * Wednesday, November 19, 2014
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+H5D__cache_dataspace_info(const H5D_t *dset)
+{
+ int sndims; /* Signed number of dimensions of dataspace rank */
+ unsigned u; /* Local index value */
+ herr_t ret_value = SUCCEED; /* Return value */
+
+ FUNC_ENTER_STATIC
+
+ /* Sanity checking */
+ HDassert(dset);
+
+ /* Cache info for dataset's dataspace */
+ if((sndims = H5S_get_simple_extent_dims(dset->shared->space, dset->shared->curr_dims, dset->shared->max_dims)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't cache dataspace dimensions")
+ dset->shared->ndims = (unsigned)sndims;
+
+done:
+ FUNC_LEAVE_NOAPI(ret_value)
+} /* end H5D__cache_dataspace_info() */
+
+
+/*-------------------------------------------------------------------------
* Function: H5D__init_space
*
* Purpose: Copy a dataspace for a dataset's use, performing all the
@@ -698,6 +734,10 @@ H5D__init_space(H5F_t *file, const H5D_t *dset, const H5S_t *space)
if(NULL == (dset->shared->space = H5S_copy(space, FALSE, TRUE)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy dataspace")
+ /* Cache the dataset's dataspace info */
+ if(H5D__cache_dataspace_info(dset) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info")
+
/* Set the latest format, if requested */
if(use_latest_format)
if(H5S_set_latest_version(dset->shared->space) < 0)
@@ -1251,6 +1291,10 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id, hid_t dxpl_id)
if(NULL == (dataset->shared->space = H5S_read(&(dataset->oloc), dxpl_id)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to load dataspace info from dataset header")
+ /* Cache the dataset's dataspace info */
+ if(H5D__cache_dataspace_info(dataset) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info")
+
/* Get a datatype ID for the dataset's datatype */
if((dataset->shared->type_id = H5I_register(H5I_DATATYPE, dataset->shared->type, FALSE)) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register type")
@@ -2161,9 +2205,7 @@ done:
herr_t
H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
{
- H5S_t *space; /* Dataset's dataspace */
- int rank; /* Dataspace # of dimensions */
- hsize_t curr_dims[H5O_LAYOUT_NDIMS];/* Current dimension sizes */
+ hsize_t curr_dims[H5S_MAX_RANK]; /* Current dimension sizes */
htri_t changed; /* Whether the dataspace changed size */
herr_t ret_value = SUCCEED; /* Return value */
@@ -2187,29 +2229,30 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
if(H5D__check_filters(dset) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't apply filters")
- /* Get the data space */
- space = dset->shared->space;
-
- /* Check if we are shrinking or expanding any of the dimensions */
- if((rank = H5S_get_simple_extent_dims(space, curr_dims, NULL)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+ /* Keep the current dataspace dimensions for later */
+ HDcompile_assert(sizeof(curr_dims) == sizeof(dset->shared->curr_dims));
+ HDmemcpy(curr_dims, dset->shared->curr_dims, H5S_MAX_RANK * sizeof(curr_dims[0]));
- /* Modify the size of the data space */
- if((changed = H5S_set_extent(space, size)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of data space")
+ /* Modify the size of the dataspace */
+ if((changed = H5S_set_extent(dset->shared->space, size)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of dataspace")
/* Don't bother updating things, unless they've changed */
if(changed) {
- hbool_t shrink = FALSE; /* Flag to indicate a dimension has shrank */
- hbool_t expand = FALSE; /* Flag to indicate a dimension has grown */
- unsigned u; /* Local index variable */
+ hbool_t shrink = FALSE; /* Flag to indicate a dimension has shrank */
+ hbool_t expand = FALSE; /* Flag to indicate a dimension has grown */
+ unsigned u; /* Local index variable */
/* Determine if we are shrinking and/or expanding any dimensions */
- for(u = 0; u < (unsigned)rank; u++) {
+ for(u = 0; u < dset->shared->ndims; u++) {
+ /* Check for various status changes */
if(size[u] < curr_dims[u])
shrink = TRUE;
if(size[u] > curr_dims[u])
expand = TRUE;
+
+ /* Update the cached copy of the dataset's dimensions */
+ dset->shared->curr_dims[u] = size[u];
} /* end for */
/*-------------------------------------------------------------------------
diff --git a/src/H5Dio.c b/src/H5Dio.c
index 1c77d93..44080dc 100644
--- a/src/H5Dio.c
+++ b/src/H5Dio.c
@@ -302,8 +302,6 @@ H5D__pre_write(H5D_t *dset, hbool_t direct_write, hid_t mem_type_id,
uint32_t direct_filters;
hsize_t *direct_offset;
uint32_t direct_datasize;
- int ndims = 0;
- hsize_t dims[H5O_LAYOUT_NDIMS];
hsize_t internal_offset[H5O_LAYOUT_NDIMS];
unsigned u; /* Local index variable */
@@ -324,12 +322,9 @@ H5D__pre_write(H5D_t *dset, hbool_t direct_write, hid_t mem_type_id,
/* The library's chunking code requires the offset terminates with a zero. So transfer the
* offset array to an internal offset array */
- if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dims, NULL)) < 0)
- HGOTO_ERROR(H5E_DATASPACE, H5E_CANTGET, FAIL, "can't retrieve dataspace extent dims")
-
- for(u = 0; u < (unsigned)ndims; u++) {
+ for(u = 0; u < dset->shared->ndims; u++) {
/* Make sure the offset doesn't exceed the dataset's dimensions */
- if(direct_offset[u] > dims[u])
+ if(direct_offset[u] > dset->shared->curr_dims[u])
HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL, "offset exceeds dimensions of dataset")
/* Make sure the offset fall right on a chunk's boundary */
@@ -340,7 +335,7 @@ H5D__pre_write(H5D_t *dset, hbool_t direct_write, hid_t mem_type_id,
} /* end for */
/* Terminate the offset with a zero */
- internal_offset[ndims] = 0;
+ internal_offset[dset->shared->ndims] = 0;
/* write raw data */
if(H5D__chunk_direct_write(dset, dxpl_id, direct_filters, internal_offset, direct_datasize, buf) < 0)
diff --git a/src/H5Dmpio.c b/src/H5Dmpio.c
index bd1531d..db487cd 100644
--- a/src/H5Dmpio.c
+++ b/src/H5Dmpio.c
@@ -860,8 +860,7 @@ H5D__link_chunk_collective_io(H5D_io_info_t *io_info, const H5D_type_info_t *typ
mspace = chunk_info->mspace;
/* Look up address of chunk */
- if(H5D__chunk_lookup(io_info->dset, io_info->dxpl_id, chunk_info->coords,
- chunk_info->index, &udata) < 0)
+ if(H5D__chunk_lookup(io_info->dset, io_info->dxpl_id, chunk_info->coords, chunk_info->index, &udata) < 0)
HGOTO_ERROR(H5E_STORAGE, H5E_CANTGET, FAIL, "couldn't get chunk address")
ctg_store.contig.dset_addr = udata.chunk_block.offset;
} /* end else */
diff --git a/src/H5Dpkg.h b/src/H5Dpkg.h
index 4ec140f..fa59412 100644
--- a/src/H5Dpkg.h
+++ b/src/H5Dpkg.h
@@ -325,9 +325,9 @@ typedef struct H5D_chunk_info_t {
uint32_t chunk_points; /* Number of elements selected in chunk */
hsize_t coords[H5O_LAYOUT_NDIMS]; /* Coordinates of chunk in file dataset's dataspace */
H5S_t *fspace; /* Dataspace describing chunk & selection in it */
- unsigned fspace_shared; /* Indicate that the file space for a chunk is shared and shouldn't be freed */
+ hbool_t fspace_shared; /* Indicate that the file space for a chunk is shared and shouldn't be freed */
H5S_t *mspace; /* Dataspace describing selection in memory corresponding to this chunk */
- unsigned mspace_shared; /* Indicate that the memory space for a chunk is shared and shouldn't be freed */
+ hbool_t mspace_shared; /* Indicate that the memory space for a chunk is shared and shouldn't be freed */
} H5D_chunk_info_t;
/* Main structure holding the mapping between file chunks and memory */
@@ -418,6 +418,11 @@ typedef struct H5D_shared_t {
H5O_layout_t layout; /* Data layout */
hbool_t checked_filters;/* TRUE if dataset passes can_apply check */
+ /* Cached dataspace info */
+ unsigned ndims; /* The dataset's dataspace rank */
+ hsize_t curr_dims[H5S_MAX_RANK]; /* The curr. size of dataset dimensions */
+ hsize_t max_dims[H5S_MAX_RANK]; /* The max. size of dataset dimensions */
+
/* Buffered/cached information for types of raw data storage*/
struct {
H5D_rdcdc_t contig; /* Information about contiguous data */
diff --git a/src/H5S.c b/src/H5S.c
index 4a1783e..dd2bb0a 100644
--- a/src/H5S.c
+++ b/src/H5S.c
@@ -2144,14 +2144,13 @@ H5S_extend(H5S_t *space, const hsize_t *size)
HDassert(size);
/* Check through all the dimensions to see if modifying the dataspace is allowed */
- for(u = 0; u < space->extent.rank; u++) {
- if(space->extent.size[u]<size[u]) {
- if(space->extent.max && H5S_UNLIMITED!=space->extent.max[u] &&
- space->extent.max[u]<size[u])
+ for(u = 0; u < space->extent.rank; u++)
+ if(space->extent.size[u] < size[u]) {
+ if(space->extent.max && H5S_UNLIMITED != space->extent.max[u] &&
+ space->extent.max[u] < size[u])
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "dimension cannot be increased")
ret_value++;
} /* end if */
- } /* end for */
/* Update */
if(ret_value) {