author     Mohamad Chaarawi <chaarawi@hdfgroup.org>  2015-05-11 19:11:04 (GMT)
committer  Mohamad Chaarawi <chaarawi@hdfgroup.org>  2015-05-11 19:11:04 (GMT)
commit     5520ff138ca79bb5aa46146908773c910f767554 (patch)
tree       c9ea17f4530c1eade235ea077f179d3557042233  /src/H5Ddeprec.c
parent     c3218d9f39ec281506251c2c6ba0b395adfa9013 (diff)
parent     67ba6cb57d84dd34c69930d469ae445697b49731 (diff)
[svn-r27050] merge from trunk.
Diffstat (limited to 'src/H5Ddeprec.c')
-rw-r--r--  src/H5Ddeprec.c  32
1 file changed, 20 insertions, 12 deletions
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index e87cbdb..e9842e2 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -241,6 +241,7 @@ H5Dopen1(hid_t loc_id, const char *name)
H5VL_object_t *obj = NULL; /* object token of loc_id */
H5VL_loc_params_t loc_params;
hid_t dapl_id = H5P_DATASET_ACCESS_DEFAULT; /* dapl to use to open dataset */
+ hid_t dxpl_id = H5AC_ind_dxpl_id; /* dxpl to use to open dataset */
hid_t ret_value;
FUNC_ENTER_API(FAIL)
@@ -258,7 +259,7 @@ H5Dopen1(hid_t loc_id, const char *name)
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier")
/* Open the dataset through the VOL */
- if(NULL == (dset = H5VL_dataset_open(obj->vol_obj, loc_params, obj->vol_info->vol_cls, name, dapl_id, H5AC_dxpl_id, H5_REQUEST_NULL)))
+ if(NULL == (dset = H5VL_dataset_open(obj->vol_obj, loc_params, obj->vol_info->vol_cls, name, dapl_id, dxpl_id, H5_REQUEST_NULL)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to open dataset")
/* Get an atom for the dataset */
@@ -267,7 +268,7 @@ H5Dopen1(hid_t loc_id, const char *name)
done:
if (ret_value < 0 && dset)
- if(H5VL_dataset_close (dset, obj->vol_info->vol_cls, H5AC_dxpl_id, H5_REQUEST_NULL) < 0)
+ if(H5VL_dataset_close (dset, obj->vol_info->vol_cls, dxpl_id, H5_REQUEST_NULL) < 0)
HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
FUNC_LEAVE_API(ret_value)
} /* end H5Dopen1() */
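
For context, H5Dopen1 is the deprecated 1.6-era open call patched above; it takes no dataset access property list, which is why the default dapl is filled in internally. A minimal usage sketch — "example.h5" and "dset" are placeholder names, and the call requires a library built with deprecated symbols enabled:

#include "hdf5.h"

int main(void)
{
    /* Open an existing file read-only ("example.h5" is a placeholder;
     * no error handling shown, for brevity). */
    hid_t file_id = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);

    /* Deprecated 1.6-style open: location and name only */
    hid_t dset_id = H5Dopen1(file_id, "dset");

    /* The modern equivalent takes an explicit access property list:
     *     hid_t dset_id = H5Dopen2(file_id, "dset", H5P_DEFAULT);
     */

    if(dset_id >= 0)
        H5Dclose(dset_id);
    if(file_id >= 0)
        H5Fclose(file_id);
    return 0;
}
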
@@ -329,9 +330,7 @@ static herr_t
H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
{
htri_t changed; /* Flag to indicate that the dataspace was successfully extended */
- H5S_t *space; /* Dataset's dataspace */
- int rank; /* Dataspace # of dimensions */
- hsize_t curr_dims[H5O_LAYOUT_NDIMS];/* Current dimension sizes */
+ hsize_t old_dims[H5S_MAX_RANK]; /* Current (i.e. old, if changed) dimension sizes */
H5O_fill_t *fill; /* Dataset's fill value */
herr_t ret_value = SUCCEED; /* Return value */
@@ -352,20 +351,30 @@ H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
*/
/* Retrieve the current dimensions */
- space = dataset->shared->space;
- if((rank = H5S_get_simple_extent_dims(space, curr_dims, NULL)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+ HDcompile_assert(sizeof(old_dims) == sizeof(dataset->shared->curr_dims));
+ HDmemcpy(old_dims, dataset->shared->curr_dims, H5S_MAX_RANK * sizeof(old_dims[0]));
/* Increase the size of the data space */
- if((changed = H5S_extend(space, size)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of data space")
+ if((changed = H5S_extend(dataset->shared->space, size)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of dataspace")
/* Update the dataset's info if the dataspace was successfully extended */
if(changed) {
+ /* Get the extended dimension sizes */
+ /* (Need to retrieve this here, since the 'size' dimensions could
+ * extend one dimension but be smaller in a different dimension,
+ * and the dataspace's extent is the larger of the current and
+ * 'size' dimension values. - QAK)
+ */
+ if(H5S_get_simple_extent_dims(dataset->shared->space, dataset->shared->curr_dims, NULL) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+
/* Update the index values for the cached chunks for this dataset */
if(H5D_CHUNKED == dataset->shared->layout.type) {
+ /* Update general information for chunks */
if(H5D__chunk_set_info(dataset) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks")
+ /* Update the chunk cache indices */
if(H5D__chunk_update_cache(dataset, dxpl_id) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
} /* end if */
@@ -373,8 +382,7 @@ H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
/* Allocate space for the new parts of the dataset, if appropriate */
fill = &dataset->shared->dcpl_cache.fill;
if(fill->alloc_time == H5D_ALLOC_TIME_EARLY)
- if(H5D__alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE,
- curr_dims) < 0)
+ if(H5D__alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE, old_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value")
/* Mark the dataspace as dirty, for later writing to the file */
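
H5D__extend is the internal routine behind the deprecated H5Dextend API call, which can only grow a dataset's extent (H5Dset_extent is the modern replacement and can also shrink it). A minimal sketch of the calling pattern this hunk services, assuming a chunked dataset with an unlimited dimension — "extend_demo.h5" and "dset" are placeholder names, and error handling is omitted for brevity:

#include "hdf5.h"

int main(void)
{
    hsize_t dims[1]     = {10};            /* initial extent */
    hsize_t maxdims[1]  = {H5S_UNLIMITED}; /* extendible dimension */
    hsize_t chunk[1]    = {5};             /* chunked layout is required */
    hsize_t new_size[1] = {20};            /* requested new extent */

    hid_t file_id  = H5Fcreate("extend_demo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t space_id = H5Screate_simple(1, dims, maxdims);
    hid_t dcpl_id  = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl_id, 1, chunk);

    hid_t dset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_INT, space_id,
                               H5P_DEFAULT, dcpl_id, H5P_DEFAULT);

    /* Deprecated call that reaches H5D__extend: grows the extent,
     * updates cached chunk info, and (with early allocation) fills
     * the newly added elements using the saved old dimensions. */
    H5Dextend(dset_id, new_size);

    H5Dclose(dset_id);
    H5Pclose(dcpl_id);
    H5Sclose(space_id);
    H5Fclose(file_id);
    return 0;
}
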