Diffstat (limited to 'src/H5Ddeprec.c')
-rw-r--r--  src/H5Ddeprec.c | 26
1 file changed, 16 insertions(+), 10 deletions(-)
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index e95084e..e5bb321 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -154,7 +154,7 @@ H5Dcreate1(hid_t loc_id, const char *name, hid_t type_id, hid_t space_id,
/* Create the dataset through the VOL */
if(NULL == (dset = H5VL_dataset_create(obj->vol_obj, loc_params, obj->vol_info->vol_cls, name, dcpl_id,
- H5P_DATASET_ACCESS_DEFAULT, H5AC_dxpl_id, H5_REQUEST_NULL)))
+ H5P_DATASET_ACCESS_DEFAULT, H5AC_ind_read_dxpl_id, H5_REQUEST_NULL)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to create dataset")
/* Get an atom for the dataset */
@@ -163,7 +163,7 @@ H5Dcreate1(hid_t loc_id, const char *name, hid_t type_id, hid_t space_id,
done:
if (ret_value < 0 && dset)
- if(H5VL_dataset_close (dset, obj->vol_info->vol_cls, H5AC_dxpl_id, H5_REQUEST_NULL) < 0)
+ if(H5VL_dataset_close (dset, obj->vol_info->vol_cls, H5AC_ind_read_dxpl_id, H5_REQUEST_NULL) < 0)
HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
FUNC_LEAVE_API(ret_value)
} /* end H5Dcreate1() */
@@ -190,10 +190,9 @@ hid_t
H5Dopen1(hid_t loc_id, const char *name)
{
void *dset = NULL; /* dset token from VOL plugin */
- H5VL_object_t *obj = NULL; /* object token of loc_id */
+ H5VL_object_t *obj = NULL; /* object token of loc_id */
H5VL_loc_params_t loc_params;
- hid_t dapl_id = H5P_DATASET_ACCESS_DEFAULT; /* dapl to use to open dataset */
- hid_t dxpl_id = H5AC_ind_dxpl_id; /* dxpl to use to open dataset */
+ hid_t dxpl_id = H5AC_ind_read_dxpl_id; /* dxpl to use to open dataset */
hid_t ret_value;
FUNC_ENTER_API(FAIL)
@@ -211,7 +210,8 @@ H5Dopen1(hid_t loc_id, const char *name)
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier")
/* Create the dataset through the VOL */
- if(NULL == (dset = H5VL_dataset_open(obj->vol_obj, loc_params, obj->vol_info->vol_cls, name, dapl_id, dxpl_id, H5_REQUEST_NULL)))
+ if(NULL == (dset = H5VL_dataset_open(obj->vol_obj, loc_params, obj->vol_info->vol_cls, name,
+ H5P_DATASET_ACCESS_DEFAULT, dxpl_id, H5_REQUEST_NULL)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to create dataset")
/* Get an atom for the dataset */
@@ -258,7 +258,7 @@ H5Dextend(hid_t dset_id, const hsize_t size[])
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no size specified")
/* Increase size */
- if(H5D__extend(dset, size, H5AC_dxpl_id) < 0)
+ if(H5D__extend(dset, size, H5AC_ind_read_dxpl_id) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to extend dataset")
done:
@@ -377,10 +377,16 @@ H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
/* Allocate space for the new parts of the dataset, if appropriate */
fill = &dataset->shared->dcpl_cache.fill;
- if(fill->alloc_time == H5D_ALLOC_TIME_EARLY)
- if(H5D__alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE, old_dims) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value")
+ if(fill->alloc_time == H5D_ALLOC_TIME_EARLY) {
+ H5D_io_info_t io_info;
+ io_info.dset = dataset;
+ io_info.raw_dxpl_id = H5AC_rawdata_dxpl_id;
+ io_info.md_dxpl_id = dxpl_id;
+
+ if(H5D__alloc_storage(&io_info, H5D_ALLOC_EXTEND, FALSE, old_dims) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value")
+ }
/* Mark the dataspace as dirty, for later writing to the file */
if(H5D__mark(dataset, dxpl_id, H5D_MARK_SPACE) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to mark dataspace as dirty")
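
The last hunk above changes the H5D__alloc_storage() call so that it receives an H5D_io_info_t instead of a bare dxpl_id, which lets H5D__extend() hand over the metadata DXPL (dxpl_id) and the raw-data DXPL (H5AC_rawdata_dxpl_id) separately. The following minimal standalone sketch only illustrates that caller-side pattern; it uses stand-in types rather than the real HDF5 private headers, and apart from the field names dset, md_dxpl_id and raw_dxpl_id, which are copied from the diff, everything in it is assumed for illustration.

/*
 * Standalone sketch (NOT HDF5 internals) of the pattern the last hunk
 * introduces: bundling the dataset pointer with *separate* metadata and
 * raw-data transfer property lists in one io_info struct, so the callee
 * no longer takes a single dxpl_id argument.
 */
#include <stdio.h>

typedef int hid_t;                       /* stand-in for HDF5's hid_t        */

typedef struct dataset_t { const char *name; } dataset_t;

typedef struct io_info_t {               /* fields modeled on H5D_io_info_t  */
    dataset_t *dset;                     /* dataset being operated on        */
    hid_t      md_dxpl_id;               /* metadata transfer property list  */
    hid_t      raw_dxpl_id;              /* raw-data transfer property list  */
} io_info_t;

/* Callee receives both DXPLs through the io_info, mirroring the new
 * H5D__alloc_storage(&io_info, ...) signature shown in the diff. */
static int alloc_storage(const io_info_t *io_info)
{
    printf("allocating storage for '%s' (md dxpl %d, raw dxpl %d)\n",
           io_info->dset->name, io_info->md_dxpl_id, io_info->raw_dxpl_id);
    return 0;
}

int main(void)
{
    dataset_t dset     = { "example" };
    hid_t     md_dxpl  = 1;              /* e.g. H5AC_ind_read_dxpl_id       */
    hid_t     raw_dxpl = 2;              /* e.g. H5AC_rawdata_dxpl_id        */

    io_info_t io_info;                   /* populated the same way the hunk  */
    io_info.dset        = &dset;         /* shows inside H5D__extend()       */
    io_info.md_dxpl_id  = md_dxpl;
    io_info.raw_dxpl_id = raw_dxpl;

    return alloc_storage(&io_info);
}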