Diffstat (limited to 'src/H5Ddeprec.c')
-rw-r--r--   src/H5Ddeprec.c   373
1 file changed, 174 insertions, 199 deletions
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index 0807048..f4f4223 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -36,9 +36,11 @@
/* Headers */
/***********/
#include "H5private.h" /* Generic Functions */
+#include "H5CXprivate.h" /* API Contexts */
#include "H5Dpkg.h" /* Datasets */
#include "H5Eprivate.h" /* Error handling */
#include "H5Iprivate.h" /* IDs */
+#include "H5VLprivate.h" /* Virtual Object Layer */
/****************/
@@ -60,10 +62,6 @@
/* Local Prototypes */
/********************/
-#ifndef H5_NO_DEPRECATED_SYMBOLS
-static herr_t H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id);
-#endif /* H5_NO_DEPRECATED_SYMBOLS */
-
/*********************/
/* Package Variables */
@@ -83,28 +81,28 @@ static herr_t H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id);
#ifndef H5_NO_DEPRECATED_SYMBOLS
/*-------------------------------------------------------------------------
- * Function: H5Dcreate1
+ * Function: H5Dcreate1
*
- * Purpose: Creates a new dataset named NAME at LOC_ID, opens the
- * dataset for access, and associates with that dataset constant
- * and initial persistent properties including the type of each
- * datapoint as stored in the file (TYPE_ID), the size of the
- * dataset (SPACE_ID), and other initial miscellaneous
- * properties (DCPL_ID).
+ * Purpose: Creates a new dataset named NAME at LOC_ID, opens the
+ * dataset for access, and associates with that dataset constant
+ * and initial persistent properties including the type of each
+ * datapoint as stored in the file (TYPE_ID), the size of the
+ * dataset (SPACE_ID), and other initial miscellaneous
+ * properties (DCPL_ID).
*
- * All arguments are copied into the dataset, so the caller is
- * allowed to derive new types, data spaces, and creation
- * parameters from the old ones and reuse them in calls to
- * create other datasets.
+ * All arguments are copied into the dataset, so the caller is
+ * allowed to derive new types, data spaces, and creation
+ * parameters from the old ones and reuse them in calls to
+ * create other datasets.
*
- * Return: Success: The object ID of the new dataset. At this
- * point, the dataset is ready to receive its
- * raw data. Attempting to read raw data from
- * the dataset will probably return the fill
- * value. The dataset should be closed when
- * the caller is no longer interested in it.
+ * Return: Success: The object ID of the new dataset. At this
+ * point, the dataset is ready to receive its
+ * raw data. Attempting to read raw data from
+ * the dataset will probably return the fill
+ * value. The dataset should be closed when
+ * the caller is no longer interested in it.
*
- * Failure: FAIL
+ * Failure: H5I_INVALID_HID
*
* Programmer: Robb Matzke
* Wednesday, December 3, 1997
@@ -115,57 +113,70 @@ hid_t
H5Dcreate1(hid_t loc_id, const char *name, hid_t type_id, hid_t space_id,
hid_t dcpl_id)
{
- H5G_loc_t loc; /* Object location to insert dataset into */
- H5D_t *dset = NULL; /* New dataset's info */
- const H5S_t *space; /* Dataspace for dataset */
- hid_t ret_value; /* Return value */
+ void *dset = NULL; /* dset object from VOL connector */
+ H5VL_object_t *vol_obj = NULL; /* object of loc_id */
+ H5VL_loc_params_t loc_params;
+ hid_t ret_value = H5I_INVALID_HID; /* Return value */
- FUNC_ENTER_API(FAIL)
+ FUNC_ENTER_API(H5I_INVALID_HID)
H5TRACE5("i", "i*siii", loc_id, name, type_id, space_id, dcpl_id);
/* Check arguments */
- if(H5G_loc(loc_id, &loc) < 0)
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a location ID")
- if(!name || !*name)
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no name")
- if(H5I_DATATYPE != H5I_get_type(type_id))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a datatype ID")
- if(NULL == (space = (const H5S_t *)H5I_object_verify(space_id,H5I_DATASPACE)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataspace ID")
+ if(!name)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, H5I_INVALID_HID, "name parameter cannot be NULL")
+ if(!*name)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, H5I_INVALID_HID, "name parameter cannot be an empty string")
+
+ /* Set up collective metadata if appropriate */
+ if(H5CX_set_loc(loc_id) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, H5I_INVALID_HID, "can't set collective metadata read")
+
if(H5P_DEFAULT == dcpl_id)
dcpl_id = H5P_DATASET_CREATE_DEFAULT;
else
if(TRUE != H5P_isa_class(dcpl_id, H5P_DATASET_CREATE))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not dataset create property list ID")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "not dataset create property list ID")
+
+ /* Set the DCPL for the API context */
+ H5CX_set_dcpl(dcpl_id);
- /* Build and open the new dataset */
- if(NULL == (dset = H5D__create_named(&loc, name, type_id, space, H5P_LINK_CREATE_DEFAULT, dcpl_id, H5P_DATASET_ACCESS_DEFAULT, H5AC_ind_read_dxpl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to create dataset")
+ /* Set location parameters */
+ loc_params.type = H5VL_OBJECT_BY_SELF;
+ loc_params.obj_type = H5I_get_type(loc_id);
- /* Register the new dataset to get an ID for it */
- if((ret_value = H5I_register(H5I_DATASET, dset, TRUE)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register dataset")
+ /* get the location object */
+ if(NULL == (vol_obj = H5VL_vol_object(loc_id)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid location identifier")
+
+ /* Create the dataset through the VOL */
+ if(NULL == (dset = H5VL_dataset_create(vol_obj, &loc_params, name, H5P_LINK_CREATE_DEFAULT, type_id, space_id, dcpl_id,
+ H5P_DATASET_ACCESS_DEFAULT, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL)))
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, H5I_INVALID_HID, "unable to create dataset")
+
+ /* Get an atom for the dataset */
+ if((ret_value = H5VL_register(H5I_DATASET, dset, vol_obj->connector, TRUE)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, H5I_INVALID_HID, "unable to register dataset")
done:
- if(ret_value < 0)
- if(dset && H5D_close(dset) < 0)
- HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
+ if (H5I_INVALID_HID == ret_value)
+ if(dset && H5VL_dataset_close(vol_obj, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL) < 0)
+ HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, H5I_INVALID_HID, "unable to release dataset")
FUNC_LEAVE_API(ret_value)
} /* end H5Dcreate1() */
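
For reference, here is a minimal usage sketch of the deprecated creation path reworked above; the file name "example.h5", the dataset name "dset", and the 10-element extent are illustrative assumptions, not part of this change.

/* Hedged usage sketch for H5Dcreate1(); names and sizes are assumptions. */
#include "hdf5.h"

int main(void)
{
    hsize_t dims[1]  = {10};
    hid_t   file_id  = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t   space_id = H5Screate_simple(1, dims, NULL);

    /* Deprecated form: no link-creation or dataset-access property lists,
     * unlike H5Dcreate2(); the library supplies H5P_LINK_CREATE_DEFAULT and
     * H5P_DATASET_ACCESS_DEFAULT internally, as the diff above shows. */
    hid_t dset_id = H5Dcreate1(file_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT);

    if (dset_id >= 0)
        H5Dclose(dset_id);
    H5Sclose(space_id);
    H5Fclose(file_id);
    return 0;
}
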
/*-------------------------------------------------------------------------
- * Function: H5Dopen1
+ * Function: H5Dopen1
*
- * Purpose: Finds a dataset named NAME at LOC_ID, opens it, and returns
- * its ID. The dataset should be close when the caller is no
- * longer interested in it.
+ * Purpose: Finds a dataset named NAME at LOC_ID, opens it, and returns
+ * its ID. The dataset should be closed when the caller is no
+ * longer interested in it.
*
- * Note: Deprecated in favor of H5Dopen2
+ * Note: Deprecated in favor of H5Dopen2
*
- * Return: Success: A new dataset ID
- * Failure: FAIL
+ * Return: Success: A new dataset ID
+ * Failure: H5I_INVALID_HID
*
* Programmer: Robb Matzke
* Thursday, December 4, 1997
@@ -175,46 +186,55 @@ done:
hid_t
H5Dopen1(hid_t loc_id, const char *name)
{
- H5D_t *dset = NULL;
- H5G_loc_t loc; /* Object location of group */
- hid_t ret_value;
+ void *dset = NULL; /* dset object from VOL connector */
+ H5VL_object_t *vol_obj = NULL; /* object of loc_id */
+ H5VL_loc_params_t loc_params;
+ hid_t ret_value = H5I_INVALID_HID; /* Return value */
- FUNC_ENTER_API(FAIL)
+ FUNC_ENTER_API(H5I_INVALID_HID)
H5TRACE2("i", "i*s", loc_id, name);
/* Check args */
- if(H5G_loc(loc_id, &loc) < 0)
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a location")
- if(!name || !*name)
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no name")
+ if (!name)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, H5I_INVALID_HID, "name parameter cannot be NULL")
+ if (!*name)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, H5I_INVALID_HID, "name parameter cannot be an empty string")
+
+ /* Set location parameters */
+ loc_params.type = H5VL_OBJECT_BY_SELF;
+ loc_params.obj_type = H5I_get_type(loc_id);
+
+ /* get the location object */
+ if(NULL == (vol_obj = H5VL_vol_object(loc_id)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid location identifier")
/* Open the dataset */
- if(NULL == (dset = H5D__open_name(&loc, name, H5P_DATASET_ACCESS_DEFAULT, H5AC_ind_read_dxpl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, FAIL, "unable to open dataset")
+ if(NULL == (dset = H5VL_dataset_open(vol_obj, &loc_params, name, H5P_DATASET_ACCESS_DEFAULT, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL)))
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, H5I_INVALID_HID, "unable to open dataset")
- /* Register an atom for the dataset */
- if((ret_value = H5I_register(H5I_DATASET, dset, TRUE)) < 0)
- HGOTO_ERROR(H5E_ATOM, H5E_CANTREGISTER, FAIL, "can't register dataset atom")
+ /* Get an atom for the dataset */
+ if((ret_value = H5VL_register(H5I_DATASET, dset, vol_obj->connector, TRUE)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, H5I_INVALID_HID, "can't register dataset atom")
done:
- if(ret_value < 0)
- if(dset && H5D_close(dset) < 0)
- HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
+ if(H5I_INVALID_HID == ret_value)
+ if(dset && H5VL_dataset_close(vol_obj, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL) < 0)
+ HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, H5I_INVALID_HID, "unable to release dataset")
FUNC_LEAVE_API(ret_value)
} /* end H5Dopen1() */
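
Likewise, a minimal sketch of the deprecated open path, assuming the file and dataset from the previous sketch exist.

/* Hedged usage sketch for H5Dopen1(); assumes "example.h5" holds an
 * integer dataset named "dset" (as in the creation sketch above). */
#include "hdf5.h"

int main(void)
{
    int   data[10];
    hid_t file_id = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);

    /* Deprecated form: H5Dopen2() additionally takes a dataset access
     * property list; H5Dopen1() always uses H5P_DATASET_ACCESS_DEFAULT. */
    hid_t dset_id = H5Dopen1(file_id, "dset");

    if (dset_id >= 0) {
        H5Dread(dset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);
        H5Dclose(dset_id);
    }
    H5Fclose(file_id);
    return 0;
}
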
/*-------------------------------------------------------------------------
- * Function: H5Dextend
+ * Function: H5Dextend
*
- * Purpose: This function makes sure that the dataset is at least of size
- * SIZE. The dimensionality of SIZE is the same as the data
- * space of the dataset being changed.
+ * Purpose: This function makes sure that the dataset is at least of size
+ * SIZE. The dimensionality of SIZE is the same as the data
+ * space of the dataset being changed.
*
- * Note: Deprecated in favor of H5Dset_extent
+ * Note: Deprecated in favor of H5Dset_extent
*
- * Return: Non-negative on success/Negative on failure
+ * Return: Non-negative on success/Negative on failure
*
* Programmer: Robb Matzke
* Friday, January 30, 1998
@@ -224,158 +244,113 @@ done:
herr_t
H5Dextend(hid_t dset_id, const hsize_t size[])
{
- H5D_t *dset;
- herr_t ret_value = SUCCEED; /* Return value */
+ H5VL_object_t *vol_obj = NULL; /* Dataset structure */
+ hid_t sid = H5I_INVALID_HID; /* Dataspace ID */
+ H5S_t *ds = NULL; /* Dataspace struct */
+ int ndims; /* Dataset/space rank */
+ hsize_t dset_dims[H5S_MAX_RANK]; /* Current dataset dimensions */
+ int i; /* Local index variable */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_API(FAIL)
H5TRACE2("e", "i*h", dset_id, size);
/* Check args */
- if(NULL == (dset = (H5D_t *)H5I_object_verify(dset_id, H5I_DATASET)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset")
+ if(NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(dset_id, H5I_DATASET)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid dataset identifier")
if(!size)
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no size specified")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no size specified")
+
+ /* Get the dataspace pointer for the dataset */
+ if(H5VL_dataset_get(vol_obj, H5VL_DATASET_GET_SPACE, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL, &sid) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get dataspace")
+ if(H5I_INVALID_HID == sid)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "received an invalid dataspace from the dataset")
+ if(NULL == (ds = (H5S_t *)H5I_object_verify(sid, H5I_DATASPACE)))
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "couldn't get dataspace structure from ID")
+
+ /* Get the dataset's current extent */
+ if(H5S_get_simple_extent_dims(ds, dset_dims, NULL) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+
+ /* Get the dataset dimensions */
+ ndims = H5S_GET_EXTENT_NDIMS(ds);
+
+ /* Make certain that the dataset dimensions don't decrease in any dimension.
+ *
+ * (Shrinking dimensions is possible with H5Dset_extent, but not H5Dextend)
+ *
+ * XXX (VOL_MERGE): I feel like we should fail here instead of just silently
+ * not doing what we're supposed to do.
+ */
+ for(i = 0; i < ndims; i++)
+ if(size[i] > dset_dims[i])
+ dset_dims[i] = size[i];
+
+ /* Set up collective metadata if appropriate */
+ if(H5CX_set_loc(dset_id) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set collective metadata read info")
/* Increase size */
- if(H5D__extend(dset, size, H5AC_ind_read_dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to extend dataset")
+ if ((ret_value = H5VL_dataset_specific(vol_obj, H5VL_DATASET_SET_EXTENT, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL, dset_dims)) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to extend dataset")
done:
+ /* Close the dataspace */
+ if(sid != H5I_INVALID_HID && H5I_dec_app_ref(sid) < 0)
+ HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "can't close dataspace")
+
FUNC_LEAVE_API(ret_value)
} /* end H5Dextend() */
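
A minimal sketch of the deprecated extend path, assuming a chunked, extendible dataset; the names, chunk size, and extents are illustrative only. Note the behavior the reworked code above preserves: dimensions smaller than the current extent are left unchanged rather than shrunk.

/* Hedged usage sketch for H5Dextend(); names and sizes are assumptions. */
#include "hdf5.h"

int main(void)
{
    hsize_t dims[1]    = {10};
    hsize_t maxdims[1] = {H5S_UNLIMITED};
    hsize_t chunk[1]   = {5};
    hsize_t newsize[1] = {20};

    hid_t file_id  = H5Fcreate("extend.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t space_id = H5Screate_simple(1, dims, maxdims);
    hid_t dcpl_id  = H5Pcreate(H5P_DATASET_CREATE);

    H5Pset_chunk(dcpl_id, 1, chunk);    /* extendible datasets must be chunked */

    hid_t dset_id = H5Dcreate1(file_id, "extendable", H5T_NATIVE_INT, space_id, dcpl_id);

    if (dset_id >= 0) {
        H5Dextend(dset_id, newsize);    /* grow from 10 to 20 elements */
        H5Dclose(dset_id);
    }
    H5Pclose(dcpl_id);
    H5Sclose(space_id);
    H5Fclose(file_id);
    return 0;
}
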
-
/*-------------------------------------------------------------------------
- * Function: H5D__extend
+ * Function: H5Dvlen_reclaim
*
- * Purpose: Increases the size of a dataset.
+ * Purpose: Frees the buffers allocated for storing variable-length data
+ * in memory. Only frees the VL data in the selection defined in the
+ * dataspace. The dataset transfer property list is required to find the
+ * correct allocation/free methods for the VL data in the buffer.
*
- * Return: Non-negative on success/Negative on failure
+ * Return: Non-negative on success, negative on failure
*
- * Programmer: Robb Matzke
- * Friday, January 30, 1998
+ * Programmer: Quincey Koziol
+ * Thursday, June 10, 1999
*
*-------------------------------------------------------------------------
*/
-static herr_t
-H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
+herr_t
+H5Dvlen_reclaim(hid_t type_id, hid_t space_id, hid_t dxpl_id, void *buf)
{
- htri_t changed; /* Flag to indicate that the dataspace was successfully extended */
- hsize_t old_dims[H5S_MAX_RANK]; /* Current (i.e. old, if changed) dimension sizes */
- H5O_fill_t *fill; /* Dataset's fill value */
- herr_t ret_value = SUCCEED; /* Return value */
+ H5S_t *space; /* Dataspace for iteration */
+ herr_t ret_value; /* Return value */
- FUNC_ENTER_STATIC
+ FUNC_ENTER_API(FAIL)
+ H5TRACE4("e", "iii*x", type_id, space_id, dxpl_id, buf);
/* Check args */
- HDassert(dataset);
- HDassert(size);
-
- /* Check if the filters in the DCPL will need to encode, and if so, can they? */
- if(H5D__check_filters(dataset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't apply filters")
+ if(H5I_DATATYPE != H5I_get_type(type_id) || buf == NULL)
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid argument")
+ if(NULL == (space = (H5S_t *)H5I_object_verify(space_id, H5I_DATASPACE)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid dataspace")
+ if(!(H5S_has_extent(space)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "dataspace does not have extent set")
+
+ /* Get the default dataset transfer property list if the user didn't provide one */
+ if(H5P_DEFAULT == dxpl_id)
+ dxpl_id = H5P_DATASET_XFER_DEFAULT;
+ else
+ if(TRUE != H5P_isa_class(dxpl_id, H5P_DATASET_XFER))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not xfer parms")
- /*
- * NOTE: Restrictions on extensions were checked when the dataset was
- * created. All extensions are allowed here since none should be
- * able to muck things up.
- */
+ /* Set DXPL for operation */
+ H5CX_set_dxpl(dxpl_id);
- /* Retrieve the current dimensions */
- HDcompile_assert(sizeof(old_dims) == sizeof(dataset->shared->curr_dims));
- HDmemcpy(old_dims, dataset->shared->curr_dims, H5S_MAX_RANK * sizeof(old_dims[0]));
-
- /* Increase the size of the data space */
- if((changed = H5S_extend(dataset->shared->space, size)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of dataspace")
-
- /* Updated the dataset's info if the dataspace was successfully extended */
- if(changed) {
- /* Get the extended dimension sizes */
- /* (Need to retrieve this here, since the 'size' dimensions could
- * extend one dimension but be smaller in a different dimension,
- * and the dataspace's extent is the larger of the current and
- * 'size' dimension values. - QAK)
- */
- if(H5S_get_simple_extent_dims(dataset->shared->space, dataset->shared->curr_dims, NULL) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
-
- /* Update the index values for the cached chunks for this dataset */
- if(H5D_CHUNKED == dataset->shared->layout.type) {
- hbool_t update_chunks = FALSE; /* Flag to indicate chunk cache update is needed */
-
- /* Check if we need to track & update scaled dimension information */
- if(dataset->shared->ndims > 1) {
- unsigned u; /* Local indicate variable */
-
- /* Update scaled chunk information */
- for(u = 0; u < dataset->shared->ndims; u++) {
- hsize_t scaled; /* Scaled value */
-
- /* Compute the scaled dimension size value */
- scaled = size[u] / dataset->shared->layout.u.chunk.dim[u];
-
- /* Check if scaled dimension size changed */
- if(scaled != dataset->shared->cache.chunk.scaled_dims[u]) {
- hsize_t scaled_power2up; /* New size value, rounded to next power of 2 */
-
- /* Update the scaled dimension size value for the current dimension */
- dataset->shared->cache.chunk.scaled_dims[u] = scaled;
-
- /* Check if algorithm for computing hash values will change */
- if((scaled > dataset->shared->cache.chunk.nslots &&
- dataset->shared->cache.chunk.scaled_dims[u] <= dataset->shared->cache.chunk.nslots)
- || (scaled <= dataset->shared->cache.chunk.nslots &&
- dataset->shared->cache.chunk.scaled_dims[u] > dataset->shared->cache.chunk.nslots))
- update_chunks = TRUE;
-
- if( !(scaled_power2up = H5VM_power2up(scaled)) )
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")
-
- /* Check if the number of bits required to encode the scaled size value changed */
- if(dataset->shared->cache.chunk.scaled_power2up[u] != scaled_power2up) {
- /* Update the 'power2up' & 'encode_bits' values for the current dimension */
- dataset->shared->cache.chunk.scaled_power2up[u] = scaled_power2up;
- dataset->shared->cache.chunk.scaled_encode_bits[u] = H5VM_log2_gen(scaled_power2up);
-
- /* Indicate that the chunk cache indices should be updated */
- update_chunks = TRUE;
- } /* end if */
- } /* end if */
- } /* end for */
- } /* end if */
-
- /* Update general information for chunks */
- if(H5D__chunk_set_info(dataset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks")
-
- /* Check for updating chunk cache indices */
- if(update_chunks) {
- /* Update the chunk cache indices */
- if(H5D__chunk_update_cache(dataset, dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
- } /* end if */
- } /* end if */
-
- /* Allocate space for the new parts of the dataset, if appropriate */
- fill = &dataset->shared->dcpl_cache.fill;
- if(fill->alloc_time == H5D_ALLOC_TIME_EARLY) {
- H5D_io_info_t io_info;
-
- io_info.dset = dataset;
- io_info.raw_dxpl_id = H5AC_rawdata_dxpl_id;
- io_info.md_dxpl_id = dxpl_id;
-
- if(H5D__alloc_storage(&io_info, H5D_ALLOC_EXTEND, FALSE, old_dims) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value")
- }
- /* Mark the dataspace as dirty, for later writing to the file */
- if(H5D__mark(dataset, dxpl_id, H5D_MARK_SPACE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to mark dataspace as dirty")
- } /* end if */
+ /* Call internal routine */
+ ret_value = H5T_reclaim(type_id, space, buf);
done:
- FUNC_LEAVE_NOAPI(ret_value)
-} /* end H5D__extend() */
+ FUNC_LEAVE_API(ret_value)
+} /* end H5Dvlen_reclaim() */
+
#endif /* H5_NO_DEPRECATED_SYMBOLS */
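
Finally, a minimal sketch of the reclaim path now routed through H5T_reclaim() above; the dataset ID passed in and the element count are assumptions made for illustration.

/* Hedged usage sketch for H5Dvlen_reclaim(); assumes dset_id refers to a
 * dataset of variable-length sequences of int with at most NELMTS elements. */
#include "hdf5.h"

#define NELMTS 10

void reclaim_example(hid_t dset_id)
{
    hvl_t buf[NELMTS];
    hid_t vl_type  = H5Tvlen_create(H5T_NATIVE_INT);
    hid_t space_id = H5Dget_space(dset_id);

    /* H5Dread() allocates one buffer per element for the VL data ... */
    if (H5Dread(dset_id, vl_type, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) >= 0)
        /* ... and H5Dvlen_reclaim() frees them, using the allocator
         * recorded in the (default) dataset transfer property list. */
        H5Dvlen_reclaim(vl_type, space_id, H5P_DEFAULT, buf);

    H5Sclose(space_id);
    H5Tclose(vl_type);
}
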