Diffstat (limited to 'src/H5Ddeprec.c')
-rw-r--r--  src/H5Ddeprec.c  192
1 file changed, 48 insertions(+), 144 deletions(-)
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index 0807048..51ebeba 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -36,6 +36,7 @@
/* Headers */
/***********/
#include "H5private.h" /* Generic Functions */
+#include "H5CXprivate.h" /* API Contexts */
#include "H5Dpkg.h" /* Datasets */
#include "H5Eprivate.h" /* Error handling */
#include "H5Iprivate.h" /* IDs */
@@ -60,10 +61,6 @@
/* Local Prototypes */
/********************/
-#ifndef H5_NO_DEPRECATED_SYMBOLS
-static herr_t H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id);
-#endif /* H5_NO_DEPRECATED_SYMBOLS */
-
/*********************/
/* Package Variables */
@@ -118,6 +115,8 @@ H5Dcreate1(hid_t loc_id, const char *name, hid_t type_id, hid_t space_id,
H5G_loc_t loc; /* Object location to insert dataset into */
H5D_t *dset = NULL; /* New dataset's info */
const H5S_t *space; /* Dataspace for dataset */
+    hid_t dapl_id = H5P_DEFAULT;        /* DAPL used by library */
+    hbool_t api_ctx_pushed = FALSE;     /* Whether API context pushed */
hid_t ret_value; /* Return value */
FUNC_ENTER_API(FAIL)
@@ -138,8 +137,16 @@ H5Dcreate1(hid_t loc_id, const char *name, hid_t type_id, hid_t space_id,
if(TRUE != H5P_isa_class(dcpl_id, H5P_DATASET_CREATE))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not dataset create property list ID")
+    /* Set API context */
+    if(H5CX_push() < 0)
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, H5I_INVALID_HID, "can't set API context")
+    api_ctx_pushed = TRUE;
+
+    /* Verify access property list and set up collective metadata if appropriate */
+    if(H5CX_set_apl(&dapl_id, H5P_CLS_DACC, loc_id, TRUE) < 0)
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, H5I_INVALID_HID, "can't set access property list info")
+
/* Build and open the new dataset */
- if(NULL == (dset = H5D__create_named(&loc, name, type_id, space, H5P_LINK_CREATE_DEFAULT, dcpl_id, H5P_DATASET_ACCESS_DEFAULT, H5AC_ind_read_dxpl_id)))
+    if(NULL == (dset = H5D__create_named(&loc, name, type_id, space, H5P_LINK_CREATE_DEFAULT, dcpl_id, dapl_id)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to create dataset")
/* Register the new dataset to get an ID for it */
@@ -150,6 +157,8 @@ done:
if(ret_value < 0)
if(dset && H5D_close(dset) < 0)
HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
+
+    if(api_ctx_pushed && H5CX_pop() < 0)
+        HDONE_ERROR(H5E_DATASET, H5E_CANTRESET, H5I_INVALID_HID, "can't reset API context")
FUNC_LEAVE_API(ret_value)
} /* end H5Dcreate1() */
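For orientation, here is a minimal caller-side sketch of the deprecated H5Dcreate1 API updated above. H5Dcreate1 takes no link-creation or dataset-access property list, which is why the library now supplies H5P_LINK_CREATE_DEFAULT and the internally prepared dapl_id when it calls H5D__create_named. The file and dataset names are illustrative, and the sketch assumes a library built with deprecated symbols enabled.

/* Create a dataset through the deprecated five-argument H5Dcreate1 call */
#include "hdf5.h"

int main(void)
{
    hsize_t dims[1] = {10};
    hid_t file  = H5Fcreate("create1_demo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t space = H5Screate_simple(1, dims, NULL);

    /* Only loc/name/type/space/dcpl -- no LCPL or DAPL, unlike H5Dcreate2 */
    hid_t dset = H5Dcreate1(file, "dset", H5T_NATIVE_INT, space, H5P_DEFAULT);

    H5Dclose(dset);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}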
@@ -177,7 +186,8 @@ H5Dopen1(hid_t loc_id, const char *name)
{
H5D_t *dset = NULL;
H5G_loc_t loc; /* Object location of group */
-    hid_t ret_value;
+    hbool_t api_ctx_pushed = FALSE;     /* Whether API context pushed */
+    hid_t ret_value;                    /* Return value */
FUNC_ENTER_API(FAIL)
H5TRACE2("i", "i*s", loc_id, name);
@@ -188,8 +198,13 @@ H5Dopen1(hid_t loc_id, const char *name)
if(!name || !*name)
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no name")
+    /* Set API context */
+    if(H5CX_push() < 0)
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, H5I_INVALID_HID, "can't set API context")
+    api_ctx_pushed = TRUE;
+
/* Open the dataset */
- if(NULL == (dset = H5D__open_name(&loc, name, H5P_DATASET_ACCESS_DEFAULT, H5AC_ind_read_dxpl_id)))
+    if(NULL == (dset = H5D__open_name(&loc, name, H5P_DATASET_ACCESS_DEFAULT)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, FAIL, "unable to open dataset")
/* Register an atom for the dataset */
@@ -200,6 +215,8 @@ done:
if(ret_value < 0)
if(dset && H5D_close(dset) < 0)
HDONE_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to release dataset")
+
+    if(api_ctx_pushed && H5CX_pop() < 0)
+        HDONE_ERROR(H5E_DATASET, H5E_CANTRESET, H5I_INVALID_HID, "can't reset API context")
FUNC_LEAVE_API(ret_value)
} /* end H5Dopen1() */
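Both routines above now follow the same discipline: push a context on entry, record that the push succeeded, and pop it in the 'done' block on success and error paths alike. A schematic of that pattern, stripped of HDF5's error macros, is below; example_call and do_work are hypothetical stand-ins, and the sketch only compiles inside the library build, where H5private.h and H5CXprivate.h are available.

#include "H5private.h"      /* hbool_t, herr_t, SUCCEED/FAIL */
#include "H5CXprivate.h"    /* H5CX_push, H5CX_pop */

extern herr_t do_work(void);            /* hypothetical body of the routine */

herr_t
example_call(void)
{
    hbool_t api_ctx_pushed = FALSE;     /* Whether API context pushed */
    herr_t ret_value = SUCCEED;         /* Return value */

    if(H5CX_push() < 0) {
        ret_value = FAIL;               /* can't set API context */
        goto done;
    } /* end if */
    api_ctx_pushed = TRUE;

    if(do_work() < 0)
        ret_value = FAIL;

done:
    /* Pop only if the matching push succeeded, even on the error path */
    if(api_ctx_pushed && H5CX_pop() < 0)
        ret_value = FAIL;               /* can't reset API context */

    return ret_value;
} /* end example_call() */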
@@ -224,8 +241,11 @@ done:
herr_t
H5Dextend(hid_t dset_id, const hsize_t size[])
{
-    H5D_t *dset;
-    herr_t ret_value = SUCCEED;         /* Return value */
+    H5D_t *dset;                        /* Pointer to dataset to modify */
+    hsize_t dset_dims[H5S_MAX_RANK];    /* Current dataset dimensions */
+    unsigned u;                         /* Local index variable */
+    hbool_t api_ctx_pushed = FALSE;     /* Whether API context pushed */
+    herr_t ret_value = SUCCEED;         /* Return value */
FUNC_ENTER_API(FAIL)
H5TRACE2("e", "i*h", dset_id, size);
@@ -236,146 +256,30 @@ H5Dextend(hid_t dset_id, const hsize_t size[])
if(!size)
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no size specified")
+    /* Make certain that the dataset dimensions don't decrease */
+    /* (Shrinking dimensions is possible with H5Dset_extent, but not H5Dextend) */
+    if(H5S_get_simple_extent_dims(dset->shared->space, dset_dims, NULL) < 0)
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+    for(u = 0; u < dset->shared->ndims; u++)
+        if(size[u] > dset_dims[u])
+            dset_dims[u] = size[u];
+
+    /* Set API context */
+    if(H5CX_push() < 0)
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set API context")
+    api_ctx_pushed = TRUE;
+
+    /* Set up collective metadata if appropriate */
+    if(H5CX_set_loc(dset_id, TRUE) < 0)
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set collective metadata read info")
+
/* Increase size */
- if(H5D__extend(dset, size, H5AC_ind_read_dxpl_id) < 0)
+    if(H5D__set_extent(dset, dset_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to extend dataset")
done:
+    if(api_ctx_pushed && H5CX_pop() < 0)
+        HDONE_ERROR(H5E_DATASET, H5E_CANTRESET, FAIL, "can't reset API context")
FUNC_LEAVE_API(ret_value)
} /* end H5Dextend() */
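A small self-contained illustration (hypothetical sizes; unsigned long long stands in for hsize_t so it compiles without hdf5.h) of the clamping loop added above: H5Dextend only ever grows a dimension, so a requested size smaller than the current extent is ignored rather than shrinking the dataset.

#include <stdio.h>

int main(void)
{
    unsigned long long dset_dims[2] = {10, 20};     /* current dimensions */
    unsigned long long size[2]      = { 5, 30};     /* sizes passed to H5Dextend */
    unsigned u;

    /* Same max() clamp as in H5Dextend above */
    for(u = 0; u < 2; u++)
        if(size[u] > dset_dims[u])
            dset_dims[u] = size[u];

    printf("%llu x %llu\n", dset_dims[0], dset_dims[1]);    /* prints "10 x 30" */
    return 0;
}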
-
-
-/*-------------------------------------------------------------------------
- * Function:    H5D__extend
- *
- * Purpose:     Increases the size of a dataset.
- *
- * Return:      Non-negative on success/Negative on failure
- *
- * Programmer:  Robb Matzke
- *              Friday, January 30, 1998
- *
- *-------------------------------------------------------------------------
- */
-static herr_t
-H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
-{
-    htri_t changed;                     /* Flag to indicate that the dataspace was successfully extended */
-    hsize_t old_dims[H5S_MAX_RANK];     /* Current (i.e. old, if changed) dimension sizes */
-    H5O_fill_t *fill;                   /* Dataset's fill value */
-    herr_t ret_value = SUCCEED;         /* Return value */
-
-    FUNC_ENTER_STATIC
-
-    /* Check args */
-    HDassert(dataset);
-    HDassert(size);
-
-    /* Check if the filters in the DCPL will need to encode, and if so, can they? */
-    if(H5D__check_filters(dataset) < 0)
-        HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't apply filters")
-
-    /*
-     * NOTE: Restrictions on extensions were checked when the dataset was
-     *       created.  All extensions are allowed here since none should be
-     *       able to muck things up.
-     */
-
-    /* Retrieve the current dimensions */
-    HDcompile_assert(sizeof(old_dims) == sizeof(dataset->shared->curr_dims));
-    HDmemcpy(old_dims, dataset->shared->curr_dims, H5S_MAX_RANK * sizeof(old_dims[0]));
-
-    /* Increase the size of the data space */
-    if((changed = H5S_extend(dataset->shared->space, size)) < 0)
-        HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of dataspace")
-
-    /* Updated the dataset's info if the dataspace was successfully extended */
-    if(changed) {
-        /* Get the extended dimension sizes */
-        /* (Need to retrieve this here, since the 'size' dimensions could
-         *  extend one dimension but be smaller in a different dimension,
-         *  and the dataspace's extent is the larger of the current and
-         *  'size' dimension values. - QAK)
-         */
-        if(H5S_get_simple_extent_dims(dataset->shared->space, dataset->shared->curr_dims, NULL) < 0)
-            HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
-
-        /* Update the index values for the cached chunks for this dataset */
-        if(H5D_CHUNKED == dataset->shared->layout.type) {
-            hbool_t update_chunks = FALSE;      /* Flag to indicate chunk cache update is needed */
-
-            /* Check if we need to track & update scaled dimension information */
-            if(dataset->shared->ndims > 1) {
-                unsigned u;                     /* Local indicate variable */
-
-                /* Update scaled chunk information */
-                for(u = 0; u < dataset->shared->ndims; u++) {
-                    hsize_t scaled;             /* Scaled value */
-
-                    /* Compute the scaled dimension size value */
-                    scaled = size[u] / dataset->shared->layout.u.chunk.dim[u];
-
-                    /* Check if scaled dimension size changed */
-                    if(scaled != dataset->shared->cache.chunk.scaled_dims[u]) {
-                        hsize_t scaled_power2up;    /* New size value, rounded to next power of 2 */
-
-                        /* Update the scaled dimension size value for the current dimension */
-                        dataset->shared->cache.chunk.scaled_dims[u] = scaled;
-
-                        /* Check if algorithm for computing hash values will change */
-                        if((scaled > dataset->shared->cache.chunk.nslots &&
-                                    dataset->shared->cache.chunk.scaled_dims[u] <= dataset->shared->cache.chunk.nslots)
-                                || (scaled <= dataset->shared->cache.chunk.nslots &&
-                                    dataset->shared->cache.chunk.scaled_dims[u] > dataset->shared->cache.chunk.nslots))
-                            update_chunks = TRUE;
-
-                        if( !(scaled_power2up = H5VM_power2up(scaled)) )
-                            HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")
-
-                        /* Check if the number of bits required to encode the scaled size value changed */
-                        if(dataset->shared->cache.chunk.scaled_power2up[u] != scaled_power2up) {
-                            /* Update the 'power2up' & 'encode_bits' values for the current dimension */
-                            dataset->shared->cache.chunk.scaled_power2up[u] = scaled_power2up;
-                            dataset->shared->cache.chunk.scaled_encode_bits[u] = H5VM_log2_gen(scaled_power2up);
-
-                            /* Indicate that the chunk cache indices should be updated */
-                            update_chunks = TRUE;
-                        } /* end if */
-                    } /* end if */
-                } /* end for */
-            } /* end if */
-
-            /* Update general information for chunks */
-            if(H5D__chunk_set_info(dataset) < 0)
-                HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks")
-
-            /* Check for updating chunk cache indices */
-            if(update_chunks) {
-                /* Update the chunk cache indices */
-                if(H5D__chunk_update_cache(dataset, dxpl_id) < 0)
-                    HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
-            } /* end if */
-        } /* end if */
-
-        /* Allocate space for the new parts of the dataset, if appropriate */
-        fill = &dataset->shared->dcpl_cache.fill;
-        if(fill->alloc_time == H5D_ALLOC_TIME_EARLY) {
-            H5D_io_info_t io_info;
-
-            io_info.dset = dataset;
-            io_info.raw_dxpl_id = H5AC_rawdata_dxpl_id;
-            io_info.md_dxpl_id = dxpl_id;
-
-            if(H5D__alloc_storage(&io_info, H5D_ALLOC_EXTEND, FALSE, old_dims) < 0)
-                HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value")
-        }
-
-        /* Mark the dataspace as dirty, for later writing to the file */
-        if(H5D__mark(dataset, dxpl_id, H5D_MARK_SPACE) < 0)
-            HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to mark dataspace as dirty")
-    } /* end if */
-
-done:
-    FUNC_LEAVE_NOAPI(ret_value)
-} /* end H5D__extend() */
#endif /* H5_NO_DEPRECATED_SYMBOLS */
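Finally, a usage sketch (illustrative file and dataset names; requires deprecated symbols enabled) contrasting the two public calls involved: H5Dextend can only grow a chunked dataset, while H5Dset_extent, whose internal worker H5D__set_extent the patched H5Dextend now calls, can also shrink it.

#include "hdf5.h"

int main(void)
{
    hsize_t dims[1]     = {10};
    hsize_t maxdims[1]  = {H5S_UNLIMITED};
    hsize_t chunk[1]    = {5};
    hsize_t new_size[1] = {100};

    hid_t file  = H5Fcreate("extend_demo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t space = H5Screate_simple(1, dims, maxdims);   /* extendible dataspace */
    hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 1, chunk);                       /* chunking required to extend */

    hid_t dset = H5Dcreate2(file, "dset", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);

    H5Dextend(dset, new_size);          /* grow 10 -> 100 */

    new_size[0] = 50;
    H5Dset_extent(dset, new_size);      /* shrink 100 -> 50; H5Dextend cannot do this */

    H5Pclose(dcpl);
    H5Dclose(dset);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}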