path: root/src/H5Dint.c
author    Dana Robinson <derobins@hdfgroup.org>    2018-09-24 19:23:41 (GMT)
committer Dana Robinson <derobins@hdfgroup.org>    2018-09-24 19:23:41 (GMT)
commit    a56b4db4ae25d30e6e7eb96f27d7b91e35148591 (patch)
tree      7738d08a9734103fd12242eb5ff0244dd74c214d /src/H5Dint.c
parent    37309c1bd98b3d810f09569c72ac375e75117a91 (diff)
Remainder of vol_normalization changes (dataset, attribute, files, objects).
Diffstat (limited to 'src/H5Dint.c')
-rw-r--r--  src/H5Dint.c  134
1 file changed, 73 insertions(+), 61 deletions(-)
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 2d4985e..9af391e 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -22,8 +22,8 @@
/* Headers */
/***********/
#include "H5private.h" /* Generic Functions */
-#include "H5Dpkg.h" /* Datasets */
#include "H5CXprivate.h" /* API Contexts */
+#include "H5Dpkg.h" /* Datasets */
#include "H5Eprivate.h" /* Error handling */
#include "H5FLprivate.h" /* Free Lists */
#include "H5FOprivate.h" /* File objects */
@@ -48,18 +48,17 @@
/* General stuff */
static H5D_shared_t *H5D__new(hid_t dcpl_id, hbool_t creating, hbool_t vl_type);
-static herr_t H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id,
- const H5T_t *type);
+static herr_t H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id, const H5T_t *type);
static herr_t H5D__cache_dataspace_info(const H5D_t *dset);
static herr_t H5D__init_space(H5F_t *file, const H5D_t *dset, const H5S_t *space);
static herr_t H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id);
-static herr_t H5D__build_file_prefix(const H5D_t *dset, hid_t dapl_id,
- const char *prefix_type, char **file_prefix);
+static herr_t H5D__build_file_prefix(const H5D_t *dset, hid_t dapl_id, const char *prefix_type, char **file_prefix);
static herr_t H5D__open_oid(H5D_t *dataset, hid_t dapl_id);
static herr_t H5D__init_storage(const H5D_io_info_t *io_info, hbool_t full_overwrite,
hsize_t old_dim[]);
static herr_t H5D__append_flush_setup(H5D_t *dset, hid_t dapl_id);
-static herr_t H5D__close_cb(H5D_t *dataset);
+static herr_t H5D__close_cb(H5D_t *dset);
+
/*********************/
/* Package Variables */
@@ -281,6 +280,37 @@ H5D_term_package(void)
/*-------------------------------------------------------------------------
+ * Function: H5D__close_cb
+ *
+ * Purpose: Called when the ref count reaches zero on the dataset's ID
+ *
+ * Return: SUCCEED/FAIL
+ *
+ *-------------------------------------------------------------------------
+ */
+static herr_t
+H5D__close_cb(H5D_t *dset)
+{
+ herr_t ret_value = SUCCEED; /* Return value */
+
+ FUNC_ENTER_STATIC
+
+ /* Sanity check */
+ HDassert(dset);
+ HDassert(dset->oloc.file);
+ HDassert(dset->shared);
+ HDassert(dset->shared->fo_count > 0);
+
+ /* Close the dataset */
+ if(H5D_close(dset) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CLOSEERROR, FAIL, "unable to close dataset");
+
+done:
+ FUNC_LEAVE_NOAPI(ret_value)
+} /* end H5D__close_cb() */
+
+
+/*-------------------------------------------------------------------------
* Function: H5D__create_named
*
* Purpose: Internal routine to create a new dataset.
@@ -505,7 +535,8 @@ H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id, const H5T_t *type)
HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy datatype")
/* Convert a datatype (if committed) to a transient type if the committed datatype's file
- location is different from the file location where the dataset will be created */
+ * location is different from the file location where the dataset will be created.
+ */
if(H5T_convert_committed_datatype(dset->shared->type, file) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get shared datatype info")
@@ -520,7 +551,7 @@ H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id, const H5T_t *type)
/* Get a datatype ID for the dataset's datatype */
if((dset->shared->type_id = H5I_register(H5I_DATATYPE, dset->shared->type, FALSE)) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register type")
- } /* end if */
+ }
/* Not a custom datatype, just use it directly */
else {
if(H5I_inc_ref(type_id, FALSE) < 0)
@@ -529,7 +560,7 @@ H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id, const H5T_t *type)
/* Use existing datatype */
dset->shared->type_id = type_id;
dset->shared->type = (H5T_t *)type; /* (Cast away const OK - QAK) */
- } /* end else */
+ }
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -569,7 +600,7 @@ H5D__cache_dataspace_info(const H5D_t *dset)
if( !(scaled_power2up = H5VM_power2up(dset->shared->curr_dims[u])) )
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")
dset->shared->curr_power2up[u] = scaled_power2up;
- } /* end for */
+ }
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -685,7 +716,7 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to convert fill value to dataset type")
fill_prop->fill_defined = TRUE;
- } /* end if */
+ }
else if(fill_status == H5D_FILL_VALUE_UNDEFINED)
fill_prop->fill_defined = FALSE;
else
@@ -848,7 +879,7 @@ H5D__build_file_prefix(const H5D_t *dset, hid_t dapl_id, const char *prefix_type
*/
if(HDstrcmp(prefix_type, H5D_ACS_VDS_PREFIX_NAME) == 0)
prefix = HDgetenv("HDF5_VDS_PREFIX");
- else if (HDstrcmp(prefix_type, H5D_ACS_EFILE_PREFIX_NAME) == 0)
+ else if(HDstrcmp(prefix_type, H5D_ACS_EFILE_PREFIX_NAME) == 0)
prefix = HDgetenv("HDF5_EXTFILE_PREFIX");
else
HGOTO_ERROR(H5E_PLIST, H5E_BADTYPE, FAIL, "prefix name is not sensible")
@@ -1300,7 +1331,7 @@ done:
dataset->shared->extfile_prefix = (char *)H5MM_xfree(dataset->shared->extfile_prefix);
dataset->shared->vds_prefix = (char *)H5MM_xfree(dataset->shared->vds_prefix);
dataset->shared = H5FL_FREE(H5D_shared_t, dataset->shared);
- } /* end if */
+ }
H5O_loc_free(&(dataset->oloc));
H5G_name_free(&(dataset->path));
@@ -1561,36 +1592,6 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5D__close_cb
- *
- * Purpose: Callback routine for closing a dataset ID. Closes the dataset
- * object that was attached to the ID.
- *
- * Return: Non-negative on success/Negative on failure
- *
- *-------------------------------------------------------------------------
- */
-static herr_t
-H5D__close_cb(H5D_t *dataset)
-{
- herr_t ret_value = SUCCEED; /* Return value */
-
- FUNC_ENTER_STATIC
-
- /* check args */
- HDassert(dataset && dataset->oloc.file && dataset->shared);
- HDassert(dataset->shared->fo_count > 0);
-
- /* Call actual dataset close routine */
- if(H5D_close(dataset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCLOSEOBJ, FAIL, "can't close dataset");
-
-done:
- FUNC_LEAVE_NOAPI(ret_value)
-} /* end H5D__close_cb() */
-
-
-/*-------------------------------------------------------------------------
* Function: H5D_close
*
* Purpose: Insures that all data has been saved to the file, closes the
@@ -1726,8 +1727,7 @@ H5D_close(H5D_t *dataset)
if(H5AC_cork(dataset->oloc.file, dataset->oloc.addr, H5AC__UNCORK, NULL) < 0)
HDONE_ERROR(H5E_DATASET, H5E_CANTUNCORK, FAIL, "unable to uncork an object")
- /*
- * Release datatype, dataspace and creation property list -- there isn't
+ /* Release datatype, dataspace and creation property list -- there isn't
* much we can do if one of these fails, so we just continue.
*/
free_failed |= (H5I_dec_ref(dataset->shared->type_id) < 0) ||
@@ -1761,6 +1761,7 @@ H5D_close(H5D_t *dataset)
*/
dataset->oloc.file = NULL;
dataset->shared = H5FL_FREE(H5D_shared_t, dataset->shared);
+
} /* end if */
else {
/* Decrement the ref. count for this object in the top file */
@@ -1798,7 +1799,7 @@ done:
* Function: H5D_mult_refresh_close
*
* Purpose: Closing down the needed information when the dataset has
- * multiple opens. (From H5O_refresh_metadata_close())
+ * multiple opens. (From H5O__refresh_metadata_close())
*
* Return: Non-negative on success/Negative on failure
*-------------------------------------------------------------------------
@@ -2275,13 +2276,15 @@ done:
/*-------------------------------------------------------------------------
- * Function: H5D__get_offset
+ * Function: H5D__get_offset
+ *
+ * Purpose: Private function for H5D__get_offset. Returns the address
+ * of dataset in file.
*
- * Purpose: Private function for H5D__get_offset. Returns the address
- * of dataset in file.
+ * Return: Success: The address of dataset
+ *
+ * Failure: HADDR_UNDEF (but also a valid value)
*
- * Return: Success: the address of dataset
- * Failure: HADDR_UNDEF
*-------------------------------------------------------------------------
*/
haddr_t
@@ -2301,7 +2304,8 @@ H5D__get_offset(const H5D_t *dset)
case H5D_CONTIGUOUS:
/* If dataspace hasn't been allocated or dataset is stored in
- * an external file, the value will be HADDR_UNDEF. */
+ * an external file, the value will be HADDR_UNDEF.
+ */
if(dset->shared->dcpl_cache.efl.nused == 0 || H5F_addr_defined(dset->shared->layout.storage.u.contig.addr))
/* Return the absolute dataset offset from the beginning of file. */
ret_value = dset->shared->layout.storage.u.contig.addr + H5F_BASE_ADDR(dset->oloc.file);
@@ -2311,7 +2315,7 @@ H5D__get_offset(const H5D_t *dset)
case H5D_NLAYOUTS:
default:
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, HADDR_UNDEF, "unknown dataset layout type")
- } /*lint !e788 All appropriate cases are covered */
+ }
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -2419,6 +2423,8 @@ H5D__vlen_get_buf_size(void H5_ATTR_UNUSED *elem, hid_t type_id,
{
H5D_vlen_bufsize_t *vlen_bufsize = (H5D_vlen_bufsize_t *)op_data;
H5T_t *dt; /* Datatype for operation */
+ H5S_t *mspace; /* Memory dataspace for operation */
+ H5S_t *fspace; /* File dataspace for operation */
herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_PACKAGE
@@ -2434,12 +2440,18 @@ H5D__vlen_get_buf_size(void H5_ATTR_UNUSED *elem, hid_t type_id,
if(NULL == (vlen_bufsize->fl_tbuf = H5FL_BLK_REALLOC(vlen_fl_buf, vlen_bufsize->fl_tbuf, H5T_get_size(dt))))
HGOTO_ERROR(H5E_DATASET, H5E_NOSPACE, FAIL, "can't resize tbuf")
+ /* Get the memory dataspace from the ID */
+ if(NULL == (mspace = (H5S_t *)H5I_object_verify(vlen_bufsize->mspace_id, H5I_DATASPACE)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataspace")
+
/* Select point to read in */
- if(H5S_select_elements(vlen_bufsize->fspace, H5S_SELECT_SET, (size_t)1, point) < 0)
+ if(NULL == (fspace = (H5S_t *)H5I_object_verify(vlen_bufsize->fspace_id, H5I_DATASPACE)))
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataspace")
+ if(H5S_select_elements(fspace, H5S_SELECT_SET, (size_t)1, point) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTCREATE, FAIL, "can't select point")
/* Read in the point (with the custom VL memory allocator) */
- if(H5D__read(vlen_bufsize->dset, type_id, vlen_bufsize->mspace, vlen_bufsize->fspace, vlen_bufsize->fl_tbuf) < 0)
+ if(H5D__read(vlen_bufsize->dset, type_id, mspace, fspace, vlen_bufsize->fl_tbuf) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_READERROR, FAIL, "can't read point")
done:
@@ -2509,11 +2521,11 @@ done:
herr_t
H5D__set_extent(H5D_t *dset, const hsize_t *size)
{
- hsize_t curr_dims[H5S_MAX_RANK]; /* Current dimension sizes */
- htri_t changed; /* Whether the dataspace changed size */
- size_t u, v; /* Local index variable */
+ hsize_t curr_dims[H5S_MAX_RANK]; /* Current dimension sizes */
+ htri_t changed; /* Whether the dataspace changed size */
+ size_t u, v; /* Local index variable */
unsigned dim_idx; /* Dimension index */
- herr_t ret_value = SUCCEED; /* Return value */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_PACKAGE_TAG(dset->oloc.addr)
@@ -2649,7 +2661,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
if(H5D__alloc_storage(&io_info, H5D_ALLOC_EXTEND, FALSE, curr_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to extend dataset storage")
- } /* end if */
+ }
/*-------------------------------------------------------------------------
* Remove chunk information in the case of chunked datasets
* This removal takes place only in case we are shrinking the dateset
@@ -3255,7 +3267,7 @@ H5D_get_access_plist(const H5D_t *dset)
if(NULL == (old_plist = (H5P_genplist_t *)H5I_object(H5P_LST_DATASET_ACCESS_ID_g)))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a property list")
if((new_dapl_id = H5P_copy_plist(old_plist, TRUE)) < 0)
- HGOTO_ERROR(H5E_INTERNAL, H5E_CANTINIT, FAIL, "can't copy dataset access property list")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTINIT, FAIL, "can't copy dataset access property list")
if(NULL == (new_plist = (H5P_genplist_t *)H5I_object(new_dapl_id)))
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a property list")