Diffstat (limited to 'src/H5Dint.c')
-rw-r--r--    src/H5Dint.c    563
1 file changed, 283 insertions(+), 280 deletions(-)
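
The change below is mechanical: every HGOTO_ERROR(...) call site in H5Dint.c now ends with an
explicit semicolon, so each invocation reads as an ordinary C statement instead of relying on the
macro body to terminate itself. The sketch that follows illustrates why the trailing semicolon is
needed when such a macro is wrapped in do { ... } while (0). It is a simplified stand-in, not the
real HDF5 macro: the actual HGOTO_ERROR pushes the error onto the library's error stack, and
FAIL, ret_value, and the done label normally come from the surrounding FUNC_ENTER/FUNC_LEAVE
machinery; the definitions here are assumptions for illustration only.

/* Simplified stand-in for HDF5's HGOTO_ERROR; illustration only. */
#include <stdio.h>

#define FAIL (-1)

/* Because the macro body ends with "while (0)" and no semicolon, each call
 * site must supply its own ';', which is exactly what this diff adds
 * throughout H5Dint.c. */
#define HGOTO_ERROR(maj, min, ret_val, msg)                          \
    do {                                                             \
        fprintf(stderr, "%s/%s: %s\n", #maj, #min, msg);             \
        ret_value = (ret_val);                                       \
        goto done;                                                   \
    } while (0)

static int
example_init(int ok)
{
    int ret_value = 0;

    if (!ok)
        HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize interface");

done:
    return ret_value;
}

int
main(void)
{
    /* Expect FAIL (-1) on the error path, 0 otherwise. */
    return example_init(0) == FAIL ? 0 : 1;
}
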
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 301e803..e6a709e 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -164,7 +164,7 @@ H5D_init(void)
/* Initialize the ID group for the dataset IDs */
if (H5I_register_type(H5I_DATASET_CLS) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize interface")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize interface");
/* Reset the "default dataset" information */
memset(&H5D_def_dset, 0, sizeof(H5D_shared_t));
@@ -176,19 +176,19 @@ H5D_init(void)
* default dataset with them.
*/
if (NULL == (def_dcpl = (H5P_genplist_t *)H5I_object(H5P_LST_DATASET_CREATE_ID_g)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "can't get default dataset creation property list")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "can't get default dataset creation property list");
/* Get the default data storage layout */
if (H5P_get(def_dcpl, H5D_CRT_LAYOUT_NAME, &H5D_def_dset.layout) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve layout")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve layout");
/* Get the default dataset creation properties */
if (H5P_get(def_dcpl, H5D_CRT_EXT_FILE_LIST_NAME, &H5D_def_dset.dcpl_cache.efl) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve external file list")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve external file list");
if (H5P_get(def_dcpl, H5D_CRT_FILL_VALUE_NAME, &H5D_def_dset.dcpl_cache.fill) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve fill value")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve fill value");
if (H5P_get(def_dcpl, H5O_CRT_PIPELINE_NAME, &H5D_def_dset.dcpl_cache.pline) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve pipeline filter")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't retrieve pipeline filter");
/* Retrieve the prefixes of VDS and external file from the environment variable */
H5D_prefix_vds_env = HDgetenv("HDF5_VDS_PREFIX");
@@ -347,7 +347,7 @@ H5D__create_named(const H5G_loc_t *loc, const char *name, hid_t type_id, const H
/* Create the new dataset and link it to its parent group */
if (H5L_link_object(loc, name, &ocrt_info, lcpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to create and link to dataset")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to create and link to dataset");
assert(ocrt_info.new_obj);
/* Set the return value */
@@ -383,7 +383,7 @@ H5D__get_space_status(const H5D_t *dset, H5D_space_status_t *allocation)
if (H5D__get_num_chunks(dset, dset->shared->space, &n_chunks_alloc) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL,
- "unable to retrieve number of allocated chunks in dataset")
+ "unable to retrieve number of allocated chunks in dataset");
assert(n_chunks_alloc <= n_chunks_total);
@@ -427,7 +427,7 @@ H5D__new(hid_t dcpl_id, hid_t dapl_id, hbool_t creating, hbool_t vl_type)
/* Allocate new shared dataset structure */
if (NULL == (new_dset = H5FL_MALLOC(H5D_shared_t)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed");
/* Copy the default dataset information */
H5MM_memcpy(new_dset, &H5D_def_dset, sizeof(H5D_shared_t));
@@ -437,26 +437,26 @@ H5D__new(hid_t dcpl_id, hid_t dapl_id, hbool_t creating, hbool_t vl_type)
*/
if (!vl_type && creating && dcpl_id == H5P_DATASET_CREATE_DEFAULT) {
if (H5I_inc_ref(dcpl_id, FALSE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment default DCPL ID")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment default DCPL ID");
new_dset->dcpl_id = dcpl_id;
} /* end if */
else {
/* Get the property list */
if (NULL == (plist = (H5P_genplist_t *)H5I_object(dcpl_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "not a property list")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "not a property list");
new_dset->dcpl_id = H5P_copy_plist(plist, FALSE);
} /* end else */
if (!vl_type && creating && dapl_id == H5P_DATASET_ACCESS_DEFAULT) {
if (H5I_inc_ref(dapl_id, FALSE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment default DAPL ID")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment default DAPL ID");
new_dset->dapl_id = dapl_id;
} /* end if */
else {
/* Get the property list */
if (NULL == (plist = (H5P_genplist_t *)H5I_object(dapl_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "not a property list")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "not a property list");
new_dset->dapl_id = H5P_copy_plist(plist, FALSE);
} /* end else */
@@ -504,11 +504,11 @@ H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id, H5T_t *type)
/* Check whether the datatype is relocatable */
if ((relocatable = H5T_is_relocatable(type)) < 0)
- HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "can't check datatype?")
+ HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "can't check datatype?");
/* Check whether the datatype is immutable */
if ((immutable = H5T_is_immutable(type)) < 0)
- HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "can't check datatype?")
+ HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "can't check datatype?");
/* To use at least v18 format versions or not */
use_at_least_v18 = (H5F_LOW_BOUND(file) >= H5F_LIBVER_V18);
@@ -517,30 +517,30 @@ H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id, H5T_t *type)
if (!immutable || relocatable || use_at_least_v18) {
/* Copy datatype for dataset */
if ((dset->shared->type = H5T_copy(type, H5T_COPY_ALL)) == NULL)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy datatype");
/* Convert a datatype (if committed) to a transient type if the committed datatype's file
* location is different from the file location where the dataset will be created.
*/
if (H5T_convert_committed_datatype(dset->shared->type, file) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get shared datatype info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get shared datatype info");
/* Mark any datatypes as being on disk now */
if (H5T_set_loc(dset->shared->type, H5F_VOL_OBJ(file), H5T_LOC_DISK) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't set datatype location")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't set datatype location");
/* Set the version for datatype */
if (H5T_set_version(file, dset->shared->type) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set version of datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set version of datatype");
/* Get a datatype ID for the dataset's datatype */
if ((dset->shared->type_id = H5I_register(H5I_DATATYPE, dset->shared->type, FALSE)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register type")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register type");
} /* end if */
/* Not a custom datatype, just use it directly */
else {
if (H5I_inc_ref(type_id, FALSE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, FAIL, "Can't increment datatype ID")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, FAIL, "Can't increment datatype ID");
/* Use existing datatype */
dset->shared->type_id = type_id;
@@ -575,7 +575,7 @@ H5D__cache_dataspace_info(const H5D_t *dset)
/* Cache info for dataset's dataspace */
if ((sndims = H5S_get_simple_extent_dims(dset->shared->space, dset->shared->curr_dims,
dset->shared->max_dims)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't cache dataspace dimensions")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't cache dataspace dimensions");
dset->shared->ndims = (unsigned)sndims;
/* Compute the initial 'power2up' values */
@@ -583,7 +583,7 @@ H5D__cache_dataspace_info(const H5D_t *dset)
hsize_t scaled_power2up; /* Scaled value, rounded to next power of 2 */
if (!(scaled_power2up = H5VM_power2up(dset->shared->curr_dims[u])))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2");
dset->shared->curr_power2up[u] = scaled_power2up;
}
@@ -615,19 +615,19 @@ H5D__init_space(H5F_t *file, const H5D_t *dset, const H5S_t *space)
/* Copy dataspace for dataset */
if (NULL == (dset->shared->space = H5S_copy(space, FALSE, TRUE)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy dataspace");
/* Cache the dataset's dataspace info */
if (H5D__cache_dataspace_info(dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info");
/* Set the version for dataspace */
if (H5S_set_version(file, dset->shared->space) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set latest version of datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set latest version of datatype");
/* Set the dataset's dataspace to 'all' selection */
if (H5S_select_all(dset->shared->space, TRUE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set all selection")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set all selection");
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -658,7 +658,7 @@ H5D__use_minimized_dset_headers(H5F_t *file, hbool_t *minimize)
/* Get the dataset object header minimize flag for this call */
if (H5CX_get_dset_min_ohdr_flag(minimize) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL,
- "can't get dataset object header minimize flag from API context")
+ "can't get dataset object header minimize flag from API context");
if (FALSE == *minimize)
*minimize = H5F_get_min_dset_ohdr(file);
@@ -702,25 +702,25 @@ H5D__calculate_minimum_header_size(H5F_t *file, H5D_t *dset, H5O_t *ohdr)
/* Datatype message size */
get_value = H5O_msg_size_oh(file, ohdr, H5O_DTYPE_ID, type, 0);
if (get_value == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "Can't get size of datatype message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "Can't get size of datatype message");
ret_value += get_value;
/* Shared Dataspace message size */
get_value = H5O_msg_size_oh(file, ohdr, H5O_SDSPACE_ID, dset->shared->space, 0);
if (get_value == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of dataspace message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of dataspace message");
ret_value += get_value;
/* "Layout" message size */
get_value = H5O_msg_size_oh(file, ohdr, H5O_LAYOUT_ID, &dset->shared->layout, 0);
if (get_value == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of layout message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of layout message");
ret_value += get_value;
/* Fill Value message size */
get_value = H5O_msg_size_oh(file, ohdr, H5O_FILL_NEW_ID, fill_prop, 0);
if (get_value == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of fill value message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of fill value message");
ret_value += get_value;
/* "Continuation" message size */
@@ -729,7 +729,7 @@ H5D__calculate_minimum_header_size(H5F_t *file, H5D_t *dset, H5O_t *ohdr)
*/
get_value = H5O_msg_size_oh(file, ohdr, H5O_CONT_ID, continuation, 0);
if (get_value == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of continuation message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of continuation message");
ret_value += get_value;
/* Fill Value (backwards compatibility) message size */
@@ -741,12 +741,12 @@ H5D__calculate_minimum_header_size(H5F_t *file, H5D_t *dset, H5O_t *ohdr)
H5MM_memcpy(&old_fill_prop, fill_prop, sizeof(old_fill_prop));
if (H5O_msg_reset_share(H5O_FILL_ID, &old_fill_prop) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't reset the copied fill property")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't reset the copied fill property");
get_value = H5O_msg_size_oh(file, ohdr, H5O_FILL_ID, &old_fill_prop, 0);
if (get_value == 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0,
- "can't get size of fill value (backwards compat) message")
+ "can't get size of fill value (backwards compat) message");
ret_value += get_value;
}
@@ -756,7 +756,7 @@ H5D__calculate_minimum_header_size(H5F_t *file, H5D_t *dset, H5O_t *ohdr)
if (pline->nused > 0) {
get_value = H5O_msg_size_oh(file, ohdr, H5O_PLINE_ID, pline, 0);
if (get_value == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of filter message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of filter message");
ret_value += get_value;
}
}
@@ -765,7 +765,7 @@ H5D__calculate_minimum_header_size(H5F_t *file, H5D_t *dset, H5O_t *ohdr)
if (dset->shared->dcpl_cache.efl.nused > 0) {
get_value = H5O_msg_size_oh(file, ohdr, H5O_EFL_ID, &dset->shared->dcpl_cache.efl, 0);
if (get_value == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of external file link message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of external file link message");
ret_value += get_value;
}
@@ -778,7 +778,7 @@ H5D__calculate_minimum_header_size(H5F_t *file, H5D_t *dset, H5O_t *ohdr)
time_t mtime;
get_value = H5O_msg_size_oh(file, ohdr, H5O_MTIME_NEW_ID, &mtime, 0);
if (get_value == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of modification time message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, 0, "can't get size of modification time message");
ret_value += get_value;
}
}
@@ -813,15 +813,15 @@ H5D__prepare_minimized_oh(H5F_t *file, H5D_t *dset, H5O_loc_t *oloc)
oh = H5O_create_ohdr(file, dset->shared->dcpl_id);
if (NULL == oh)
- HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "can't instantiate object header")
+ HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "can't instantiate object header");
ohdr_size = H5D__calculate_minimum_header_size(file, dset, oh);
if (ohdr_size == 0)
- HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "computed header size is invalid")
+ HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "computed header size is invalid");
/* Special allocation of space for compact datasets is handled by the call here. */
if (H5O_apply_ohdr(file, oh, dset->shared->dcpl_id, ohdr_size, (size_t)1, oloc) == FAIL)
- HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "can't apply object header to file")
+ HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "can't apply object header to file");
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -869,7 +869,7 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
/* Retrieve "defined" status of fill value */
if (H5P_is_fill_value_defined(fill_prop, &fill_status) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't tell if fill value defined")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't tell if fill value defined");
/* Special case handling for variable-length types */
if (H5T_detect_class(type, H5T_VLEN, FALSE)) {
@@ -885,26 +885,26 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
/* Don't allow never writing fill values with variable-length types */
if (fill_prop->fill_time == H5D_FILL_TIME_NEVER)
HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL,
- "Dataset doesn't support VL datatype when fill value is not defined")
+ "Dataset doesn't support VL datatype when fill value is not defined");
} /* end if */
/* Determine whether fill value is defined or not */
if (fill_status == H5D_FILL_VALUE_DEFAULT || fill_status == H5D_FILL_VALUE_USER_DEFINED) {
/* Convert fill value buffer to dataset's datatype */
if (fill_prop->buf && fill_prop->size > 0 && H5O_fill_convert(fill_prop, type, &fill_changed) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to convert fill value to dataset type")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to convert fill value to dataset type");
fill_prop->fill_defined = TRUE;
}
else if (fill_status == H5D_FILL_VALUE_UNDEFINED)
fill_prop->fill_defined = FALSE;
else
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to determine if fill value is defined")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to determine if fill value is defined");
/* Check for invalid fill & allocation time setting */
if (fill_prop->fill_defined == FALSE && fill_prop->fill_time == H5D_FILL_TIME_ALLOC)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL,
- "fill value writing on allocation set, but no fill value defined")
+ "fill value writing on allocation set, but no fill value defined");
/* Check if the fill value info changed */
if (fill_changed) {
@@ -913,19 +913,19 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
/* Get dataset's property list object */
assert(dset->shared->dcpl_id != H5P_DATASET_CREATE_DEFAULT);
if (NULL == (dc_plist = (H5P_genplist_t *)H5I_object(dset->shared->dcpl_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get dataset creation property list")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get dataset creation property list");
/* Update dataset creation property */
if (H5P_set(dc_plist, H5D_CRT_FILL_VALUE_NAME, fill_prop) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set fill value info")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set fill value info");
} /* end if */
if (H5D__use_minimized_dset_headers(file, &use_minimized_header) == FAIL)
- HGOTO_ERROR(H5E_ARGS, H5E_CANTGET, FAIL, "can't get minimize settings")
+ HGOTO_ERROR(H5E_ARGS, H5E_CANTGET, FAIL, "can't get minimize settings");
if (TRUE == use_minimized_header) {
if (H5D__prepare_minimized_oh(file, dset, oloc) == FAIL)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't create minimized dataset object header")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't create minimized dataset object header");
} /* end if */
else {
/* Add the dataset's raw data size to the size of the header, if the
@@ -936,26 +936,26 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
/* Create an object header for the dataset */
if (H5O_create(file, ohdr_size, (size_t)1, dset->shared->dcpl_id, oloc /*out*/) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to create dataset object header")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to create dataset object header");
} /* if using default/minimized object headers */
assert(file == dset->oloc.file);
/* Pin the object header */
if (NULL == (oh = H5O_pin(oloc)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTPIN, FAIL, "unable to pin dataset object header")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTPIN, FAIL, "unable to pin dataset object header");
/* Write the dataspace header message */
if (H5S_append(file, oh, dset->shared->space) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update dataspace header message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update dataspace header message");
/* Write the datatype header message */
if (H5O_msg_append_oh(file, oh, H5O_DTYPE_ID, H5O_MSG_FLAG_CONSTANT, 0, type) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update datatype header message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update datatype header message");
/* Write new fill value message */
if (H5O_msg_append_oh(file, oh, H5O_FILL_NEW_ID, H5O_MSG_FLAG_CONSTANT, 0, fill_prop) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update new fill value header message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update new fill value header message");
/* If there is valid information for the old fill value struct, add it */
/* (only if we aren't using v18 format versions and above */
@@ -971,12 +971,12 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
/* Write old fill value */
if (H5O_msg_append_oh(file, oh, H5O_FILL_ID, H5O_MSG_FLAG_CONSTANT, 0, &old_fill_prop) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update old fill value header message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update old fill value header message");
} /* end if */
/* Update/create the layout (and I/O pipeline & EFL) messages */
if (H5D__layout_oh_create(file, oh, dset, dapl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update layout/pline/efl header message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update layout/pline/efl header message");
/* Indicate that the layout information was initialized */
layout_init = TRUE;
@@ -987,7 +987,7 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
/* Get dataset's property list object */
if (NULL == (dc_plist = (H5P_genplist_t *)H5I_object(dset->shared->dcpl_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get dataset creation property list")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get dataset creation property list");
/* Check whether to add a "bogus" message */
if ((H5P_exist_plist(dc_plist, H5O_BOGUS_MSG_FLAGS_NAME) > 0) &&
@@ -998,14 +998,14 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
/* Retrieve "bogus" message ID */
if (H5P_get(dc_plist, H5O_BOGUS_MSG_ID_NAME, &bogus_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get bogus ID options")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get bogus ID options");
/* Retrieve "bogus" message flags */
if (H5P_get(dc_plist, H5O_BOGUS_MSG_FLAGS_NAME, &bogus_flags) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get bogus message options")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get bogus message options");
/* Add a "bogus" message (for error testing). */
if (H5O_bogus_oh(file, oh, bogus_id, (unsigned)bogus_flags) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to create 'bogus' message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to create 'bogus' message");
} /* end if */
}
#endif /* H5O_ENABLE_BOGUS */
@@ -1016,7 +1016,7 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
*/
if (!use_at_least_v18)
if (H5O_touch_oh(file, oh, TRUE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update modification time message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update modification time message");
done:
/* Release pointer to object header itself */
@@ -1070,7 +1070,7 @@ H5D__build_file_prefix(const H5D_t *dset, H5F_prefix_open_t prefix_type, char **
if (prefix == NULL || *prefix == '\0') {
if (H5CX_get_vds_prefix(&prefix) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get the prefix for vds file")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get the prefix for vds file");
}
}
else if (H5F_PREFIX_EFILE == prefix_type) {
@@ -1078,11 +1078,11 @@ H5D__build_file_prefix(const H5D_t *dset, H5F_prefix_open_t prefix_type, char **
if (prefix == NULL || *prefix == '\0') {
if (H5CX_get_ext_file_prefix(&prefix) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get the prefix for the external file")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get the prefix for the external file");
}
}
else
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "prefix name is not sensible")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "prefix name is not sensible");
/* Prefix has to be checked for NULL / empty string again because the
* code above might have updated it.
@@ -1101,12 +1101,12 @@ H5D__build_file_prefix(const H5D_t *dset, H5F_prefix_open_t prefix_type, char **
file_prefix_len = filepath_len + prefix_len - HDstrlen("${ORIGIN}") + 1;
if (NULL == (*file_prefix = (char *)H5MM_malloc(file_prefix_len)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "unable to allocate buffer")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "unable to allocate buffer");
HDsnprintf(*file_prefix, file_prefix_len, "%s%s", filepath, prefix + HDstrlen("${ORIGIN}"));
} /* end if */
else {
if (NULL == (*file_prefix = (char *)H5MM_strdup(prefix)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed");
} /* end else */
} /* end else */
@@ -1155,14 +1155,14 @@ H5D__create(H5F_t *file, hid_t type_id, const H5S_t *space, hid_t dcpl_id, hid_t
/* Get the dataset's datatype */
if (NULL == (dt = (H5T_t *)H5I_object(type_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "not a datatype")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "not a datatype");
/* If this is a named datatype, get the pointer via the VOL plugin */
type = H5T_get_actual_type(dt);
/* Check if the datatype is "sensible" for use in a dataset */
if (H5T_is_sensible(type) != TRUE)
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "datatype is not sensible")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "datatype is not sensible");
/* Check if the datatype is/contains a VL-type */
if (H5T_detect_class(type, H5T_VLEN, FALSE))
@@ -1170,11 +1170,11 @@ H5D__create(H5F_t *file, hid_t type_id, const H5S_t *space, hid_t dcpl_id, hid_t
/* Check if the dataspace has an extent set (or is NULL) */
if (!H5S_has_extent(space))
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, NULL, "dataspace extent has not been set.")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, NULL, "dataspace extent has not been set.");
/* Initialize the dataset object */
if (NULL == (new_dset = H5FL_CALLOC(H5D_t)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed");
/* Set up & reset dataset location */
dset_loc.oloc = &(new_dset->oloc);
@@ -1183,15 +1183,15 @@ H5D__create(H5F_t *file, hid_t type_id, const H5S_t *space, hid_t dcpl_id, hid_t
/* Initialize the shared dataset space */
if (NULL == (new_dset->shared = H5D__new(dcpl_id, dapl_id, TRUE, has_vl_type)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed");
/* Copy & initialize datatype for dataset */
if (H5D__init_type(file, new_dset, type_id, type) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't copy datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't copy datatype");
/* Copy & initialize dataspace for dataset */
if (H5D__init_space(file, new_dset, space) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't copy dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't copy dataspace");
/* Set the dataset's checked_filters flag to enable writing */
new_dset->shared->checked_filters = TRUE;
@@ -1205,72 +1205,72 @@ H5D__create(H5F_t *file, hid_t type_id, const H5S_t *space, hid_t dcpl_id, hid_t
htri_t ignore_filters = FALSE; /* Ignore optional filters or not */
if ((ignore_filters = H5Z_ignore_filters(new_dset->shared->dcpl_id, dt, space)) < 0)
- HGOTO_ERROR(H5E_ARGS, H5E_CANTINIT, NULL, "H5Z_has_optional_filter() failed")
+ HGOTO_ERROR(H5E_ARGS, H5E_CANTINIT, NULL, "H5Z_has_optional_filter() failed");
if (FALSE == ignore_filters) {
/* Check if the filters in the DCPL can be applied to this dataset */
if (H5Z_can_apply(new_dset->shared->dcpl_id, new_dset->shared->type_id) < 0)
- HGOTO_ERROR(H5E_ARGS, H5E_CANTINIT, NULL, "I/O filters can't operate on this dataset")
+ HGOTO_ERROR(H5E_ARGS, H5E_CANTINIT, NULL, "I/O filters can't operate on this dataset");
/* Make the "set local" filter callbacks for this dataset */
if (H5Z_set_local(new_dset->shared->dcpl_id, new_dset->shared->type_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to set local filter parameters")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to set local filter parameters");
} /* ignore_filters */
/* Get new dataset's property list object */
if (NULL == (dc_plist = (H5P_genplist_t *)H5I_object(new_dset->shared->dcpl_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "can't get dataset creation property list")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, NULL, "can't get dataset creation property list");
/* Retrieve the properties we need */
pline = &new_dset->shared->dcpl_cache.pline;
if (H5P_get(dc_plist, H5O_CRT_PIPELINE_NAME, pline) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't retrieve pipeline filter")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't retrieve pipeline filter");
pline_copied = TRUE;
layout = &new_dset->shared->layout;
if (H5P_get(dc_plist, H5D_CRT_LAYOUT_NAME, layout) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't retrieve layout")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't retrieve layout");
layout_copied = TRUE;
fill = &new_dset->shared->dcpl_cache.fill;
if (H5P_get(dc_plist, H5D_CRT_FILL_VALUE_NAME, fill) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't retrieve fill value info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't retrieve fill value info");
fill_copied = TRUE;
efl = &new_dset->shared->dcpl_cache.efl;
if (H5P_get(dc_plist, H5D_CRT_EXT_FILE_LIST_NAME, efl) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't retrieve external file list")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't retrieve external file list");
efl_copied = TRUE;
if (FALSE == ignore_filters) {
/* Check that chunked layout is used if filters are enabled */
if (pline->nused > 0 && H5D_CHUNKED != layout->type)
- HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, NULL, "filters can only be used with chunked layout")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, NULL, "filters can only be used with chunked layout");
}
/* Check if the alloc_time is the default and error out */
if (fill->alloc_time == H5D_ALLOC_TIME_DEFAULT)
- HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, NULL, "invalid space allocation state")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, NULL, "invalid space allocation state");
/* Don't allow compact datasets to allocate space later */
if (layout->type == H5D_COMPACT && fill->alloc_time != H5D_ALLOC_TIME_EARLY)
- HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, NULL, "compact dataset must have early space allocation")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, NULL, "compact dataset must have early space allocation");
} /* end if */
/* Set the version for the I/O pipeline message */
if (H5O_pline_set_version(file, &new_dset->shared->dcpl_cache.pline) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, NULL, "can't set latest version of I/O filter pipeline")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, NULL, "can't set latest version of I/O filter pipeline");
/* Set the version for the fill message */
if (H5O_fill_set_version(file, &new_dset->shared->dcpl_cache.fill) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, NULL, "can't set latest version of fill value")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, NULL, "can't set latest version of fill value");
/* Set the latest version for the layout message */
if (H5D__layout_set_version(file, &new_dset->shared->layout) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, NULL, "can't set latest version of layout")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, NULL, "can't set latest version of layout");
if (new_dset->shared->layout.version >= H5O_LAYOUT_VERSION_4) {
/* Use latest indexing type for layout message version >= 4 */
if (H5D__layout_set_latest_indexing(&new_dset->shared->layout, new_dset->shared->space,
&new_dset->shared->dcpl_cache) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, NULL, "can't set latest indexing")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, NULL, "can't set latest indexing");
} /* end if */
/* Check if the file driver would like to force early space allocation */
@@ -1288,37 +1288,37 @@ H5D__create(H5F_t *file, hid_t type_id, const H5S_t *space, hid_t dcpl_id, hid_t
/* Set the dataset's I/O operations */
if (H5D__layout_set_io_ops(new_dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize I/O operations")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize I/O operations");
/* Create the layout information for the new dataset */
if (new_dset->shared->layout.ops->construct &&
(new_dset->shared->layout.ops->construct)(file, new_dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to construct layout information")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to construct layout information");
/* Update the dataset's object header info. */
if (H5D__update_oh_info(file, new_dset, new_dset->shared->dapl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't update the metadata cache")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't update the metadata cache");
/* Indicate that the layout information was initialized */
layout_init = TRUE;
/* Set up append flush parameters for the dataset */
if (H5D__append_flush_setup(new_dset, new_dset->shared->dapl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to set up flush append property")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to set up flush append property");
/* Set the external file prefix */
if (H5D__build_file_prefix(new_dset, H5F_PREFIX_EFILE, &new_dset->shared->extfile_prefix) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize external file prefix")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize external file prefix");
/* Set the VDS file prefix */
if (H5D__build_file_prefix(new_dset, H5F_PREFIX_VDS, &new_dset->shared->vds_prefix) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize VDS prefix")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize VDS prefix");
/* Add the dataset to the list of opened objects in the file */
if (H5FO_top_incr(new_dset->oloc.file, new_dset->oloc.addr) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't incr object ref. count")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't incr object ref. count");
if (H5FO_insert(new_dset->oloc.file, new_dset->oloc.addr, new_dset->shared, TRUE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINSERT, NULL, "can't insert dataset into list of open objects")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINSERT, NULL, "can't insert dataset into list of open objects");
new_dset->shared->fo_count = 1;
/* Success */
@@ -1415,18 +1415,18 @@ H5D__open_name(const H5G_loc_t *loc, const char *name, hid_t dapl_id)
/* Find the dataset object */
if (H5G_loc_find(loc, name, &dset_loc) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_NOTFOUND, NULL, "not found")
+ HGOTO_ERROR(H5E_DATASET, H5E_NOTFOUND, NULL, "not found");
loc_found = TRUE;
/* Check that the object found is the correct type */
if (H5O_obj_type(&oloc, &obj_type) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't get object type")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, NULL, "can't get object type");
if (obj_type != H5O_TYPE_DATASET)
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, NULL, "not a dataset")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, NULL, "not a dataset");
/* Open the dataset */
if (NULL == (dset = H5D_open(&dset_loc, dapl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't open dataset")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "can't open dataset");
/* Set return value */
ret_value = dset;
@@ -1466,23 +1466,23 @@ H5D_open(const H5G_loc_t *loc, hid_t dapl_id)
/* Allocate the dataset structure */
if (NULL == (dataset = H5FL_CALLOC(H5D_t)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, NULL, "memory allocation failed");
/* Shallow copy (take ownership) of the object location object */
if (H5O_loc_copy_shallow(&(dataset->oloc), loc->oloc) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, NULL, "can't copy object location")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, NULL, "can't copy object location");
/* Shallow copy (take ownership) of the group hier. path */
if (H5G_name_copy(&(dataset->path), loc->path, H5_COPY_SHALLOW) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, NULL, "can't copy path")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, NULL, "can't copy path");
/* Get the external file prefix */
if (H5D__build_file_prefix(dataset, H5F_PREFIX_EFILE, &extfile_prefix) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize external file prefix")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize external file prefix");
/* Get the VDS prefix */
if (H5D__build_file_prefix(dataset, H5F_PREFIX_VDS, &vds_prefix) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize VDS prefix")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL, "unable to initialize VDS prefix");
/* Check if dataset was already open */
if (NULL == (shared_fo = (H5D_shared_t *)H5FO_opened(dataset->oloc.file, dataset->oloc.addr))) {
@@ -1491,15 +1491,15 @@ H5D_open(const H5G_loc_t *loc, hid_t dapl_id)
/* Open the dataset object */
if (H5D__open_oid(dataset, dapl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_NOTFOUND, NULL, "not found")
+ HGOTO_ERROR(H5E_DATASET, H5E_NOTFOUND, NULL, "not found");
/* Add the dataset to the list of opened objects in the file */
if (H5FO_insert(dataset->oloc.file, dataset->oloc.addr, dataset->shared, FALSE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINSERT, NULL, "can't insert dataset into list of open objects")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINSERT, NULL, "can't insert dataset into list of open objects");
/* Increment object count for the object in the top file */
if (H5FO_top_incr(dataset->oloc.file, dataset->oloc.addr) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment object count")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment object count");
/* We're the first dataset to use the shared info */
dataset->shared->fo_count = 1;
@@ -1529,25 +1529,25 @@ H5D_open(const H5G_loc_t *loc, hid_t dapl_id)
if (HDstrcmp(extfile_prefix, dataset->shared->extfile_prefix) != 0)
HGOTO_ERROR(
H5E_DATASET, H5E_CANTOPENOBJ, NULL,
- "new external file prefix does not match external file prefix of already open dataset")
+ "new external file prefix does not match external file prefix of already open dataset");
}
else {
if (extfile_prefix || dataset->shared->extfile_prefix)
HGOTO_ERROR(
H5E_DATASET, H5E_CANTOPENOBJ, NULL,
- "new external file prefix does not match external file prefix of already open dataset")
+ "new external file prefix does not match external file prefix of already open dataset");
}
/* Check if the object has been opened through the top file yet */
if (H5FO_top_count(dataset->oloc.file, dataset->oloc.addr) == 0) {
/* Open the object through this top file */
if (H5O_open(&(dataset->oloc)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, NULL, "unable to open object header")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, NULL, "unable to open object header");
} /* end if */
/* Increment object count for the object in the top file */
if (H5FO_top_incr(dataset->oloc.file, dataset->oloc.addr) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment object count")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, NULL, "can't increment object count");
} /* end else */
/* Set the dataset to return */
@@ -1615,7 +1615,7 @@ H5D__append_flush_setup(H5D_t *dset, hid_t dapl_id)
/* Get append flush property */
if (H5P_get(dapl, H5D_ACS_APPEND_FLUSH_NAME, &info) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't get append flush info")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't get append flush info");
if (info.ndims > 0) {
hsize_t curr_dims[H5S_MAX_RANK]; /* current dimension sizes */
hsize_t max_dims[H5S_MAX_RANK]; /* current dimension sizes */
@@ -1624,10 +1624,10 @@ H5D__append_flush_setup(H5D_t *dset, hid_t dapl_id)
/* Get dataset rank */
if ((rank = H5S_get_simple_extent_dims(dset->shared->space, curr_dims, max_dims)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions");
if (info.ndims != (unsigned)rank)
HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL,
- "boundary dimension rank does not match dataset rank")
+ "boundary dimension rank does not match dataset rank");
/* Validate boundary sizes */
for (u = 0; u < info.ndims; u++)
@@ -1638,7 +1638,7 @@ H5D__append_flush_setup(H5D_t *dset, hid_t dapl_id)
/* At least one boundary dimension is not extendible */
if (u != info.ndims)
- HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "boundary dimension is not valid")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "boundary dimension is not valid");
/* Copy append flush settings */
dset->shared->append_flush.ndims = info.ndims;
@@ -1678,37 +1678,37 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id)
/* (Set the 'vl_type' parameter to FALSE since it doesn't matter from here) */
if (NULL == (dataset->shared = H5D__new(H5P_DATASET_CREATE_DEFAULT, dapl_id, FALSE, FALSE)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed");
/* Open the dataset object */
if (H5O_open(&(dataset->oloc)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, FAIL, "unable to open")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTOPENOBJ, FAIL, "unable to open");
/* Get the type and space */
if (NULL == (dataset->shared->type = (H5T_t *)H5O_msg_read(&(dataset->oloc), H5O_DTYPE_ID, NULL)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to load type info from dataset header")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to load type info from dataset header");
if (H5T_set_loc(dataset->shared->type, H5F_VOL_OBJ(dataset->oloc.file), H5T_LOC_DISK) < 0)
- HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "invalid datatype location")
+ HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "invalid datatype location");
if (NULL == (dataset->shared->space = H5S_read(&(dataset->oloc))))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to load dataspace info from dataset header")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to load dataspace info from dataset header");
/* Cache the dataset's dataspace info */
if (H5D__cache_dataspace_info(dataset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info");
/* Get a datatype ID for the dataset's datatype */
if ((dataset->shared->type_id = H5I_register(H5I_DATATYPE, dataset->shared->type, FALSE)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register type")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register type");
/* Get dataset creation property list object */
if (NULL == (plist = (H5P_genplist_t *)H5I_object(dataset->shared->dcpl_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get dataset creation property list")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "can't get dataset creation property list");
/* Get the layout/pline/efl message information */
if (H5D__layout_oh_read(dataset, dapl_id, plist) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get layout/pline/efl info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get layout/pline/efl info");
/* Indicate that the layout information was initialized */
layout_init = TRUE;
@@ -1730,7 +1730,8 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id)
assert(H5D_COMPACT == dataset->shared->layout.storage.type);
if ((dset_nelemts = H5S_GET_EXTENT_NPOINTS(dataset->shared->space)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get number of elements in dataset's dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL,
+ "can't get number of elements in dataset's dataspace");
dset_data_size = (size_t)dset_nelemts * dset_type_size;
@@ -1742,25 +1743,25 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id)
/* Set up flush append property */
if (H5D__append_flush_setup(dataset, dapl_id))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set up flush append property")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set up flush append property");
/* Point at dataset's copy, to cache it for later */
fill_prop = &dataset->shared->dcpl_cache.fill;
/* Try to get the new fill value message from the object header */
if ((msg_exists = H5O_msg_exists(&(dataset->oloc), H5O_FILL_NEW_ID)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't check if message exists")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't check if message exists");
if (msg_exists) {
if (NULL == H5O_msg_read(&(dataset->oloc), H5O_FILL_NEW_ID, fill_prop))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't retrieve message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't retrieve message");
} /* end if */
else {
/* For backward compatibility, try to retrieve the old fill value message */
if ((msg_exists = H5O_msg_exists(&(dataset->oloc), H5O_FILL_ID)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't check if message exists")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't check if message exists");
if (msg_exists) {
if (NULL == H5O_msg_read(&(dataset->oloc), H5O_FILL_ID, fill_prop))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't retrieve message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't retrieve message");
} /* end if */
else {
/* Set the space allocation time appropriately, based on the type of dataset storage */
@@ -1784,7 +1785,7 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id)
case H5D_LAYOUT_ERROR:
case H5D_NLAYOUTS:
default:
- HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "not implemented yet")
+ HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "not implemented yet");
} /* end switch */ /*lint !e788 All appropriate cases are covered */
} /* end else */
@@ -1802,9 +1803,9 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id)
/* Set revised fill value properties, if they are different from the defaults */
if (H5P_fill_value_cmp(&H5D_def_dset.dcpl_cache.fill, fill_prop, sizeof(H5O_fill_t))) {
if (H5P_set(plist, H5D_CRT_FILL_VALUE_NAME, fill_prop) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set fill value")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set fill value");
if (H5P_set(plist, H5D_CRT_ALLOC_TIME_STATE_NAME, &alloc_time_state) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set allocation time state")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set allocation time state");
} /* end if */
/*
@@ -1816,7 +1817,7 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id)
!(*dataset->shared->layout.ops->is_space_alloc)(&dataset->shared->layout.storage) &&
H5F_HAS_FEATURE(dataset->oloc.file, H5FD_FEAT_ALLOCATE_EARLY))
if (H5D__alloc_storage(dataset, H5D_ALLOC_OPEN, FALSE, NULL) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize file storage")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize file storage");
done:
if (ret_value < 0) {
@@ -1957,7 +1958,7 @@ H5D_close(H5D_t *dataset)
default:
assert("not implemented yet" && 0);
#ifdef NDEBUG
- HGOTO_ERROR(H5E_IO, H5E_UNSUPPORTED, FAIL, "unsupported storage layout")
+ HGOTO_ERROR(H5E_IO, H5E_UNSUPPORTED, FAIL, "unsupported storage layout");
#endif /* NDEBUG */
} /* end switch */ /*lint !e788 All appropriate cases are covered */
@@ -2024,17 +2025,17 @@ H5D_close(H5D_t *dataset)
else {
/* Decrement the ref. count for this object in the top file */
if (H5FO_top_decr(dataset->oloc.file, dataset->oloc.addr) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "can't decrement count for object")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "can't decrement count for object");
/* Check reference count for this object in the top file */
if (H5FO_top_count(dataset->oloc.file, dataset->oloc.addr) == 0) {
if (H5O_close(&(dataset->oloc), NULL) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to close")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to close");
} /* end if */
else
/* Free object location (i.e. "unhold" the file if appropriate) */
if (H5O_loc_free(&(dataset->oloc)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "problem attempting to free location")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "problem attempting to free location");
} /* end else */
/* Release the dataset's path info */
@@ -2047,7 +2048,7 @@ H5D_close(H5D_t *dataset)
/* Check if anything failed in the middle... */
if (free_failed)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL,
- "couldn't free a component of the dataset, but the dataset was freed anyway.")
+ "couldn't free a component of the dataset, but the dataset was freed anyway.");
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -2071,7 +2072,7 @@ H5D_mult_refresh_close(hid_t dset_id)
FUNC_ENTER_NOAPI(FAIL)
if (NULL == (dataset = (H5D_t *)H5VL_object_verify(dset_id, H5I_DATASET)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset");
/* check args */
assert(dataset);
@@ -2121,13 +2122,13 @@ H5D_mult_refresh_close(hid_t dset_id)
default:
assert("not implemented yet" && 0);
#ifdef NDEBUG
- HGOTO_ERROR(H5E_IO, H5E_UNSUPPORTED, FAIL, "unsupported storage layout")
+ HGOTO_ERROR(H5E_IO, H5E_UNSUPPORTED, FAIL, "unsupported storage layout");
#endif /* NDEBUG */
} /* end switch */ /*lint !e788 All appropriate cases are covered */
/* Destroy any cached layout information for the dataset */
if (dataset->shared->layout.ops->dest && (dataset->shared->layout.ops->dest)(dataset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "unable to destroy layout info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "unable to destroy layout info");
} /* end if */
done:
@@ -2157,23 +2158,23 @@ H5D_mult_refresh_reopen(H5D_t *dataset)
if (dataset->shared->fo_count > 1) {
/* Release dataspace info */
if (H5S_close(dataset->shared->space) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "unable to release dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTRELEASE, FAIL, "unable to release dataspace");
/* Re-load dataspace info */
if (NULL == (dataset->shared->space = H5S_read(&(dataset->oloc))))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to load dataspace info from dataset header")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to load dataspace info from dataset header");
/* Cache the dataset's dataspace info */
if (H5D__cache_dataspace_info(dataset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info");
/* Release layout info */
if (H5O_msg_reset(H5O_LAYOUT_ID, &dataset->shared->layout) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTRESET, FAIL, "unable to reset layout info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTRESET, FAIL, "unable to reset layout info");
/* Re-load layout message info */
if (NULL == H5O_msg_read(&(dataset->oloc), H5O_LAYOUT_ID, &(dataset->shared->layout)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to read data layout message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to read data layout message");
} /* end if */
done:
@@ -2255,7 +2256,8 @@ H5D__alloc_storage(H5D_t *dset, H5D_time_alloc_t time_alloc, hbool_t full_overwr
if (layout->storage.u.contig.size > 0) {
/* Reserve space in the file for the entire array */
if (H5D__contig_alloc(f, &layout->storage.u.contig /*out*/) < 0)
- HGOTO_ERROR(H5E_IO, H5E_CANTINIT, FAIL, "unable to initialize contiguous storage")
+ HGOTO_ERROR(H5E_IO, H5E_CANTINIT, FAIL,
+ "unable to initialize contiguous storage");
/* Indicate that we should initialize storage space */
must_init_space = TRUE;
@@ -2272,7 +2274,7 @@ H5D__alloc_storage(H5D_t *dset, H5D_time_alloc_t time_alloc, hbool_t full_overwr
if (!(*dset->shared->layout.ops->is_space_alloc)(&dset->shared->layout.storage)) {
/* Create the root of the index that manages chunked storage */
if (H5D__chunk_create(dset /*in,out*/) < 0)
- HGOTO_ERROR(H5E_IO, H5E_CANTINIT, FAIL, "unable to initialize chunked storage")
+ HGOTO_ERROR(H5E_IO, H5E_CANTINIT, FAIL, "unable to initialize chunked storage");
/* Indicate that we set the storage addr */
addr_set = TRUE;
@@ -2301,7 +2303,7 @@ H5D__alloc_storage(H5D_t *dset, H5D_time_alloc_t time_alloc, hbool_t full_overwr
if (NULL ==
(layout->storage.u.compact.buf = H5MM_malloc(layout->storage.u.compact.size)))
HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL,
- "unable to allocate memory for compact dataset")
+ "unable to allocate memory for compact dataset");
if (!full_overwrite)
memset(layout->storage.u.compact.buf, 0, layout->storage.u.compact.size);
layout->storage.u.compact.dirty = TRUE;
@@ -2330,7 +2332,7 @@ H5D__alloc_storage(H5D_t *dset, H5D_time_alloc_t time_alloc, hbool_t full_overwr
default:
assert("not implemented yet" && 0);
#ifdef NDEBUG
- HGOTO_ERROR(H5E_IO, H5E_UNSUPPORTED, FAIL, "unsupported storage layout")
+ HGOTO_ERROR(H5E_IO, H5E_UNSUPPORTED, FAIL, "unsupported storage layout");
#endif /* NDEBUG */
} /* end switch */ /*lint !e788 All appropriate cases are covered */
@@ -2350,14 +2352,14 @@ H5D__alloc_storage(H5D_t *dset, H5D_time_alloc_t time_alloc, hbool_t full_overwr
time_alloc == H5D_ALLOC_WRITE))
if (H5D__init_storage(dset, full_overwrite, old_dim) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL,
- "unable to initialize dataset with fill value")
+ "unable to initialize dataset with fill value");
} /* end if */
else {
H5D_fill_value_t fill_status; /* The fill value status */
/* Check the dataset's fill-value status */
if (H5P_is_fill_value_defined(&dset->shared->dcpl_cache.fill, &fill_status) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't tell if fill value defined")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't tell if fill value defined");
/* If we are filling the dataset on allocation or "if set" and
* the fill value _is_ set, do that now */
@@ -2366,7 +2368,7 @@ H5D__alloc_storage(H5D_t *dset, H5D_time_alloc_t time_alloc, hbool_t full_overwr
fill_status == H5D_FILL_VALUE_USER_DEFINED))
if (H5D__init_storage(dset, full_overwrite, old_dim) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL,
- "unable to initialize dataset with fill value")
+ "unable to initialize dataset with fill value");
} /* end else */
} /* end if */
@@ -2380,7 +2382,7 @@ H5D__alloc_storage(H5D_t *dset, H5D_time_alloc_t time_alloc, hbool_t full_overwr
if (time_alloc != H5D_ALLOC_CREATE && addr_set)
/* Mark the layout as dirty, for later writing to the file */
if (H5D__mark(dset, H5D_MARK_LAYOUT) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to mark dataspace as dirty")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to mark dataspace as dirty");
} /* end if */
done:
@@ -2413,7 +2415,7 @@ H5D__init_storage(H5D_t *dset, hbool_t full_overwrite, hsize_t old_dim[])
/* Fill the compact dataset storage */
if (H5D__compact_fill(dset) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL,
- "unable to initialize compact dataset storage")
+ "unable to initialize compact dataset storage");
} /* end if */
break;
@@ -2423,7 +2425,7 @@ H5D__init_storage(H5D_t *dset, hbool_t full_overwrite, hsize_t old_dim[])
if ((dset->shared->dcpl_cache.efl.nused == 0 || dset->shared->dcpl_cache.fill.buf) &&
!full_overwrite)
if (H5D__contig_fill(dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to allocate all chunks of dataset")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to allocate all chunks of dataset");
break;
case H5D_CHUNKED:
@@ -2439,7 +2441,7 @@ H5D__init_storage(H5D_t *dset, hbool_t full_overwrite, hsize_t old_dim[])
old_dim = zero_dim;
if (H5D__chunk_allocate(dset, full_overwrite, old_dim) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to allocate all chunks of dataset")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to allocate all chunks of dataset");
break;
} /* end block */
@@ -2451,7 +2453,7 @@ H5D__init_storage(H5D_t *dset, hbool_t full_overwrite, hsize_t old_dim[])
default:
assert("not implemented yet" && 0);
#ifdef NDEBUG
- HGOTO_ERROR(H5E_IO, H5E_UNSUPPORTED, FAIL, "unsupported storage layout")
+ HGOTO_ERROR(H5E_IO, H5E_UNSUPPORTED, FAIL, "unsupported storage layout");
#endif /* NDEBUG */
} /* end switch */ /*lint !e788 All appropriate cases are covered */
@@ -2480,7 +2482,7 @@ H5D__get_storage_size(const H5D_t *dset, hsize_t *storage_size)
if ((*dset->shared->layout.ops->is_space_alloc)(&dset->shared->layout.storage)) {
if (H5D__chunk_allocated(dset, storage_size) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL,
- "can't retrieve chunked dataset allocated size")
+ "can't retrieve chunked dataset allocated size");
} /* end if */
else
*storage_size = 0;
@@ -2507,7 +2509,7 @@ H5D__get_storage_size(const H5D_t *dset, hsize_t *storage_size)
case H5D_LAYOUT_ERROR:
case H5D_NLAYOUTS:
default:
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset type")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a dataset type");
} /*lint !e788 All appropriate cases are covered */
done:
@@ -2554,7 +2556,7 @@ H5D__get_offset(const H5D_t *dset)
case H5D_LAYOUT_ERROR:
case H5D_NLAYOUTS:
default:
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, HADDR_UNDEF, "unknown dataset layout type")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, HADDR_UNDEF, "unknown dataset layout type");
}
done:
@@ -2585,7 +2587,7 @@ H5D__vlen_get_buf_size_alloc(size_t size, void *info)
if (size > vlen_bufsize_com->vl_tbuf_size) {
if (NULL ==
(vlen_bufsize_com->vl_tbuf = H5FL_BLK_REALLOC(vlen_vl_buf, vlen_bufsize_com->vl_tbuf, size)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, NULL, "can't reallocate temporary VL data buffer")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, NULL, "can't reallocate temporary VL data buffer");
vlen_bufsize_com->vl_tbuf_size = size;
} /* end if */
@@ -2625,7 +2627,7 @@ H5D__vlen_get_buf_size_cb(void H5_ATTR_UNUSED *elem, hid_t type_id, unsigned H5_
/* Select point to read in */
if (H5S_select_elements(vlen_bufsize->fspace, H5S_SELECT_SET, (size_t)1, point) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCREATE, H5_ITER_ERROR, "can't select point")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCREATE, H5_ITER_ERROR, "can't select point");
{
dset_info.dset = vlen_bufsize->dset;
@@ -2636,7 +2638,7 @@ H5D__vlen_get_buf_size_cb(void H5_ATTR_UNUSED *elem, hid_t type_id, unsigned H5_
/* Read in the point (with the custom VL memory allocator) */
if (H5D__read(1, &dset_info) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_READERROR, FAIL, "can't read data")
+ HGOTO_ERROR(H5E_DATASET, H5E_READERROR, FAIL, "can't read data");
}
done:
@@ -2680,35 +2682,35 @@ H5D__vlen_get_buf_size(H5D_t *dset, hid_t type_id, hid_t space_id, hsize_t *size
/* Check args */
if (NULL == (type = (H5T_t *)H5I_object(type_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not an valid base datatype")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a valid base datatype");
if (NULL == (space = (H5S_t *)H5I_object(space_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid dataspace")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid dataspace");
if (!(H5S_has_extent(space)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "dataspace does not have extent set")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "dataspace does not have extent set");
/* Save the dataset */
vlen_bufsize.dset = dset;
/* Get a copy of the dataset's dataspace */
if (NULL == (fspace = H5S_copy(dset->shared->space, FALSE, TRUE)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "unable to get dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "unable to get dataspace");
vlen_bufsize.fspace = fspace;
/* Create a scalar for the memory dataspace */
if (NULL == (mspace = H5S_create(H5S_SCALAR)))
- HGOTO_ERROR(H5E_DATASPACE, H5E_CANTCREATE, FAIL, "can't create dataspace")
+ HGOTO_ERROR(H5E_DATASPACE, H5E_CANTCREATE, FAIL, "can't create dataspace");
vlen_bufsize.mspace = mspace;
/* Grab the temporary buffers required */
if (NULL == (vlen_bufsize.common.fl_tbuf = H5FL_BLK_MALLOC(vlen_fl_buf, H5T_get_size(type))))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "no temporary buffers available")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "no temporary buffers available");
if (NULL == (vlen_bufsize.common.vl_tbuf = H5FL_BLK_MALLOC(vlen_vl_buf, (size_t)1)))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "no temporary buffers available")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "no temporary buffers available");
vlen_bufsize.common.vl_tbuf_size = 1;
/* Set the memory manager to the special allocation routine */
if (H5CX_set_vlen_alloc_info(H5D__vlen_get_buf_size_alloc, &vlen_bufsize.common, NULL, NULL) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set VL data allocation routine")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set VL data allocation routine");
/* Set the initial number of bytes required */
vlen_bufsize.common.size = 0;
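The vlen_bufsize machinery above implements the public buffer-size query for variable-length data. A sketch of how an application might use it (assumes an open VL-typed dataset dset_id, its memory datatype mem_type_id, and a file dataspace fspace_id carrying the wanted selection):

    #include <hdf5.h>
    #include <stdlib.h>

    static void *
    alloc_vl_read_buf(hid_t dset_id, hid_t mem_type_id, hid_t fspace_id)
    {
        hsize_t size = 0;

        if (H5Dvlen_get_buf_size(dset_id, mem_type_id, fspace_id, &size) < 0)
            return NULL;
        /* Caller reads into this buffer and later reclaims the VL data. */
        return malloc((size_t)size);
    }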
@@ -2774,22 +2776,22 @@ H5D__vlen_get_buf_size_gen_cb(void H5_ATTR_UNUSED *elem, hid_t type_id, unsigned
/* Check args */
if (NULL == (dt = (H5T_t *)H5I_object(type_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a datatype");
/* Make certain there is enough fixed-length buffer available */
if (NULL == (vlen_bufsize->common.fl_tbuf =
H5FL_BLK_REALLOC(vlen_fl_buf, vlen_bufsize->common.fl_tbuf, H5T_get_size(dt))))
- HGOTO_ERROR(H5E_DATASET, H5E_NOSPACE, FAIL, "can't resize tbuf")
+ HGOTO_ERROR(H5E_DATASET, H5E_NOSPACE, FAIL, "can't resize tbuf");
/* Select point to read in */
if (H5S_select_elements(vlen_bufsize->fspace, H5S_SELECT_SET, (size_t)1, point) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCREATE, FAIL, "can't select point")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCREATE, FAIL, "can't select point");
/* Read in the point (with the custom VL memory allocator) */
if (H5VL_dataset_read(1, &vlen_bufsize->dset_vol_obj, &type_id, &vlen_bufsize->mspace_id,
&vlen_bufsize->fspace_id, vlen_bufsize->dxpl_id, &vlen_bufsize->common.fl_tbuf,
H5_REQUEST_NULL) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_READERROR, FAIL, "can't read point")
+ HGOTO_ERROR(H5E_DATASET, H5E_READERROR, FAIL, "can't read point");
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -2823,11 +2825,11 @@ H5D__vlen_get_buf_size_gen(H5VL_object_t *vol_obj, hid_t type_id, hid_t space_id
/* Check args */
if (NULL == (type = (H5T_t *)H5I_object(type_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not an valid datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a valid datatype");
if (NULL == (space = (H5S_t *)H5I_object(space_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "invalid dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "invalid dataspace");
if (!(H5S_has_extent(space)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "dataspace does not have extent set")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "dataspace does not have extent set");
/* Save the dataset */
vlen_bufsize.dset_vol_obj = (const H5VL_object_t *)vol_obj;
@@ -2838,33 +2840,33 @@ H5D__vlen_get_buf_size_gen(H5VL_object_t *vol_obj, hid_t type_id, hid_t space_id
/* Get a copy of the dataset's dataspace */
if (H5VL_dataset_get(vol_obj, &vol_cb_args, H5P_DATASET_XFER_DEFAULT, H5_REQUEST_NULL) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataspace");
vlen_bufsize.fspace_id = vol_cb_args.args.get_space.space_id;
if (NULL == (vlen_bufsize.fspace = (H5S_t *)H5I_object(vlen_bufsize.fspace_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a dataspace");
/* Create a scalar for the memory dataspace */
if (NULL == (mspace = H5S_create(H5S_SCALAR)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCREATE, FAIL, "can't create dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCREATE, FAIL, "can't create dataspace");
if ((vlen_bufsize.mspace_id = H5I_register(H5I_DATASPACE, mspace, TRUE)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register dataspace ID")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register dataspace ID");
/* Grab the temporary buffers required */
if (NULL == (vlen_bufsize.common.fl_tbuf = H5FL_BLK_MALLOC(vlen_fl_buf, H5T_get_size(type))))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "no temporary buffers available")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "no temporary buffers available");
if (NULL == (vlen_bufsize.common.vl_tbuf = H5FL_BLK_MALLOC(vlen_vl_buf, (size_t)1)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "no temporary buffers available")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "no temporary buffers available");
vlen_bufsize.common.vl_tbuf_size = 1;
/* Set the VL allocation callbacks on a DXPL */
if (NULL == (dxpl = (H5P_genplist_t *)H5I_object(H5P_DATASET_XFER_DEFAULT)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get default DXPL")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get default DXPL");
if ((vlen_bufsize.dxpl_id = H5P_copy_plist(dxpl, TRUE)) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy property list");
if (NULL == (dxpl = (H5P_genplist_t *)H5I_object(vlen_bufsize.dxpl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get copied DXPL")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get copied DXPL");
if (H5P_set_vlen_mem_manager(dxpl, H5D__vlen_get_buf_size_alloc, &vlen_bufsize.common, NULL, NULL) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set VL data allocation routine on DXPL")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set VL data allocation routine on DXPL");
/* Set the initial number of bytes required */
vlen_bufsize.common.size = 0;
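The generic variant above wires a custom VL allocator into a copied DXPL via H5P_set_vlen_mem_manager. The public counterpart is H5Pset_vlen_mem_manager; a sketch with a hypothetical byte-tallying allocator:

    #include <hdf5.h>
    #include <stdlib.h>

    typedef struct {
        size_t total; /* bytes handed out so far (hypothetical bookkeeping) */
    } vl_tally_t;

    static void *
    tally_alloc(size_t size, void *info)
    {
        ((vl_tally_t *)info)->total += size;
        return malloc(size);
    }

    static void
    tally_free(void *mem, void *info)
    {
        (void)info;
        free(mem);
    }

    /* Attach the callbacks to a transfer property list for a later H5Dread(). */
    static hid_t
    make_tallying_dxpl(vl_tally_t *tally)
    {
        hid_t dxpl_id = H5Pcreate(H5P_DATASET_XFER);

        if (H5Pset_vlen_mem_manager(dxpl_id, tally_alloc, tally, tally_free, tally) < 0) {
            H5Pclose(dxpl_id);
            return H5I_INVALID_HID;
        }
        return dxpl_id;
    }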
@@ -2940,7 +2942,7 @@ H5D__check_filters(H5D_t *dataset)
/* Retrieve the "defined" status of the fill value */
if (H5P_is_fill_value_defined(fill, &fill_status) < 0)
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "Couldn't retrieve fill value from dataset.")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "couldn't retrieve fill value from dataset");
/* See if we can check the filter status */
if (fill_status == H5D_FILL_VALUE_DEFAULT || fill_status == H5D_FILL_VALUE_USER_DEFINED) {
@@ -2948,7 +2950,7 @@ H5D__check_filters(H5D_t *dataset)
(fill->fill_time == H5D_FILL_TIME_IFSET && fill_status == H5D_FILL_VALUE_USER_DEFINED)) {
/* Filters must have encoding enabled. Ensure that all filters can be applied */
if (H5Z_can_apply(dataset->shared->dcpl_id, dataset->shared->type_id) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANAPPLY, FAIL, "can't apply filters")
+ HGOTO_ERROR(H5E_PLINE, H5E_CANAPPLY, FAIL, "can't apply filters");
dataset->shared->checked_filters = TRUE;
} /* end if */
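H5D__check_filters refuses to extend or fill a dataset whose pipeline filters cannot encode. The equivalent check from application code, for a single filter:

    #include <hdf5.h>

    static int
    filter_can_encode(H5Z_filter_t filter)
    {
        unsigned int config = 0;

        if (H5Zfilter_avail(filter) <= 0)
            return 0;
        if (H5Zget_filter_info(filter, &config) < 0)
            return 0;
        return (config & H5Z_FILTER_CONFIG_ENCODE_ENABLED) != 0;
    }

    /* e.g. filter_can_encode(H5Z_FILTER_DEFLATE) */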
@@ -2985,18 +2987,18 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
/* Check if we are allowed to modify this file */
if (0 == (H5F_INTENT(dset->oloc.file) & H5F_ACC_RDWR))
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "no write intent on file")
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "no write intent on file");
/* Check if we are allowed to modify the space; only datasets with chunked and external storage are
* allowed to be modified */
if (H5D_COMPACT == dset->shared->layout.type)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "dataset has compact storage")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "dataset has compact storage");
if (H5D_CONTIGUOUS == dset->shared->layout.type && 0 == dset->shared->dcpl_cache.efl.nused)
- HGOTO_ERROR(H5E_ARGS, H5E_BADRANGE, FAIL, "dataset has contiguous storage")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADRANGE, FAIL, "dataset has contiguous storage");
/* Check if the filters in the DCPL will need to encode, and if so, can they? */
if (H5D__check_filters(dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't apply filters")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't apply filters");
/* Keep the current dataspace dimensions for later */
HDcompile_assert(sizeof(curr_dims) == sizeof(dset->shared->curr_dims));
@@ -3004,7 +3006,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
/* Modify the size of the dataspace */
if ((changed = H5S_set_extent(dset->shared->space, size)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of dataspace");
/* Don't bother updating things, unless they've changed */
if (changed) {
@@ -3026,7 +3028,8 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
/* Compute the scaled dimension size value */
if (dset->shared->layout.u.chunk.dim[dim_idx] == 0)
- HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "chunk size must be > 0, dim = %u ", dim_idx)
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "chunk size must be > 0, dim = %u ",
+ dim_idx);
scaled = size[dim_idx] / dset->shared->layout.u.chunk.dim[dim_idx];
@@ -3046,7 +3049,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
update_chunks = TRUE;
if (!(scaled_power2up = H5VM_power2up(scaled)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2");
/* Check if the number of bits required to encode the scaled size value changed */
if (dset->shared->cache.chunk.scaled_power2up[dim_idx] != scaled_power2up) {
@@ -3073,13 +3076,13 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
if (H5D_CHUNKED == dset->shared->layout.type) {
/* Set the cached chunk info */
if (H5D__chunk_set_info(dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks");
/* Check if updating the chunk cache indices is necessary */
if (update_chunks)
/* Update the chunk cache indices */
if (H5D__chunk_update_cache(dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices");
} /* end if */
/* Operations for virtual datasets */
@@ -3088,21 +3091,21 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
if (H5D_virtual_check_min_dims(dset) < 0)
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL,
"virtual dataset dimensions not large enough to contain all limited dimensions "
- "in all selections")
+ "in all selections");
/* Patch the virtual selection dataspaces */
for (u = 0; u < dset->shared->layout.storage.u.virt.list_nused; u++) {
/* Patch extent */
if (H5S_set_extent(dset->shared->layout.storage.u.virt.list[u].source_dset.virtual_select,
size) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of dataspace");
dset->shared->layout.storage.u.virt.list[u].virtual_space_status = H5O_VIRTUAL_STATUS_CORRECT;
/* Patch sub-source datasets */
for (v = 0; v < dset->shared->layout.storage.u.virt.list[u].sub_dset_nused; v++)
if (H5S_set_extent(dset->shared->layout.storage.u.virt.list[u].sub_dset[v].virtual_select,
size) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of dataspace");
} /* end for */
/* Mark virtual datasets as not fully initialized so internal
@@ -3113,7 +3116,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
/* Allocate space for the new parts of the dataset, if appropriate */
if (expand && dset->shared->dcpl_cache.fill.alloc_time == H5D_ALLOC_TIME_EARLY)
if (H5D__alloc_storage(dset, H5D_ALLOC_EXTEND, FALSE, curr_dims) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to extend dataset storage")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to extend dataset storage");
/*-------------------------------------------------------------------------
* Remove chunk information in the case of chunked datasets
@@ -3127,7 +3130,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
(*dset->shared->layout.ops->is_data_cached)(dset->shared))))
/* Remove excess chunks */
if (H5D__chunk_prune_by_extent(dset, curr_dims) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to remove chunks")
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to remove chunks");
/* Update chunks that are no longer edge chunks as a result of
* expansion */
@@ -3135,12 +3138,12 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
(dset->shared->layout.u.chunk.flags & H5O_LAYOUT_CHUNK_DONT_FILTER_PARTIAL_BOUND_CHUNKS) &&
(dset->shared->dcpl_cache.pline.nused > 0))
if (H5D__chunk_update_old_edge_chunks(dset, curr_dims) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to do update old edge chunks")
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update old edge chunks");
} /* end if */
/* Mark the dataspace as dirty, for later writing to the file */
if (H5D__mark(dset, H5D_MARK_SPACE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to mark dataspace as dirty")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to mark dataspace as dirty");
} /* end if */
done:
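H5D__set_extent is reached through the public H5Dset_extent. A sketch that grows the first dimension (assumes dset_id is chunked, was created with sufficiently large maximum dimensions, and lives in a file opened read-write):

    #include <hdf5.h>

    static herr_t
    grow_first_dim(hid_t dset_id, hsize_t new_len)
    {
        hid_t   space_id = H5Dget_space(dset_id);
        hsize_t dims[H5S_MAX_RANK];
        int     rank     = H5Sget_simple_extent_dims(space_id, dims, NULL);

        H5Sclose(space_id);
        if (rank < 1)
            return -1;
        dims[0] = new_len;
        return H5Dset_extent(dset_id, dims);
    }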
@@ -3175,7 +3178,7 @@ H5D__flush_sieve_buf(H5D_t *dataset)
if (H5F_shared_block_write(
H5F_SHARED(dataset->oloc.file), H5FD_MEM_DRAW, dataset->shared->cache.contig.sieve_loc,
dataset->shared->cache.contig.sieve_size, dataset->shared->cache.contig.sieve_buf) < 0)
- HGOTO_ERROR(H5E_IO, H5E_WRITEERROR, FAIL, "block write failed")
+ HGOTO_ERROR(H5E_IO, H5E_WRITEERROR, FAIL, "block write failed");
/* Reset sieve buffer dirty flag */
dataset->shared->cache.contig.sieve_dirty = FALSE;
@@ -3209,7 +3212,7 @@ H5D__flush_real(H5D_t *dataset)
if (!dataset->shared->closing)
/* Flush cached raw data for each kind of dataset layout */
if (dataset->shared->layout.ops->flush && (dataset->shared->layout.ops->flush)(dataset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to flush raw data")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to flush raw data");
done:
FUNC_LEAVE_NOAPI_TAG(ret_value)
@@ -3238,15 +3241,15 @@ H5D__flush(H5D_t *dset, hid_t dset_id)
/* Currently, H5Oflush causes H5Fclose to trigger an assertion failure in metadata cache.
* Leave this situation for the future solution */
if (H5F_HAS_FEATURE(dset->oloc.file, H5FD_FEAT_HAS_MPI))
- HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "H5Oflush isn't supported for parallel")
+ HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "H5Oflush isn't supported for parallel");
/* Flush any dataset information still cached in memory */
if (H5D__flush_real(dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to flush cached dataset info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to flush cached dataset info");
/* Flush object's metadata to file */
if (H5O_flush_common(&dset->oloc, dset_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to flush dataset and object flush callback")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to flush dataset and object flush callback");
done:
FUNC_LEAVE_NOAPI(ret_value)
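H5D__flush_real and H5D__flush sit behind the public flush calls; note the hunk above refuses the flush when the file driver has the MPI feature ("H5Oflush isn't supported for parallel"). A minimal caller-side sketch:

    #include <hdf5.h>

    static herr_t
    flush_dataset_then_file(hid_t dset_id, hid_t file_id)
    {
        if (H5Dflush(dset_id) < 0)
            return -1;
        return H5Fflush(file_id, H5F_SCOPE_LOCAL);
    }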
@@ -3283,7 +3286,7 @@ H5D__format_convert(H5D_t *dataset)
assert(dataset->shared->layout.u.chunk.idx_type != H5D_CHUNK_IDX_BTREE);
if (NULL == (newlayout = (H5O_layout_t *)H5MM_calloc(sizeof(H5O_layout_t))))
- HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "unable to allocate buffer")
+ HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "unable to allocate buffer");
/* Set up the current index info */
idx_info.f = dataset->oloc.file;
@@ -3311,7 +3314,7 @@ H5D__format_convert(H5D_t *dataset)
if (new_idx_info.storage->ops->init &&
(new_idx_info.storage->ops->init)(&new_idx_info, dataset->shared->space, dataset->oloc.addr) <
0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't initialize indexing information")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't initialize indexing information");
init_new_index = TRUE;
/* If the current chunk index exists */
@@ -3319,30 +3322,30 @@ H5D__format_convert(H5D_t *dataset)
/* Create v1 B-tree chunk index */
if ((new_idx_info.storage->ops->create)(&new_idx_info) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't create chunk index")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't create chunk index");
/* Iterate over the chunks in the current index and insert the chunk addresses
* into the version 1 B-tree chunk index
*/
if (H5D__chunk_format_convert(dataset, &idx_info, &new_idx_info) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_BADITER, FAIL, "unable to iterate/convert chunk index")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADITER, FAIL, "unable to iterate/convert chunk index");
} /* end if */
/* Delete the old "current" layout message */
if (H5O_msg_remove(&dataset->oloc, H5O_LAYOUT_ID, H5O_ALL, FALSE) < 0)
- HGOTO_ERROR(H5E_SYM, H5E_CANTDELETE, FAIL, "unable to delete layout message")
+ HGOTO_ERROR(H5E_SYM, H5E_CANTDELETE, FAIL, "unable to delete layout message");
delete_old_layout = TRUE;
/* Append the new layout message to the object header */
if (H5O_msg_create(&dataset->oloc, H5O_LAYOUT_ID, 0, H5O_UPDATE_TIME, newlayout) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update layout header message")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update layout header message");
add_new_layout = TRUE;
/* Release the old (current) chunk index */
if (idx_info.storage->ops->dest && (idx_info.storage->ops->dest)(&idx_info) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTFREE, FAIL, "unable to release chunk index info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTFREE, FAIL, "unable to release chunk index info");
/* Copy the new layout to the dataset's layout */
H5MM_memcpy(&dataset->shared->layout, newlayout, sizeof(H5O_layout_t));
@@ -3355,18 +3358,18 @@ H5D__format_convert(H5D_t *dataset)
dataset->shared->layout.version = H5O_LAYOUT_VERSION_DEFAULT;
if (H5O_msg_write(&(dataset->oloc), H5O_LAYOUT_ID, 0, H5O_UPDATE_TIME,
&(dataset->shared->layout)) < 0)
- HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to update layout message")
+ HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to update layout message");
break;
case H5D_VIRTUAL:
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "virtual dataset layout not supported")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "virtual dataset layout not supported");
case H5D_LAYOUT_ERROR:
case H5D_NLAYOUTS:
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid dataset layout type")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid dataset layout type");
default:
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "unknown dataset layout type")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "unknown dataset layout type");
} /* end switch */
done:
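H5D__format_convert downgrades a newer chunk index to the version-1 B-tree. The public wrapper is H5Dformat_convert; a sketch that skips the virtual layout the switch above rejects (assumes dset_id was opened read-write):

    #include <hdf5.h>

    static herr_t
    downgrade_chunk_index(hid_t dset_id)
    {
        hid_t        dcpl_id = H5Dget_create_plist(dset_id);
        H5D_layout_t layout  = H5Pget_layout(dcpl_id);

        H5Pclose(dcpl_id);
        if (layout == H5D_VIRTUAL) /* rejected above with H5E_BADTYPE */
            return -1;
        return H5Dformat_convert(dset_id);
    }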
@@ -3434,12 +3437,12 @@ H5D__mark(const H5D_t *dataset, unsigned flags)
/* Pin the object header */
if (NULL == (oh = H5O_pin(&dataset->oloc)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTPIN, FAIL, "unable to pin dataset object header")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTPIN, FAIL, "unable to pin dataset object header");
/* Update the layout on disk, if it's been changed */
if (flags & H5D_MARK_LAYOUT) {
if (H5D__layout_oh_write(dataset, oh, update_flags) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update layout info")
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update layout info");
/* Reset the "update the modification time" flag, so we only do it once */
update_flags = 0;
@@ -3448,7 +3451,7 @@ H5D__mark(const H5D_t *dataset, unsigned flags)
/* Update the dataspace on disk, if it's been changed */
if (flags & H5D_MARK_SPACE) {
if (H5S_write(dataset->oloc.file, oh, update_flags, dataset->shared->space) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update file with new dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update file with new dataspace");
/* Reset the "update the modification time" flag, so we only do it once */
update_flags = 0;
@@ -3493,7 +3496,7 @@ H5D__flush_all_cb(void *_dataset, hid_t H5_ATTR_UNUSED id, void *_udata)
if (f == dataset->oloc.file)
/* Flush the dataset's information */
if (H5D__flush_real(dataset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, H5_ITER_ERROR, "unable to flush cached dataset info")
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, H5_ITER_ERROR, "unable to flush cached dataset info");
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -3520,7 +3523,7 @@ H5D_flush_all(H5F_t *f)
/* Iterate over all the open datasets */
if (H5I_iterate(H5I_DATASET, H5D__flush_all_cb, f, FALSE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_BADITER, FAIL, "unable to flush cached dataset info")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADITER, FAIL, "unable to flush cached dataset info");
done:
FUNC_LEAVE_NOAPI(ret_value)
@@ -3552,21 +3555,21 @@ H5D_get_create_plist(const H5D_t *dset)
/* Check args */
if (NULL == (dcpl_plist = (H5P_genplist_t *)H5I_object(dset->shared->dcpl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "can't get property list")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "can't get property list");
/* Copy the creation property list */
if ((new_dcpl_id = H5P_copy_plist(dcpl_plist, TRUE)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to copy the creation property list")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to copy the creation property list");
if (NULL == (new_plist = (H5P_genplist_t *)H5I_object(new_dcpl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "can't get property list")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "can't get property list");
/* Retrieve any object creation properties */
if (H5O_get_create_plist(&dset->oloc, new_plist) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get object creation info")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get object creation info");
/* Get the layout property */
if (H5P_peek(new_plist, H5D_CRT_LAYOUT_NAME, &copied_layout) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get layout")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get layout");
/* Reset layout values set when dataset is created */
copied_layout.ops = NULL;
@@ -3590,7 +3593,7 @@ H5D_get_create_plist(const H5D_t *dset)
/* Reset address and pointer of the array struct for the chunked storage index */
if (H5D_chunk_idx_reset(&copied_layout.storage.u.chunk, TRUE) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL,
- "unable to reset chunked storage index in dest")
+ "unable to reset chunked storage index in dest");
/* Reset chunk index ops */
copied_layout.storage.u.chunk.ops = NULL;
@@ -3609,11 +3612,11 @@ H5D_get_create_plist(const H5D_t *dset)
/* Set back the (possibly modified) layout property to property list */
if (H5P_poke(new_plist, H5D_CRT_LAYOUT_NAME, &copied_layout) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set layout")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set layout");
/* Get the fill value property */
if (H5P_peek(new_plist, H5D_CRT_FILL_VALUE_NAME, &copied_fill) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get fill value")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get fill value");
/* Check if there is a fill value, but no type yet */
if (copied_fill.buf != NULL && copied_fill.type == NULL) {
@@ -3621,12 +3624,12 @@ H5D_get_create_plist(const H5D_t *dset)
/* Copy the dataset type into the fill value message */
if (NULL == (copied_fill.type = H5T_copy(dset->shared->type, H5T_COPY_TRANSIENT)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to copy dataset datatype for fill value")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to copy dataset datatype for fill value");
/* Set up type conversion function */
if (NULL == (tpath = H5T_path_find(dset->shared->type, copied_fill.type)))
HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL,
- "unable to convert between src and dest data types")
+ "unable to convert between src and dest data types");
/* Convert disk form of fill value into memory form */
if (!H5T_path_noop(tpath)) {
@@ -3637,11 +3640,11 @@ H5D_get_create_plist(const H5D_t *dset)
/* Wrap copies of types to convert */
dst_id = H5I_register(H5I_DATATYPE, H5T_copy(copied_fill.type, H5T_COPY_TRANSIENT), FALSE);
if (dst_id < 0)
- HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "unable to copy/register datatype")
+ HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "unable to copy/register datatype");
src_id = H5I_register(H5I_DATATYPE, H5T_copy(dset->shared->type, H5T_COPY_ALL), FALSE);
if (src_id < 0) {
H5I_dec_ref(dst_id);
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to copy/register datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to copy/register datatype");
} /* end if */
/* Allocate a background buffer */
@@ -3649,7 +3652,7 @@ H5D_get_create_plist(const H5D_t *dset)
if (H5T_path_bkg(tpath) && NULL == (bkg_buf = H5FL_BLK_CALLOC(type_conv, bkg_size))) {
H5I_dec_ref(src_id);
H5I_dec_ref(dst_id);
- HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "memory allocation failed")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "memory allocation failed");
} /* end if */
/* Convert fill value */
@@ -3659,14 +3662,14 @@ H5D_get_create_plist(const H5D_t *dset)
H5I_dec_ref(dst_id);
if (bkg_buf)
bkg_buf = H5FL_BLK_FREE(type_conv, bkg_buf);
- HGOTO_ERROR(H5E_DATASET, H5E_CANTCONVERT, FAIL, "datatype conversion failed")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTCONVERT, FAIL, "datatype conversion failed");
} /* end if */
/* Release local resources */
if (H5I_dec_ref(src_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTDEC, FAIL, "unable to close temporary object")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTDEC, FAIL, "unable to close temporary object");
if (H5I_dec_ref(dst_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTDEC, FAIL, "unable to close temporary object")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTDEC, FAIL, "unable to close temporary object");
if (bkg_buf)
bkg_buf = H5FL_BLK_FREE(type_conv, bkg_buf);
} /* end if */
@@ -3674,11 +3677,11 @@ H5D_get_create_plist(const H5D_t *dset)
/* Set back the (possibly modified) fill value property to property list */
if (H5P_poke(new_plist, H5D_CRT_FILL_VALUE_NAME, &copied_fill) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set fill value")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set fill value");
/* Get the fill value property */
if (H5P_peek(new_plist, H5D_CRT_EXT_FILE_LIST_NAME, &copied_efl) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get external file list")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get external file list");
/* Reset efl name_offset and heap_addr, these are the values when the dataset is created */
if (copied_efl.slot) {
@@ -3691,7 +3694,7 @@ H5D_get_create_plist(const H5D_t *dset)
/* Set back the (possibly modified) external file list property to property list */
if (H5P_poke(new_plist, H5D_CRT_EXT_FILE_LIST_NAME, &copied_efl) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set external file list")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set external file list");
/* Set the return value */
ret_value = new_dcpl_id;
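H5D_get_create_plist hands back a copy of the DCPL with the fill value converted to the dataset's datatype. A caller-side sketch that inspects the result (assumes an open dataset dset_id holding int data):

    #include <hdf5.h>
    #include <stdio.h>

    static void
    inspect_dcpl(hid_t dset_id)
    {
        hid_t            dcpl_id = H5Dget_create_plist(dset_id);
        H5D_layout_t     layout  = H5Pget_layout(dcpl_id);
        H5D_fill_value_t fill_status;
        int              fill    = 0;

        if (H5Pfill_value_defined(dcpl_id, &fill_status) >= 0 &&
            fill_status == H5D_FILL_VALUE_USER_DEFINED) {
            H5Pget_fill_value(dcpl_id, H5T_NATIVE_INT, &fill);
            printf("layout %d, user-defined fill value %d\n", (int)layout, fill);
        }
        H5Pclose(dcpl_id);
    }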
@@ -3736,87 +3739,87 @@ H5D_get_access_plist(const H5D_t *dset)
/* Make a copy of the dataset's dataset access property list */
if (NULL == (old_plist = (H5P_genplist_t *)H5I_object(dset->shared->dapl_id)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "can't get property list")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "can't get property list");
if ((new_dapl_id = H5P_copy_plist(old_plist, TRUE)) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTINIT, FAIL, "can't copy dataset access property list")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTINIT, FAIL, "can't copy dataset access property list");
if (NULL == (new_plist = (H5P_genplist_t *)H5I_object(new_dapl_id)))
- HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a property list")
+ HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a property list");
/* If the dataset is chunked then copy the rdcc & append flush parameters.
* Otherwise, use the default values. */
if (dset->shared->layout.type == H5D_CHUNKED) {
if (H5P_set(new_plist, H5D_ACS_DATA_CACHE_NUM_SLOTS_NAME, &(dset->shared->cache.chunk.nslots)) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set data cache number of slots")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set data cache number of slots");
if (H5P_set(new_plist, H5D_ACS_DATA_CACHE_BYTE_SIZE_NAME, &(dset->shared->cache.chunk.nbytes_max)) <
0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set data cache byte size")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set data cache byte size");
if (H5P_set(new_plist, H5D_ACS_PREEMPT_READ_CHUNKS_NAME, &(dset->shared->cache.chunk.w0)) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set preempt read chunks")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set preempt read chunks");
if (H5P_set(new_plist, H5D_ACS_APPEND_FLUSH_NAME, &dset->shared->append_flush) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set append flush property")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set append flush property");
}
else {
/* Get the default FAPL */
if (NULL == (def_dapl = (H5P_genplist_t *)H5I_object(H5P_LST_DATASET_ACCESS_ID_g)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a property list")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a property list");
/* Set the data cache number of slots to the value of the default FAPL */
if (H5P_get(def_dapl, H5D_ACS_DATA_CACHE_NUM_SLOTS_NAME, &def_chunk_info.nslots) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get data number of slots");
if (H5P_set(new_plist, H5D_ACS_DATA_CACHE_NUM_SLOTS_NAME, &def_chunk_info.nslots) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set data cache number of slots")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set data cache number of slots");
/* Set the data cache byte size to the value of the default FAPL */
if (H5P_get(def_dapl, H5D_ACS_DATA_CACHE_BYTE_SIZE_NAME, &def_chunk_info.nbytes_max) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get data cache byte size");
if (H5P_set(new_plist, H5D_ACS_DATA_CACHE_BYTE_SIZE_NAME, &def_chunk_info.nbytes_max) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set data cache byte size")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set data cache byte size");
/* Set the preempt read chunks property to the value of the default FAPL */
if (H5P_get(def_dapl, H5D_ACS_PREEMPT_READ_CHUNKS_NAME, &def_chunk_info.w0) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get preempt read chunks");
if (H5P_set(new_plist, H5D_ACS_PREEMPT_READ_CHUNKS_NAME, &def_chunk_info.w0) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set preempt read chunks")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set preempt read chunks");
/* Set the append flush property to its default value */
if (H5P_set(new_plist, H5D_ACS_APPEND_FLUSH_NAME, &def_append_flush_info) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set append flush property")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set append flush property");
} /* end if-else */
/* If the dataset is virtual then copy the VDS view & printf gap options.
* Otherwise, use the default values. */
if (dset->shared->layout.type == H5D_VIRTUAL) {
if (H5P_set(new_plist, H5D_ACS_VDS_VIEW_NAME, &(dset->shared->layout.storage.u.virt.view)) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set VDS view")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set VDS view");
if (H5P_set(new_plist, H5D_ACS_VDS_PRINTF_GAP_NAME,
&(dset->shared->layout.storage.u.virt.printf_gap)) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set VDS printf gap")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set VDS printf gap");
}
else {
/* Get the default FAPL if necessary */
if (!def_dapl && NULL == (def_dapl = (H5P_genplist_t *)H5I_object(H5P_LST_DATASET_ACCESS_ID_g)))
- HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a property list")
+ HGOTO_ERROR(H5E_DATASET, H5E_BADTYPE, FAIL, "not a property list");
/* Set the data cache number of slots to the value of the default FAPL */
if (H5P_get(def_dapl, H5D_ACS_VDS_VIEW_NAME, &def_vds_view) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get VDS view");
if (H5P_set(new_plist, H5D_ACS_VDS_VIEW_NAME, &def_vds_view) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set VDS view")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set VDS view");
/* Set the data cache byte size to the value of the default FAPL */
if (H5P_get(def_dapl, H5D_ACS_VDS_PRINTF_GAP_NAME, &def_vds_gap) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get VDS printf gap");
if (H5P_set(new_plist, H5D_ACS_VDS_PRINTF_GAP_NAME, &def_vds_gap) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set VDS printf gap")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set VDS printf gap");
}
/* Set the vds prefix option */
if (H5P_set(new_plist, H5D_ACS_VDS_PREFIX_NAME, &(dset->shared->vds_prefix)) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set vds prefix")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set vds prefix");
/* Set the external file prefix option */
if (H5P_set(new_plist, H5D_ACS_EFILE_PREFIX_NAME, &(dset->shared->extfile_prefix)) < 0)
- HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set external file prefix")
+ HGOTO_ERROR(H5E_PLIST, H5E_CANTSET, FAIL, "can't set external file prefix");
/* Set the return value */
ret_value = new_dapl_id;
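H5D_get_access_plist copies the chunk-cache and VDS settings into the returned DAPL. A sketch that reads them back (assumes an open chunked dataset dset_id):

    #include <hdf5.h>
    #include <stdio.h>

    static void
    print_chunk_cache(hid_t dset_id)
    {
        hid_t  dapl_id = H5Dget_access_plist(dset_id);
        size_t nslots  = 0;
        size_t nbytes  = 0;
        double w0      = 0.0;

        if (H5Pget_chunk_cache(dapl_id, &nslots, &nbytes, &w0) >= 0)
            printf("rdcc: %zu slots, %zu bytes, w0 = %.2f\n", nslots, nbytes, w0);
        H5Pclose(dapl_id);
    }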
@@ -3850,15 +3853,15 @@ H5D__get_space(const H5D_t *dset)
/* If the layout is virtual, update the extent */
if (dset->shared->layout.type == H5D_VIRTUAL)
if (H5D__virtual_set_extent_unlim(dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update virtual dataset extent")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to update virtual dataset extent");
/* Read the dataspace message and return a dataspace object */
if (NULL == (space = H5S_copy(dset->shared->space, FALSE, TRUE)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to get dataspace")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to get dataspace");
/* Create an ID */
if ((ret_value = H5I_register(H5I_DATASPACE, space, TRUE)) < 0)
- HGOTO_ERROR(H5E_ID, H5E_CANTREGISTER, FAIL, "unable to register dataspace")
+ HGOTO_ERROR(H5E_ID, H5E_CANTREGISTER, FAIL, "unable to register dataspace");
done:
if (ret_value < 0)
@@ -3889,19 +3892,19 @@ H5D__get_type(const H5D_t *dset)
/* Patch the datatype's "top level" file pointer */
if (H5T_patch_file(dset->shared->type, dset->oloc.file) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to patch datatype's file pointer")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to patch datatype's file pointer");
/* Copy the dataset's datatype */
if (NULL == (dt = H5T_copy_reopen(dset->shared->type)))
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to copy datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to copy datatype");
/* Mark any datatypes as being in memory now */
if (H5T_set_loc(dt, NULL, H5T_LOC_MEMORY) < 0)
- HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "invalid datatype location")
+ HGOTO_ERROR(H5E_DATATYPE, H5E_CANTINIT, FAIL, "invalid datatype location");
/* Lock copied type */
if (H5T_lock(dt, FALSE) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to lock transient datatype")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to lock transient datatype");
/* Create an ID */
if (H5T_is_named(dt)) {
@@ -3910,10 +3913,10 @@ H5D__get_type(const H5D_t *dset)
* returned datatype.
*/
if ((ret_value = H5VL_wrap_register(H5I_DATATYPE, dt, TRUE)) < 0)
- HGOTO_ERROR(H5E_ID, H5E_CANTREGISTER, FAIL, "unable to register datatype")
+ HGOTO_ERROR(H5E_ID, H5E_CANTREGISTER, FAIL, "unable to register datatype");
} /* end if */
else if ((ret_value = H5I_register(H5I_DATATYPE, dt, TRUE)) < 0)
- HGOTO_ERROR(H5E_ID, H5E_CANTREGISTER, FAIL, "unable to register datatype")
+ HGOTO_ERROR(H5E_ID, H5E_CANTREGISTER, FAIL, "unable to register datatype");
done:
if (ret_value < 0)
@@ -3948,17 +3951,17 @@ H5D__refresh(H5D_t *dset, hid_t dset_id)
if (dset->shared->layout.type == H5D_VIRTUAL) {
/* Hold open the source datasets' files */
if (H5D__virtual_hold_source_dset_files(dset, &head) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, FAIL, "unable to hold VDS source files open")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINC, FAIL, "unable to hold VDS source files open");
virt_dsets_held = TRUE;
/* Refresh source datasets for virtual dataset */
if (H5D__virtual_refresh_source_dsets(dset) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to refresh VDS source datasets")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to refresh VDS source datasets");
} /* end if */
/* Refresh dataset object */
if ((H5O_refresh_metadata(&dset->oloc, dset_id)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to refresh dataset")
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTFLUSH, FAIL, "unable to refresh dataset");
done:
/* Release hold on (source) virtual datasets' files */
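H5D__refresh underlies the public H5Drefresh, typically polled by SWMR readers so a concurrent writer's extent changes become visible. A sketch (assumes dset_id belongs to a file opened for SWMR reading):

    #include <hdf5.h>

    static int
    refreshed_rank(hid_t dset_id, hsize_t dims[H5S_MAX_RANK])
    {
        hid_t space_id;
        int   rank;

        if (H5Drefresh(dset_id) < 0)
            return -1;
        space_id = H5Dget_space(dset_id);
        rank     = H5Sget_simple_extent_dims(space_id, dims, NULL);
        H5Sclose(space_id);
        return rank;
    }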