Diffstat (limited to 'src/H5Dint.c')
-rw-r--r--  src/H5Dint.c | 18 ++++++++++--------
1 file changed, 10 insertions(+), 8 deletions(-)
diff --git a/src/H5Dint.c b/src/H5Dint.c
index c063bb9..d662cec 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -97,6 +97,7 @@ static herr_t H5D__vlen_get_buf_size_cb(void *elem, hid_t type_id, unsigned ndim
const hsize_t *point, void *op_data);
static herr_t H5D__vlen_get_buf_size_gen_cb(void *elem, hid_t type_id, unsigned ndim,
const hsize_t *point, void *op_data);
+static herr_t H5D__check_filters(H5D_t *dataset);
/*********************/
@@ -671,7 +672,7 @@ H5D__cache_dataspace_info(const H5D_t *dset)
for(u = 0; u < dset->shared->ndims; u++) {
hsize_t scaled_power2up; /* Scaled value, rounded to next power of 2 */
- if( !(scaled_power2up = H5VM_power2up(dset->shared->curr_dims[u])) )
+ if(!(scaled_power2up = H5VM__power2up(dset->shared->curr_dims[u])))
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")
dset->shared->curr_power2up[u] = scaled_power2up;
}
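
The only change in this hunk is the rename from H5VM_power2up() to the package-scoped H5VM__power2up(). As a hedged sketch (hypothetical name, not the HDF5 implementation), a power2up-style helper rounds a value up to the next power of two and signals overflow with 0, which is why the call site treats a zero return as an error:

    #include <stdint.h>

    /* Minimal sketch of a power2up-style helper, assuming the usual
     * semantics: return the smallest power of two >= n, or 0 if the
     * next doubling would overflow. */
    static uint64_t
    power2up_sketch(uint64_t n)
    {
        uint64_t ret = 1;

        while(ret < n) {
            if(ret > (UINT64_MAX >> 1))
                return 0;       /* overflow: no representable power of 2 */
            ret <<= 1;
        }
        return ret;             /* n == 0 yields 1 */
    }
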
@@ -908,7 +909,7 @@ H5D__prepare_minimized_oh(H5F_t *file, H5D_t *dset, H5O_loc_t *oloc)
HDassert(dset);
HDassert(oloc);
- oh = H5O__create_ohdr(file, dset->shared->dcpl_id);
+ oh = H5O_create_ohdr(file, dset->shared->dcpl_id);
if(NULL == oh)
HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "can't instantiate object header")
@@ -917,7 +918,7 @@ H5D__prepare_minimized_oh(H5F_t *file, H5D_t *dset, H5O_loc_t *oloc)
HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "computed header size is invalid")
/* Special allocation of space for compact datasets is handled by the call here. */
- if(H5O__apply_ohdr(file, oh, dset->shared->dcpl_id, ohdr_size, (size_t)1, oloc) == FAIL)
+ if(H5O_apply_ohdr(file, oh, dset->shared->dcpl_id, ohdr_size, (size_t)1, oloc) == FAIL)
HGOTO_ERROR(H5E_OHDR, H5E_BADVALUE, FAIL, "can't apply object header to file")
done:
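
These two hunks go the other way: H5O__create_ohdr() and H5O__apply_ohdr() lose an underscore. Under HDF5's internal naming convention (summarized below as a hedged reading, with hypothetical names), the double underscore marks a function as private to its package, so routines called from the H5D package must carry the single-underscore, library-private form:

    /* HDF5 internal naming convention (names here are hypothetical):
     *
     *   H5Xthing()    - public API, callable by applications
     *   H5X_thing()   - library-private, callable from other packages
     *   H5X__thing()  - package-private or file-static, paired with
     *                   FUNC_ENTER_PACKAGE / FUNC_ENTER_STATIC
     *
     * H5O_create_ohdr() and H5O_apply_ohdr() are invoked from H5D
     * code, so they take the cross-package H5X_thing() form. */
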
@@ -1022,7 +1023,8 @@ H5D__update_oh_info(H5F_t *file, H5D_t *dset, hid_t dapl_id)
if(TRUE == use_minimized_header) {
if(H5D__prepare_minimized_oh(file, dset, oloc) == FAIL)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't create minimized dataset object header")
- } else {
+ } /* end if */
+ else {
/* Add the dataset's raw data size to the size of the header, if the
* raw data will be stored as compact
*/
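
This hunk is purely stylistic: the one-line "} else {" is split so the closing brace can carry HDF5's trailing block comment. In house style that reads roughly as:

    if(TRUE == use_minimized_header) {
        /* ... */
    } /* end if */
    else {
        /* ... */
    } /* end else */
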
@@ -2960,13 +2962,13 @@ done:
* Return: Non-negative on success/Negative on failure
*-------------------------------------------------------------------------
*/
-herr_t
+static herr_t
H5D__check_filters(H5D_t *dataset)
{
H5O_fill_t *fill; /* Dataset's fill value */
herr_t ret_value = SUCCEED; /* Return value */
- FUNC_ENTER_PACKAGE
+ FUNC_ENTER_STATIC
/* Check args */
HDassert(dataset);
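
H5D__check_filters() is only called from within H5Dint.c, so it is demoted from package scope to file scope: the definition gains static, the entry macro switches from FUNC_ENTER_PACKAGE to FUNC_ENTER_STATIC, and the prototype added in the first hunk of this diff provides the forward declaration for earlier callers in the file. A hedged sketch of the resulting pattern, assuming HDF5's private headers (the elided body and the FUNC_LEAVE_NOAPI tail are placeholders, not the actual function body):

    static herr_t
    H5D__check_filters(H5D_t *dataset)
    {
        herr_t ret_value = SUCCEED;     /* Return value */

        FUNC_ENTER_STATIC

        /* Check args */
        HDassert(dataset);

        /* ... filter checks; failures HGOTO_ERROR(...) to done ... */

    done:
        FUNC_LEAVE_NOAPI(ret_value)
    } /* end H5D__check_filters() */
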
@@ -3086,14 +3088,14 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
dset->shared->cache.chunk.scaled_dims[dim_idx] > dset->shared->cache.chunk.nslots))
update_chunks = TRUE;
- if(!(scaled_power2up = H5VM_power2up(scaled)))
+ if(!(scaled_power2up = H5VM__power2up(scaled)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")
/* Check if the number of bits required to encode the scaled size value changed */
if(dset->shared->cache.chunk.scaled_power2up[dim_idx] != scaled_power2up) {
/* Update the 'power2up' & 'encode_bits' values for the current dimension */
dset->shared->cache.chunk.scaled_power2up[dim_idx] = scaled_power2up;
- dset->shared->cache.chunk.scaled_encode_bits[dim_idx] = H5VM_log2_gen(scaled_power2up);
+ dset->shared->cache.chunk.scaled_encode_bits[dim_idx] = H5VM__log2_gen(scaled_power2up);
/* Indicate that the cached chunk indices need to be updated */
update_chunks = TRUE;
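
As with the first hunk, H5VM_log2_gen() becomes the package-scoped H5VM__log2_gen(). Because scaled_power2up is by construction a power of two here, the call yields the number of bits needed to encode a scaled chunk offset in that dimension. A minimal sketch of that computation, assuming exact power-of-two input (hypothetical helper, not the HDF5 code):

    #include <stdint.h>

    /* Sketch: log2 of a power of two, i.e. the bit width needed to
     * encode values in [0, v). */
    static unsigned
    log2_pow2_sketch(uint64_t v)
    {
        unsigned r = 0;

        while(v >>= 1)
            r++;
        return r;       /* e.g. v = 8 -> 3 */
    }
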