author    Binh-Minh Ribler <bmribler@hdfgroup.org>  2018-07-16 19:52:43 (GMT)
committer Binh-Minh Ribler <bmribler@hdfgroup.org>  2018-07-16 19:52:43 (GMT)
commit    55666ace551369b3d72430c090ab9f1571e1be75 (patch)
tree      17569dc0d6a7d90f0ed16e5e817718c5551f0299
parent    2725634ed99d330d38f449ac92203913f1fd2c55 (diff)
parent    83ca39ba9a2d6852dd7754b533f7839e9d2a0107 (diff)
Merge pull request #1134 in HDFFV/hdf5 from ~BMRIBLER/hdf5_bmr_cpp4:develop to develop
Fixed HDFFV-10481 and HDFFV-10477

* commit '83ca39ba9a2d6852dd7754b533f7839e9d2a0107':
  Combined macro lines as Dana commented
  Platforms tested: Linux/64 (jelly) (very minor)

  Fixed division-by-zero issues
  Description:
      Fixed HDFFV-10481 and HDFFV-10477, division by 0.
      Fixed another occurrence beside what was reported.
      Also, changed a local variable to avoid an unnecessary cast.
  Platforms tested:
      Linux/64 (jelly)
      Linux/32 (jam)
      Darwin (osx1010test)
-rw-r--r--  src/H5Dbtree.c  22
-rw-r--r--  src/H5Dchunk.c   2
-rw-r--r--  src/H5Dint.c    36
3 files changed, 36 insertions, 24 deletions
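
All three hunks apply the same fix: check that the chunk dimension is non-zero
before it is used as a divisor, and report an HDF5 error instead of letting the
division fault. Below is a minimal standalone sketch of that guard; the struct,
function names, and error reporting are simplified stand-ins for illustration,
not the actual HDF5 types or macros.

#include <stdint.h>
#include <stdio.h>

/* Simplified stand-in for the chunk layout (the real fields live in
 * H5O_layout_chunk_t); hypothetical type, for illustration only. */
typedef struct {
    unsigned ndims;       /* number of chunk dimensions        */
    uint32_t dim[8];      /* chunk size in each dimension      */
} chunk_layout_t;

/* Convert raw coordinate offsets to scaled offsets, guarding against a
 * zero chunk dimension -- the division-by-zero case this commit fixes. */
static int
scale_offsets(const chunk_layout_t *layout, const uint64_t *offset,
              uint64_t *scaled /*out*/)
{
    unsigned u;

    for (u = 0; u < layout->ndims; u++) {
        if (layout->dim[u] == 0) {
            /* The real code uses HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, ...) */
            fprintf(stderr, "chunk size must be > 0, dim = %u\n", u);
            return -1;
        }
        scaled[u] = offset[u] / layout->dim[u];
    }
    return 0;
}

int
main(void)
{
    chunk_layout_t layout    = { 2, { 4, 0 } };   /* dim[1] == 0 is invalid */
    uint64_t       offset[2] = { 8, 8 };
    uint64_t       scaled[2];

    /* Prints the error and returns -1 instead of crashing on a divide. */
    return scale_offsets(&layout, offset, scaled) == -1 ? 0 : 1;
}

With the guard in place, a file whose stored chunk dimensions decode to zero
produces an error return rather than a crash, which is what the H5Dbtree.c,
H5Dchunk.c, and H5Dint.c changes below accomplish via HGOTO_ERROR.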
diff --git a/src/H5Dbtree.c b/src/H5Dbtree.c
index c23f089..8abfe27 100644
--- a/src/H5Dbtree.c
+++ b/src/H5Dbtree.c
@@ -666,12 +666,13 @@ done:
static herr_t
H5D__btree_decode_key(const H5B_shared_t *shared, const uint8_t *raw, void *_key)
{
- const H5O_layout_chunk_t *layout; /* Chunk layout description */
- H5D_btree_key_t *key = (H5D_btree_key_t *) _key; /* Pointer to decoded key */
- hsize_t tmp_offset; /* Temporary coordinate offset, from file */
- unsigned u; /* Local index variable */
+ const H5O_layout_chunk_t *layout; /* Chunk layout description */
+ H5D_btree_key_t *key = (H5D_btree_key_t *) _key; /* Pointer to decoded key */
+ hsize_t tmp_offset; /* Temporary coordinate offset, from file */
+ unsigned u; /* Local index variable */
+ herr_t ret_value = SUCCEED; /* Return value */
- FUNC_ENTER_STATIC_NOERR
+ FUNC_ENTER_STATIC
/* check args */
HDassert(shared);
@@ -684,16 +685,21 @@ H5D__btree_decode_key(const H5B_shared_t *shared, const uint8_t *raw, void *_key
/* decode */
UINT32DECODE(raw, key->nbytes);
UINT32DECODE(raw, key->filter_mask);
- for(u = 0; u < layout->ndims; u++) {
+ for(u = 0; u < layout->ndims; u++)
+ {
+ if (layout->dim[u] == 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "chunk size must be > 0, dim = %u ", u)
+
/* Retrieve coordinate offset */
- UINT64DECODE(raw, tmp_offset);
+ UINT64DECODE(raw, tmp_offset);
HDassert(0 == (tmp_offset % layout->dim[u]));
/* Convert to a scaled offset */
key->scaled[u] = tmp_offset / layout->dim[u];
} /* end for */
- FUNC_LEAVE_NOAPI(SUCCEED)
+done:
+ FUNC_LEAVE_NOAPI(ret_value)
} /* end H5D__btree_decode_key() */
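
Besides the zero check, the hunk above converts H5D__btree_decode_key() from
FUNC_ENTER_STATIC_NOERR to FUNC_ENTER_STATIC, adds a ret_value, and returns
through a done: label, because the function can now fail. The skeleton below
mimics that control flow in plain C; the comments name the HDF5 macros from
the diff, but the body is only an illustrative approximation of what they do.

typedef int herr_t;
#define SUCCEED   0
#define FAIL    (-1)

/* Shape of the error path introduced above: HGOTO_ERROR records an error
 * and jumps to done, and FUNC_LEAVE_NOAPI(ret_value) returns the status. */
static herr_t
decode_key_shape(const unsigned *dims, unsigned ndims)
{
    herr_t   ret_value = SUCCEED;   /* like the added ret_value              */
    unsigned u;

    /* FUNC_ENTER_STATIC (error-capable) replaces FUNC_ENTER_STATIC_NOERR    */

    for (u = 0; u < ndims; u++) {
        if (dims[u] == 0) {
            ret_value = FAIL;       /* HGOTO_ERROR(...) also pushes a message */
            goto done;              /* ... and jumps to the done: label       */
        }
        /* decode and scale the coordinate for dimension u here */
    }

done:
    return ret_value;               /* FUNC_LEAVE_NOAPI(ret_value)            */
}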
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index e64a60f..4906f3c 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -947,6 +947,8 @@ H5D__chunk_init(H5F_t *f, const H5D_t *dset, hid_t dapl_id)
hsize_t scaled_power2up; /* Scaled value, rounded to next power of 2 */
/* Initial scaled dimension sizes */
+ if(dset->shared->layout.u.chunk.dim[u] == 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "chunk size must be > 0, dim = %u ", u)
rdcc->scaled_dims[u] = dset->shared->curr_dims[u] / dset->shared->layout.u.chunk.dim[u];
if( !(scaled_power2up = H5VM_power2up(rdcc->scaled_dims[u])) )
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 2f67226..e8874a2 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -2580,10 +2580,11 @@ done:
herr_t
H5D__set_extent(H5D_t *dset, const hsize_t *size)
{
- hsize_t curr_dims[H5S_MAX_RANK]; /* Current dimension sizes */
- htri_t changed; /* Whether the dataspace changed size */
- size_t u, v; /* Local index variable */
- herr_t ret_value = SUCCEED; /* Return value */
+ hsize_t curr_dims[H5S_MAX_RANK]; /* Current dimension sizes */
+ htri_t changed; /* Whether the dataspace changed size */
+ size_t u, v; /* Local index variable */
+ unsigned dim_idx; /* Dimension index */
+ herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_PACKAGE_VOL_TAG(dset->oloc.addr)
@@ -2620,11 +2621,11 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
hbool_t update_chunks = FALSE; /* Flag to indicate chunk cache update is needed */
/* Determine if we are shrinking and/or expanding any dimensions */
- for(u = 0; u < (size_t)dset->shared->ndims; u++) {
+ for(dim_idx = 0; dim_idx < dset->shared->ndims; dim_idx++) {
/* Check for various status changes */
- if(size[u] < curr_dims[u])
+ if(size[dim_idx] < curr_dims[dim_idx])
shrink = TRUE;
- if(size[u] > curr_dims[u])
+ if(size[dim_idx] > curr_dims[dim_idx])
expand = TRUE;
/* Chunked storage specific checks */
@@ -2632,30 +2633,33 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
hsize_t scaled; /* Scaled value */
/* Compute the scaled dimension size value */
- scaled = size[u] / dset->shared->layout.u.chunk.dim[u];
+ if(dset->shared->layout.u.chunk.dim[dim_idx] == 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "chunk size must be > 0, dim = %u ", dim_idx)
+
+ scaled = size[dim_idx] / dset->shared->layout.u.chunk.dim[dim_idx];
/* Check if scaled dimension size changed */
- if(scaled != dset->shared->cache.chunk.scaled_dims[u]) {
+ if(scaled != dset->shared->cache.chunk.scaled_dims[dim_idx]) {
hsize_t scaled_power2up; /* Scaled value, rounded to next power of 2 */
/* Update the scaled dimension size value for the current dimension */
- dset->shared->cache.chunk.scaled_dims[u] = scaled;
+ dset->shared->cache.chunk.scaled_dims[dim_idx] = scaled;
/* Check if algorithm for computing hash values will change */
if((scaled > dset->shared->cache.chunk.nslots &&
- dset->shared->cache.chunk.scaled_dims[u] <= dset->shared->cache.chunk.nslots)
+ dset->shared->cache.chunk.scaled_dims[dim_idx] <= dset->shared->cache.chunk.nslots)
|| (scaled <= dset->shared->cache.chunk.nslots &&
- dset->shared->cache.chunk.scaled_dims[u] > dset->shared->cache.chunk.nslots))
+ dset->shared->cache.chunk.scaled_dims[dim_idx] > dset->shared->cache.chunk.nslots))
update_chunks = TRUE;
if(!(scaled_power2up = H5VM_power2up(scaled)))
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")
/* Check if the number of bits required to encode the scaled size value changed */
- if(dset->shared->cache.chunk.scaled_power2up[u] != scaled_power2up) {
+ if(dset->shared->cache.chunk.scaled_power2up[dim_idx] != scaled_power2up) {
/* Update the 'power2up' & 'encode_bits' values for the current dimension */
- dset->shared->cache.chunk.scaled_power2up[u] = scaled_power2up;
- dset->shared->cache.chunk.scaled_encode_bits[u] = H5VM_log2_gen(scaled_power2up);
+ dset->shared->cache.chunk.scaled_power2up[dim_idx] = scaled_power2up;
+ dset->shared->cache.chunk.scaled_encode_bits[dim_idx] = H5VM_log2_gen(scaled_power2up);
/* Indicate that the cached chunk indices need to be updated */
update_chunks = TRUE;
@@ -2664,7 +2668,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
} /* end if */
/* Update the cached copy of the dataset's dimensions */
- dset->shared->curr_dims[u] = size[u];
+ dset->shared->curr_dims[dim_idx] = size[dim_idx];
} /* end for */
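
The commit message also mentions changing a local variable to avoid an
unnecessary cast: the H5Dint.c hunk switches the loop index from size_t u,
compared against (size_t)dset->shared->ndims, to unsigned dim_idx, which
matches the type of ndims directly. A small illustration, with a hypothetical
helper standing in for the real dataset structures:

/* Hypothetical stand-in for updating cached dimension sizes; the real loop
 * walks dset->shared->curr_dims in H5D__set_extent(). */
static void
update_dims(unsigned ndims, unsigned long long *curr,
            const unsigned long long *size)
{
    /* Before: size_t u;  for(u = 0; u < (size_t)ndims; u++) ...        */
    /* After:  an unsigned index matches ndims, so no cast is needed.   */
    unsigned dim_idx;

    for (dim_idx = 0; dim_idx < ndims; dim_idx++)
        curr[dim_idx] = size[dim_idx];
}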
/*-------------------------------------------------------------------------