author     Binh-Minh Ribler <bmribler@hdfgroup.org>    2018-07-16 04:31:09 (GMT)
committer  Binh-Minh Ribler <bmribler@hdfgroup.org>    2018-07-16 04:31:09 (GMT)
commit     fa5d96dc5efc463015b4455e6ed2b96346b3b12f (patch)
tree       e53c588e8ca90a1cd508e87c6b4b5656b2b2f3fe /src
parent     ec31438afdaf575368938e930eb3af0865a342b3 (diff)
Fixed division-by-zero issues
Description:
Fixed HDFFV-10481 and HDFFV-10477, both division-by-zero issues.
Fixed another occurrence besides the ones that were reported.
Also changed a local variable to avoid an unnecessary cast.
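All three changes apply the same guard: HDF5 works with "scaled" chunk coordinates and dimensions, i.e. an offset or dataset dimension divided by the corresponding chunk dimension, so a chunk dimension of 0 (presumably read from a corrupted chunk layout; an assumption about how the reported failures arise) makes each of those divisions trap. A minimal standalone sketch of the guarded computation, using illustrative names rather than the library's internals:

    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative sketch only: compute scaled dimensions the way the patched
     * code does, but refuse to divide by a zero chunk dimension. */
    static int
    compute_scaled_dims(const uint64_t *curr_dims, const uint32_t *chunk_dims,
                        unsigned ndims, uint64_t *scaled_dims /*out*/)
    {
        unsigned u;

        for (u = 0; u < ndims; u++) {
            if (chunk_dims[u] == 0) {   /* would otherwise raise SIGFPE */
                fprintf(stderr, "chunk size must be > 0, dim = %u\n", u);
                return -1;
            }
            scaled_dims[u] = curr_dims[u] / chunk_dims[u];
        }
        return 0;
    }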
Platforms tested:
Linux/64 (jelly)
Linux/32 (jam)
Darwin (osx1010test)
Diffstat (limited to 'src')
-rw-r--r--   src/H5Dbtree.c | 23
-rw-r--r--   src/H5Dchunk.c |  3
-rw-r--r--   src/H5Dint.c   | 37
3 files changed, 39 insertions, 24 deletions
diff --git a/src/H5Dbtree.c b/src/H5Dbtree.c
index c23f089..04f5419 100644
--- a/src/H5Dbtree.c
+++ b/src/H5Dbtree.c
@@ -666,12 +666,13 @@ done:
 static herr_t
 H5D__btree_decode_key(const H5B_shared_t *shared, const uint8_t *raw, void *_key)
 {
-    const H5O_layout_chunk_t *layout;                   /* Chunk layout description */
-    H5D_btree_key_t *key = (H5D_btree_key_t *) _key;    /* Pointer to decoded key */
-    hsize_t tmp_offset;                                 /* Temporary coordinate offset, from file */
-    unsigned u;                                         /* Local index variable */
+    const H5O_layout_chunk_t *layout;                   /* Chunk layout description */
+    H5D_btree_key_t *key = (H5D_btree_key_t *) _key;    /* Pointer to decoded key */
+    hsize_t tmp_offset;                                 /* Temporary coordinate offset, from file */
+    unsigned u;                                         /* Local index variable */
+    herr_t ret_value = SUCCEED;                         /* Return value */

-    FUNC_ENTER_STATIC_NOERR
+    FUNC_ENTER_STATIC

     /* check args */
     HDassert(shared);
@@ -684,16 +685,22 @@ H5D__btree_decode_key(const H5B_shared_t *shared, const uint8_t *raw, void *_key
     /* decode */
     UINT32DECODE(raw, key->nbytes);
     UINT32DECODE(raw, key->filter_mask);
-    for(u = 0; u < layout->ndims; u++) {
+    for(u = 0; u < layout->ndims; u++)
+    {
+        if (layout->dim[u] == 0)
+            HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL,
+                        "chunk size must be > 0, dim = %u ", u)
+
         /* Retrieve coordinate offset */
-        UINT64DECODE(raw, tmp_offset);
+        UINT64DECODE(raw, tmp_offset);
         HDassert(0 == (tmp_offset % layout->dim[u]));

         /* Convert to a scaled offset */
         key->scaled[u] = tmp_offset / layout->dim[u];
     } /* end for */

-    FUNC_LEAVE_NOAPI(SUCCEED)
+done:
+    FUNC_LEAVE_NOAPI(ret_value)
 } /* end H5D__btree_decode_key() */
diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c
index e3f6410..7b8377d 100644
--- a/src/H5Dchunk.c
+++ b/src/H5Dchunk.c
@@ -947,6 +947,9 @@ H5D__chunk_init(H5F_t *f, const H5D_t *dset, hid_t dapl_id)
             hsize_t scaled_power2up;    /* Scaled value, rounded to next power of 2 */

             /* Initial scaled dimension sizes */
+            if(dset->shared->layout.u.chunk.dim[u] == 0)
+                HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL,
+                            "chunk size must be > 0, dim = %u ", u)
             rdcc->scaled_dims[u] = dset->shared->curr_dims[u] / dset->shared->layout.u.chunk.dim[u];

             if( !(scaled_power2up = H5VM_power2up(rdcc->scaled_dims[u])) )
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 2f67226..0c7fee0 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -2580,10 +2580,11 @@ done:
 herr_t
 H5D__set_extent(H5D_t *dset, const hsize_t *size)
 {
-    hsize_t curr_dims[H5S_MAX_RANK];    /* Current dimension sizes */
-    htri_t changed;                     /* Whether the dataspace changed size */
-    size_t u, v;                        /* Local index variable */
-    herr_t ret_value = SUCCEED;         /* Return value */
+    hsize_t curr_dims[H5S_MAX_RANK];    /* Current dimension sizes */
+    htri_t changed;                     /* Whether the dataspace changed size */
+    size_t u, v;                        /* Local index variable */
+    unsigned dim_idx;                   /* Dimension index */
+    herr_t ret_value = SUCCEED;         /* Return value */

     FUNC_ENTER_PACKAGE_VOL_TAG(dset->oloc.addr)

@@ -2620,11 +2621,11 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
         hbool_t update_chunks = FALSE;  /* Flag to indicate chunk cache update is needed */

         /* Determine if we are shrinking and/or expanding any dimensions */
-        for(u = 0; u < (size_t)dset->shared->ndims; u++) {
+        for(dim_idx = 0; dim_idx < dset->shared->ndims; dim_idx++) {
             /* Check for various status changes */
-            if(size[u] < curr_dims[u])
+            if(size[dim_idx] < curr_dims[dim_idx])
                 shrink = TRUE;
-            if(size[u] > curr_dims[u])
+            if(size[dim_idx] > curr_dims[dim_idx])
                 expand = TRUE;

             /* Chunked storage specific checks */
@@ -2632,30 +2633,34 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
                 hsize_t scaled;             /* Scaled value */

                 /* Compute the scaled dimension size value */
-                scaled = size[u] / dset->shared->layout.u.chunk.dim[u];
+                if(dset->shared->layout.u.chunk.dim[dim_idx] == 0)
+                    HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL,
+                                "chunk size must be > 0, dim = %u ", dim_idx)
+
+                scaled = size[dim_idx] / dset->shared->layout.u.chunk.dim[dim_idx];

                 /* Check if scaled dimension size changed */
-                if(scaled != dset->shared->cache.chunk.scaled_dims[u]) {
+                if(scaled != dset->shared->cache.chunk.scaled_dims[dim_idx]) {
                     hsize_t scaled_power2up;    /* Scaled value, rounded to next power of 2 */

                     /* Update the scaled dimension size value for the current dimension */
-                    dset->shared->cache.chunk.scaled_dims[u] = scaled;
+                    dset->shared->cache.chunk.scaled_dims[dim_idx] = scaled;

                     /* Check if algorithm for computing hash values will change */
                     if((scaled > dset->shared->cache.chunk.nslots &&
-                            dset->shared->cache.chunk.scaled_dims[u] <= dset->shared->cache.chunk.nslots)
+                            dset->shared->cache.chunk.scaled_dims[dim_idx] <= dset->shared->cache.chunk.nslots)
                             || (scaled <= dset->shared->cache.chunk.nslots &&
-                            dset->shared->cache.chunk.scaled_dims[u] > dset->shared->cache.chunk.nslots))
+                            dset->shared->cache.chunk.scaled_dims[dim_idx] > dset->shared->cache.chunk.nslots))
                         update_chunks = TRUE;

                     if(!(scaled_power2up = H5VM_power2up(scaled)))
                         HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get the next power of 2")

                     /* Check if the number of bits required to encode the scaled size value changed */
-                    if(dset->shared->cache.chunk.scaled_power2up[u] != scaled_power2up) {
+                    if(dset->shared->cache.chunk.scaled_power2up[dim_idx] != scaled_power2up) {
                         /* Update the 'power2up' & 'encode_bits' values for the current dimension */
-                        dset->shared->cache.chunk.scaled_power2up[u] = scaled_power2up;
-                        dset->shared->cache.chunk.scaled_encode_bits[u] = H5VM_log2_gen(scaled_power2up);
+                        dset->shared->cache.chunk.scaled_power2up[dim_idx] = scaled_power2up;
+                        dset->shared->cache.chunk.scaled_encode_bits[dim_idx] = H5VM_log2_gen(scaled_power2up);

                         /* Indicate that the cached chunk indices need to be updated */
                         update_chunks = TRUE;
@@ -2664,7 +2669,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size)
             } /* end if */

             /* Update the cached copy of the dataset's dimensions */
-            dset->shared->curr_dims[u] = size[u];
+            dset->shared->curr_dims[dim_idx] = size[dim_idx];
         } /* end for */

 /*-------------------------------------------------------------------------
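One knock-on change worth noting: H5D__btree_decode_key() previously could not fail, so it used FUNC_ENTER_STATIC_NOERR and returned SUCCEED unconditionally. Once the new HGOTO_ERROR check can fire, the function needs a ret_value variable and a done: label to jump to, hence the switch to FUNC_ENTER_STATIC and FUNC_LEAVE_NOAPI(ret_value). Stripped of HDF5's macros, the error-handling shape it adopts looks roughly like this (a sketch with invented names, not the library code):

    #include <stdint.h>

    /* Sketch of the goto-based cleanup pattern: on failure, record an error
     * code in ret_value and jump to a single exit point. */
    static int
    decode_key_sketch(const uint32_t *chunk_dims, unsigned ndims,
                      const uint64_t *raw_offsets, uint64_t *scaled /*out*/)
    {
        int ret_value = 0;              /* counterpart of herr_t ret_value = SUCCEED */
        unsigned u;

        for (u = 0; u < ndims; u++) {
            if (chunk_dims[u] == 0) {
                ret_value = -1;         /* counterpart of HGOTO_ERROR(..., FAIL, ...) */
                goto done;
            }
            scaled[u] = raw_offsets[u] / chunk_dims[u];
        }

    done:
        return ret_value;               /* counterpart of FUNC_LEAVE_NOAPI(ret_value) */
    }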