path: root/src/H5Dint.c
author    Quincey Koziol <koziol@hdfgroup.org>    2009-04-23 18:25:16 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2009-04-23 18:25:16 (GMT)
commit  f098d20ab95c885414f82ee310b3692929e76227 (patch)
tree    8f9eaca942902a988ccc12e983144d6b3d0b04c4 /src/H5Dint.c
parent  b1f8b21130a4f95d2dcd4b9343ed05e93c33f35c (diff)
[svn-r16847] Description:
Bring r16846 from revise_chunks branch back to trunk:

Fix broken (for how long?) H5_ASSIGN_OVERFLOW macro to actually detect
overflows during assignments, along with several errors it [now] detected.
Cleaned up a few minor warnings and/or pieces of code also.

Tested on: FreeBSD/32 6.3 (duty) in debug mode
(h5committest not needed - multi-platform test performed on branch)
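The macro named in the commit, H5_ASSIGN_OVERFLOW(dst, src, srctype, dsttype), guards assignments between integer types of different width or signedness (its old use is visible in the second hunk below). As a rough sketch of the underlying idea only (the ASSIGN_CHECKED macro and the main() driver here are hypothetical and are not HDF5's actual implementation), one way to detect this kind of overflow is to perform the assignment and then check that the value survives a round trip back to the source type:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-in for the idea behind H5_ASSIGN_OVERFLOW: do the
 * assignment, then assert that casting the result back to the source type
 * reproduces the original value, i.e. nothing was truncated or re-signed.
 * (Simplified: evaluates src more than once, unlike a production macro.) */
#define ASSIGN_CHECKED(dst, src, srctype, dsttype)  \
    do {                                            \
        (dst) = (dsttype)(src);                     \
        assert((srctype)(dst) == (src));            \
    } while(0)

int main(void)
{
    int64_t  big = INT64_C(1) << 40;    /* value that does not fit in 32 bits */
    uint32_t narrow;

    ASSIGN_CHECKED(narrow, big, int64_t, uint32_t); /* assertion fires: overflow caught */
    printf("%u\n", (unsigned)narrow);               /* not reached with assertions enabled */
    return 0;
}

With assertions enabled, the narrowing assignment trips the check instead of silently storing a truncated value, which is the behavior the fixed macro is meant to provide.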
Diffstat (limited to 'src/H5Dint.c')
-rw-r--r--  src/H5Dint.c | 46
1 file changed, 35 insertions, 11 deletions
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 0e71ef8..95be734 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -457,8 +457,9 @@ H5D_get_space_status(H5D_t *dset, H5D_space_status_t *allocation, hid_t dxpl_id)
 {
     H5S_t *space;               /* Dataset's dataspace */
     hsize_t space_allocated;    /* The number of bytes allocated for chunks */
-    hssize_t total_elem;        /* The total number of elements in dataspace */
-    size_t type_size;           /* The size of the datatype for the dataset */
+    hssize_t snelmts;           /* Temporary holder for number of elements in dataspace */
+    hsize_t nelmts;             /* Number of elements in dataspace */
+    size_t dt_size;             /* Size of datatype */
     hsize_t full_size;          /* The number of bytes in the dataset when fully populated */
     herr_t ret_value = SUCCEED;
@@ -471,16 +472,20 @@ H5D_get_space_status(H5D_t *dset, H5D_space_status_t *allocation, hid_t dxpl_id)
     HDassert(space);

     /* Get the total number of elements in dataset's dataspace */
-    if((total_elem=H5S_GET_EXTENT_NPOINTS(space)) < 0)
-        HGOTO_ERROR(H5E_DATASET, H5E_CANTCOUNT, FAIL, "unable to get # of dataspace elements")
+    if((snelmts = H5S_GET_EXTENT_NPOINTS(space)) < 0)
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve number of elements in dataspace")
+    nelmts = (hsize_t)snelmts;

     /* Get the size of the dataset's datatype */
-    if((type_size = H5T_get_size(dset->shared->type)) == 0)
-        HGOTO_ERROR(H5E_DATASET, H5E_CANTCOUNT, FAIL, "unable to get size of datatype")
+    if(0 == (dt_size = H5T_GET_SIZE(dset->shared->type)))
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve size of datatype")

     /* Compute the maximum size of the dataset in bytes */
-    H5_CHECK_OVERFLOW(total_elem,hssize_t,hsize_t);
-    full_size=((hsize_t)total_elem)*type_size;
+    full_size = nelmts * dt_size;
+
+    /* Check for overflow during multiplication */
+    if(nelmts != (full_size / dt_size))
+        HGOTO_ERROR(H5E_DATASET, H5E_OVERFLOW, FAIL, "size of dataset's storage overflowed")

     /* Difficult to error check, since the error value is 0 and 0 is a valid value... :-/ */
     space_allocated = H5D_get_storage_size(dset, dxpl_id);
@@ -1392,10 +1397,29 @@ H5D_open_oid(H5D_t *dataset, hid_t dapl_id, hid_t dxpl_id)
      * truncate the dimension sizes to 32-bits of information. - QAK 5/26/04
      */
     if(dataset->shared->layout.version < 3) {
-        hssize_t tmp_size;          /* Temporary holder for raw data size */
+        hssize_t snelmts;           /* Temporary holder for number of elements in dataspace */
+        hsize_t nelmts;             /* Number of elements in dataspace */
+        size_t dt_size;             /* Size of datatype */
+        hsize_t tmp_size;           /* Temporary holder for raw data size */
+
+        /* Retrieve the number of elements in the dataspace */
+        if((snelmts = H5S_GET_EXTENT_NPOINTS(dataset->shared->space)) < 0)
+            HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve number of elements in dataspace")
+        nelmts = (hsize_t)snelmts;
+
+        /* Get the datatype's size */
+        if(0 == (dt_size = H5T_GET_SIZE(dataset->shared->type)))
+            HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to retrieve size of datatype")
+
+        /* Compute the size of the dataset's contiguous storage */
+        tmp_size = nelmts * dt_size;
+
+        /* Check for overflow during multiplication */
+        if(nelmts != (tmp_size / dt_size))
+            HGOTO_ERROR(H5E_DATASET, H5E_OVERFLOW, FAIL, "size of dataset's storage overflowed")

-        tmp_size = H5S_GET_EXTENT_NPOINTS(dataset->shared->space) * H5T_get_size(dataset->shared->type);
-        H5_ASSIGN_OVERFLOW(dataset->shared->layout.u.contig.size, tmp_size, hssize_t, hsize_t);
+        /* Assign the dataset's contiguous storage size */
+        dataset->shared->layout.u.contig.size = tmp_size;
     } /* end if */

     /* Get the sieve buffer size for this dataset */
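Both hunks apply the same multiply-then-divide overflow test: compute the product, then confirm that dividing it by one factor reproduces the other. The standalone sketch below only illustrates that pattern; the function and variable names are made up for this example and are not HDF5 API, and dt_size is assumed to be non-zero, which the patch guarantees by erroring out on a zero datatype size first.

#include <stdint.h>
#include <stdio.h>

/* Sketch of the multiply-then-divide overflow check used in the patch:
 * unsigned multiplication wraps on overflow, so the product is valid only
 * if dividing it by dt_size reproduces nelmts. dt_size must be non-zero.
 * Returns 0 on success, -1 on overflow. */
static int checked_storage_size(uint64_t nelmts, uint64_t dt_size, uint64_t *size_out)
{
    uint64_t full_size = nelmts * dt_size;

    if(nelmts != full_size / dt_size)
        return -1;              /* product wrapped around: overflow */

    *size_out = full_size;
    return 0;
}

int main(void)
{
    uint64_t size;

    /* 2^60 elements of 32 bytes each does not fit in a 64-bit byte count */
    if(checked_storage_size(UINT64_C(1) << 60, 32, &size) < 0)
        printf("overflow detected\n");
    else
        printf("size = %llu bytes\n", (unsigned long long)size);

    return 0;
}

The check is sound because unsigned multiplication wraps modulo 2^64, so a wrapped product can no longer be divided by dt_size to recover the original element count.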