author    Quincey Koziol <koziol@hdfgroup.org>    2008-05-16 03:27:13 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2008-05-16 03:27:13 (GMT)
commit    c23c6b939229efde53e105008d29b8bf441f5d99 (patch)
tree      f38a5b13b938dcf88fa5d7c33b613adaa1f2396c /src/H5V.c
parent    70cdaa12b2bceadd6a6f6d7b66eaad626e8523d6 (diff)
[svn-r15016] Description:
Port revision 15015 back to 1.8 branch:

> Detect chunks that are >4GB before the dataset gets created and return an
> error to the application.
>
> Tweak lots of internal variables that hold the chunk size/dimensions to
> use a 'uint32_t', instead of a 'size_t', so that the integer size is
> constant.
>
> Correct a number of our tests which were creating datasets with chunks
> that were >4GB and add some specific tests for >4GB chunk size detection.
>
> Minor whitespace & other code cleanups.

Tested on:
    Mac OS X/32 10.5.2 (amazon)
Forthcoming testing on other platforms...
Diffstat (limited to 'src/H5V.c')
-rw-r--r--  src/H5V.c | 31
1 file changed, 16 insertions(+), 15 deletions(-)
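
As a rough sketch of the >4GB detection described in the commit message (hypothetical standalone code, not the library's actual dataset-creation check): multiply out the chunk dimensions in a 64-bit integer and reject any chunk whose byte count can no longer be represented in a uint32_t.

#include <stdint.h>
#include <stdio.h>

/* Hypothetical sketch: accumulate the chunk's total size in bytes in a
 * 64-bit value and fail as soon as it exceeds what a uint32_t can hold. */
static int
chunk_size_fits_32bit(unsigned ndims, const uint32_t *dim, uint32_t elmt_size)
{
    uint64_t nbytes = elmt_size;        /* running product, 64-bit */
    unsigned u;

    for(u = 0; u < ndims; u++) {
        nbytes *= dim[u];
        if(nbytes > UINT32_MAX)         /* chunk would be >4GB: reject */
            return 0;
    }
    return 1;
}

int
main(void)
{
    uint32_t ok[2]  = {1024, 1024};     /* 4MB of 4-byte elements: fine */
    uint32_t big[2] = {65536, 65536};   /* 16GB of 4-byte elements: too big */

    printf("%d %d\n", chunk_size_fits_32bit(2, ok, 4),
           chunk_size_fits_32bit(2, big, 4));   /* prints "1 0" */
    return 0;
}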
diff --git a/src/H5V.c b/src/H5V.c
index 3e06636..34cf714 100644
--- a/src/H5V.c
+++ b/src/H5V.c
@@ -399,33 +399,34 @@ done:
*/
htri_t
H5V_hyper_disjointp(unsigned n,
- const hsize_t *offset1, const size_t *size1,
- const hsize_t *offset2, const size_t *size2)
+ const hsize_t *offset1, const uint32_t *size1,
+ const hsize_t *offset2, const uint32_t *size2)
{
unsigned u;
- htri_t ret_value=FALSE; /* Return value */
+ htri_t ret_value = FALSE; /* Return value */
/* Use FUNC_ENTER_NOAPI_NOINIT_NOFUNC here to avoid performance issues */
FUNC_ENTER_NOAPI_NOINIT_NOFUNC(H5V_hyper_disjointp)
- if (!n || !size1 || !size2) HGOTO_DONE(TRUE)
+ if(!n || !size1 || !size2)
+ HGOTO_DONE(TRUE)
- for (u=0; u<n; u++) {
- assert (size1[u]<HSIZET_MAX);
- assert (size2[u]<HSIZET_MAX);
+ for(u = 0; u < n; u++) {
+ HDassert(size1[u] < HSIZET_MAX);
+ HDassert(size2[u] < HSIZET_MAX);
- if (0==size1[u] || 0==size2[u])
+ if(0 == size1[u] || 0 == size2[u])
HGOTO_DONE(TRUE)
- if (((offset1?offset1[u]:0) < (offset2?offset2[u]:0) &&
- ((offset1?offset1[u]:0) + size1[u] <= (offset2?offset2[u]:0))) ||
- ((offset2?offset2[u]:0) < (offset1?offset1[u]:0) &&
- ((offset2?offset2[u]:0) + size2[u] <= (offset1?offset1[u]:0))))
+ if(((offset1 ? offset1[u] : 0) < (offset2 ? offset2[u] : 0) &&
+ ((offset1 ? offset1[u] : 0) + size1[u] <= (offset2 ? offset2[u] : 0))) ||
+ ((offset2 ? offset2[u] : 0) < (offset1 ? offset1[u] : 0) &&
+ ((offset2 ? offset2[u] : 0) + size2[u] <= (offset1 ? offset1[u] : 0))))
HGOTO_DONE(TRUE)
- }
+ } /* end for */
done:
FUNC_LEAVE_NOAPI(ret_value)
-}
+} /* end H5V_hyper_disjointp() */
/*-------------------------------------------------------------------------
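
As an aside on the hunk above: the predicate being reformatted there says two n-dimensional hyperslabs are disjoint iff they are separated along at least one dimension, or either block is empty. A simplified standalone sketch of that logic (offsets are required here, whereas H5V_hyper_disjointp treats a NULL offset array as all zeros; the original's extra '<' comparison is implied once the sizes are known to be nonzero):

#include <stdint.h>
#include <stdio.h>

static int
hyper_disjoint(unsigned n, const uint64_t *off1, const uint32_t *size1,
               const uint64_t *off2, const uint32_t *size2)
{
    unsigned u;

    for(u = 0; u < n; u++) {
        if(0 == size1[u] || 0 == size2[u])
            return 1;                           /* empty block touches nothing */
        if(off1[u] + size1[u] <= off2[u] ||     /* block 1 ends before block 2 */
           off2[u] + size2[u] <= off1[u])       /* block 2 ends before block 1 */
            return 1;
    }
    return 0;                                   /* overlap in every dimension */
}

int
main(void)
{
    uint64_t off1[2]  = {0, 0},   off2[2]  = {10, 10};
    uint32_t size1[2] = {10, 20}, size2[2] = {5, 5};

    /* Separated along dimension 0: [0,10) vs [10,15) do not overlap. */
    printf("disjoint: %d\n", hyper_disjoint(2, off1, size1, off2, size2));
    return 0;
}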
@@ -1225,7 +1226,7 @@ H5V_array_calc(hsize_t offset, unsigned n, const hsize_t *total_size, hsize_t *c
*-------------------------------------------------------------------------
*/
herr_t
-H5V_chunk_index(unsigned ndims, const hsize_t *coord, const size_t *chunk,
+H5V_chunk_index(unsigned ndims, const hsize_t *coord, const uint32_t *chunk,
const hsize_t *down_nchunks, hsize_t *chunk_idx)
{
hsize_t scaled_coord[H5V_HYPER_NDIMS]; /* Scaled, coordinates, in terms of chunks */
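
The second hunk only changes H5V_chunk_index's chunk-size parameter to uint32_t; the computation itself is not shown in this diff, but the declared scaled_coord variable suggests the usual scheme: scale each element coordinate down to a chunk coordinate, then fold those into a linear index using the per-dimension "down" chunk counts. A sketch under that assumption, with illustrative names that are not the library's:

#include <stdint.h>
#include <stdio.h>

static uint64_t
chunk_index(unsigned ndims, const uint64_t *coord, const uint32_t *chunk,
            const uint64_t *down_nchunks)
{
    uint64_t idx = 0;
    unsigned u;

    for(u = 0; u < ndims; u++)
        idx += (coord[u] / chunk[u]) * down_nchunks[u];     /* scale, then fold */
    return idx;
}

int
main(void)
{
    /* A 100x100 dataset with 10x10 chunks has 10 chunks per row, so
     * down_nchunks = {10, 1}.  Element (25, 37) lies in chunk row 2,
     * chunk column 3 -> linear chunk index 2*10 + 3 = 23. */
    uint64_t coord[2] = {25, 37};
    uint32_t chunk[2] = {10, 10};
    uint64_t down[2]  = {10, 1};

    printf("chunk index: %llu\n",
           (unsigned long long)chunk_index(2, coord, chunk, down));
    return 0;
}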