path: root/src/H5Dcontig.c
author      Quincey Koziol <koziol@hdfgroup.org>    2015-04-18 06:39:34 (GMT)
committer   Quincey Koziol <koziol@hdfgroup.org>    2015-04-18 06:39:34 (GMT)
commit      1eaaae98214d9042e979209e93e31c490efa4d79 (patch)
tree        45b2258e7edc6ef8040b6e5339cf2e313d883b8f /src/H5Dcontig.c
parent      524bfed32ef710dd28a3ff400965f01893a3f66b (diff)
[svn-r26842] Description:
Cache the dataset's rank & dimension sizes, instead of querying them frequently, to speed up various checks & algorithms. Also, a few minor cleanups.

Tested on:
    MacOSX/64 10.10.2 (amazon) w/serial & parallel
    Linux/32 2.6.18 (jam) w/serial & parallel
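The caching pattern this description refers to can be sketched with HDF5's public C API rather than the library's internal H5D/H5S routines: query the dataspace rank and dimension sizes once, keep them in a small cache struct, and have later checks (such as the extendibility test changed in the diff below) read the cached copies instead of calling H5Sget_simple_extent_dims() again. This is only an illustrative sketch; the struct and function names (dset_cache_t, cache_extent, is_extendible) and the file/dataset names are assumptions, not part of this patch or of the library's internals.

/* Minimal sketch of caching a dataset's rank & dimension sizes (hypothetical names). */
#include <stdio.h>
#include "hdf5.h"

typedef struct {
    int     ndims;                          /* cached rank of the dataspace */
    hsize_t curr_dims[H5S_MAX_RANK];        /* cached current dimension sizes */
    hsize_t max_dims[H5S_MAX_RANK];         /* cached maximum dimension sizes */
} dset_cache_t;

/* Fill the cache once, right after the dataset is opened. */
static herr_t cache_extent(hid_t dset_id, dset_cache_t *cache)
{
    hid_t space_id = H5Dget_space(dset_id);
    if(space_id < 0)
        return -1;
    cache->ndims = H5Sget_simple_extent_dims(space_id, cache->curr_dims, cache->max_dims);
    H5Sclose(space_id);
    return (cache->ndims < 0) ? -1 : 0;
}

/* Later checks read the cached values instead of re-querying the dataspace,
 * mirroring the max_dims[u] > curr_dims[u] test in the patch below. */
static int is_extendible(const dset_cache_t *cache)
{
    int u;
    for(u = 0; u < cache->ndims; u++)
        if(cache->max_dims[u] == H5S_UNLIMITED || cache->max_dims[u] > cache->curr_dims[u])
            return 1;
    return 0;
}

int main(void)
{
    dset_cache_t cache;
    hid_t file_id = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    hid_t dset_id = H5Dopen2(file_id, "/dset", H5P_DEFAULT);

    if(file_id >= 0 && dset_id >= 0 && cache_extent(dset_id, &cache) == 0)
        printf("rank = %d, extendible = %s\n", cache.ndims,
               is_extendible(&cache) ? "yes" : "no");

    if(dset_id >= 0) H5Dclose(dset_id);
    if(file_id >= 0) H5Fclose(file_id);
    return 0;
}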
Diffstat (limited to 'src/H5Dcontig.c')
-rw-r--r--   src/H5Dcontig.c   13
1 file changed, 4 insertions, 9 deletions
diff --git a/src/H5Dcontig.c b/src/H5Dcontig.c
index dc09768..e913a3f 100644
--- a/src/H5Dcontig.c
+++ b/src/H5Dcontig.c
@@ -396,10 +396,7 @@ H5D__contig_construct(H5F_t *f, H5D_t *dset)
size_t dt_size; /* Size of datatype */
hsize_t tmp_size; /* Temporary holder for raw data size */
size_t tmp_sieve_buf_size; /* Temporary holder for sieve buffer size */
- hsize_t dim[H5O_LAYOUT_NDIMS]; /* Current size of data in elements */
- hsize_t max_dim[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */
- int ndims; /* Rank of dataspace */
- int i; /* Local index variable */
+ unsigned u; /* Local index variable */
herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_STATIC
@@ -415,11 +412,9 @@ H5D__contig_construct(H5F_t *f, H5D_t *dset)
*/
/* Check for invalid dataset dimensions */
- if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dim, max_dim)) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize contiguous storage")
- for(i = 0; i < ndims; i++)
- if(max_dim[i] > dim[i])
- HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible contiguous non-external dataset")
+ for(u = 0; u < dset->shared->ndims; u++)
+ if(dset->shared->max_dims[u] > dset->shared->curr_dims[u])
+ HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible contiguous non-external dataset not allowed")
/* Retrieve the number of elements in the dataspace */
if((snelmts = H5S_GET_EXTENT_NPOINTS(dset->shared->space)) < 0)