author    Quincey Koziol <koziol@hdfgroup.org>    2015-04-18 06:39:34 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2015-04-18 06:39:34 (GMT)
commit    1eaaae98214d9042e979209e93e31c490efa4d79 (patch)
tree      45b2258e7edc6ef8040b6e5339cf2e313d883b8f /src/H5Defl.c
parent    524bfed32ef710dd28a3ff400965f01893a3f66b (diff)
download  hdf5-1eaaae98214d9042e979209e93e31c490efa4d79.zip
          hdf5-1eaaae98214d9042e979209e93e31c490efa4d79.tar.gz
          hdf5-1eaaae98214d9042e979209e93e31c490efa4d79.tar.bz2
[svn-r26842] Description:
Cache the dataset's rank & dimension sizes, instead of querying them frequently, to speed up various checks & algorithms. Also, a few minor cleanups.

Tested on:
    MacOSX/64 10.10.2 (amazon) w/serial & parallel
    Linux/32 2.6.18 (jam) w/serial & parallel
Diffstat (limited to 'src/H5Defl.c')
-rw-r--r--  src/H5Defl.c  13
1 file changed, 4 insertions(+), 9 deletions(-)
diff --git a/src/H5Defl.c b/src/H5Defl.c
index 38c8ccd..355492f 100644
--- a/src/H5Defl.c
+++ b/src/H5Defl.c
@@ -126,14 +126,11 @@ static herr_t
 H5D__efl_construct(H5F_t *f, H5D_t *dset)
 {
     size_t dt_size;                     /* Size of datatype */
-    hsize_t dim[H5O_LAYOUT_NDIMS];      /* Current size of data in elements */
-    hsize_t max_dim[H5O_LAYOUT_NDIMS];  /* Maximum size of data in elements */
     hssize_t stmp_size;                 /* Temporary holder for raw data size */
     hsize_t tmp_size;                   /* Temporary holder for raw data size */
     hsize_t max_points;                 /* Maximum elements */
     hsize_t max_storage;                /* Maximum storage size */
-    int ndims;                          /* Rank of dataspace */
-    int i;                              /* Local index variable */
+    unsigned u;                         /* Local index variable */
     herr_t ret_value = SUCCEED;         /* Return value */
 
     FUNC_ENTER_STATIC
@@ -149,11 +146,9 @@ H5D__efl_construct(H5F_t *f, H5D_t *dset)
      */
     /* Check for invalid dataset dimensions */
-    if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dim, max_dim)) < 0)
-        HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize contiguous storage")
-    for(i = 1; i < ndims; i++)
-        if(max_dim[i] > dim[i])
-            HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "only the first dimension can be extendible")
+    for(u = 1; u < dset->shared->ndims; u++)
+        if(dset->shared->max_dims[u] > dset->shared->curr_dims[u])
+            HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "only the first dimension can be extendible")
 
     /* Retrieve the size of the dataset's datatype */
     if(0 == (dt_size = H5T_get_size(dset->shared->type)))
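
The idea behind the patch, shown below as a minimal standalone sketch: the rank and dimension sizes are read into a cached structure once (at dataset open/create time), and later validity checks loop over the cached copies instead of re-querying the dataspace. The names here (MAX_RANK, shared_t, check_extendible) are hypothetical stand-ins for illustration only, not HDF5's real H5O_LAYOUT_NDIMS, H5D_shared_t, or internal API.

#include <stdio.h>

#define MAX_RANK 32     /* hypothetical stand-in for H5O_LAYOUT_NDIMS */

/* Hypothetical "shared" dataset info: rank and dimension sizes are
 * cached once when the dataset is opened, so later checks can read
 * them directly instead of querying the dataspace each time. */
typedef struct {
    unsigned           ndims;                /* cached rank */
    unsigned long long curr_dims[MAX_RANK];  /* cached current dimension sizes */
    unsigned long long max_dims[MAX_RANK];   /* cached maximum dimension sizes */
} shared_t;

/* The check from the patch, expressed against the cached values:
 * with external-file (EFL) storage only the first dimension may grow. */
static int check_extendible(const shared_t *sh)
{
    unsigned u;

    for(u = 1; u < sh->ndims; u++)
        if(sh->max_dims[u] > sh->curr_dims[u])
            return -1;   /* "only the first dimension can be extendible" */
    return 0;
}

int main(void)
{
    shared_t sh = { 2, { 10, 20 }, { 100, 20 } };  /* only dim 0 is extendible */

    printf("check: %d\n", check_extendible(&sh));  /* prints "check: 0" (valid) */
    return 0;
}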