author    Quincey Koziol <koziol@hdfgroup.org>    2015-05-13 21:30:24 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2015-05-13 21:30:24 (GMT)
commit    7e231953a25fd4cc82f5b14b48b2c928cb3748e7 (patch)
tree      dd725497a7dff53acdab7120b76e628a7b533ffb /src/H5Dint.c
parent    67ba6cb57d84dd34c69930d469ae445697b49731 (diff)
[svn-r27058] Description:
Convert internal chunk structures to use 'scaled' coordinates instead of
absolute coordinates.

Tested on:
    Mac OSX/64 10.10.3 (amazon) w/parallel & serial
    Linux 2.6.x/32 (jam) w/parallel & serial
    Linux 2.6.x/64 (koala) w/serial
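A minimal standalone sketch (not HDF5 library code) of what the "scaled"
coordinates are: a chunk's coordinate in each dimension becomes its absolute
element offset divided by the chunk dimension, i.e. the chunk index along
that dimension. RANK, chunk_scale() and the use of unsigned long long in
place of hsize_t are illustrative assumptions, not part of this commit.

#include <stdio.h>

#define RANK 2                              /* example dataset rank */

/* Convert absolute element offsets to scaled (chunk-index) coordinates */
static void
chunk_scale(const unsigned long long offset[RANK],
            const unsigned long long chunk_dim[RANK],
            unsigned long long scaled[RANK])
{
    unsigned u;                             /* Local index variable */

    for(u = 0; u < RANK; u++)
        scaled[u] = offset[u] / chunk_dim[u];
}

int
main(void)
{
    unsigned long long chunk_dim[RANK] = {10, 25};  /* example chunk dims */
    unsigned long long offset[RANK]    = {30, 75};  /* absolute coordinates */
    unsigned long long scaled[RANK];

    chunk_scale(offset, chunk_dim, scaled);

    /* Prints "scaled = (3, 3)": index of the chunk at offset (30, 75) */
    printf("scaled = (%llu, %llu)\n", scaled[0], scaled[1]);

    return 0;
}

Working with these smaller per-dimension values is what lets the cache code
in the diff below track hash-slot usage and encode bits per dimension.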
Diffstat (limited to 'src/H5Dint.c')
-rw-r--r--    src/H5Dint.c    42
1 file changed, 40 insertions(+), 2 deletions(-)
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 23824e3..2289ac1 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -2240,6 +2240,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
if(changed) {
hbool_t shrink = FALSE; /* Flag to indicate a dimension has shrank */
hbool_t expand = FALSE; /* Flag to indicate a dimension has grown */
+ hbool_t update_chunks = FALSE; /* Flag to indicate chunk cache update is needed */
unsigned u; /* Local index variable */
/* Determine if we are shrinking and/or expanding any dimensions */
@@ -2250,6 +2251,39 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
if(size[u] > curr_dims[u])
expand = TRUE;
+ /* Chunked storage specific checks */
+ if(H5D_CHUNKED == dset->shared->layout.type && dset->shared->ndims > 1) {
+ hsize_t scaled; /* Scaled value */
+
+ /* Compute the scaled dimension size value */
+ scaled = size[u] / dset->shared->layout.u.chunk.dim[u];
+
+ /* Check if scaled dimension size changed */
+ if(scaled != dset->shared->cache.chunk.scaled_dims[u]) {
+ hsize_t scaled_power2up; /* Scaled value, rounded to next power of 2 */
+
+ /* Update the scaled dimension size value for the current dimension */
+ dset->shared->cache.chunk.scaled_dims[u] = scaled;
+
+ /* Check if algorithm for computing hash values will change */
+ if((scaled > dset->shared->cache.chunk.nslots &&
+ dset->shared->cache.chunk.scaled_dims[u] <= dset->shared->cache.chunk.nslots)
+ || (scaled <= dset->shared->cache.chunk.nslots &&
+ dset->shared->cache.chunk.scaled_dims[u] > dset->shared->cache.chunk.nslots))
+ update_chunks = TRUE;
+
+ /* Check if the number of bits required to encode the scaled size value changed */
+ if(dset->shared->cache.chunk.scaled_power2up[u] != (scaled_power2up = H5VM_power2up(scaled))) {
+ /* Update the 'power2up' & 'encode_bits' values for the current dimension */
+ dset->shared->cache.chunk.scaled_power2up[u] = scaled_power2up;
+ dset->shared->cache.chunk.scaled_encode_bits[u] = H5VM_log2_gen(scaled_power2up);
+
+ /* Indicate that the cached chunk indices need to be updated */
+ update_chunks = TRUE;
+ } /* end if */
+ } /* end if */
+ } /* end if */
+
/* Update the cached copy of the dataset's dimensions */
dset->shared->curr_dims[u] = size[u];
} /* end for */
@@ -2263,8 +2297,12 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
/* Set the cached chunk info */
if(H5D__chunk_set_info(dset) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks")
- if(H5D__chunk_update_cache(dset, dxpl_id) < 0)
- HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
+
+ /* Check if updating the chunk cache indices is necessary */
+ if(update_chunks)
+ /* Update the chunk cache indices */
+ if(H5D__chunk_update_cache(dset, dxpl_id) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
} /* end if */
/* Allocate space for the new parts of the dataset, if appropriate */
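A note on the guarded H5D__chunk_update_cache() call in the last hunk: the
cached chunk indices are only rebuilt when a resized dimension crosses the
hash-slot count or changes the number of bits needed to encode its scaled
size. The following standalone sketch shows that power-of-2 relationship;
next_power2() and log2_floor() are stand-in helpers assumed to behave like
H5VM_power2up() and H5VM_log2_gen() above, not the real HDF5 routines.

#include <stdio.h>

/* Round v up to the next power of 2 (v > 0) */
static unsigned long long
next_power2(unsigned long long v)
{
    unsigned long long p = 1;

    while(p < v)
        p <<= 1;
    return p;
}

/* log2 of a power-of-2 input */
static unsigned
log2_floor(unsigned long long v)
{
    unsigned n = 0;

    while(v > 1) {
        v >>= 1;
        n++;
    }
    return n;
}

int
main(void)
{
    unsigned long long scaled;              /* Scaled dimension size */

    for(scaled = 5; scaled <= 9; scaled++)
        printf("scaled=%llu power2up=%llu encode_bits=%u\n",
               scaled, next_power2(scaled),
               log2_floor(next_power2(scaled)));

    return 0;
}

Running this shows encode_bits holding at 3 while the scaled size grows from
5 through 8 and jumping to 4 at 9; the latter is the case where the diff
sets update_chunks and pays for the cache update.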