author    Quincey Koziol <koziol@hdfgroup.org>    2009-07-02 23:38:38 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2009-07-02 23:38:38 (GMT)
commit    856a80b949ba7f980487a7a3185afbfd7c77a724 (patch)
tree      4d5b144078985ac6a7ea7e483b0f5c19e4f665d4 /src/H5Ddeprec.c
parent    b0df711c3c1a31d8ddfc6600308b42ee81c8a27f (diff)
download  hdf5-856a80b949ba7f980487a7a3185afbfd7c77a724.zip
          hdf5-856a80b949ba7f980487a7a3185afbfd7c77a724.tar.gz
          hdf5-856a80b949ba7f980487a7a3185afbfd7c77a724.tar.bz2
[svn-r17150] Description:
Bring r17148:17149 from trunk into 1.8 branch:

    Refactor how chunked dataset information is computed, moving it earlier
    and avoiding more recomputation.

Tested on:
    FreeBSD/32 6.3 (duty) in debug mode
    FreeBSD/64 6.3 (liberty) w/C++ & FORTRAN, in debug mode
    Linux/32 2.6 (jam) w/PGI compilers, w/C++ & FORTRAN, w/threadsafe, in debug mode
    Linux/64-amd64 2.6 (smirom) w/Intel compilers, w/default API=1.6.x, w/C++ & FORTRAN, in production mode
    Solaris/32 2.10 (linew) w/deprecated symbols disabled, w/C++ & FORTRAN, w/szip filter, in production mode
    Linux/64-ia64 2.6 (cobalt) w/Intel compilers, w/C++ & FORTRAN, in production mode
    Linux/64-ia64 2.4 (tg-login3) w/parallel, w/FORTRAN, in debug mode
    Linux/64-amd64 2.6 (abe) w/parallel, w/FORTRAN, in production mode
    Mac OS X/32 10.5.7 (amazon) in debug mode
    Mac OS X/32 10.5.7 (amazon) w/C++ & FORTRAN, w/threadsafe, in production mode
Diffstat (limited to 'src/H5Ddeprec.c')
-rw-r--r--  src/H5Ddeprec.c  5
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index 5b0621e..637cb70 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -346,9 +346,12 @@ H5D_extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
     /* Updated the dataset's info if the dataspace was successfully extended */
     if(changed) {
         /* Update the index values for the cached chunks for this dataset */
-        if(H5D_CHUNKED == dataset->shared->layout.type)
+        if(H5D_CHUNKED == dataset->shared->layout.type) {
+            if(H5D_chunk_set_info(dataset) < 0)
+                HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks")
             if(H5D_chunk_update_cache(dataset, dxpl_id) < 0)
                 HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
+        } /* end if */

         /* Allocate space for the new parts of the dataset, if appropriate */
         fill = &dataset->shared->dcpl_cache.fill;
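
For context, the internal H5D_extend() routine patched above backs the deprecated public H5Dextend() API that H5Ddeprec.c implements. The following is a minimal sketch, not part of this commit, of an application-level call that extends a chunked dataset and therefore exercises the updated chunk-info/cache path; the file and dataset names are placeholders chosen for illustration.

#include "hdf5.h"

int
main(void)
{
    hid_t   file, space, dcpl, dset;
    hsize_t dims[1]     = {10};             /* initial size                          */
    hsize_t maxdims[1]  = {H5S_UNLIMITED};  /* extendible dataspace                  */
    hsize_t chunk[1]    = {5};              /* chunked layout (required to extend)   */
    hsize_t new_size[1] = {20};             /* size requested via H5Dextend()        */

    /* File and dataset names below are hypothetical, for illustration only */
    file  = H5Fcreate("extend_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    space = H5Screate_simple(1, dims, maxdims);
    dcpl  = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 1, chunk);
    dset  = H5Dcreate2(file, "data", H5T_NATIVE_INT, space,
                       H5P_DEFAULT, dcpl, H5P_DEFAULT);

    /* Deprecated 1.6-style call; it reaches H5D_extend(), which after this
     * change refreshes the chunk info (H5D_chunk_set_info) before updating
     * the cached chunk indices */
    if(H5Dextend(dset, new_size) < 0)
        return 1;

    H5Dclose(dset);
    H5Pclose(dcpl);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}

Note that H5Dextend() is only compiled in when the library is built with deprecated symbols enabled (hence the separate "deprecated symbols disabled" test configuration above); newer code would call H5Dset_extent() instead.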