author    Neil Fortner <nfortne2@hdfgroup.org>    2010-03-24 16:41:14 (GMT)
committer Neil Fortner <nfortne2@hdfgroup.org>    2010-03-24 16:41:14 (GMT)
commit    d3d4d8782bbed2af7d1593759e3665c459c0ab1c (patch)
tree      f3f3f449f62cc3ea2462112bbd38a5b1057c1d67 /src/H5Ddeprec.c
parent    2ee007117626d307a248b6b02f5ddb71766cb35f (diff)
[svn-r18445] Purpose: Fix bug 1637
Description: Modified the algorithm for extending a dataset with early allocation so that it only processes the new chunks. Formerly, it looped over all chunks, checking whether each existed in the cache and on disk, which caused major performance problems with large numbers of chunks.
Tested: jam, linew, amani (h5committest)
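
For context, the change lands in the deprecated H5Dextend() entry point, which H5Ddeprec.c implements on top of H5D_extend(). Below is a minimal sketch of a program that exercises the affected path, assuming a library built with deprecated symbols enabled (the file name, dataset name, and sizes are arbitrary):

#include "hdf5.h"

int main(void)
{
    hsize_t dims[1]    = {1024};
    hsize_t maxdims[1] = {H5S_UNLIMITED};
    hsize_t chunk[1]   = {64};
    hsize_t newsize[1] = {1024 * 1024};

    hid_t file  = H5Fcreate("extend.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t space = H5Screate_simple(1, dims, maxdims);
    hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);

    /* Chunked layout plus early allocation: the combination this commit speeds up */
    H5Pset_chunk(dcpl, 1, chunk);
    H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY);

    hid_t dset = H5Dcreate2(file, "data", H5T_NATIVE_INT, space, H5P_DEFAULT,
                            dcpl, H5P_DEFAULT);

    /* Deprecated call that reaches H5D_extend(); before this fix the
     * allocation pass revisited every chunk, not only the new ones */
    H5Dextend(dset, newsize);

    H5Dclose(dset);
    H5Pclose(dcpl);
    H5Sclose(space);
    H5Fclose(file);
    return 0;
}

Extending from 1024 to 1048576 elements here produces over sixteen thousand 64-element chunks; with the old algorithm, the extend walked all of them, checking each one against the cache and the disk.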
Diffstat (limited to 'src/H5Ddeprec.c')
-rw-r--r--  src/H5Ddeprec.c | 11
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index de7f48a..e208910 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -320,6 +320,8 @@ H5D_extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
 {
     htri_t changed;                     /* Flag to indicate that the dataspace was successfully extended */
     H5S_t *space;                       /* Dataset's dataspace */
+    int rank;                           /* Dataspace # of dimensions */
+    hsize_t curr_dims[H5O_LAYOUT_NDIMS];/* Current dimension sizes */
     H5O_fill_t *fill;                   /* Dataset's fill value */
     herr_t ret_value = SUCCEED;         /* Return value */
 
@@ -339,8 +341,12 @@ H5D_extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
      * able to muck things up.
      */
 
-    /* Increase the size of the data space */
+    /* Retrieve the current dimensions */
     space = dataset->shared->space;
+    if((rank = H5S_get_simple_extent_dims(space, curr_dims, NULL)) < 0)
+        HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
+
+    /* Increase the size of the data space */
     if((changed = H5S_extend(space, size)) < 0)
         HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of data space")
 
@@ -357,7 +363,8 @@ H5D_extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
     /* Allocate space for the new parts of the dataset, if appropriate */
    fill = &dataset->shared->dcpl_cache.fill;
     if(fill->alloc_time == H5D_ALLOC_TIME_EARLY)
-        if(H5D_alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE) < 0)
+        if(H5D_alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE,
+                curr_dims) < 0)
             HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value")
 
     /* Mark the dataspace as dirty, for later writing to the file */
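
The heart of the fix is the new curr_dims argument: H5D_alloc_storage() now receives the extent as it was before the extend, so the early-allocation pass can confine itself to the chunks the extend actually exposed instead of re-checking every chunk in the dataset. The predicate below is a hypothetical illustration of that idea, not the library's internal code (chunk_is_new and its parameter names are made up for this sketch):

#include <stdbool.h>
#include <stdint.h>

/* Hypothetical sketch: a chunk needs allocation only if, in at least one
 * dimension, its first element starts at or beyond the old extent.
 * Every chunk that lies entirely inside the old dimensions was already
 * written when early allocation first ran. */
static bool chunk_is_new(const uint64_t *chunk_idx,  /* chunk index per dimension */
                         const uint64_t *chunk_dims, /* chunk size per dimension */
                         const uint64_t *old_dims,   /* extent before the extend */
                         int rank)
{
    for(int u = 0; u < rank; u++)
        if(chunk_idx[u] * chunk_dims[u] >= old_dims[u])
            return true;    /* starts past the old edge: newly exposed */
    return false;           /* fully inside the old extent: skip it */
}

Skipping on a test like this makes the cost of an extend proportional to the number of new chunks rather than the total number of chunks, which is the improvement the description above claims.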