author    Quincey Koziol <koziol@hdfgroup.org>    2007-10-11 22:24:35 (GMT)
committer Quincey Koziol <koziol@hdfgroup.org>    2007-10-11 22:24:35 (GMT)
commit    637fa77ea7e720eb7b5a932ceeb0d05beeefb978 (patch)
tree      38eb85b7a0489fb9730d86bd042bf402c47bf897 /src/H5Ddeprec.c
parent    e5413fa795c68dda46c11aee4a3615f52377b0df (diff)
[svn-r14203] Description:
Break up H5D source file into H5D/H5Dint/H5Ddeprec

Attempt fix for "szip noencoder" build failure.

Tested on:
    FreeBSD/32 6.2 (duty) in debug mode
    FreeBSD/64 6.2 (liberty) w/C++ & FORTRAN, in debug mode
    Linux/32 2.6 (kagiso) w/PGI compilers, w/C++ & FORTRAN, w/threadsafe, in debug mode
    Linux/64-amd64 2.6 (smirom) w/default API=1.6.x, w/C++ & FORTRAN, in production mode
    Linux/64-ia64 2.6 (cobalt) w/Intel compilers, w/C++ & FORTRAN, in production mode
    Solaris/32 2.10 (linew) w/deprecated symbols disabled, w/C++ & FORTRAN, w/szip filter, in production mode
    Mac OS X/32 10.4.10 (amazon) in debug mode
Diffstat (limited to 'src/H5Ddeprec.c')
-rw-r--r--    src/H5Ddeprec.c    | 28
1 file changed, 4 insertions(+), 24 deletions(-)
diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c
index d23df69..95f89cf 100644
--- a/src/H5Ddeprec.c
+++ b/src/H5Ddeprec.c
@@ -327,30 +327,9 @@ H5D_extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
HDassert(dataset);
HDassert(size);
- /* Check if the filters in the DCPL will need to encode, and if so, can they?
- * Filters need encoding if fill value is defined and a fill policy is set that requires
- * writing on an extend.
- */
- fill = &dataset->shared->dcpl_cache.fill;
- if(!dataset->shared->checked_filters) {
- H5D_fill_value_t fill_status; /* Whether the fill value is defined */
-
- /* Retrieve the "defined" status of the fill value */
- if(H5P_is_fill_value_defined(fill, &fill_status) < 0)
- HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "Couldn't retrieve fill value from dataset.")
-
- /* See if we can check the filter status */
- if(fill_status == H5D_FILL_VALUE_DEFAULT || fill_status == H5D_FILL_VALUE_USER_DEFINED) {
- if(fill->fill_time == H5D_FILL_TIME_ALLOC ||
- (fill->fill_time == H5D_FILL_TIME_IFSET && fill_status == H5D_FILL_VALUE_USER_DEFINED)) {
- /* Filters must have encoding enabled. Ensure that all filters can be applied */
- if(H5Z_can_apply(dataset->shared->dcpl_id, dataset->shared->type_id) < 0)
- HGOTO_ERROR(H5E_PLINE, H5E_CANAPPLY, FAIL, "can't apply filters")
-
- dataset->shared->checked_filters = TRUE;
- } /* end if */
- } /* end if */
- } /* end if */
+ /* Check if the filters in the DCPL will need to encode, and if so, can they? */
+ if(H5D_check_filters(dataset) < 0)
+ HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't apply filters")
/*
* NOTE: Restrictions on extensions were checked when the dataset was
@@ -375,6 +354,7 @@ H5D_extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id)
HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
/* Allocate space for the new parts of the dataset, if appropriate */
+ fill = &dataset->shared->dcpl_cache.fill;
if(fill->alloc_time == H5D_ALLOC_TIME_EARLY)
if(H5D_alloc_storage(dataset->oloc.file, dxpl_id, dataset, H5D_ALLOC_EXTEND, TRUE, FALSE) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value")