author     Quincey Koziol <koziol@hdfgroup.org>        2005-06-18 05:10:22 (GMT)
committer  Quincey Koziol <koziol@hdfgroup.org>        2005-06-18 05:10:22 (GMT)
commit     d6410304369bea0644da9ab594f22af6201fa16e (patch)
tree       dbbc72537715fa4c4fb53e153970735dc3003a66 /src/H5Dio.c
parent     04e424638ad15f4663f77468f70ec58b7ac8df22 (diff)
[svn-r10951] Purpose:
Bug fix
Description:
Hyperslab selections that had a selection offset and were applied to a
chunked dataset could trigger an infinite loop or a core dump when the same
selection was reused with different selection offsets.
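
A minimal sketch of the access pattern described above, using the public C API
of the time; the file name, dataset name, and extents are made up for
illustration and do not come from this commit:

    /* Sketch: one chunked dataset, one hyperslab selection reused with a
     * changing selection offset -- the pattern described above.  Error
     * checking is omitted and all names/sizes are hypothetical. */
    #include "hdf5.h"

    int main(void)
    {
        hsize_t  dims[1]  = {100};          /* 1-D dataset of 100 ints */
        hsize_t  chunk[1] = {10};           /* chunked in blocks of 10 */
        hsize_t  start[1] = {0};
        hsize_t  count[1] = {20};           /* 20-element hyperslab */
        hssize_t offset[1];
        int      buf[20]  = {0};
        hid_t    file, fspace, mspace, dcpl, dset;
        int      i;

        file   = H5Fcreate("repro.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
        fspace = H5Screate_simple(1, dims, NULL);
        mspace = H5Screate_simple(1, count, NULL);
        dcpl   = H5Pcreate(H5P_DATASET_CREATE);
        H5Pset_chunk(dcpl, 1, chunk);
        dset   = H5Dcreate(file, "data", H5T_NATIVE_INT, fspace, dcpl); /* 1.6-era signature */

        /* Select the hyperslab once... */
        H5Sselect_hyperslab(fspace, H5S_SELECT_SET, start, NULL, count, NULL);

        /* ...then reuse it several times, moving only the selection offset */
        for(i = 0; i < 4; i++) {
            offset[0] = i * 20;
            H5Soffset_simple(fspace, offset);
            H5Dwrite(dset, H5T_NATIVE_INT, mspace, fspace, H5P_DEFAULT, buf);
        }

        H5Dclose(dset);
        H5Pclose(dcpl);
        H5Sclose(mspace);
        H5Sclose(fspace);
        H5Fclose(file);
        return 0;
    }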
Solution:
"Normalize" the selection with the selection offset, generate the
selections for the chunks overlapped and then "denormalize" the selection.
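
In outline, the fix brackets the chunk-mapping work in H5D_create_chunk_map()
with a normalize/denormalize pair; this is only a condensed view of the diff
below, using the internal identifiers that appear in the patch:

    hssize_t old_offset[H5O_LAYOUT_NDIMS];  /* saved selection offset */
    hbool_t  file_space_normalized = FALSE;

    /* Fold the selection offset into the selection itself */
    if(H5S_hyper_normalize_offset(file_space, old_offset) < 0)
        HGOTO_ERROR(H5E_DATASET, H5E_BADSELECT, FAIL, "unable to normalize dataspace by offset")
    file_space_normalized = TRUE;

    /* ... generate the per-chunk file selections against the
     *     normalized (offset-free) selection ... */

    done:
        /* Put the caller's selection offset back, even on the error path */
        if(file_space_normalized)
            if(H5S_hyper_denormalize_offset(file_space, old_offset) < 0)
                HGOTO_ERROR(H5E_DATASET, H5E_BADSELECT, FAIL, "unable to denormalize dataspace by offset")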
Platforms tested:
FreeBSD 4.11 (sleipnir)
Too minor to require h5committest
Diffstat (limited to 'src/H5Dio.c')
-rw-r--r--   src/H5Dio.c | 22
1 file changed, 16 insertions, 6 deletions
diff --git a/src/H5Dio.c b/src/H5Dio.c
index b5259a3..f1d12f8 100644
--- a/src/H5Dio.c
+++ b/src/H5Dio.c
@@ -2358,6 +2358,8 @@ H5D_create_chunk_map(const H5D_t *dataset, const H5T_t *mem_type, const H5S_t *f
     H5S_t *tmp_mspace=NULL;     /* Temporary memory dataspace */
     H5S_t *equiv_mspace=NULL;   /* Equivalent memory dataspace */
     hbool_t equiv_mspace_init=0;/* Equivalent memory dataspace was created */
+    hssize_t old_offset[H5O_LAYOUT_NDIMS];  /* Old selection offset */
+    hbool_t file_space_normalized = FALSE;  /* File dataspace was normalized */
     hid_t f_tid=(-1);           /* Temporary copy of file datatype for iteration */
     hbool_t iter_init=0;        /* Selection iteration info has been initialized */
     unsigned f_ndims;           /* The number of dimensions of the file's dataspace */
@@ -2404,6 +2406,16 @@ H5D_create_chunk_map(const H5D_t *dataset, const H5T_t *mem_type, const H5S_t *f
     if(H5S_get_simple_extent_dims(file_space, fm->f_dims, NULL)<0)
         HGOTO_ERROR (H5E_DATASPACE, H5E_CANTGET, FAIL, "unable to get dimensionality")
 
+    /* Normalize hyperslab selections by adjusting them by the offset */
+    /* (It might be worthwhile to normalize both the file and memory dataspaces
+     * before any (contiguous, chunked, etc) file I/O operation, in order to
+     * speed up hyperslab calculations by removing the extra checks and/or
+     * additions involving the offset and the hyperslab selection -QAK)
+     */
+    if(H5S_hyper_normalize_offset(file_space, old_offset)<0)
+        HGOTO_ERROR (H5E_DATASET, H5E_BADSELECT, FAIL, "unable to normalize dataspace by offset")
+    file_space_normalized = TRUE;
+
     /* Decide the number of chunks in each dimension*/
     for(u=0; u<f_ndims; u++) {
         /* Keep the size of the chunk dimensions as hsize_t for various routines */
@@ -2564,6 +2576,10 @@ done:
         if(H5I_dec_ref(f_tid)<0)
             HDONE_ERROR (H5E_DATASET, H5E_CANTFREE, FAIL, "Can't decrement temporary datatype ID")
     } /* end if */
+    if(file_space_normalized) {
+        if(H5S_hyper_denormalize_offset(file_space, old_offset)<0)
+            HGOTO_ERROR (H5E_DATASET, H5E_BADSELECT, FAIL, "unable to normalize dataspace by offset")
+    } /* end if */
 
     FUNC_LEAVE_NOAPI(ret_value)
 } /* end H5D_create_chunk_map() */
@@ -2722,12 +2738,6 @@ H5D_create_chunk_file_map_hyper(const fm_map *fm)
             HGOTO_ERROR (H5E_DATASPACE, H5E_CANTINIT, FAIL, "unable to convert selection to span trees")
         } /* end if */
 
-        /* Normalize hyperslab selections by adjusting them by the offset */
-        if(H5S_hyper_normalize_offset(tmp_fchunk)<0) {
-            (void)H5S_close(tmp_fchunk);
-            HGOTO_ERROR (H5E_DATASET, H5E_BADSELECT, FAIL, "unable to normalize dataspace by offset")
-        } /* end if */
-
         /* "AND" temporary chunk and current chunk */
        if(H5S_select_hyperslab(tmp_fchunk,H5S_SELECT_AND,coords,NULL,fm->chunk_dim,NULL)<0) {
            (void)H5S_close(tmp_fchunk);