summaryrefslogtreecommitdiffstats
path: root/src
diff options
context:
space:
mode:
authorQuincey Koziol <koziol@hdfgroup.org>2000-11-10 22:26:53 (GMT)
committerQuincey Koziol <koziol@hdfgroup.org>2000-11-10 22:26:53 (GMT)
commit38865598298878b740e6cdc1fbd60f4f386f5c71 (patch)
tree900eeb4ee2fd46f837826195f47e2e129ace7971 /src
parent169579ee4d4584274db94e0048b1854878c9ad01 (diff)
downloadhdf5-38865598298878b740e6cdc1fbd60f4f386f5c71.zip
hdf5-38865598298878b740e6cdc1fbd60f4f386f5c71.tar.gz
hdf5-38865598298878b740e6cdc1fbd60f4f386f5c71.tar.bz2
[svn-r2861] Purpose:
Bug fix.
Description:
When writing data and the dataset needed to be extended, the write call ended up using the old dataspace extents (from before the dataspace was extended), so bad parameters got into the I/O channel, eventually causing it to dump core.
Solution:
Every time a dataset is extended, throw away the previous dataspace information and re-retrieve it.
Platforms tested:
FreeBSD 4.1.1 (hawkwind)
Diffstat (limited to 'src')
-rw-r--r--src/H5RA.c36
1 file changed, 36 insertions, 0 deletions
diff --git a/src/H5RA.c b/src/H5RA.c
index 27a2b01..33c5132 100644
--- a/src/H5RA.c
+++ b/src/H5RA.c
@@ -764,6 +764,15 @@ H5RA_write(H5RA_t *ra, hssize_t start_row, hsize_t nrows, H5T_t *type,
HGOTO_ERROR(H5E_RAGGED, H5E_CANTINIT, FAIL,
"unable to extend raw dataset");
}
+ /* Throw away previous raw file dataspace */
+ H5S_close(rf_space);
+
+ /* Retrieve newly extended raw file dataspace */
+ if (NULL==(rf_space=H5D_get_space(ra->raw)) ||
+ H5S_get_simple_extent_dims(rf_space, raw_cur_size, NULL)<0) {
+ HGOTO_ERROR(H5E_RAGGED, H5E_CANTINIT, FAIL,
+ "unable to determine current raw data extents");
+ }
}
hs_offset[0] = start_row;
hs_offset[1] = 0;
@@ -798,6 +807,15 @@ H5RA_write(H5RA_t *ra, hssize_t start_row, hsize_t nrows, H5T_t *type,
HGOTO_ERROR(H5E_RAGGED, H5E_CANTINIT, FAIL,
"unable to extend meta dataset");
}
+ /* Throw away previous meta file dataspace */
+ H5S_close(mf_space);
+
+ /* Retrieve newly extended meta file dataspace */
+ if (NULL==(mf_space=H5D_get_space(ra->meta)) ||
+ H5S_get_simple_extent_dims(mf_space, &meta_cur_size, NULL)<0) {
+ HGOTO_ERROR(H5E_RAGGED, H5E_CANTINIT, FAIL,
+ "unable to determine current raw data extents");
+ }
}
if (H5S_set_extent_simple(mm_space, 1, &nrows, NULL)<0 ||
H5S_select_hyperslab(mf_space, H5S_SELECT_SET, &start_row, NULL,
@@ -891,6 +909,15 @@ H5RA_fix_overflow(H5RA_t *ra, H5T_t *type, H5RA_meta_t *meta, hsize_t nelmts,
HGOTO_ERROR(H5E_RAGGED, H5E_CANTINIT, FAIL,
"unable to extend overflow dataset");
}
+ /* Throw away previous overflow file dataspace */
+ H5S_close(of_space);
+
+ /* Retrieve newly extended overflow file dataspace */
+ if (NULL==(of_space=H5D_get_space(ra->over)) ||
+ H5S_get_simple_extent_dims(of_space, &cur_size, NULL)<0) {
+ HGOTO_ERROR(H5E_RAGGED, H5E_CANTINIT, FAIL,
+ "unable to determine current overflow data extents");
+ }
#endif
} else {
@@ -908,6 +935,15 @@ H5RA_fix_overflow(H5RA_t *ra, H5T_t *type, H5RA_meta_t *meta, hsize_t nelmts,
HGOTO_ERROR(H5E_RAGGED, H5E_CANTINIT, FAIL,
"unable to extend overflow dataset");
}
+ /* Throw away previous overflow file dataspace */
+ H5S_close(of_space);
+
+ /* Retrieve newly extended overflow file dataspace */
+ if (NULL==(of_space=H5D_get_space(ra->over)) ||
+ H5S_get_simple_extent_dims(of_space, &cur_size, NULL)<0) {
+ HGOTO_ERROR(H5E_RAGGED, H5E_CANTINIT, FAIL,
+ "unable to determine current overflow data extents");
+ }
}
/* Write the data */