diff options
author | jhendersonHDF <jhenderson@hdfgroup.org> | 2023-04-11 19:08:46 (GMT) |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-04-11 19:08:46 (GMT) |
commit | 027ee7c63389a6587acdd30fe386043ce7d78bad (patch) | |
tree | cf9b5250c6c4ef5b435ea20ad08c6e75c764836a | |
parent | 281071907b9744ed4d560343bb6fe052efdf497b (diff) | |
download | hdf5-027ee7c63389a6587acdd30fe386043ce7d78bad.zip hdf5-027ee7c63389a6587acdd30fe386043ce7d78bad.tar.gz hdf5-027ee7c63389a6587acdd30fe386043ce7d78bad.tar.bz2 |
Fix a heap buffer overflow during H5D__compact_readvv (GitHub #2606) (#2664)
-rw-r--r-- | release_docs/RELEASE.txt | 19 | ||||
-rw-r--r-- | src/H5Dint.c | 27 |
2 files changed, 46 insertions, 0 deletions
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index be040f4..a33c97b 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -151,6 +151,25 @@ Bug Fixes since HDF5-1.13.3 release =================================== Library ------- + - Fixed a heap buffer overflow that occurs when reading from + a dataset with a compact layout within a malformed HDF5 file + + During opening of a dataset that has a compact layout, the + library allocates a buffer that stores the dataset's raw data. + The dataset's object header that gets written to the file + contains information about how large of a buffer the library + should allocate. If this object header is malformed such that + it causes the library to allocate a buffer that is too small + to hold the dataset's raw data, future I/O to the dataset can + result in heap buffer overflows. To fix this issue, an extra + check is now performed for compact datasets to ensure that + the size of the allocated buffer matches the expected size + of the dataset's raw data (as calculated from the dataset's + dataspace and datatype information). If the two sizes do not + match, opening of the dataset will fail. + + (JTH - 2023/04/04, GH-2606) + - Fixed a memory corruption issue that can occur when reading + from a dataset using a hyperslab selection in the file + dataspace and a point selection in the memory dataspace diff --git a/src/H5Dint.c b/src/H5Dint.c index 34a9d75..e930c6a 100644 --- a/src/H5Dint.c +++ b/src/H5Dint.c @@ -1718,6 +1718,33 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id) /* Indicate that the layout information was initialized */ layout_init = TRUE; + /* + * Now that we've read the dataset's datatype, dataspace and + * layout information, perform a quick check for compact datasets + * to ensure that the size of the internal buffer that was + * allocated for the dataset's raw data matches the size of + * the data. A corrupted file can cause a mismatch between the + * two, which might result in buffer overflows during future + * I/O to the dataset. + */ + if (H5D_COMPACT == dataset->shared->layout.type) { + hssize_t dset_nelemts = 0; + size_t dset_type_size = H5T_GET_SIZE(dataset->shared->type); + size_t dset_data_size = 0; + + HDassert(H5D_COMPACT == dataset->shared->layout.storage.type); + + if ((dset_nelemts = H5S_GET_EXTENT_NPOINTS(dataset->shared->space)) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get number of elements in dataset's dataspace") + + dset_data_size = (size_t)dset_nelemts * dset_type_size; + + if (dataset->shared->layout.storage.u.compact.size != dset_data_size) + HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, + "bad value from dataset header - size of compact dataset's data buffer doesn't match " + "size of dataset data"); + } + /* Set up flush append property */ if (H5D__append_flush_setup(dataset, dapl_id)) HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set up flush append property") |