From 895ebf705ea5b830685424cbfe0ebef7cfd90d28 Mon Sep 17 00:00:00 2001
From: jhendersonHDF
Date: Fri, 14 Apr 2023 20:30:21 -0500
Subject: Fix a heap buffer overflow during H5D__compact_readvv (GitHub #2606) (#2664) (#2726)

---
 release_docs/RELEASE.txt | 19 +++++++++++++++++++
 src/H5Dint.c             | 27 +++++++++++++++++++++++++++
 2 files changed, 46 insertions(+)

diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 61a2d40..acc5411 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -223,6 +223,25 @@ Bug Fixes since HDF5-1.12.1 release
 ===================================
     Library
     -------
+    - Fixed a heap buffer overflow that occurs when reading from
+      a dataset with a compact layout within a malformed HDF5 file
+
+      During opening of a dataset that has a compact layout, the
+      library allocates a buffer that stores the dataset's raw data.
+      The dataset's object header that gets written to the file
+      contains information about how large of a buffer the library
+      should allocate. If this object header is malformed such that
+      it causes the library to allocate a buffer that is too small
+      to hold the dataset's raw data, future I/O to the dataset can
+      result in heap buffer overflows. To fix this issue, an extra
+      check is now performed for compact datasets to ensure that
+      the size of the allocated buffer matches the expected size
+      of the dataset's raw data (as calculated from the dataset's
+      dataspace and datatype information). If the two sizes do not
+      match, opening of the dataset will fail.
+
+      (JTH - 2023/04/04, GH-2606)
+
     - Fix for CVE-2019-8396
 
       Malformed HDF5 files may have truncated content which does not match
diff --git a/src/H5Dint.c b/src/H5Dint.c
index 24985f3..95be827 100644
--- a/src/H5Dint.c
+++ b/src/H5Dint.c
@@ -1764,6 +1764,33 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id)
     /* Indicate that the layout information was initialized */
     layout_init = TRUE;
 
+    /*
+     * Now that we've read the dataset's datatype, dataspace and
+     * layout information, perform a quick check for compact datasets
+     * to ensure that the size of the internal buffer that was
+     * allocated for the dataset's raw data matches the size of
+     * the data. A corrupted file can cause a mismatch between the
+     * two, which might result in buffer overflows during future
+     * I/O to the dataset.
+     */
+    if (H5D_COMPACT == dataset->shared->layout.type) {
+        hssize_t dset_nelemts   = 0;
+        size_t   dset_type_size = H5T_GET_SIZE(dataset->shared->type);
+        size_t   dset_data_size = 0;
+
+        HDassert(H5D_COMPACT == dataset->shared->layout.storage.type);
+
+        if ((dset_nelemts = H5S_GET_EXTENT_NPOINTS(dataset->shared->space)) < 0)
+            HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get number of elements in dataset's dataspace")
+
+        dset_data_size = (size_t)dset_nelemts * dset_type_size;
+
+        if (dataset->shared->layout.storage.u.compact.size != dset_data_size)
+            HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL,
+                        "bad value from dataset header - size of compact dataset's data buffer doesn't match "
+                        "size of dataset data");
+    }
+
     /* Set up flush append property */
     if (H5D__append_flush_setup(dataset, dapl_id))
         HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to set up flush append property")
--
cgit v0.12
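
For reference, the size comparison the patch adds inside H5D__open_oid() can also be sketched from the
application side through HDF5's public C API. This is a minimal illustration and not part of the patch:
the file name "corrupt.h5" and dataset path "/compact_dset" are hypothetical, and it assumes that
H5Dget_storage_size() reports the compact data buffer size for a compact-layout dataset. With the fix
applied, the library performs the equivalent check internally, so H5Dopen2() simply fails on a file
whose object header advertises a mismatched buffer size.

/*
 * Sketch: recompute the expected raw data size of a dataset from its
 * dataspace and datatype, and compare it against the storage the
 * library reports, mirroring the check added to H5D__open_oid().
 */
#include <stdio.h>
#include <stdlib.h>
#include "hdf5.h"

int
main(void)
{
    hid_t    file_id, dset_id, space_id, type_id;
    hssize_t npoints;
    size_t   type_size, expected_size;
    hsize_t  storage_size;

    if ((file_id = H5Fopen("corrupt.h5", H5F_ACC_RDONLY, H5P_DEFAULT)) < 0)
        return EXIT_FAILURE;

    /* With a patched library, a malformed compact dataset already fails here */
    if ((dset_id = H5Dopen2(file_id, "/compact_dset", H5P_DEFAULT)) < 0) {
        H5Fclose(file_id);
        return EXIT_FAILURE;
    }

    space_id  = H5Dget_space(dset_id);
    type_id   = H5Dget_type(dset_id);
    npoints   = H5Sget_simple_extent_npoints(space_id);
    type_size = H5Tget_size(type_id);

    /* Expected raw data size: number of elements times datatype size */
    expected_size = (size_t)npoints * type_size;

    /* Storage actually allocated for the dataset's data (assumption: for a
     * compact layout this is the compact buffer size) */
    storage_size = H5Dget_storage_size(dset_id);

    if ((hsize_t)expected_size != storage_size)
        fprintf(stderr, "compact buffer size mismatch: expected %zu, got %llu\n",
                expected_size, (unsigned long long)storage_size);

    H5Tclose(type_id);
    H5Sclose(space_id);
    H5Dclose(dset_id);
    H5Fclose(file_id);

    return EXIT_SUCCESS;
}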