-rw-r--r--   COPYING                                                                    |   4
-rw-r--r--   CTestConfig.cmake                                                          |   4
-rw-r--r--   README.txt                                                                 |   2
-rwxr-xr-x   bin/h5vers                                                                 |  10
-rw-r--r--   c++/src/cpp_doc_config                                                     |   2
-rwxr-xr-x   config/cmake/scripts/HDF5config.cmake                                      |   4
-rw-r--r--   config/lt_vers.am                                                          |  30
-rw-r--r--   configure.ac                                                               |   2
-rw-r--r--   java/src/hdf/hdf5lib/H5.java                                               |   4
-rw-r--r--   java/test/TestH5.java                                                      |   4
-rw-r--r--   release_docs/RELEASE.txt                                                   | 401
-rw-r--r--   src/H5public.h                                                             |   8
-rw-r--r--   tools/test/h5repack/testfiles/h5repack_layout.h5-plugin_version_test.ddl   |  14
13 files changed, 174 insertions, 315 deletions
diff --git a/COPYING b/COPYING
--- a/COPYING
+++ b/COPYING
@@ -49,7 +49,7 @@ advised of the possibility of such damage.
 Limited portions of HDF5 were developed by Lawrence Berkeley National
 Laboratory (LBNL). LBNL's Copyright Notice and Licensing Terms can be
 found here: COPYING_LBNL_HDF5 file in this directory or at
-http://support.hdfgroup.org/ftp/HDF5/releases/COPYING_LBNL_HDF5.
+https://support.hdfgroup.org/ftp/HDF5/releases/COPYING_LBNL_HDF5.
 
 -----------------------------------------------------------------------------
 -----------------------------------------------------------------------------
@@ -99,7 +99,7 @@ and/or accompanying materials:
 
 HDF5 is available with the SZIP compression library but SZIP is not part
 of HDF5 and has separate copyright and license terms. See “Szip Compression
-in HDF Products” (www.hdfgroup.org/doc_resource/SZIP/) for further details.
+in HDF Products” (https://support.hdfgroup.org/doc_resource/SZIP/) for further details.
 
 -----------------------------------------------------------------------------
diff --git a/CTestConfig.cmake b/CTestConfig.cmake
index 3f5f380..8efea80 100644
--- a/CTestConfig.cmake
+++ b/CTestConfig.cmake
@@ -10,10 +10,10 @@ set (CTEST_NIGHTLY_START_TIME "18:00:00 CST")
 set (CTEST_DROP_METHOD "http")
 if (CDASH_LOCAL)
   set (CTEST_DROP_SITE "cdash-internal.hdfgroup.org")
-  set (CTEST_DROP_LOCATION "/submit.php?project=HDF5Trunk")
+  set (CTEST_DROP_LOCATION "/submit.php?project=HDF5110")
 else ()
   set (CTEST_DROP_SITE "cdash.hdfgroup.org")
-  set (CTEST_DROP_LOCATION "/submit.php?project=HDF5+Trunk")
+  set (CTEST_DROP_LOCATION "/submit.php?project=HDF5110")
 endif ()
 
 set (CTEST_DROP_SITE_CDASH TRUE)
diff --git a/README.txt b/README.txt
--- a/README.txt
+++ b/README.txt
@@ -1,4 +1,4 @@
-HDF5 version 1.11.0 currently under development
+HDF5 version 1.10.2-snap0 currently under development
 
 Please refer to the release_docs/INSTALL file for installation instructions.
 ------------------------------------------------------------------------------
diff --git a/bin/h5vers b/bin/h5vers
--- a/bin/h5vers
+++ b/bin/h5vers
@@ -200,7 +200,7 @@ my $H5_JAVA = $file;
 $H5_JAVA =~ s/[^\/]*$/..\/java\/src\/hdf\/hdf5lib\/H5.java/;
 die "unable to read file: $H5_JAVA\n" unless -r $file;
 my $TESTH5_JAVA = $file;
-$TESTH5_JAVA =~ s/[^\/]*$/..\/java\/test\/TestH5.java/;
+$TESTH5_JAVA =~ s/[^\/]*$/..\/java\/test\/TestH5.java/;
 die "unable to read file: $TESTH5_JAVA\n" unless -r $file;
 my $REPACK_LAYOUT_PLUGIN_VERSION = $file;
 $REPACK_LAYOUT_PLUGIN_VERSION =~ s/[^\/]*$/..\/tools\/test\/h5repack\/testfiles\/h5repack_layout.h5-plugin_version_test.ddl/;
@@ -356,7 +356,7 @@ if ($HDF5CONFIGCMAKE) {
   my $sub_rel_ver_str = (
        $newver[3] eq ""
        ? sprintf("\"%s\"", "")
-       : sprintf("\"%s\"", "-".$newver[3].", currently under development")
+       : sprintf("\"%s\"", "-".$newver[3])
       );
 
   my $version_string = sprintf("\"%d.%d.%d\"", @newver[0,1,2]);
@@ -379,7 +379,7 @@ if ($H5_JAVA) {
   my $version_string2 = sprintf("%d, %d, %d", @newver[0,1,2]);
 
   $data =~ s/\@version HDF5 .* <BR>/\@version HDF5 $version_string1 <BR>/;
-  $data =~ s/ public final static int LIB_VERSION\[\] = { \d*, \d*, \d* };/ public final static int LIB_VERSION[] = { $version_string2 };/;
+  $data =~ s/ public final static int LIB_VERSION\[\] = { \d*, \d*, \d* };/ public final static int LIB_VERSION[] = { $version_string2 };/;
 
   write_file($H5_JAVA, $data);
 }
@@ -397,7 +397,7 @@ if ($TESTH5_JAVA) {
   my $version_string2 = sprintf("int majnum = %d, minnum = %d, relnum = %d", @newver[0,1,2]);
 
   $data =~ s/ int libversion\[\] = { .* };/ int libversion\[\] = { $version_string1 };/;
-  $data =~ s/ int majnum = \d*, minnum = \d*, relnum = \d*;/ $version_string2;/;
+  $data =~ s/ int majnum = \d*, minnum = \d*, relnum = \d*;/ $version_string2;/;
 
   write_file($TESTH5_JAVA, $data);
 }
@@ -412,7 +412,7 @@ if ($REPACK_LAYOUT_PLUGIN_VERSION) {
   write_file($REPACK_LAYOUT_PLUGIN_VERSION, $data);
 }
 
-# helper function to read the file for updating c++/src/cpp_doc_config,
+# helper function to read the file for updating c++/src/cpp_doc_config,
 # config/cmake/scripts/HDF5Config.cmake, and java files.
 # The version string in that file is not at the top, so the string replacement
 # is not for the first line, and reading/writing the entire file as one string
diff --git a/c++/src/cpp_doc_config b/c++/src/cpp_doc_config
index ce23041..2b90350 100644
--- a/c++/src/cpp_doc_config
+++ b/c++/src/cpp_doc_config
@@ -38,7 +38,7 @@ PROJECT_NAME = "HDF5 C++ API"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.
 
-PROJECT_NUMBER = "1.11.0"
+PROJECT_NUMBER = "1.10.2-snap0, currently under development"
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
diff --git a/config/cmake/scripts/HDF5config.cmake b/config/cmake/scripts/HDF5config.cmake
index 239d517..ab8d8d6 100755
--- a/config/cmake/scripts/HDF5config.cmake
+++ b/config/cmake/scripts/HDF5config.cmake
@@ -27,8 +27,8 @@ cmake_minimum_required (VERSION 3.2.2 FATAL_ERROR)
 #     NO_MAC_FORTRAN - Yes to be SHARED on a Mac
##############################################################################
-set (CTEST_SOURCE_VERSION "1.11.0")
-set (CTEST_SOURCE_VERSEXT "")
+set (CTEST_SOURCE_VERSION "1.10.2")
+set (CTEST_SOURCE_VERSEXT "-snap0")
##############################################################################
# handle input parameters to script.
diff --git a/config/lt_vers.am b/config/lt_vers.am
index eb630a7..3d75074 100644
--- a/config/lt_vers.am
+++ b/config/lt_vers.am
@@ -18,9 +18,9 @@
 # See libtool versioning documentation online.
 # After making changes, run bin/reconfigure to update other configure related
 # files like Makefile.in.
-LT_VERS_INTERFACE = 1000
-LT_VERS_AGE = 0
+LT_VERS_INTERFACE = 101
 LT_VERS_REVISION = 0
+LT_VERS_AGE = 0
 
 ## If the API changes *at all*, increment LT_VERS_INTERFACE and
 ## reset LT_VERS_REVISION to 0.
@@ -41,32 +41,32 @@ LT_VERS_REVISION = 0
 ## the effects of the H5_V1_x_COMPAT flag.
 ##
 ## Version numbers for wrapper shared library files.
-LT_CXX_VERS_INTERFACE = 1000
+LT_CXX_VERS_INTERFACE = 101
 LT_CXX_VERS_REVISION = 0
 LT_CXX_VERS_AGE = 0
 
-LT_F_VERS_INTERFACE = 1000
+LT_F_VERS_INTERFACE = 101
 LT_F_VERS_REVISION = 0
-LT_F_VERS_AGE = 0
+LT_F_VERS_AGE = 1
 
-LT_HL_VERS_INTERFACE = 1000
-LT_HL_VERS_REVISION = 0
+LT_HL_VERS_INTERFACE = 100
+LT_HL_VERS_REVISION = 1
 LT_HL_VERS_AGE = 0
 
-LT_HL_CXX_VERS_INTERFACE = 1000
+LT_HL_CXX_VERS_INTERFACE = 101
 LT_HL_CXX_VERS_REVISION = 0
-LT_HL_CXX_VERS_AGE = 0
+LT_HL_CXX_VERS_AGE = 1
 
-LT_HL_F_VERS_INTERFACE = 1000
-LT_HL_F_VERS_REVISION = 0
+LT_HL_F_VERS_INTERFACE = 100
+LT_HL_F_VERS_REVISION = 1
 LT_HL_F_VERS_AGE = 0
 
-LT_JAVA_VERS_INTERFACE = 1000
+LT_JAVA_VERS_INTERFACE = 101
 LT_JAVA_VERS_REVISION = 0
-LT_JAVA_VERS_AGE = 0
+LT_JAVA_VERS_AGE = 1
 
-LT_TOOLS_VERS_INTERFACE = 1000
-LT_TOOLS_VERS_REVISION = 0
+LT_TOOLS_VERS_INTERFACE = 100
+LT_TOOLS_VERS_REVISION = 1
 LT_TOOLS_VERS_AGE = 0
diff --git a/configure.ac b/configure.ac
index ca4d4b6..afd36cd 100644
--- a/configure.ac
+++ b/configure.ac
@@ -26,7 +26,7 @@ AC_PREREQ([2.69])
 ## NOTE: Do not forget to change the version number here when we do a
 ## release!!!
 ##
-AC_INIT([HDF5], [1.11.0], [help@hdfgroup.org])
+AC_INIT([HDF5], [1.10.2-snap0], [help@hdfgroup.org])
 
 AC_CONFIG_SRCDIR([src/H5.c])
 AC_CONFIG_HEADERS([src/H5config.h])
diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java
index a1099d8..996feeb 100644
--- a/java/src/hdf/hdf5lib/H5.java
+++ b/java/src/hdf/hdf5lib/H5.java
@@ -216,7 +216,7 @@ import hdf.hdf5lib.structs.H5O_info_t;
 *          exception handlers to print out the HDF-5 error stack.
 * <hr>
 *
- * @version HDF5 1.11.0 <BR>
+ * @version HDF5 1.10.2 <BR>
 *          <b>See also: <a href ="./hdf.hdf5lib.HDFArray.html"> hdf.hdf5lib.HDFArray</a> </b><BR>
 *          <a href ="./hdf.hdf5lib.HDF5Constants.html"> hdf.hdf5lib.HDF5Constants</a><BR>
 *          <a href ="./hdf.hdf5lib.HDF5CDataTypes.html"> hdf.hdf5lib.HDF5CDataTypes</a><BR>
@@ -239,7 +239,7 @@ public class H5 implements java.io.Serializable {
     *
     * Make sure to update the versions number when a different library is used.
     */
-    public final static int LIB_VERSION[] = { 1, 11, 0 };
+    public final static int LIB_VERSION[] = { 1, 10, 2 };
 
     public final static String H5PATH_PROPERTY_KEY = "hdf.hdf5lib.H5.hdf5lib";
diff --git a/java/test/TestH5.java b/java/test/TestH5.java
index 4c84988..b5864e8 100644
--- a/java/test/TestH5.java
+++ b/java/test/TestH5.java
@@ -164,7 +164,7 @@ public class TestH5 {
      */
    @Test
    public void testH5get_libversion() {
-        int libversion[] = { 1, 11, 0 };
+        int libversion[] = { 1, 10, 2 };
 
        try {
            H5.H5get_libversion(libversion);
@@ -186,7 +186,7 @@ public class TestH5 {
      */
    @Test
    public void testH5check_version() {
-        int majnum = 1, minnum = 11, relnum = 0;
+        int majnum = 1, minnum = 10, relnum = 2;
 
        try {
            H5.H5check_version(majnum, minnum, relnum);
diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt
index 22accd3..2c118b9 100644
--- a/release_docs/RELEASE.txt
+++ b/release_docs/RELEASE.txt
@@ -1,4 +1,4 @@
-HDF5 version 1.11.0 currently under development
+HDF5 version 1.10.2-snap0 currently under development
 ================================================================================
 
 
@@ -92,7 +92,70 @@ New Features
 
     C++ Library:
     ------------
-    -
+    - New wrappers for C APIs:
+      (HDFFV-10004, HDFFV-10139, HDFFV-10145 - PRs #209, #232, #310, #334, #348)
+
+          // Sets/Gets the strategy and the threshold value that the library
+          // will employ in managing file space.
+          FileCreatPropList::setFileSpaceStrategy - H5Pset_file_space_strategy
+          FileCreatPropList::getFileSpaceStrategy - H5Pget_file_space_strategy
+
+          // Sets/Gets the file space page size for paged aggregation.
+          FileCreatPropList::setFileSpacePagesize - H5Pset_file_space_page_size
+          FileCreatPropList::getFileSpacePagesize - H5Pget_file_space_page_size
+
+          // Checks if the given ID is valid.
+          IdComponent::isValid - H5Iis_valid
+
+          // Sets/Gets the number of soft or user-defined links that can be
+          // traversed before a failure occurs.
+          LinkAccPropList::setNumLinks - H5Pset_nlinks
+          LinkAccPropList::getNumLinks - H5Pget_nlinks
+
+          // Returns a copy of the creation property list of a datatype.
+          DataType::getCreatePlist - H5Tget_create_plist
+
+          // Opens/Closes an object within a group or a file, regardless of object type.
+          Group::getObjId - H5Oopen
+          Group::closeObjId - H5Oclose
+
+          // Maps elements of a virtual dataset to elements of the source dataset.
+          DSetCreatPropList::setVirtual - H5Pset_virtual
+
+          // Gets general information about this file.
+          H5File::getFileInfo - H5Fget_info2
+
+          // Returns the number of members in a type.
+          IdComponent::getNumMembers - H5Inmembers
+
+          // Determines if an element type exists.
+          IdComponent::typeExists - H5Itype_exists
+
+          // Determines if an object exists.
+          H5Location::exists - H5Lexists
+
+          // Returns the header version of an HDF5 object.
+          H5Object::objVersion - H5Oget_info for version
+                                                 -BMR, 2017/03/20
+
+    - New exception: ObjHeaderIException for the H5O interface.
+      (HDFFV-10145 - PR #334)                    -BMR, 2017/03/15
+
+    - New class LinkAccPropList for link access property lists, to be used by
+      wrappers of H5Lexists.
+      (HDFFV-10145 - PR #232)                    -BMR, 2017/01/04
+
+    - New constructors to open datatypes in ArrayType, CompType, DataType,
+      EnumType, FloatType, IntType, StrType, and VarLenType.
+      (HDFFV-10056 - PR #209)                    -BMR, 2016/12/26
+
+    - New member functions:
+          DSetCreatPropList::setNbit() to set up N-bit compression for a dataset.
+          (HDFFV-8623 patch 7)                   -BMR, 2016/04/25
+
+          ArrayType::getArrayNDims() const
+          ArrayType::getArrayDims() const
+              both to replace the non-const versions.
+              (HDFFV-9725)                     -BMR, 2016/04/25
+
     Tools:
     ------
@@ -143,6 +206,22 @@ Bug Fixes since HDF5-1.10.0-patch1 release
       h5format_convert fails.
       (HDFFV-9434 VC 2016/05/29)
 
+    - Fixed an error when copying a dataset with an attribute whose datatype
+      is a compound containing a variable-length string.
+      (HDFFV-7991 VC 2016/08/19, 2016/08/21, 2016/08/24)
+
+    - H5DOappend will not fail if a dataset has no append callback registered.
+      (HDFFV-9960 VC 2016/08/05, 2016/08/14)
+
+    - Fixed the problem where the committed datatype's file location is
+      different from the file location of an attribute with that committed
+      datatype.
+      (HDFFV-9940 VC 2016/07/03, 2016/07/06)
+
+    - (a) Throw an error instead of an assertion when the v1 btree level
+          hits the 1-byte limit.
+      (b) Modifications to better handle error recovery when conversion by
+          h5format_convert fails.
+      (HDFFV-9434 VC 2016/05/29)
+
     Configuration
     -------------
     - Configuration will check for the strtoll and strtoull functions
@@ -191,7 +270,36 @@ Bug Fixes since HDF5-1.10.0-patch1 release
 
     C++ APIs
     --------
-    -
+    - Due to the change in the C API, the overloaded functions of
+      PropList::setProperty now need const for some arguments. They are
+      planned for deprecation and are replaced by new versions with the
+      proper consts.
+      (PR #344)                                  -BMR, 2017/03/17
+
+    - The problem where a user-defined function could not access both an
+      attribute and a dataset using only one argument is now fixed.
+      (HDFFV-9920 - PR #45)                      -BMR, 2016/10/11
+
+    - The high-level API Packet Table (PT) did not write data correctly when
+      the datatype is a compound type that has a string type as one of its
+      members. This problem started in 1.8.15, after the fix of HDFFV-9042
+      was applied, which caused the Packet Table to use the native type to
+      access the data. It should be up to the application to specify whether
+      the buffer to be read into memory is in the machine's native
+      architecture. Thus, the PT is fixed to not use the native type but to
+      make a copy of the user's provided datatype during creation, or of the
+      packet table's datatype during opening. If an application wishes to use
+      the native type to read the data, then the application will request
+      that. However, the Packet Table doesn't provide a way to specify the
+      memory datatype in this release. This feature will be available in
+      future releases, HDFFV-10023.
+      (HDFFV-9758 - PRs #93 and #108)            -BMR, 2016/10/27
+
+    - The obsolete macros H5_NO_NAMESPACE and H5_NO_STD are removed.
+      (HDFFV-9532 - PR #92)                      -BMR, 2016/10/23
+
+    - In-memory array information, ArrayType::rank and
+      ArrayType::dimensions, were removed. This is an implementation
+      detail and should not affect applications.
+              (HDFFV-9725)                     -BMR, 2016/04/25
 
 
     Testing
@@ -201,18 +309,6 @@ Bug Fixes since HDF5-1.10.0-patch1 release
 Supported Platforms
 ===================
-    Linux 2.6.32-573.22.1.el6      GNU C (gcc), Fortran (gfortran), C++ (g++)
-    #1 SMP x86_64 GNU/Linux        compilers:
-    (mayll/platypus)                  Version 4.4.7 20120313
-                                      Version 4.8.4
-                                   PGI C, Fortran, C++ for 64-bit target on
-                                   x86-64;
-                                      Version 16.10-0
-                                   Intel(R) C (icc), C++ (icpc), Fortran (icc)
-                                   compilers:
-                                      Version 15.0.3.187 (Build 20150407)
-                                   MPICH 3.1.4 compiled with GCC 4.9.3
-
     Linux 2.6.32-573.18.1.el6.ppc64  gcc (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4)
     #1 SMP ppc64 GNU/Linux           g++ (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4)
     (ostrich)                        GNU Fortran (GCC) 4.4.7 20120313 (Red Hat 4.4.7-4)
@@ -288,15 +384,13 @@
 Windows 7 x64 Cygwin                  n      y/n        n      y    y    y
 Windows 10                            y      y/y        n      y    y    y
 Windows 10 x64                        y      y/y        n      y    y    y
 Mac OS X Mountain Lion 10.8.5 64-bit  n      y/y        n      y    y    y
-Mac OS X Mavericks 10.9.5 64-bit      n      y/y        n      y    y    ?
-Mac OS X Yosemite 10.10.5 64-bit      n      y/y        n      y    y    ?
-Mac OS X El Capitan 10.11.6 64-bit    n      y/y        n      y    y    ?
-CentOS 6.7 Linux 2.6.18 x86_64 GNU    n      y/y        n      y    y    y
-CentOS 6.7 Linux 2.6.18 x86_64 Intel  n      y/y        n      y    y    y
-CentOS 6.7 Linux 2.6.32 x86_64 PGI    n      y/y        n      y    y    y
+Mac OS X Mavericks 10.9.5 64-bit      n      y/y        n      y    y    y
+Mac OS X Yosemite 10.10.5 64-bit      n      y/y        n      y    y    y
+Mac OS X El Capitan 10.11.6 64-bit    n      y/y        n      y    y    y
+CentOS 7.2 Linux 2.6.32 x86_64 PGI    n      y/y        n      y    y    y
 CentOS 7.2 Linux 2.6.32 x86_64 GNU    y      y/y        y      y    y    y
 CentOS 7.2 Linux 2.6.32 x86_64 Intel  n      y/y        n      y    y    y
-Linux 2.6.32-573.18.1.el6.ppc64       n      y/n        n      y    y    y
+Linux 2.6.32-573.18.1.el6.ppc64       n      y/y        n      y    y    y
 
 
 Platform                              Shared  Shared    Shared    Thread-
@@ -313,10 +407,8 @@
 Mac OS X Mountain Lion 10.8.5 64-bit  y       n         y         y
 Mac OS X Mavericks 10.9.5 64-bit      y       n         y         y
 Mac OS X Yosemite 10.10.5 64-bit      y       n         y         y
 Mac OS X El Capitan 10.11.6 64-bit    y       n         y         y
-CentOS 6.7 Linux 2.6.18 x86_64 GNU    y       y         y         y
-CentOS 6.7 Linux 2.6.18 x86_64 Intel  y       y         y         n
-CentOS 6.7 Linux 2.6.32 x86_64 PGI    y       y         y         n
-CentOS 7.2 Linux 2.6.32 x86_64 GNU    y       y         y         n
+CentOS 7.2 Linux 2.6.32 x86_64 PGI    y       y         y         n
+CentOS 7.2 Linux 2.6.32 x86_64 GNU    y       y         y         y
 CentOS 7.2 Linux 2.6.32 x86_64 Intel  y       y         y         n
 Linux 2.6.32-573.18.1.el6.ppc64       y       y         y         n
@@ -328,6 +420,18 @@ More Tested Platforms
 =====================
 The following platforms are not supported but have been tested for this release.
+    Linux 2.6.32-573.22.1.el6      GNU C (gcc), Fortran (gfortran), C++ (g++)
+    #1 SMP x86_64 GNU/Linux        compilers:
+    (mayll/platypus)                  Version 4.4.7 20120313
+                                      Version 4.8.4
+                                   PGI C, Fortran, C++ for 64-bit target on
+                                   x86-64;
+                                      Version 16.10-0
+                                   Intel(R) C (icc), C++ (icpc), Fortran (icc)
+                                   compilers:
+                                      Version 15.0.3.187 (Build 20150407)
+                                   MPICH 3.1.4 compiled with GCC 4.9.3
+
     Linux 2.6.32-573.18.1.el6.ppc64  MPICH mpich 3.1.4 compiled with
     #1 SMP ppc64 GNU/Linux           IBM XL C/C++ for Linux, V13.1
     (ostrich)                        and IBM XL Fortran for Linux, V15.1
@@ -350,249 +454,4 @@ The following platforms are not supported but have been tested for this release.
 
 Known Problems
 ==============
-* "make check" fails on CYGWIN when building shared lib files is enabled. The
-  default on Cygwin has been changed to disable shared. It can be enabled with
-  the --enable-shared configure option but is likely to fail "make check"
-  with GCC compilers. (LK -2015/04/16)
-
-* CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv
-  catches some undefined behavior in the alignment algorithm of the macro DETECT_I
-  in H5detect.c (Issue 8147). Since the algorithm is trying to detect the alignment
-  of integers, ideally the flag -fcatch-undefined-behavior shouldn't to be used for
-  H5detect.c. In the future, we can separate flags for H5detect.c from the rest of
-  the library. (SLU - 2013/10/16)
-
-* The 5.9 C++ compiler on Sun failed to compile a C++ test ttypes.cpp. It
-  complains with this message:
-      "/home/hdf5/src/H5Vprivate.h", line 130: Error: __func__ is not defined.
-
-  The reason is that __func__ is a predefined identifier in C99 standard. The
-  HDF5 C library uses it in H5private.h. The test ttypes.cpp includes
-  H5private.h (H5Tpkg.h<-H5Fprivate.h<-H5Vprivate.h<-H5private.h). Sun's 5.9
-  C++ compiler doesn't support __func__, thus fails to compile the C++ test.
-  But 5.11 C++ compiler does. To check whether your Sun C++ compiler knows this
-  identifier, try to compile the following simple C++ program:
-      #include<stdio.h>
-
-      int main(void)
-      {
-          printf("%s\n", __func__);
-          return 0;
-      }
-  (SLU - 2012/11/5)
-
-* The C++ and FORTRAN bindings are not currently working on FreeBSD with the
-  native release 8.2 compilers (4.2.1), but are working with gcc 4.6 from the
-  ports (and probably gcc releases after that).
-  (QAK - 2012/10/19)
-
-* The data conversion test dt_arith.c has failures (segmentation fault) from
-  "long double" to other datatypes during hard conversion when the library
-  is built with the default GCC 4.2.1 on Mac Lion system. It only happens
-  with optimization (-O3, -O2, and -O1). Some newer versions of GCC do not
-  have this problem. Users should disable optimization or try newer version
-  of GCC. (Issue 8017. SLU - 2012/6/12)
-
-* The data conversion test dt_arith.c fails in "long double" to integer
-  conversion on Ubuntu 11.10 (3.0.0.13 kernal) with GCC 4.6.1 if the library
-  is built with optimization -O3 or -O2. The older GCC (4.5) or newer kernal
-  (3.2.2 on Fedora) doesn't have the problem. Users should lower down the
-  optimization level (-O1 or -O0) by defining CFLAGS in the command line of
-  "configure" like:
-
-      CFLAGS=-O1 ./configure
-
-  It will overwrite the library's default optimization level. (Issue 7829.
-  SLU - 2012/2/7)
-
-* --with-mpe configure option does not work with Mpich2. AKC - 2011/03/10)
-
-* While working on the 1.8.6 release of HDF5, a bug was discovered that can
-  occur when reading from a dataset in parallel shortly after it has been
-  written to collectively. The issue was exposed by a new test in the parallel
-  HDF5 test suite, but had existed before that. We believe the problem lies with
-  certain MPI implementations and/or filesystems.
-
-  We have provided a pure MPI test program, as well as a standalone HDF5
-  program, that can be used to determine if this is an issue on your system.
-  They should be run across multiple nodes with a varying number of processes.
-  These programs can be found at:
-  http://www.hdfgroup.org/ftp/HDF5/examples/known_problems/
-
-* Parallel mode in AIX will fail some of the testcheck_version.sh tests where
-  it treats "exit(134) the same as if process 0 had received an abort signal.
-  This is fixed and will be available in the next release. AKC - 2009/11/3
-
-* The PathScale MPI implementation, accessing a Panasas file system, would
-  cause H5Fcreate() with H5F_ACC_EXCL to fail even when the file is not
-  existing. This is due to the MPI_File_open() call failing if the amode has
-  the MPI_MODE_EXCL bit set. (See bug 1468 for details.) AKC - 2009/8/11
-
-* Parallel tests failed with 16 processes with data inconsistency at testphdf5
-  / dataset_readAll. Parallel tests also failed with 32 and 64 processes with
-  collective abort of all ranks at t_posix_compliant / allwrite_allread_blocks
-  with MPI IO. (CMC - 2009/04/28)
-
-* For SNL, spirit/liberty/thunderbird: The serial tests pass but parallel
-  tests failed with MPI-IO file locking message. AKC - 2007/6/25.
-* On Intel 64 Linux cluster (RH 4, Linux 2.6.9) with Intel 10.0 compilers use
-  -mp -O1 compilation flags to build the libraries. Higher level of optimization
-  causes failures in several HDF5 library tests.
-* For HPUX 11.23 many tools tests failed for 64-bit version when linked to the
-  shared libraries (tested for 1.8.0-beta2)
-* For SNL, Red Storm: only paralle HDF5 is supported. The serial tests pass
-  and the parallel tests also pass with lots of non-fatal error messages.
-* on SUN 5.10 C++ test fails in the "Testing Shared Datatypes with Attributes" test
-* configuring with --enable-debug=all produces compiler errors on most
-  platforms. Users who want to run HDF5 in debug mode should use
-  --enable-debug rather than --enable-debug=all to enable debugging
-  information on most modules.
-* On Mac OS 10.4, test/dt_arith.c has some errors in conversion from long
-  double to (unsigned) long long and from (unsigned)long long to long double.
-* On Altix SGI with Intel 9.0 testmeta.c would not compile with -O3
-  optimization flag.
-* On VAX, Scaleoffset filter isn't supported. The filter cannot be applied to
-  HDF5 data generated on VAX. Scaleoffset filter only supports IEEE standard
-  for floating-point data.
-* On Cray X1, a lone colon on the command line of h5dump --xml (as in
-  the testh5dumpxml.sh script) is misinterpereted by the operating system
-  and causes an error.
-* On mpich 1.2.5 and 1.2.6, we found that if more than two processes
-  contribute no IO and the application asks to do IO with collective, we found
-  that when using 4 processors, a simple collective write will be hung
-  sometimes. This can be verified with t_mpi test under testpar.
-* The dataset created or rewritten with the v1.6.3 library or after can't
-  be read with the v1.6.2 library or before when Fletcher32 EDC(filter) is
-  enabled. There was a bug in the calculating code of the Fletcher32
-  checksum in the library before v1.6.3. The checksum value wasn't consistent
-  between big-endian and little-endian systems. This bug was fixed in
-  Release 1.6.3. However, after fixing the bug, the checksum value is no
-  longer the same as before on little-endian system. The library release
-  after 1.6.4 can still read the dataset created or rewritten with the library
-  of v1.6.2 or before. SLU - 2005/6/30
-* For the version 6(6.02 and 6.04) of Portland Group compiler on AMD Opteron
-  processor, there's a bug in the compiler for optimization(-O2). The library
-  failed in several tests but all related to multi driver. The problem has
-  been reported to the vendor.
-* On IBM AIX systems, parallel HDF5 mode will fail some tests with error
-  messages like "INFO: 0031-XXX ...". This is from the command poe.
-  Set the environment variable MP_INFOLEVEL to 0 to minimize the messages
-  and run the tests again.
-  The tests may fail with messages like "The socket name is already
-  in use". HDF5 does not use sockets (except for stream-VFD). This is
-  due to problems of the poe command trying to set up the debug socket.
-  Check if there are many old /tmp/s.pedb.* staying around. These are
-  sockets used by the poe command and left behind due to failed commands.
-  Ask your system administrator to clean them out. Lastly, request IBM
-  to provide a mean to run poe without the debug socket.
-
-* The C++ library's tests fails when compiling with PGI C++ compiler. The
-  workaround until the problem is correctly handled is to use the
-  flag "--instantiate=local" prior to the configure and build steps, as:
-      setenv CXX "pgCC --instantiate=local" for pgCC 5.02 and higher
-
-
-* The stream-vfd test uses ip port 10007 for testing. If another
-  application is already using that port address, the test will hang
-  indefinitely and has to be terminated by the kill command. To try the
-  test again, change the port address in test/stream_test.c to one not
-  being used in the host.
-
-* The --enable-static-exec configure flag will only statically link libraries
-  if the static version of that library is present. If only the shared version
-  of a library exists (i.e., most system libraries on Solaris, AIX, and Mac,
-  for example, only have shared versions), the flag should still result in a
-  successful compilation, but note that the installed executables will not be
-  fully static. Thus, the only guarantee on these systems is that the
-  executable is statically linked with just the HDF5 library.
-
-* With the gcc 2.95.2 compiler, HDF 5 uses the `-ansi' flag during
-  compilation. The ANSI version of the compiler complains about not being
-  able to handle the `long long' datatype with the warning:
-
-      warning: ANSI C does not support `long long'
-
-  This warning is innocuous and can be safely ignored.
-
-* Certain platforms give false negatives when testing h5ls:
-
-  Cray J90 and Cray T90IEEE give errors during testing when displaying
-  some floating-point values. These are benign differences due to
-  the different precision in the values displayed and h5ls appears to
-  be dumping floating-point numbers correctly.
-
-* Not all platforms behave correctly with szip's shared libraries. Szip is
-  disabled in these cases, and a message is relayed at configure time. Static
-  libraries should be working on all systems that support szip, and should be
-  used when shared libraries are unavailable. There is also a configure error
-  on Altix machines that incorrectly reports when a version of szip without
-  an encoder is being used.
-
-* On some platforms that use Intel and Absoft compilers to build HDF5 fortran library,
-  compilation may fail for fortranlib_test.f90, fflush1.f90 and fflush2.f90
-  complaining about exit subroutine. Comment out the line
-      IF (total_error .ne. 0) CALL exit (total_error)
-
-* Information about building with PGI and Intel compilers is available in
-  INSTALL file sections 5.7 and 5.8
-
-* On at least one system, (SDSC DataStar), the scheduler (in this case
-  LoadLeveler) sends job status updates to standard error when you run
-  any executable that was compiled with the parallel compilers.
-
-  This causes problems when running "make check" on parallel builds, as
-  many of the tool tests function by saving the output from test runs,
-  and comparing it to an exemplar.
-
-  The best solution is to reconfigure the target system so it no longer
-  inserts the extra text. However, this may not be practical.
-
-  In such cases, one solution is to "setenv HDF5_Make_Ignore yes" prior to
-  the configure and build. This will cause "make check" to continue after
-  detecting errors in the tool tests. However, in the case of SDSC DataStar,
-  it also leaves you with some 150 "failed" tests to examine by hand.
-
-  A second solution is to write a script to run serial tests and filter
-  out the text added by the scheduler. A sample script used on SDSC
-  DataStar is given below, but you will probably have to customize it
-  for your installation.
-
-  Observe that the basic idea is to insert the script as the first item
-  on the command line which executes the the test. The script then
-  executes the test and filters out the offending text before passing
-  it on.
-
-      #!/bin/csh
-
-      set STDOUT_FILE=~/bin/serial_filter.stdout
-      set STDERR_FILE=~/bin/serial_filter.stderr
-
-      rm -f $STDOUT_FILE $STDERR_FILE
-
-      ($* > $STDOUT_FILE) >& $STDERR_FILE
-
-      set RETURN_VALUE=$status
-
-      cat $STDOUT_FILE
-
-      tail +3 $STDERR_FILE
-
-      exit $RETURN_VALUE
-
-  You get the HDF make files and test scipts to execute your filter script
-  by setting the environment variable "RUNSERIAL" to the full path of the
-  script prior to running configure for parallel builds. Remember to
-  "unsetenv RUNSERIAL" before running configure for a serial build.
-
-  Note that the RUNSERIAL environment variable exists so that we can
-  can prefix serial runs as necessary on the target system. On DataStar,
-  no prefix is necessary. However on an MPICH system, the prefix might
-  have to be set to something like "/usr/local/mpi/bin/mpirun -np 1" to
-  get the serial tests to run at all.
-
-  In such cases, you will have to include the regular prefix in your
-  filter script.
-
-* H5Ocopy() does not copy reg_ref attributes correctly when shared-message
-  is turn on. The value of the reference in the destination attriubte is
-  wrong. This H5Ocopy problem will affect h5copy tool
diff --git a/src/H5public.h b/src/H5public.h
index cab90aa..2e73ec9 100644
--- a/src/H5public.h
+++ b/src/H5public.h
@@ -93,11 +93,11 @@ extern "C" {
 
 /* Version numbers */
 #define H5_VERS_MAJOR      1       /* For major interface/format changes      */
-#define H5_VERS_MINOR      11      /* For minor interface/format changes      */
-#define H5_VERS_RELEASE    0       /* For tweaks, bug-fixes, or development   */
-#define H5_VERS_SUBRELEASE ""      /* For pre-releases like snap0             */
+#define H5_VERS_MINOR      10      /* For minor interface/format changes      */
+#define H5_VERS_RELEASE    2       /* For tweaks, bug-fixes, or development   */
+#define H5_VERS_SUBRELEASE "snap0" /* For pre-releases like snap0             */
                                    /* Empty string for real releases.         */
-#define H5_VERS_INFO    "HDF5 library version: 1.11.0"      /* Full version string */
+#define H5_VERS_INFO    "HDF5 library version: 1.10.2-snap0"      /* Full version string */
 
 #define H5check()   H5check_version(H5_VERS_MAJOR,H5_VERS_MINOR, \
                                     H5_VERS_RELEASE)
diff --git a/tools/test/h5repack/testfiles/h5repack_layout.h5-plugin_version_test.ddl b/tools/test/h5repack/testfiles/h5repack_layout.h5-plugin_version_test.ddl
index a951638..561ab14 100644
--- a/tools/test/h5repack/testfiles/h5repack_layout.h5-plugin_version_test.ddl
+++ b/tools/test/h5repack/testfiles/h5repack_layout.h5-plugin_version_test.ddl
@@ -11,7 +11,7 @@ GROUP "/" {
          USER_DEFINED_FILTER {
             FILTER_ID 260
             COMMENT dynlib4
-            PARAMS { 9 1 11 0 }
+            PARAMS { 9 1 10 2 }
          }
       }
       FILLVALUE {
@@ -33,7 +33,7 @@ GROUP "/" {
          USER_DEFINED_FILTER {
            FILTER_ID 260
            COMMENT dynlib4
-            PARAMS { 9 1 11 0 }
+            PARAMS { 9 1 10 2 }
          }
       }
       FILLVALUE {
@@ -55,7 +55,7 @@ GROUP "/" {
          USER_DEFINED_FILTER {
            FILTER_ID 260
            COMMENT dynlib4
-            PARAMS { 9 1 11 0 }
+            PARAMS { 9 1 10 2 }
          }
       }
       FILLVALUE {
@@ -77,7 +77,7 @@ GROUP "/" {
          USER_DEFINED_FILTER {
            FILTER_ID 260
            COMMENT dynlib4
-            PARAMS { 9 1 11 0 }
+            PARAMS { 9 1 10 2 }
          }
       }
       FILLVALUE {
@@ -99,7 +99,7 @@ GROUP "/" {
          USER_DEFINED_FILTER {
            FILTER_ID 260
            COMMENT dynlib4
-            PARAMS { 9 1 11 0 }
+            PARAMS { 9 1 10 2 }
          }
       }
       FILLVALUE {
@@ -121,7 +121,7 @@ GROUP "/" {
          USER_DEFINED_FILTER {
            FILTER_ID 260
            COMMENT dynlib4
-            PARAMS { 9 1 11 0 }
+            PARAMS { 9 1 10 2 }
          }
       }
       FILLVALUE {
@@ -143,7 +143,7 @@ GROUP "/" {
          USER_DEFINED_FILTER {
            FILTER_ID 260
            COMMENT dynlib4
-            PARAMS { 9 1 11 0 }
+            PARAMS { 9 1 10 2 }
          }
       }
       FILLVALUE {
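
Every file touched by this commit repeats the same version triple that the library verifies at run time. The short C program below is an illustrative sketch and is not part of the commit: it prints the compile-time version taken from the H5_VERS_* macros changed in the H5public.h hunk, prints the version reported by the linked library, and then runs the same check that the H5check() macro and TestH5.testH5check_version() exercise. The file name version_check.c is made up for the example; only H5get_libversion(), H5check_version(), and the macros shown above are assumed from the library itself.

    /* version_check.c - sketch: compare compile-time and runtime HDF5 versions. */
    #include <stdio.h>
    #include "hdf5.h"

    int main(void)
    {
        unsigned maj = 0, min = 0, rel = 0;

        /* Runtime version of the library this program actually linked against. */
        if (H5get_libversion(&maj, &min, &rel) < 0) {
            fprintf(stderr, "H5get_libversion failed\n");
            return 1;
        }

        /* Compile-time version from H5public.h (1.10.2-snap0 after this commit). */
        printf("compiled against HDF5 %d.%d.%d%s%s\n",
               H5_VERS_MAJOR, H5_VERS_MINOR, H5_VERS_RELEASE,
               H5_VERS_SUBRELEASE[0] ? "-" : "", H5_VERS_SUBRELEASE);
        printf("running against  HDF5 %u.%u.%u\n", maj, min, rel);

        /* Same call the H5check() macro in the hunk above expands to. */
        H5check_version(H5_VERS_MAJOR, H5_VERS_MINOR, H5_VERS_RELEASE);
        return 0;
    }

By default H5check_version() warns and aborts when the compile-time and runtime numbers disagree (the check can be relaxed with the HDF5_DISABLE_VERSION_CHECK environment variable), which is why a version bump has to touch every file that hard-codes the triple, exactly as this diff does.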