From 442946d6c98797d0c426976ff747219266cf9e14 Mon Sep 17 00:00:00 2001 From: Vailin Choi Date: Tue, 5 May 2015 12:06:07 -0500 Subject: [svn-r27016] Bring revisions #26986 - #27003 from trunk to revise_chunks. h5committested. --- README.txt | 2 +- c++/src/Makefile.in | 2 +- c++/src/cpp_doc_config | 2 +- config/cmake/scripts/HDF518config.cmake | 2 +- config/lt_vers.am | 2 +- configure | 22 ++--- configure.ac | 2 +- fortran/src/Makefile.in | 2 +- hl/c++/src/Makefile.in | 2 +- hl/fortran/src/Makefile.in | 2 +- hl/src/H5LTparse.c | 144 ++++++++++++++++---------------- hl/src/H5LTparse.h | 4 +- hl/src/Makefile.in | 2 +- release_docs/INSTALL_CMake.txt | 99 +++++++++++++--------- release_docs/RELEASE.txt | 2 +- src/H5Dchunk.c | 35 +++----- src/H5Dint.c | 2 +- src/H5Dpkg.h | 4 +- src/H5public.h | 4 +- src/Makefile.in | 2 +- 20 files changed, 170 insertions(+), 168 deletions(-) diff --git a/README.txt b/README.txt index 5de45c1..66a8b9e 100644 --- a/README.txt +++ b/README.txt @@ -1,4 +1,4 @@ -HDF5 version 1.9.217-swmr0 currently under development +HDF5 version 1.9.218-swmr0 currently under development Please refer to the release_docs/INSTALL file for installation instructions. ------------------------------------------------------------------------------ diff --git a/c++/src/Makefile.in b/c++/src/Makefile.in index 3afedda..0487f69 100644 --- a/c++/src/Makefile.in +++ b/c++/src/Makefile.in @@ -670,7 +670,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 # After making changes, run bin/reconfigure to update other configure related # files like Makefile.in. LT_VERS_INTERFACE = 6 -LT_VERS_REVISION = 207 +LT_VERS_REVISION = 208 LT_VERS_AGE = 0 # This is our main target diff --git a/c++/src/cpp_doc_config b/c++/src/cpp_doc_config index 7ea93f6..4072c1b 100644 --- a/c++/src/cpp_doc_config +++ b/c++/src/cpp_doc_config @@ -38,7 +38,7 @@ PROJECT_NAME = "HDF5 C++ API" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = HDF5 version 1.9.216 currently under development +PROJECT_NUMBER = HDF5 version 1.9.218 currently under development # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/config/cmake/scripts/HDF518config.cmake b/config/cmake/scripts/HDF518config.cmake index 70423a3..9766b1c 100755 --- a/config/cmake/scripts/HDF518config.cmake +++ b/config/cmake/scripts/HDF518config.cmake @@ -5,7 +5,7 @@ ######################################################################### cmake_minimum_required(VERSION 3.1.0 FATAL_ERROR) -set(CTEST_SOURCE_VERSION 1.8.15) +set(CTEST_SOURCE_VERSION 1.8.15-pre7) set(CTEST_SOURCE_NAME hdf5-${CTEST_SOURCE_VERSION}) set(CTEST_BINARY_NAME "build") set(CTEST_DASHBOARD_ROOT "${CTEST_SCRIPT_DIRECTORY}") diff --git a/config/lt_vers.am b/config/lt_vers.am index 3cb67a4..38a6357 100644 --- a/config/lt_vers.am +++ b/config/lt_vers.am @@ -19,7 +19,7 @@ # After making changes, run bin/reconfigure to update other configure related # files like Makefile.in. LT_VERS_INTERFACE = 6 -LT_VERS_REVISION = 207 +LT_VERS_REVISION = 208 LT_VERS_AGE = 0 ## If the API changes *at all*, increment LT_VERS_INTERFACE and diff --git a/configure b/configure index 8df18a6..d5dc355 100755 --- a/configure +++ b/configure @@ -1,7 +1,7 @@ #! /bin/sh # From configure.ac Id: configure.ac 22697 2012-08-19 14:35:47Z hdftest . # Guess values for system-dependent variables and create Makefiles. 
-# Generated by GNU Autoconf 2.69 for HDF5 1.9.217-swmr0. +# Generated by GNU Autoconf 2.69 for HDF5 1.9.218-swmr0. # # Report bugs to . # @@ -591,8 +591,8 @@ MAKEFLAGS= # Identity of this package. PACKAGE_NAME='HDF5' PACKAGE_TARNAME='hdf5' -PACKAGE_VERSION='1.9.217-swmr0' -PACKAGE_STRING='HDF5 1.9.217-swmr0' +PACKAGE_VERSION='1.9.218-swmr0' +PACKAGE_STRING='HDF5 1.9.218-swmr0' PACKAGE_BUGREPORT='help@hdfgroup.org' PACKAGE_URL='' @@ -1472,7 +1472,7 @@ if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF -\`configure' configures HDF5 1.9.217-swmr0 to adapt to many kinds of systems. +\`configure' configures HDF5 1.9.218-swmr0 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... @@ -1542,7 +1542,7 @@ fi if test -n "$ac_init_help"; then case $ac_init_help in - short | recursive ) echo "Configuration of HDF5 1.9.217-swmr0:";; + short | recursive ) echo "Configuration of HDF5 1.9.218-swmr0:";; esac cat <<\_ACEOF @@ -1731,7 +1731,7 @@ fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF -HDF5 configure 1.9.217-swmr0 +HDF5 configure 1.9.218-swmr0 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. @@ -2668,7 +2668,7 @@ cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. -It was created by HDF5 $as_me 1.9.217-swmr0, which was +It was created by HDF5 $as_me 1.9.218-swmr0, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ @@ -3539,7 +3539,7 @@ fi # Define the identity of the package. PACKAGE='hdf5' - VERSION='1.9.217-swmr0' + VERSION='1.9.218-swmr0' cat >>confdefs.h <<_ACEOF @@ -28577,7 +28577,7 @@ Usage: $0 [OPTIONS] Report bugs to ." lt_cl_version="\ -HDF5 config.lt 1.9.217-swmr0 +HDF5 config.lt 1.9.218-swmr0 configured by $0, generated by GNU Autoconf 2.69. Copyright (C) 2011 Free Software Foundation, Inc. @@ -30693,7 +30693,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" -This file was extended by HDF5 $as_me 1.9.217-swmr0, which was +This file was extended by HDF5 $as_me 1.9.218-swmr0, which was generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES @@ -30759,7 +30759,7 @@ _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ -HDF5 config.status 1.9.217-swmr0 +HDF5 config.status 1.9.218-swmr0 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" diff --git a/configure.ac b/configure.ac index 6fde896..06d0592 100644 --- a/configure.ac +++ b/configure.ac @@ -26,7 +26,7 @@ AC_PREREQ([2.69]) ## NOTE: Do not forget to change the version number here when we do a ## release!!! ## -AC_INIT([HDF5], [1.9.217-swmr0], [help@hdfgroup.org]) +AC_INIT([HDF5], [1.9.218-swmr0], [help@hdfgroup.org]) AC_CONFIG_SRCDIR([src/H5.c]) AC_CONFIG_HEADER([src/H5config.h]) diff --git a/fortran/src/Makefile.in b/fortran/src/Makefile.in index f9f4a7b..f57e7e7 100644 --- a/fortran/src/Makefile.in +++ b/fortran/src/Makefile.in @@ -724,7 +724,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 # After making changes, run bin/reconfigure to update other configure related # files like Makefile.in. 
LT_VERS_INTERFACE = 6 -LT_VERS_REVISION = 207 +LT_VERS_REVISION = 208 LT_VERS_AGE = 0 AM_FCLIBS = $(LIBHDF5) diff --git a/hl/c++/src/Makefile.in b/hl/c++/src/Makefile.in index 1813a3c..61cb11c 100644 --- a/hl/c++/src/Makefile.in +++ b/hl/c++/src/Makefile.in @@ -662,7 +662,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 # After making changes, run bin/reconfigure to update other configure related # files like Makefile.in. LT_VERS_INTERFACE = 6 -LT_VERS_REVISION = 207 +LT_VERS_REVISION = 208 LT_VERS_AGE = 0 # This is our main target diff --git a/hl/fortran/src/Makefile.in b/hl/fortran/src/Makefile.in index 1989c5a..7567dfc 100644 --- a/hl/fortran/src/Makefile.in +++ b/hl/fortran/src/Makefile.in @@ -680,7 +680,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 # After making changes, run bin/reconfigure to update other configure related # files like Makefile.in. LT_VERS_INTERFACE = 6 -LT_VERS_REVISION = 207 +LT_VERS_REVISION = 208 LT_VERS_AGE = 0 # Our main target, the high-level fortran library diff --git a/hl/src/H5LTparse.c b/hl/src/H5LTparse.c index 36591d3..6ca95c54 100644 --- a/hl/src/H5LTparse.c +++ b/hl/src/H5LTparse.c @@ -1641,229 +1641,229 @@ yyreduce: switch (yyn) { case 2: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 101 "H5LTparse.y" { memset(arr_stack, 0, STACK_SIZE*sizeof(struct arr_info)); /*initialize here?*/ } break; case 3: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 102 "H5LTparse.y" { return (yyval.hid);} break; case 13: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 116 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I8BE); } break; case 14: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 117 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I8LE); } break; case 15: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 118 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I16BE); } break; case 16: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 119 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I16LE); } break; case 17: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 120 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I32BE); } break; case 18: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 121 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I32LE); } break; case 19: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 122 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I64BE); } break; case 20: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 123 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I64LE); } break; case 21: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 124 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U8BE); } break; case 22: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 125 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U8LE); } break; case 23: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 126 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U16BE); } break; case 24: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 127 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U16LE); } break; case 25: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 128 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U32BE); } break; case 26: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 129 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U32LE); } break; case 27: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 130 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U64BE); } break; case 28: -/* Line 
1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 131 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U64LE); } break; case 29: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 132 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_CHAR); } break; case 30: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 133 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_SCHAR); } break; case 31: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 134 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_UCHAR); } break; case 32: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 135 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_SHORT); } break; case 33: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 136 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_USHORT); } break; case 34: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 137 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_INT); } break; case 35: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 138 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_UINT); } break; case 36: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 139 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_LONG); } break; case 37: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 140 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_ULONG); } break; case 38: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 141 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_LLONG); } break; case 39: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 142 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_ULLONG); } break; case 40: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 145 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_IEEE_F32BE); } break; case 41: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 146 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_IEEE_F32LE); } break; case 42: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 147 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_IEEE_F64BE); } break; case 43: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 148 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_IEEE_F64LE); } break; case 44: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 149 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_FLOAT); } break; case 45: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 150 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_DOUBLE); } break; case 46: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 151 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_LDOUBLE); } break; case 47: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 155 "H5LTparse.y" { csindex++; cmpd_stack[csindex].id = H5Tcreate(H5T_COMPOUND, 1); /*temporarily set size to 1*/ } break; case 48: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 157 "H5LTparse.y" { (yyval.hid) = cmpd_stack[csindex].id; cmpd_stack[csindex].id = 0; @@ -1873,13 +1873,13 @@ yyreduce: break; case 51: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 166 "H5LTparse.y" { cmpd_stack[csindex].is_field = 1; /*notify lexer a compound member is parsed*/ } break; case 52: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 168 "H5LTparse.y" { size_t origin_size, new_size; @@ -1915,7 +1915,7 @@ yyreduce: break; case 53: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 201 "H5LTparse.y" { (yyval.sval) = yylval.sval; @@ -1923,25 +1923,25 @@ yyreduce: break; case 54: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c 
*/ #line 206 "H5LTparse.y" { (yyval.ival) = 0; } break; case 55: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 208 "H5LTparse.y" { (yyval.ival) = yylval.ival; } break; case 57: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 212 "H5LTparse.y" { asindex++; /*pushd onto the stack*/ } break; case 58: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 214 "H5LTparse.y" { (yyval.hid) = H5Tarray_create2((yyvsp[(5) - (6)].hid), arr_stack[asindex].ndims, arr_stack[asindex].dims); @@ -1952,13 +1952,13 @@ yyreduce: break; case 61: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 224 "H5LTparse.y" { arr_stack[asindex].is_dim = 1; /*notice lexer of dimension size*/ } break; case 62: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 225 "H5LTparse.y" { unsigned ndims = arr_stack[asindex].ndims; arr_stack[asindex].dims[ndims] = (hsize_t)yylval.ival; @@ -1968,19 +1968,19 @@ yyreduce: break; case 65: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 236 "H5LTparse.y" { (yyval.hid) = H5Tvlen_create((yyvsp[(3) - (4)].hid)); H5Tclose((yyvsp[(3) - (4)].hid)); } break; case 66: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 241 "H5LTparse.y" { is_opq_size = 1; } break; case 67: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 242 "H5LTparse.y" { size_t size = (size_t)yylval.ival; @@ -1990,13 +1990,13 @@ yyreduce: break; case 68: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 247 "H5LTparse.y" { is_opq_tag = 1; } break; case 69: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 248 "H5LTparse.y" { H5Tset_tag((yyvsp[(7) - (13)].hid), yylval.sval); @@ -2005,19 +2005,19 @@ yyreduce: break; case 70: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 252 "H5LTparse.y" { (yyval.hid) = (yyvsp[(7) - (15)].hid); } break; case 73: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 260 "H5LTparse.y" { is_str_size = 1; } break; case 74: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 261 "H5LTparse.y" { if((yyvsp[(5) - (6)].ival) == H5T_VARIABLE_TOKEN) @@ -2029,7 +2029,7 @@ yyreduce: break; case 75: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 269 "H5LTparse.y" { if((yyvsp[(9) - (10)].ival) == H5T_STR_NULLTERM_TOKEN) @@ -2042,7 +2042,7 @@ yyreduce: break; case 76: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 278 "H5LTparse.y" { if((yyvsp[(13) - (14)].ival) == H5T_CSET_ASCII_TOKEN) @@ -2053,7 +2053,7 @@ yyreduce: break; case 77: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 285 "H5LTparse.y" { if((yyvsp[(17) - (18)].hid) == H5T_C_S1_TOKEN) @@ -2064,7 +2064,7 @@ yyreduce: break; case 78: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 292 "H5LTparse.y" { hid_t str_id = (yyvsp[(19) - (20)].hid); @@ -2085,67 +2085,67 @@ yyreduce: break; case 79: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 309 "H5LTparse.y" {(yyval.ival) = H5T_VARIABLE_TOKEN;} break; case 81: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 312 "H5LTparse.y" {(yyval.ival) = H5T_STR_NULLTERM_TOKEN;} break; case 82: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 313 "H5LTparse.y" {(yyval.ival) = H5T_STR_NULLPAD_TOKEN;} break; case 83: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 314 "H5LTparse.y" {(yyval.ival) = H5T_STR_SPACEPAD_TOKEN;} break; case 84: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 316 "H5LTparse.y" {(yyval.ival) = H5T_CSET_ASCII_TOKEN;} break; case 85: -/* Line 1792 of yacc.c */ +/* 
Line 1807 of yacc.c */ #line 317 "H5LTparse.y" {(yyval.ival) = H5T_CSET_UTF8_TOKEN;} break; case 86: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 319 "H5LTparse.y" {(yyval.hid) = H5T_C_S1_TOKEN;} break; case 87: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 320 "H5LTparse.y" {(yyval.hid) = H5T_FORTRAN_S1_TOKEN;} break; case 88: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 324 "H5LTparse.y" { is_enum = 1; enum_id = H5Tenum_create((yyvsp[(3) - (4)].hid)); H5Tclose((yyvsp[(3) - (4)].hid)); } break; case 89: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 326 "H5LTparse.y" { is_enum = 0; /*reset*/ (yyval.hid) = enum_id; } break; case 92: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 331 "H5LTparse.y" { is_enum_memb = 1; /*indicate member of enum*/ @@ -2158,7 +2158,7 @@ yyreduce: break; case 93: -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 340 "H5LTparse.y" { char char_val=(char)yylval.ival; @@ -2205,7 +2205,7 @@ yyreduce: break; -/* Line 1792 of yacc.c */ +/* Line 1807 of yacc.c */ #line 2191 "H5LTparse.c" default: break; } diff --git a/hl/src/H5LTparse.h b/hl/src/H5LTparse.h index 1461830..621dacd 100644 --- a/hl/src/H5LTparse.h +++ b/hl/src/H5LTparse.h @@ -109,7 +109,7 @@ extern int H5LTyydebug; #if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED typedef union YYSTYPE { -/* Line 2058 of yacc.c */ +/* Line 2065 of yacc.c */ #line 68 "H5LTparse.y" int ival; /*for integer token*/ @@ -117,7 +117,7 @@ typedef union YYSTYPE hid_t hid; /*for hid_t token*/ -/* Line 2058 of yacc.c */ +/* Line 2065 of yacc.c */ #line 122 "H5LTparse.h" } YYSTYPE; # define YYSTYPE_IS_TRIVIAL 1 diff --git a/hl/src/Makefile.in b/hl/src/Makefile.in index 8a3bf09..c82dde0 100644 --- a/hl/src/Makefile.in +++ b/hl/src/Makefile.in @@ -661,7 +661,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 # After making changes, run bin/reconfigure to update other configure related # files like Makefile.in. LT_VERS_INTERFACE = 6 -LT_VERS_REVISION = 207 +LT_VERS_REVISION = 208 LT_VERS_AGE = 0 # This library is our main target. diff --git a/release_docs/INSTALL_CMake.txt b/release_docs/INSTALL_CMake.txt index cb5bae5..7f67eb2 100644 --- a/release_docs/INSTALL_CMake.txt +++ b/release_docs/INSTALL_CMake.txt @@ -26,7 +26,7 @@ Obtaining HDF5 source code 2. Obtain compressed (*.tar or *.zip) HDF5 source from http://www.hdfgroup.org/ftp/HDF5/current/src/ and put it in "myhdfstuff". - Uncompress the file. + Uncompress the file. There should be a hdf5-1.8."X" folder. CMake version 1. We suggest you obtain the latest CMake from the Kitware web site. @@ -38,14 +38,19 @@ CMake version II. Quick Step Building HDF5 Libraries with CMake Script Mode ======================================================================== This short set of instructions is written for users who want to quickly -build the HDF5 C, C++, Fortran, and High-level shared libraries and tools -from the HDF5 source code package using the CMake tools. This procedure -will use the default settings in the config/cmake/cacheinit.cmake file. +build the HDF5 C and C++ static libraries and tools with SZIP and ZLIB +compression included. This procedure will use the default settings in +the config/cmake/cacheinit.cmake file. HDF Group recommends using the ctest script mode to build HDF5. 
The following files referenced below are available at the HDF web site: http://www.hdfgroup.org/HDF5/release/cmakebuild.html +Single compressed file with all the files needed, including source: + hdf5-1.8.15-CMake.zip or hdf5-1.8.15-CMake.tar.gz + +Individual files +----------------------------------------------- CMake build script: CTestScript.cmake @@ -55,6 +60,7 @@ External compression szip and zlib libraries: Platform configuration files: HDF518config.cmake +----------------------------------------------- To build HDF5 with the SZIP and ZLIB external libraries you will need to: @@ -66,9 +72,9 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: 3. Download the CTestScript.cmake file to "myhdfstuff". CTestScript.cmake file should not be modified. - 4. Download the platform configuration file to "myhdfstuff". - Do not modify the file unless you want to change default build - environment. + 4. Download the platform configuration file, HDF518config.cmake, + to "myhdfstuff". Do not modify the file unless you want to change + default build environment. (See http://www.hdfgroup.org/HDF5/release/chgcmkbuild.html) 5. From the "myhdfstuff" directory execute the CTest Script with the following options: @@ -78,22 +84,28 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: On 64-bit Windows with Visual Studio 2012, execute: ctest -S HDF518config.cmake,64-VS2012 -C Release -VV -O hdf5.log On 32-bit Windows with Visual Studio 2013, execute: - ctest -S HDF518config.cmake,32-VS2012 -C Release -VV -O hdf5.log + ctest -S HDF518config.cmake,32-VS2013 -C Release -VV -O hdf5.log On 64-bit Windows with Visual Studio 2013, execute: - ctest -S HDF518config.cmake,64-VS2012 -C Release -VV -O hdf5.log + ctest -S HDF518config.cmake,64-VS2013 -C Release -VV -O hdf5.log On Linux and Mac, execute: ctest -S HDF518config.cmake -C Release -VV -O hdf5.log The command above will configure, build, test, and create an install - package in the myhdfstuff/hdf5-1.8/build folder. + package in the myhdfstuff folder. It will have the format: + HDF5-1.8.NN-. + On Unix, will be "Linux". A similar .sh file will also be created. + On Windows, will be "win64" or "win32". If you have an + installer on your system, you will also see a similar file that ends + in either .exe (NSIS) or .msi (WiX). + The -S option uses the script version of ctest. The value for the -C option (as shown above, "-C Release") must match the setting for CTEST_BUILD_CONFIGURATION in the platform configuration file. - The -VV option is for verbose; use -V for less verbose. + The -VV option is for most verbose; use -V for less verbose. The "-O hdf5.log" option saves the output to a log file hdf5.log. @@ -114,7 +126,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: On Linux, change to the install destination directory (create it if doesn't exist) and execute: - /myhdfstuff/hdf5-1.8/build/HDF5-1.8."X"-Linux.sh + /myhdfstuff/HDF5-1.8."X"-Linux.sh After accepting the license, the script will prompt: By default the HDF5 will be installed in: "/HDF5-1.8."X"-Linux" @@ -130,7 +142,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: ------lib ------share - On Mac you will find HDF5-1.8."X"-Darwin.dmg in the build folder. Click + On Mac you will find HDF5-1.8."X"-Darwin.dmg in the myhdfstuff folder. Click on the dmg file to proceed with installation. 
After accepting the license, there will be a folder with the following structure: HDF_Group @@ -142,7 +154,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to: ------share By default the installation will create the bin, include, lib and cmake - folders in the /HDF_Group/HDF5/1.8. + folders in the /HDF_Group/HDF5/1.8."X" ======================================================================== @@ -165,10 +177,10 @@ Notes: This short set of instructions is written for users who want to 5. Configure the C library, tools and tests with one of the following commands: On Windows 32 bit - cmake -G "Visual Studio 11" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.8."X" + cmake -G "Visual Studio 11 2012" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.8."X" On Windows 64 bit - cmake -G "Visual Studio 11 Win64" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.8."X" + cmake -G "Visual Studio 11 2012 Win64" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.8."X" On Linux and Mac cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ../hdf5-1.8."X" @@ -273,7 +285,7 @@ IV. Further considerations CTEST_USE_LAUNCHERS:BOOL=ON CMAKE_BUILD_WITH_INSTALL_RPATH:BOOL=OFF - 4. Windows developers should install NSIS to create an install image with CPack. + 4. Windows developers should install NSIS or WiX to create an install image with CPack. Visual Studio Express users will not be able to package HDF5 into an install image executable. @@ -281,7 +293,7 @@ IV. Further considerations the settings for the developers' environment. Then the only options needed on the command line are those options that are different. Example using HDF default cache file: - cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 11" \ + cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 11 2012" \ -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF \ -DCMAKE_BUILD_TYPE:STRING=Release .. @@ -360,12 +372,12 @@ These five steps are described in detail below. * MinGW Makefiles * NMake Makefiles * Unix Makefiles - * Visual Studio 12 - * Visual Studio 12 Win64 - * Visual Studio 11 - * Visual Studio 11 Win64 - * Visual Studio 10 - * Visual Studio 10 Win64 + * Visual Studio 12 2013 + * Visual Studio 12 2013 Win64 + * Visual Studio 11 2012 + * Visual Studio 11 2012 Win64 + * Visual Studio 10 2010 + * Visual Studio 10 2010 Win64 is: * SZIP_INCLUDE_DIR:PATH= @@ -432,7 +444,7 @@ These five steps are described in detail below. 2.2 Preferred command line example on Windows in c:\MyHDFstuff\hdf5\build directory: - cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 11" \ + cmake -C ../config/cmake/cacheinit.cmake -G "Visual Studio 11 2012" \ -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF \ -DCMAKE_BUILD_TYPE:STRING=Release .. @@ -635,7 +647,7 @@ adding an option (${CTEST_SCRIPT_ARG}) to the platform configuration script. 
######################################################################### cmake_minimum_required(VERSION 3.1.0 FATAL_ERROR) -set(CTEST_SOURCE_VERSION 1.8.X) +set(CTEST_SOURCE_VERSION 1.8.15-pre7) set(CTEST_SOURCE_NAME hdf5-${CTEST_SOURCE_VERSION}) set(CTEST_BINARY_NAME "build") set(CTEST_DASHBOARD_ROOT "${CTEST_SCRIPT_DIRECTORY}") @@ -742,10 +754,6 @@ set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRING set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_F2003:BOOL=ON") ### disable Fortran; change OFF to ON in order to build FORTRAN libraries set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_FORTRAN:BOOL=OFF") -if(APPLE) - ### allow shared builds without fortran on MAC - set(NO_MAC_FORTRAN "TRUE") -endif() ### disable test program builds #set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DBUILD_TESTING:BOOL=OFF") @@ -774,21 +782,30 @@ if(WIN32) else() include(${CTEST_SCRIPT_DIRECTORY}/CTestScript.cmake) if(APPLE) - if(EXISTS "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Darwin.dmg") - file(COPY "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Darwin.dmg" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) + if(EXISTS "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Darwin.dmg") + file(COPY "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Darwin.dmg" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) endif() - if(EXISTS "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Darwin.tar.gz") - file(COPY "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Darwin.tar.gz" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) + if(EXISTS "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Darwin.tar.gz") + file(COPY "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Darwin.tar.gz" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) endif() - if(EXISTS "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Darwin.sh") - file(COPY "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Darwin.sh" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) + if(EXISTS "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Darwin.sh") + file(COPY "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Darwin.sh" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) endif() else() - if(EXISTS "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Linux.sh") - file(COPY "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Linux.sh" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) - endif() - if(EXISTS "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Linux.tar.gz") - file(COPY "${CTEST_BINARY_DIRECTORY}\\HDF5-${CTEST_SOURCE_VERSION}-Linux.tar.gz" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) + if(CYGWIN) + if(EXISTS "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-CYGWIN.sh") + file(COPY "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-CYGWIN.sh" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) + endif() + if(EXISTS "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-CYGWIN.tar.gz") + file(COPY "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-CYGWIN.tar.gz" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) + endif() + else() + if(EXISTS "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Linux.sh") + file(COPY "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Linux.sh" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) + endif() + if(EXISTS "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Linux.tar.gz") + file(COPY "${CTEST_BINARY_DIRECTORY}/HDF5-${CTEST_SOURCE_VERSION}-Linux.tar.gz" DESTINATION ${CTEST_SCRIPT_DIRECTORY}) + endif() endif() endif() endif() diff --git a/release_docs/RELEASE.txt 
b/release_docs/RELEASE.txt index e4b91cf..bd8b061 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -1,4 +1,4 @@ -HDF5 version 1.9.217-swmr0 currently under development +HDF5 version 1.9.218-swmr0 currently under development ================================================================================ diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c index 84b7294..2f0be65 100644 --- a/src/H5Dchunk.c +++ b/src/H5Dchunk.c @@ -1240,7 +1240,7 @@ H5D__create_chunk_file_map_hyper(H5D_chunk_map_t *fm, const H5D_io_info_t hsize_t sel_points; /* Number of elements in file selection */ hsize_t start_coords[H5O_LAYOUT_NDIMS]; /* Starting coordinates of selection */ hsize_t coords[H5O_LAYOUT_NDIMS]; /* Current coordinates of chunk */ - hsize_t end[H5O_LAYOUT_NDIMS]; /* Current coordinates of chunk */ + hsize_t end[H5O_LAYOUT_NDIMS]; /* Final coordinates of chunk */ hsize_t chunk_index; /* Index of chunk */ hsize_t scaled[H5S_MAX_RANK]; /* Scaled coordinates for this chunk */ int curr_dim; /* Current dimension to increment */ @@ -1896,10 +1896,11 @@ H5D__chunk_read(H5D_io_info_t *io_info, const H5D_type_info_t *type_info, htri_t cacheable; /* Whether the chunk is cacheable */ /* Pass in chunk's coordinates in a union. */ - io_info->store->chunk.offset = chunk_info->coords; + io_info->store->chunk.offset = chunk_info->coords; io_info->store->chunk.index = chunk_info->index; /* Load the chunk into cache and lock it. */ + /* Determine if we should use the chunk cache */ if((cacheable = H5D__chunk_cacheable(io_info, udata.chunk_block.offset, FALSE)) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't tell if chunk is cacheable") @@ -2026,6 +2027,7 @@ H5D__chunk_write(H5D_io_info_t *io_info, const H5D_type_info_t *type_info, io_info->store->chunk.offset = &chunk_info->coords[0]; io_info->store->chunk.index = chunk_info->index; + /* Determine if we should use the chunk cache */ if((cacheable = H5D__chunk_cacheable(io_info, udata.chunk_block.offset, TRUE)) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't tell if chunk is cacheable") if(cacheable) { @@ -2522,7 +2524,6 @@ H5D__chunk_lookup(const H5D_t *dset, hid_t dxpl_id, const hsize_t *chunk_offset, udata->common.layout = &(dset->shared->layout.u.chunk); udata->common.storage = &(dset->shared->layout.storage.u.chunk); udata->common.offset = chunk_offset; - udata->common.rdcc = &(dset->shared->cache.chunk); udata->common.space_dim = dset->shared->curr_dims; udata->common.scaled = scaled; @@ -2539,7 +2540,7 @@ H5D__chunk_lookup(const H5D_t *dset, hid_t dxpl_id, const hsize_t *chunk_offset, ent = dset->shared->cache.chunk.slot[udata->idx_hint]; if(ent) - for(u = 0, found = TRUE; u < dset->shared->layout.u.chunk.ndims - 1; u++) + for(u = 0, found = TRUE; u < dset->shared->ndims; u++) if(chunk_offset[u] != ent->offset[u]) { found = FALSE; break; @@ -2623,7 +2624,7 @@ H5D__chunk_flush_entry(const H5D_t *dset, hid_t dxpl_id, const H5D_dxpl_cache_t HDassert(!ent->locked); buf = ent->chunk; - if(ent->dirty && !ent->deleted) { + if(ent->dirty) { H5D_chk_idx_info_t idx_info; /* Chunked index info */ H5D_chunk_ud_t udata; /* pass through B-tree */ hbool_t must_alloc = FALSE; /* Whether the chunk must be allocated */ @@ -2633,7 +2634,6 @@ H5D__chunk_flush_entry(const H5D_t *dset, hid_t dxpl_id, const H5D_dxpl_cache_t udata.common.layout = &dset->shared->layout.u.chunk; udata.common.storage = &dset->shared->layout.storage.u.chunk; udata.common.offset = ent->offset; - udata.common.rdcc = &(dset->shared->cache.chunk); udata.common.scaled = 
ent->scaled; udata.chunk_block.offset = ent->chunk_block.offset; udata.chunk_block.length = dset->shared->layout.u.chunk.size; @@ -4729,7 +4729,6 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) HDmemset(&udata, 0, sizeof udata); udata.common.layout = &layout->u.chunk; udata.common.storage = &layout->storage.u.chunk; - udata.common.rdcc = rdcc; udata.io_info = &chk_io_info; udata.idx_info = &idx_info; udata.space_dim = space_dim; @@ -4808,19 +4807,6 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) / chunk_dim[op_dim]); } /* end for */ - /* Check the cache for any entries that are outside the bounds. Mark these - * entries as deleted so they are not flushed to disk accidentally. This is - * only necessary if there are chunks that need to be filled. */ - if(has_fill) - for(ent = rdcc->head; ent; ent = ent->next) - /* Check for chunk offset outside of new dimensions */ - for(u = 0; u < space_ndims; u++) - if((hsize_t)ent->offset[u] >= space_dim[u]) { - /* Mark the entry as "deleted" */ - ent->deleted = TRUE; - break; - } /* end if */ - /* Main loop: fill or remove chunks */ for(op_dim = 0; op_dim < (unsigned)space_ndims; op_dim++) { /* Check if modification along this dimension is really necessary */ @@ -5058,7 +5044,6 @@ H5D__chunk_addrmap(const H5D_io_info_t *io_info, haddr_t chunk_addr[]) HDmemset(&udata, 0, sizeof(udata)); udata.common.layout = &dset->shared->layout.u.chunk; udata.common.storage = &dset->shared->layout.storage.u.chunk; - udata.common.rdcc = &(dset->shared->cache.chunk); udata.chunk_addr = chunk_addr; /* Compose chunked index info struct */ @@ -5184,7 +5169,6 @@ H5D__chunk_update_cache(H5D_t *dset, hid_t dxpl_id) { H5D_rdcc_t *rdcc = &(dset->shared->cache.chunk); /*raw data chunk cache */ H5D_rdcc_ent_t *ent, *next; /*cache entry */ - H5D_rdcc_ent_t *old_ent; /* Old cache entry */ H5D_rdcc_ent_t tmp_head; /* Sentinel entry for temporary entry list */ H5D_rdcc_ent_t *tmp_tail; /* Tail pointer for temporary entry list */ H5D_dxpl_cache_t _dxpl_cache; /* Data transfer property cache buffer */ @@ -5223,10 +5207,13 @@ H5D__chunk_update_cache(H5D_t *dset, hid_t dxpl_id) ent->idx = H5D__chunk_hash_val(dset->shared, ent->scaled); if(old_idx != ent->idx) { + H5D_rdcc_ent_t *old_ent; /* Old cache entry */ + /* Check if there is already a chunk at this chunk's new location */ old_ent = rdcc->slot[ent->idx]; if(old_ent != NULL) { - HDassert(old_ent->locked == 0); + HDassert(old_ent->locked == FALSE); + HDassert(old_ent->deleted == FALSE); /* Insert the old entry into the temporary list, but do not * evict (yet). 
Make sure we do not make any calls to the index @@ -5436,7 +5423,6 @@ H5D__chunk_copy_cb(const H5D_chunk_rec_t *chunk_rec, void *_udata) udata_dst.common.layout = udata->idx_info_dst->layout; udata_dst.common.storage = udata->idx_info_dst->storage; udata_dst.common.offset = chunk_rec->offset; - udata_dst.common.rdcc = NULL; udata_dst.chunk_block.offset = HADDR_UNDEF; udata_dst.chunk_block.length = chunk_rec->nbytes; udata_dst.filter_mask = chunk_rec->filter_mask; @@ -5713,7 +5699,6 @@ H5D__chunk_copy(H5F_t *f_src, H5O_storage_chunk_t *storage_src, HDmemset(&udata, 0, sizeof udata); udata.common.layout = layout_src; udata.common.storage = storage_src; - udata.common.rdcc = NULL; udata.file_src = f_src; udata.idx_info_dst = &idx_info_dst; udata.buf = buf; diff --git a/src/H5Dint.c b/src/H5Dint.c index c397da5..40a2be6 100644 --- a/src/H5Dint.c +++ b/src/H5Dint.c @@ -2399,7 +2399,7 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id) */ /* Update the index values for the cached chunks for this dataset */ if(H5D_CHUNKED == dset->shared->layout.type) { - /* Update the cached chunk info */ + /* Set the cached chunk info */ if(H5D__chunk_set_info(dset) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks") diff --git a/src/H5Dpkg.h b/src/H5Dpkg.h index 9435bb8..8c0f806 100644 --- a/src/H5Dpkg.h +++ b/src/H5Dpkg.h @@ -417,7 +417,7 @@ typedef struct H5D_rdcc_t { struct H5D_rdcc_ent_t **slot; /* Chunk slots, each points to a chunk*/ H5SL_t *sel_chunks; /* Skip list containing information for each chunk selected */ H5S_t *single_space; /* Dataspace for single element I/O on chunks */ - H5D_chunk_info_t *single_chunk_info; /* Pointer to single chunk's info */ + H5D_chunk_info_t *single_chunk_info; /* Pointer to single chunk's info */ } H5D_rdcc_t; /* The raw data contiguous data cache */ @@ -533,7 +533,7 @@ typedef struct { typedef struct H5D_rdcc_ent_t { hbool_t locked; /*entry is locked in cache */ hbool_t dirty; /*needs to be written to disk? */ - hbool_t deleted; /*chunk about to be deleted (do not flush) */ + hbool_t deleted; /*chunk about to be deleted */ unsigned edge_chunk_state; /*states related to edge chunks (see above) */ hsize_t offset[H5O_LAYOUT_NDIMS]; /*chunk name */ hsize_t scaled[H5O_LAYOUT_NDIMS]; /*scaled chunk 'name' (coordinates) */ diff --git a/src/H5public.h b/src/H5public.h index f4df047..2db29ac 100644 --- a/src/H5public.h +++ b/src/H5public.h @@ -94,10 +94,10 @@ extern "C" { /* Version numbers */ #define H5_VERS_MAJOR 1 /* For major interface/format changes */ #define H5_VERS_MINOR 9 /* For minor interface/format changes */ -#define H5_VERS_RELEASE 217 /* For tweaks, bug-fixes, or development */ +#define H5_VERS_RELEASE 218 /* For tweaks, bug-fixes, or development */ #define H5_VERS_SUBRELEASE "swmr0" /* For pre-releases like snap0 */ /* Empty string for real releases. */ -#define H5_VERS_INFO "HDF5 library version: 1.9.217-swmr0" /* Full version string */ +#define H5_VERS_INFO "HDF5 library version: 1.9.218-swmr0" /* Full version string */ #define H5check() H5check_version(H5_VERS_MAJOR,H5_VERS_MINOR, \ H5_VERS_RELEASE) diff --git a/src/Makefile.in b/src/Makefile.in index 9862c48..d678aeb 100644 --- a/src/Makefile.in +++ b/src/Makefile.in @@ -726,7 +726,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog *.clog2 # After making changes, run bin/reconfigure to update other configure related # files like Makefile.in. 
LT_VERS_INTERFACE = 6 -LT_VERS_REVISION = 207 +LT_VERS_REVISION = 208 LT_VERS_AGE = 0 # Our main target, the HDF5 library -- cgit v0.12
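For reference, the CTest script-mode build procedure that the INSTALL_CMake.txt hunks above describe can be summarized as the short shell sketch below (Linux case). The working-directory name "myhdfstuff", the archive file name, and the 1.8."X" placeholder are assumptions taken from the documentation text; the ctest options are exactly the ones quoted in the patched instructions, not additions.

    # Minimal sketch, assuming the HDF5 source archive, CTestScript.cmake, and
    # HDF518config.cmake have already been obtained from
    # http://www.hdfgroup.org/HDF5/release/cmakebuild.html
    mkdir -p myhdfstuff && cd myhdfstuff

    # 1. Uncompress the source so an hdf5-1.8."X" folder exists here
    #    (archive name below is a placeholder, not a real file name)
    tar xzf hdf5-1.8.X.tar.gz

    # 2. Put CTestScript.cmake and the unmodified HDF518config.cmake in this
    #    directory, alongside the extracted source folder.

    # 3. Configure, build, test, and create the install package; the -C value
    #    must match CTEST_BUILD_CONFIGURATION in HDF518config.cmake, -VV is
    #    most-verbose output, and -O writes the log to hdf5.log.
    ctest -S HDF518config.cmake -C Release -VV -O hdf5.log

    # 4. The install package appears in this folder, e.g. HDF5-1.8."X"-Linux.sh

On Windows the same script is invoked with a generator suffix, e.g. `ctest -S HDF518config.cmake,64-VS2013 -C Release -VV -O hdf5.log`, as shown in the patched documentation.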