From cf98752566a33067a5737e8a008dc5daa1f4c59c Mon Sep 17 00:00:00 2001 From: Larry Knox Date: Fri, 17 Apr 2015 17:24:47 -0500 Subject: [svn-r26840] configure configure.ac hl/src/H5LTparse.c hl/src/H5LTparse.h Address HDFFV-9010: configure issue with gcc 4.9.2 "-l ". Change default to disable_shared on CYGWIN. release_docs/INSTALL_Cygwin.txt release_docs/RELEASE.txt Update for changed default and no CYGWIN szip binary. c++/src/cpp_doc_config bin/h5vers AddressHDFFV-9010: Add cpp_doc_config to h5vers Tested with h5committest, on CYGWIN. --- bin/h5vers | 47 ++++++++++++- c++/src/cpp_doc_config | 2 +- configure | 21 ++++++ configure.ac | 21 ++++++ hl/src/H5LTparse.c | 144 ++++++++++++++++++++-------------------- hl/src/H5LTparse.h | 4 +- release_docs/INSTALL_Cygwin.txt | 8 ++- release_docs/RELEASE.txt | 5 ++ 8 files changed, 174 insertions(+), 78 deletions(-) diff --git a/bin/h5vers b/bin/h5vers index cb4b9e0..2aa8023 100755 --- a/bin/h5vers +++ b/bin/h5vers @@ -187,6 +187,10 @@ die "unable to read file: $RELEASE\n" unless -r $file; my $CONFIGURE = $file; $CONFIGURE =~ s/[^\/]*$/..\/configure.ac/; die "unable to read file: $CONFIGURE\n" unless -r $file; +# cpp_doc_config +my $CPP_DOC_CONFIG = $file; +$CPP_DOC_CONFIG =~ s/[^\/]*$/..\/c++\/src\/cpp_doc_config/; +die "unable to read file: $CPP_DOC_CONFIG\n" unless -r $file; # Get the current version number. open FILE, $file or die "unable to open $file: $!\n"; @@ -234,7 +238,8 @@ if ($set) { $README = ""; $RELEASE = ""; $CONFIGURE = ""; - $LT_VERS = ""; + $CPP_DOC_CONFIG = ""; + $LT_VERS = ""; @newver = @curver; } @@ -309,6 +314,46 @@ if ($RELEASE) { close FILE; } +# Update the c++/src/cpp_doc_config file +if ($CPP_DOC_CONFIG) { + my $data = read_file($CPP_DOC_CONFIG); + my $version_string = sprintf("HDF5 version %d.%d.%d%s %s", + @newver[0,1,2], + $newver[3] eq "" ? "" : "-".$newver[3], + "currently under development"); + + $data =~ s/PROJECT_NUMBER\s*=.*/PROJECT_NUMBER = $version_string/; + + write_file($CPP_DOC_CONFIG, $data); +} + +# helper function to read the file for updating c++/src/cpp_doc_config file. +# The version string in that file is not at the top, so the string replacement +# is not for the first line, and reading/writing the entire file as one string +# facilitates the substring replacement. +sub read_file { + my ($filename) = @_; + + open my $in, $filename or die "Could not open '$filename' for reading $!"; + local $/ = undef; + my $all = <$in>; + close $in; + + return $all; +} + +# helper function to write the file for updating c++/src/cpp_doc_config file. +sub write_file { + my ($filename, $content) = @_; + + open my $out, ">$filename" or die "Could not open '$filename' for writing $!";; + print $out $content; + close $out; + + return; +} + + sub gen_configure { my ($name, $conf) = @_; diff --git a/c++/src/cpp_doc_config b/c++/src/cpp_doc_config index f2caed2..7ea93f6 100644 --- a/c++/src/cpp_doc_config +++ b/c++/src/cpp_doc_config @@ -38,7 +38,7 @@ PROJECT_NAME = "HDF5 C++ API" # could be handy for archiving the generated documentation or if some version # control system is used. 
-PROJECT_NUMBER = 1.8.13 +PROJECT_NUMBER = HDF5 version 1.9.216 currently under development # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/configure b/configure index cc937fa..adabfc3 100755 --- a/configure +++ b/configure @@ -7684,6 +7684,19 @@ fi ## ---------------------------------------------------------------------- +## Disable shared libraries on CYGWIN. (LK - 04/16/15) +## A number of tests run by "make check" fail on CYGWIN, so for HDF5 v1.8.15 +## we will change the default for shared libraries to disabled. + + +case "`uname`" in + CYGWIN*) + enable_shared="no" + CHECK_WARN="Shared libraries are not currently supported on CYGWIN." + ;; +esac + +## ---------------------------------------------------------------------- ## Fortran libraries are not currently supported on Mac. Disable them. ## (this is overridable with --enable-unsupported). ## @@ -21435,6 +21448,8 @@ ac_compiler_gnu=$ac_cv_c_compiler_gnu + + ## ---------------------------------------------------------------------- ## Check if we should install only statically linked executables. ## This check needs to occur after libtool is initialized because @@ -33129,5 +33144,11 @@ cat >> src/H5config.h <, specifically gfortran or m. +## This sed script corrects "-l " first and then "-l " with no library name. +## If the order is not preserved, all instances of "-l " will be removed. +sed -e '/^postdeps/ s/-l \(a-zA-Z\)/-l\1/g' -e '/^postdeps/ s/-l //g' -i libtool + ## show the configure settings cat src/libhdf5.settings diff --git a/configure.ac b/configure.ac index 9fd3817..5bcd530 100644 --- a/configure.ac +++ b/configure.ac @@ -637,6 +637,19 @@ AC_SUBST([RUNPARALLEL]) AC_SUBST([TESTPARALLEL]) ## ---------------------------------------------------------------------- +## Disable shared libraries on CYGWIN. (LK - 04/16/15) +## A number of tests run by "make check" fail on CYGWIN, so for HDF5 v1.8.15 +## we will change the default for shared libraries to disabled. + + +case "`uname`" in + CYGWIN*) + enable_shared="no" + CHECK_WARN="Shared libraries are not currently supported on CYGWIN." + ;; +esac + +## ---------------------------------------------------------------------- ## Fortran libraries are not currently supported on Mac. Disable them. ## (this is overridable with --enable-unsupported). ## @@ -687,6 +700,8 @@ LT_PREREQ([2.2]) ## win32-dll - This will build clean dlls on win32 platforms. LT_INIT([dlopen,win32-dll]) + + ## ---------------------------------------------------------------------- ## Check if we should install only statically linked executables. ## This check needs to occur after libtool is initialized because @@ -2899,5 +2914,11 @@ cat >> src/H5config.h <, specifically gfortran or m. +## This sed script corrects "-l " first and then "-l " with no library name. +## If the order is not preserved, all instances of "-l " will be removed. 
+sed -e '/^postdeps/ s/-l \([a-zA-Z]\)/-l\1/g' -e '/^postdeps/ s/-l //g' -i libtool + ## show the configure settings cat src/libhdf5.settings diff --git a/hl/src/H5LTparse.c b/hl/src/H5LTparse.c index 6ca95c54..36591d3 100644 --- a/hl/src/H5LTparse.c +++ b/hl/src/H5LTparse.c @@ -1641,229 +1641,229 @@ yyreduce: switch (yyn) { case 2: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 101 "H5LTparse.y" { memset(arr_stack, 0, STACK_SIZE*sizeof(struct arr_info)); /*initialize here?*/ } break; case 3: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 102 "H5LTparse.y" { return (yyval.hid);} break; case 13: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 116 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I8BE); } break; case 14: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 117 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I8LE); } break; case 15: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 118 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I16BE); } break; case 16: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 119 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I16LE); } break; case 17: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 120 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I32BE); } break; case 18: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 121 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I32LE); } break; case 19: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 122 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I64BE); } break; case 20: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 123 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_I64LE); } break; case 21: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 124 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U8BE); } break; case 22: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 125 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U8LE); } break; case 23: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 126 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U16BE); } break; case 24: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 127 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U16LE); } break; case 25: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 128 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U32BE); } break; case 26: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 129 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U32LE); } break; case 27: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 130 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U64BE); } break; case 28: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 131 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_STD_U64LE); } break; case 29: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 132 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_CHAR); } break; case 30: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 133 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_SCHAR); } break; case 31: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 134 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_UCHAR); } break; case 32: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 135 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_SHORT); } break; case 33: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 136 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_USHORT); } break; case 34: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 
137 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_INT); } break; case 35: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 138 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_UINT); } break; case 36: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 139 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_LONG); } break; case 37: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 140 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_ULONG); } break; case 38: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 141 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_LLONG); } break; case 39: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 142 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_ULLONG); } break; case 40: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 145 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_IEEE_F32BE); } break; case 41: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 146 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_IEEE_F32LE); } break; case 42: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 147 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_IEEE_F64BE); } break; case 43: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 148 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_IEEE_F64LE); } break; case 44: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 149 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_FLOAT); } break; case 45: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 150 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_DOUBLE); } break; case 46: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 151 "H5LTparse.y" { (yyval.hid) = H5Tcopy(H5T_NATIVE_LDOUBLE); } break; case 47: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 155 "H5LTparse.y" { csindex++; cmpd_stack[csindex].id = H5Tcreate(H5T_COMPOUND, 1); /*temporarily set size to 1*/ } break; case 48: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 157 "H5LTparse.y" { (yyval.hid) = cmpd_stack[csindex].id; cmpd_stack[csindex].id = 0; @@ -1873,13 +1873,13 @@ yyreduce: break; case 51: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 166 "H5LTparse.y" { cmpd_stack[csindex].is_field = 1; /*notify lexer a compound member is parsed*/ } break; case 52: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 168 "H5LTparse.y" { size_t origin_size, new_size; @@ -1915,7 +1915,7 @@ yyreduce: break; case 53: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 201 "H5LTparse.y" { (yyval.sval) = yylval.sval; @@ -1923,25 +1923,25 @@ yyreduce: break; case 54: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 206 "H5LTparse.y" { (yyval.ival) = 0; } break; case 55: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 208 "H5LTparse.y" { (yyval.ival) = yylval.ival; } break; case 57: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 212 "H5LTparse.y" { asindex++; /*pushd onto the stack*/ } break; case 58: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 214 "H5LTparse.y" { (yyval.hid) = H5Tarray_create2((yyvsp[(5) - (6)].hid), arr_stack[asindex].ndims, arr_stack[asindex].dims); @@ -1952,13 +1952,13 @@ yyreduce: break; case 61: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 224 "H5LTparse.y" { arr_stack[asindex].is_dim = 1; /*notice lexer of dimension size*/ } break; case 62: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 225 "H5LTparse.y" { unsigned ndims = arr_stack[asindex].ndims; arr_stack[asindex].dims[ndims] = 
(hsize_t)yylval.ival; @@ -1968,19 +1968,19 @@ yyreduce: break; case 65: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 236 "H5LTparse.y" { (yyval.hid) = H5Tvlen_create((yyvsp[(3) - (4)].hid)); H5Tclose((yyvsp[(3) - (4)].hid)); } break; case 66: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 241 "H5LTparse.y" { is_opq_size = 1; } break; case 67: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 242 "H5LTparse.y" { size_t size = (size_t)yylval.ival; @@ -1990,13 +1990,13 @@ yyreduce: break; case 68: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 247 "H5LTparse.y" { is_opq_tag = 1; } break; case 69: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 248 "H5LTparse.y" { H5Tset_tag((yyvsp[(7) - (13)].hid), yylval.sval); @@ -2005,19 +2005,19 @@ yyreduce: break; case 70: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 252 "H5LTparse.y" { (yyval.hid) = (yyvsp[(7) - (15)].hid); } break; case 73: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 260 "H5LTparse.y" { is_str_size = 1; } break; case 74: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 261 "H5LTparse.y" { if((yyvsp[(5) - (6)].ival) == H5T_VARIABLE_TOKEN) @@ -2029,7 +2029,7 @@ yyreduce: break; case 75: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 269 "H5LTparse.y" { if((yyvsp[(9) - (10)].ival) == H5T_STR_NULLTERM_TOKEN) @@ -2042,7 +2042,7 @@ yyreduce: break; case 76: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 278 "H5LTparse.y" { if((yyvsp[(13) - (14)].ival) == H5T_CSET_ASCII_TOKEN) @@ -2053,7 +2053,7 @@ yyreduce: break; case 77: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 285 "H5LTparse.y" { if((yyvsp[(17) - (18)].hid) == H5T_C_S1_TOKEN) @@ -2064,7 +2064,7 @@ yyreduce: break; case 78: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 292 "H5LTparse.y" { hid_t str_id = (yyvsp[(19) - (20)].hid); @@ -2085,67 +2085,67 @@ yyreduce: break; case 79: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 309 "H5LTparse.y" {(yyval.ival) = H5T_VARIABLE_TOKEN;} break; case 81: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 312 "H5LTparse.y" {(yyval.ival) = H5T_STR_NULLTERM_TOKEN;} break; case 82: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 313 "H5LTparse.y" {(yyval.ival) = H5T_STR_NULLPAD_TOKEN;} break; case 83: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 314 "H5LTparse.y" {(yyval.ival) = H5T_STR_SPACEPAD_TOKEN;} break; case 84: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 316 "H5LTparse.y" {(yyval.ival) = H5T_CSET_ASCII_TOKEN;} break; case 85: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 317 "H5LTparse.y" {(yyval.ival) = H5T_CSET_UTF8_TOKEN;} break; case 86: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 319 "H5LTparse.y" {(yyval.hid) = H5T_C_S1_TOKEN;} break; case 87: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 320 "H5LTparse.y" {(yyval.hid) = H5T_FORTRAN_S1_TOKEN;} break; case 88: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 324 "H5LTparse.y" { is_enum = 1; enum_id = H5Tenum_create((yyvsp[(3) - (4)].hid)); H5Tclose((yyvsp[(3) - (4)].hid)); } break; case 89: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 326 "H5LTparse.y" { is_enum = 0; /*reset*/ (yyval.hid) = enum_id; } break; case 92: -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 331 "H5LTparse.y" { is_enum_memb = 1; /*indicate member of enum*/ @@ -2158,7 +2158,7 @@ yyreduce: break; case 93: -/* Line 
1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 340 "H5LTparse.y" { char char_val=(char)yylval.ival; @@ -2205,7 +2205,7 @@ yyreduce: break; -/* Line 1807 of yacc.c */ +/* Line 1792 of yacc.c */ #line 2191 "H5LTparse.c" default: break; } diff --git a/hl/src/H5LTparse.h b/hl/src/H5LTparse.h index 621dacd..1461830 100644 --- a/hl/src/H5LTparse.h +++ b/hl/src/H5LTparse.h @@ -109,7 +109,7 @@ extern int H5LTyydebug; #if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED typedef union YYSTYPE { -/* Line 2065 of yacc.c */ +/* Line 2058 of yacc.c */ #line 68 "H5LTparse.y" int ival; /*for integer token*/ @@ -117,7 +117,7 @@ typedef union YYSTYPE hid_t hid; /*for hid_t token*/ -/* Line 2065 of yacc.c */ +/* Line 2058 of yacc.c */ #line 122 "H5LTparse.h" } YYSTYPE; # define YYSTYPE_IS_TRIVIAL 1 diff --git a/release_docs/INSTALL_Cygwin.txt b/release_docs/INSTALL_Cygwin.txt index 2b72cac..26d3cb9 100644 --- a/release_docs/INSTALL_Cygwin.txt +++ b/release_docs/INSTALL_Cygwin.txt @@ -34,7 +34,7 @@ Preconditions: The following compilers are supported by HDF5 and included in the Cygwin package system: - gcc (4.7.3), which includes: + gcc (4.7.3 and 4.9.2), which includes: gcc4-core : C compiler gcc4-g++ : C++ compiler gcc4-fortran : fortran compiler @@ -72,7 +72,6 @@ Preconditions: The latest supported public release of SZIP is available from ftp://ftp.hdfgroup.org/lib-external/szip/2.1. - 2.3 Additional Utilities @@ -260,6 +259,11 @@ Build, Test and Install HDF5 on Cygwin dt_arith tests may fail due to the use of fork. This is a known issue with cygwin on Windows. + "make check" fails when building shared lib files is enabled. The default + on Cygwin has been changed to disable shared. It can be enabled with + the --enable-shared configure option but is likely to fail "make check" + with GCC compilers. + ----------------------------------------------------------------------- Need Further assistance, email help@hdfgroup.org diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 4bc2667..a7ba365 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -1383,6 +1383,11 @@ The following platforms are not supported but have been tested for this release. Known Problems ============== +* "make check" fails on CYGWIN when building shared lib files is enabled. The + default on Cygwin has been changed to disable shared. It can be enabled with + the --enable-shared configure option but is likely to fail "make check" + with GCC compilers. (LK -2015/04/16) + * CLANG compiler with the options -fcatch-undefined-behavior and -ftrapv catches some undefined behavior in the alignment algorithm of the macro DETECT_I in H5detect.c (Issue 8147). Since the algorithm is trying to detect the alignment -- cgit v0.12 From 524bfed32ef710dd28a3ff400965f01893a3f66b Mon Sep 17 00:00:00 2001 From: Quincey Koziol Date: Sat, 18 Apr 2015 00:02:24 -0500 Subject: [svn-r26841] Description: Minor formatting cleanups and remove unused field from callback struct. 
Tested on: Mac OSX/64 10.10.2 (amazon) w/serial & parallel (Too minor to require h5committest) --- src/H5AC.c | 2 +- src/H5ACprivate.h | 1 + src/H5Dchunk.c | 8 +++----- src/H5Dpkg.h | 1 - 4 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/H5AC.c b/src/H5AC.c index 7ed5047..e6bcbb6 100644 --- a/src/H5AC.c +++ b/src/H5AC.c @@ -598,7 +598,7 @@ H5AC_dest(H5F_t *f, hid_t dxpl_id) #ifdef H5_HAVE_PARALLEL H5AC_aux_t * aux_ptr = NULL; #endif /* H5_HAVE_PARALLEL */ - herr_t ret_value = SUCCEED; /* Return value */ + herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_NOAPI(FAIL) diff --git a/src/H5ACprivate.h b/src/H5ACprivate.h index ccecd83..0a958b0 100644 --- a/src/H5ACprivate.h +++ b/src/H5ACprivate.h @@ -42,6 +42,7 @@ #define H5AC__TRACE_FILE_ENABLED 0 #endif /* H5_METADATA_TRACE_FILE */ +/* Global metadata tag values */ #define H5AC__INVALID_TAG (haddr_t)0 #define H5AC__IGNORE_TAG (haddr_t)1 #define H5AC__SUPERBLOCK_TAG (haddr_t)2 diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c index 336aaf6..a5cf5b6 100644 --- a/src/H5Dchunk.c +++ b/src/H5Dchunk.c @@ -325,7 +325,7 @@ H5D__chunk_direct_write(const H5D_t *dset, hid_t dxpl_id, uint32_t filters, hsiz hsize_t chunk_idx; /* Global index of chunk */ H5F_block_t old_chunk; /* Offset/length of old chunk */ H5D_chk_idx_info_t idx_info; /* Chunked index info */ - hbool_t need_insert = FALSE; /* Whether the chunk needs to be inserted into the index */ + hbool_t need_insert = FALSE; /* Whether the chunk needs to be inserted into the index */ int space_ndims; /* Dataset's space rank */ hsize_t space_dim[H5O_LAYOUT_NDIMS]; /* Dataset's dataspace dimensions */ herr_t ret_value = SUCCEED; /* Return value */ @@ -382,7 +382,7 @@ H5D__chunk_direct_write(const H5D_t *dset, hid_t dxpl_id, uint32_t filters, hsiz /* Make sure the address of the chunk is returned. 
*/ if(!H5F_addr_defined(udata.chunk_block.offset)) - HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "chunk address isn't defined") + HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "chunk address isn't defined") /* Evict the (old) entry from the cache if present, but do not flush * it to disk */ @@ -745,10 +745,8 @@ H5D__chunk_io_init(const H5D_io_info_t *io_info, const H5D_type_info_t *type_inf /* Set the number of dimensions for the memory dataspace */ H5_ASSIGN_OVERFLOW(fm->m_ndims, sm_ndims, int, unsigned); - /* Get dim number and dimensionality for each dataspace */ + /* Get rank for file dataspace */ fm->f_ndims = f_ndims = dataset->shared->layout.u.chunk.ndims - 1; - if(H5S_get_simple_extent_dims(file_space, fm->f_dims, NULL) < 0) - HGOTO_ERROR(H5E_DATASPACE, H5E_CANTGET, FAIL, "unable to get dimensionality") /* Normalize hyperslab selections by adjusting them by the offset */ /* (It might be worthwhile to normalize both the file and memory dataspaces diff --git a/src/H5Dpkg.h b/src/H5Dpkg.h index a3a3985..4ec140f 100644 --- a/src/H5Dpkg.h +++ b/src/H5Dpkg.h @@ -337,7 +337,6 @@ typedef struct H5D_chunk_map_t { const H5S_t *file_space; /* Pointer to the file dataspace */ unsigned f_ndims; /* Number of dimensions for file dataspace */ - hsize_t f_dims[H5O_LAYOUT_NDIMS]; /* File dataspace dimensions */ const H5S_t *mem_space; /* Pointer to the memory dataspace */ H5S_t *mchunk_tmpl; /* Dataspace template for new memory chunks */ -- cgit v0.12 From 1eaaae98214d9042e979209e93e31c490efa4d79 Mon Sep 17 00:00:00 2001 From: Quincey Koziol Date: Sat, 18 Apr 2015 01:39:34 -0500 Subject: [svn-r26842] Description: Cache the dataset's rank & dimension sizes, instead of querying them frequently, to speed up various checks & algorithms. Also, a few minor cleanups. 
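
    The diffs below cache the dataspace rank and dimension sizes in H5D_shared_t (new
    ndims, curr_dims and max_dims fields, filled in by the new H5D__cache_dataspace_info()
    helper), so routines such as H5D__chunk_allocate() and H5D__chunk_prune_by_extent() no
    longer call H5S_get_simple_extent_dims() on every invocation; the cached copy is only
    refreshed when H5D__set_extent()/H5D__extend() actually change the extent. The same
    query-once-and-reuse pattern can be illustrated from user code with the public C API.
    The sketch below is illustrative only and is not part of the patch: the file name,
    dataset name and dimensions are arbitrary, and error checking is omitted for brevity.

        #include <stdio.h>
        #include "hdf5.h"

        int
        main(void)
        {
            hsize_t dims[2]       = {4, 6};
            hsize_t max_dims[2]   = {H5S_UNLIMITED, 6};
            hsize_t chunk_dims[2] = {2, 3};
            hsize_t new_size[2]   = {10, 6};
            hsize_t cached_dims[H5S_MAX_RANK];
            hsize_t cached_max[H5S_MAX_RANK];
            int     cached_ndims;
            hid_t   file, space, dcpl, dset, ext_space;

            /* Create a chunked, extendible 2-D dataset of ints */
            file  = H5Fcreate("cache_demo.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
            space = H5Screate_simple(2, dims, max_dims);
            dcpl  = H5Pcreate(H5P_DATASET_CREATE);
            H5Pset_chunk(dcpl, 2, chunk_dims);
            dset  = H5Dcreate2(file, "data", H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);

            /* Query the extent once and keep a cached copy, analogous to what
             * H5D__cache_dataspace_info() now does when a dataset is created or opened */
            cached_ndims = H5Sget_simple_extent_dims(space, cached_dims, cached_max);
            printf("rank %d, current dims {%llu, %llu}\n", cached_ndims,
                   (unsigned long long)cached_dims[0], (unsigned long long)cached_dims[1]);

            /* The cached values can be reused for any number of checks without
             * touching the dataspace again, e.g. an "is this dimension extendible?" test */
            if(cached_max[0] == H5S_UNLIMITED || cached_max[0] > cached_dims[0])
                printf("dimension 0 can grow\n");

            /* Only an actual extent change invalidates the cache, so refresh it after
             * H5Dset_extent(), analogous to the update the patch adds to H5D__set_extent() */
            H5Dset_extent(dset, new_size);
            ext_space = H5Dget_space(dset);
            cached_ndims = H5Sget_simple_extent_dims(ext_space, cached_dims, cached_max);
            printf("after extend, current dims {%llu, %llu}\n",
                   (unsigned long long)cached_dims[0], (unsigned long long)cached_dims[1]);

            H5Sclose(ext_space);
            H5Sclose(space);
            H5Pclose(dcpl);
            H5Dclose(dset);
            H5Fclose(file);

            return 0;
        }

    Built against libhdf5 (e.g. with the h5cc wrapper) and run, the sketch prints the
    cached extent before and after the H5Dset_extent() call, mirroring the points at which
    the library now refreshes its internal curr_dims/max_dims copy.
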
Tested on: MacOSX/64 10.10.2 (amazon) w/serial & parallel Linux/32 2.6.18 (jam) w/serial & parallel --- src/H5Dchunk.c | 79 +++++++++++++++++++------------------------------------- src/H5Dcompact.c | 15 ++++------- src/H5Dcontig.c | 13 +++------- src/H5Ddeprec.c | 27 ++++++++++++------- src/H5Defl.c | 13 +++------- src/H5Dint.c | 75 +++++++++++++++++++++++++++++++++++++++++------------ src/H5Dio.c | 11 +++----- src/H5Dmpio.c | 3 +-- src/H5Dpkg.h | 9 +++++-- src/H5S.c | 9 +++---- 10 files changed, 131 insertions(+), 123 deletions(-) diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c index a5cf5b6..e99f00f 100644 --- a/src/H5Dchunk.c +++ b/src/H5Dchunk.c @@ -326,8 +326,6 @@ H5D__chunk_direct_write(const H5D_t *dset, hid_t dxpl_id, uint32_t filters, hsiz H5F_block_t old_chunk; /* Offset/length of old chunk */ H5D_chk_idx_info_t idx_info; /* Chunked index info */ hbool_t need_insert = FALSE; /* Whether the chunk needs to be inserted into the index */ - int space_ndims; /* Dataset's space rank */ - hsize_t space_dim[H5O_LAYOUT_NDIMS]; /* Dataset's dataspace dimensions */ herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_STATIC_TAG(dxpl_id, dset->oloc.addr, FAIL) @@ -338,12 +336,8 @@ H5D__chunk_direct_write(const H5D_t *dset, hid_t dxpl_id, uint32_t filters, hsiz if(H5D__alloc_storage(dset, dxpl_id, H5D_ALLOC_WRITE, FALSE, NULL) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize storage") - /* Retrieve the dataset dimensions */ - if((space_ndims = H5S_get_simple_extent_dims(dset->shared->space, space_dim, NULL)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to get simple dataspace info") - /* Calculate the index of this chunk */ - if(H5VM_chunk_index((unsigned)space_ndims, offset, + if(H5VM_chunk_index(dset->shared->ndims, offset, layout->u.chunk.dim, layout->u.chunk.down_chunks, &chunk_idx) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get chunk index") @@ -475,23 +469,15 @@ done: herr_t H5D__chunk_set_info(const H5D_t *dset) { - hsize_t curr_dims[H5O_LAYOUT_NDIMS]; /* Curr. 
size of dataset dimensions */ - int sndims; /* Rank of dataspace */ - unsigned ndims; /* Rank of dataspace */ - herr_t ret_value = SUCCEED; /* Return value */ + herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_PACKAGE /* Sanity checks */ HDassert(dset); - /* Get the dim info for dataset */ - if((sndims = H5S_get_simple_extent_dims(dset->shared->space, curr_dims, NULL)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataspace dimensions") - H5_ASSIGN_OVERFLOW(ndims, sndims, int, unsigned); - /* Set the base layout information */ - if(H5D__chunk_set_info_real(&dset->shared->layout.u.chunk, ndims, curr_dims) < 0) + if(H5D__chunk_set_info_real(&dset->shared->layout.u.chunk, dset->shared->ndims, dset->shared->curr_dims) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set layout's chunk info") /* Call the index's "resize" callback */ @@ -519,10 +505,7 @@ static herr_t H5D__chunk_construct(H5F_t UNUSED *f, H5D_t *dset) { const H5T_t *type = dset->shared->type; /* Convenience pointer to dataset's datatype */ - hsize_t max_dims[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */ - hsize_t dims[H5O_LAYOUT_NDIMS]; /* Dimension size of data in elements */ uint64_t chunk_size; /* Size of chunk in bytes */ - int ndims; /* Rank of dataspace */ unsigned u; /* Local index variable */ herr_t ret_value = SUCCEED; /* Return value */ @@ -537,9 +520,7 @@ H5D__chunk_construct(H5F_t UNUSED *f, H5D_t *dset) HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "no chunk information set?") /* Set up layout information */ - if((ndims = H5S_GET_EXTENT_NDIMS(dset->shared->space)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "unable to get rank") - if(dset->shared->layout.u.chunk.ndims != (unsigned)ndims) + if(dset->shared->layout.u.chunk.ndims != dset->shared->ndims) HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "dimensionality of chunks doesn't match the dataspace") /* Increment # of chunk dimensions, to account for datatype size as last element */ @@ -553,10 +534,6 @@ H5D__chunk_construct(H5F_t UNUSED *f, H5D_t *dset) /* Set the last dimension of the chunk size to the size of the datatype */ dset->shared->layout.u.chunk.dim[dset->shared->layout.u.chunk.ndims - 1] = (uint32_t)H5T_GET_SIZE(type); - /* Get local copy of dataset dimensions (for sanity checking) */ - if(H5S_get_simple_extent_dims(dset->shared->space, dims, max_dims) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to query maximum dimensions") - /* Sanity check dimensions */ for(u = 0; u < dset->shared->layout.u.chunk.ndims - 1; u++) { /* Don't allow zero-sized chunk dimensions */ @@ -568,7 +545,7 @@ H5D__chunk_construct(H5F_t UNUSED *f, H5D_t *dset) * the maximum dimension size. If any dimension size is zero, there * will be no such restriction. 
*/ - if(dims[u] && max_dims[u] != H5S_UNLIMITED && max_dims[u] < dset->shared->layout.u.chunk.dim[u]) + if(dset->shared->curr_dims[u] && dset->shared->max_dims[u] != H5S_UNLIMITED && dset->shared->max_dims[u] < dset->shared->layout.u.chunk.dim[u]) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "chunk size must be <= maximum dimension size for fixed-sized dimensions") } /* end for */ @@ -3297,8 +3274,8 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite, H5D_chunk_coll_info_t chunk_info; /* chunk address information for doing I/O */ #endif /* H5_HAVE_PARALLEL */ hbool_t carry; /* Flag to indicate that chunk increment carrys to higher dimension (sorta) */ - int space_ndims; /* Dataset's space rank */ - hsize_t space_dim[H5O_LAYOUT_NDIMS]; /* Dataset's dataspace dimensions */ + unsigned space_ndims; /* Dataset's space rank */ + const hsize_t *space_dim; /* Dataset's dataspace dimensions */ const uint32_t *chunk_dim = layout->u.chunk.dim; /* Convenience pointer to chunk dimensions */ unsigned op_dim; /* Current operating dimension */ H5D_fill_buf_info_t fb_info; /* Dataset's fill buffer info */ @@ -3313,9 +3290,8 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite, HDassert(TRUE == H5P_isa_class(dxpl_id, H5P_DATASET_XFER)); /* Retrieve the dataset dimensions */ - if((space_ndims = H5S_get_simple_extent_dims(dset->shared->space, space_dim, NULL)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to get simple dataspace info") - space_dim[space_ndims] = layout->u.chunk.dim[space_ndims]; + space_dim = dset->shared->curr_dims; + space_ndims = dset->shared->ndims; /* The last dimension in chunk_offset is always 0 */ chunk_offset[space_ndims] = (hsize_t)0; @@ -3429,7 +3405,7 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite, * certain dimension, max_unalloc is updated in order to avoid allocating * those chunks again. 
*/ - for(op_dim = 0; op_dim < (unsigned)space_ndims; op_dim++) { + for(op_dim = 0; op_dim < space_ndims; op_dim++) { H5D_chunk_ud_t udata; /* User data for querying chunk info */ int i; /* Local index variable */ @@ -3438,7 +3414,7 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite, continue; else { /* Reset the chunk offset indices */ - HDmemset(chunk_offset, 0, ((unsigned)space_ndims * sizeof(chunk_offset[0]))); + HDmemset(chunk_offset, 0, (space_ndims * sizeof(chunk_offset[0]))); chunk_offset[op_dim] = min_unalloc[op_dim]; carry = FALSE; @@ -3456,7 +3432,7 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite, hsize_t chunk_idx; /* Calculate the index of this chunk */ - if(H5VM_chunk_index((unsigned)space_ndims, chunk_offset, + if(H5VM_chunk_index(space_ndims, chunk_offset, layout->u.chunk.dim, layout->u.chunk.down_chunks, &chunk_idx) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get chunk index") @@ -3473,7 +3449,7 @@ H5D__chunk_allocate(const H5D_t *dset, hid_t dxpl_id, hbool_t full_overwrite, unsigned u; /* Local index variable */ hbool_t outside_orig = FALSE; - for(u = 0; u < (unsigned)space_ndims; u++) { + for(u = 0; u < space_ndims; u++) { HDassert(chunk_offset[u] < space_dim[u]); if(chunk_offset[u] >= old_dim[u]) outside_orig = TRUE; @@ -4018,14 +3994,13 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) const H5O_layout_t *layout = &(dset->shared->layout); /* Dataset's layout */ const H5D_rdcc_t *rdcc = &(dset->shared->cache.chunk); /*raw data chunk cache */ H5D_rdcc_ent_t *ent = NULL; /* Cache entry */ - int space_ndims; /* Dataset's space rank */ - hsize_t space_dim[H5O_LAYOUT_NDIMS]; /* Current dataspace dimensions */ + unsigned space_ndims; /* Dataset's space rank */ + const hsize_t *space_dim; /* Current dataspace dimensions */ unsigned op_dim; /* Current operating dimension */ hbool_t shrunk_dim[H5O_LAYOUT_NDIMS]; /* Dimensions which have shrunk */ H5D_chunk_it_ud1_t udata; /* Chunk index iterator user data */ hbool_t udata_init = FALSE; /* Whether the chunk index iterator user data has been initialized */ H5D_chunk_common_ud_t idx_udata; /* User data for index removal routine */ - H5D_chunk_ud_t chk_udata; /* User data for getting chunk info */ H5S_t *chunk_space = NULL; /* Dataspace for a chunk */ hsize_t chunk_dim[H5O_LAYOUT_NDIMS]; /* Chunk dimensions */ hsize_t chunk_offset[H5O_LAYOUT_NDIMS]; /* Offset of current chunk */ @@ -4047,10 +4022,8 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't fill dxpl cache") /* Go get the rank & dimensions (including the element size) */ - if((space_ndims = H5S_get_simple_extent_dims(dset->shared->space, space_dim, - NULL)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions") - space_dim[space_ndims] = layout->u.chunk.dim[space_ndims]; + space_dim = dset->shared->curr_dims; + space_ndims = dset->shared->ndims; /* The last dimension in chunk_offset is always 0 */ chunk_offset[space_ndims] = (hsize_t)0; @@ -4069,14 +4042,14 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) /* (also copy the chunk dimensions into 'hsize_t' array for creating dataspace) */ /* (also compute the dimensions which have been shrunk) */ elmts_per_chunk = 1; - for(u = 0; u < (unsigned)space_ndims; u++) { + for(u = 0; u < space_ndims; u++) { elmts_per_chunk *= layout->u.chunk.dim[u]; chunk_dim[u] = layout->u.chunk.dim[u]; 
shrunk_dim[u] = space_dim[u] < old_dim[u]; } /* end for */ /* Create a dataspace for a chunk & set the extent */ - if(NULL == (chunk_space = H5S_create_simple((unsigned)space_ndims, chunk_dim, NULL))) + if(NULL == (chunk_space = H5S_create_simple(space_ndims, chunk_dim, NULL))) HGOTO_ERROR(H5E_DATASPACE, H5E_CANTCREATE, FAIL, "can't create simple dataspace") /* Reset hyperslab start array */ @@ -4160,7 +4133,7 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) if(has_fill) for(ent = rdcc->head; ent; ent = ent->next) /* Check for chunk offset outside of new dimensions */ - for(u = 0; u < (unsigned)space_ndims; u++) + for(u = 0; u < space_ndims; u++) if((hsize_t)ent->offset[u] >= space_dim[u]) { /* Mark the entry as "deleted" */ ent->deleted = TRUE; @@ -4176,12 +4149,12 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) HDassert((hsize_t) max_mod_chunk_off[op_dim] >= min_mod_chunk_off[op_dim]); /* Reset the chunk offset indices */ - HDmemset(chunk_offset, 0, ((unsigned)space_ndims * sizeof(chunk_offset[0]))); + HDmemset(chunk_offset, 0, (space_ndims * sizeof(chunk_offset[0]))); chunk_offset[op_dim] = min_mod_chunk_off[op_dim]; /* Initialize "dims_outside_fill" array */ ndims_outside_fill = 0; - for(u = 0; u < (unsigned)space_ndims; u++) + for(u = 0; u < space_ndims; u++) if((hssize_t)chunk_offset[u] > max_fill_chunk_off[u]) { dims_outside_fill[u] = TRUE; ndims_outside_fill++; @@ -4196,7 +4169,7 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) int i; /* Local index variable */ /* Calculate the index of this chunk */ - if(H5VM_chunk_index((unsigned)space_ndims, chunk_offset, + if(H5VM_chunk_index(space_ndims, chunk_offset, layout->u.chunk.dim, layout->u.chunk.down_chunks, &(chk_io_info.store->chunk.index)) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't get chunk index") @@ -4210,12 +4183,14 @@ H5D__chunk_prune_by_extent(H5D_t *dset, hid_t dxpl_id, const hsize_t *old_dim) HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to write fill value") } /* end if */ else { + H5D_chunk_ud_t chk_udata; /* User data for getting chunk info */ + #ifndef NDEBUG /* Make sure this chunk is really outside the new dimensions */ { hbool_t outside_dim = FALSE; - for(u = 0; u < (unsigned)space_ndims; u++) + for(u = 0; u < space_ndims; u++) if(chunk_offset[u] >= space_dim[u]) { outside_dim = TRUE; break; @@ -4501,7 +4476,7 @@ H5D__chunk_update_cache(H5D_t *dset, hid_t dxpl_id) HDassert(dset->shared->layout.u.chunk.ndims > 0 && dset->shared->layout.u.chunk.ndims <= H5O_LAYOUT_NDIMS); /* Get the rank */ - rank = dset->shared->layout.u.chunk.ndims-1; + rank = dset->shared->layout.u.chunk.ndims - 1; HDassert(rank > 0); /* 1-D dataset's chunks can't have their index change */ diff --git a/src/H5Dcompact.c b/src/H5Dcompact.c index 789beab..1d8b97c 100644 --- a/src/H5Dcompact.c +++ b/src/H5Dcompact.c @@ -174,11 +174,8 @@ H5D__compact_construct(H5F_t *f, H5D_t *dset) hssize_t stmp_size; /* Temporary holder for raw data size */ hsize_t tmp_size; /* Temporary holder for raw data size */ hsize_t max_comp_data_size; /* Max. 
allowed size of compact data */ - hsize_t dim[H5O_LAYOUT_NDIMS]; /* Current size of data in elements */ - hsize_t max_dim[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */ - int ndims; /* Rank of dataspace */ - int i; /* Local index variable */ - herr_t ret_value = SUCCEED; /* Return value */ + unsigned u; /* Local index variable */ + herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_STATIC @@ -187,11 +184,9 @@ H5D__compact_construct(H5F_t *f, H5D_t *dset) HDassert(dset); /* Check for invalid dataset dimensions */ - if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dim, max_dim)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataspace dimensions") - for(i = 0; i < ndims; i++) - if(max_dim[i] > dim[i]) - HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible compact dataset") + for(u = 0; u < dset->shared->ndims; u++) + if(dset->shared->max_dims[u] > dset->shared->curr_dims[u]) + HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible compact dataset not allowed") /* * Compact dataset is stored in dataset object header message of diff --git a/src/H5Dcontig.c b/src/H5Dcontig.c index dc09768..e913a3f 100644 --- a/src/H5Dcontig.c +++ b/src/H5Dcontig.c @@ -396,10 +396,7 @@ H5D__contig_construct(H5F_t *f, H5D_t *dset) size_t dt_size; /* Size of datatype */ hsize_t tmp_size; /* Temporary holder for raw data size */ size_t tmp_sieve_buf_size; /* Temporary holder for sieve buffer size */ - hsize_t dim[H5O_LAYOUT_NDIMS]; /* Current size of data in elements */ - hsize_t max_dim[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */ - int ndims; /* Rank of dataspace */ - int i; /* Local index variable */ + unsigned u; /* Local index variable */ herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_STATIC @@ -415,11 +412,9 @@ H5D__contig_construct(H5F_t *f, H5D_t *dset) */ /* Check for invalid dataset dimensions */ - if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dim, max_dim)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize contiguous storage") - for(i = 0; i < ndims; i++) - if(max_dim[i] > dim[i]) - HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible contiguous non-external dataset") + for(u = 0; u < dset->shared->ndims; u++) + if(dset->shared->max_dims[u] > dset->shared->curr_dims[u]) + HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "extendible contiguous non-external dataset not allowed") /* Retrieve the number of elements in the dataspace */ if((snelmts = H5S_GET_EXTENT_NPOINTS(dset->shared->space)) < 0) diff --git a/src/H5Ddeprec.c b/src/H5Ddeprec.c index 0b2fee6..b3dae7b 100644 --- a/src/H5Ddeprec.c +++ b/src/H5Ddeprec.c @@ -341,9 +341,7 @@ static herr_t H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id) { htri_t changed; /* Flag to indicate that the dataspace was successfully extended */ - H5S_t *space; /* Dataset's dataspace */ - int rank; /* Dataspace # of dimensions */ - hsize_t curr_dims[H5O_LAYOUT_NDIMS];/* Current dimension sizes */ + hsize_t old_dims[H5S_MAX_RANK]; /* Current (i.e. 
old, if changed) dimension sizes */ H5O_fill_t *fill; /* Dataset's fill value */ herr_t ret_value = SUCCEED; /* Return value */ @@ -364,20 +362,30 @@ H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id) */ /* Retrieve the current dimensions */ - space = dataset->shared->space; - if((rank = H5S_get_simple_extent_dims(space, curr_dims, NULL)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions") + HDcompile_assert(sizeof(old_dims) == sizeof(dataset->shared->curr_dims)); + HDmemcpy(old_dims, dataset->shared->curr_dims, H5S_MAX_RANK * sizeof(old_dims[0])); /* Increase the size of the data space */ - if((changed = H5S_extend(space, size)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of data space") + if((changed = H5S_extend(dataset->shared->space, size)) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to increase size of dataspace") /* Updated the dataset's info if the dataspace was successfully extended */ if(changed) { + /* Get the extended dimension sizes */ + /* (Need to retrieve this here, since the 'size' dimensions could + * extend one dimension but be smaller in a different dimension, + * and the dataspace's extent is the larger of the current and + * 'size' dimension values. - QAK) + */ + if(H5S_get_simple_extent_dims(dataset->shared->space, dataset->shared->curr_dims, NULL) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions") + /* Update the index values for the cached chunks for this dataset */ if(H5D_CHUNKED == dataset->shared->layout.type) { + /* Update general information for chunks */ if(H5D__chunk_set_info(dataset) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "unable to update # of chunks") + /* Update the chunk cache indices */ if(H5D__chunk_update_cache(dataset, dxpl_id) < 0) HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices") } /* end if */ @@ -385,8 +393,7 @@ H5D__extend(H5D_t *dataset, const hsize_t *size, hid_t dxpl_id) /* Allocate space for the new parts of the dataset, if appropriate */ fill = &dataset->shared->dcpl_cache.fill; if(fill->alloc_time == H5D_ALLOC_TIME_EARLY) - if(H5D__alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE, - curr_dims) < 0) + if(H5D__alloc_storage(dataset, dxpl_id, H5D_ALLOC_EXTEND, FALSE, old_dims) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset with fill value") /* Mark the dataspace as dirty, for later writing to the file */ diff --git a/src/H5Defl.c b/src/H5Defl.c index 38c8ccd..355492f 100644 --- a/src/H5Defl.c +++ b/src/H5Defl.c @@ -126,14 +126,11 @@ static herr_t H5D__efl_construct(H5F_t *f, H5D_t *dset) { size_t dt_size; /* Size of datatype */ - hsize_t dim[H5O_LAYOUT_NDIMS]; /* Current size of data in elements */ - hsize_t max_dim[H5O_LAYOUT_NDIMS]; /* Maximum size of data in elements */ hssize_t stmp_size; /* Temporary holder for raw data size */ hsize_t tmp_size; /* Temporary holder for raw data size */ hsize_t max_points; /* Maximum elements */ hsize_t max_storage; /* Maximum storage size */ - int ndims; /* Rank of dataspace */ - int i; /* Local index variable */ + unsigned u; /* Local index variable */ herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_STATIC @@ -149,11 +146,9 @@ H5D__efl_construct(H5F_t *f, H5D_t *dset) */ /* Check for invalid dataset dimensions */ - if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dim, max_dim)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize contiguous storage") - 
for(i = 1; i < ndims; i++) - if(max_dim[i] > dim[i]) - HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "only the first dimension can be extendible") + for(u = 1; u < dset->shared->ndims; u++) + if(dset->shared->max_dims[u] > dset->shared->curr_dims[u]) + HGOTO_ERROR(H5E_DATASET, H5E_UNSUPPORTED, FAIL, "only the first dimension can be extendible") /* Retrieve the size of the dataset's datatype */ if(0 == (dt_size = H5T_get_size(dset->shared->type))) diff --git a/src/H5Dint.c b/src/H5Dint.c index 8e1fcec..c626475 100644 --- a/src/H5Dint.c +++ b/src/H5Dint.c @@ -62,6 +62,7 @@ static H5D_shared_t *H5D__new(hid_t dcpl_id, hbool_t creating, hbool_t vl_type); static herr_t H5D__init_type(H5F_t *file, const H5D_t *dset, hid_t type_id, const H5T_t *type); +static herr_t H5D__cache_dataspace_info(const H5D_t *dset); static herr_t H5D__init_space(H5F_t *file, const H5D_t *dset, const H5S_t *space); static herr_t H5D__update_oh_info(H5F_t *file, hid_t dxpl_id, H5D_t *dset, hid_t dapl_id); @@ -665,6 +666,41 @@ done: /*------------------------------------------------------------------------- + * Function: H5D__cache_dataspace_info + * + * Purpose: Cache dataspace info for a dataset + * + * Return: Success: SUCCEED + * Failure: FAIL + * + * Programmer: Quincey Koziol + * Wednesday, November 19, 2014 + * + *------------------------------------------------------------------------- + */ +static herr_t +H5D__cache_dataspace_info(const H5D_t *dset) +{ + int sndims; /* Signed number of dimensions of dataspace rank */ + unsigned u; /* Local index value */ + herr_t ret_value = SUCCEED; /* Return value */ + + FUNC_ENTER_STATIC + + /* Sanity checking */ + HDassert(dset); + + /* Cache info for dataset's dataspace */ + if((sndims = H5S_get_simple_extent_dims(dset->shared->space, dset->shared->curr_dims, dset->shared->max_dims)) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't cache dataspace dimensions") + dset->shared->ndims = (unsigned)sndims; + +done: + FUNC_LEAVE_NOAPI(ret_value) +} /* end H5D__cache_dataspace_info() */ + + +/*------------------------------------------------------------------------- * Function: H5D__init_space * * Purpose: Copy a dataspace for a dataset's use, performing all the @@ -698,6 +734,10 @@ H5D__init_space(H5F_t *file, const H5D_t *dset, const H5S_t *space) if(NULL == (dset->shared->space = H5S_copy(space, FALSE, TRUE))) HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't copy dataspace") + /* Cache the dataset's dataspace info */ + if(H5D__cache_dataspace_info(dset) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info") + /* Set the latest format, if requested */ if(use_latest_format) if(H5S_set_latest_version(dset->shared->space) < 0) @@ -1251,6 +1291,10 @@ H5D__open_oid(H5D_t *dataset, hid_t dapl_id, hid_t dxpl_id) if(NULL == (dataset->shared->space = H5S_read(&(dataset->oloc), dxpl_id))) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to load dataspace info from dataset header") + /* Cache the dataset's dataspace info */ + if(H5D__cache_dataspace_info(dataset) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTCOPY, FAIL, "can't cache dataspace info") + /* Get a datatype ID for the dataset's datatype */ if((dataset->shared->type_id = H5I_register(H5I_DATATYPE, dataset->shared->type, FALSE)) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTREGISTER, FAIL, "unable to register type") @@ -2161,9 +2205,7 @@ done: herr_t H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id) { - H5S_t *space; /* Dataset's dataspace */ - int rank; /* Dataspace # of dimensions */ - 
hsize_t curr_dims[H5O_LAYOUT_NDIMS];/* Current dimension sizes */ + hsize_t curr_dims[H5S_MAX_RANK]; /* Current dimension sizes */ htri_t changed; /* Whether the dataspace changed size */ herr_t ret_value = SUCCEED; /* Return value */ @@ -2187,29 +2229,30 @@ H5D__set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id) if(H5D__check_filters(dset) < 0) HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "can't apply filters") - /* Get the data space */ - space = dset->shared->space; - - /* Check if we are shrinking or expanding any of the dimensions */ - if((rank = H5S_get_simple_extent_dims(space, curr_dims, NULL)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions") + /* Keep the current dataspace dimensions for later */ + HDcompile_assert(sizeof(curr_dims) == sizeof(dset->shared->curr_dims)); + HDmemcpy(curr_dims, dset->shared->curr_dims, H5S_MAX_RANK * sizeof(curr_dims[0])); - /* Modify the size of the data space */ - if((changed = H5S_set_extent(space, size)) < 0) - HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of data space") + /* Modify the size of the dataspace */ + if((changed = H5S_set_extent(dset->shared->space, size)) < 0) + HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of dataspace") /* Don't bother updating things, unless they've changed */ if(changed) { - hbool_t shrink = FALSE; /* Flag to indicate a dimension has shrank */ - hbool_t expand = FALSE; /* Flag to indicate a dimension has grown */ - unsigned u; /* Local index variable */ + hbool_t shrink = FALSE; /* Flag to indicate a dimension has shrank */ + hbool_t expand = FALSE; /* Flag to indicate a dimension has grown */ + unsigned u; /* Local index variable */ /* Determine if we are shrinking and/or expanding any dimensions */ - for(u = 0; u < (unsigned)rank; u++) { + for(u = 0; u < dset->shared->ndims; u++) { + /* Check for various status changes */ if(size[u] < curr_dims[u]) shrink = TRUE; if(size[u] > curr_dims[u]) expand = TRUE; + + /* Update the cached copy of the dataset's dimensions */ + dset->shared->curr_dims[u] = size[u]; } /* end for */ /*------------------------------------------------------------------------- diff --git a/src/H5Dio.c b/src/H5Dio.c index 1c77d93..44080dc 100644 --- a/src/H5Dio.c +++ b/src/H5Dio.c @@ -302,8 +302,6 @@ H5D__pre_write(H5D_t *dset, hbool_t direct_write, hid_t mem_type_id, uint32_t direct_filters; hsize_t *direct_offset; uint32_t direct_datasize; - int ndims = 0; - hsize_t dims[H5O_LAYOUT_NDIMS]; hsize_t internal_offset[H5O_LAYOUT_NDIMS]; unsigned u; /* Local index variable */ @@ -324,12 +322,9 @@ H5D__pre_write(H5D_t *dset, hbool_t direct_write, hid_t mem_type_id, /* The library's chunking code requires the offset terminates with a zero. 
So transfer the * offset array to an internal offset array */ - if((ndims = H5S_get_simple_extent_dims(dset->shared->space, dims, NULL)) < 0) - HGOTO_ERROR(H5E_DATASPACE, H5E_CANTGET, FAIL, "can't retrieve dataspace extent dims") - - for(u = 0; u < (unsigned)ndims; u++) { + for(u = 0; u < dset->shared->ndims; u++) { /* Make sure the offset doesn't exceed the dataset's dimensions */ - if(direct_offset[u] > dims[u]) + if(direct_offset[u] > dset->shared->curr_dims[u]) HGOTO_ERROR(H5E_DATASPACE, H5E_BADTYPE, FAIL, "offset exceeds dimensions of dataset") /* Make sure the offset fall right on a chunk's boundary */ @@ -340,7 +335,7 @@ H5D__pre_write(H5D_t *dset, hbool_t direct_write, hid_t mem_type_id, } /* end for */ /* Terminate the offset with a zero */ - internal_offset[ndims] = 0; + internal_offset[dset->shared->ndims] = 0; /* write raw data */ if(H5D__chunk_direct_write(dset, dxpl_id, direct_filters, internal_offset, direct_datasize, buf) < 0) diff --git a/src/H5Dmpio.c b/src/H5Dmpio.c index bd1531d..db487cd 100644 --- a/src/H5Dmpio.c +++ b/src/H5Dmpio.c @@ -860,8 +860,7 @@ H5D__link_chunk_collective_io(H5D_io_info_t *io_info, const H5D_type_info_t *typ mspace = chunk_info->mspace; /* Look up address of chunk */ - if(H5D__chunk_lookup(io_info->dset, io_info->dxpl_id, chunk_info->coords, - chunk_info->index, &udata) < 0) + if(H5D__chunk_lookup(io_info->dset, io_info->dxpl_id, chunk_info->coords, chunk_info->index, &udata) < 0) HGOTO_ERROR(H5E_STORAGE, H5E_CANTGET, FAIL, "couldn't get chunk address") ctg_store.contig.dset_addr = udata.chunk_block.offset; } /* end else */ diff --git a/src/H5Dpkg.h b/src/H5Dpkg.h index 4ec140f..fa59412 100644 --- a/src/H5Dpkg.h +++ b/src/H5Dpkg.h @@ -325,9 +325,9 @@ typedef struct H5D_chunk_info_t { uint32_t chunk_points; /* Number of elements selected in chunk */ hsize_t coords[H5O_LAYOUT_NDIMS]; /* Coordinates of chunk in file dataset's dataspace */ H5S_t *fspace; /* Dataspace describing chunk & selection in it */ - unsigned fspace_shared; /* Indicate that the file space for a chunk is shared and shouldn't be freed */ + hbool_t fspace_shared; /* Indicate that the file space for a chunk is shared and shouldn't be freed */ H5S_t *mspace; /* Dataspace describing selection in memory corresponding to this chunk */ - unsigned mspace_shared; /* Indicate that the memory space for a chunk is shared and shouldn't be freed */ + hbool_t mspace_shared; /* Indicate that the memory space for a chunk is shared and shouldn't be freed */ } H5D_chunk_info_t; /* Main structure holding the mapping between file chunks and memory */ @@ -418,6 +418,11 @@ typedef struct H5D_shared_t { H5O_layout_t layout; /* Data layout */ hbool_t checked_filters;/* TRUE if dataset passes can_apply check */ + /* Cached dataspace info */ + unsigned ndims; /* The dataset's dataspace rank */ + hsize_t curr_dims[H5S_MAX_RANK]; /* The curr. size of dataset dimensions */ + hsize_t max_dims[H5S_MAX_RANK]; /* The max. 
size of dataset dimensions */ + /* Buffered/cached information for types of raw data storage*/ struct { H5D_rdcdc_t contig; /* Information about contiguous data */ diff --git a/src/H5S.c b/src/H5S.c index 4a1783e..dd2bb0a 100644 --- a/src/H5S.c +++ b/src/H5S.c @@ -2144,14 +2144,13 @@ H5S_extend(H5S_t *space, const hsize_t *size) HDassert(size); /* Check through all the dimensions to see if modifying the dataspace is allowed */ - for(u = 0; u < space->extent.rank; u++) { - if(space->extent.size[u]extent.max && H5S_UNLIMITED!=space->extent.max[u] && - space->extent.max[u]extent.rank; u++) + if(space->extent.size[u] < size[u]) { + if(space->extent.max && H5S_UNLIMITED != space->extent.max[u] && + space->extent.max[u] < size[u]) HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "dimension cannot be increased") ret_value++; } /* end if */ - } /* end for */ /* Update */ if(ret_value) { -- cgit v0.12 From 371a27f01eecd3bdd456d24ee630f5f281fbfb93 Mon Sep 17 00:00:00 2001 From: Quincey Koziol Date: Sat, 18 Apr 2015 21:48:06 -0500 Subject: [svn-r26843] Description: Minor code & warning cleanups. Tested on: MacOSX/64 10.10.2 (amazon) w/serial & parallel Linux/32 2.6.18 (jam) w/serial & parallel --- src/H5Dchunk.c | 7 ++++--- src/H5Dpkg.h | 2 +- src/H5Oprivate.h | 4 ++-- src/H5VMprivate.h | 23 +++++++++++++++++++++++ 4 files changed, 30 insertions(+), 6 deletions(-) diff --git a/src/H5Dchunk.c b/src/H5Dchunk.c index e99f00f..6c7c5d5 100644 --- a/src/H5Dchunk.c +++ b/src/H5Dchunk.c @@ -2523,7 +2523,7 @@ H5D__chunk_flush_entry(const H5D_t *dset, hid_t dxpl_id, const H5D_dxpl_cache_t point_of_no_return = TRUE; ent->chunk = NULL; } /* end else */ - H5_ASSIGN_OVERFLOW(nbytes, udata.chunk_block.length, uint32_t, size_t); + H5_ASSIGN_OVERFLOW(nbytes, udata.chunk_block.length, hsize_t, size_t); if(H5Z_pipeline(&(dset->shared->dcpl_cache.pline), 0, &(udata.filter_mask), dxpl_cache->err_detect, dxpl_cache->filter_cb, &nbytes, &alloc, &buf) < 0) HGOTO_ERROR(H5E_PLINE, H5E_CANTFILTER, FAIL, "output pipeline failed") @@ -2532,7 +2532,7 @@ H5D__chunk_flush_entry(const H5D_t *dset, hid_t dxpl_id, const H5D_dxpl_cache_t if(nbytes > ((size_t)0xffffffff)) HGOTO_ERROR(H5E_DATASET, H5E_BADRANGE, FAIL, "chunk too large for 32-bit length") #endif /* H5_SIZEOF_SIZE_T > 4 */ - H5_ASSIGN_OVERFLOW(udata.chunk_block.length, nbytes, size_t, uint32_t); + H5_ASSIGN_OVERFLOW(udata.chunk_block.length, nbytes, size_t, hsize_t); /* Indicate that the chunk must be allocated */ must_alloc = TRUE; @@ -2565,7 +2565,8 @@ H5D__chunk_flush_entry(const H5D_t *dset, hid_t dxpl_id, const H5D_dxpl_cache_t /* Write the data to the file */ HDassert(H5F_addr_defined(udata.chunk_block.offset)); - if(H5F_block_write(dset->oloc.file, H5FD_MEM_DRAW, udata.chunk_block.offset, udata.chunk_block.length, dxpl_id, buf) < 0) + H5_CHECK_OVERFLOW(udata.chunk_block.length, hsize_t, size_t); + if(H5F_block_write(dset->oloc.file, H5FD_MEM_DRAW, udata.chunk_block.offset, (size_t)udata.chunk_block.length, dxpl_id, buf) < 0) HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to write raw data to file") /* Insert the chunk record into the index */ diff --git a/src/H5Dpkg.h b/src/H5Dpkg.h index fa59412..769fec1 100644 --- a/src/H5Dpkg.h +++ b/src/H5Dpkg.h @@ -83,7 +83,7 @@ typedef struct H5D_type_info_t { /* Computed/derived values */ size_t src_type_size; /* Size of source type */ - size_t dst_type_size; /* Size of destination type*/ + size_t dst_type_size; /* Size of destination type */ size_t max_type_size; /* Size of largest source/destination type */ hbool_t 
is_conv_noop; /* Whether the type conversion is a NOOP */ hbool_t is_xform_noop; /* Whether the data transform is a NOOP */ diff --git a/src/H5Oprivate.h b/src/H5Oprivate.h index e3a2d33..3707367 100644 --- a/src/H5Oprivate.h +++ b/src/H5Oprivate.h @@ -424,8 +424,8 @@ typedef struct H5O_layout_chunk_t { uint32_t dim[H5O_LAYOUT_NDIMS]; /* Size of chunk in elements */ uint32_t size; /* Size of chunk in bytes */ hsize_t nchunks; /* Number of chunks in dataset */ - hsize_t chunks[H5O_LAYOUT_NDIMS]; /* # of chunks in dataset dimensions */ - hsize_t down_chunks[H5O_LAYOUT_NDIMS]; /* "down" size of number of chunks in each dimension */ + hsize_t chunks[H5O_LAYOUT_NDIMS]; /* # of chunks in each dataset dimension */ + hsize_t down_chunks[H5O_LAYOUT_NDIMS]; /* "down" size of number of chunks in each dimension */ } H5O_layout_chunk_t; typedef struct H5O_layout_t { diff --git a/src/H5VMprivate.h b/src/H5VMprivate.h index 20821b2..dbe39ca 100644 --- a/src/H5VMprivate.h +++ b/src/H5VMprivate.h @@ -410,6 +410,29 @@ H5VM_log2_of2(uint32_t n) /*------------------------------------------------------------------------- + * Function: H5VM_power2up + * + * Purpose: Round up a number to the next power of 2 + * + * Return: Return the number which is a power of 2 + * + * Programmer: Vailin Choi; Nov 2014 + * + *------------------------------------------------------------------------- + */ +static H5_inline hsize_t UNUSED +H5VM_power2up(hsize_t n) +{ + hsize_t ret_value = 1; /* Return value */ + + while(ret_value < n) + ret_value <<= 1; + + return(ret_value); +} /* H5VM_power2up */ + + +/*------------------------------------------------------------------------- * Function: H5VM_limit_enc_size * * Purpose: Determine the # of bytes needed to encode values within a -- cgit v0.12 From 50db0eed22ef60b4a79b10deb0562662892c7891 Mon Sep 17 00:00:00 2001 From: Jerome Soumagne Date: Mon, 20 Apr 2015 15:32:32 -0500 Subject: [svn-r26848] Fix warnings in H5T Tested: local linux --- src/H5T.c | 146 +++++++++++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 115 insertions(+), 31 deletions(-) diff --git a/src/H5T.c b/src/H5T.c index 9320d28..aaf8bc1 100644 --- a/src/H5T.c +++ b/src/H5T.c @@ -1,3 +1,4 @@ + /* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * Copyright by The HDF Group. * * Copyright by the Board of Trustees of the University of Illinois. 
* @@ -67,10 +68,6 @@ * */ -/* Define the code template for types which need no extra initialization for the "GUTS" in the H5T_INIT_TYPE macro */ -#define H5T_INIT_TYPE_NONE_CORE { \ -} - /* Define the code template for bitfields for the "GUTS" in the H5T_INIT_TYPE macro */ #define H5T_INIT_TYPE_BITFIELD_CORE { \ dt->shared->type = H5T_BITFIELD; \ @@ -1986,7 +1983,7 @@ H5T_detect_class(const H5T_t *dt, H5T_class_t cls, hbool_t from_api) case H5T_VLEN: case H5T_ENUM: HGOTO_DONE(H5T_detect_class(dt->shared->parent, cls, from_api)); - + break; case H5T_NO_CLASS: case H5T_INTEGER: case H5T_FLOAT: @@ -3240,12 +3237,12 @@ H5T_copy(H5T_t *old_dt, H5T_copy_t method) HDassert(tmp != NULL); /* Apply the accumulated size change to the offset of the field */ - new_dt->shared->u.compnd.memb[i].offset += accum_change; + new_dt->shared->u.compnd.memb[i].offset += (size_t) accum_change; if(old_dt->shared->u.compnd.sorted != H5T_SORT_VALUE) { for(old_match = -1, j = 0; j < old_dt->shared->u.compnd.nmembs; j++) { if(!HDstrcmp(new_dt->shared->u.compnd.memb[i].name, old_dt->shared->u.compnd.memb[j].name)) { - old_match = j; + old_match = (int) j; break; } /* end if */ } /* end for */ @@ -3255,19 +3252,20 @@ H5T_copy(H5T_t *old_dt, H5T_copy_t method) HGOTO_ERROR(H5E_DATATYPE, H5E_CANTCOPY, NULL, "fields in datatype corrupted"); } /* end if */ else - old_match = i; + old_match = (int) i; /* If the field changed size, add that change to the accumulated size change */ if(new_dt->shared->u.compnd.memb[i].type->shared->size != old_dt->shared->u.compnd.memb[old_match].type->shared->size) { /* Adjust the size of the member */ new_dt->shared->u.compnd.memb[i].size = (old_dt->shared->u.compnd.memb[old_match].size*tmp->shared->size)/old_dt->shared->u.compnd.memb[old_match].type->shared->size; - accum_change += (new_dt->shared->u.compnd.memb[i].type->shared->size - old_dt->shared->u.compnd.memb[old_match].type->shared->size); + accum_change += (int) (new_dt->shared->u.compnd.memb[i].type->shared->size - old_dt->shared->u.compnd.memb[old_match].type->shared->size); + HDassert(accum_change >= 0); } /* end if */ } /* end for */ /* Apply the accumulated size change to the size of the compound struct */ - new_dt->shared->size += accum_change; + new_dt->shared->size += (size_t) accum_change; } break; @@ -3313,6 +3311,13 @@ H5T_copy(H5T_t *old_dt, H5T_copy_t method) new_dt->shared->size=new_dt->shared->u.array.nelem*new_dt->shared->parent->shared->size; break; + case H5T_NO_CLASS: + case H5T_INTEGER: + case H5T_FLOAT: + case H5T_TIME: + case H5T_STRING: + case H5T_BITFIELD: + case H5T_NCLASSES: default: break; } /* end switch */ @@ -3530,6 +3535,16 @@ H5T__free(H5T_t *dt) H5MM_xfree(dt->shared->u.opaque.tag); break; + case H5T_NO_CLASS: + case H5T_INTEGER: + case H5T_FLOAT: + case H5T_TIME: + case H5T_STRING: + case H5T_BITFIELD: + case H5T_REFERENCE: + case H5T_VLEN: + case H5T_ARRAY: + case H5T_NCLASSES: default: break; } /* end switch */ @@ -3796,11 +3811,14 @@ H5T_set_size(H5T_t *dt, size_t size) case H5T_ARRAY: case H5T_REFERENCE: HDassert("can't happen" && 0); + break; case H5T_NO_CLASS: case H5T_NCLASSES: HDassert("invalid type" && 0); + break; default: HDassert("not implemented yet" && 0); + break; } /* Commit (if we didn't convert this type to a VL string) */ @@ -3875,7 +3893,6 @@ H5T_cmp(const H5T_t *dt1, const H5T_t *dt2, hbool_t superset) unsigned *idx1 = NULL, *idx2 = NULL; size_t base_size; hbool_t swapped; - int i, j; unsigned u; int tmp; int ret_value = 0; @@ -3930,24 +3947,32 @@ H5T_cmp(const H5T_t *dt1, const 
H5T_t *dt2, hbool_t superset) for(u = 0; u < dt1->shared->u.compnd.nmembs; u++) idx1[u] = idx2[u] = u; if(dt1->shared->u.enumer.nmembs > 1) { - for(i = dt1->shared->u.compnd.nmembs - 1, swapped = TRUE; swapped && i >= 0; --i) + int i; + + for(i = (int) dt1->shared->u.compnd.nmembs - 1, swapped = TRUE; swapped && i >= 0; --i) { + int j; + for(j = 0, swapped=FALSE; j < i; j++) if(HDstrcmp(dt1->shared->u.compnd.memb[idx1[j]].name, dt1->shared->u.compnd.memb[idx1[j + 1]].name) > 0) { - tmp = idx1[j]; + unsigned tmp_idx = idx1[j]; idx1[j] = idx1[j + 1]; - idx1[j + 1] = tmp; + idx1[j + 1] = tmp_idx; swapped = TRUE; } - for(i = dt2->shared->u.compnd.nmembs - 1, swapped = TRUE; swapped && i >= 0; --i) + } + for(i = (int) dt2->shared->u.compnd.nmembs - 1, swapped = TRUE; swapped && i >= 0; --i) { + int j; + for(j = 0, swapped = FALSE; j<i; j++) if(HDstrcmp(dt2->shared->u.compnd.memb[idx2[j]].name, dt2->shared->u.compnd.memb[idx2[j + 1]].name) > 0) { - tmp = idx2[j]; + unsigned tmp_idx = idx2[j]; idx2[j] = idx2[j + 1]; - idx2[j + 1] = tmp; + idx2[j + 1] = tmp_idx; swapped = TRUE; } + } } /* end if */ #ifdef H5T_DEBUG @@ -4007,28 +4032,39 @@ H5T_cmp(const H5T_t *dt1, const H5T_t *dt2, hbool_t superset) HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, 0, "memory allocation failed"); for (u=0; u<dt1->shared->u.enumer.nmembs; u++) idx1[u] = u; - if(dt1->shared->u.enumer.nmembs > 1) - for (i=dt1->shared->u.enumer.nmembs-1, swapped=TRUE; swapped && i>=0; --i) - for (j=0, swapped=FALSE; j<i; j++) + if(dt1->shared->u.enumer.nmembs > 1) { + int i; + for (i = (int) dt1->shared->u.enumer.nmembs - 1, swapped = TRUE; swapped && i >= 0; --i) { + int j; + + for (j = 0, swapped = FALSE; j < i; j++) if (HDstrcmp(dt1->shared->u.enumer.name[idx1[j]], dt1->shared->u.enumer.name[idx1[j+1]]) > 0) { - tmp = idx1[j]; + unsigned tmp_idx = idx1[j]; idx1[j] = idx1[j+1]; - idx1[j+1] = tmp; + idx1[j+1] = tmp_idx; swapped = TRUE; } + } + } for (u=0; u<dt2->shared->u.enumer.nmembs; u++) idx2[u] = u; - if(dt2->shared->u.enumer.nmembs > 1) - for (i=dt2->shared->u.enumer.nmembs-1, swapped=TRUE; swapped && i>=0; --i) - for (j=0, swapped=FALSE; j<i; j++) + if(dt2->shared->u.enumer.nmembs > 1) { + int i; + + for (i = (int) dt2->shared->u.enumer.nmembs - 1, swapped = TRUE; swapped && i >= 0; --i) { + int j; + + for (j = 0, swapped = FALSE; j < i; j++) if (HDstrcmp(dt2->shared->u.enumer.name[idx2[j]], dt2->shared->u.enumer.name[idx2[j+1]]) > 0) { - tmp = idx2[j]; + unsigned tmp_idx = idx2[j]; idx2[j] = idx2[j+1]; - idx2[j+1] = tmp; + idx2[j+1] = tmp_idx; swapped = TRUE; } + } + } #ifdef H5T_DEBUG /* I don't quite trust the code above yet :-) --RPM */ @@ -4148,6 +4184,14 @@ H5T_cmp(const H5T_t *dt1, const H5T_t *dt2, hbool_t superset) HGOTO_DONE(1); break; + case H5T_NO_CLASS: + case H5T_INTEGER: + case H5T_FLOAT: + case H5T_TIME: + case H5T_STRING: + case H5T_BITFIELD: + case H5T_REFERENCE: + case H5T_NCLASSES: default: /* * Atomic datatypes...
@@ -4255,13 +4299,23 @@ H5T_cmp(const H5T_t *dt1, const H5T_t *dt2, hbool_t superset) case H5R_BADTYPE: case H5R_MAXTYPE: HDassert("invalid type" && 0); + break; default: HDassert("not implemented yet" && 0); + break; } break; + case H5T_NO_CLASS: + case H5T_OPAQUE: + case H5T_COMPOUND: + case H5T_ENUM: + case H5T_VLEN: + case H5T_ARRAY: + case H5T_NCLASSES: default: HDassert("not implemented yet" && 0); + break; } break; } /* end switch */ @@ -4535,7 +4589,7 @@ H5T_path_find(const H5T_t *src, const H5T_t *dst, const char *name, } /* end if */ if(cmp > 0) md++; - HDmemmove(H5T_g.path + md + 1, H5T_g.path + md, (H5T_g.npaths - md) * sizeof(H5T_path_t*)); + HDmemmove(H5T_g.path + md + 1, H5T_g.path + md, (size_t) (H5T_g.npaths - md) * sizeof(H5T_path_t*)); H5T_g.npaths++; H5T_g.path[md] = path; table = path; @@ -5000,6 +5054,17 @@ H5T_is_sensible(const H5T_t *dt) ret_value=FALSE; break; + case H5T_NO_CLASS: + case H5T_INTEGER: + case H5T_FLOAT: + case H5T_TIME: + case H5T_STRING: + case H5T_BITFIELD: + case H5T_OPAQUE: + case H5T_REFERENCE: + case H5T_VLEN: + case H5T_ARRAY: + case H5T_NCLASSES: default: /* Assume all other datatype are sensible to store on disk */ ret_value=TRUE; @@ -5082,7 +5147,7 @@ H5T_set_loc(H5T_t *dt, H5F_t *f, H5T_loc_t loc) H5T_t *memb_type; /* Member's datatype pointer */ /* Apply the accumulated size change to the offset of the field */ - dt->shared->u.compnd.memb[i].offset += accum_change; + dt->shared->u.compnd.memb[i].offset += (size_t) accum_change; /* Set the member type pointer (for convenience) */ memb_type=dt->shared->u.compnd.memb[i].type; @@ -5105,13 +5170,14 @@ H5T_set_loc(H5T_t *dt, H5F_t *f, H5T_loc_t loc) dt->shared->u.compnd.memb[i].size = (dt->shared->u.compnd.memb[i].size*memb_type->shared->size)/old_size; /* Add that change to the accumulated size change */ - accum_change += (memb_type->shared->size - (int)old_size); + accum_change += (int) (memb_type->shared->size - old_size); + HDassert(accum_change >= 0); } /* end if */ } /* end if */ } /* end for */ /* Apply the accumulated size change to the datatype */ - dt->shared->size = (size_t)(dt->shared->size + accum_change); + dt->shared->size = dt->shared->size + (size_t) accum_change; break; case H5T_VLEN: /* Recurse on the VL information if it's VL, compound or array, then free VL sequence */ @@ -5145,6 +5211,15 @@ H5T_set_loc(H5T_t *dt, H5F_t *f, H5T_loc_t loc) } /* end if */ break; + case H5T_NO_CLASS: + case H5T_INTEGER: + case H5T_FLOAT: + case H5T_TIME: + case H5T_STRING: + case H5T_BITFIELD: + case H5T_OPAQUE: + case H5T_ENUM: + case H5T_NCLASSES: default: break; } /* end switch */ @@ -5235,6 +5310,15 @@ H5T_upgrade_version_cb(H5T_t *dt, void *op_value) dt->shared->version = dt->shared->parent->shared->version; break; + case H5T_NO_CLASS: + case H5T_INTEGER: + case H5T_FLOAT: + case H5T_TIME: + case H5T_STRING: + case H5T_BITFIELD: + case H5T_OPAQUE: + case H5T_REFERENCE: + case H5T_NCLASSES: default: break; } /* end switch */ -- cgit v0.12 From 0b0d31e5201e76eae1cb0d219d68934d8c0942b7 Mon Sep 17 00:00:00 2001 From: Allen Byrne Date: Mon, 20 Apr 2015 16:04:33 -0500 Subject: [svn-r26852] Correct component matching --- config/cmake/hdf5-config.cmake.in | 20 +++++++++----------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/config/cmake/hdf5-config.cmake.in b/config/cmake/hdf5-config.cmake.in index da5304d..667d3e6 100644 --- a/config/cmake/hdf5-config.cmake.in +++ b/config/cmake/hdf5-config.cmake.in @@ -105,20 +105,20 @@ endif () list (REMOVE_DUPLICATES 
${HDF5_PACKAGE_NAME}_FIND_COMPONENTS) foreach (comp IN LISTS ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS) set (hdf5_comp2) - if (${comp} MATCHES "C") + if (${comp} STREQUAL "C") set (hdf5_comp "hdf5") - elseif (${comp} MATCHES "CXX") + elseif (${comp} STREQUAL "CXX") set (hdf5_comp "hdf5_cpp") - elseif (${comp} MATCHES "HL") + elseif (${comp} STREQUAL "HL") set (hdf5_comp "hdf5_hl") - elseif (${comp} MATCHES "CXX_HL") + elseif (${comp} STREQUAL "CXX_HL") set (hdf5_comp "hdf5_hl_cpp") - elseif (${comp} MATCHES "Tools") + elseif (${comp} STREQUAL "Tools") set (hdf5_comp "hdf5_tools") - elseif (${comp} MATCHES "Fortran") + elseif (${comp} STREQUAL "Fortran") set (hdf5_comp2 "hdf5_f90cstub") set (hdf5_comp "hdf5_fortran") - elseif (${comp} MATCHES "Fortran_HL") + elseif (${comp} STREQUAL "Fortran_HL") set (hdf5_comp2 "hdf5_hl_f90cstub") set (hdf5_comp "hdf5_hl_fortran") endif () @@ -133,17 +133,15 @@ list (REMOVE_DUPLICATES ${HDF5_PACKAGE_NAME}_FIND_COMPONENTS) else () set (${HDF5_PACKAGE_NAME}_${comp}_FOUND 1) string(TOUPPER ${HDF5_PACKAGE_NAME}_${comp}_LIBRARY COMP_LIBRARY) - set (${COMP_LIBRARY} ${${COMP_LIBRARY}} ${hdf5_comp2} ${hdf5_comp}) + set (${HDF5_PACKAGE_NAME}_LIBRARIES ${${HDF5_PACKAGE_NAME}_LIBRARIES} ${hdf5_comp2} ${hdf5_comp}) endif () else () set (${HDF5_PACKAGE_NAME}_${comp}_FOUND 1) string(TOUPPER ${HDF5_PACKAGE_NAME}_${comp}_LIBRARY COMP_LIBRARY) - set (${COMP_LIBRARY} ${${COMP_LIBRARY}} ${hdf5_comp}) + set (${HDF5_PACKAGE_NAME}_LIBRARIES ${${HDF5_PACKAGE_NAME}_LIBRARIES} ${hdf5_comp}) endif () endif () endforeach () check_required_components(${HDF5_PACKAGE_NAME}_FIND_COMPONENTS) - -set (${HDF5_PACKAGE_NAME}_LIBRARIES ${${COMP_LIBRARY}}) \ No newline at end of file -- cgit v0.12 From 34a1da1731eb1818d5bf374745672cf289522c31 Mon Sep 17 00:00:00 2001 From: Albert Cheng Date: Mon, 20 Apr 2015 19:25:56 -0500 Subject: [svn-r26857] Bugs: The configure summary (libhdf5.settings) was missing from the configure output. Solution: Append the configure summary to the configure logfile if configure succeeds. Tested: hand ran cmakehdf5 in jam and inspected the logfile. --- bin/cmakehdf5 | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/bin/cmakehdf5 b/bin/cmakehdf5 index 1f0fb7c..4842f45 100755 --- a/bin/cmakehdf5 +++ b/bin/cmakehdf5 @@ -23,6 +23,7 @@ makelog="#${progname}_2build.log" testlog="#${progname}_3test.log" packlog="#${progname}_4pack.log" installlog="#${progname}_5install.log" +config_summary=libhdf5.settings exit_code=0 # This command should be in the source directory's bin/ @@ -212,9 +213,9 @@ if [ $? != 0 ]; then echo $vers failed. Aborted. exit 1 fi -echo Running Cmake for HDF5-${version} ... - +echo Running Cmake for HDF5-${version} ... # 4. Configure the C library, tools and tests with this command: +# If successful, append the configure summary to the configure logfile. STEP "Configure..." "cmake \ -C $cacheinit \ $build_cpp_lib \ @@ -225,7 +226,8 @@ STEP "Configure..." "cmake \ $build_tools \ $with_zlib \ $with_szlib \ - $srcdir" $configlog + $srcdir" $configlog &&\ + cat $config_summary >> $configlog # 5. Build the C library, tools and tests with this command: STEP "Build the library, tools and tests, ..." "cmake --build . 
--config Release" $makelog -- cgit v0.12 From b32caab787236a72c25dcb1c895c32da8832deab Mon Sep 17 00:00:00 2001 From: Allen Byrne Date: Tue, 21 Apr 2015 11:21:52 -0500 Subject: [svn-r26864] Change location of lib settings file --- CMakeInstallation.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 0380adc..aa49cdc 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -139,7 +139,7 @@ configure_file ( ) install ( FILES ${HDF5_BINARY_DIR}/libhdf5.settings - DESTINATION ${HDF5_INSTALL_CMAKE_DIR} + DESTINATION ${HDF5_INSTALL_LIB_DIR} COMPONENT libraries ) -- cgit v0.12
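Note on the recurring patterns in the patches above: the H5S_extend rework counts only the dimensions whose requested size exceeds the current extent and rejects growth past a finite maximum, and the new H5VM_power2up helper rounds a value up to the next power of two. Below is a minimal standalone C sketch of that logic for illustration only; it is not the HDF5 API, and the names hsz, UNLIMITED, power2up and extend_check are hypothetical stand-ins for hsize_t, H5S_UNLIMITED and the internal routines.

#include <assert.h>

typedef unsigned long long hsz;      /* stand-in for hsize_t */
#define UNLIMITED ((hsz)-1)          /* stand-in for H5S_UNLIMITED */

/* Round n up to the next power of two (mirrors the H5VM_power2up hunk). */
static hsz power2up(hsz n)
{
    hsz ret = 1;
    while (ret < n)
        ret <<= 1;
    return ret;
}

/* Count the dimensions that need to grow; return -1 if any requested size
 * exceeds a finite maximum (mirrors the reworked H5S_extend loop). */
static int extend_check(unsigned rank, const hsz *cur, const hsz *max, const hsz *size)
{
    int nchanged = 0;
    for (unsigned u = 0; u < rank; u++)
        if (cur[u] < size[u]) {
            if (max && max[u] != UNLIMITED && max[u] < size[u])
                return -1;           /* "dimension cannot be increased" */
            nchanged++;
        }
    return nchanged;
}

int main(void)
{
    hsz cur[2]  = {10, 10};
    hsz max[2]  = {20, UNLIMITED};
    hsz grow[2] = {15, 40};
    hsz bad[2]  = {25, 10};

    assert(power2up(5) == 8 && power2up(8) == 8);
    assert(extend_check(2, cur, max, grow) == 2);   /* both dimensions grow */
    assert(extend_check(2, cur, max, bad) == -1);   /* first exceeds its maximum */
    return 0;
}

On the CMake side, the hdf5-config.cmake.in change replaces MATCHES with STREQUAL because MATCHES performs a regular-expression search, so a requested component such as "CXX" or "CXX_HL" would also match the pattern "C"; STREQUAL requires an exact component name, which is what the per-component library selection needs.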