diff options
author | Dana Robinson <derobins@hdfgroup.org> | 2022-04-13 21:17:29 (GMT) |
---|---|---|
committer | Dana Robinson <derobins@hdfgroup.org> | 2022-04-13 21:17:29 (GMT) |
commit | cabc39c3e197e2591449d2604bfee26465fb60e1 (patch) | |
tree | d5f39f5f5965584bf9bf49646a2af617adfd3e4e /bin | |
parent | 7355f4c505092a7a85474b47f18d5206028e2c95 (diff) | |
parent | ab69f5df770ee3cc6cd6c81d905a5317b894a002 (diff) | |
download | hdf5-feature/coding_standards.zip hdf5-feature/coding_standards.tar.gz hdf5-feature/coding_standards.tar.bz2 |
Merge branch 'develop' into feature/coding_standards
Diffstat (limited to 'bin')
45 files changed, 935 insertions, 3014 deletions
diff --git a/bin/COPYING b/bin/COPYING deleted file mode 100755 index 6497ace..0000000 --- a/bin/COPYING +++ /dev/null @@ -1,13 +0,0 @@ - - Copyright by The HDF Group and - The Board of Trustees of the University of Illinois. - All rights reserved. - - The files and subdirectories in this directory are part of HDF5. - The full HDF5 copyright notice, including terms governing use, - modification, and redistribution, is contained in the COPYING file - which can be found at the root of the source code distribution tree - or in https://support.hdfgroup.org/ftp/HDF5/releases. If you do - not have access to either file, you may request a copy from - help@hdfgroup.org. - diff --git a/bin/Makefile.am b/bin/Makefile.am index a39b695..25df36f 100644 --- a/bin/Makefile.am +++ b/bin/Makefile.am @@ -6,7 +6,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. ## diff --git a/bin/README b/bin/README deleted file mode 100644 index 1c77043..0000000 --- a/bin/README +++ /dev/null @@ -1,2 +0,0 @@ -The daily tests run copies of some of the scripts in this directory from another repository, notably snapshot and runtest. The copies in this directory should work, but are not used in daily tests, though they should be tested occasionally. - diff --git a/bin/batch/ctestP.sl.in.cmake b/bin/batch/ctestP.sl.in.cmake index 6399de7..1069fa9 100644 --- a/bin/batch/ctestP.sl.in.cmake +++ b/bin/batch/ctestP.sl.in.cmake @@ -10,5 +10,5 @@ cd @HDF5_BINARY_DIR@ ctest . 
-R MPI_TEST_ -C Release -T test >& ctestP.out -echo "Done running ctestP.sl" +echo "Done running ctestP.sl" diff --git a/bin/bbrelease b/bin/bbrelease index 395b23c..d056f6d 100755 --- a/bin/bbrelease +++ b/bin/bbrelease @@ -7,39 +7,20 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # # Make a release of hdf5. # -# Programmer: Robb Matzke -# Creation date: on or before 1998-01-29. +# NOTE: +# This script differs from bin/release in that this has an added +# --revision option to create private releases with the code revision +# hash in the version strings. # -# Modifications -# Robb Matzke, 1999-07-16 -# The SunOS 5.6 sed *must* have slashes as delimiters. I changed things like -# `sed s+/CVS++' to `sed 's/\/CVS//' -# -# Albert Cheng, 1999-10-26 -# Moved the MANIFEST checking to a separate command file so that -# it can be invoked individually. -# -# Albert Cheng, 2004-08-14 -# Added the --private option. -# -# James Laird, 2005-09-07 -# Added the md5 method. -# -# Larry Knox, 2016-08-30 -# Added the --revision option to create private releases with the -# code revision hash in the version strings. Currently the version -# of this script with the --revision option is named bbrelease. It -# can probably be merged into the original release script in the -# future. -# Commands to get the revision hash have now been converted to git -# to match the source repository change. +# This script can probably be merged into the original release script in +# the future. 
# Function definitions # @@ -47,26 +28,25 @@ USAGE() { cat << EOF -Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--nocheck] [--private] <methods> ... - -d DIR The name of the directory where the releas(es) should be +Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--private] [--revision] <methods> ... + -d DIR The name of the directory where the release(s) should be placed. --docver BRANCHNAME This is added for 1.8 and beyond to get the correct version of documentation files from the hdf5docs repository. BRANCHNAME for v1.8 should be hdf5_1_8. -h print the help page. - --nocheck Ignore errors in MANIFEST file. - --private Make a private release with today's date in version information. - --revision Make a private release with the code revision number in version information. - + --private Make a private release with today's date in version information. + --revision Make a private release with the code revision number in version information. + This must be run at the top level of the source directory. The other command-line options are the names of the programs to use for compressing the resulting tar archive (if none are given then "tar" is assumed): - tar -- use tar and don't do any compressing. - gzip -- use gzip with "-9" and append ".gz" to the output name. + tar -- use tar and don't do any compressing. + gzip -- use gzip with "-9" and append ".gz" to the output name. bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name. - zip -- convert all text files to DOS style and form a zip file for Windows use. + zip -- convert all text files to DOS style and form a zip file for Windows use. doc -- produce the latest doc tree in addition to the archive. An md5 checksum is produced for each archive created and stored in the md5 file. @@ -97,15 +77,10 @@ EOF # Function name: tar2zip # Convert the release tarball to a Windows zipball. # -# Programmer: Albert Cheng -# Creation date: 2014-04-23 -# -# Modifications -# # Steps: -# 1. 
untar the tarball in a temporay directory; +# 1. untar the tarball in a temporary directory; # Note: do this in a temporary directory to avoid changing -# the original source directory which maybe around. +# the original source directory which may be around. # 2. convert all its text files to DOS (LF-CR) style; # 3. form a zip file which is usable by Windows users. # @@ -119,8 +94,8 @@ EOF tar2zip() { if [ $# -ne 3 ]; then - echo "usage: tar2zip <tarfilename> <zipfilename>" - return 1 + echo "usage: tar2zip <tarfilename> <zipfilename>" + return 1 fi ztmpdir=/tmp/tmpdir$$ mkdir -p $ztmpdir @@ -132,23 +107,23 @@ tar2zip() (cd $ztmpdir; tar xf -) < $tarfile # sanity check if [ ! -d $ztmpdir/$version ]; then - echo "untar did not create $ztmpdir/$version source dir" - # cleanup - rm -rf $ztmpdir - return 1 + echo "untar did not create $ztmpdir/$version source dir" + # cleanup + rm -rf $ztmpdir + return 1 fi # step 2: convert text files # There maybe a simpler way to do this. # options used in unix2dos: - # -k Keep the date stamp + # -k Keep the date stamp # -q quiet mode # grep redirect output to /dev/null because -q or -s are not portable. find $ztmpdir/$version | \ - while read inf; do \ - if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \ - unix2dos -q -k $inf; \ - fi\ - done + while read inf; do \ + if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \ + unix2dos -q -k $inf; \ + fi\ + done # step 3: make zipball # -9 maximum compression # -y Store symbolic links as such in the zip archive @@ -163,14 +138,6 @@ tar2zip() # This command must be run at the top level of the hdf5 source directory. # Verify this requirement. -# Since we are running bbrelease to create an HDF5 source tarfile for buildbot -# testing with source that is not for release, there is not a file named -# "configure" but there will be one named "configure.ac". The "configure" -# file will be created when autogen.sh runs. 
There probably will always -# be a bin/release file, but just in case it is removed, we can check for -# this script, bbrelease, in the bin directory. The bin/release script should -# continue to check for "configure" because it should be present in release -# source. if [ ! \( -f configure.ac -a -f bin/bbrelease \) ]; then echo "$0 must be run at the top level of the hdf5 source directory" exit 1 @@ -182,12 +149,11 @@ VERS=`perl bin/h5vers` VERS_OLD= test "$VERS" || exit 1 verbose=yes -check=yes release_date=`date +%F` today=`date +%Y%m%d` pmode='no' revmode='no' -tmpdir="../#release_tmp.$$" # tmp work directory +tmpdir="../#release_tmp.$$" # tmp work directory DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git CPPLUS_RM_NAME=cpplus_RM @@ -195,11 +161,11 @@ CPPLUS_RM_NAME=cpplus_RM RESTORE_VERSION() { if [ X-${VERS_OLD} != X- ]; then - echo restoring version information back to $VERS_OLD - rm -f config/lt_vers.am - cp $tmpdir/lt_vers.am config/lt_vers.am - bin/h5vers -s $VERS_OLD - VERS_OLD= + echo restoring version information back to $VERS_OLD + rm -f config/lt_vers.am + cp $tmpdir/lt_vers.am config/lt_vers.am + bin/h5vers -s $VERS_OLD + VERS_OLD= fi } @@ -209,20 +175,17 @@ while [ -n "$1" ]; do arg=$1 shift case "$arg" in - -d) - DEST=$1 - shift - ;; - --nocheck) - check=no - ;; - -h) - USAGE - exit 0 - ;; - --private) - pmode=yes - ;; + -d) + DEST=$1 + shift + ;; + -h) + USAGE + exit 0 + ;; + --private) + pmode=yes + ;; --revision) revmode=yes ;; @@ -230,14 +193,14 @@ while [ -n "$1" ]; do DOCVERSION=$1 shift ;; - -*) - echo "Unknown switch: $arg" 1>&2 - USAGE - exit 1 - ;; - *) - methods="$methods $arg" - ;; + -*) + echo "Unknown switch: $arg" 1>&2 + USAGE + exit 1 + ;; + *) + methods="$methods $arg" + ;; esac done @@ -246,7 +209,7 @@ if [ "X$methods" = "X" ]; then methods="tar" fi -# Create the temporay work directory. +# Create the temporary work directory. if mkdir $tmpdir; then echo "temporary work directory for release. 
"\ "Can be deleted after release completes." > $tmpdir/README @@ -278,7 +241,7 @@ if [ X$revmode = Xyes ]; then cp config/lt_vers.am $tmpdir branch=`git branch | grep '*' | awk '{print $NF}'` revision=`git rev-parse --short HEAD` -# Set version information to m.n.r-r$revision. + # Set version information to m.n.r-r$revision. # (h5vers does not correctly handle just m.n.r-$today.) VERS=`echo $VERS | sed -e s/-.*//`-$revision echo Private release of $VERS @@ -299,30 +262,17 @@ if [ ! -d $DEST ]; then exit 1 fi -# Check the validity of the MANIFEST file. -bin/chkmanifest || fail=yes -if [ "X$fail" = "Xyes" ]; then - if [ $check = yes ]; then - exit 1 - else - echo "Continuing anyway..." - fi -fi - -# Create a manifest that contains only files for distribution. -MANIFEST=$tmpdir/H5_MANIFEST -grep '^\.' MANIFEST | grep -v _DO_NOT_DISTRIBUTE_ >$MANIFEST - -# Prepare the source tree for a release. -#ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1 +# Create a symlink to the source so files in the tarball have the prefix +# we want (gnu's --transform isn't portable) ln -s `pwd` $tmpdir/$HDF5_IN_VERS || exit 1 + # Save a backup copy of Makefile if exists. test -f Makefile && mv Makefile $tmpdir/Makefile.x cp -p Makefile.dist Makefile -# Update README.txt and release_docs/RELEASE.txt with release information in +# Update README.md and release_docs/RELEASE.txt with release information in # line 1. -for f in README.txt release_docs/RELEASE.txt; do +for f in README.md release_docs/RELEASE.txt; do echo "HDF5 version $VERS released on $release_date" >$f.x sed -e 1d $f >>$f.x mv $f.x $f @@ -330,64 +280,60 @@ for f in README.txt release_docs/RELEASE.txt; do chmod 644 $f done -# trunk is different than branches. +# develop is different than branches. if [ "${DOCVERSION}" ]; then DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git -b ${DOCVERSION} fi # Create the tar file test "$verbose" && echo " Running tar..." 
1>&2 -( \ - cd $tmpdir; \ - tar cf $HDF5_VERS.tar $HDF5_IN_VERS/Makefile \ - `sed 's/^\.\//'$HDF5_IN_VERS'\//' $MANIFEST` || exit 1 \ -) +(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_IN_VERS" || exit 1 ) # Compress MD5file=$HDF5_VERS.md5 cp /dev/null $DEST/$MD5file for comp in $methods; do case $comp in - tar) - cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar - (cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file) - ;; - gzip) - test "$verbose" && echo " Running gzip..." 1>&2 - gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz - (cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file) - ;; - bzip2) - test "$verbose" && echo " Running bzip2..." 1>&2 - bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2 - (cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file) - ;; - zip) - test "$verbose" && echo " Creating zip ball..." 1>&2 - tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 - (cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file) - ;; - doc) + tar) + cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar + (cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file) + ;; + gzip) + test "$verbose" && echo " Running gzip..." 1>&2 + gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz + (cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file) + ;; + bzip2) + test "$verbose" && echo " Running bzip2..." 1>&2 + bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2 + (cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file) + ;; + zip) + test "$verbose" && echo " Creating zip ball..." 1>&2 + tar2zip $HDF5_IN_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 + (cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file) + ;; + doc) if [ "${DOCVERSION}" = "" ]; then DOCVERSION=master fi - test "$verbose" && echo " Creating docs..." 
1>&2 - # Check out docs from git repo - (cd $tmpdir; git clone $DOC_URL > /dev/null) || exit 1 - # Create doxygen C++ RM - (cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1 - # Replace version of C++ RM with just-created version + test "$verbose" && echo " Creating docs..." 1>&2 + # Check out docs from git repo + (cd $tmpdir; git clone $DOC_URL > /dev/null) || exit 1 + # Create doxygen C++ RM + (cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1 + # Replace version of C++ RM with just-created version rm -rf $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME mv c++/src/$CPPLUS_RM_NAME $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME # Compress the docs and move them to the release area mv $tmpdir/$DOCVERSION $tmpdir/${HDF5_VERS}_docs - (cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs) - mv $tmpdir/${HDF5_VERS}_docs.tar $DEST - ;; - *) - echo "***Error*** Unknown method $comp" - exit 1 - ;; + (cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs) + mv $tmpdir/${HDF5_VERS}_docs.tar $DEST + ;; + *) + echo "***Error*** Unknown method $comp" + exit 1 + ;; esac done @@ -408,4 +354,6 @@ fi # Remove temporary things rm -rf $tmpdir +echo "DONE" + exit 0 diff --git a/bin/buildhdf5 b/bin/buildhdf5 index 064000a..786c35d 100755 --- a/bin/buildhdf5 +++ b/bin/buildhdf5 @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -178,7 +178,7 @@ LOCATE_SZLIB() esac ;; # end of case ncsa unknown) - # Unknow domain. Give a shot at the some standard places. + # Unknown domain. Give a shot at the some standard places. 
szlibpaths="/usr/local" ;; esac # end of case $mydomain diff --git a/bin/checkapi b/bin/checkapi index 6882dea..b1d17b6 100755 --- a/bin/checkapi +++ b/bin/checkapi @@ -1,4 +1,4 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl # # Copyright by The HDF Group. # Copyright by the Board of Trustees of the University of Illinois. @@ -7,12 +7,14 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # require 5.003; +use warnings; + # Purpose: insures that API functions aren't called internally. # Usage: checkapi H5*.c my $filename = ""; diff --git a/bin/checkposix b/bin/checkposix index 30128e3..bca259d 100755 --- a/bin/checkposix +++ b/bin/checkposix @@ -1,5 +1,6 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl require 5.003; +use warnings; # # Copyright by The HDF Group. @@ -9,7 +10,7 @@ require 5.003; # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -43,7 +44,27 @@ foreach $arg (@ARGV) { # # If a user specifies one file, process it no matter what so people # can inspect files we normally skip (like H5system.c). 
+ + $ignore = 0; + + # Ignored files in src/ if($#ARGV gt 0 and $filename =~ /H5FDmulti|H5FDstdio|H5VLpassthru|H5system|H5detect|H5make_libsettings/) { + $ignore = 1; + } + # Ignored atomic test files in test/ + if($#ARGV gt 0 and $filename =~ /atomic_reader|atomic_writer/) { + $ignore = 1; + } + # Ignored filter plugins in test/ + if($#ARGV gt 0 and $filename =~ /^filter_plugin\d_/) { + $ignore = 1; + } + # Ignored generators in test/ + if($#ARGV gt 0 and $filename =~ /^gen_/) { + $ignore = 1; + } + + if($ignore) { print "$filename is exempt from using Standard library macro wrappers\n"; next; } @@ -94,18 +115,21 @@ foreach $arg (@ARGV) { # These are really HDF5 functions/macros even though they don't # start with `h' or `H'. - next if $name =~ /^FUNC_(ENTER|LEAVE)(_(NO)?API|_PACKAGE|_STATIC)?(_NOFS|_NOCLEAR|_NOINIT)?(_NOFUNC|_TAG)?$/; + next if $name =~ /^FUNC_(ENTER|LEAVE)(_(NO)?API|_PACKAGE|_STATIC)?(_NAMECHECK_ONLY|_NOFS|_NOCLEAR|_NOINIT|_NOPUSH)?(_NOFUNC|_TAG)?$/; next if $name =~ /^(BEGIN|END)_FUNC$/; next if $name =~ /^U?INT(8|16|32|64)(ENCODE|DECODE)(_VAR)?$/; next if $name =~ /^CI_(PRINT_STATS|INC_SRC|INC_DST)$/; next if $name =~ /^(ABS|ADDR_OVERFLOW|ALL_MEMBERS|BOUND|CONSTR|DETECT_[I|F|M]|DOWN)$/; next if $name =~ /^(MIN3?|MAX3?|NELMTS|POWER_OF_TWO|REGION_OVERFLOW)$/; - next if $name =~ /^(UNIQUE_MEMBERS|S_ISDIR)$/; + next if $name =~ /^(SIZE_OVERFLOW|UNIQUE_MEMBERS|S_ISDIR)$/; next if $name =~ /^addr_defined$/; + next if $name =~ /^TERMINATOR$/; + + # Ignore callback invocation + next if $name =~ /^(op|cb|OP|iter_op|func)$/; - # These functions/macros are exempt. - # op, cb, and OP are often spuriously flagged so ignore them. 
- next if $name =~ /^(main|op|cb|OP)$/; + # Ignore main + next if $name =~ /^main$/; # This often appears in preprocessor lines that span multiple lines next if $name =~ /^(defined)$/; @@ -139,15 +163,38 @@ foreach $arg (@ARGV) { next if $name =~ /^(pow_fun|round_fun|abs_fun|lround_fun|llround_fun)$/; } + # This is a macro parameter in H5Rint.c. Ignore it in this file. + if($filename =~ /H5Rint/) { + next if $name =~ /^(func)$/; + } + + # Internal calls in the HDFS VFD (H5FDhdfs.c). Ignore it in this file. + if($filename =~ /H5FDhdfs/) { + next if $name =~ /^(hdfs)/; + } + + # Macros, etc. from the mirror VFD (H5FDmirror.c). Ignore in this file. + if($filename =~ /H5FDmirror/) { + next if $name =~ /^(LOG)/; + next if $name =~ /^(BSWAP_64|is_host_little_endian)$/; + } + + # These are things in H5FDs3comms.c and H5FDros3.c. Ignore them in these files. + if($filename =~ /H5FDs3comms|H5FDros3/) { + next if $name =~ /^(curl_|curlwritecallback|gmnow)/; + next if $name =~ /^(ros3_|ROS3_|S3COMMS_)/; + next if $name =~ /^(EVP_sha256|SHA256|ISO8601NOW)$/; + } + # TESTING (not comprehensive - just noise reduction) # Test macros and functions (testhdf5.h) next if $name =~ /^(AddTest|TestErrPrintf|TestSummary|TestCleanup|TestShutdown)$/; next if $name =~ /^(CHECK|CHECK_PTR|CHECK_PTR_NULL|CHECK_PTR_EQ|CHECK_I)$/; - next if $name =~ /^(VERIFY|VERIFY_STR|VERIFY|TYPE|MESSAGE|ERROR)$/; + next if $name =~ /^(VERIFY|VERIFY_STR|VERIFY_TYPE|MESSAGE|ERROR)$/; # Test macros and functions (h5test.h) - next if $name =~ /^(TESTING|PASSED|SKIPPED|FAIL_PUTS_ERROR|FAIL_STACK_ERROR|TEST_ERROR)$/; + next if $name =~ /^(TESTING|PASSED|SKIPPED|PUTS_ERROR|FAIL_PUTS_ERROR|FAIL_STACK_ERROR|TEST_ERROR|AT)$/; next if $name =~ /^(GetTestExpress)$/; # Ignore functions that start with test_ or check_ @@ -157,9 +204,49 @@ foreach $arg (@ARGV) { # Ignore functions that start with h5_ next if $name =~ /^h5_/; + # Ignore process completed status + next if $name =~ 
/(WIFEXITED|WEXITSTATUS|WIFSIGNALED|WTERMSIG|WCOREDUMP|WIFSTOPPED|WSTOPSIG)/; + # Ignore usage functions next if $name =~ /^usage$/; + # Ignore callbacks + next if $name =~ /(_cb\d?)$/; + + # Specific tests (not even remotely comprehensive) + + # accum test code + if($filename =~ /accum/) { + next if $name =~ /^(accum_)/; + } + + # cache test code + if($filename =~ /cache/) { + next if $name =~ /(_entry|_entries|_cache|_check|_dependency|_status|_op)$/; + next if $name =~ /^(verify_|smoke_check_|row_major_|col_major_)/; + next if $name =~ /^(resize_configs_are_equal|CACHE_ERROR)$/ + } + + # Splitter VFD test code. Ignore in vfd.c. + if($filename =~ /vfd/) { + next if $name =~ /^(SPLITTER_|splitter_)/; + next if $name =~ /(_splitter_)/; + next if $name =~ /^(file_exists)$/; + } + + # S3 VFD test code. Ignore in ros3.c and s3comms.c. + # HDFS VFD test code. Ignore in hdfs.c. + if($filename =~ /ros3|s3comms|hdfs/) { + next if $name =~ /^(JSVERIFY|JSFAILED_|JSERR_|jserr_|FAIL_)/; + next if $name =~ /^(curl_)/; + next if $name =~ /^(S3COMMS_FORMAT_CREDENTIAL|ISO8601NOW|gmnow)$/; + } + + # VDS test code. Ignore in vds.c. + if($filename =~ /vds/) { + next if $name =~ /^(vds_)/; + } + print "$filename:$.: $name\n"; } diff --git a/bin/chkconfigure b/bin/chkconfigure index d03f421..b2b53c6 100755 --- a/bin/chkconfigure +++ b/bin/chkconfigure @@ -6,7 +6,7 @@ ## This file is part of HDF5. The full HDF5 copyright notice, including ## terms governing use, modification, and redistribution, is contained in ## the COPYING file, which can be found at the root of the source code -## distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +## distribution tree, or in https://www.hdfgroup.org/licenses. ## If you do not have access to either file, you may request a copy from ## help@hdfgroup.org. ## diff --git a/bin/chkcopyright b/bin/chkcopyright index d67e030..eec371d 100755 --- a/bin/chkcopyright +++ b/bin/chkcopyright @@ -7,7 +7,7 @@ # This file is part of HDF5. 
The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # diff --git a/bin/chkmanifest b/bin/chkmanifest deleted file mode 100755 index 95eb8f0..0000000 --- a/bin/chkmanifest +++ /dev/null @@ -1,154 +0,0 @@ -#!/bin/sh -# -# Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# - -# Check that all the files in MANIFEST exist and (if this is a -# GIT checkout) that all the GIT-managed files appear in the -# MANIFEST. -# - -verbose=yes -MANIFEST=/tmp/HD_MANIFEST.$$ -AUTOGEN=./autogen.sh -AUTOGEN_LOG=/tmp/autogen.log.$$ - -# Main -test "$verbose" && echo " Checking MANIFEST..." 1>&2 -# clean up $MANIFEST file when exits -trap "rm -f $MANIFEST" 0 - -# Only split lines on newline, not whitespace -set -f -IFS=' -' - -# First make sure i am in the directory in which there is an MANIFEST file -# and then do the checking from there. Will try the following, -# current directory, parent directory, the directory this command resides. -if [ -f MANIFEST ]; then - continue -elif [ -f ../MANIFEST ]; then - cd .. -else - commanddir=`dirname $0` - if [ -d "$commanddir" -a -f $commanddir/MANIFEST ]; then - cd $commanddir - continue - else - echo MANIFEST file not found. Abort. 
- exit 1 - fi -fi - -# Do an autogen if generated files (e.g., configure) is not present -if [ ! -f configure ]; then - echo " running $AUTOGEN" - $AUTOGEN > $AUTOGEN_LOG 2>&1 - if [ $? -ne 0 ]; then - echo $AUTOGEN encountered error. Abort. - echo output from $AUTOGEN: - cat $AUTOGEN_LOG - exit 1 - fi - rm $AUTOGEN_LOG -fi - -# Check for duplicate entries. This can be done at any time, but it may as -# well be sooner so that if something else fails the presence of duplicates -# will already be known. -errcode=0 -DUPLICATES=`perl -ne 's/#.*//; next if /^\s*$/; if ($uniq{$_}++) { print $_; }' MANIFEST` -if [ "$DUPLICATES" ]; then - cat 1>&2 <<EOF -These entries appear more than once in the MANIFEST: -$DUPLICATES -Please remove the duplicate lines and try again. - -EOF -errcode=1 -fi - -# Copy the manifest file to get a list of file names. -grep '^\.' MANIFEST | expand | cut -f1 -d' ' >$MANIFEST - -for file in `cat $MANIFEST`; do - if [ ! -f $file ]; then - echo "- $file" - fail=yes - fi -done - -# Get the list of files under version control and check that they are -# present. -# -# First get a list of all the pending files with git status and -# check those. -git_stat=`git status -s` -for file in $git_stat; do - - # Newly added files are not listed by git ls-files, which - # we check later. - - # The line listing new files starts with 'A'. - letter=`echo $file | head -c 1` - if [ "$letter" = "A" ]; then - # Convert the git status columns to './' so it matches - # the manifest file name. - # - # There is a space between the status columns and file name, hence - # the '3'. - path=`echo $file | sed 's/^.\{3\}/\.\//g'` - # Ignore directories - if [ ! -d $path ]; then - if (grep ^$path$ $MANIFEST >/dev/null); then - : - else - echo "- $path" - fail=yes - fi - fi - fi -done - -# Next check git ls-files, which gets a list of all files that are -# checked in. -git_ls=`git ls-files` -for file in $git_ls; do - path="./${file}" - # Ignore directories - if [ ! 
-d $path ]; then - if (grep ^$path$ $MANIFEST >/dev/null); then - : - else - echo "+ $path" - fail=yes - fi - fi -done - -# Finish up -if [ "X$fail" = "Xyes" ]; then - cat 1>&2 <<EOF -The MANIFEST is out of date. Files marked with a minus sign (-) no -longer exist; files marked with a plus sign (+) are GIT-managed but do -not appear in the MANIFEST. Please remedy the situation and try again. -EOF - exit 1 -fi - -if [ $errcode -ne 0 ]; then - exit 1 -fi - -test "$verbose" && echo " The MANIFEST is up to date." 1>&2 -exit 0 diff --git a/bin/cmakehdf5 b/bin/cmakehdf5 index da1aef8..e59c772 100755 --- a/bin/cmakehdf5 +++ b/bin/cmakehdf5 @@ -199,7 +199,7 @@ DUMP_LOGFILE() # Show a start time stamp TIMESTAMP -# Initialize njobs if $AMKE is defined +# Initialize njobs if $MAKE is defined if [ -n "$MAKE" ]; then # assume all arguments are for --jobs njobs=`echo $MAKE | cut -s -d' ' -f2-` @@ -365,7 +365,7 @@ STEP "Test the library and tools..." "ctest . -C Release $njobs" $testlog # 7. Create an install image with this command: STEP "Create an install image..." "cpack -C Release CPackConfig.cmake" $packlog -# The implementation of installation is imcomplete (only works for linux). +# The implementation of installation is incomplete (only works for linux). # Screen it out for now till it is completed. if false; then # 8. Install with this command: diff --git a/bin/debug-ohdr b/bin/debug-ohdr index 5b0a4b3..7becb42 100755 --- a/bin/debug-ohdr +++ b/bin/debug-ohdr @@ -1,4 +1,4 @@ -#!/usr/bin/perl +#!/usr/bin/env perl # # Copyright by The HDF Group. # Copyright by the Board of Trustees of the University of Illinois. @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. 
# If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # diff --git a/bin/dependencies b/bin/dependencies index 82247da..1171255 100755 --- a/bin/dependencies +++ b/bin/dependencies @@ -1,4 +1,4 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl # # Copyright by The HDF Group. # Copyright by the Board of Trustees of the University of Illinois. @@ -7,10 +7,12 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # +use warnings; + my $depend_file; my $new_depend_file; my $srcdir; @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # diff --git a/bin/distdep b/bin/distdep index 4643700..693f3a2 100755 --- a/bin/distdep +++ b/bin/distdep @@ -1,4 +1,7 @@ -#!/usr/bin/perl -p +#!/bin/sh +#! -*-perl-*- +eval 'exec perl -p -x -S $0 ${1+"$@"}' + if 0; # # Copyright by The HDF Group. # Copyright by the Board of Trustees of the University of Illinois. @@ -7,7 +10,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. 
+# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -1,5 +1,6 @@ -#!/usr/local/bin/perl -w +#!/usr/bin/env perl require 5.003; +use warnings; use Text::Tabs; # NOTE: THE FORMAT OF HRETURN_ERROR AND HGOTO_ERROR MACROS HAS @@ -12,11 +13,11 @@ use Text::Tabs; # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # -# Robb Matzke, matzke@llnl.gov +# Robb Matzke # 30 Aug 1997 # # Purpose: This script will read standard input which should be a diff --git a/bin/format_source b/bin/format_source new file mode 100755 index 0000000..dce34e5 --- /dev/null +++ b/bin/format_source @@ -0,0 +1,26 @@ +#!/bin/bash +# +# Recursively format all C & C++ sources and header files, except those in the +# 'config' directory and generated files, such as H5LTanalyze.c, etc. +# +# Note that any files or directories that are excluded here should also be +# added to the 'exclude' list in .github/workflows/clang-format-check.yml +# +# (Remember to update both bin/format_source and bin/format_source_patch) + +find . \( -type d -path ./config -prune -and -not -path ./config \) \ + -or \( \( \! 
\( \ + -name H5LTanalyze.c \ + -or -name H5LTparse.c \ + -or -name H5LTparse.h \ + -or -name H5Epubgen.h \ + -or -name H5Einit.h \ + -or -name H5Eterm.h \ + -or -name H5Edefin.h \ + -or -name H5version.h \ + -or -name H5overflow.h \ + \) \) \ + -and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp \) \) \ + | xargs clang-format -style=file -i -fallback-style=none + +exit 0 diff --git a/bin/format_source_patch b/bin/format_source_patch new file mode 100755 index 0000000..439baf2 --- /dev/null +++ b/bin/format_source_patch @@ -0,0 +1,34 @@ +#!/bin/bash +# +# Recursively format all C & C++ sources and header files, except those in the +# 'config' directory and generated files, such as H5LTanalyze.c, etc. +# +# Note that any files or directories that are excluded here should also be +# added to the 'exclude' list in .github/workflows/clang-format-check.yml +# +# (Remember to update both bin/format_source and bin/format_source_patch) + +find . \( -type d -path ./config -prune -and -not -path ./config \) \ + -or \( \( \! \( \ + -name H5LTanalyze.c \ + -or -name H5LTparse.c \ + -or -name H5LTparse.h \ + -or -name H5Epubgen.h \ + -or -name H5Einit.h \ + -or -name H5Eterm.h \ + -or -name H5Edefin.h \ + -or -name H5version.h \ + -or -name H5overflow.h \ + \) \) \ + -and \( -iname *.h -or -iname *.c -or -iname *.cpp -or -iname *.hpp \) \) \ + | xargs clang-format -style=file -i -fallback-style=none + +git diff > clang_format.patch + +# Delete if 0 size +if [ ! -s clang_format.patch ] +then + rm clang_format.patch +fi + +exit 0 diff --git a/bin/gcov_script b/bin/gcov_script index 9a6512d..679d675 100755 --- a/bin/gcov_script +++ b/bin/gcov_script @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. 
+# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. diff --git a/bin/genparser b/bin/genparser index e6aee5b..9ea3152 100755 --- a/bin/genparser +++ b/bin/genparser @@ -1,12 +1,12 @@ #! /bin/bash # -# Copyright by The HDF Group. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of HDF5. The full HDF5 copyright notice, including +# terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -27,7 +27,7 @@ # IMPORTANT OS X NOTE # # If you are using OS X, you will probably not have flex or bison -# installed. In addtion, even if you do have bison installed, the bison +# installed. In addition, even if you do have bison installed, the bison # version you have installed may also have a bug that makes it unable to # process our input files. # @@ -200,28 +200,34 @@ if [ "$verbose" = true ] ; then fi ${HDF5_FLEX} --nounistd -PH5LTyy -o ${path_to_hl_src}/H5LTanalyze.c ${path_to_hl_src}/H5LTanalyze.l -# fix H5LTparse.c to declare H5LTyyparse return type as an hid_t -# instead of int. Currently the generated function H5LTyyparse is +# fix H5LTparse.c and H5LTlparse.h to declare H5LTyyparse return type as an +# hid_t instead of int. Currently the generated function H5LTyyparse is # generated with a return value of type int, which is a mapping to the # flex yyparse function. The return value in the HL library should be -# an hid_t. 
-# I propose to not use flex to generate this function, but for now I am +# an hid_t. +# I propose to not use flex to generate this function, but for now I am # adding a perl command to find and replace this function declaration in # H5LTparse.c. perl -0777 -pi -e 's/int yyparse/hid_t yyparse/igs' ${path_to_hl_src}/H5LTparse.c perl -0777 -pi -e 's/int\nyyparse/hid_t\nyyparse/igs' ${path_to_hl_src}/H5LTparse.c perl -0777 -pi -e 's/int H5LTyyparse/hid_t H5LTyyparse/igs' ${path_to_hl_src}/H5LTparse.c +perl -0777 -pi -e 's/int yyparse/hid_t yyparse/igs' ${path_to_hl_src}/H5LTparse.h +perl -0777 -pi -e 's/int\nyyparse/hid_t\nyyparse/igs' ${path_to_hl_src}/H5LTparse.h +perl -0777 -pi -e 's/int H5LTyyparse/hid_t H5LTyyparse/igs' ${path_to_hl_src}/H5LTparse.h # Add code that disables warnings in the flex/bison-generated code. # # Note that the GCC pragmas did not exist until gcc 4.2. Earlier versions # will simply ignore them, but we want to avoid those warnings. +# +# Note also that although clang defines __GNUC__, it doesn't support every +# warning that GCC does. 
for f in ${path_to_hl_src}/H5LTparse.c ${path_to_hl_src}/H5LTanalyze.c do - echo '#if defined __GNUC__ && 402 <= __GNUC__ * 100 + __GNUC_MINOR__ ' >> tmp.out + echo '#if defined (__GNUC__) ' >> tmp.out + echo '#if ((__GNUC__ * 100) + __GNUC_MINOR__) >= 402 ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wconversion" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wimplicit-function-declaration" ' >> tmp.out - echo '#pragma GCC diagnostic ignored "-Wlarger-than=" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wmissing-prototypes" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wnested-externs" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wold-style-definition" ' >> tmp.out @@ -230,11 +236,19 @@ do echo '#pragma GCC diagnostic ignored "-Wsign-conversion" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wstrict-overflow" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wstrict-prototypes" ' >> tmp.out + echo '#if !defined (__clang__) ' >> tmp.out + echo '#pragma GCC diagnostic ignored "-Wlarger-than=" ' >> tmp.out + echo '#pragma GCC diagnostic ignored "-Wsuggest-attribute=const" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wsuggest-attribute=pure" ' >> tmp.out + echo '#endif ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wswitch-default" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wunused-function" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wunused-macros" ' >> tmp.out echo '#pragma GCC diagnostic ignored "-Wunused-parameter" ' >> tmp.out + echo '#endif ' >> tmp.out + echo '#if ((__GNUC__ * 100) + __GNUC_MINOR__) >= 600 ' >> tmp.out + echo '#pragma GCC diagnostic ignored "-Wnull-dereference" ' >> tmp.out + echo '#endif ' >> tmp.out echo '#elif defined __SUNPRO_CC ' >> tmp.out echo '#pragma disable_warn ' >> tmp.out echo '#elif defined _MSC_VER ' >> tmp.out diff --git a/bin/h5cc.in b/bin/h5cc.in index 9c4e3ca..966bb40 100644 --- a/bin/h5cc.in +++ b/bin/h5cc.in @@ -7,7 +7,7 @@ # This file is part of HDF5. 
The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. ## @@ -40,7 +40,7 @@ HL="@HL@" ## $CLINKER $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $LDFLAGS ## ## $LIBS $clibpath $link_objs $link_args $shared_link ## ## ## -## These settings can be overriden by setting HDF5_CFLAGS, ## +## These settings can be overridden by setting HDF5_CFLAGS, ## ## HDF5_CPPFLAGS, HDF5_LDFLAGS, or HDF5_LIBS in the environment. ## ## ## ############################################################################ @@ -83,10 +83,10 @@ CLINKERBASE="@CC@" # CFLAGS, CPPFLAGS and LDFLAGS are reserved for use by the script user. # FLAGS brought from the hdf5 build are put in H5BLD_*FLAGS. -# User's CPPFLAGS and CFLAGS come after their H5BLD counterparts. User's -# LDFLAGS come just before clibpath, user's LIBS come after $link_objs and -# before the hdf5 libraries in $link_args, followed by any external library -# paths and libraries from AM_LDFLAGS, LDFLAGS, AM_LIBS or LIBS carried in +# User's CPPFLAGS and CFLAGS come after their H5BLD counterparts. User's +# LDFLAGS come just before clibpath, user's LIBS come after $link_objs and +# before the hdf5 libraries in $link_args, followed by any external library +# paths and libraries from AM_LDFLAGS, LDFLAGS, AM_LIBS or LIBS carried in # from the hdf5 build. The order of the flags is intended to give precedence # to the user's flags. H5BLD_CFLAGS="@AM_CFLAGS@ @CFLAGS@" @@ -102,9 +102,9 @@ LDFLAGS="${HDF5_LDFLAGS:-$LDFLAGSBASE}" LIBS="${HDF5_LIBS:-$LIBSBASE}" # If a static library is available, the default will be to use it. 
If the only -# available library is shared, it will be used by default. The user can +# available library is shared, it will be used by default. The user can # override either default, although choosing an unavailable library will result -# in link errors. +# in link errors. STATIC_AVAILABLE="@enable_static@" if test "${STATIC_AVAILABLE}" = "yes"; then USE_SHARED_LIB="${HDF5_USE_SHLIB:-no}" @@ -121,13 +121,6 @@ usage() { echo " -echo Show all the shell commands executed" echo " -prefix=DIR Prefix directory to find HDF5 lib/ and include/" echo " subdirectories [default: $prefix]" - # A wonderfully informative "usage" message. - echo "usage: $prog_name [OPTIONS] <compile line>" - echo " OPTIONS:" - echo " -help This help message." - echo " -echo Show all the shell commands executed" - echo " -prefix=DIR Prefix directory to find HDF5 lib/ and include/" - echo " subdirectories [default: $prefix]" echo " -show Show the commands without executing them" echo " -showconfig Show the HDF5 library configuration summary" echo " -shlib Compile with shared HDF5 libraries [default for hdf5 built" @@ -147,11 +140,11 @@ usage() { echo " HDF5_CC - use a different C compiler" echo " HDF5_CLINKER - use a different linker" echo " HDF5_USE_SHLIB=[yes|no] - use shared or static version of the HDF5 library" - echo " [default: no except when built with only" + echo " [default: no except when built with only" echo " shared libraries]" echo " " echo " You can also add or change paths and flags to the compile line using" - echo " the following environment varibles or by assigning them to their counterparts" + echo " the following environment variables or by assigning them to their counterparts" echo " in the 'Things You Can Modify to Override...'" section of $prog_name echo " " echo " Variable Current value to be replaced" @@ -325,7 +318,7 @@ fi if test "x$do_link" = "xyes"; then shared_link="" -# conditionnaly link with the hl library +# conditionally link with the hl library if test "X$HL" 
= "Xhl"; then libraries=" $libraries -lhdf5_hl -lhdf5 " else @@ -386,10 +379,10 @@ if test "x$do_link" = "xyes"; then # module. It's okay if they're included twice in the compile line. link_args="$link_args $H5BLD_LDFLAGS $H5BLD_LIBS" - # User's CPPFLAGS and CFLAGS come after their H5BLD counterparts. User's - # LDFLAGS come just before clibpath, user's LIBS come after $link_objs and - # before the hdf5 libraries in $link_args, followed by any external library - # paths and libraries from AM_LDFLAGS, LDFLAGS, AM_LIBS or LIBS carried in + # User's CPPFLAGS and CFLAGS come after their H5BLD counterparts. User's + # LDFLAGS come just before clibpath, user's LIBS come after $link_objs and + # before the hdf5 libraries in $link_args, followed by any external library + # paths and libraries from AM_LDFLAGS, LDFLAGS, AM_LIBS or LIBS carried in # from the hdf5 build. The order of the flags is intended to give precedence # to the user's flags. $SHOW $CLINKER $H5BLD_CPPFLAGS $CPPFLAGS $H5BLD_CFLAGS $CFLAGS $LDFLAGS $clibpath $link_objs $LIBS $link_args $shared_link diff --git a/bin/h5redeploy.in b/bin/h5redeploy.in index 242459a..86183e8 100644 --- a/bin/h5redeploy.in +++ b/bin/h5redeploy.in @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -13,11 +13,11 @@ use strict; # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. 
+# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # -# Robb Matzke <matzke@llnl.gov> +# Robb Matzke # 17 July 1998 ### Purpose @@ -66,7 +66,7 @@ use strict; # ./H5public.h or ./src/H5public.h. # # If the version number is changed (either `-s' or `-i' was used on -# the command line) then the first line of the README.txt and RELEASE.txt files +# the command line) then the version line of the README.md and RELEASE.txt files # one directory above the H5public.h file is also modified so it looks # something like: This is hdf5-1.2.3-pre1 currently under development. # The AC_INIT macro in configure.ac will also change in this case to be @@ -156,10 +156,10 @@ while ($_ = shift) { } die "mutually exclusive options given\n" if $set && $inc; -# Determine file to use as H5public.h, README.txt, +# Determine file to use as H5public.h, README.md, # release_docs/RELEASE.txt, configure.ac, windows/src/H5pubconf.h # config/lt_vers.am and config/cmake/scripts/HDF5config.cmake. -# The README.txt, release_docs/RELEASE.txt, configure.ac, +# The README.md, release_docs/RELEASE.txt, configure.ac, # windows/src/H5pubconf.h, config/lt_vers.am and # config/cmake/scripts/HDF5config.cmake # files are always in the directory above H5public.h @@ -178,9 +178,9 @@ die "unable to read file: $LT_VERS\n" unless -r $file; my $HDF5CONFIGCMAKE = $file; $HDF5CONFIGCMAKE =~ s/[^\/]*$/..\/config\/cmake\/scripts\/HDF5config.cmake/; die "unable to read file: $HDF5CONFIGCMAKE\n" unless -r $file; -# README.txt +# README.md my $README = $file; -$README =~ s/[^\/]*$/..\/README.txt/; +$README =~ s/[^\/]*$/..\/README.md/; die "unable to read file: $README\n" unless -r $file; # release_docs/RELEASE.txt my $RELEASE = $file; @@ -213,7 +213,7 @@ my (@curver) = getvers $contents; # Determine the new version number. 
my @newver; #new version if ($set) { - if ($set =~ /(\d+)\.(\d+)\.(\d+)(-([a-zA-Z]\w*))?/) { + if ($set =~ /(\d+)\.(\d+)\.(\d+)(-([\da-zA-Z]\w*))?/) { @newver = ($1, $2, $3, $5); } elsif ($set =~ /(\d+)\D+(\d+)\D+(\d+)(\s*\(([a-zA-Z]\w*)\))?\D*$/) { @newver = ($1, $2, $3, $5); @@ -303,7 +303,7 @@ if ($LT_VERS && $version_increased) { # close FILE; } -# Update the README.txt file +# Update the README.md file if ($README) { open FILE, $README or die "$README: $!\n"; my @contents = <FILE>; diff --git a/bin/iostats b/bin/iostats index f054b9c..c42a1f9 100755 --- a/bin/iostats +++ b/bin/iostats @@ -1,4 +1,4 @@ -#!/usr/bin/perl +#!/usr/bin/env perl # # Copyright by The HDF Group. # Copyright by the Board of Trustees of the University of Illinois. @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # diff --git a/bin/locate_sw b/bin/locate_sw deleted file mode 100755 index bab7bd2..0000000 --- a/bin/locate_sw +++ /dev/null @@ -1,238 +0,0 @@ -#!/bin/sh -# -# Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# - -# Try to locate the software as named in argument. 
-# This is a sequential search of all possible locations of the software. -# Usage: locate_sw <SW-Name> -# It prints a string showing the paths leading to the include, lib and bin -# directory of the software, separated by colons. E.g., if the software is -# located in /usr/sdt/*, it prints -# /usr/sdt/include:/usr/sdt/lib:/usr/sdt/bin -# Any component that is not found will be returned as an empty string. E.g., -# if somehow the header files of the software are not found, it prints -# :/usr/sdt/lib;/usr/sdt/bin - -# Function definitions -USAGE() -{ - echo "Usage: locate_sw <SW-Name>" - echo " where <SW-Name> can be hdf4, hdf5, zlib" - echo " It prints the paths leading the header files (include)," - echo " library (lib), and tools (bin). E.g.," - echo " /usr/sdt/include:/usr/sdt/lib:/usr/sdt/bin" - echo " Any component that is not found will be returned as an empty string. E.g.," - echo " if somehow the header files of the software are not found, it prints" - echo " :/usr/sdt/lib;/usr/sdt/bin" - echo "Exit code: 0 if software located; otherwise non-zero" -} - -# locate hdf4 software -locate_hdf4() -{ -# this default is the best guess of locating hdf4 software -swpaths_defaults="/usr/ncsa /usr/sdt /usr/local" -swpaths= - -case "$OSname" in - SunOS) - case "$OSrelease" in - 5.7) - swpaths="/afs/ncsa/packages/hdf/SunOS_5.7" - ;; - *) - # use default - ;; - esac - ;; - HP-UX) - case "$OSrelease" in - B.11.00) - swpaths="/afs/ncsa/packages/hdf/HPUX_11.00" - ;; - *) - # use default - ;; - esac - ;; - Linux) - swpaths="/afs/ncsa/packages/hdf/Linux" - ;; - OSF1) - swpaths="/afs/ncsa/packages/hdf/OSF1_V4.0" - ;; - *) - # just use the defaults - ;; -esac - -# Check if the hdf4 software is actually available. -# Accept the directory only if needed .h, .a and tools are found -# in the same place. That way, they are more likely to be of the -# same version. 
-# -swpaths="$swpaths $swpaths_defaults" -for sw in $swpaths; do - if [ -r $sw/include/hdf.h -a -r $sw/lib/libdf.a -a -r $sw/bin/hdp ]; then - SW_inc=$sw/include - SW_lib=$sw/lib - SW_bin=$sw/bin - SW_Location=$sw - break - fi -done -} - -# locate hdf5 software -locate_hdf5() -{ -# this default is the best guess of locating hdf5 software -swpaths_defaults="/usr/ncsa /usr/sdt /usr/local" -swpaths= - -case "$OSname" in - SunOS) - case "$OSrelease" in - 5.7) - swpaths="/afs/ncsa/packages/hdf5/SunOS_5.7" - ;; - *) - # use default - ;; - esac - ;; - HP-UX) - case "$OSrelease" in - B.11.00) - swpaths="/afs/ncsa/packages/hdf5/HPUX_11.00" - ;; - *) - # use default - ;; - esac - ;; - Linux) - swpaths="/afs/ncsa/packages/hdf5/Linux" - ;; - FreeBSD) - swpaths="/afs/ncsa/packages/hdf5/FreeBSD" - ;; - OSF1) - swpaths="/afs/ncsa/packages/hdf5/OSF1_V4.0" - ;; - *) - # just use the defaults - ;; -esac - -# Check if the hdf5 software is actually available. -# Accept the directory only if needed .h, .a and tools are found -# in the same place. That way, they are more likely to be of the -# same version. -# -swpaths="$swpaths $swpaths_defaults" -for sw in $swpaths; do - if [ -r $sw/include/hdf5.h -a -r $sw/lib/libhdf5.a -a -r $sw/bin/h5dump ]; then - SW_inc=$sw/include - SW_lib=$sw/lib - SW_bin=$sw/bin - SW_Location=$sw - break - fi -done -} - -# locate zlib software -locate_zlib() -{ -# this default is the best guess of locating zlib software -swpaths_defaults="/usr /usr/local /usr/ncsa /usr/sdt" -swpaths= - - -# Check if the zlib software is actually available. -# Accept the directory only if needed .h, .a and tools are found -# in the same place. That way, they are more likely to be of the -# same version. -# Don't know something specific to check the bin directory. Maybe gzip? -# Just make sure it exists. 
-# -swpaths="$swpaths $swpaths_defaults" -for sw in $swpaths; do - if [ -r $sw/include/zlib.h -a \ - \( -r $sw/lib/libz.a -o -r $sw/lib/libz.so \) -a -d $cw/bin ]; then - SW_inc=$sw/include - SW_lib=$sw/lib - SW_bin=$sw/bin - SW_Location=$sw - break - fi -done - -# if none found, try HDF4 software which contains a version of zlib. -if [ x-$SW_Location = x- ]; then - locate_hdf4 -fi - -} - -# Main -# -# Options -# -if [ $# -lt 1 ]; then - USAGE - exit 1 -fi - -if [ "$1" = -h ]; then - USAGE - exit 0 -fi - -SW=$1 -shift - -# locations of the software seeked. -SW_inc= # include place -SW_lib= # library place -SW_bin= # binary place -SW_Location= # parent directory of all the above - -OSname=`uname -s` -OSrelease=`uname -r` - -case $SW in -hdf4|hdf) - locate_hdf4 - ;; -hdf5) - locate_hdf5 - ;; -zlib) - locate_zlib - ;; -*) - echo "unknown software ($SW)" - USAGE - exit 1 - ;; -esac - -# show the results located, separated by commas. -if [ -n "${SW_inc}" -a -n "${SW_lib}" -a -n "${SW_bin}" ]; then - echo ${SW_inc},${SW_lib},${SW_bin} - exit 0 -else - exit 1 -fi diff --git a/bin/make_err b/bin/make_err index 623c1b6..f2b044a 100755 --- a/bin/make_err +++ b/bin/make_err @@ -1,6 +1,7 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl require 5.003; $indent=4; +use warnings; # # Copyright by The HDF Group. @@ -10,7 +11,7 @@ $indent=4; # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -37,7 +38,7 @@ sub print_copyright ($) { print $fh " * This file is part of HDF5. 
The full HDF5 copyright notice, including *\n"; print $fh " * terms governing use, modification, and redistribution, is contained in *\n"; print $fh " * the COPYING file, which can be found at the root of the source code *\n"; - print $fh " * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *\n"; + print $fh " * distribution tree, or in https://www.hdfgroup.org/licenses. *\n"; print $fh " * If you do not have access to either file, you may request a copy from *\n"; print $fh " * help\@hdfgroup.org. *\n"; print $fh " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */\n"; @@ -63,8 +64,8 @@ sub print_startprotect ($$) { $file =~ s/(\w*)\.h/$1/; # Print the ifdef info - print $fh "\n#ifndef _${file}_H\n"; - print $fh "#define _${file}_H\n"; + print $fh "\n#ifndef ${file}_H\n"; + print $fh "#define ${file}_H\n"; } ############################################################################## diff --git a/bin/make_overflow b/bin/make_overflow index ccd640e..37d6ded 100755 --- a/bin/make_overflow +++ b/bin/make_overflow @@ -1,6 +1,7 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl require 5.003; use strict; +use warnings; # Global settings @@ -15,7 +16,7 @@ my @ctypes = ( () ); # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -66,7 +67,7 @@ sub print_copyright ($) { print $fh " * This file is part of HDF5. 
The full HDF5 copyright notice, including *\n"; print $fh " * terms governing use, modification, and redistribution, is contained in *\n"; print $fh " * the COPYING file, which can be found at the root of the source code *\n"; - print $fh " * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *\n"; + print $fh " * distribution tree, or in https://www.hdfgroup.org/licenses. *\n"; print $fh " * If you do not have access to either file, you may request a copy from *\n"; print $fh " * help\@hdfgroup.org. *\n"; print $fh " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */\n"; @@ -92,8 +93,8 @@ sub print_startprotect ($$) { $file =~ s/(\w*)\.h/$1/; # Print the ifdef info - print $fh "\n#ifndef _${file}_H\n"; - print $fh "#define _${file}_H\n"; + print $fh "\n#ifndef ${file}_H\n"; + print $fh "#define ${file}_H\n"; } ############################################################################## diff --git a/bin/make_vers b/bin/make_vers index 4de2dbd..f1399a4 100755 --- a/bin/make_vers +++ b/bin/make_vers @@ -1,14 +1,15 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl require 5.003; +use warnings; # Global settings # (The max_idx parameter is the only thing that needs to be changed when adding # support for a new major release. If support for a prior major release # is added (like support for 1.4, etc), the min_sup_idx parameter will -# need to be decremented. - QAK) +# need to be decremented.) -# Max. library "index" (0 = v1.0, 1 = 1.2, 2 = 1.4, 3 = 1.6, 4 = 1.8, 5 = 1.10, 6 = 1.12, etc) -$max_idx = 6; +# Max. library "index" (0 = v1.0, 1 = 1.2, 2 = 1.4, 3 = 1.6, 4 = 1.8, 5 = 1.10, 6 = 1.12, 7 = 1.14, etc) +$max_idx = 7; # Min. supported previous library version "index" (0 = v1.0, 1 = 1.2, etc) $min_sup_idx = 3; @@ -24,7 +25,7 @@ $indent = 2; # This file is part of HDF5. 
The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -51,7 +52,7 @@ sub print_copyright ($) { print $fh " * This file is part of HDF5. The full HDF5 copyright notice, including *\n"; print $fh " * terms governing use, modification, and redistribution, is contained in *\n"; print $fh " * the COPYING file, which can be found at the root of the source code *\n"; - print $fh " * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *\n"; + print $fh " * distribution tree, or in https://www.hdfgroup.org/licenses. *\n"; print $fh " * If you do not have access to either file, you may request a copy from *\n"; print $fh " * help\@hdfgroup.org. *\n"; print $fh " * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */\n"; @@ -77,8 +78,8 @@ sub print_startprotect ($$) { $file =~ s/(\w*)\.h/$1/; # Print the ifdef info - print $fh "\n#ifndef _${file}_H\n"; - print $fh "#define _${file}_H\n"; + print $fh "\n#ifndef ${file}_H\n"; + print $fh "#define ${file}_H\n"; } ############################################################################## @@ -89,7 +90,8 @@ sub print_checkoptions ($) { my $curr_idx; # Current API version index # Print the option checking - print $fh "\n/* Issue error if contradicting macros have been defined. */\n"; + print $fh "\n\n/* Issue error if contradicting macros have been defined. 
*/\n"; + print $fh "/* (Can't use an older (deprecated) API version if deprecated symbols have been disabled) */\n"; # Print the #ifdef print $fh "#if ("; @@ -118,26 +120,40 @@ sub print_checkoptions ($) { ############################################################################## # Print "global" API version macro settings # -sub print_globalapivers ($) { +sub print_globalapidefvers ($) { my $fh = shift; # File handle for output file my $curr_idx; # Current API version index # Print the descriptive comment - print $fh "\n\n/* If a particular \"global\" version of the library's interfaces is chosen,\n"; - print $fh " * set the versions for the API symbols affected.\n"; + print $fh "\n\n/* If a particular default \"global\" version of the library's interfaces is\n"; + print $fh " * chosen, set the corresponding version macro for API symbols.\n"; print $fh " *\n"; - print $fh " * Note: If an application has already chosen a particular version for an\n"; - print $fh " * API symbol, the individual API version macro takes priority.\n"; print $fh " */\n"; for $curr_idx ($min_sup_idx .. 
($max_idx - 1)) { # Print API version ifdef - print $fh "#if defined(H5_USE_1", ($curr_idx * 2), "_API_DEFAULT) && !defined(H5_USE_1", ($curr_idx * 2), "_API)\n"; + print $fh "\n#if defined(H5_USE_1", ($curr_idx * 2), "_API_DEFAULT) && !defined(H5_USE_1", ($curr_idx * 2), "_API)\n"; # Print API version definition print $fh " " x $indent, "#define H5_USE_1", ($curr_idx * 2), "_API 1\n"; # Print API version endif - print $fh "#endif /* H5_USE_1", ($curr_idx * 2), "_API_DEFAULT && !H5_USE_1", ($curr_idx * 2), "_API */\n\n"; + print $fh "#endif /* H5_USE_1", ($curr_idx * 2), "_API_DEFAULT && !H5_USE_1", ($curr_idx * 2), "_API */\n"; } +} + +############################################################################## +# Print "global" API symbol version macro settings +# +sub print_globalapisymbolvers ($) { + my $fh = shift; # File handle for output file + my $curr_idx; # Current API version index + + # Print the descriptive comment + print $fh "\n\n/* If a particular \"global\" version of the library's interfaces is chosen,\n"; + print $fh " * set the versions for the API symbols affected.\n"; + print $fh " *\n"; + print $fh " * Note: If an application has already chosen a particular version for an\n"; + print $fh " * API symbol, the individual API version macro takes priority.\n"; + print $fh " */\n"; # Loop over supported older library APIs and define the appropriate macros for $curr_idx ($min_sup_idx .. ($max_idx - 1)) { @@ -338,7 +354,18 @@ sub parse_line ($) { my $vers_idx; # Index of version in array # Do some validation on the input - if(!( $_ =~ /v1[02468]/ || $_ =~ /v11[02468]/ )) { + # Note: v111 is allowed because H5O functions were prematurely versioned + # in HDF5 1.10. Because users were affected by this, the versioning + # was rescinded but the H5O version 2 functions were kept to be + # called directly. 
Now that the version macros are added in 1.12, + # along with a 3rd version of the H5O functions, the H5O function + # version for default api=v110 should be version 1 to work correctly + # with 1.10 applications that were using unversioned H5O functions, + # and the H5O function version should be version 3 for default api=v112 + # (the default api version for 1.12). Allowing a v111 entry and + # incrementing its index 13 lines below allows a version 2 that is + # never accessed via the H5O function macros. + if(!( $_ =~ /v1[02468]/ || $_ =~ /v11[02468]/ || $_ =~ /v111/ )) { die "bad version information: $name"; } if(exists($sym_versions{$_})) { @@ -351,6 +378,9 @@ sub parse_line ($) { #print "parse_line: _=$_\n"; # Get the index of the version ($vers_idx) = ($_ =~ /v1(\d+)/); + if($vers_idx == 11) { + $vers_idx++; + } $vers_idx /= 2; #print "parse_line: vers_idx='$vers_idx'\n"; push(@vers_nums, $vers_idx); @@ -443,8 +473,9 @@ sub create_public ($) { print_copyright(*HEADER); print_warning(*HEADER); print_startprotect(*HEADER, $file); + print_globalapidefvers(*HEADER); print_checkoptions(*HEADER); - print_globalapivers(*HEADER); + print_globalapisymbolvers(*HEADER); print_defaultapivers(*HEADER); print_endprotect(*HEADER, $file); @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -7,7 +7,7 @@ # This file is part of HDF5. 
The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -19,7 +19,7 @@ # Created Date: 2005/07/06 # Modification: # Albert Cheng 2005/8/30 -# Changed from two arguments to mulitple arguments. +# Changed from two arguments to multiple arguments. if test $# -lt 2; then exit 1 diff --git a/bin/output_filter.sh b/bin/output_filter.sh index fb59dfd..ba68cb3 100644 --- a/bin/output_filter.sh +++ b/bin/output_filter.sh @@ -4,7 +4,7 @@ ## This file is part of HDF5. The full HDF5 copyright notice, including ## terms governing use, modification, and redistribution, is contained in ## the COPYING file, which can be found at the root of the source code -## distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +## distribution tree, or in https://www.hdfgroup.org/licenses. ## If you do not have access to either file, you may request a copy from ## help@hdfgroup.org. diff --git a/bin/pkgscrpts/h5rmflags b/bin/pkgscrpts/h5rmflags index 099956c..d0f87c0 100755 --- a/bin/pkgscrpts/h5rmflags +++ b/bin/pkgscrpts/h5rmflags @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. 
# diff --git a/bin/pkgscrpts/makeHDF5BinaryTarfiles.pl b/bin/pkgscrpts/makeHDF5BinaryTarfiles.pl index f4a9ebd..d1042dc 100755 --- a/bin/pkgscrpts/makeHDF5BinaryTarfiles.pl +++ b/bin/pkgscrpts/makeHDF5BinaryTarfiles.pl @@ -8,7 +8,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # diff --git a/bin/release b/bin/release index 96c2e78..84555b6 100755 --- a/bin/release +++ b/bin/release @@ -7,30 +7,12 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # # Make a release of hdf5. -# -# Programmer: Robb Matzke -# Creation date: on or before 1998-01-29. -# -# Modifications -# Robb Matzke, 1999-07-16 -# The SunOS 5.6 sed *must* have slashes as delimiters. I changed things like -# `sed s+/CVS++' to `sed 's/\/CVS//' -# -# Albert Cheng, 1999-10-26 -# Moved the MANIFEST checking to a separate command file so that -# it can be invoked individually. -# -# Albert Cheng, 2004-08-14 -# Added the --private option. -# -# James Laird, 2005-09-07 -# Added the md5 method. # Function definitions # @@ -38,37 +20,36 @@ USAGE() { cat << EOF -Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--nocheck] [--private] <methods> ... 
- -d DIR The name of the directory where the releas(es) should be +Usage: $0 -d <dir> [--docver BRANCHNAME] [-h] [--private] <methods> ... + -d DIR The name of the directory where the release(s) should be placed. --docver BRANCHNAME This is added for 1.8 and beyond to get the correct version of documentation files from the hdf5docs repository. BRANCHNAME for v1.8 should be hdf5_1_8. -h print the help page. - --nocheck Ignore errors in MANIFEST file. - --private Make a private release with today's date in version information. - + --private Make a private release with today's date in version information. + This must be run at the top level of the source directory. The other command-line options are the names of the programs to use for compressing the resulting tar archive (if none are given then "tar" is assumed): - tar -- use tar and don't do any compressing. - gzip -- use gzip with "-9" and append ".gz" to the output name. + tar -- use tar and don't do any compressing. + gzip -- use gzip with "-9" and append ".gz" to the output name. bzip2 -- use bzip2 with "-9" and append ".bz2" to the output name. - zip -- convert all text files to DOS style and form a zip file for Windows use. - cmake-tgz -- create a tar file using the gzip default level with a build-unix.sh + zip -- convert all text files to DOS style and form a zip file for Windows use. + cmake-tgz -- create a tar file using the gzip default level with a build-unix.sh command file and all other CMake files needed to build HDF5 source using CMake on unix machines. - cmake-zip -- convert all text files to DOS style and create a zip file inluding cmake - scripts and .bat files to build HDF5 source using CMake on Windows. - hpc-cmake-tgz - -- create a tar file using the gzip default level with a build-unix.sh + cmake-zip -- convert all text files to DOS style and create a zip file including cmake + scripts and .bat files to build HDF5 source using CMake on Windows. 
+ hpc-cmake-tgz + -- create a tar file using the gzip default level with a build-unix.sh command file and all other CMake files needed to build HDF5 source using CMake on unix machines, with HDF5options.cmake files for serial and parallel builds on machines requiring batch jobs to run tests. The default is for parallel build, with serial only build by changing - the HDF5options.cmake symlink to ser-HDF5options.cmake. More + the HDF5options.cmake symlink to ser-HDF5options.cmake. More information is available in the README_HPC file. doc -- produce the latest doc tree in addition to the archive. @@ -100,11 +81,6 @@ EOF # Function name: tar2zip # Convert the release tarball to a Windows zipball. # -# Programmer: Albert Cheng -# Creation date: 2014-04-23 -# -# Modifications -# # Steps: # 1. untar the tarball in a temporary directory; # Note: do this in a temporary directory to avoid changing @@ -122,8 +98,8 @@ EOF tar2zip() { if [ $# -ne 3 ]; then - echo "usage: tar2zip <tarfilename> <zipfilename>" - return 1 + echo "usage: tar2zip <tarfilename> <zipfilename>" + return 1 fi ztmpdir=/tmp/ztmpdir$$ mkdir -p $ztmpdir @@ -135,23 +111,23 @@ tar2zip() (cd $ztmpdir; tar xf -) < $tarfile # sanity check if [ ! -d $ztmpdir/$version ]; then - echo "untar did not create $ztmpdir/$version source dir" - # cleanup - rm -rf $ztmpdir - return 1 + echo "untar did not create $ztmpdir/$version source dir" + # cleanup + rm -rf $ztmpdir + return 1 fi # step 2: convert text files # There maybe a simpler way to do this. # options used in unix2dos: - # -k Keep the date stamp + # -k Keep the date stamp # -q quiet mode # grep redirect output to /dev/null because -q or -s are not portable. 
find $ztmpdir/$version | \ - while read inf; do \ - if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \ - unix2dos -q -k $inf; \ - fi\ - done + while read inf; do \ + if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \ + unix2dos -q -k $inf; \ + fi\ + done # step 3: make zipball # -9 maximum compression # -y Store symbolic links as such in the zip archive @@ -167,24 +143,19 @@ tar2zip() # Function name: tar2cmakezip # Convert the release tarball to a Windows zipball with files to run CMake build. # -# Programmer: Larry Knox -# Creation date: 2017-02-20 -# -# Modifications -# # Steps: # 1. untar the tarball in a temporary directory; # Note: do this in a temporary directory to avoid changing # the original source directory which may be around. # 2. add build-unix.sh script. -# 3. add SZIP.tar.gz, ZLib.tar.gz and cmake files to top level directory. +# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory. # 4. create gzipped tar file with these contents: # build-unix.sh script # hdf5-<version> source code directory extracted from tar file # CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts # HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts # HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts -# SZip.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake +# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake # ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake @@ -197,13 +168,13 @@ tar2zip() # # need function to create another temporary directory, extract the # $tmpdir/$HDF5_VERS.tar into it, create build-VS*.bat files, - # add CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz + # add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz # ZLib.tar.gz, HDF5 examples, and then zip it. 
tar2cmakezip() { if [ $# -ne 3 ]; then - echo "usage: tar2cmakezip <tarfilename> <zipfilename>" - return 1 + echo "usage: tar2cmakezip <tarfilename> <zipfilename>" + return 1 fi cmziptmpdir=/tmp/cmziptmpdir$$ cmziptmpsubdir=$cmziptmpdir/CMake-$HDF5_VERS @@ -216,10 +187,10 @@ tar2cmakezip() (cd $cmziptmpsubdir; tar xf -) < $tarfile # sanity check if [ ! -d $cmziptmpsubdir/$version ]; then - echo "untar did not create $cmziptmpsubdir/$version source dir" - # cleanup - rm -rf $cmziptmpdir - return 1 + echo "untar did not create $cmziptmpsubdir/$version source dir" + # cleanup + rm -rf $cmziptmpdir + return 1 fi # step 2: add batch file for building CMake on window @@ -229,11 +200,14 @@ tar2cmakezip() (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201564 -C Release -V -O hdf5.log" > build-VS2015-64.bat; chmod 755 build-VS2015-64.bat) (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2017 -C Release -V -O hdf5.log" > build-VS2017-32.bat; chmod 755 build-VS2017-32.bat) (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201764 -C Release -V -O hdf5.log" > build-VS2017-64.bat; chmod 755 build-VS2017-64.bat) + (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2019 -C Release -V -O hdf5.log" > build-VS2019-32.bat; chmod 755 build-VS2019-32.bat) + (cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201964 -C Release -V -O hdf5.log" > build-VS2019-64.bat; chmod 755 build-VS2019-64.bat) - # step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files - cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmziptmpsubdir + # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files + cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmziptmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.12.4-Source.zip $cmziptmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.4-Source.zip $cmziptmpsubdir + cp 
/mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.zip $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpsubdir @@ -241,15 +215,15 @@ tar2cmakezip() # step 4: convert text files # There maybe a simpler way to do this. # options used in unix2dos: - # -k Keep the date stamp + # -k Keep the date stamp # -q quiet mode # grep redirect output to /dev/null because -q or -s are not portable. find $cmziptmpsubdir/$version | \ - while read inf; do \ - if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \ - unix2dos -q -k $inf; \ - fi\ - done + while read inf; do \ + if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \ + unix2dos -q -k $inf; \ + fi\ + done # step 3: make zipball # -9 maximum compression @@ -266,24 +240,20 @@ tar2cmakezip() # Function name: tar2cmaketgz # Convert the release tarball to a gzipped tar file with files to run CMake build. # -# Programmer: Larry Knox -# Creation date: 2017-02-20 -# -# Modifications # # Steps: # 1. untar the tarball in a temporary directory; # Note: do this in a temporary directory to avoid changing # the original source directory which may be around. # 2. add build-unix.sh script. -# 3. add SZIP.tar.gz, ZLib.tar.gz and cmake files to top level directory. +# 3. add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory. # 4. 
create gzipped tar file with these contents: # build-unix.sh script # hdf5-<version> source code directory extracted from tar file # CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts # HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts # HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts -# SZip.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake +# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake # ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake @@ -296,13 +266,13 @@ tar2cmakezip() # # need function to create another temporary directory, extract the # $tmpdir/$HDF5_VERS.tar into it, create build-unix.sh, - # add CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz + # add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz # ZLib.tar.gz, HDF5 examples, and then tar.gz it. tar2cmaketgz() { if [ $# -ne 3 ]; then - echo "usage: tar2cmaketgz <tarfilename> <tgzfilename>" - return 1 + echo "usage: tar2cmaketgz <tarfilename> <tgzfilename>" + return 1 fi cmgztmpdir=/tmp/cmgztmpdir$$ cmgztmpsubdir=$cmgztmpdir/CMake-$HDF5_VERS @@ -315,25 +285,26 @@ tar2cmaketgz() (cd $cmgztmpsubdir; tar xf -) < $tarfile # sanity check if [ ! 
-d $cmgztmpsubdir/$version ]; then - echo "untar did not create $cmgztmpsubdir/$version source dir" - # cleanup - rm -rf $cmgztmpdir - return 1 + echo "untar did not create $cmgztmpsubdir/$version source dir" + # cleanup + rm -rf $cmgztmpdir + return 1 fi # step 2: add build-unix.sh script (cd $cmgztmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh) - # step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files - cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpsubdir + # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files + cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.12.4-Source.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.4-Source.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpsubdir - tar czf $DEST/CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . || exit 1 - + tar czf $DEST/CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . || exit 1 + # cleanup rm -rf $cmgztmpdir } @@ -343,24 +314,19 @@ tar2cmaketgz() # and HDF5options.cmake files for parallel or serial only builds where build # tests are run on compute nodes using batch scripts. # -# Programmer: Larry Knox -# Creation date: 2019-01-28 -# -# Modifications -# # Steps: # 1. untar the tarball in a temporary directory; # Note: do this in a temporary directory to avoid changing # the original source directory which may be around. # 2. add build-unix.sh script. -# 3. add SZIP.tar.gz, ZLib.tar.gz and cmake files to top level directory. +# 3. 
add LIBAEC.tar.gz, ZLib.tar.gz and cmake files to top level directory. # 4. create gzipped tar file with these contents: # build-unix.sh script # hdf5-<version> source code directory extracted from tar file # CTestScript.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts # HDF5config.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts # HDF5options.cmake cmake file copied from <hdf5 source code>/config/cmake/scripts -# SZip.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake +# LIBAEC.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake # ZLib.tar.gz copied from /mnt/scr1/pre-release/hdf5/CMake # # 5. For HPC-CMake tgz file the following are also needed in the top-level directory: @@ -379,13 +345,13 @@ tar2cmaketgz() # # need function to create another temporary directory, extract the # $tmpdir/$HDF5_VERS.tar into it, create build-unix.sh, - # add CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz + # add CTestScript.cmake, HDF5config.cmake, LIBAEC.tar.gz # ZLib.tar.gz, HDF5 examples, and then tar.gz it. tar2hpccmaketgz() { if [ $# -ne 3 ]; then - echo "usage: tar2hpccmaketgz <tarfilename> <tgzfilename>" - return 1 + echo "usage: tar2hpccmaketgz <tarfilename> <tgzfilename>" + return 1 fi cmgztmpdir=/tmp/cmgztmpdir$$ cmgztmpsubdir=$cmgztmpdir/HPC-CMake-$HDF5_VERS @@ -398,20 +364,21 @@ tar2hpccmaketgz() (cd $cmgztmpsubdir; tar xf -) < $tarfile # sanity check if [ ! 
-d $cmgztmpsubdir/$version ]; then - echo "untar did not create $cmgztmpsubdir/$version source dir" - # cleanup - rm -rf $cmgztmpdir - return 1 + echo "untar did not create $cmgztmpsubdir/$version source dir" + # cleanup + rm -rf $cmgztmpdir + return 1 fi # step 2: add build-unix.sh script (cd $cmgztmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh) - # step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files - cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpsubdir + # step 3: add LIBAEC.tar.gz, ZLib.tar.gz and cmake files + cp /mnt/scr1/pre-release/hdf5/CMake/LIBAEC.tar.gz $cmgztmpsubdir cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir - cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.12.4-Source.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.4-Source.tar.gz $cmgztmpsubdir + cp /mnt/scr1/pre-release/hdf5/CMake/hdf5_plugins-master.tar.gz $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir @@ -419,8 +386,8 @@ tar2hpccmaketgz() cp $cmgztmpsubdir/$version/config/cmake/scripts/HPC/ser-HDF5options.cmake $cmgztmpsubdir cp $cmgztmpsubdir/$version/config/cmake/scripts/HPC/par-HDF5options.cmake $cmgztmpsubdir (cd $cmgztmpsubdir; ln -s par-HDF5options.cmake HDF5options.cmake) - tar czf $DEST/HPC-CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . || exit 1 - + tar czf $DEST/HPC-CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . 
|| exit 1 + # cleanup rm -rf $cmgztmpdir } @@ -438,11 +405,10 @@ VERS=`perl bin/h5vers` VERS_OLD= test "$VERS" || exit 1 verbose=yes -check=yes release_date=`date +%F` today=`date +%Y%m%d` pmode='no' -tmpdir="../#release_tmp.$$" # tmp work directory +tmpdir="../#release_tmp.$$" # tmp work directory DOC_URL=https://git@bitbucket.hdfgroup.org/scm/hdffv/hdf5doc.git CPPLUS_RM_NAME=cpplus_RM MAINT_MODE_ENABLED="" @@ -459,11 +425,11 @@ fi RESTORE_VERSION() { if [ X-${VERS_OLD} != X- ]; then - echo restoring version information back to $VERS_OLD - rm -f config/lt_vers.am - cp $tmpdir/lt_vers.am config/lt_vers.am - bin/h5vers -s $VERS_OLD - VERS_OLD= + echo restoring version information back to $VERS_OLD + rm -f config/lt_vers.am + cp $tmpdir/lt_vers.am config/lt_vers.am + bin/h5vers -s $VERS_OLD + VERS_OLD= fi } @@ -473,32 +439,29 @@ while [ -n "$1" ]; do arg=$1 shift case "$arg" in - -d) - DEST=$1 - shift - ;; - --nocheck) - check=no - ;; - -h) - USAGE - exit 0 - ;; - --private) - pmode=yes - ;; + -d) + DEST=$1 + shift + ;; + -h) + USAGE + exit 0 + ;; + --private) + pmode=yes + ;; --docver) DOCVERSION=$1 shift ;; - -*) - echo "Unknown switch: $arg" 1>&2 - USAGE - exit 1 - ;; - *) - methods="$methods $arg" - ;; + -*) + echo "Unknown switch: $arg" 1>&2 + USAGE + exit 1 + ;; + *) + methods="$methods $arg" + ;; esac done @@ -507,7 +470,7 @@ if [ "X$methods" = "X" ]; then methods="tar" fi -# Create the temporay work directory. +# Create the temporary work directory. if mkdir $tmpdir; then echo "temporary work directory for release. "\ "Can be deleted after release completes." > $tmpdir/README @@ -541,35 +504,17 @@ if [ ! -d $DEST ]; then exit 1 fi -# Check the validity of the MANIFEST file. -bin/chkmanifest || fail=yes -if [ "X$fail" = "Xyes" ]; then - if [ $check = yes ]; then - echo "" - echo "Note! If you are running bin/release in a development branch" - echo "later than v 1.8 the MANIFEST check is expected to fail when" - echo "autogen.sh has not been run successfully. 
Either run autogen.sh " - echo "with /usr/hdf/bin/AUTOTOOLS at the beginning of PATH or add the" - echo "--nocheck argument to the bin/release command." - exit 1 - else - echo "Continuing anyway..." - fi -fi - -# Create a manifest that contains only files for distribution. -MANIFEST=$tmpdir/H5_MANIFEST -grep '^\.' MANIFEST | grep -v _DO_NOT_DISTRIBUTE_ >$MANIFEST - -# Prepare the source tree for a release. +# Create a symlink to the source so files in the tarball have the prefix +# we want (gnu's --transform isn't portable) ln -s `pwd` $tmpdir/$HDF5_VERS || exit 1 + # Save a backup copy of Makefile if exists. test -f Makefile && mv Makefile $tmpdir/Makefile.x cp -p Makefile.dist Makefile -# Update README.txt and release_docs/RELEASE.txt with release information in +# Update README.md and release_docs/RELEASE.txt with release information in # line 1. -for f in README.txt release_docs/RELEASE.txt; do +for f in README.md release_docs/RELEASE.txt; do echo "HDF5 version $VERS released on $release_date" >$f.x sed -e 1d $f >>$f.x mv $f.x $f @@ -577,79 +522,75 @@ for f in README.txt release_docs/RELEASE.txt; do chmod 644 $f done -# trunk is different than branches. +# develop is different than branches. if [ "${DOCVERSION}" ]; then DOC_URL="$DOC_URL -b ${DOCVERSION}" fi # Create the tar file test "$verbose" && echo " Running tar..." 1>&2 -( \ - cd $tmpdir; \ - tar cf $HDF5_VERS.tar $HDF5_VERS/Makefile \ - `sed 's/^\.\//hdf5-'$VERS'\//' $MANIFEST` || exit 1 \ -) +(cd "$tmpdir" && exec tar -ch --exclude-vcs -f "$HDF5_VERS.tar" "./$HDF5_VERS" || exit 1 ) # Compress MD5file=$HDF5_VERS.md5 cp /dev/null $DEST/$MD5file for comp in $methods; do case $comp in - tar) - cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar - (cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file) - ;; - gzip) - test "$verbose" && echo " Running gzip..." 
1>&2 - gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz - (cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file) - ;; + tar) + cp -p $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.tar + (cd $DEST; md5sum $HDF5_VERS.tar >> $MD5file) + ;; + gzip) + test "$verbose" && echo " Running gzip..." 1>&2 + gzip -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.gz + (cd $DEST; md5sum $HDF5_VERS.tar.gz >> $MD5file) + ;; cmake-tgz) - test "$verbose" && echo " Creating CMake tar.gz file..." 1>&2 - tar2cmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.tar.gz 1>&2 - (cd $DEST; md5sum CMake-$HDF5_VERS.tar.gz >> $MD5file) + test "$verbose" && echo " Creating CMake tar.gz file..." 1>&2 + tar2cmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.tar.gz 1>&2 + (cd $DEST; md5sum CMake-$HDF5_VERS.tar.gz >> $MD5file) ;; hpc-cmake-tgz) - test "$verbose" && echo " Creating HPC-CMake tar.gz file..." 1>&2 - tar2hpccmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/HPC-CMake-$HDF5_VERS.tar.gz 1>&2 - (cd $DEST; md5sum HPC-CMake-$HDF5_VERS.tar.gz >> $MD5file) + test "$verbose" && echo " Creating HPC-CMake tar.gz file..." 1>&2 + tar2hpccmaketgz $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/HPC-CMake-$HDF5_VERS.tar.gz 1>&2 + (cd $DEST; md5sum HPC-CMake-$HDF5_VERS.tar.gz >> $MD5file) + ;; + bzip2) + test "$verbose" && echo " Running bzip2..." 1>&2 + bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2 + (cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file) + ;; + zip) + test "$verbose" && echo " Creating zip ball..." 1>&2 + tar2zip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 + (cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file) ;; - bzip2) - test "$verbose" && echo " Running bzip2..." 1>&2 - bzip2 -9 <$tmpdir/$HDF5_VERS.tar >$DEST/$HDF5_VERS.tar.bz2 - (cd $DEST; md5sum $HDF5_VERS.tar.bz2 >> $MD5file) - ;; - zip) - test "$verbose" && echo " Creating zip ball..." 
1>&2 - tar2zip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/$HDF5_VERS.zip 1>&2 - (cd $DEST; md5sum $HDF5_VERS.zip >> $MD5file) - ;; cmake-zip) test "$verbose" && echo " Creating CMake-zip ball..." 1>&2 tar2cmakezip $HDF5_VERS $tmpdir/$HDF5_VERS.tar $DEST/CMake-$HDF5_VERS.zip 1>&2 (cd $DEST; md5sum CMake-$HDF5_VERS.zip >> $MD5file) ;; - doc) + doc) if [ "${DOCVERSION}" = "" ]; then DOCVERSION=master fi - test "$verbose" && echo " Creating docs..." 1>&2 - # Check out docs from git repo - (cd $tmpdir; git clone -q $DOC_URL ${DOCVERSION} > /dev/null) || exit 1 + test "$verbose" && echo " Creating docs..." 1>&2 + # Check out docs from git repo + (cd $tmpdir; git clone -q $DOC_URL ${DOCVERSION} > /dev/null) || exit 1 # Create doxygen C++ RM - (cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1 - # Replace version of C++ RM with just-created version - rm -rf $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1 - mv c++/src/$CPPLUS_RM_NAME $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1 + (cd c++/src && doxygen cpp_doc_config > /dev/null ) || exit 1 + # Replace version of C++ RM with just-created version + rm -rf $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1 + mv c++/src/$CPPLUS_RM_NAME $tmpdir/${DOCVERSION}/html/$CPPLUS_RM_NAME || exit 1 # Compress the docs and move them to the release area - mv $tmpdir/${DOCVERSION} $tmpdir/${HDF5_VERS}_docs || exit 1 - (cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs) || exit 1 - mv $tmpdir/${HDF5_VERS}_docs.tar $DEST || exit 1 - ;; - *) - echo "***Error*** Unknown method $comp" - exit 1 - ;; + mv $tmpdir/${DOCVERSION} $tmpdir/${HDF5_VERS}_docs || exit 1 + (cd $tmpdir && tar cf ${HDF5_VERS}_docs.tar ${HDF5_VERS}_docs) || exit 1 + mv $tmpdir/${HDF5_VERS}_docs.tar $DEST || exit 1 + ;; + *) + echo "***Error*** Unknown method $comp" + exit 1 + ;; esac done @@ -675,4 +616,6 @@ fi # Remove temporary things rm -rf $tmpdir +echo "DONE" + exit 0 diff --git a/bin/restore.sh b/bin/restore.sh index 60ac661..0597572 
100755 --- a/bin/restore.sh +++ b/bin/restore.sh @@ -6,7 +6,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # @@ -48,6 +48,9 @@ rm -f bin/missing rm -f bin/test-driver rm -f bin/depcomp +echo "Remove files generated by autoheader" +rm -f src/H5config.h.in + echo "Remove files generated by bin/make_err" rm -f src/H5Epubgen.h rm -f src/H5Einit.h diff --git a/bin/runbkgprog b/bin/runbkgprog index 69fa2d0..b0d4b73 100755 --- a/bin/runbkgprog +++ b/bin/runbkgprog @@ -1,5 +1,6 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl require 5.003; +use warnings; $indent=4; # @@ -10,7 +11,7 @@ $indent=4; # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # diff --git a/bin/runtest b/bin/runtest deleted file mode 100755 index 2611f09..0000000 --- a/bin/runtest +++ /dev/null @@ -1,966 +0,0 @@ -#! /bin/sh -# -# Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. -# All rights reserved. -# -# This file is part of HDF5. 
The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# - -# run the hdf5/bin/snapshot -# Usage: -# runtest run the test for the local host -# runtest <hostname> run the test for <hostname> -# runtest -all run the test for all predefined hosts -# -# Assumptions in knowing where to find the right scripts to execute. -# 1. assume we are at the top level of the hdf5 source. So, bin/* are -# where the script files are. -# 2. after the cvs update is completed, we can go to the snapshot area -# hdf5 source tree and use the bin/* there. -# 3. Cannot use the snapshot area scripts from the beginning because -# for one, the current directory is renamed as previous right after -# a snapshot release; and for another, some scripts may be changed -# by the cvs update while it is being used. - -# local setup -DEBUGMODE="" -test -n "$DEBUGMODE" && echo "******** DEBUGMODE is $DEBUGMODE ************" -WHEREAMI='pwd' -CMD= - -# the name of this program -PROGNAME="bin/runtest $DEBUGMODE" - -# Setup -HOSTNAME=`hostname | cut -f1 -d.` # no domain part -TODAY=`date +%m%d%a` -WEEKDAY=`date +%a` -H5VER= # default to current CVS version -H5VERSION= # default to current CVS version -n_test=0 # Number of tests ran -n_pass=0 # Number of tests passed -n_fail=0 # Number of tests failed -n_skip=0 # Number of tests skipped - -# Default to do checkout (only once) and test, no release. -# Will run test only if there is significant differences from previous version. 
-# If srcdir is not used, don't launched multiple tests -SNAPSHOT="${DEBUGMODE:+echo }bin/snapshot" -SRCDIR="srcdir" -# Default standard Snaptest commands -SNAPCMD="$SRCDIR test clean" -# Default Standard snaptest command options -STANDARD_OPT="" -ENABLE_PARALLEL="--enable-parallel" -CONFIGNAME=$HOSTNAME # Name used in the SNAPTESTCFG file - -# test host default as local host. -TESTHOST="" - -################################# -# Function definitions -################################# - -# Print messages to stdout -# Use this to show output heading to stdout -PRINT() -{ - echo "$*" -} - -# Show seconds since midnight. -# This is used to calculate seconds elapsed -SecOfDay() -{ - set `date '+%H %M %S'` - t_sec=`expr $1 \* 3600 + $2 \* 60 + $3` - echo $t_sec -} - -# Calculated the elapsed time (in seconds) between the first -# and second time. If second time is smaller than the first, -# we assume the clock has passed midnight and calculate appropriately. -ElapsedTime() -{ - if [ $2 -lt $1 ]; then - t_sec=`expr 3600 \* 24 - $1 + $2` - else - t_sec=`expr $2 - $1` - fi - echo `expr $t_sec / 60`m `expr $t_sec % 60`s -} - -# Report errors -# $1--an error message to be printed -REPORT_ERR() -{ - ERRMSG=$1 - # print it with a banner shifted right a bit - PRINT " *************************************" - PRINT " `date`" - PRINT " $ERRMSG" - PRINT " *************************************" - # report it in the FAILED-LOG file too - PRINT "$ERRMSG" >> $FAILEDLOG -} - -# -# Report results of the last test done -REPORT_RESULT() -{ - if [ $retcode -eq 0 ]; then - if [ $skiptest = yes ]; then - n_skip=`expr $n_skip + 1` - PRINT "SKIPPED ${HOSTNAME}: $TEST_TYPE" | tee -a $SKIPPEDLOG - else - n_pass=`expr $n_pass + 1` - PRINT "PASSED ${HOSTNAME}: $TEST_TYPE" | tee -a $PASSEDLOG - fi - else - # test failed. 
- n_fail=`expr $n_fail + 1` - REPORT_ERR "****FAILED ${HOSTNAME}: $TEST_TYPE****" - fi -} - -# Print a blank line -PRINT_BLANK() -{ - PRINT -} - -# Print test trailer -PRINT_TEST_TRAILER() -{ - PRINT "*** finished $TEST_TYPE tests for $HOSTNAME ***" - date; EndTime=`SecOfDay` - PRINT Total time = `ElapsedTime $StartTime $EndTime` - PRINT_BLANK -} - -# Print trailer summary -PRINT_TRAILER() -{ - PRINT "*** finished tests in $HOSTNAME ***" - date; TotalEndTime=`SecOfDay` - PRINT "${HOSTNAME}: Ran $n_test($n_pass/$n_fail/$n_skip) $runtest_type, Grand total test time = " \ - "`ElapsedTime $TotalStartTime $TotalEndTime`" | tee -a $TIMELOG - PRINT_BLANK -} - -# Figure out which remote command to use to reach a host. -# Try ssh first, then rsh since fewer machines support rsh exec. -# $1--hostname to reach. -CHECK_RSH() -{ - # Figure out how to use ping command in this host. - # Some hosts use "ping host count", some use "ping -c count host". - # Test "ping -c 3 -w 5" since it has timeout feature. - # Test "ping -c ..." style before "ping host 3" because some machines - # that recognize -c treat 'ping localhost 3' as to ping host '3'. - if [ -z "$PING" ]; then - if ping -c 3 -w 5 localhost >/dev/null 2>&1; then - PING='ping -c 3 -w 5' - PINGCOUNT= - elif ping -c 3 localhost >/dev/null 2>&1; then - PING='ping -c 3' - PINGCOUNT= - elif ping localhost 3 >/dev/null 2>&1; then - PING=ping - PINGCOUNT=3 - else # don't know how to use ping. - PING=no_ping - PINGCOUNT= - fi - fi - # - host=$1 - # Try remote command with host if it responds to ping. - # Still try it if we don't know how to do ping. 
- if [ no_ping = "$PING" ] || $PING $host $PINGCOUNT >/dev/null 2>&1; then - if ssh $host -n hostname >/dev/null 2>&1; then - RSH=ssh - elif rsh $host -n hostname >/dev/null 2>&1; then - RSH=rsh - else - PRINT cannot remote command with $host - RSH="NoRemoteCommand" - fi - else - RSH="NotReachable" - fi -} - - -# Wait for a file for at most number of minutes -# $1--the file -# $2--number of minutes -# WAIT_STATUS set to: -# -1 if errors encountered -# 0 if file found within time limit -# 1 if file not found within time limit -WAITFOR() -{ - wait_file=$1 - nminutes=$2 - if [ -z "$wait_file" -o ! "$nminutes" -ge 0 ] - then - PRINT "errors in argument of WAITFOR(): wait_file($1) or nminutes($2)" - WAIT_STATUS=-1 - return - fi - while [ ! -f $wait_file ]; do - if [ $nminutes -gt 0 ]; then - PRINT "Wait For $wait_file to appear" - sleep 60 #sleep 1 minute - else - WAIT_STATUS=1 - return - fi - nminutes=`expr $nminutes - 1` - done - WAIT_STATUS=0 - return -} - - -# Wait till a file disappears for at most number of minutes. -# Useful to wait till a lock is removed by another process. -# $1--the file -# $2--number of minutes -# WAIT_STATUS set to: -# -1 if errors encountered -# 0 if file disappears within time limit -# 1 if file has not disappeared within time limit -WAITTILL() -{ - wait_file=$1 - nminutes=$2 - if [ -z "$wait_file" -o ! "$nminutes" -ge 0 ] - then - PRINT "errors in argument of WAITTILL(): wait_file($1) or nminutes($2)" - WAIT_STATUS=-1 - return - fi - while [ -f $wait_file ]; do - if [ $nminutes -gt 0 ]; then - PRINT "Wait till $wait_file has disappeared" - sleep 60 #sleep 1 minute - else - WAIT_STATUS=1 - return - fi - nminutes=`expr $nminutes - 1` - done - WAIT_STATUS=0 - return -} - - -# Run one snapshot test -# $*--Types of test being run -RUNSNAPTEST() -{ - SNAPCMD_OPT="$STANDARD_OPT" # snapshot test option - SRCDIRNAME=${HOSTNAME} - # restore CC, PATH in case they were changed in the last test. 
- CC="$CC_SAVED" - PATH=$PATH_SAVED - export PATH # DEC OSF1 needs to export PATH explicitly - TEST_TYPE=$* - retcode=0 - skiptest=no - date - PRINT "*** starting $TEST_TYPE tests in $HOSTNAME ***" - PRINT "Uname -a: `uname -a`" - - # Parse the test type and set options accordingly. - # See comments of SNAPTEST_CONFIG_PARSE(). - while [ $# -gt 0 ]; do - case $1 in - -n32) # want -n32 option - SRCDIRNAME=${SRCDIRNAME}-n32 - CC="cc -n32" - export CC - ;; - -64) # want -64 option - SRCDIRNAME=${SRCDIRNAME}-64 - CC="cc -64" - export CC - ;; - parallel) # want parallel test - SNAPCMD_OPT="$SNAPCMD_OPT $ENABLE_PARALLEL" - SRCDIRNAME=${SRCDIRNAME}-pp - ;; - standard) # standard test - ;; - --*) - # option for configure - SNAPCMD_OPT="$SNAPCMD_OPT $1" - ;; - op-configure) - # option for configure - SNAPCMD_OPT="$SNAPCMD_OPT $1 $2" - shift - ;; - op-snapshot) - # option for snapshot - shift - SNAPCMD_OPT="$SNAPCMD_OPT $1" - ;; - setenv) - # pass them along to snapshot set environment variable - shift - SNAPCMD_OPT="$SNAPCMD_OPT setenv $1 $2" - shift - ;; - setenvN) - # set environment variable with $1 values - # e.g., setenvN 3 x a b c is same as setenv x="a b c". - # pass them along to snapshot set environment variable - shift - envN=$1 - shift - envname=$1 - SNAPCMD_OPT="$SNAPCMD_OPT setenvN $envN $envname" - envalue= - while test $envN -gt 0; do - shift - envalue="$envalue $1" - envN=`expr $envN - 1` - done - SNAPCMD_OPT="$SNAPCMD_OPT $envalue" - ;; - skip) - # skip this test - skiptest=yes - ;; - srcdirname) - # Use this before using parallel and -n32 since this overrides - # the others. - shift - SRCDIRNAME=$1 - ;; - deploy) - # deploy the built binary. - shift - SNAPCMD_OPT="$SNAPCMD_OPT deploy $1" - ;; - deploydir) - # default directory for deployment. 
- shift - SNAPCMD_OPT="$SNAPCMD_OPT deploydir $1" - ;; - *) # unknown test - PRINT "$0: unknown type of test ($1)" - retcode=1 - ;; - esac - shift - done - - if [ $retcode -ne 0 -o $skiptest = yes ]; then - errcode=$retcode - return $retcode - fi - - # Track down the zlib software - ans=`$SNAPYARD/current/bin/locate_sw zlib` - if [ $? = 0 ]; then - Z_INC=`echo $ans | cut -f1 -d,` - Z_LIB=`echo $ans | cut -f2 -d,` - SNAPCMD_OPT="$SNAPCMD_OPT zlib $Z_INC,$Z_LIB" - else - # cannot locate zlib software. - # continue the test, maybe configure can find it. - : - fi - - if [ -n "${SRCDIRNAME}" ]; then - SNAPCMD_OPT="$SNAPCMD_OPT srcdirname ${SRCDIRNAME}" - fi - - # Setup log file name to save test output - THIS_MINUTE=`date +%H%M` - LOGFILE=${LOGBASENAME}/${SRCDIRNAME}_${TODAY}_${THIS_MINUTE} - PRINT "Running snapshot with output saved in" - PRINT " $LOGFILE" - (date; PRINT Hostname=$HOSTNAME) >> $LOGFILE - - ( - cd $SNAPYARD/current - $SNAPSHOT $SNAPCMD $SNAPCMD_OPT - ) >> $LOGFILE 2>&1 - retcode=$? - [ $retcode -ne 0 ] && errcode=$retcode - - date >> $LOGFILE - if [ $retcode -ne 0 ]; then - # Dump the first 10 lines and the last 30 lines of the LOGFILE. - ( ntail=30 - echo ========================= - echo "Dumping logfile of ${HOSTNAME}: $TEST_TYPE" - echo "Last $ntail lines of $LOGFILE" - echo ========================= - tail -$ntail $LOGFILE - echo ========================= - echo Dumping done - echo ========================= - echo "" - ) >> $FAILEDDETAIL - fi -} - -TIMELIMIT_PARSE() -{ - # Function returns timeparam for timekeeper via standard out - - # any debug statements should be 'echo "Debug string" >&2' or timekeeper - # will declare timeparam to be non-numeric and ignore it. - while read x y ; do - # Scan for entry for this weekday. - xd=`echo $x | cut -f1 -d/` - if [ "$xd" = ${WEEKDAY} ]; then - # strip away the weekday/ part. - timeparam=`echo $x | cut -f2 -d/` - break - fi - case "$x" in - '' | '#'*) - # blank or comment lines. Continue. 
- ;; - ???/*) - # Ignore any entry not of this weekday. - ;; - *) - timeparam="$x" - ;; - esac - done - echo $timeparam - return -} - -# configuration parsing. -# Taking configuration from input. -# This should be invoke with configure file as stdin. -# Syntax of the configure file: -# All lines started with the # are comment lines and are ignored. -# Blank lines are ignored too. -# Each config line starts with a "Scope" followed by test types. -# -# Scope can be: -# standard ... # what the standard test types are. -# <host>: <test> Do <test> for <host> -# all: <test> Do <test> for all hosts. -# <weekday>/... Use this scope if the <weekday> matches. -# <weekday> can be {Mon,Tue,Wed,Thu,Fri,Sat,Sun} -# If no <host>: input for a <host>, the standard test is used. -# -# Test types: -# standard tests defined in standard scope. -# -n32 -n32 mode. Apply to 64/32 bit OS such as IRIX64. -# parallel parallel mode. -# op-configure <option> configure option -# op-snapshot <option> snapshot option -# --* configure option -# setenv <name> <value> set environment variable <name> to <value> -# Pass along to snapshot -# setenvN <N> <name> <value> ... -# set environment variable with <N> values -# e.g., setenvN 3 x a b c is same as setenv x="a b c". -# Pass along to snapshot. -# skip skip this test -# srcdirname <name> use <name> as the build-directory. -# deploy <name> deploy the built binary at directory <name>. -# deploydir <name> use <name> as the default directory for deployment. -SNAPTEST_CONFIG_PARSE() -{ - while read x y ; do - # Scan for entry for this weekday. - xd=`echo $x | cut -f1 -d/` - if [ "$xd" = ${WEEKDAY} ]; then - # strip away the weekday/ part. - x=`echo $x | cut -f2 -d/` - fi - case "$x" in - '' | '#'*) - # blank or comment lines. Continue. - ;; - ???/*) - # Ignore any entry not of this weekday. 
- ;; - standard) - #standard configuration - STANDARD_OPT="$y" - ;; - all: | ${CONFIGNAME}:) - # types of test for all hosts or this host - if [ -n "$TEST_TYPES" ]; then - TEST_TYPES="$TEST_TYPES ; $y" - else - TEST_TYPES="$y" - fi - ;; - *:) # ignore types of test for other hosts - ;; - *) # unknown configuration option - PRINT $x $y - PRINT "***Unknown configuration option. Ignored.***" - ;; - esac - done -} - -# Snap Test configuration parsing. -# If TEST_TYPES is not set, set it to do the "standard" test. -SNAPTEST_CONFIG() -{ - TEST_TYPES= - STANDARD_OPT= - if [ -f $SNAPTESTCFG ]; then - SNAPTEST_CONFIG_PARSE < $SNAPTESTCFG - fi - TEST_TYPES=${TEST_TYPES:-'standard'} -} - - -# Show usage page -USAGE() -{ -cat <<EOF -Usage: runtest [-h] [-debug] [-r<version>] [-all] [-nocvs] [-nodiff] [<host> ...] - -h - print this help page - -debug - turn on debug mode - -r<version> - do runtest for <version> - -all - launch tests for all pre-defined testing hosts - -nocvs - do not do cvs commands - -nodiff - do not do diff commands - -setup - setup the directory structure for snapshot test - -configname <name> - use <name> as hostname in the parsing of the snaptest configure file - <host> - launch tests for <host> - --all and <host> are contradictory and whichever is specified last, is -the one to take effect. If neither are given, do the test for the -local host. -EOF -} - - -# Verify if directory ($1) exists. If not, create it. -CHECK_DIR() -{ - dir=$1 - if test ! -e $1; then - echo mkdir $1 - mkdir $1 - errcode=$? - elif test ! -d $1; then - echo $1 is not a directory - errcode=1 - fi -} - - -################################# -# Main -################################# -################################# -# Set up global variables -################################# -retcode=0 # error code of individula task -errcode=0 # error code of the whole test -skiptest=no # if test is skipped -CC_SAVED="$CC" # CC & PATH maybe changed within a test. 
-PATH_SAVED=$PATH # These save the original values. -timelimit=300 # default time limit (minutes) for the timekeeper - -################################# -# Parse options -################################# -while [ $# -gt 0 ]; do - case "$1" in - -h) # help--show usage - USAGE - exit 0 - ;; - -debug*) - # set debug mode - DEBUGMODE="$1" - SNAPSHOT="echo bin/snapshot" - PROGNAME="$PROGNAME $DEBUGMODE" - PRINT "******** DEBUGMODE is $DEBUGMODE ************" - ;; - -r*) - # version string - H5VER="$1" - ;; - -all) - # Test all hosts. - TESTHOST=-all - ;; - -nocvs) - # do not do cvs commands. - NOCVS=nocvs - ;; - -nodiff) - # do not do diff commands. - NODIFF=nodiff - ;; - -configname) - # use <name> as hostname in the parsing of the snaptest configure file. - shift - CONFIGNAME=$1 - ;; - -setup) - # setup the directory structure for snapshot test. - CMD=setup - ;; - -*) # Unknow option - PRINT "Unknown option ($1)" - USAGE - exit 1 - ;; - *) - TESTHOST=$* - break - ;; - esac - shift -done - -# setup H5VER if not set yet -if [ -z "$H5VER" -a -f bin/snapshot_version ] -then - . bin/snapshot_version -fi - -if [ -n "$H5VER" ] -then - H5VERSION=hdf5_`echo $H5VER | sed -e s/-r// -e s/\\\./_/g` - PROGNAME="$PROGNAME $H5VER" -else - H5VERSION=hdf5 -fi - -################################# -# Setup snapshot test directories -################################# -BASEDIR=${HOME}/snapshots-${H5VERSION} -# initial processing of setup option if requested -if test x-$CMD = x-setup; then - CHECK_DIR $BASEDIR - test $errcode -ne 0 && exit 1 -elif [ ! 
-d ${BASEDIR} ]; then - echo "BASEDIR ($BASEDIR) does not exist" - exit 1 -fi -# Show the real physical path rather than the symbolic path -SNAPYARD=`cd $BASEDIR && /bin/pwd` -# Log file basename -LOGDIR=${SNAPYARD}/log -LOGBASENAME=${LOGDIR} -PASSEDLOG=${LOGDIR}/PASSED_LOG_${TODAY} -FAILEDLOG=${LOGDIR}/FAILED_LOG_${TODAY} -FAILEDDETAIL=${LOGDIR}/FAILED_DETAIL_${TODAY} -SKIPPEDLOG=${LOGDIR}/SKIPPED_LOG_${TODAY} -TIMELOG=${LOGDIR}/TIME_LOG_${TODAY} -TIMEKEEPERLOG=${LOGDIR}/TIMEKEEPER_LOG_${TODAY} -CVSLOG=${LOGDIR}/CVS_LOG_${TODAY} -CVSLOG_LOCK=${LOGDIR}/CVS_LOG_LOCK_${TODAY} -DIFFLOG=${LOGDIR}/DIFF_LOG_${TODAY} -COPYRIGHT_ERR=${LOGDIR}/COPYRIGHT_ERR_${TODAY} -# Snap Test hosts and Configuration files -ALLHOSTSFILE=${SNAPYARD}/allhostfile -SNAPTESTCFG=${SNAPYARD}/snaptest.cfg -TIMELIMIT=${SNAPYARD}/timelimit -TMPFILE="${LOGDIR}/#runtest.${TODAY}.$$" - -# more processing of setup option if requested -if test x-$CMD = x-setup; then - CHECK_DIR $LOGDIR - test $errcode -ne 0 && exit 1 - CHECK_DIR $LOGDIR/OLD - test $errcode -ne 0 && exit 1 - CHECK_DIR $SNAPYARD/TestDir - test $errcode -ne 0 && exit 1 - # create empty test hosts or configure files if non-existing - for f in $ALLHOSTSFILE $SNAPTESTCFG; do - if test ! -f $f; then - echo Creating $f - touch $f - fi - done - # create or update the current source. - echo update current source - $SNAPSHOT checkout - # setup completed. Exit. - exit 0 -fi - -################################# -# Show some host status numbers -################################# -# df sometimes hangs due to file system problems. Invoke it as background -# process and give it 10 seconds to finish. If it hangs, just continue. 
-uptime -df & -sleep 10 - -################################# -# Setup test host(s) -################################# -if [ "$TESTHOST" = -all ]; then - if [ -f $ALLHOSTSFILE ]; then - TESTHOST=`sed -e '/^#/d;/^ *$/d' $ALLHOSTSFILE` - else - PRINT "could not access the all-hosts-file ($ALLHOSTSFILE)" - USAGE - exit 1 - fi -fi - - -################################# -# Setup to print a trailer summary when exiting not via -# the normal end of the script. -################################# -trap PRINT_TRAILER 0 - -# -TotalStartTime=`SecOfDay` - -# Process the configuration -SNAPTEST_CONFIG -PRINT STANDARD_OPT=$STANDARD_OPT -PRINT TEST_TYPES=$TEST_TYPES -PRINT_BLANK - -# Do a checkout if one has not been done today. -# Then check MANIFEST file and copyrights noitces. -if [ -z "$NOCVS" ]; then - PRINT "Running CVS checkout with output saved in" - PRINT " $CVSLOG" - # Set CVS lock first - touch $CVSLOG_LOCK - ($SNAPSHOT checkout ) >> $CVSLOG 2>&1 - # Save error code and remove the lock - errcode=$? - rm -f $CVSLOG_LOCK - if [ $errcode -ne 0 ]; then - # test failed. - REPORT_ERR "****FAILED ${HOSTNAME}: CVS checkout****" - exit $errcode - fi - # =================== - # Check MANIFEST file - # =================== - PRINT Checking MAINFEST file ... - (cd $SNAPYARD/current; bin/chkmanifest) > $TMPFILE 2>&1 - errcode=$? - if [ $errcode -eq 0 ]; then - # test passed. - cat $TMPFILE - else - # test failed. - REPORT_ERR "****FAILED ${HOSTNAME}: MANIFEST check****" - ( echo ========================= - echo "MANIFEST checking failed output" - echo ========================= - cat $TMPFILE - echo ========================= - echo "MANIFEST checking failed output done" - echo ========================= - echo "" - ) >> $FAILEDDETAIL - fi - rm $TMPFILE - PRINT_BLANK - # No copyright checking until what need copyright is decided. 2006/4/7. - if false; then - # ====================== - # Check Copyright notice - # ====================== - PRINT Checking Copyrights notices ... 
- if (cd $SNAPYARD/current; bin/chkcopyright) > $TMPFILE 2>&1 ; then - echo Passed. - else - # Save the output and report some of it. - # Do not report it as failed for runtest yet. - # Send a separate report mail via hardcoding. - # Need fixes/cleanup later. - echo "Failed. See detail in another report mail" - cp $TMPFILE $COPYRIGHT_ERR - nheadlines=300 - ntaillines=5 # Number of lines in report summary. - ( - echo ========================= - echo "Copyright checking failed. Showing first $nheadlines lines of output." - echo "Complete output is in file $COPYRIGHT_ERR" - echo ========================= - nreportlines=`wc -l < $COPYRIGHT_ERR` - if [ $nreportlines -le `expr $nheadlines + $ntaillines` ]; then - # Just print the whole file. - cat $COPYRIGHT_ERR - else - # Show the first $nheadlines plus report summary - head -$nheadlines $COPYRIGHT_ERR - echo ... - tail -$ntaillines $COPYRIGHT_ERR - fi - ) | Mail -s "${H5VERSION} Copyrights check Failed" hdf5lib - fi - rm $TMPFILE - PRINT_BLANK - fi -else - # make sure the cvs update, if done by another host, has completed. - # First wait for the presence of $CVSLOG which signals some host - # has started the cvs update. Then wait for the absense of $CVSLOG_LOCK - # which signals the host has completed the cvs update. - WAITFOR $CVSLOG 90 - if [ $WAIT_STATUS -ne 0 ]; then - errcode=$WAIT_STATUS - REPORT_ERR "****FAILED ${HOSTNAME}: Time expired waiting CVS update to start****" - exit $errcode - fi - WAITTILL $CVSLOG_LOCK 10 - if [ $WAIT_STATUS -ne 0 ]; then - errcode=$WAIT_STATUS - REPORT_ERR "****FAILED ${HOSTNAME}: Time expired waiting CVS update to finish****" - exit $errcode - fi -fi - -# run a snapshot diff to see if any significant differences between -# the current and previous versions -if [ -z "$NODIFF" ]; then - $SNAPSHOT diff >> $DIFFLOG 2>&1 - errcode=$? 
- # check the errcode only if NOT in DEBUG MODE - if [ -z "$DEBUGMODE" -a $errcode -eq 0 ]; then - # no need to run test - PRINT "NO TEST: no significant differences between current and previous versions" | - tee -a $PASSEDLOG - exit 0 - fi -fi - -# we can use the version of script in SNAPYARD/current now. -# Don't do the diff or cvs update any more. -PROGNAME="$SNAPYARD/current/$PROGNAME -nodiff -nocvs" - -# Decide to do test for the local host or for remote hosts -if [ -n "$TESTHOST" -a $HOSTNAME != "$TESTHOST" ]; then - date - PRINT "*** launching tests from $HOSTNAME ***" - PRINT_BLANK - TEST_TYPE="launching" - cd ${SNAPYARD}/log - # Fork off timekeeper if concurrent tests will be used. - if [ -n "$SRCDIR" ]; then - timelimit=`TIMELIMIT_PARSE < $TIMELIMIT` - ($SNAPYARD/current/bin/timekeeper $timelimit > $TIMEKEEPERLOG 2>&1 &) - PRINT " Fork off timekeeper $timelimit" - fi - runtest_type="hosts" - for h in $TESTHOST; do - # Must do CONFIGNAME before $h got changed by the second cut. - # cut returns the whole string if there is no / in the string - # at all. But that works okay for the CONFIGNAME too. - CONFIGNAME=`echo $h | cut -f2 -d/` - h=`echo $h | cut -f1 -d/` - n_test=`expr $n_test + 1` - TMP_OUTPUT="#${h}_${CONFIGNAME}.out" - (PRINT "==============" - PRINT "Testing $h" - PRINT "==============") > $TMP_OUTPUT - CHECK_RSH $h - # run the remote shell command with output to $TMP_OUTPUT - case "$RSH" in - rsh|ssh) - CMD="$RSH $h -n $PROGNAME -configname $CONFIGNAME" - PRINT $CMD - - # launch concurrent tests only if srcdir is used - if [ -n "$SRCDIR" ]; then - $CMD || REPORT_ERR "****FAILED ${h}: Abnormal exit from runtest****" && PRINT_BLANK & - echo $! 
> PID.${h}_${CONFIGNAME} - else - $CMD || REPORT_ERR "****FAILED ${h}: Abnormal exit from runtest****" && PRINT_BLANK - fi - ;; - NoRemoteCommand) - PRINT $h does not accept Remote Command "(`date`)" - ;; - NotReachable) - PRINT $h is not reachable "(`date`)" - ;; - *) - PRINT "CHECK_RSH for $h returned unknow result ($RSH)" - ;; - esac >> $TMP_OUTPUT 2>&1 - done - # wait for all launched tests to finish, then cat them back out. - wait - # Pause a moment in case the timekeeper is terminating processes. - wait 30 - for h in $TESTHOST; do - CONFIGNAME=`echo $h | cut -f2 -d/` - h=`echo $h | cut -f1 -d/` - TMP_OUTPUT="#${h}_${CONFIGNAME}.out" - cat $TMP_OUTPUT - # Verify test script did complete by checking the last lines - (tail -5 $TMP_OUTPUT | grep -s 'Grand total' > /dev/null 2>&1) || - (REPORT_ERR "****FAILED ${h}: snaptest did not complete****" && - PRINT_BLANK) - rm -f $TMP_OUTPUT PID.${h}_${CONFIGNAME} - done - exit 0 -fi - -# run the test(s) -# Note that first field is cut without -s but all subsequent cut -# must use -s. If -s is not used at all, a $TEST_TYPES that has -# no ';' (only 1 test), will pass through intact in all cut. That -# results in infinite looping. -# If -s is used with the first field, it will suppress completely -# a $TYPE_TYPES that has no ';' (only 1 tst ). That results in no -# test at all. -# Note that n_test must start as 1. -# -n_test=1 -runtest_type="tests" -TEST="`echo $TEST_TYPES | cut -f$n_test -d';'`" -while [ -n "$TEST" ]; do - StartTime=`SecOfDay` - RUNSNAPTEST $TEST - REPORT_RESULT - PRINT_TEST_TRAILER - - n_test=`expr $n_test + 1` - TEST="`echo $TEST_TYPES | cut -f$n_test -s -d';'`" -done -# dec n_test to show the actual number of tests ran. -n_test=`expr $n_test - 1` - -PRINT_TRAILER - -# disable trailer summary printing since all trailers have been -# printed and we are exiting normally. 
-trap 0 -exit $errcode diff --git a/bin/snapshot b/bin/snapshot deleted file mode 100755 index 1218caa..0000000 --- a/bin/snapshot +++ /dev/null @@ -1,837 +0,0 @@ -#!/bin/sh -# -# Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. - -# This script should be run nightly from cron. It checks out the source -# from the source repository and compares it against the previous -# snapshot. If anything significant changed then a new snapshot is -# created, the minor version number is incremented, and the change is -# checked back into the source repository. -# - - -# function definitions -TIMESTAMP() -{ - echo "=====" "$1": "`date`" "=====" -} - -EXIT_BANNER() -{ - TIMESTAMP "Exit $PROGNAME with status=$?" -} - -# Show current total disk usage. -DISKUSAGE() -{ - du -ks | \ - ( read x y; echo "Disk Usage=$x KB" ) -} - -# function provided for testing software downloaded as tar files. A version of -# this function that properly extracts the downloaded files can be provided in -# the snapshots-${sw}-overrides file. -EXTRACT() -{ - echo "Error: ${SWVERSION} is in source repository - does not need extraction." -} - -# Standard procedure for checking out or updating source code from an hdfgroup -# git repository. Override the function for other repositories or procedures. -SOURCE_CHECKOUT() -{ - if test -n $GIT_URL; then - if [ -n "$AUTOGEN" ]; then - echo "Creating fresh clone of $GIT_URL in $BASEDIR/current_src" - # Check out the current version from source repository. 
- (cd $BASEDIR; rm -rf current_src - if test -z $GIT_BRANCH; then - echo "Testing empty branch $GIT_BRANCH." - git clone $GIT_URL current_src - else - echo "Testing branch $GIT_BRANCH." - git clone $GIT_URL -b $GIT_BRANCH current_src - fi - ) || exit 1 - else - echo "Creating fresh clone of $GIT_URL in $BASEDIR/current" - # Check out the current version from source repository. - (cd $BASEDIR; rm -rf current - if test -n $GIT_BRANCH; then - git clone $GIT_URL -b $GIT_BRANCH current - else - git clone $GIT_URL current - fi ) || exit 1 - fi - else - echo "Warning! Source directory ("current") is not checked out from git." - fi -} - -# Standard procedure for running the configure command in a build (test) -# directory -RUNCONFIGURE() -{ - if [ "${CURRENT}" != "${TESTDIR}" -a "$CPSRC" = "yes" ]; then - echo "Copying source files to ${TESTDIR}." - cp -pr ${CURRENT}/* ${TESTDIR} - cd ${TESTDIR} - ./${CONFIGURE} - elif [ -n "${AUTOGEN}" ]; then - ${CURRENTSRC}/${CONFIGURE} - else - ${CURRENT}/${CONFIGURE} - fi -} - -# Sometimes "make distclean" doesn't adequately remove files from the previous -# build. If a build (test) directory was used, its contents can be entirely -# deleted to provide a clean start. If the test is building in the source -# directory, the contents can't be deleted, so run "make distclean". -DISTCLEAN() -{ - if [ "${srcdir}" = "yes" -a -n "${SRCDIRNAME}" -a -d ${BASEDIR}/TestDir/${SRCDIRNAME} ]; then - echo "Remove contents of $SRCDIRNAME.\n" - rm -rf ${BASEDIR}/TestDir/${SRCDIRNAME}/* - else - echo "$MAKE distclean" - (cd ${TESTDIR} && ${MAKE} distclean) - fi -} - -# Several of the software packages tested do not support make check-install. 
-# Those that support it should have a version of this function in their -# override with the following lines: -# TIMESTAMP "check-install $1" -# ${MAKE} check-install $1 -CHECKINSTALL() -{ - echo "check-install is not supported for ${SWVERSION}" -} - -# Function for hdf4 and hdf5 to override to check in changes after snapshot. -# Safety measure to avoid unintended checkins to other repositories. -COMMITSNAPSHOT() -{ - echo "original hdf5 script committed code changes back into git." -} - -DISPLAYUSAGE() -{ - set - - cat <<EOF -Usage: $PROGNAME [all] [checkout] [ftp <URL> [diff] [test] [srcdir] [release] [help] - [clean] [distclean] [echo] [deploy <dir>] [deploydir <dir>] - [zlib <zlib_path>] [releasedir <dir>] [srcdirname <dir>] [check-vfd] - [check-passthrough-vol] - [exec <command>] [module-load <module-list>] [op-configure <option>] - [--<option>] - all: Run all commands (checkout, test & release) - [Default is all] - checkout: Run source checkout - diff: Run diff on current and previous versions. Exit 0 if - no significant differences are found. Otherwise, non-zero. - deploy: deploy binary to directory <dir> - deploydir: use <dir> as the default directory for deployment - test: Run test - release: Run release - clean: Run make clean - distclean:Run make distclean - echo: Turn on echo mode (set -x) - setenv <name> <value>: - Set environment variable <name> to <value>. - setenvN <N> <name> <value> ...: - Set environment variable with <N> values. - E.g., setenvN 3 x a b c is same as setenv x="a b c". - srcdir: Use srcdir option (does not imply other commands) - "snapshot srcdir" is equivalent to "snapshot srcdir all" - "snapshot srcdir checkout" is equivalent to "snapshot checkout" - srcdirname <dir>: - Use <dir> as the srcdir testing directory if srcdir is choosen. 
- If <dir> starts with '-', it is append to the default name - E.g., "snapshot srcdir srcdirname -xx" uses hostname-xx - [Default is hostname] - help: Print this message - echo: Turn on shell echo - zlib <zlib_path>: - Use <zlib_path> as the ZLIB locations - [Default is $ZLIB_default] - releasedir <dir>: - Use <dir> as the release directory - [Default is $ReleaseDir_default] - check-vfd: - Run make check-vfd instead of just make check. - check-passthrough-vol: - Run make check-passthrough-vol instead of just make check. - NOTE: Will only succeed with passthrough VOL connectors - that use the native VOL connector as the terminal - connector. - exttest <testscript>; - Run testscript; - exec <command>: - Run <command>; - module-load <module-list>: - Load modules in comma-separated <module-list>; - op-configure <option>: - Pass <option> to the configure command - E.g., "snapshot op-configure --enable-parallel" - configures for parallel mode - --<option>: - Pass --<option> to the configure command - E.g., "snapshot --enable-parallel" - configures for parallel mode -EOF - exit $errcode -} - -# MAIN -# SGI /bin/sh replaces $0 as function name if used in a function. -# Set the name here to avoid that ambiguity and better style too. -PROGNAME=$0 -SNAPSHOTNAME= -HDFREPOS= -DOCVERSION="" -MODULELIST="" - -if [ -f bin/snapshot_params ]; then - . bin/snapshot_params - echo "Added snapshot_params." -fi -if [ -z "$SWVER" -a -f bin/snapshot_version ] -then - . bin/snapshot_version - echo "Added snapshot_version." -fi -if [ -n ${HDFREPOS} -a -f bin/snapshot-${HDFREPOS}-overrides ]; then - . bin/snapshot-${HDFREPOS}-overrides - echo "Added snapshot-${HDFREPOS}-overrides." 
-fi - -echo "=====================================" -echo "$PROGNAME $*" -echo "=====================================" -TIMESTAMP MAIN -uname -a - -# setup exit banner message -trap EXIT_BANNER 0 1 2 9 15 - -# Dump environment variables before option parsing -echo ===Dumping environment variables before option parsing === -printenv | sort -echo ===Done Dumping environment variables before option parsing === - -# snapshots release directory. Default relative to $BASEDIR. -ReleaseDir_default=release_dir - -# Where is the zlib library? -# At NCSA, half of the machines have it in /usr/lib, the other half at -# /usr/ncsa/lib. Leave it unset. -ZLIB_default= -ZLIB=$ZLIB_default - -# What compression methods to use? (md5 does checksum). Doc was apparently -# added as a compression method to create a separate tarfile containing the -# documentation files for v 1.8 and above. -if [ "${SWVERSION}" = "hdf5_1_6" ]; then - METHODS="gzip bzip2 md5" -else - METHODS="gzip bzip2 doc" -fi - -# Use User's MAKE if set. Else use generic make. -MAKE=${MAKE:-make} - -# Default check action. -CHECKVAL=check - -# -# Command options -cmd="all" -test_opt="" -errcode=0 -AUTOGEN="" -EXTTEST="" -EXEC_CMD_ARG="" -while [ $# -gt 0 ] ; do - case "$1" in - all) - cmd="all" - ;; - checkout-autogen) - cmdcheckout="checkout" - AUTOGEN="autogen" - cmd="" - ;; - checkout) - cmdcheckout="checkout" - cmd="" - ;; - ftp) - echo "Setting ftp flags in snapshot script" - cmdcheckout="checkout" - cmdftp="ftp" - cmd="" - shift - if [ $# -lt 1 ]; then - echo "URL missing" - errcode=1 - cmd="help" - break - fi - ftp_url="$1" - echo "ftp_url is $ftp_url" - ;; - diff) - cmddiff="diff" - cmd="" - ;; - deploy) - # deploy the built binary. - shift - if [ $# -lt 1 ]; then - echo "deploy <dir> missing" - errcode=1 - cmd="help" - break - fi - cmddeploy="deploy" - DEPLOYDIRNAME="$1" - ;; - deploydir) - # default directory for deployment. 
- shift - if [ $# -lt 1 ]; then - echo "deploydir <dir> missing" - errcode=1 - cmd="help" - break - fi - deploydir="$1" - ;; - test) - cmdtest="test" - cmd="" - ;; - setenv) - # set environment variable - shift - eval $1="$2" - export $1 - shift - ;; - setenvN) - # set environment variable with $1 values - # e.g., setenvN 3 x a b c is same as setenv x="a b c". - # a kludge now--the extra single quotes are needed - # else eval complains. - shift - envN=$1 - shift - envname=$1 - envalue= - while test $envN -gt 0; do - shift - envalue="$envalue $1" - envN=`expr $envN - 1` - done - eval $envname="'$envalue'" - export $envname - ;; - srcdir) - #use srcdir option for test - srcdir="yes" - ;; - srcdirname) - shift - if [ $# -lt 1 ]; then - echo "srcdirname <dir> missing" - errcode=1 - cmd="help" - break - fi - SRCDIRNAME="$1" - ;; - release) - cmdrel="release" - cmd="" - ;; - autogen-release) - cmdrel="autogen-release" - cmd="" - ;; - clean | distclean) - cmdclean="$1" - cmd="" - ;; - help) - cmd="help" - break - ;; - echo) - set -x - break - ;; - zlib) - shift - if [ $# -lt 1 ]; then - echo "ZLIB information missing" - errcode=1 - cmd="help" - break - fi - ZLIB="$1" - ;; - releasedir) - shift - if [ $# -lt 1 ]; then - echo "Release directory name missing" - errcode=1 - cmd="help" - break - fi - ReleaseDir="$1" - ;; - exttest) - shift - if [ $# -lt 1 ]; then - echo "exttest script name missing" - errcode=1 - cmd="help" - break - fi - cmd="" - EXTTEST="$1" - ;; - exec) - shift - if [ $# -lt 1 ]; then - echo "exec command name missing" - errcode=1 - cmd="help" - break - fi - cmd="" - EXEC_CMD_ARG="$@" - # exit the parsing while loop since all arguments have been consummed. 
- break - ;; - check-vfd) - CHECKVAL=check-vfd - ;; - check-passthrough-vol) - CHECKVAL=check-passthrough-vol - ;; - module-load) - shift - if [ $# -lt 1 ]; then - echo "missing module list to load" - errcode=1 - cmd="help" - break - fi - MODULELIST="$1" - ;; - --*) - OP_CONFIGURE="$OP_CONFIGURE $1" - ;; - op-configure) - shift - if [ $# -lt 1 ]; then - echo "op-configure option missing" - errcode=1 - cmd="help" - break - fi - OP_CONFIGURE="$OP_CONFIGURE $1" - ;; - *) - echo "Unkown option $1" - errcode=1 - cmd="help" - break - ;; - esac - shift -done - -if [ -n "$MODULELIST" ]; then - . ~/.bashrc - module use /opt/pkgs/modules/all - # load module command will take a space separated list of modules. - # If we have a comma separated list, convert ',' to ' '. - MODULELIST="$( echo -e "$MODULELIST" | tr ',' ' ' )" - module load $MODULELIST -fi - -# Dump environment variables after option parsing -echo ===Dumping environment variables after option parsing === -printenv | sort -echo ===Done Dumping environment variables after option parsing === - -if [ "$cmd" = help ]; then - DISPLAYUSAGE -fi - -# Setup the proper configure option (--with-zlib) to use zlib library -# provide ZLIB is non-empty. -ZLIB=${ZLIB:+"--with-zlib="$ZLIB} -# Adding --prefix as a configure option will put the path to the deploy -# directory in the initial libhdf5*.la files -if [ -n "$DEPLOYDIRNAME" ]; then - OP_CONFIGURE="$OP_CONFIGURE --prefix=${deploydir}/${DEPLOYDIRNAME}" -fi -CONFIGURE="configure $OP_CONFIGURE" -# echo "Configure command is $CONFIGURE" - -# Execute the requests -snapshot=yes - -BASEDIR=${HOME}/snapshots-${SNAPSHOTNAME} -if [ ! -d ${BASEDIR} ]; then - echo "BASEDIR ($BASEDIR) does not exist" - exit 1 -fi - -CURRENT=${BASEDIR}/current -PREVIOUS=${BASEDIR}/previous -ReleaseDir=${ReleaseDir:=${BASEDIR}/${ReleaseDir_default}} -HOSTNAME=`hostname | cut -f1 -d.` # no domain part - -# Try finding a version of diff that supports the -I option too. 
-DIFF=diff -for d in `echo $PATH | sed -e 's/:/ /g'` ; do - test -x $d/diff && $d/diff -I XYZ /dev/null /dev/null > /dev/null 2>&1 && - DIFF=$d/diff && break -done - -#============================= -# Run source checkout -#============================= -if [ "$cmd" = "all" -o -n "$cmdcheckout" ]; then - TIMESTAMP "checkout" - # ${BASEDIR}/bin is now updated from git by EveningMaint or DailyMaint - # to avoid updating the scripts in ${BASEDIR}/bin while they are running. - - if [ -z "$AUTOGEN" ]; then - # If there is a Makefile in ${CURRENT}, the last test done in it - # has not been distclean'ed. They would interfere with other - # --srcdir build since make considers the files in ${CURRENT} - # take precedence over files in its own build-directory. Run - # a "make distclean" to clean them all out. This is not really - # part of the "checkout" functions but this is the most convenient - # spot to do the distclean. We will also continue the checkout process - # regardless of the return code of distclean. - ( cd ${CURRENT}; test -f Makefile && ${MAKE} distclean) - fi - # echo "cmdftp is $cmdftp; ftp_url is $ftp_url" - if [ -n "$cmdftp" ]; then - echo "Get the NetCDF4 source from their ftp server." - echo "Command executed is: 2>&1 wget -N $ftp_url" - cd ${BASEDIR}; - WGET_OUTPUT="`2>&1 wget -N $ftp_url`" - errcode=$? - if [[ $errcode -ne 0 ]]; then - exit $errcode - fi - - if [ $? -ne 0 ];then - echo $0: "$WGET_OUTPUT" Exiting. - exit 1 - fi - - # echo "Wget output was $WGET_OUTPUT" - - if echo "$WGET_OUTPUT" | fgrep 'not retrieving' &> /dev/null - then - echo "Snapshot unchanged" - else - echo "New snapshot downloaded" - EXTRACT - fi - else - SOURCE_CHECKOUT - fi -fi # Do source checkout - - -#============================= -# Run Test the HDF5 library -#============================= -if [ "$cmd" = "all" -o -n "$cmdtest" -o -n "$cmddiff" ]; then - TIMESTAMP "Run Tests" - # setup if srcdir is used. 
- if [ -z "$srcdir" ]; then - TESTDIR=${CURRENT} - else - #create TESTDIR if not exist yet - case "$SRCDIRNAME" in - "") - SRCDIRNAME=$HOSTNAME - ;; - -*) - SRCDIRNAME="$HOSTNAME$SRCDIRNAME" - ;; - esac - TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME} - test -d ${TESTDIR} || mkdir ${TESTDIR} - # set TESTDIR to use the direct path to the local test directory - # rather than the path through ${BASEDIR}. - cd ${TESTDIR} - TESTDIR=`pwd -P` - cd ${CURRENT} - fi - # Make sure current version exists and is clean - if [ -d ${TESTDIR} ]; then - DISTCLEAN - else - errcode=$? - snapshot=no - exit $errcode - fi - - # Compare it with the previous version. Compare only files listed in - # the MANIFEST plus the MANIFEST itself. - if [ -d ${PREVIOUS} ]; then - if [ -z "${AUTOGEN}" ]; then - CURRENTSRC=${CURRENT} - else - CURRENTSRC=${BASEDIR}/current_src - fi - if (${DIFF} -c ${PREVIOUS}/MANIFEST ${CURRENTSRC}/MANIFEST); then - snapshot=no - for src in `grep '^\.' ${CURRENTSRC}/MANIFEST|expand|cut -f1 -d' '`; do - if ${DIFF} -I H5_VERS_RELEASE -I " released on " \ - -I " currently under development" \ - ${PREVIOUS}/$src ${CURRENTSRC}/$src - then - : #continue - else - snapshot=yes - break - fi - done - fi - fi - - # if diff is choosen, exit 0 if no significant differences are found. - # otherwise, exit 1. This includes cases of other failures. - if [ -n "$cmddiff" ]; then - if [ $snapshot = no ]; then - exit 0 - else - exit 1 - fi - fi - - #============================= - # Execute command if defined - #============================= - #echo BEFORE EXEC command - #echo EXEC_CMD_ARG=${EXEC_CMD_ARG} - - if [ -n "$EXEC_CMD_ARG" ]; then - TIMESTAMP ${EXEC_CMD_ARG} - TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME} - test -d ${TESTDIR} || mkdir ${TESTDIR} - if cd ${TESTDIR}; then - # clean up the directory before executing the command - # Do we need to clean first? - # rm -rf * - # - # If EXEC_CMD_ARG starts with a '/', it has an absolute path, else it is - # relative to the BASEDIR. 
- case "$EXEC_CMD_ARG" in - /*) - ${EXEC_CMD_ARG} - ;; - *) - ${BASEDIR}/${EXEC_CMD_ARG} - ;; - esac - errcode=$? - else - echo "${TESTDIR} not accessible" - errcode=1 - fi - # exit snapshot since nothing else to do, for now. - exit $errcode - fi - - # Build, run tests and install procedures - if [ "$snapshot" = "yes" ] && [ "$NOMAKE" != "yes" ]; then - FAIL_SECTION="" - if [ -f ${TESTDIR}/failsection ]; then - rm ${TESTDIR}/failsection - fi - if (cd ${TESTDIR} && \ - TIMESTAMP "configure" && echo "configure" > ${TESTDIR}/failsection && \ - RUNCONFIGURE && \ - sleep 2 && \ - TIMESTAMP "make" && echo "make" > ${TESTDIR}/failsection && \ - ${MAKE} && DISKUSAGE \ - TIMESTAMP ${CHECKVAL} && echo "make check" > ${TESTDIR}/failsection && \ - ${MAKE} ${CHECKVAL} && DISKUSAGE \ - TIMESTAMP "install" && echo "make install" > ${TESTDIR}/failsection && \ - ${MAKE} install && DISKUSAGE \ - TIMESTAMP "check-install" && echo "make check-install" > ${TESTDIR}/failsection && \ - CHECKINSTALL && DISKUSAGE \ - TIMESTAMP "uninstall" && echo "make uninstall" > ${TESTDIR}/failsection && \ - ${MAKE} uninstall && DISKUSAGE); then - : - else - errcode=$? - FAIL_SECTION=`cat ${TESTDIR}/failsection` - echo "Failed running ${FAIL_SECTION}" - snapshot=no - exit $errcode - fi - elif [ $CPSRC ]; then - cp -pr ${CURRENT}/* ${TESTDIR} - else - cmdclean="" - fi -fi # Test the HDF5 library - -# Run external test if configured - -#============================= -#============================= -#if [ -d "$CURRENT" ]; then -if [ "$EXTTEST" != "" ]; then - TIMESTAMP ${EXTTEST} - TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME} - test -d ${TESTDIR} || mkdir ${TESTDIR} - cd ${TESTDIR} - sleep 1 - TIMESTAMP $pwd - ls - ${BASEDIR}/${EXTTEST} - errcode=$? - exit $errcode -fi - -#============================= -# Run deployment if requested. -#============================= -if [ -n "$DEPLOYDIRNAME" ]; then - # The daily tests deploy to .../hdf5/... or .../hdf4/... 
except on cobalt where the - # deploy directory is in .../HDF5/... lc will take care of this. If hdf4 or hdf5 - # either upper or lower case isn't in the path, RELEASE.txt won't be found unless - # it is in $CURRENT. - POS4=`perl -e "print index(lc(\"${deploydir}/${DEPLOYDIRNAME}\"), 'hdf4')"` - POS5=`perl -e "print index(lc(\"${deploydir}/${DEPLOYDIRNAME}\"), 'hdf5')"` - if [ "${POS4}" -ge "0" ]; then - RELEASE_TXT_LOC="release_notes" - elif [ "${POS5}" -ge "0" ]; then - RELEASE_TXT_LOC="release_docs" - else - RELEASE_TXT_LOC="" - fi - - if [ "$snapshot" = "yes" ]; then - TIMESTAMP "deploy" - if (cd ${TESTDIR} && - ${BASEDIR}/bin/deploy ${deploydir}/${DEPLOYDIRNAME} && \ - TIMESTAMP "clean" && \ - ${MAKE} clean && \ - TIMESTAMP "check-install prefix=${deploydir}/${DEPLOYDIRNAME}" && \ - CHECKINSTALL prefix=${deploydir}/${DEPLOYDIRNAME}); then - cd ${CURRENT} - cp ${RELEASE_TXT_LOC}/RELEASE.txt ${deploydir}/${DEPLOYDIRNAME} - cp COPYING ${deploydir}/${DEPLOYDIRNAME} - #: #continue - else - errcode=$? - exit $errcode - fi - fi -fi # Deploy - - -#============================= -# Run Release snapshot, update version, and commit to source repository -#============================= -if [ "$cmd" = "all" -o -n "$cmdrel" ]; then - if [ "$snapshot" = "yes" ]; then - TIMESTAMP "release" - DISTCLEAN - ( - # Turn on exit on error in the sub-shell so that it does not - # commit source if errors encounter here. - set -e - if [ "$cmdrel" = "autogen-release" ]; then - cd ${BASEDIR}/current_src - else - cd ${CURRENT} - fi - if [ "$HDFREPOS" = "hdf4" ]; then - RELEASE_VERSION="`perl bin/h4vers -v`" - echo "Making snapshot release ($RELEASE_VERSION) to ${ReleaseDir}..." - bin/release -d $ReleaseDir $METHODS - perl bin/h4vers -i - elif [ "$HDFREPOS" = "hdf5" ]; then - RELEASE_VERSION="`perl bin/h5vers -v`" - echo "Making snapshot release ($RELEASE_VERSION) to ${ReleaseDir}..." 
- if [ "${DOCVERSION}" ]; then - bin/release -d $ReleaseDir --docver ${DOCVERSION} $METHODS - else - bin/release -d $ReleaseDir $METHODS - fi - perl bin/h5vers -i - else - echo "need real release steps. For now, only move current version to previous" - fi - COMMITSNAPSHOT - ) - errcode=$? - fi - - # Replace the previous version with the current version. - # Should check if the errcode of the release process but there - # are other failures after release was done (e.g. h5vers or git failures) - # that should allow the replacement to occure. - rm -rf ${PREVIOUS} - mv ${CURRENT} ${PREVIOUS} -fi #Release snapshot - - -#============================= -# Clean the test area. Default is no clean. -#============================= -if [ -n "$cmdclean" ] && [ "$NOMAKE" != "yes" ]; then - TIMESTAMP "clean" - # setup if srcdir is used. - if [ -z "$srcdir" ]; then - TESTDIR=${CURRENT} - else - case "$SRCDIRNAME" in - "") - SRCDIRNAME=$HOSTNAME - ;; - -*) - SRCDIRNAME="$HOSTNAME$SRCDIRNAME" - ;; - esac - TESTDIR=${BASEDIR}/TestDir/${SRCDIRNAME} - fi - # clean it - if (cd ${TESTDIR} && ${MAKE} $cmdclean ) then - : - else - errcode=$? - snapshot=no - exit $errcode - fi -fi # Clean the Test directory - -exit $errcode diff --git a/bin/snapshot_version b/bin/snapshot_version deleted file mode 100644 index 8aad05d..0000000 --- a/bin/snapshot_version +++ /dev/null @@ -1,19 +0,0 @@ -# -# Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -# - -# default version for snapshot test -# H5VERSION matches with a source version symbolic name. 
Will test use the -# latest revision of that branch. If set to "hdf5", it uses the main -# version. -# H5VER tells runtest which version to run -H5VERSION=hdf5 diff --git a/bin/switch_maint_mode b/bin/switch_maint_mode index fb1568b..af63d6a 100755 --- a/bin/switch_maint_mode +++ b/bin/switch_maint_mode @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # diff --git a/bin/timekeeper b/bin/timekeeper deleted file mode 100755 index 14adf2c..0000000 --- a/bin/timekeeper +++ /dev/null @@ -1,129 +0,0 @@ -#!/bin/sh -## -# Copyright by The HDF Group. -# Copyright by the Board of Trustees of the University of Illinois. -# All rights reserved. -# -# This file is part of HDF5. The full HDF5 copyright notice, including -# terms governing use, modification, and redistribution, is contained in -# the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. -# If you do not have access to either file, you may request a copy from -# help@hdfgroup.org. -## -# As a time keeper of the remote daily test process launched by runtest. -# It sleeps for a certain time and then wakes up to hangup those processes -# that are still around, assuming they have run too long. -# -# Programmer: Albert Cheng -# Created Date: 2004/12/23 - -# variable initialization -waitminutes=300 # default to 5 hours == 300 minutes -debugtimelimit= -debugflag= # no debug - -# Function definitions -# -# PRINTMSG -# Print a one line message left justified in a field of 70 characters -# without newline. 
More output for this line later. -# -PRINTMSG() { - SPACES=" " - echo "$* $SPACES" | cut -c1-70 | tr -d '\012' -} - - -USAGE() -{ - echo "Usage: %0 [-h] [-debug] [<time-limit>]" - echo " Run timekeeper with <time-limit> minutes, default is $waitminutes." - echo " If <time-limit> is in the form of HH:MM, it means wait till then." - echo " -h print this help page" - echo " -debug run debug mode" -} - - -ParseOption() -{ - if [ $# -gt 0 -a "$1" = -h ]; then - shift - USAGE - exit 0 - fi - if [ $# -gt 0 -a "$1" = -debug ]; then - shift - debugflag=yes - waitminutes=1 # use shorter time for debug - fi - if [ $# -gt 0 ]; then - targettime=$1 - shift - - # find out it is minutes to wait or HH:MM to wake up - case $targettime in - *:*) # HH:MM - currenttime=`date +%H:%M` - currenthour=`echo $currenttime | cut -f1 -d:` - currentminute=`echo $currenttime | cut -f2 -d:` - targethour=`echo $targettime | cut -f1 -d:` - targetminute=`echo $targettime | cut -f2 -d:` - waitminutes=`expr \( $targethour - $currenthour \) \* 60 + $targetminute - $currentminute` - if test $waitminutes -le 0; then - # target time is in tomorrow, add 1 day of minutes - waitminutes=`expr 24 \* 60 + $waitminutes` - fi - ;; - *) - waitminutes=$targettime - ;; - esac - fi -} - - -# Main body -echo "Timekeeper started at `date`" -ParseOption $* -waitperiod=`expr $waitminutes \* 60` # convert to seconds - -if [ -z "$debugflag" ]; then - # normal time keeping mode - # sleep first - echo Timekeeper sleeping for $waitperiod seconds - sleep $waitperiod - # Look for any processes still around - echo "Timekeeper woke up at `date`, looking for processes to terminate..." - for x in PID.* ; do - if [ -f $x ]; then - pid=`cat $x` - # check if process is still around - if test X$pid \!= X && ps -p $pid > /dev/null; then - echo "terminating process $x ($pid)" - kill -HUP $pid - echo "Remote shell command ended. But some processes might still be" - echo "running in the remote machine. Login there to verify." 
- fi - fi - done -else - # Debug mode. Launch two rsh process, one ends before, the other after - # waitperiod. Must launch timekeeper from a subshell, else the debug - # will wait for it too. - myhostname=`hostname` - ( $0 $waitminutes &) - debugtimelimit=`expr $waitperiod - 10` - echo rsh $myhostname sleep $debugtimelimit - rsh $myhostname sleep $debugtimelimit & - echo $! > PID.before - debugtimelimit=`expr $waitperiod + 10` - echo rsh $myhostname sleep $debugtimelimit - rsh $myhostname sleep $debugtimelimit & - echo $! > PID.after - - wait - rm PID.before PID.after -fi - -echo "Timekeeper ended at `date`" @@ -1,4 +1,4 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl ## # Copyright by The HDF Group. # Copyright by the Board of Trustees of the University of Illinois. @@ -7,11 +7,12 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. ## require 5.003; +use warnings; $Source = ""; ############################################################################## @@ -27,58 +28,106 @@ $Source = ""; # usually the same as the package name. 
# %TypeString = ("haddr_t" => "a", + "H5A_info_t" => "Ai", + "H5A_operator1_t" => "Ao", + "H5A_operator2_t" => "AO", "hbool_t" => "b", + "H5AC_cache_config_t" => "Cc", + "H5AC_cache_image_config_t" => "CC", "double" => "d", "H5D_alloc_time_t" => "Da", + "H5D_append_cb_t" => "DA", "H5FD_mpio_collective_opt_t" => "Dc", "H5D_fill_time_t" => "Df", "H5D_fill_value_t" => "DF", + "H5D_gather_func_t" => "Dg", "H5FD_mpio_chunk_opt_t" => "Dh", "H5D_mpio_actual_io_mode_t" => "Di", + "H5FD_file_image_callbacks_t" => "DI", "H5D_chunk_index_t" => "Dk", "H5D_layout_t" => "Dl", "H5D_mpio_no_collective_cause_t" => "Dn", "H5D_mpio_actual_chunk_opt_mode_t" => "Do", + "H5D_operator_t" => "DO", "H5D_space_status_t" => "Ds", - "H5D_vds_view_t" => "Dv", + "H5D_scatter_func_t" => "DS", "H5FD_mpio_xfer_t" => "Dt", + "H5D_vds_view_t" => "Dv", + "H5FD_class_value_t" => "DV", + "H5D_chunk_iter_op_t" => "x", "herr_t" => "e", + "H5E_auto1_t" => "Ea", + "H5E_auto2_t" => "EA", + "H5ES_event_complete_func_t" => "EC", "H5E_direction_t" => "Ed", "H5E_error_t" => "Ee", - "H5E_type_t" => "Et", + "H5ES_event_insert_func_t" => "EI", "H5ES_status_t" => "Es", + "H5E_type_t" => "Et", + "H5FD_class_t" => "FC", "H5F_close_degree_t" => "Fd", "H5F_fspace_strategy_t" => "Ff", - "H5F_file_space_type_t" => "Ff", + "H5F_flush_cb_t" => "FF", + "H5F_info2_t" => "FI", "H5F_mem_t" => "Fm", "H5F_scope_t" => "Fs", - "H5F_fspace_type_t" => "Ft", + "H5F_file_space_type_t" => "Ft", "H5F_libver_t" => "Fv", + "H5G_iterate_t" => "Gi", "H5G_obj_t" => "Go", "H5G_stat_t" => "Gs", "hsize_t" => "h", + "H5_alloc_stats_t" => "Ha", + "H5_atclose_func_t" => "Hc", "hssize_t" => "Hs", - "H5E_major_t" => "i", - "H5E_minor_t" => "i", - "H5_iter_order_t" => "Io", - "H5_index_t" => "Ii", + "H5E_major_t" => "i", # H5E_major_t is typedef'd to hid_t + "H5E_minor_t" => "i", # H5E_minor_t is typedef'd to hid_t "hid_t" => "i", + "H5I_future_discard_func_t" => "ID", + "H5I_free_t" => "If", + "H5_index_t" => "Ii", + "H5I_iterate_func_t" => "II", + 
"H5_iter_order_t" => "Io", + "H5I_future_realize_func_t" => "IR", "int" => "Is", "int32_t" => "Is", + "H5I_search_func_t" => "IS", + "H5I_type_t" => "It", "unsigned" => "Iu", "unsigned int" => "Iu", "uint32_t" => "Iu", - "uint64_t" => "UL", - "H5I_type_t" => "It", + "H5O_token_t" => "k", + "H5L_iterate1_t" => "Li", + "H5L_iterate2_t" => "LI", "H5G_link_t" => "Ll", #Same as H5L_type_t now "H5L_type_t" => "Ll", + "H5L_elink_traverse_t" => "Lt", + "H5MM_allocate_t" => "Ma", "MPI_Comm" => "Mc", + "H5MM_free_t" => "Mf", "MPI_Info" => "Mi", + "H5M_iterate_t" => 'MI', "H5FD_mem_t" => "Mt", "off_t" => "o", + "H5O_iterate1_t" => "Oi", + "H5O_iterate2_t" => "OI", + "H5O_mcdt_search_cb_t" => "Os", "H5O_type_t" => "Ot", "H5P_class_t" => "p", - "hobj_ref_t" => "r", + "H5P_cls_create_func_t" => "Pc", + "H5P_prp_create_func_t" => "PC", + "H5P_prp_delete_func_t" => "PD", + "H5P_prp_get_func_t" => "PG", + "H5P_iterate_t" => "Pi", + "H5P_cls_close_func_t" => "Pl", + "H5P_prp_close_func_t" => "PL", + "H5P_prp_compare_func_t" => "PM", + "H5P_cls_copy_func_t" => "Po", + "H5P_prp_copy_func_t" => "PO", + "H5P_prp_set_func_t" => "PS", + "hdset_reg_ref_t" => "Rd", + "hobj_ref_t" => "Ro", + "H5R_ref_t" => "Rr", "H5R_type_t" => "Rt", "char" => "s", "unsigned char" => "s", @@ -87,20 +136,25 @@ $Source = ""; "H5S_sel_type" => "St", "htri_t" => "t", "H5T_cset_t", => "Tc", + "H5T_conv_t" => "TC", "H5T_direction_t", => "Td", + "H5T_pers_t" => "Te", + "H5T_conv_except_func_t" => "TE", "H5T_norm_t" => "Tn", "H5T_order_t" => "To", "H5T_pad_t" => "Tp", - "H5T_pers_t" => "Te", "H5T_sign_t" => "Ts", "H5T_class_t" => "Tt", "H5T_str_t" => "Tz", "unsigned long" => "Ul", "unsigned long long" => "UL", + "uint64_t" => "UL", "H5VL_attr_get_t" => "Va", + "H5VL_blob_optional_t" => "VA", "H5VL_attr_specific_t" => "Vb", - "H5VL_class_value_t" => "VC", + "H5VL_blob_specific_t" => "VB", "H5VL_dataset_get_t" => "Vc", + "H5VL_class_value_t" => "VC", "H5VL_dataset_specific_t" => "Vd", "H5VL_datatype_get_t" => "Ve", 
"H5VL_datatype_specific_t" => "Vf", @@ -108,83 +162,58 @@ $Source = ""; "H5VL_file_specific_t" => "Vh", "H5VL_group_get_t" => "Vi", "H5VL_group_specific_t" => "Vj", - "H5VL_link_create_type_t" => "Vk", + "H5VL_link_create_t" => "Vk", "H5VL_link_get_t" => "Vl", + "H5VL_get_conn_lvl_t" => "VL", "H5VL_link_specific_t" => "Vm", "H5VL_object_get_t" => "Vn", + "H5VL_request_notify_t" => "VN", "H5VL_object_specific_t" => "Vo", "H5VL_request_specific_t" => "Vr", - "void" => "x", - "FILE" => "x", - "H5A_operator_t" => "x", - "H5A_operator1_t" => "x", - "H5A_operator2_t" => "x", - "H5A_info_t" => "x", - "H5AC_cache_config_t" => "x", - "H5AC_cache_image_config_t" => "x", - "H5D_append_cb_t" => "x", - "H5D_gather_func_t" => "x", - "H5D_operator_t" => "x", - "H5D_scatter_func_t" => "x", - "H5E_auto_t" => "x", - "H5E_auto1_t" => "x", - "H5E_auto2_t" => "x", - "H5E_walk_t" => "x", - "H5E_walk1_t" => "x", - "H5E_walk2_t" => "x", - "H5F_flush_cb_t" => "x", - "H5F_info1_t" => "x", - "H5F_info2_t" => "x", - "H5F_retry_info_t" => "x", - "H5FD_t" => "x", - "H5FD_class_t" => "x", - "H5FD_stream_fapl_t" => "x", - "H5FD_ros3_fapl_t" => "x", - "H5FD_hdfs_fapl_t" => "x", - "H5FD_file_image_callbacks_t" => "x", - "H5G_iterate_t" => "x", - "H5G_info_t" => "x", - "H5I_free_t" => "x", - "H5I_iterate_func_t" => "x", - "H5I_search_func_t" => "x", - "H5L_class_t" => "x", - "H5L_elink_traverse_t" => "x", - "H5L_iterate_t" => "x", - "H5M_iterate_t" => 'x', - "H5MM_allocate_t" => "x", - "H5MM_free_t" => "x", - "H5O_info_t" => "x", - "H5O_iterate_t" => "x", - "H5O_mcdt_search_cb_t" => "x", - "H5P_cls_create_func_t" => "x", - "H5P_cls_copy_func_t" => "x", - "H5P_cls_close_func_t" => "x", - "H5P_iterate_t" => "x", - "H5P_prp_create_func_t" => "x", - "H5P_prp_copy_func_t" => "x", - "H5P_prp_close_func_t" => "x", - "H5P_prp_delete_func_t" => "x", - "H5P_prp_get_func_t" => "x", - "H5P_prp_set_func_t" => "x", - "H5P_prp_compare_func_t" => "x", - "H5T_cdata_t" => "x", - "H5T_conv_t" => "x", - 
"H5T_conv_except_func_t" => "x", - "H5VL_t" => "x", - "H5VL_class_t" => "x", - "H5VL_loc_params_t" => "x", - "H5VL_request_notify_t" => "x", - "H5Z_func_t" => "x", - "H5Z_filter_func_t" => "x", + "H5VL_attr_optional_t" => "Vs", + "H5VL_subclass_t" => "VS", + "H5VL_dataset_optional_t" => "Vt", + "H5VL_datatype_optional_t" => "Vu", + "H5VL_file_optional_t" => "Vv", + "H5VL_group_optional_t" => "Vw", + "H5VL_link_optional_t" => "Vx", + "H5VL_object_optional_t" => "Vy", + "H5VL_request_optional_t" => "Vz", "va_list" => "x", + "void" => "x", "size_t" => "z", "H5Z_SO_scale_type_t" => "Za", "H5Z_class_t" => "Zc", "H5Z_EDC_t" => "Ze", "H5Z_filter_t" => "Zf", + "H5Z_filter_func_t" => "ZF", "ssize_t" => "Zs", +# Types below must be defined here, as they appear in function arguments, +# but they are not yet supported in the H5_trace_args() routine yet. If +# they are used as an actual parameter type (and not just as a pointer to +# to the type), they must have a "real" abbreviation added (like the ones +# above), moved to the section of entries above, and support for displaying +# the type must be added to H5_trace_args(). + "H5ES_err_info_t" => "#", + "H5FD_t" => "#", + "H5FD_hdfs_fapl_t" => "#", + "H5FD_mirror_fapl_t" => "#", + "H5FD_ros3_fapl_t" => "#", + "H5FD_splitter_vfd_config_t" => "#", + "H5L_class_t" => "#", + "H5VL_class_t" => "#", + "H5VL_loc_params_t" => "#", + "H5VL_request_status_t" => "#", ); + +############################################################################## +# Maximum length of H5TRACE macro line +# If the ColumnLimit in .clang-format is changed, this value will need to be updated +# +my $max_trace_macro_line_len = 110; + + ############################################################################## # Print an error message. # @@ -215,8 +244,15 @@ sub argstring ($$$) { # Normalize the data type by removing redundant white space, # certain type qualifiers, and indirection. 
- $atype =~ s/^\bconst\b//; + $atype =~ s/^\bconst\b//; # Leading const + $atype =~ s/\s*const\s*//; # const after type, possibly in the middle of '*'s + $atype =~ s/^\bstatic\b//; $atype =~ s/\bH5_ATTR_UNUSED\b//g; + $atype =~ s/\bH5_ATTR_DEPRECATED_USED\b//g; + $atype =~ s/\bH5_ATTR_NDEBUG_UNUSED\b//g; + $atype =~ s/\bH5_ATTR_DEBUG_API_USED\b//g; + $atype =~ s/\bH5_ATTR_PARALLEL_UNUSED\b//g; + $atype =~ s/\bH5_ATTR_PARALLEL_USED\b//g; $atype =~ s/\s+/ /g; $ptr = length $1 if $atype =~ s/(\*+)//; $atype =~ s/^\s+//; @@ -234,53 +270,65 @@ sub argstring ($$$) { --$ptr; $tstr = $TypeString{"$atype*"}; } elsif (!exists $TypeString{$atype}) { - errmesg $file, $func, "untraceable type \`$atype", '*'x$ptr, "\'"; +# Defer throwing error until type is actually used +# errmesg $file, $func, "untraceable type \`$atype", '*'x$ptr, "\'"; } else { $tstr = $TypeString{$atype}; } - return ("*" x $ptr) . ($array?"[$array]":"") . $tstr; + return ("*" x $ptr) . ($array ? "[$array]" : "") . $tstr; } ############################################################################## # Given information about an API function, rewrite that function with # updated tracing information. # +my $file_api = 0; +my $file_args = 0; +my $total_api = 0; +my $total_args = 0; sub rewrite_func ($$$$$) { my ($file, $type, $name, $args, $body) = @_; - my ($arg,$trace); - my (@arg_name, @arg_str); + my ($arg, $trace, $argtrace); + my (@arg_name, @arg_str, @arg_type); local $_; + # Keep copy of original arguments + my $orig_args = $args; + # Parse return value my $rettype = argstring $file, $name, $type; - goto error if $rettype =~ /!/; # Parse arguments if ($args eq "void") { - $trace = "H5TRACE0(\"$rettype\",\"\");\n"; + $trace = "H5TRACE0(\"$rettype\", \"\");\n"; + $argtrace = "H5ARG_TRACE0(\"\")"; } else { # Split arguments. First convert `/*in,out*/' to get rid of the - # comma, then split the arguments on commas. 
- $args =~ s/(\/\*\s*in),\s*(out\s*\*\/)/$1_$2/g; + # comma and remove lines beginning with a '#', then split the arguments + # on commas. + $args =~ s/(\/\*\s*in),\s*(out\s*\*\/)/$1_$2/g; # Get rid of comma in 'in,out' + $args =~ s/H5FL_TRACK_PARAMS//g; # Remove free list macro + $args =~ s/\n#.*?\n/\n/g; # Remove lines beginning with '#' my @args = split /,[\s\n]*/, $args; my $argno = 0; my %names; for $arg (@args) { - if($arg=~/\w*\.{3}\w*/){ + if($arg=~/\w*\.{3}\w*/){ # Skip "..." for varargs parameter next; } - unless ($arg=~/^(([a-z_A-Z]\w*\s+)+\**) + unless ($arg=~/^((\s*[a-z_A-Z](\w|\*)*\s+)+(\s*\*\s*|\s*const\s*|\s*volatile\s*)*) ([a-z_A-Z]\w*)(\[.*?\])? (\s*\/\*\s*(in|out|in_out)\s*\*\/)?\s*$/x) { errmesg $file, $name, "unable to parse \`$arg\'"; goto error; } else { - my ($atype, $aname, $array, $adir) = ($1, $3, $4, $6); + my ($atype, $aname, $array, $adir) = ($1, $5, $6, $8); $names{$aname} = $argno++; $adir ||= "in"; $atype =~ s/\s+$//; push @arg_name, $aname; + push @arg_type, $atype; if ($adir eq "out") { push @arg_str, "x"; @@ -301,71 +349,184 @@ sub rewrite_func ($$$$$) { } } } + + # Compose the trace macro $trace = "H5TRACE" . scalar(@arg_str) . "(\"$rettype\", \""; + $argtrace = "H5ARG_TRACE" . scalar(@arg_str) . "(__func__, \""; $trace .= join("", @arg_str) . "\""; - my $len = 4 + length $trace; - for (@arg_name) { - if ($len + length >= 77) { - $trace .= ",\n $_"; - $len = 13 + length; + $argtrace .= join("", @arg_str) . "\""; + + # Add 4 for indenting the line + my $len = 4 + length($trace); + + for my $i (0 .. $#arg_name) { + # Handle wrapping + + # Be VERY careful here! clang-format and this script MUST agree + # on which lines get wrapped or there will be churn as each tries + # to undo the other's output. 
+ # + # TWO cases must be handled: + # 1) The argument is that last one and ');' will be appended + # 2) The argument is NOT the last one and ',' will be appended + # + # NB: clang-format does NOT consider terminal newlines when + # counting columns for the ColumnLimit + # + # The extra '2' added after $len includes the ', ' that would be + # added BEFORE the argument. + # + my $adjust = ($i + 1 == scalar(@arg_str)) ? 2 : 1; + my $len_if_added = $len + 2 + length($arg_name[$i]) + $adjust; + + # Wrap lines that will be longer than the limit + if ($len_if_added > $max_trace_macro_line_len) { + # Wrap line, with indentation + $trace .= ",\n "; + $len = 13; # Set to 13, for indentation + + # Indent an extra space to account for extra digit in 'H5TRACE' macro + if (scalar(@arg_str) >= 10) { + $trace .= " "; + $len++; + } } else { - $trace .= ", $_"; - $len += 1 + length; + $trace .= ", "; + $len += 2; # Add 2, for ', ' } + + # Append argument + $trace .= "$arg_name[$i]"; + $argtrace .= ", $arg_name[$i]"; + + # Add length of appended argument name + $len += length($arg_name[$i]); } + + # Append final ');' for macro $trace .= ");\n"; + $argtrace .= ")"; } - goto error if grep {/!/} @arg_str; - - # The H5TRACE() statement - if ($body =~ /\/\*[ \t]*NO[ \t]*TRACE[ \t]*\*\//) { - # Ignored due to NO TRACE comment. - } elsif ($body =~ s/((\n[ \t]*)H5TRACE\d+\s*\(.*?\);)\n/"$2$trace"/es) { - # Replaced an H5TRACE macro. - } elsif ($body=~s/((\n[ \t]*)FUNC_ENTER\w*[ \t]*(\(.*?\))?;??)\n/"$1$2$trace"/es) { - # Added an H5TRACE macro after a FUNC_ENTER macro. - } else { - errmesg $file, $name, "unable to insert tracing information"; - print "body = ", $body, "\n"; - goto error; + + # Check for API / non-API routine name + if( $name =~ /H5[A-Z]{0,2}[a-z].*/) { + # The H5TRACE() statement, for API routines + if ($body =~ /\/\*[ \t]*NO[ \t]*TRACE[ \t]*\*\//) { + # Ignored due to NO TRACE comment. 
+ } else { + # Check for known, but unsupported type + if ( $trace =~ /(^#)|([^*]#)/ ) { + # Check for unsupported return type + if ( $type =~ /(^#)|([^*]#)/ ) { + errmesg $file, $name, "unsupported type in return type\nAdd to TypeString hash in trace script and update H5_trace_args()"; + print "type = '$type'\n"; + } + + # Check for unsupported argument type + $index = 0; + for (@arg_str) { + if ( $_ =~ /(^#)|([^*]#)/ ) { + errmesg $file, $name, "unsupported type in args\nAdd to TypeString hash in trace script and update H5_trace_args()"; + print "type = $arg_type[$index]\n"; + } + $index++; + } + goto error; + } + + # Check for unknown (and therefore unsupported) type + if ( $trace =~ /(^!)|([^*]!)/ ) { + # Check for unsupported return type + if ( $type =~ /(^!)|([^*]!)/ ) { + errmesg $file, $name, "unknown type in return type\nAdd to TypeString hash in trace script and also update H5_trace_args() if used by value"; + print "type = '$type'\n"; + } + + # Check for unsupported argument type + $index = 0; + for (@arg_str) { + if ( $_ =~ /(^!)|([^*]!)/ ) { + errmesg $file, $name, "unknown type in args\nAdd to TypeString hash in trace script and also update H5_trace_args() if used by value"; + print "type = $arg_type[$index]\n"; + } + $index++; + } + goto error; + } + + if ($body =~ s/((\n[ \t]*)H5TRACE\d+\s*\(.*?\);)\n/"$2$trace"/es) { + # Replaced an H5TRACE macro. + } elsif ($body=~s/((\n[ \t]*)FUNC_ENTER\w*[ \t]*(\(.*?\))?;??)\n/"$1$2$trace"/es) { + # Added an H5TRACE macro after a FUNC_ENTER macro. 
+ } else { + errmesg $file, $name, "unable to insert tracing information"; + print "body = ", $body, "\n"; + goto error; + } + } + + #Increment # of API routines modified + $file_api++; + } + + # Check for H5ARG_TRACE macros in non-API routines + if ( $body =~ /H5ARG_TRACE/ ) { + # Check for untraceable type (deferred until $argtrace used) + if ( $argtrace =~ /(^!)|([^*]!)/ ) { + errmesg $file, $name, "untraceable type in args"; + print "args = '$orig_args'\n"; + goto error; + } + + # Replace / update H5ARG_TRACE macro. + $body =~ s/(H5ARG_TRACE(\d+\s*\(.*?\))?)/"$argtrace"/esg; + + #Increment # of non-API routines modified + $file_args++; } - error: - return "\n$type\n$name($args)\n$body"; + return "\n$type\n$name($orig_args)\n$body"; } ############################################################################## # Process each source file, rewriting API functions with updated # tracing information. # -my $total_api = 0; for $file (@ARGV) { + $file_api = 0; + $file_args = 0; + # Ignore some files that do not need tracing macros - unless ($file eq "H5FDmulti.c" or $file eq "src/H5FDmulti.c" or $file eq "H5FDstdio.c" or $file eq "src/H5FDstdio.c") { - + unless ($file eq "H5FDmulti.c" or $file eq "src/H5FDmulti.c" or $file eq "H5FDstdio.c" or $file eq "src/H5FDstdio.c" or $file eq "src/H5TS.c" or $file eq "src/H5FDperform.c") { + # Snarf up the entire file open SOURCE, $file or die "$file: $!\n"; $Source = join "", <SOURCE>; close SOURCE; - # Make modifications + # Make a copy of the original data my $original = $Source; - my $napi = $Source =~ s/\n([A-Za-z]\w*(\s+[A-Za-z]\w*)*\s*\**)\n #type - (H5[A-Z]{0,2}[^_A-Z0-9]\w*) #name - \s*\((.*?)\)\s* #args - (\{.*?\n\}[^\n]*) #body - /rewrite_func($file,$1,$3,$4,$5)/segx; - $total_api += $napi; + + # Make modifications + $Source =~ s/\n([A-Za-z]\w*(\s+[A-Za-z]\w*)*\s*\**)\n #type + (H5[A-Z]{0,2}_?[a-zA-Z0-9_]\w*) #name + \s*\((.*?)\)\s* #args + (\{.*?\n\}[^\n]*) #body + /rewrite_func($file,$1,$3,$4,$5)/segx; # If the 
source changed then print out the new version if ($original ne $Source) { - printf "%s: instrumented %d API function%s\n", - $file, $napi, 1==$napi?"":"s"; + printf "%s: instrumented %d API function%s and %d argument list%s\n", + $file, $file_api, (1 == $file_api ? "" : "s"), + $file_args, (1 == $file_args ? "" : "s"); rename $file, "$file~" or die "unable to make backup"; open SOURCE, ">$file" or die "unable to modify source"; print SOURCE $Source; close SOURCE; + + $total_api += $file_api; + $total_args += $file_args; } } } @@ -377,6 +538,9 @@ if ($found_errors eq 1) { printf "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"; exit 1; } else { - printf "Finished processing HDF5 API calls\n"; + printf "Finished processing HDF5 API calls:\n"; + printf "\tinstrumented %d API function%s and %d argument list%s\n", + $total_api, (1 == $total_api ? "" : "s"), + $total_args, (1 == $total_args ? "" : "s"); } diff --git a/bin/warnhist b/bin/warnhist index 7e56246..0150138 100755 --- a/bin/warnhist +++ b/bin/warnhist @@ -1,5 +1,6 @@ -#!/usr/bin/perl -w +#!/usr/bin/env perl require 5.003; +use warnings; # # Copyright by The HDF Group. @@ -14,7 +15,7 @@ require 5.003; # http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have # access to either file, you may request a copy from help@hdfgroup.org. 
# -# Quincey Koziol, koziol@hdfgroup.org +# Quincey Koziol # 9 Aug 2013 # # Purpose: Given an input file containing the output from a build of the @@ -29,10 +30,13 @@ use Getopt::Std; # Global variables, for accumulating information my $totalcount = 0; +my $notecount = 0; +my $dupcount = 0; my %warn_count = (); my $warncount; my %warn_file = (); my %warn_file_line = (); +my %warn_file_line_offset = (); my %file_count = (); my $filecount; my $ignorecount = 0; @@ -47,10 +51,13 @@ my %file_warn_line = (); my $current_warning = 0; my $current_file = 0; my $warn_index; +my $genericize = 1; + +# Info about last name / line / offset for file my $last_c_name; my $last_fort_name; my $last_fort_line; -my $genericize = 1; +my $last_fort_offset; # Display usage sub do_help { @@ -71,7 +78,7 @@ sub do_help { print "\t-S <file string list>\tDisplay warnings for files which contain a string, <file string list>\n"; print "\t\t<file string list> is a comma separated list, with no spaces\n"; print "\t\tFor example: 'H5Fint' or 'H5Fint,H5Gnode'\n"; - print "\t-l\tDisplay line nunbers for file/warning\n"; + print "\t-l\tDisplay line numbers for file/warning\n"; print "\t-u\tLeave 'unique' types in warnings, instead of genericizing them\n"; print "\t-i <name list>\tIgnore named files, <name list>\n"; print "\t\t<name list> is a comma separated list, with no spaces\n"; @@ -97,7 +104,7 @@ if($options{h}) { # Parse list of file names to ignore if(exists $options{i}) { @ignorenames = split /,/, $options{i}; -#print @ignorenames; +#print STDERR @ignorenames; } # Parse list of warning indices to expand file names @@ -105,18 +112,18 @@ if(exists $options{w}) { my @tmp_indices; @tmp_indices = split /,/, $options{w}; -#print @tmp_indices; +#print STDERR @tmp_indices; for my $x (@tmp_indices) { -#print "x = '$x'\n"; +#print STDERR "x = '$x'\n"; if($x =~ /\-/) { my $start_index; my $end_index; -#print "matched = '$x'\n"; +#print STDERR "matched = '$x'\n"; ($start_index, $end_index) = split /\-/, 
$x; -#print "start_index = '$start_index', end_index = '$end_index'\n"; +#print STDERR "start_index = '$start_index', end_index = '$end_index'\n"; for my $y ($start_index..$end_index) { -#print "y = '$y'\n"; +#print STDERR "y = '$y'\n"; if(!exists $warn_file_indices{$y}) { $warn_file_indices{$y} = $y; } @@ -129,14 +136,14 @@ if(exists $options{w}) { } } #foreach (sort keys %warn_file_indices) { -# print "$_ : $warn_file_indices{$_}\n"; +# print STDERR "$_ : $warn_file_indices{$_}\n"; #} } # Parse list of warning strings to expand file names if(exists $options{s}) { @warn_match_strings = split /,/, $options{s}; -# print @warn_match_strings; +# print STDERR @warn_match_strings; } # Parse list of file indices to expand warnings @@ -144,18 +151,18 @@ if(exists $options{f}) { my @tmp_indices; @tmp_indices = split /,/, $options{f}; -#print @tmp_indices; +#print STDERR @tmp_indices; for my $x (@tmp_indices) { -#print "x = '$x'\n"; +#print STDERR "x = '$x'\n"; if($x =~ /\-/) { my $start_index; my $end_index; -#print "matched = '$x'\n"; +#print STDERR "matched = '$x'\n"; ($start_index, $end_index) = split /\-/, $x; -#print "start_index = '$start_index', end_index = '$end_index'\n"; +#print STDERR "start_index = '$start_index', end_index = '$end_index'\n"; for my $y ($start_index..$end_index) { -#print "y = '$y'\n"; +#print STDERR "y = '$y'\n"; if(!exists $file_warn_indices{$y}) { $file_warn_indices{$y} = $y; } @@ -168,14 +175,14 @@ if(exists $options{f}) { } } #foreach (sort keys %warn_file_indices) { -# print "$_ : $warn_file_indices{$_}\n"; +# print STDERR "$_ : $warn_file_indices{$_}\n"; #} } # Parse list of warning strings for files to expand warnings if(exists $options{S}) { @file_match_strings = split /,/, $options{S}; -# print @file_match_strings; +# print STDERR @file_match_strings; } # Check if warnings should stay unique and not be "genericized" @@ -196,7 +203,7 @@ while (<>) { # Retain last FORTRAN compile line, which comes a few lines before warning if($_ =~ 
/.*\.[fF]90:.*/) { - ($last_fort_name, $last_fort_line, $toss) = split /\:/, $_; + ($last_fort_name, $last_fort_line, $last_fort_offset) = split /\:/, $_; ($last_fort_line, $toss) = split /\./, $last_fort_line; } @@ -205,44 +212,45 @@ while (<>) { ($last_c_name, $toss) = split /\:/, $_; } + # Retain C/C++ compile line, which comes with the line of warning + if($_ =~ /.*[A-Za-z0-9_]\.[chC]\(.*[0-9]\):.*#.*/) { + $last_c_name = $_; + } + # Skip lines that don't have the word "warning" - next if $_ !~ /[Ww]arning:/; + next if $_ !~ /[Ww]arning/; # Skip warnings from linker next if $_ =~ /ld: warning:/; - # Skip warnings from build_py and install_lib + # Skip warnings from build_py and install_lib next if $_ =~ /warning: (build_py|install_lib)/; + # Skip variables with the word 'warning' in them + next if $_ =~ /_warning_/; + # "Hide" the C++ '::' symbol until we've parsed out the parts of the line while($_ =~ /\:\:/) { $_ =~ s/\:\:/@@@@/g; } # Check for weird formatting of warning message + $line = "??"; + $offset = "??"; if($_ =~ /^cc1: warning:.*/) { $name = $last_c_name; - $line = "??"; ($toss, $toss, $warning, $extra, $extra2) = split /\:/, $_; # Check for CMAKE build with warning on first line and no filename } elsif($_ =~ /^\s*[Ww]arning:.*/) { $name = $last_c_name; - $line = "??"; ($toss, $warning, $extra, $extra2) = split /\:/, $_; # Check for FORTRAN warning output } elsif($_ =~ /^Warning:.*/) { $name = $last_fort_name; $line = $last_fort_line; + $offset = $last_fort_offset; ($toss, $warning, $extra, $extra2) = split /\:/, $_; -#print "1:",$.,":",$_; -# $_ = <>; -#print "2:",$.,":",$_; -# if($_ =~ /^\sFC.*/) { -# $_ = <>; -#print "3:",$.,":",$_; -# } -# ($name, $line, $toss) = split /\:/, $_; -#print "4:","'",$name,"'","-","'",$line,"'","\n"; + # Check for improperly parsed filename or line if($name =~ /^$/) { print "Filename is a null string! Input line #$. 
is: '$_'"; @@ -256,11 +264,14 @@ while (<>) { } elsif($_ =~ /^\".*, line [0-9]+: *[Ww]arning:.*/) { ($name, $toss, $warning, $extra, $extra2) = split /\:/, $_; ($name, $line) = split /\,/, $name; - $name =~ s/^\"//g; - $name =~ s/\"$//g; - $line =~ s/^\s*line\s*//g; -# print "name:'", $name, "'-'", $line, "'\n"; -# print "warning:'", $warning, "'\n"; + $name =~ s/^\"//g; + $name =~ s/\"$//g; + $line =~ s/^\s*line\s*//g; + # Check for Intel icc warning + } elsif($_ =~ /.*[A-Za-z0-9_]\.[chC]\(.*[0-9]\):.*#.*/) { + ($last_c_name, $toss, $warning) = split /\:/, $last_c_name; + ($name, $line) = split /\(/, $last_c_name; + $line =~ s/\)//g; } else { # Check for 'character offset' field appended to file & line # # (This is probably specific to GCC) @@ -299,9 +310,7 @@ while (<>) { # Check for ignored file if(exists $options{i}) { for my $x (@ignorenames) { -#print "x = '$x'\n"; if($name =~ /$x/) { -# print "matched name = '$name'\n"; $ignorecount++; if(!(exists $ignored_files{$name})) { $ignored_files{$name} = $name; @@ -330,18 +339,28 @@ while (<>) { # Convert all quotes to ' $warning =~ s/‘/'/g; $warning =~ s/’/'/g; + $warning =~ s/"/'/g; # # These skipped messages & "genericizations" may be specific to GCC # Skip supplemental warning message - next if $warning =~ /near initialization for/; + if($warning =~ /near initialization for/) { + $notecount++; + next + } # Skip C++ supplemental warning message - next if $warning =~ /in call to/; + if($warning =~ /in call to/) { + $notecount++; + next + } # Skip GCC warning that should be a note - next if $_ =~ /\(this will be reported only once per input file\)/; + if($_ =~ /\(this will be reported only once per input file\)/) { + $notecount++; + next + } if($genericize) { # Eliminate C/C++ "{aka <some type>}" and "{aka '<some type>'}" info @@ -390,16 +409,16 @@ while (<>) { $warning =~ s/[A-Za-z_0-9]*\([A-Za-z_,0-9]*\) in [A-Za-z_0-9]*/-\(-\) in -/g; } } -# print "warning = $warning\n"; # <end possible GCC-specific code> # 
Check if we've already seen this warning on this line in this file # (Can happen for warnings from inside header files) - if( !exists $warn_file_line{$warning}{$name}{$line} ) { + if( !exists $warn_file_line_offset{$warning}{$name}{$line}{$offset} ) { # Increment count for [generic] warning $warn_count{$warning}++; $warn_file{$warning}{$name}++; $warn_file_line{$warning}{$name}{$line}++; + $warn_file_line_offset{$warning}{$name}{$line}{$offset}++; # Increment count for filename $file_count{$name}++; @@ -409,14 +428,20 @@ while (<>) { # Increment total count of warnings $totalcount++; } + else { + # Increment count of duplicate warnings + $dupcount++; + } -# print "name = $name\n"; -# print "line = $line\n"; -# print "offset = $offset\n"; -# print "warning = \"$warning\"\n"; +# print STDERR "name = $name\n"; +# print STDERR "line = $line\n"; +# print STDERR "offset = $offset\n"; +# print STDERR "warning = \"$warning\"\n"; } print "Total unique [non-ignored] warnings: $totalcount\n"; +print "Ignored notes / supplemental warning lines [not counted in unique warnings]: $notecount\n"; +print "Duplicated warning lines [not counted in unique warnings]: $dupcount\n"; print "Total ignored warnings: $ignorecount\n"; $warncount = keys %warn_count; print "Total unique kinds of warnings: $warncount\n"; @@ -427,7 +452,7 @@ print "Total files with warnings: $filecount\n\n"; print "# of Warnings by frequency (file count)\n"; print "=======================================\n"; for my $x (sort {$warn_count{$b} <=> $warn_count{$a}} keys(%warn_count)) { - printf ("[%2d] %4d (%2d) - %s\n", $current_warning++, $warn_count{$x}, scalar(keys %{$warn_file_line{$x}}), $x); + printf ("[%2d] %4d (%2d) - %s\n", $current_warning++, $warn_count{$x}, scalar(keys %{$warn_file{$x}}), $x); if((exists $options{W}) || (exists $options{w}) || (exists $options{s})) { my $curr_index = $current_warning - 1; my $match = 0; @@ -435,9 +460,9 @@ for my $x (sort {$warn_count{$b} <=> $warn_count{$a}} 
keys(%warn_count)) { # Check for string from list in current warning if(exists $options{s}) { for my $y (@warn_match_strings) { -# print "y = '$y'\n"; +# print STDERR "y = '$y'\n"; if($x =~ /$y/) { -# print "matched warning = '$x'\n"; +# print STDERR "matched warning = '$x'\n"; $match = 1; last; } @@ -465,7 +490,7 @@ for my $x (sort {$warn_count{$b} <=> $warn_count{$a}} keys(%warn_count)) { print "\n# of Warnings by filename (warning type)\n"; print "========================================\n"; for my $x (sort {$file_count{$b} <=> $file_count{$a}} keys(%file_count)) { - printf ("[%3d] %4d (%2d) - %s\n", $current_file++, $file_count{$x}, scalar(keys %{$file_warn_line{$x}}), $x); + printf ("[%3d] %4d (%2d) - %s\n", $current_file++, $file_count{$x}, scalar(keys %{$file_warn{$x}}), $x); if((exists $options{F}) || (exists $options{f}) || (exists $options{S})) { my $curr_index = $current_file - 1; my $match = 0; @@ -473,9 +498,9 @@ for my $x (sort {$file_count{$b} <=> $file_count{$a}} keys(%file_count)) { # Check for string from list in current file if(exists $options{S}) { for my $y (@file_match_strings) { -# print "y = '$y'\n"; +# print STDERR "y = '$y'\n"; if($x =~ /$y/) { -# print "matched warning = '$x'\n"; +# print STDERR "matched warning = '$x'\n"; $match = 1; last; } diff --git a/bin/yodconfigure b/bin/yodconfigure index 44d7d99..a91507d 100755 --- a/bin/yodconfigure +++ b/bin/yodconfigure @@ -7,7 +7,7 @@ # This file is part of HDF5. The full HDF5 copyright notice, including # terms governing use, modification, and redistribution, is contained in # the COPYING file, which can be found at the root of the source code -# distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. +# distribution tree, or in https://www.hdfgroup.org/licenses. # If you do not have access to either file, you may request a copy from # help@hdfgroup.org. # |