author     Larry Knox <lrknox@hdfgroup.org>   2024-02-16 20:12:10 (GMT)
committer  GitHub <noreply@github.com>        2024-02-16 20:12:10 (GMT)
commit     034271b239c2f8434a3d9528da5587f4a9813a41 (patch)
tree       93badd8efbd89b2202629b591f44e4caa4b8307b
parent     413d10f6e3d4db5341413ba7cd4f819eb5156a51 (diff)
parent     17a542fce3f43ddbcf2113e061772e9315875eb0 (diff)
download   hdf5-034271b239c2f8434a3d9528da5587f4a9813a41.zip
           hdf5-034271b239c2f8434a3d9528da5587f4a9813a41.tar.gz
           hdf5-034271b239c2f8434a3d9528da5587f4a9813a41.tar.bz2
Merge pull request #4025 from lrknox/1_14_dev_sync3_lrk
Sync more develop changes to hdf5_1_14
-rw-r--r--  .github/workflows/cmake-ctest.yml                   8
-rw-r--r--  .github/workflows/daily-build.yml                  32
-rw-r--r--  .github/workflows/release.yml                       1
-rw-r--r--  .github/workflows/remove-files.yml                 60
-rw-r--r--  HDF5Examples/C/H5PAR/ph5_dataset.c                 18
-rw-r--r--  HDF5Examples/C/H5PAR/ph5_file_create.c             18
-rw-r--r--  HDF5Examples/C/H5PAR/ph5_filtered_writes.c         18
-rw-r--r--  HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c  18
-rw-r--r--  HDF5Examples/C/H5PAR/ph5_hyperslab_by_chunk.c      18
-rw-r--r--  HDF5Examples/C/H5PAR/ph5_hyperslab_by_col.c        18
-rw-r--r--  HDF5Examples/C/H5PAR/ph5_hyperslab_by_pattern.c    18
-rw-r--r--  HDF5Examples/C/H5PAR/ph5_hyperslab_by_row.c        18
-rw-r--r--  HDF5Examples/C/H5PAR/ph5example.c                  18
-rw-r--r--  HDF5Examples/config/cmake/grepTest.cmake           17
-rw-r--r--  HDF5Examples/config/cmake/runTest.cmake            19
-rw-r--r--  config/cmake/HDF5UseFortran.cmake                   2
-rw-r--r--  config/cmake/grepTest.cmake                        17
-rw-r--r--  config/cmake/runTest.cmake                         24
-rw-r--r--  tools/test/h5copy/CMakeTests.cmake                  8
-rw-r--r--  tools/test/h5ls/CMakeTests.cmake                   13
-rw-r--r--  tools/test/misc/CMakeTestsClear.cmake              19
21 files changed, 322 insertions, 60 deletions
diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml
index 54642a4..b21b47d 100644
--- a/.github/workflows/cmake-ctest.yml
+++ b/.github/workflows/cmake-ctest.yml
@@ -79,7 +79,7 @@ jobs:
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING -Destination ${{ runner.workspace }}/build114/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build114/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/README.md -Destination ${{ runner.workspace }}/build114/hdf5/
- Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build114/hdf5/ -Include *.zip
+ Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/build114/hdf5/ -Include *.zip
cd "${{ runner.workspace }}/build114"
7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip hdf5
shell: pwsh
@@ -147,7 +147,7 @@ jobs:
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build114/hdf5
- cp ${{ runner.workspace }}/hdf5/build114/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build114/hdf5
+ cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/*.tar.gz ${{ runner.workspace }}/build114/hdf5
cd "${{ runner.workspace }}/build114"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz hdf5
shell: bash
@@ -170,7 +170,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: docs-doxygen
- path: ${{ runner.workspace }}/hdf5/build114/ci-StdShar-GNUC/hdf5lib_docs/html
+ path: ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/hdf5lib_docs/html
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
build_and_test_mac:
@@ -232,7 +232,7 @@ jobs:
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build114/hdf5
- cp ${{ runner.workspace }}/hdf5/build114/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5
+ cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5
cd "${{ runner.workspace }}/build114"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz hdf5
shell: bash
diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml
index 257b352..b3af5a4 100644
--- a/.github/workflows/daily-build.yml
+++ b/.github/workflows/daily-build.yml
@@ -12,6 +12,26 @@ permissions:
# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel.
jobs:
+ get-old-names:
+ runs-on: ubuntu-latest
+ outputs:
+ hdf5-name: ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}
+
+ steps:
+ - uses: actions/checkout@v4.1.1
+
+ - name: Get hdf5 release base name
+ uses: dsaltares/fetch-gh-release-asset@master
+ with:
+ version: 'tags/snapshot-1.14'
+ file: 'last-file.txt'
+
+ - name: Read base-name file
+ id: gethdf5base
+ run: echo "HDF5_NAME_BASE=$(cat last-file.txt)" >> $GITHUB_OUTPUT
+
+ - run: echo "hdf5 base name is ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}."
+
call-workflow-tarball:
uses: ./.github/workflows/tarball.yml
with:
@@ -23,6 +43,7 @@ jobs:
uses: ./.github/workflows/cmake-ctest.yml
with:
file_base: ${{ needs.call-workflow-tarball.outputs.file_base }}
+ preset_name: ci-StdShar
#use_tag: snapshot-1.14
#use_environ: snapshots
if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}
@@ -50,3 +71,14 @@ jobs:
use_environ: snapshots
if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}
+ call-workflow-remove:
+ needs: [get-old-names, call-workflow-tarball, call-workflow-ctest, call-workflow-abi, call-workflow-release]
+ permissions:
+ contents: write # In order to allow file deletion
+ uses: ./.github/workflows/remove-files.yml
+ with:
+ file_base: ${{ needs.get-old-names.outputs.hdf5-name }}
+ use_tag: snapshot-1.14
+ use_environ: snapshots
+ if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}
+
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 768581d..54c9a67 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -96,7 +96,6 @@ jobs:
use_environ: release
call-workflow-release:
- #needs: [call-workflow-tarball, call-workflow-ctest]
needs: [log-the-inputs, create-files-ctest, call-workflow-ctest, call-workflow-abi]
permissions:
contents: write # In order to allow tag creation
diff --git a/.github/workflows/remove-files.yml b/.github/workflows/remove-files.yml
new file mode 100644
index 0000000..1d72362
--- /dev/null
+++ b/.github/workflows/remove-files.yml
@@ -0,0 +1,60 @@
+name: hdf5 dev remove-files
+
+# Controls when the action will run. Triggers the workflow on a schedule
+on:
+ workflow_call:
+ inputs:
+ use_tag:
+ description: 'Release version tag'
+ type: string
+ required: false
+ default: snapshot
+ use_environ:
+ description: 'Environment to locate files'
+ type: string
+ required: true
+ default: snapshots
+ file_base:
+ description: "The common base name of the source tarballs"
+ required: true
+ type: string
+
+# Minimal permissions to be inherited by any job that doesn't declare its own permissions
+permissions:
+ contents: read
+
+# Previous workflows must pass to get here so tag the commit that created the files
+jobs:
+ PreRelease-delfiles:
+ runs-on: ubuntu-latest
+ environment: ${{ inputs.use_environ }}
+ permissions:
+ contents: write
+ steps:
+ - name: Get file base name
+ id: get-file-base
+ run: |
+ FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
+ echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
+
+ - name: PreRelease delete from tag
+ id: delete_prerelease
+ if: ${{ (inputs.use_environ == 'snapshots') }}
+ uses: mknejp/delete-release-assets@v1
+ with:
+ token: ${{ github.token }}
+ tag: "${{ inputs.use_tag }}"
+ assets: |
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_compat_report.html
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_hl_compat_report.html
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_cpp_compat_report.html
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-java_compat_report.html
+ ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip
+ ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz
+ ${{ steps.get-file-base.outputs.FILE_BASE }}.zip
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
+ ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
diff --git a/HDF5Examples/C/H5PAR/ph5_dataset.c b/HDF5Examples/C/H5PAR/ph5_dataset.c
index 9b8e8a8..0c25fcc 100644
--- a/HDF5Examples/C/H5PAR/ph5_dataset.c
+++ b/HDF5Examples/C/H5PAR/ph5_dataset.c
@@ -54,6 +54,24 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(plist_id, comm, info);
/*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_all_coll_metadata_ops(plist_id, true);
+
+ /*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(plist_id, true);
+
+ /*
* Create a new file collectively and release property list identifier.
*/
file_id = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
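[Editor's note: the same pair of optional FAPL calls is inserted into each of the parallel examples below. For reference, a minimal standalone sketch of the pattern, where the file name, communicator, and lack of error checking are illustrative rather than taken from the patch, and a parallel (MPI-enabled) HDF5 build is assumed:

#include <stdbool.h>
#include <mpi.h>
#include <hdf5.h>

int
main(int argc, char **argv)
{
    MPI_Init(&argc, &argv);

    /* File access property list using the MPI-IO driver */
    hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(fapl_id, MPI_COMM_WORLD, MPI_INFO_NULL);

    /* OPTIONAL: perform metadata reads and writes collectively;
     * usually helps at scale but is never required for correctness */
    H5Pset_all_coll_metadata_ops(fapl_id, true);
    H5Pset_coll_metadata_write(fapl_id, true);

    /* Create a file collectively ("example.h5" is a placeholder name) */
    hid_t file_id = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);

    H5Pclose(fapl_id);
    H5Fclose(file_id);
    MPI_Finalize();
    return 0;
}]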
diff --git a/HDF5Examples/C/H5PAR/ph5_file_create.c b/HDF5Examples/C/H5PAR/ph5_file_create.c
index a3bd0a8..10938f2 100644
--- a/HDF5Examples/C/H5PAR/ph5_file_create.c
+++ b/HDF5Examples/C/H5PAR/ph5_file_create.c
@@ -37,6 +37,24 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(plist_id, comm, info);
/*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_all_coll_metadata_ops(plist_id, true);
+
+ /*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(plist_id, true);
+
+ /*
* Create a new file collectively.
*/
file_id = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
diff --git a/HDF5Examples/C/H5PAR/ph5_filtered_writes.c b/HDF5Examples/C/H5PAR/ph5_filtered_writes.c
index 104704a..34ed2fb 100644
--- a/HDF5Examples/C/H5PAR/ph5_filtered_writes.c
+++ b/HDF5Examples/C/H5PAR/ph5_filtered_writes.c
@@ -377,14 +377,24 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(fapl_id, comm, info);
/*
- * OPTIONAL: Set collective metadata reads on FAPL to allow
- * parallel writes to filtered datasets to perform
- * better at scale. While not strictly necessary,
- * this is generally recommended.
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows filtered datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
*/
H5Pset_all_coll_metadata_ops(fapl_id, true);
/*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows filtered datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(fapl_id, true);
+
+ /*
* OPTIONAL: Set the latest file format version for HDF5 in
* order to gain access to different dataset chunk
* index types and better data encoding methods.
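[Editor's note: the trailing comment in this hunk refers to the file format version; the call that typically implements it lies outside the shown context, so the following is an assumption, sketched with the hunk's fapl_id:

/* Sketch: request the latest file format version on the FAPL. This
 * enables newer chunk index types and encodings, though files written
 * this way may not be readable by older HDF5 releases. */
H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);]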
diff --git a/HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c b/HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c
index a4d9e16..d4f171f 100644
--- a/HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c
+++ b/HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c
@@ -271,14 +271,24 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(fapl_id, comm, info);
/*
- * OPTIONAL: Set collective metadata reads on FAPL to allow
- * parallel writes to filtered datasets to perform
- * better at scale. While not strictly necessary,
- * this is generally recommended.
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows filtered datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
*/
H5Pset_all_coll_metadata_ops(fapl_id, true);
/*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows filtered datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(fapl_id, true);
+
+ /*
* OPTIONAL: Set the latest file format version for HDF5 in
* order to gain access to different dataset chunk
* index types and better data encoding methods.
diff --git a/HDF5Examples/C/H5PAR/ph5_hyperslab_by_chunk.c b/HDF5Examples/C/H5PAR/ph5_hyperslab_by_chunk.c
index a255b96..e00a0ef 100644
--- a/HDF5Examples/C/H5PAR/ph5_hyperslab_by_chunk.c
+++ b/HDF5Examples/C/H5PAR/ph5_hyperslab_by_chunk.c
@@ -65,6 +65,24 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(plist_id, comm, info);
/*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_all_coll_metadata_ops(plist_id, true);
+
+ /*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(plist_id, true);
+
+ /*
* Create a new file collectively and release property list identifier.
*/
file_id = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
diff --git a/HDF5Examples/C/H5PAR/ph5_hyperslab_by_col.c b/HDF5Examples/C/H5PAR/ph5_hyperslab_by_col.c
index b397fcf..49e5ce3 100644
--- a/HDF5Examples/C/H5PAR/ph5_hyperslab_by_col.c
+++ b/HDF5Examples/C/H5PAR/ph5_hyperslab_by_col.c
@@ -60,6 +60,24 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(plist_id, comm, info);
/*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_all_coll_metadata_ops(plist_id, true);
+
+ /*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(plist_id, true);
+
+ /*
* Create a new file collectively and release property list identifier.
*/
file_id = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
diff --git a/HDF5Examples/C/H5PAR/ph5_hyperslab_by_pattern.c b/HDF5Examples/C/H5PAR/ph5_hyperslab_by_pattern.c
index 77f3bef..bec3a2f 100644
--- a/HDF5Examples/C/H5PAR/ph5_hyperslab_by_pattern.c
+++ b/HDF5Examples/C/H5PAR/ph5_hyperslab_by_pattern.c
@@ -65,6 +65,24 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(plist_id, comm, info);
/*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_all_coll_metadata_ops(plist_id, true);
+
+ /*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(plist_id, true);
+
+ /*
* Create a new file collectively and release property list identifier.
*/
file_id = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
diff --git a/HDF5Examples/C/H5PAR/ph5_hyperslab_by_row.c b/HDF5Examples/C/H5PAR/ph5_hyperslab_by_row.c
index 5035786..1c08a32 100644
--- a/HDF5Examples/C/H5PAR/ph5_hyperslab_by_row.c
+++ b/HDF5Examples/C/H5PAR/ph5_hyperslab_by_row.c
@@ -49,6 +49,24 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(plist_id, comm, info);
/*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_all_coll_metadata_ops(plist_id, true);
+
+ /*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(plist_id, true);
+
+ /*
* Create a new file collectively and release property list identifier.
*/
file_id = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, plist_id);
diff --git a/HDF5Examples/C/H5PAR/ph5example.c b/HDF5Examples/C/H5PAR/ph5example.c
index 5ec2cdc..37d5d68 100644
--- a/HDF5Examples/C/H5PAR/ph5example.c
+++ b/HDF5Examples/C/H5PAR/ph5example.c
@@ -269,6 +269,24 @@ phdf5writeInd(char *filename)
assert(ret != FAIL);
MESG("H5Pset_fapl_mpio succeed");
+ /*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata reads on FAPL to perform metadata reads
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_all_coll_metadata_ops(acc_tpl1, true);
+
+ /*
+ * OPTIONAL: It is generally recommended to set collective
+ * metadata writes on FAPL to perform metadata writes
+ * collectively, which usually allows datasets
+ * to perform better at scale, although it is not
+ * strictly necessary.
+ */
+ H5Pset_coll_metadata_write(acc_tpl1, true);
+
/* create the file collectively */
fid1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl1);
assert(fid1 != FAIL);
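[Editor's note: ph5example.c checks every other HDF5 call through assert, while the two inserted calls are unchecked. In the file's own style the checked form would look like this sketch, where ret, FAIL, and MESG are assumed to be the ones already defined in ph5example.c:

ret = H5Pset_all_coll_metadata_ops(acc_tpl1, true);
assert(ret != FAIL);
MESG("H5Pset_all_coll_metadata_ops succeed");

ret = H5Pset_coll_metadata_write(acc_tpl1, true);
assert(ret != FAIL);
MESG("H5Pset_coll_metadata_write succeed");]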
diff --git a/HDF5Examples/config/cmake/grepTest.cmake b/HDF5Examples/config/cmake/grepTest.cmake
index 2ec8387..4031a1b 100644
--- a/HDF5Examples/config/cmake/grepTest.cmake
+++ b/HDF5Examples/config/cmake/grepTest.cmake
@@ -70,11 +70,18 @@ message (STATUS "COMMAND Result: ${TEST_RESULT}")
message (STATUS "COMMAND Error: ${TEST_ERROR}")
# remove special output
-file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
-string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT)
-if (TEST_FIND_RESULT GREATER -1)
- string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
- file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
+if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
+ file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
+ string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT)
+ if (TEST_FIND_RESULT GREATER -1)
+ string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+ endif ()
+ string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT)
+ if (TEST_FIND_RESULT GREATER -1)
+ string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+ endif ()
endif ()
# if the TEST_ERRREF exists grep the error output with the error reference
diff --git a/HDF5Examples/config/cmake/runTest.cmake b/HDF5Examples/config/cmake/runTest.cmake
index d21765a..3507c34 100644
--- a/HDF5Examples/config/cmake/runTest.cmake
+++ b/HDF5Examples/config/cmake/runTest.cmake
@@ -133,11 +133,18 @@ endif ()
message (STATUS "COMMAND Error: ${TEST_ERROR}")
# remove special output
-file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
-string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT)
-if (TEST_FIND_RESULT GREATER -1)
- string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
- file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
+if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
+ file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
+ string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT)
+ if (TEST_FIND_RESULT GREATER -1)
+ string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+ endif ()
+ string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT)
+ if (TEST_FIND_RESULT GREATER -1)
+ string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+ endif ()
endif ()
# remove special error output
@@ -148,7 +155,7 @@ else ()
# the error stack remains in the .err file
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
endif ()
-string (FIND TEST_STREAM "no version information available" TEST_FIND_RESULT)
+string (FIND "${TEST_STREAM}" "no version information available" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*no version information available[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
# write back the changes to the original files
diff --git a/config/cmake/HDF5UseFortran.cmake b/config/cmake/HDF5UseFortran.cmake
index 3e058ad..7f125f1 100644
--- a/config/cmake/HDF5UseFortran.cmake
+++ b/config/cmake/HDF5UseFortran.cmake
@@ -345,7 +345,7 @@ string (REGEX REPLACE "}" "" OUT_VAR2 ${OUT_VAR2})
set (${HDF_PREFIX}_H5CONFIG_F_RKIND_SIZEOF "INTEGER, DIMENSION(1:num_rkinds) :: rkind_sizeof = (/${OUT_VAR2}/)")
# Setting definition if there is a 16 byte fortran integer
-string (FIND ${PAC_FC_ALL_INTEGER_KINDS_SIZEOF} "16" pos)
+string (FIND "${PAC_FC_ALL_INTEGER_KINDS_SIZEOF}" "16" pos)
if (${pos} EQUAL -1)
set (${HDF_PREFIX}_HAVE_Fortran_INTEGER_SIZEOF_16 0)
else ()
diff --git a/config/cmake/grepTest.cmake b/config/cmake/grepTest.cmake
index 2ec8387..4031a1b 100644
--- a/config/cmake/grepTest.cmake
+++ b/config/cmake/grepTest.cmake
@@ -70,11 +70,18 @@ message (STATUS "COMMAND Result: ${TEST_RESULT}")
message (STATUS "COMMAND Error: ${TEST_ERROR}")
# remove special output
-file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
-string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT)
-if (TEST_FIND_RESULT GREATER -1)
- string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
- file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
+if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
+ file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
+ string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT)
+ if (TEST_FIND_RESULT GREATER -1)
+ string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+ endif ()
+ string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT)
+ if (TEST_FIND_RESULT GREATER -1)
+ string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
+ endif ()
endif ()
# if the TEST_ERRREF exists grep the error output with the error reference
diff --git a/config/cmake/runTest.cmake b/config/cmake/runTest.cmake
index 0cfb9a3..4257c44 100644
--- a/config/cmake/runTest.cmake
+++ b/config/cmake/runTest.cmake
@@ -133,16 +133,18 @@ endif ()
message (STATUS "COMMAND Error: ${TEST_ERROR}")
# remove special output
-file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
-string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT)
-if (TEST_FIND_RESULT GREATER -1)
- string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
- file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
-endif ()
-string (FIND TEST_STREAM "ulimit -s" TEST_FIND_RESULT)
-if (TEST_FIND_RESULT GREATER -1)
- string (REGEX REPLACE "^.*ulimit -s.*\n" "" TEST_STREAM "${TEST_STREAM}")
- file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
+if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
+ file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
+ string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT)
+ if (TEST_FIND_RESULT GREATER -1)
+ string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
+ endif ()
+ string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT)
+ if (TEST_FIND_RESULT GREATER -1)
+ string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
+ file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
+ endif ()
endif ()
# remove special error output
@@ -153,7 +155,7 @@ else ()
# the error stack remains in the .err file
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
endif ()
-string (FIND TEST_STREAM "no version information available" TEST_FIND_RESULT)
+string (FIND "${TEST_STREAM}" "no version information available" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*no version information available[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
# write back the changes to the original files
diff --git a/tools/test/h5copy/CMakeTests.cmake b/tools/test/h5copy/CMakeTests.cmake
index b4daa87..15d66d1 100644
--- a/tools/test/h5copy/CMakeTests.cmake
+++ b/tools/test/h5copy/CMakeTests.cmake
@@ -253,7 +253,7 @@
# Similar to ADD_H5_TEST macro. Compare to outputs from source & target
# files instead of checking with h5ls.
#
- macro (ADD_H5_CMP_TEST testname resultcode infile vparam sparam srcname dparam dstname)
+ macro (ADD_H5_CMP_TEST testname resultcode result_errcheck infile vparam sparam srcname dparam dstname)
# Remove any output file left over from previous test run
add_test (
NAME H5COPY-CMP-${testname}-clear-objects
@@ -276,9 +276,9 @@
-D "TEST_OUTPUT=./testfiles/${testname}.out.out"
-D "TEST_EXPECT=${resultcode}"
-D "TEST_REFERENCE=./testfiles/${testname}.out"
- -D "TEST_ERRREF=./testfiles/${testname}.err"
+ -D "TEST_ERRREF=${result_errcheck}"
-D "TEST_MASK=true"
- -P "${HDF_RESOURCES_DIR}/runTest.cmake"
+ -P "${HDF_RESOURCES_DIR}/grepTest.cmake"
)
endif ()
set_tests_properties (H5COPY-CMP-${testname} PROPERTIES DEPENDS H5COPY-CMP-${testname}-clear-objects)
@@ -598,7 +598,7 @@
#-----------------------------------------------------------------
# "Test copying object into group which doesn't exist, without -p"
#
- ADD_H5_CMP_TEST (h5copy_misc1 1 ${HDF_FILE1}.h5 -v -s /simple -d /g1/g2/simple)
+ ADD_H5_CMP_TEST (h5copy_misc1 1 "h5copy error" ${HDF_FILE1}.h5 -v -s /simple -d /g1/g2/simple)
#-------------------------------------------
# "Test copying objects to the same file "
diff --git a/tools/test/h5ls/CMakeTests.cmake b/tools/test/h5ls/CMakeTests.cmake
index 629f324..2d932b2 100644
--- a/tools/test/h5ls/CMakeTests.cmake
+++ b/tools/test/h5ls/CMakeTests.cmake
@@ -174,7 +174,7 @@
endif ()
endmacro ()
- macro (ADD_H5_ERR_TEST resultfile resultcode)
+ macro (ADD_H5_ERR_TEST resultfile resultcode result_errcheck)
# If using memchecker add tests without using scripts
if (HDF5_ENABLE_USING_MEMCHECKER)
add_test (NAME H5LS-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:h5ls${tgt_file_ext}> ${ARGN})
@@ -193,8 +193,9 @@
-D "TEST_OUTPUT=${resultfile}.out"
-D "TEST_EXPECT=${resultcode}"
-D "TEST_REFERENCE=${resultfile}.ls"
- -D "TEST_ERRREF=${resultfile}.err"
- -P "${HDF_RESOURCES_DIR}/runTest.cmake"
+ -D "TEST_ERRREF=${result_errcheck}"
+ -D "TEST_SKIP_COMPARE=true"
+ -P "${HDF_RESOURCES_DIR}/grepTest.cmake"
)
endif ()
set_tests_properties (H5LS-${resultfile} PROPERTIES
@@ -264,7 +265,7 @@
# test for displaying groups
# The following combination of arguments is expected to return an error message
# and return value 1
- ADD_H5_ERR_TEST (tgroup-1 1 -w80 -r -g tgroup.h5)
+ ADD_H5_ERR_TEST (tgroup-1 1 "option not compatible" -w80 -r -g tgroup.h5)
ADD_H5_TEST (tgroup-2 0 -w80 -g tgroup.h5/g1)
# test for files with groups that have long comments
@@ -305,7 +306,7 @@
# tests for no-dangling-links
# if this option is given on dangling link, h5ls should return exit code 1
# when used alone , expect to print out help and return exit code 1
- ADD_H5_ERR_TEST (textlinksrc-nodangle-1 1 -w80 --no-dangling-links textlinksrc.h5)
+ ADD_H5_ERR_TEST (textlinksrc-nodangle-1 1 "no-dangling-links must be used" -w80 --no-dangling-links textlinksrc.h5)
# external dangling link - expected exit code 1
ADD_H5_TEST (textlinksrc-nodangle-2 1 -w80 --follow-symlinks --no-dangling-links textlinksrc.h5)
# soft dangling link - expected exit code 1
@@ -367,7 +368,7 @@
endif ()
# test for non-existing file
- ADD_H5_ERR_TEST (nosuchfile 1 nosuchfile.h5)
+ ADD_H5_ERR_TEST (nosuchfile 1 "unable to open file" nosuchfile.h5)
# test for variable length data types in verbose mode
if (H5_WORDS_BIGENDIAN)
diff --git a/tools/test/misc/CMakeTestsClear.cmake b/tools/test/misc/CMakeTestsClear.cmake
index cfe237d..aead4c5 100644
--- a/tools/test/misc/CMakeTestsClear.cmake
+++ b/tools/test/misc/CMakeTestsClear.cmake
@@ -112,7 +112,7 @@
endif ()
endmacro ()
- macro (ADD_H5_ERR_CMP testname resultfile resultcode)
+ macro (ADD_H5_ERR_CMP testname resultfile resultcode result_errcheck)
if (NOT HDF5_ENABLE_USING_MEMCHECKER)
add_test (
NAME H5CLEAR_CMP-${testname}
@@ -124,8 +124,9 @@
-D "TEST_OUTPUT=${testname}.out"
-D "TEST_EXPECT=${resultcode}"
-D "TEST_REFERENCE=${resultfile}.mty"
- -D "TEST_ERRREF=${resultfile}.err"
- -P "${HDF_RESOURCES_DIR}/runTest.cmake"
+ -D "TEST_ERRREF=${result_errcheck}"
+ -D "TEST_SKIP_COMPARE=true"
+ -P "${HDF_RESOURCES_DIR}/grepTest.cmake"
)
if ("H5CLEAR_CMP-${testname}" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
set_tests_properties (H5CLEAR_CMP-${testname} PROPERTIES DISABLED true)
@@ -443,11 +444,11 @@
ADD_H5_CMP (h5clr_usage_junk h5clear_usage 1 "" junk.h5)
ADD_H5_CMP (h5clr_usage_none h5clear_usage 1 "" orig_h5clear_sec2_v3.h5)
ADD_H5_CMP (h5clr_missing_file_m h5clear_missing_file 1 "-m")
- ADD_H5_ERR_CMP (h5clr_open_fail_s h5clear_open_fail 1 "-s" junk.h5)
+ ADD_H5_ERR_CMP (h5clr_open_fail_s h5clear_open_fail 1 "h5clear error" "-s" junk.h5)
ADD_H5_CMP (h5clr_missing_file_ms h5clear_missing_file 1 "-m" "-s")
- ADD_H5_ERR_CMP (h5clr_open_fail_ms h5clear_open_fail 1 "-m" "-s" junk.h5)
- ADD_H5_ERR_CMP (h5clr_no_mdc_image_m h5clear_no_mdc_image 0 "-m" orig_h5clear_sec2_v2.h5)
- ADD_H5_ERR_CMP (h5clr_no_mdc_image_ms h5clear_no_mdc_image 0 "-s" "-m" orig_h5clear_sec2_v0.h5)
+ ADD_H5_ERR_CMP (h5clr_open_fail_ms h5clear_open_fail 1 "h5clear error" "-m" "-s" junk.h5)
+ ADD_H5_ERR_CMP (h5clr_no_mdc_image_m h5clear_no_mdc_image 0 "h5clear warning" "-m" orig_h5clear_sec2_v2.h5)
+ ADD_H5_ERR_CMP (h5clr_no_mdc_image_ms h5clear_no_mdc_image 0 "h5clear warning" "-s" "-m" orig_h5clear_sec2_v0.h5)
#
#
#
@@ -478,8 +479,8 @@
#
#
# h5clear_mdc_image.h5 already has cache image removed earlier, verify the expected warning from h5clear:
- ADD_H5_ERR_CMP (h5clr_mdc_image_m h5clear_no_mdc_image 0 "-m" mod_h5clear_mdc_image.h5)
- ADD_H5_ERR_CMP (h5clr_mdc_image_sm h5clear_no_mdc_image 0 "-s" "-m" mod_h5clear_mdc_image2.h5)
+ ADD_H5_ERR_CMP (h5clr_mdc_image_m h5clear_no_mdc_image 0 "h5clear warning" "-m" mod_h5clear_mdc_image.h5)
+ ADD_H5_ERR_CMP (h5clr_mdc_image_sm h5clear_no_mdc_image 0 "h5clear warning" "-s" "-m" mod_h5clear_mdc_image2.h5)
#
#
#