Merge pull request #4025 from lrknox/1_14_dev_sync3_lrk

Sync more develop changes to hdf5_1_14
Larry Knox 2024-02-16 14:12:10 -06:00 committed by GitHub
commit 034271b239
21 changed files with 322 additions and 60 deletions


@@ -79,7 +79,7 @@ jobs:
          Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING -Destination ${{ runner.workspace }}/build114/hdf5/
          Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build114/hdf5/
          Copy-Item -Path ${{ runner.workspace }}/hdf5/hdfsrc/README.md -Destination ${{ runner.workspace }}/build114/hdf5/
          Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/ci-StdShar-MSVC/* -Destination ${{ runner.workspace }}/build114/hdf5/ -Include *.zip
          Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/build114/hdf5/ -Include *.zip
          cd "${{ runner.workspace }}/build114"
          7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip hdf5
        shell: pwsh
@@ -147,7 +147,7 @@ jobs:
          cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build114/hdf5
          cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build114/hdf5
          cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build114/hdf5
          cp ${{ runner.workspace }}/hdf5/build114/ci-StdShar-GNUC/*.tar.gz ${{ runner.workspace }}/build114/hdf5
          cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/*.tar.gz ${{ runner.workspace }}/build114/hdf5
          cd "${{ runner.workspace }}/build114"
          tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz hdf5
        shell: bash
@@ -170,7 +170,7 @@ jobs:
        uses: actions/upload-artifact@v4
        with:
          name: docs-doxygen
          path: ${{ runner.workspace }}/hdf5/build114/ci-StdShar-GNUC/hdf5lib_docs/html
          path: ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/hdf5lib_docs/html
          if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`

  build_and_test_mac:
@@ -232,7 +232,7 @@ jobs:
          cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING ${{ runner.workspace }}/build114/hdf5
          cp ${{ runner.workspace }}/hdf5/hdfsrc/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build114/hdf5
          cp ${{ runner.workspace }}/hdf5/hdfsrc/README.md ${{ runner.workspace }}/build114/hdf5
          cp ${{ runner.workspace }}/hdf5/build114/ci-StdShar-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5
          cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5
          cd "${{ runner.workspace }}/build114"
          tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-osx12.tar.gz hdf5
        shell: bash


@@ -12,6 +12,26 @@ permissions:
# A workflow run is made up of one or more jobs that can run sequentially or
# in parallel.
jobs:
  get-old-names:
    runs-on: ubuntu-latest
    outputs:
      hdf5-name: ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}
    steps:
      - uses: actions/checkout@v4.1.1
      - name: Get hdf5 release base name
        uses: dsaltares/fetch-gh-release-asset@master
        with:
          version: 'tags/snapshot-1.14'
          file: 'last-file.txt'
      - name: Read base-name file
        id: gethdf5base
        run: echo "HDF5_NAME_BASE=$(cat last-file.txt)" >> $GITHUB_OUTPUT
      - run: echo "hdf5 base name is ${{ steps.gethdf5base.outputs.HDF5_NAME_BASE }}."

  call-workflow-tarball:
    uses: ./.github/workflows/tarball.yml
    with:
@@ -23,6 +43,7 @@ jobs:
    uses: ./.github/workflows/cmake-ctest.yml
    with:
      file_base: ${{ needs.call-workflow-tarball.outputs.file_base }}
      preset_name: ci-StdShar
      #use_tag: snapshot-1.14
      #use_environ: snapshots
    if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}
@@ -50,3 +71,14 @@ jobs:
      use_environ: snapshots
    if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}

  call-workflow-remove:
    needs: [get-old-names, call-workflow-tarball, call-workflow-ctest, call-workflow-abi, call-workflow-release]
    permissions:
      contents: write # In order to allow file deletion
    uses: ./.github/workflows/remove-files.yml
    with:
      file_base: ${{ needs.get-old-names.outputs.hdf5-name }}
      use_tag: snapshot-1.14
      use_environ: snapshots
    if: ${{ needs.call-workflow-tarball.outputs.has_changes == 'true' }}


@@ -96,7 +96,6 @@ jobs:
      use_environ: release

  call-workflow-release:
    #needs: [call-workflow-tarball, call-workflow-ctest]
    needs: [log-the-inputs, create-files-ctest, call-workflow-ctest, call-workflow-abi]
    permissions:
      contents: write # In order to allow tag creation

.github/workflows/remove-files.yml (new file, 60 lines)

@@ -0,0 +1,60 @@
name: hdf5 dev remove-files

# Controls when the action will run. Triggers the workflow on a call from another workflow
on:
  workflow_call:
    inputs:
      use_tag:
        description: 'Release version tag'
        type: string
        required: false
        default: snapshot
      use_environ:
        description: 'Environment to locate files'
        type: string
        required: true
        default: snapshots
      file_base:
        description: "The common base name of the source tarballs"
        required: true
        type: string

# Minimal permissions to be inherited by any job that doesn't declare its own permissions
permissions:
  contents: read

# Previous workflows must pass to get here, so the files created for the earlier snapshot can be removed
jobs:
  PreRelease-delfiles:
    runs-on: ubuntu-latest
    environment: ${{ inputs.use_environ }}
    permissions:
      contents: write
    steps:
      - name: Get file base name
        id: get-file-base
        run: |
          FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
          echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
      - name: PreRelease delete from tag
        id: delete_prerelease
        if: ${{ (inputs.use_environ == 'snapshots') }}
        uses: mknejp/delete-release-assets@v1
        with:
          token: ${{ github.token }}
          tag: "${{ inputs.use_tag }}"
          assets: |
            ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_compat_report.html
            ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_hl_compat_report.html
            ${{ steps.get-file-base.outputs.FILE_BASE }}-hdf5_cpp_compat_report.html
            ${{ steps.get-file-base.outputs.FILE_BASE }}-java_compat_report.html
            ${{ steps.get-file-base.outputs.FILE_BASE }}.doxygen.zip
            ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}.zip
            ${{ steps.get-file-base.outputs.FILE_BASE }}-osx12.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
            ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
            ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip


@@ -53,6 +53,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);
/*
* Create a new file collectively and release property list identifier.
*/
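For reference, a minimal self-contained sketch of the pattern these examples now share, assuming a parallel (MPI-enabled) HDF5 build; the file name "example.h5" is a placeholder and error checking is omitted:

#include <mpi.h>
#include <stdbool.h>
#include "hdf5.h"

int
main(int argc, char **argv)
{
    MPI_Init(&argc, &argv);
    /* File access property list set up for MPI-IO */
    hid_t fapl_id = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(fapl_id, MPI_COMM_WORLD, MPI_INFO_NULL);
    /* Optional tuning: perform metadata reads and writes collectively */
    H5Pset_all_coll_metadata_ops(fapl_id, true);
    H5Pset_coll_metadata_write(fapl_id, true);
    /* Create the file collectively, then release the property list */
    hid_t file_id = H5Fcreate("example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
    H5Pclose(fapl_id);
    /* ... collective dataset I/O would go here ... */
    H5Fclose(file_id);
    MPI_Finalize();
    return 0;
}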


@@ -36,6 +36,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);
/*
* Create a new file collectively.
*/


@@ -377,13 +377,23 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(fapl_id, comm, info);
/*
* OPTIONAL: Set collective metadata reads on FAPL to allow
* parallel writes to filtered datasets to perform
* better at scale. While not strictly necessary,
* this is generally recommended.
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(fapl_id, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(fapl_id, true);
/*
* OPTIONAL: Set the latest file format version for HDF5 in
* order to gain access to different dataset chunk
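The comment above is cut off by the hunk boundary; it refers to opting into the newest file format, which in current HDF5 releases is typically done on the same fapl_id with H5Pset_libver_bounds (a sketch, not part of this diff):

/* Request the latest file format features the library supports */
H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST);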


@@ -271,13 +271,23 @@ main(int argc, char **argv)
H5Pset_fapl_mpio(fapl_id, comm, info);
/*
* OPTIONAL: Set collective metadata reads on FAPL to allow
* parallel writes to filtered datasets to perform
* better at scale. While not strictly necessary,
* this is generally recommended.
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(fapl_id, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows filtered datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(fapl_id, true);
/*
* OPTIONAL: Set the latest file format version for HDF5 in
* order to gain access to different dataset chunk


@@ -64,6 +64,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);
/*
* Create a new file collectively and release property list identifier.
*/


@@ -59,6 +59,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);
/*
* Create a new file collectively and release property list identifier.
*/


@@ -64,6 +64,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);
/*
* Create a new file collectively and release property list identifier.
*/


@@ -48,6 +48,24 @@ main(int argc, char **argv)
plist_id = H5Pcreate(H5P_FILE_ACCESS);
H5Pset_fapl_mpio(plist_id, comm, info);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(plist_id, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(plist_id, true);
/*
* Create a new file collectively and release property list identifier.
*/


@@ -269,6 +269,24 @@ phdf5writeInd(char *filename)
assert(ret != FAIL);
MESG("H5Pset_fapl_mpio succeed");
/*
* OPTIONAL: It is generally recommended to set collective
* metadata reads on FAPL to perform metadata reads
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_all_coll_metadata_ops(acc_tpl1, true);
/*
* OPTIONAL: It is generally recommended to set collective
* metadata writes on FAPL to perform metadata writes
* collectively, which usually allows datasets
* to perform better at scale, although it is not
* strictly necessary.
*/
H5Pset_coll_metadata_write(acc_tpl1, true);
/* create the file collectively */
fid1 = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, acc_tpl1);
assert(fid1 != FAIL);


@@ -70,11 +70,18 @@ message (STATUS "COMMAND Result: ${TEST_RESULT}")
message (STATUS "COMMAND Error: ${TEST_ERROR}")
# remove special output
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif ()
string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif ()
endif ()
# if the TEST_ERRREF exists grep the error output with the error reference


@@ -133,11 +133,18 @@ endif ()
message (STATUS "COMMAND Error: ${TEST_ERROR}")
# remove special output
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif ()
string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif ()
endif ()
# remove special error output
@@ -148,7 +155,7 @@ else ()
# the error stack remains in the .err file
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
endif ()
string (FIND TEST_STREAM "no version information available" TEST_FIND_RESULT)
string (FIND "${TEST_STREAM}" "no version information available" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*no version information available[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
# write back the changes to the original files


@@ -345,7 +345,7 @@ string (REGEX REPLACE "}" "" OUT_VAR2 ${OUT_VAR2})
set (${HDF_PREFIX}_H5CONFIG_F_RKIND_SIZEOF "INTEGER, DIMENSION(1:num_rkinds) :: rkind_sizeof = (/${OUT_VAR2}/)")
# Setting definition if there is a 16 byte fortran integer
string (FIND ${PAC_FC_ALL_INTEGER_KINDS_SIZEOF} "16" pos)
string (FIND "${PAC_FC_ALL_INTEGER_KINDS_SIZEOF}" "16" pos)
if (${pos} EQUAL -1)
set (${HDF_PREFIX}_HAVE_Fortran_INTEGER_SIZEOF_16 0)
else ()


@@ -70,11 +70,18 @@ message (STATUS "COMMAND Result: ${TEST_RESULT}")
message (STATUS "COMMAND Error: ${TEST_ERROR}")
# remove special output
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif ()
string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} "${TEST_STREAM}")
endif ()
endif ()
# if the TEST_ERRREF exists grep the error output with the error reference


@@ -133,16 +133,18 @@ endif ()
message (STATUS "COMMAND Error: ${TEST_ERROR}")
# remove special output
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
string (FIND TEST_STREAM "_pmi_alps" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
endif ()
string (FIND TEST_STREAM "ulimit -s" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*ulimit -s.*\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
string (FIND "${TEST_STREAM}" "_pmi_alps" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*_pmi_alps[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
endif ()
string (FIND "${TEST_STREAM}" "ulimit -s" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*ulimit -s[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
file (WRITE ${TEST_FOLDER}/${TEST_OUTPUT} ${TEST_STREAM})
endif ()
endif ()
# remove special error output
@@ -153,7 +155,7 @@ else ()
# the error stack remains in the .err file
file (READ ${TEST_FOLDER}/${TEST_OUTPUT}.err TEST_STREAM)
endif ()
string (FIND TEST_STREAM "no version information available" TEST_FIND_RESULT)
string (FIND "${TEST_STREAM}" "no version information available" TEST_FIND_RESULT)
if (TEST_FIND_RESULT GREATER -1)
string (REGEX REPLACE "^.*no version information available[^\n]+\n" "" TEST_STREAM "${TEST_STREAM}")
# write back the changes to the original files


@@ -253,7 +253,7 @@
# Similar to ADD_H5_TEST macro. Compare to outputs from source & target
# files instead of checking with h5ls.
#
macro (ADD_H5_CMP_TEST testname resultcode infile vparam sparam srcname dparam dstname)
macro (ADD_H5_CMP_TEST testname resultcode result_errcheck infile vparam sparam srcname dparam dstname)
# Remove any output file left over from previous test run
add_test (
NAME H5COPY-CMP-${testname}-clear-objects
@@ -276,9 +276,9 @@
-D "TEST_OUTPUT=./testfiles/${testname}.out.out"
-D "TEST_EXPECT=${resultcode}"
-D "TEST_REFERENCE=./testfiles/${testname}.out"
-D "TEST_ERRREF=./testfiles/${testname}.err"
-D "TEST_ERRREF=${result_errcheck}"
-D "TEST_MASK=true"
-P "${HDF_RESOURCES_DIR}/runTest.cmake"
-P "${HDF_RESOURCES_DIR}/grepTest.cmake"
)
endif ()
set_tests_properties (H5COPY-CMP-${testname} PROPERTIES DEPENDS H5COPY-CMP-${testname}-clear-objects)
@@ -598,7 +598,7 @@
#-----------------------------------------------------------------
# "Test copying object into group which doesn't exist, without -p"
#
ADD_H5_CMP_TEST (h5copy_misc1 1 ${HDF_FILE1}.h5 -v -s /simple -d /g1/g2/simple)
ADD_H5_CMP_TEST (h5copy_misc1 1 "h5copy error" ${HDF_FILE1}.h5 -v -s /simple -d /g1/g2/simple)
#-------------------------------------------
# "Test copying objects to the same file "


@@ -174,7 +174,7 @@
endif ()
endmacro ()
macro (ADD_H5_ERR_TEST resultfile resultcode)
macro (ADD_H5_ERR_TEST resultfile resultcode result_errcheck)
# If using memchecker add tests without using scripts
if (HDF5_ENABLE_USING_MEMCHECKER)
add_test (NAME H5LS-${resultfile} COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:h5ls${tgt_file_ext}> ${ARGN})
@@ -193,8 +193,9 @@
-D "TEST_OUTPUT=${resultfile}.out"
-D "TEST_EXPECT=${resultcode}"
-D "TEST_REFERENCE=${resultfile}.ls"
-D "TEST_ERRREF=${resultfile}.err"
-P "${HDF_RESOURCES_DIR}/runTest.cmake"
-D "TEST_ERRREF=${result_errcheck}"
-D "TEST_SKIP_COMPARE=true"
-P "${HDF_RESOURCES_DIR}/grepTest.cmake"
)
endif ()
set_tests_properties (H5LS-${resultfile} PROPERTIES
@@ -264,7 +265,7 @@
# test for displaying groups
# The following combination of arguments is expected to return an error message
# and return value 1
ADD_H5_ERR_TEST (tgroup-1 1 -w80 -r -g tgroup.h5)
ADD_H5_ERR_TEST (tgroup-1 1 "option not compatible" -w80 -r -g tgroup.h5)
ADD_H5_TEST (tgroup-2 0 -w80 -g tgroup.h5/g1)
# test for files with groups that have long comments
@@ -305,7 +306,7 @@
# tests for no-dangling-links
# if this option is given on dangling link, h5ls should return exit code 1
# when used alone, expect to print out help and return exit code 1
ADD_H5_ERR_TEST (textlinksrc-nodangle-1 1 -w80 --no-dangling-links textlinksrc.h5)
ADD_H5_ERR_TEST (textlinksrc-nodangle-1 1 "no-dangling-links must be used" -w80 --no-dangling-links textlinksrc.h5)
# external dangling link - expected exit code 1
ADD_H5_TEST (textlinksrc-nodangle-2 1 -w80 --follow-symlinks --no-dangling-links textlinksrc.h5)
# soft dangling link - expected exit code 1
@@ -367,7 +368,7 @@
endif ()
# test for non-existing file
ADD_H5_ERR_TEST (nosuchfile 1 nosuchfile.h5)
ADD_H5_ERR_TEST (nosuchfile 1 "unable to open file" nosuchfile.h5)
# test for variable length data types in verbose mode
if (H5_WORDS_BIGENDIAN)


@@ -112,7 +112,7 @@
endif ()
endmacro ()
macro (ADD_H5_ERR_CMP testname resultfile resultcode)
macro (ADD_H5_ERR_CMP testname resultfile resultcode result_errcheck)
if (NOT HDF5_ENABLE_USING_MEMCHECKER)
add_test (
NAME H5CLEAR_CMP-${testname}
@@ -124,8 +124,9 @@
-D "TEST_OUTPUT=${testname}.out"
-D "TEST_EXPECT=${resultcode}"
-D "TEST_REFERENCE=${resultfile}.mty"
-D "TEST_ERRREF=${resultfile}.err"
-P "${HDF_RESOURCES_DIR}/runTest.cmake"
-D "TEST_ERRREF=${result_errcheck}"
-D "TEST_SKIP_COMPARE=true"
-P "${HDF_RESOURCES_DIR}/grepTest.cmake"
)
if ("H5CLEAR_CMP-${testname}" MATCHES "${HDF5_DISABLE_TESTS_REGEX}")
set_tests_properties (H5CLEAR_CMP-${testname} PROPERTIES DISABLED true)
@@ -443,11 +444,11 @@
ADD_H5_CMP (h5clr_usage_junk h5clear_usage 1 "" junk.h5)
ADD_H5_CMP (h5clr_usage_none h5clear_usage 1 "" orig_h5clear_sec2_v3.h5)
ADD_H5_CMP (h5clr_missing_file_m h5clear_missing_file 1 "-m")
ADD_H5_ERR_CMP (h5clr_open_fail_s h5clear_open_fail 1 "-s" junk.h5)
ADD_H5_ERR_CMP (h5clr_open_fail_s h5clear_open_fail 1 "h5clear error" "-s" junk.h5)
ADD_H5_CMP (h5clr_missing_file_ms h5clear_missing_file 1 "-m" "-s")
ADD_H5_ERR_CMP (h5clr_open_fail_ms h5clear_open_fail 1 "-m" "-s" junk.h5)
ADD_H5_ERR_CMP (h5clr_no_mdc_image_m h5clear_no_mdc_image 0 "-m" orig_h5clear_sec2_v2.h5)
ADD_H5_ERR_CMP (h5clr_no_mdc_image_ms h5clear_no_mdc_image 0 "-s" "-m" orig_h5clear_sec2_v0.h5)
ADD_H5_ERR_CMP (h5clr_open_fail_ms h5clear_open_fail 1 "h5clear error" "-m" "-s" junk.h5)
ADD_H5_ERR_CMP (h5clr_no_mdc_image_m h5clear_no_mdc_image 0 "h5clear warning" "-m" orig_h5clear_sec2_v2.h5)
ADD_H5_ERR_CMP (h5clr_no_mdc_image_ms h5clear_no_mdc_image 0 "h5clear warning" "-s" "-m" orig_h5clear_sec2_v0.h5)
#
#
#
@@ -478,8 +479,8 @@
#
#
# h5clear_mdc_image.h5 already has cache image removed earlier, verify the expected warning from h5clear:
ADD_H5_ERR_CMP (h5clr_mdc_image_m h5clear_no_mdc_image 0 "-m" mod_h5clear_mdc_image.h5)
ADD_H5_ERR_CMP (h5clr_mdc_image_sm h5clear_no_mdc_image 0 "-s" "-m" mod_h5clear_mdc_image2.h5)
ADD_H5_ERR_CMP (h5clr_mdc_image_m h5clear_no_mdc_image 0 "h5clear warning" "-m" mod_h5clear_mdc_image.h5)
ADD_H5_ERR_CMP (h5clr_mdc_image_sm h5clear_no_mdc_image 0 "h5clear warning" "-s" "-m" mod_h5clear_mdc_image2.h5)
#
#
#