mirror of https://github.com/HDFGroup/hdf5.git
Add parallel cdash script to daily-build workflow (#5032)
This commit is contained in:
parent 90429f5e7d
commit 3f86b4f228
.github/workflows/cmake-par-script.yml (new file, 372 lines, vendored)
@@ -0,0 +1,372 @@
name: hdf5 callable parallel report to cdash

# Triggers the hdf5 dev parallel ctest script workflow on a call from another workflow
on:
  workflow_call:
    inputs:
      snap_name:
        description: 'The name in the source tarballs'
        type: string
        required: false
        default: hdfsrc
      file_base:
        description: "The common base name of the source tarballs"
        required: true
        type: string
      use_environ:
        description: 'Environment to locate files'
        type: string
        required: true
        default: snapshots
      build_mode:
        description: "release vs. debug build"
        required: true
        type: string

permissions:
  contents: read

jobs:
  CMake_build_parallel_windows:
    runs-on: windows-latest
    strategy:
      matrix:
        mpi: [ 'msmpi', 'intelmpi' ]
    name: "Parallel ${{ matrix.mpi }} Windows-${{ inputs.build_mode }}"
    steps:
      - uses: actions/checkout@v4.1.7

      - name: Install Dependencies (Windows)
        run: choco install ninja

      - name: Install Dependencies
        uses: ssciwr/doxygen-install@v1
        with:
          version: "1.9.7"

      - name: Enable Developer Command Prompt
        uses: ilammy/msvc-dev-cmd@v1.13.0

      - name: Setup MPI (${{ matrix.mpi }})
        id: setup-mpi
        uses: mpi4py/setup-mpi@v1
        with:
          mpi: ${{ matrix.mpi }}

      - name: Set MPI Environment Variables (${{ matrix.mpi }})
        run: |
          echo "CC=mpicc" >> $GITHUB_ENV
          echo "FC=mpif90" >> $GITHUB_ENV

      - name: Set file base name (${{ matrix.mpi }})
        id: set-file-base
        run: |
          FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
          echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
          if [[ '${{ inputs.use_environ }}' == 'release' ]]
          then
            SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}")
          else
            SOURCE_NAME_BASE=$(echo "hdfsrc")
          fi
          echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT
        shell: bash

      # Get files created by release script
      - name: Get zip-tarball (Windows)
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: zip-tarball
          path: ${{ github.workspace }}

      - name: using powershell
        shell: pwsh
        run: Get-Location

      - name: List files for the space (Windows)
        run: |
          Get-ChildItem -Path ${{ github.workspace }}
          Get-ChildItem -Path ${{ runner.workspace }}
        shell: pwsh

      - name: Uncompress source (Windows)
        working-directory: ${{ github.workspace }}
        run: 7z x ${{ steps.set-file-base.outputs.FILE_BASE }}.zip
        shell: bash

      - name: Copy script files for the space (${{ matrix.mpi }})
        run: |
          Copy-Item -Path ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/CTestScript.cmake -Destination ${{ runner.workspace }}/hdf5
          Copy-Item -Path ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/HDF5config.cmake -Destination ${{ runner.workspace }}/hdf5
        shell: pwsh

      - name: List files for the hdf5 (${{ matrix.mpi }})
        run: |
          Get-ChildItem -Path ${{ runner.workspace }}/hdf5
        shell: pwsh

      - name: Create options file (${{ matrix.mpi }})
        uses: "DamianReeves/write-file-action@master"
        with:
          path: ${{ runner.workspace }}/hdf5/HDF5options.cmake
          write-mode: overwrite
          contents: |
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (ctest_test_args INCLUDE MPI_TEST)
            set (CTEST_TEST_TIMEOUT 300 CACHE STRING "Maximum test time allowed.")
            set (MODEL "MPI")
            set (GROUP "MPI")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }}")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_NUMPROC_FLAG:STRING=-n")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_MAX_NUMPROCS:STRING=2")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PARALLEL:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SUBFILING_VFD:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_CPP_LIB:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_FORTRAN:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PLUGIN_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DLIBAEC_USE_LOCALCONTENT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DZLIB_USE_LOCALCONTENT:BOOL=OFF")

      - name: Run ctest script (${{ matrix.mpi }})
        run: |
          cd "${{ runner.workspace }}/hdf5"
          ctest -S HDF5config.cmake,CTEST_SITE_EXT=GH-${{ github.event.repository.full_name }}-${{ matrix.mpi }},LOCAL_SUBMIT=ON,NINJA=TRUE,BUILD_GENERATOR=VS202264,CTEST_SOURCE_NAME=${{ steps.set-file-base.outputs.SOURCE_BASE }} -C ${{ inputs.build_mode }} -VV -O hdf5.log
        shell: bash
        continue-on-error: true

      # Save log files created by ctest script
      - name: Save log (${{ matrix.mpi }})
        uses: actions/upload-artifact@v4
        with:
          name: windows-${{ matrix.mpi }}-log
          path: ${{ runner.workspace }}/hdf5/hdf5.log

  CMake_build_parallel_linux:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        mpi: [ 'mpich', 'openmpi', 'intelmpi' ]
    name: "Parallel ${{ matrix.mpi }} Linux-${{ inputs.build_mode }}"
    steps:
      - name: Install CMake Dependencies (${{ matrix.mpi }})
        run: |
          sudo apt-get update
          sudo apt-get install ninja-build graphviz curl
          sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev
          sudo apt install gcc-12 g++-12 gfortran-12
          sudo apt install libaec0 libaec-dev

      - name: Install Dependencies
        uses: ssciwr/doxygen-install@v1
        with:
          version: "1.9.7"

      - name: Setup MPI (${{ matrix.mpi }})
        id: setup-mpi
        uses: mpi4py/setup-mpi@v1
        with:
          mpi: ${{ matrix.mpi }}

      - name: Set MPI Environment Variables (${{ matrix.mpi }})
        run: |
          echo "CC=mpicc" >> $GITHUB_ENV
          echo "FC=mpif90" >> $GITHUB_ENV

      - name: Set file base name (${{ matrix.mpi }})
        id: set-file-base
        run: |
          FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
          echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
          if [[ '${{ inputs.use_environ }}' == 'release' ]]
          then
            SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}")
          else
            SOURCE_NAME_BASE=$(echo "hdfsrc")
          fi
          echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT
        shell: bash

      # Get files created by release script
      - name: Get tgz-tarball (${{ matrix.mpi }})
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: tgz-tarball
          path: ${{ github.workspace }}

      - name: List files for the space (Linux)
        run: |
          ls -l ${{ github.workspace }}
          ls ${{ runner.workspace }}

      - name: Uncompress source (Linux)
        run: tar -zxvf ${{ github.workspace }}/${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz

      - name: Copy script files for the space (${{ matrix.mpi }})
        run: |
          cp ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/CTestScript.cmake ${{ runner.workspace }}/hdf5
          cp ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/HDF5config.cmake ${{ runner.workspace }}/hdf5

      - name: List files for the hdf5 (${{ matrix.mpi }})
        run: |
          ls ${{ runner.workspace }}/hdf5

      - name: Create options file (${{ matrix.mpi }})
        uses: "DamianReeves/write-file-action@master"
        with:
          path: ${{ runner.workspace }}/hdf5/HDF5options.cmake
          write-mode: overwrite
          contents: |
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (ctest_test_args INCLUDE MPI_TEST)
            set (CTEST_TEST_TIMEOUT 600 CACHE STRING "Maximum test time allowed.")
            set (MODEL "MPI")
            set (GROUP "MPI")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }}")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_NUMPROC_FLAG:STRING=-n")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_MAX_NUMPROCS:STRING=2")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PARALLEL:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SUBFILING_VFD:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_CPP_LIB:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_FORTRAN:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PLUGIN_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DLIBAEC_USE_LOCALCONTENT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DZLIB_USE_LOCALCONTENT:BOOL=OFF")

      - name: Run ctest script (${{ matrix.mpi }})
        run: |
          cd "${{ runner.workspace }}/hdf5"
          ctest -S HDF5config.cmake,CTEST_SITE_EXT=GH-${{ github.event.repository.full_name }}-${{ matrix.mpi }},LOCAL_SUBMIT=ON,NINJA=TRUE,BUILD_GENERATOR=Unix,CTEST_SOURCE_NAME=${{ steps.set-file-base.outputs.SOURCE_BASE }} -C ${{ inputs.build_mode }} -VV -O hdf5.log
        shell: bash
        continue-on-error: true

      # Save log files created by ctest script
      - name: Save log (${{ matrix.mpi }})
        uses: actions/upload-artifact@v4
        with:
          name: linux-${{ matrix.mpi }}-log
          path: ${{ runner.workspace }}/hdf5/hdf5.log

  CMake_build_parallel_intelmpi_macos:
    runs-on: macos-latest
    strategy:
      matrix:
        mpi: [ 'mpich', 'openmpi' ]
    name: "Parallel ${{ matrix.mpi }} macos-${{ inputs.build_mode }}"
    steps:
      - uses: actions/checkout@v4.1.7

      - name: Install Dependencies (MacOS_latest)
        run: brew install ninja

      - name: Install Dependencies
        uses: ssciwr/doxygen-install@v1
        with:
          version: "1.9.7"

      - name: Setup MPI (${{ matrix.mpi }})
        id: setup-mpi
        uses: mpi4py/setup-mpi@v1
        with:
          mpi: ${{ matrix.mpi }}

      - name: Set MPI Environment Variables (${{ matrix.mpi }})
        run: |
          echo "CC=mpicc" >> $GITHUB_ENV
          echo "FC=mpif90" >> $GITHUB_ENV

      - name: Set file base name (${{ matrix.mpi }})
        id: set-file-base
        run: |
          FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
          echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
          if [[ '${{ inputs.use_environ }}' == 'release' ]]
          then
            SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}")
          else
            SOURCE_NAME_BASE=$(echo "hdfsrc")
          fi
          echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT

      # Get files created by release script
      - name: Get tgz-tarball (${{ matrix.mpi }})
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: tgz-tarball
          path: ${{ github.workspace }}

      - name: List files for the space (${{ matrix.mpi }})
        run: |
          ls -l ${{ github.workspace }}
          ls ${{ runner.workspace }}

      - name: Uncompress source (${{ matrix.mpi }})
        run: tar -zxvf ${{ github.workspace }}/${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz

      - name: Copy script files for the space (${{ matrix.mpi }})
        run: |
          cp ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/CTestScript.cmake ${{ runner.workspace }}/hdf5
          cp ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/HDF5config.cmake ${{ runner.workspace }}/hdf5

      - name: List files for the hdf5 (${{ matrix.mpi }})
        run: |
          ls ${{ runner.workspace }}/hdf5

      - name: Create options file (${{ matrix.mpi }})
        uses: "DamianReeves/write-file-action@master"
        with:
          path: ${{ runner.workspace }}/hdf5/HDF5options.cmake
          write-mode: overwrite
          contents: |
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (ctest_test_args INCLUDE MPI_TEST)
            set (CTEST_TEST_TIMEOUT 600 CACHE STRING "Maximum test time allowed.")
            set (MODEL "MPI")
            set (GROUP "MPI")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }}")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_NUMPROC_FLAG:STRING=-n")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_MAX_NUMPROCS:STRING=2")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PARALLEL:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SUBFILING_VFD:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_CPP_LIB:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_FORTRAN:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PLUGIN_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DLIBAEC_USE_LOCALCONTENT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DZLIB_USE_LOCALCONTENT:BOOL=OFF")

      - name: Run ctest script (${{ matrix.mpi }})
        run: |
          cd "${{ runner.workspace }}/hdf5"
          ctest -S HDF5config.cmake,CTEST_SITE_EXT=GH-${{ github.event.repository.full_name }}-${{ matrix.mpi }},LOCAL_SUBMIT=ON,NINJA=TRUE,BUILD_GENERATOR=Unix,CTEST_SOURCE_NAME=${{ steps.set-file-base.outputs.SOURCE_BASE }} -C ${{ inputs.build_mode }} -VV -O hdf5.log
        shell: bash
        continue-on-error: true

      # Save log files created by ctest script
      - name: Save log (${{ matrix.mpi }})
        uses: actions/upload-artifact@v4
        with:
          name: macos-${{ matrix.mpi }}-log
          path: ${{ runner.workspace }}/hdf5/hdf5.log
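Editor's note: the "Run ctest script" steps above drive everything through `ctest -S HDF5config.cmake,<comma-separated options>`. CTest hands the text after the first comma to the dashboard script as CTEST_SCRIPT_ARG. The following is a minimal, hedged sketch of how such options could be split into variables; it is illustrative only and is not the actual HDF5config.cmake, which may parse its arguments differently.

# Hedged sketch: parse "KEY=VALUE,KEY=VALUE,..." passed via "ctest -S script,args".
cmake_minimum_required (VERSION 3.18)

if (DEFINED CTEST_SCRIPT_ARG)
  # e.g. CTEST_SCRIPT_ARG == "CTEST_SITE_EXT=GH-HDFGroup/hdf5-mpich,LOCAL_SUBMIT=ON,NINJA=TRUE"
  string (REPLACE "," ";" _script_args "${CTEST_SCRIPT_ARG}")
  foreach (_arg IN LISTS _script_args)
    if (_arg MATCHES "^([^=]+)=(.*)$")
      # Turn each KEY=VALUE pair into a variable visible to the rest of the script.
      set (${CMAKE_MATCH_1} "${CMAKE_MATCH_2}")
    endif ()
  endforeach ()
endif ()

message (STATUS "site suffix: ${CTEST_SITE_EXT}, generator hint: ${BUILD_GENERATOR}")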
.github/workflows/cmake-par-source.yml (new file, 230 lines, vendored)
@@ -0,0 +1,230 @@
name: hdf5 dev parallel from source ctest script runs

# Triggers the workflow on a call from another workflow
on:
  workflow_call:
    inputs:
      snap_name:
        description: 'The name in the source tarballs'
        type: string
        required: false
        default: hdfsrc
      file_base:
        description: "The common base name of the source tarballs"
        required: true
        type: string
      use_environ:
        description: 'Environment to locate files'
        type: string
        required: true
        default: snapshots
      build_mode:
        description: "release vs. debug build"
        required: true
        type: string

permissions:
  contents: read

jobs:
  CMake_build_parallel_src_openmpi:
    name: "Parallel OpenMPI GCC-${{ inputs.build_mode }}"
    runs-on: ubuntu-latest
    steps:
      - name: Install CMake Dependencies (OpenMPI)
        run: |
          sudo apt-get update
          sudo apt-get install ninja-build graphviz curl
          sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev
          sudo apt install gcc-12 g++-12 gfortran-12
          sudo apt install libaec0 libaec-dev
          sudo apt-get install libopenmpi-dev
          echo "CC=mpicc" >> $GITHUB_ENV
          echo "FC=mpif90" >> $GITHUB_ENV

      - name: Install Dependencies
        uses: ssciwr/doxygen-install@v1
        with:
          version: "1.9.7"

      - name: Set file base name (OpenMPI)
        id: set-file-base
        run: |
          FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
          echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
          if [[ '${{ inputs.use_environ }}' == 'release' ]]
          then
            SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}")
          else
            SOURCE_NAME_BASE=$(echo "hdfsrc")
          fi
          echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT

      # Get files created by release script
      - name: Get tgz-tarball (OpenMPI)
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: tgz-tarball
          path: ${{ github.workspace }}

      - name: List files for the space (OpenMPI)
        run: |
          ls -l ${{ github.workspace }}
          ls ${{ runner.workspace }}

      - name: Uncompress source (OpenMPI)
        run: tar -zxvf ${{ github.workspace }}/${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz

      - name: Copy script files for the space (OpenMPI)
        run: |
          cp ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/CTestScript.cmake ${{ runner.workspace }}/hdf5
          cp ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/HDF5config.cmake ${{ runner.workspace }}/hdf5

      - name: List files for the hdf5 (OpenMPI)
        run: |
          ls ${{ runner.workspace }}/hdf5

      - name: Create options file (OpenMPI)
        uses: "DamianReeves/write-file-action@master"
        with:
          path: ${{ runner.workspace }}/hdf5/HDF5options.cmake
          write-mode: overwrite
          contents: |
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (ctest_test_args INCLUDE MPI_TEST)
            set (MODEL "MPI")
            set (GROUP "MPI")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_NUMPROC_FLAG:STRING=-n")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_MAX_NUMPROCS:STRING=2")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PARALLEL:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SUBFILING_VFD:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_CPP_LIB:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_FORTRAN:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PLUGIN_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DLIBAEC_USE_LOCALCONTENT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DZLIB_USE_LOCALCONTENT:BOOL=OFF")

      - name: Run ctest script (OpenMPI)
        run: |
          cd "${{ runner.workspace }}/hdf5"
          ctest -S HDF5config.cmake,CTEST_SITE_EXT=GH-${{ github.event.repository.full_name }}-OpenMPI-s,LOCAL_SUBMIT=ON,NINJA=TRUE,BUILD_GENERATOR=Unix,CTEST_SOURCE_NAME=${{ steps.set-file-base.outputs.SOURCE_BASE }} -C ${{ inputs.build_mode }} -VV -O hdf5.log
        shell: bash
        continue-on-error: true

      # Save log files created by ctest script
      - name: Save log (OpenMPI)
        uses: actions/upload-artifact@v4
        with:
          name: openmpi-log
          path: ${{ runner.workspace }}/hdf5/hdf5.log
          if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`

  CMake_build_parallel_src_mpich:
    name: "Parallel Mpich GCC-${{ inputs.build_mode }}"
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4.1.7

      - name: Install Linux Dependencies (MPICH)
        run: |
          sudo apt-get update
          sudo apt-get install ninja-build doxygen graphviz
          sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev
          sudo apt install gcc-12 g++-12 gfortran-12
          sudo apt install libaec0 libaec-dev
          sudo apt-get install libmpich-dev
          echo "CC=mpicc" >> $GITHUB_ENV
          echo "FC=mpif90" >> $GITHUB_ENV

      - name: Install Dependencies
        uses: ssciwr/doxygen-install@v1
        with:
          version: "1.9.7"

      - name: Set file base name (MPICH)
        id: set-file-base
        run: |
          FILE_NAME_BASE=$(echo "${{ inputs.file_base }}")
          echo "FILE_BASE=$FILE_NAME_BASE" >> $GITHUB_OUTPUT
          if [[ '${{ inputs.use_environ }}' == 'release' ]]
          then
            SOURCE_NAME_BASE=$(echo "${{ inputs.snap_name }}")
          else
            SOURCE_NAME_BASE=$(echo "hdfsrc")
          fi
          echo "SOURCE_BASE=$SOURCE_NAME_BASE" >> $GITHUB_OUTPUT

      # Get files created by release script
      - name: Get tgz-tarball (MPICH)
        uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
        with:
          name: tgz-tarball
          path: ${{ github.workspace }}

      - name: List files for the space (MPICH)
        run: |
          ls -l ${{ github.workspace }}
          ls ${{ runner.workspace }}

      - name: Uncompress source (MPICH)
        run: tar -zxvf ${{ github.workspace }}/${{ steps.set-file-base.outputs.FILE_BASE }}.tar.gz

      - name: Copy script files for the space (MPICH)
        run: |
          cp ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/CTestScript.cmake ${{ runner.workspace }}/hdf5
          cp ${{ github.workspace }}/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/scripts/HDF5config.cmake ${{ runner.workspace }}/hdf5

      - name: List files for the hdf5 (MPICH)
        run: |
          ls ${{ runner.workspace }}/hdf5

      - name: Create options file (MPICH)
        uses: "DamianReeves/write-file-action@master"
        with:
          path: ${{ runner.workspace }}/hdf5/HDF5options.cmake
          write-mode: overwrite
          contents: |
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (ctest_test_args INCLUDE MPI_TEST)
            set (MODEL "MPI")
            set (GROUP "MPI")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_NUMPROC_FLAG:STRING=-n")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DMPIEXEC_MAX_NUMPROCS:STRING=2")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PARALLEL:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SUBFILING_VFD:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_CPP_LIB:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_FORTRAN:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_PLUGIN_SUPPORT:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DLIBAEC_USE_LOCALCONTENT:BOOL=OFF")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DZLIB_USE_LOCALCONTENT:BOOL=OFF")

      - name: Run ctest script (MPICH)
        run: |
          cd "${{ runner.workspace }}/hdf5"
          ctest -S HDF5config.cmake,CTEST_SITE_EXT=GH-${{ github.event.repository.full_name }}-MPICH-s,LOCAL_SUBMIT=ON,NINJA=TRUE,BUILD_GENERATOR=Unix,CTEST_SOURCE_NAME=${{ steps.set-file-base.outputs.SOURCE_BASE }} -C ${{ inputs.build_mode }} -VV -O hdf5.log
        shell: bash
        continue-on-error: true

      # Save log files created by ctest script
      - name: Save log (MPICH)
        uses: actions/upload-artifact@v4
        with:
          name: mpich-log
          path: ${{ runner.workspace }}/hdf5/hdf5.log
          if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
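Editor's note: both new workflows write an HDF5options.cmake file next to the copied CTestScript.cmake/HDF5config.cmake and rely on the driver script to pick it up. Below is a hedged sketch of one plausible way a CTest driver could include such an options file and forward ADD_BUILD_OPTIONS to the configure step; the variable names, directories, and flow are assumptions for illustration, not the actual HDF5 scripts.

# Hedged sketch of a CTest dashboard driver consuming an HDF5options.cmake-style file.
cmake_minimum_required (VERSION 3.18)

set (CTEST_CMAKE_GENERATOR "Ninja")
set (CTEST_SOURCE_DIRECTORY "${CTEST_SCRIPT_DIRECTORY}/hdfsrc")   # assumed layout
set (CTEST_BINARY_DIRECTORY "${CTEST_SCRIPT_DIRECTORY}/build")

# Pull in per-run settings (MODEL, GROUP, CTEST_TEST_TIMEOUT, ADD_BUILD_OPTIONS, ...).
if (EXISTS "${CTEST_SCRIPT_DIRECTORY}/HDF5options.cmake")
  include ("${CTEST_SCRIPT_DIRECTORY}/HDF5options.cmake")
endif ()

set (CTEST_SITE "github-actions")                                 # placeholder site name
set (CTEST_BUILD_NAME "parallel-${SITE_BUILDNAME_SUFFIX}")        # suffix comes from the options file

ctest_start ("${MODEL}" GROUP "${GROUP}")

# ADD_BUILD_OPTIONS is a single space-separated string of -D flags; split it into
# a list so ctest_configure() receives one option per element.
separate_arguments (_configure_opts UNIX_COMMAND "${ADD_BUILD_OPTIONS}")
ctest_configure (BUILD "${CTEST_BINARY_DIRECTORY}"
                 SOURCE "${CTEST_SOURCE_DIRECTORY}"
                 OPTIONS "${_configure_opts}")
ctest_build (BUILD "${CTEST_BINARY_DIRECTORY}")
ctest_test (BUILD "${CTEST_BINARY_DIRECTORY}" INCLUDE "MPI_TEST")
ctest_submit ()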
.github/workflows/cmake-script.yml (14 changes, vendored)
@@ -96,6 +96,8 @@ jobs:
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (MODEL "GHDaily")
            set (GROUP "GHDaily")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=ON")
@@ -185,6 +187,8 @@ jobs:
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (MODEL "GHDaily")
            set (GROUP "GHDaily")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=ON")
@@ -287,6 +291,8 @@ jobs:
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (MODEL "GHDaily")
            set (GROUP "GHDaily")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_CPP_LIB:BOOL=ON")
@@ -373,6 +379,8 @@ jobs:
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (MODEL "GHDaily")
            set (GROUP "GHDaily")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_CPP_LIB:BOOL=ON")
@@ -474,6 +482,8 @@ jobs:
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            #set (CMAKE_GENERATOR_TOOLSET "Intel C++ Compiler 2024,fortran=ifx")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (MODEL "GHDaily")
            set (GROUP "GHDaily")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DCMAKE_TOOLCHAIN_FILE:STRING=config/toolchain/intel.cmake")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=ON")
@@ -570,6 +580,8 @@ jobs:
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (MODEL "GHDaily")
            set (GROUP "GHDaily")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_JAVA:BOOL=ON")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_BUILD_CPP_LIB:BOOL=ON")
@@ -665,6 +677,8 @@ jobs:
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (MODEL "GHDaily")
            set (GROUP "GHDaily")
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            #set (CMAKE_GENERATOR_TOOLSET "clang")
.github/workflows/cygwin-cmake.yml (2 changes, vendored)
@@ -89,6 +89,8 @@ jobs:
            set (CTEST_DROP_SITE_INIT "my.cdash.org")
            # Change following line to submit to your CDash dashboard to a different CDash project
            #set (CTEST_DROP_LOCATION_INIT "/submit.php?project=HDF5")
            set (MODEL "GHDaily")
            set (GROUP "GHDaily")
            set (SITE_BUILDNAME_SUFFIX "${{ steps.set-file-base.outputs.FILE_BASE }}")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} --log-level=VERBOSE")
            set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_GENERATE_HEADERS=OFF")
.github/workflows/daily-build.yml (18 changes, vendored)
@@ -76,6 +76,24 @@ jobs:
      use_environ: snapshots
    if: ${{ ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-old-names.outputs.run-ignore == 'ignore')) }}

  call-workflow-par-script:
    needs: [get-old-names, call-workflow-tarball]
    uses: ./.github/workflows/cmake-par-script.yml
    with:
      file_base: ${{ needs.call-workflow-tarball.outputs.file_base }}
      use_environ: snapshots
      build_mode: "Release"
    if: ${{ ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-old-names.outputs.run-ignore == 'ignore')) }}

  call-workflow-par-source:
    needs: [get-old-names, call-workflow-tarball]
    uses: ./.github/workflows/cmake-par-source.yml
    with:
      file_base: ${{ needs.call-workflow-tarball.outputs.file_base }}
      use_environ: snapshots
      build_mode: "Release"
    if: ${{ ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-old-names.outputs.run-ignore == 'ignore')) }}

  call-workflow-sanitizers:
    needs: [get-old-names, call-workflow-tarball]
    uses: ./.github/workflows/cmake-analysis.yml
@@ -66,6 +66,29 @@
#define H5TEST_DLLVAR extern
#endif /* H5TEST_DLL */

#if defined(hdf5_testpar_shared_EXPORTS)
#if defined(_MSC_VER) /* MSVC Compiler Case */
#define H5TESTPAR_DLL __declspec(dllexport)
#define H5TESTPAR_DLLVAR extern __declspec(dllexport)
#elif (__GNUC__ >= 4) /* GCC 4.x has support for visibility options */
#define H5TESTPAR_DLL __attribute__((visibility("default")))
#define H5TESTPAR_DLLVAR extern __attribute__((visibility("default")))
#endif
#else
#if defined(_MSC_VER) /* MSVC Compiler Case */
#define H5TESTPAR_DLL __declspec(dllimport)
#define H5TESTPAR_DLLVAR __declspec(dllimport)
#elif (__GNUC__ >= 4) /* GCC 4.x has support for visibility options */
#define H5TESTPAR_DLL __attribute__((visibility("default")))
#define H5TESTPAR_DLLVAR extern __attribute__((visibility("default")))
#endif
#endif

#ifndef H5TESTPAR_DLL
#define H5TESTPAR_DLL
#define H5TESTPAR_DLLVAR extern
#endif /* H5TESTPAR_DLL */

#if defined(hdf5_tools_shared_EXPORTS)
#if defined(_MSC_VER) /* MSVC Compiler Case */
#define H5TOOLS_DLL __declspec(dllexport)
@@ -232,6 +255,8 @@
#define H5_DLLVAR extern
#define H5TEST_DLL
#define H5TEST_DLLVAR extern
#define H5TESTPAR_DLL
#define H5TESTPAR_DLLVAR extern
#define H5TOOLS_DLL
#define H5TOOLS_DLLVAR extern
#define H5_DLLCPP
@@ -47,7 +47,7 @@ set (HDF5_TESTPAR_COMPILE_DEFS_PRIVATE
if (BUILD_STATIC_LIBS)
  add_library (${HDF5_TEST_PAR_LIB_TARGET} STATIC ${TEST_PAR_LIB_SOURCES} ${TEST_PAR_LIB_HEADERS})
  target_include_directories (${HDF5_TEST_PAR_LIB_TARGET}
      PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_PAR_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
      PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_PAR_BINARY_DIR};${MPI_C_INCLUDE_DIRS}"
      INTERFACE "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
  )
  target_compile_options(${HDF5_TEST_PAR_LIB_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}")
@@ -85,7 +85,7 @@ endif ()
if (BUILD_SHARED_LIBS)
  add_library (${HDF5_TEST_PAR_LIBSH_TARGET} SHARED ${TEST_PAR_LIB_SOURCES} ${TEST_PAR_LIB_HEADERS})
  target_include_directories (${HDF5_TEST_PAR_LIBSH_TARGET}
      PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_PAR_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
      PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_TEST_SRC_DIR};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_PAR_BINARY_DIR};${MPI_C_INCLUDE_DIRS}"
      INTERFACE "$<INSTALL_INTERFACE:$<INSTALL_PREFIX>/include>"
  )
  target_compile_options(${HDF5_TEST_PAR_LIBSH_TARGET} PRIVATE "${HDF5_CMAKE_C_FLAGS}")
@@ -138,20 +138,20 @@ endif ()

#-- Adding test for testhdf5
add_executable (testphdf5 ${testphdf5_SOURCES})
target_include_directories (testphdf5
    PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${MPI_C_INCLUDE_DIRS}"
)
target_compile_options(testphdf5 PRIVATE "${HDF5_CMAKE_C_FLAGS}")
target_compile_definitions(testphdf5 PRIVATE "${HDF5_TESTPAR_COMPILE_DEFS_PRIVATE}")
target_include_directories (testphdf5
    PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
)
if (NOT BUILD_SHARED_LIBS)
  TARGET_C_PROPERTIES (testphdf5 STATIC)
  target_link_libraries (testphdf5
      PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_TEST_PAR_LIB_TARGET} ${HDF5_LIB_TARGET} "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>"
      PRIVATE ${HDF5_TEST_LIB_TARGET} ${HDF5_TEST_PAR_LIB_TARGET} ${HDF5_LIB_TARGET} MPI::MPI_C
  )
else ()
  TARGET_C_PROPERTIES (testphdf5 SHARED)
  target_link_libraries (testphdf5
      PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_TEST_PAR_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} "$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:MPI::MPI_C>"
      PRIVATE ${HDF5_TEST_LIBSH_TARGET} ${HDF5_TEST_PAR_LIBSH_TARGET} ${HDF5_LIBSH_TARGET} MPI::MPI_C
  )
endif ()
set_target_properties (testphdf5 PROPERTIES FOLDER test/par)
@@ -165,11 +165,11 @@ endif ()

macro (ADD_H5P_EXE file)
  add_executable (${file} ${HDF5_TEST_PAR_SOURCE_DIR}/${file}.c)
  target_include_directories (${file}
      PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${MPI_C_INCLUDE_DIRS}"
  )
  target_compile_options(${file} PRIVATE "${HDF5_CMAKE_C_FLAGS}")
  target_compile_definitions(${file} PRIVATE "${HDF5_TESTPAR_COMPILE_DEFS_PRIVATE}")
  target_include_directories (${file}
      PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
  )
  if (NOT BUILD_SHARED_LIBS)
    TARGET_C_PROPERTIES (${file} STATIC)
    target_link_libraries (${file}
@@ -193,7 +193,7 @@ macro (ADD_H5P_EXE file)
  if (HDF5_ENABLE_FORMATTERS)
    clang_format (HDF5_TEST_PAR_${file}_FORMAT ${file})
  endif ()
endmacro (ADD_H5P_EXE file)
endmacro ()

set (H5P_TESTS
    t_mpi
@@ -17,17 +17,17 @@
##############################################################################
# Remove any output file left over from previous test run
add_test (
    NAME MPI_TEST-clear-testphdf5-objects
    NAME MPI_TEST-testphdf5-clear-objects
    COMMAND ${CMAKE_COMMAND} -E remove ParaTest.h5
    WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR}
)
set_tests_properties (MPI_TEST-clear-testphdf5-objects PROPERTIES FIXTURES_SETUP par_clear_testphdf5)
set_tests_properties (MPI_TEST-testphdf5-clear-objects PROPERTIES FIXTURES_SETUP par_clear_testphdf5)
add_test (
    NAME MPI_TEST-clean-testphdf5-objects
    NAME MPI_TEST-testphdf5-clean-objects
    COMMAND ${CMAKE_COMMAND} -E remove ParaTest.h5
    WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR}
)
set_tests_properties (MPI_TEST-clean-testphdf5-objects PROPERTIES FIXTURES_CLEANUP par_clear_testphdf5)
set_tests_properties (MPI_TEST-testphdf5-clean-objects PROPERTIES FIXTURES_CLEANUP par_clear_testphdf5)

set (SKIP_tests
    cchunk1
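Editor's note: the renamed clear/clean tests above are CTest fixture tests (FIXTURES_SETUP / FIXTURES_CLEANUP on the "par_clear_testphdf5" fixture); a consuming test opts in with FIXTURES_REQUIRED so the file removal runs before and after it. A minimal sketch follows; the test name and launch command are illustrative assumptions, not taken from this diff.

# Hedged sketch of how a test could consume the par_clear_testphdf5 fixture.
add_test (NAME MPI_TEST_testphdf5
          COMMAND ${MPIEXEC_EXECUTABLE} ${MPIEXEC_NUMPROC_FLAG} 2 $<TARGET_FILE:testphdf5>)
set_tests_properties (MPI_TEST_testphdf5 PROPERTIES
    FIXTURES_REQUIRED par_clear_testphdf5            # runs the -clear/-clean tests around it
    WORKING_DIRECTORY ${HDF5_TEST_PAR_BINARY_DIR}
)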
@@ -155,10 +155,10 @@ typedef enum {
extern "C" {
#endif

hid_t create_faccess_plist(MPI_Comm comm, MPI_Info info, int l_facc_type);
H5TESTPAR_DLL hid_t create_faccess_plist(MPI_Comm comm, MPI_Info info, int l_facc_type);

void point_set(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[], size_t num_points,
               hsize_t coords[], int order);
H5TESTPAR_DLL void point_set(hsize_t start[], hsize_t count[], hsize_t stride[], hsize_t block[],
                             size_t num_points, hsize_t coords[], int order);

#ifdef __cplusplus
}