Add GitHub actions for testing VOL connectors (#3849)

* Fix issue with HDF5_VOL_ALLOW_EXTERNAL CMake variable

* Add initial API test workflow

* Initialize parallel testing with MPI_THREAD_MULTIPLE when testing API

* Add CMake variable to allow specifying a VOL connector's package name

* Remove call to MPI_Init in serial API tests

While previously necessary, it now interferes with VOL connectors that
may need to be initialized with MPI_THREAD_MULTIPLE.
This commit is contained in:
jhendersonHDF 2023-11-15 08:20:50 -06:00 committed by GitHub
parent e807dee0fd
commit c779464bfe
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
19 changed files with 1087 additions and 122 deletions

48
.github/workflows/vol.yml vendored Normal file
View File

@ -0,0 +1,48 @@
name: hdf5 VOL connectors CI

# Run VOL connector CI daily at 06:00 CDT (11:00 UTC)
on:
  workflow_dispatch:
  schedule:
    - cron: "0 11 * * *"

permissions:
  contents: read

jobs:
  # Build and test individual VOL connectors by using HDF5's
  # CMake FetchContent functionality.
  #hdf5_vol_daos_fetchcontent:
  #  uses: ./.github/workflows/vol_daos.yml
  #  with:
  #    build_mode: "Release"

  hdf5_vol_rest_fetchcontent:
    uses: ./.github/workflows/vol_rest.yml
    with:
      build_mode: "Release"

  hdf5_vol_ext_passthru_fetchcontent:
    uses: ./.github/workflows/vol_ext_passthru.yml
    with:
      build_mode: "Release"

  hdf5_vol_async_fetchcontent:
    uses: ./.github/workflows/vol_async.yml
    with:
      build_mode: "Release"

  hdf5_vol_cache_fetchcontent:
    uses: ./.github/workflows/vol_cache.yml
    with:
      build_mode: "Release"

  hdf5_vol_adios2:
    uses: ./.github/workflows/vol_adios2.yml
    with:
      build_mode: "Release"

  hdf5_vol_log:
    uses: ./.github/workflows/vol_log.yml
    with:
      build_mode: "Release"

119
.github/workflows/vol_adios2.yml vendored Normal file
View File

@ -0,0 +1,119 @@
name: Test HDF5 ADIOS2 VOL

on:
  workflow_call:
    inputs:
      build_mode:
        description: "CMake Build type"
        required: true
        type: string

permissions:
  contents: read

env:
  # Fixed ADIOS2 commit so its (slow) build can be cached; quoted so the
  # SHA is never re-typed by YAML tooling
  ADIOS2_COMMIT: "3adf20a929b69c23312a6b5f3cccc49376df77e8"
  ADIOS2_COMMIT_SHORT: "3adf20a"

jobs:
  build_and_test:
    name: Test HDF5 ADIOS2 VOL connector
    runs-on: ubuntu-latest
    steps:
      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt-get install automake autoconf libtool libtool-bin libopenmpi-dev

      - name: Checkout HDF5
        uses: actions/checkout@v4
        with:
          repository: HDFGroup/hdf5
          path: hdf5

      - name: Configure HDF5
        shell: bash
        run: |
          mkdir ${{ github.workspace }}/hdf5/build
          cd ${{ github.workspace }}/hdf5/build
          cmake -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }} \
            -DCMAKE_INSTALL_PREFIX=${{ runner.workspace }}/hdf5_build \
            -DBUILD_STATIC_LIBS=OFF \
            -DHDF5_TEST_API:BOOL=ON \
            -DHDF5_TEST_API_ENABLE_ASYNC:BOOL=ON \
            -DHDF5_ENABLE_PARALLEL:BOOL=ON \
            -DHDF5_ENABLE_THREADSAFE:BOOL=ON \
            -DALLOW_UNSUPPORTED:BOOL=ON \
            -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF \
            ${{ github.workspace }}/hdf5
          cat src/libhdf5.settings

      - name: Build and install HDF5
        shell: bash
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          cmake --build . --parallel 3 --config ${{ inputs.build_mode }}
          cmake --install .
          echo "LD_LIBRARY_PATH=${{ github.workspace }}/hdf5/build/bin" >> $GITHUB_ENV
          echo "PATH=${{ runner.workspace }}/hdf5_build/bin:${PATH}" >> $GITHUB_ENV

      # Since the HDF5 ADIOS2 VOL connector is part of the ADIOS2 repository,
      # it is difficult to use CMake's FetchContent functionality to fetch
      # and build the ADIOS2 connector. Also, since building of ADIOS2 takes
      # a while, it isn't ideal to have to rebuild it every time we want to
      # test against changes in HDF5 or the VOL connector. Therefore, just
      # use a fixed commit for the build of ADIOS2 so we can cache that and
      # still test the connector against changes in HDF5.
      - name: Restore ADIOS2 (${{ env.ADIOS2_COMMIT_SHORT }}) installation cache
        id: cache-adios2
        uses: actions/cache@v3
        with:
          path: ${{ runner.workspace }}/adios2-${{ env.ADIOS2_COMMIT_SHORT }}-install
          key: ${{ runner.os }}-${{ runner.arch }}-adios2-${{ env.ADIOS2_COMMIT }}-${{ inputs.build_mode }}-cache

      - name: Checkout ADIOS2 (${{ env.ADIOS2_COMMIT_SHORT }})
        if: ${{ steps.cache-adios2.outputs.cache-hit != 'true' }}
        uses: actions/checkout@v4
        with:
          repository: ornladios/ADIOS2
          ref: ${{ env.ADIOS2_COMMIT }}
          path: adios2

      - name: Install ADIOS2 (${{ env.ADIOS2_COMMIT_SHORT }})
        if: ${{ steps.cache-adios2.outputs.cache-hit != 'true' }}
        env:
          CXX: mpic++
          CC: mpicc
        run: |
          mkdir adios2/build
          cd adios2/build
          cmake -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }} \
            -DCMAKE_INSTALL_PREFIX=${{ runner.workspace }}/adios2-${{ env.ADIOS2_COMMIT_SHORT }}-install \
            -DADIOS2_USE_HDF5:BOOL=ON \
            -DHDF5_ROOT=${{ runner.workspace }}/hdf5_build/ \
            ..
          make -j2
          make -j2 install

      - name: Cache ADIOS2 (${{ env.ADIOS2_COMMIT_SHORT }}) installation
        uses: actions/cache/save@v3
        if: ${{ steps.cache-adios2.outputs.cache-hit != 'true' }}
        with:
          path: ${{ runner.workspace }}/adios2-${{ env.ADIOS2_COMMIT_SHORT }}-install
          key: ${{ runner.os }}-${{ runner.arch }}-adios2-${{ env.ADIOS2_COMMIT }}-${{ inputs.build_mode }}-cache

      - name: Set environment variables for tests
        run: |
          echo "HDF5_PLUGIN_PATH=${{ runner.workspace }}/adios2-${{ env.ADIOS2_COMMIT_SHORT }}-install/lib" >> $GITHUB_ENV
          echo "HDF5_VOL_CONNECTOR=ADIOS2_VOL" >> $GITHUB_ENV

      # Skip parallel testing for now as it appears to hang
      - name: Test HDF5 ADIOS2 VOL connector with HDF5 API tests
        working-directory: ${{ github.workspace }}/hdf5/build
        # Don't test the ADIOS2 VOL connector with the HDF5 API tests yet,
        # as it doesn't currently pass all the tests. Leave the step in,
        # but skip it to leave an indication that this should be re-enabled
        # in the future.
        if: false
        run: |
          ctest --build-config ${{ inputs.build_mode }} -VV -R "h5_api" -E "parallel" .

94
.github/workflows/vol_async.yml vendored Normal file
View File

@ -0,0 +1,94 @@
name: Test HDF5 async VOL

on:
  workflow_call:
    inputs:
      build_mode:
        description: "CMake Build type"
        required: true
        type: string

permissions:
  contents: read

jobs:
  build_and_test:
    name: Test HDF5 asynchronous I/O VOL connector
    runs-on: ubuntu-latest
    steps:
      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt-get install automake autoconf libtool libtool-bin libopenmpi-dev

      - name: Checkout HDF5
        uses: actions/checkout@v4
        with:
          repository: HDFGroup/hdf5
          path: hdf5

      - name: Checkout Argobots
        uses: actions/checkout@v4
        with:
          repository: pmodels/argobots
          path: abt

      # Argobots builds and installs fairly quickly,
      # so no caching is currently performed here
      - name: Install Argobots
        working-directory: ${{ github.workspace }}/abt
        run: |
          ./autogen.sh
          ./configure --prefix=/usr/local
          make -j2
          sudo make -j2 install

      - name: Configure HDF5 with asynchronous I/O VOL connector
        shell: bash
        run: |
          mkdir ${{ github.workspace }}/hdf5/build
          cd ${{ github.workspace }}/hdf5/build
          cmake -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }} \
            -DCMAKE_INSTALL_PREFIX=${{ runner.workspace }}/hdf5_build \
            -DBUILD_STATIC_LIBS=OFF \
            -DHDF5_TEST_API:BOOL=ON \
            -DHDF5_TEST_API_ENABLE_ASYNC:BOOL=ON \
            -DHDF5_ENABLE_PARALLEL:BOOL=ON \
            -DHDF5_ENABLE_THREADSAFE:BOOL=ON \
            -DALLOW_UNSUPPORTED:BOOL=ON \
            -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF \
            -DHDF5_VOL_ALLOW_EXTERNAL:STRING="GIT" \
            -DHDF5_VOL_URL01:STRING="https://github.com/hpc-io/vol-async.git" \
            -DHDF5_VOL_VOL-ASYNC_BRANCH:STRING="develop" \
            -DHDF5_VOL_VOL-ASYNC_NAME:STRING="async under_vol=0\;under_info={}" \
            -DHDF5_VOL_VOL-ASYNC_TEST_PARALLEL:BOOL=ON \
            ${{ github.workspace }}/hdf5
          cat src/libhdf5.settings

      - name: Build HDF5 and asynchronous I/O VOL connector
        shell: bash
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          cmake --build . --parallel 3 --config ${{ inputs.build_mode }}
          echo "LD_LIBRARY_PATH=/usr/local/lib:${{ github.workspace }}/hdf5/build/bin" >> $GITHUB_ENV

      # Workaround for asynchronous I/O VOL CMake issue
      - name: Copy testing files
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          cp bin/async_test* ./_deps/vol-async-build/test

      - name: Test HDF5 asynchronous I/O VOL connector with external tests
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          ctest --build-config ${{ inputs.build_mode }} -VV -R "async_test" .

      - name: Test HDF5 asynchronous I/O VOL connector with HDF5 API tests
        working-directory: ${{ github.workspace }}/hdf5/build
        # Don't test the Async VOL connector with the HDF5 API tests yet,
        # as it doesn't currently pass all the tests. Leave the step in,
        # but skip it to leave an indication that this should be re-enabled
        # in the future.
        if: false
        run: |
          ctest --build-config ${{ inputs.build_mode }} -VV -R "HDF5_VOL_vol-async" .

164
.github/workflows/vol_cache.yml vendored Normal file
View File

@ -0,0 +1,164 @@
name: Test HDF5 cache VOL

on:
  workflow_call:
    inputs:
      build_mode:
        description: "CMake Build type"
        required: true
        type: string

permissions:
  contents: read

jobs:
  build_and_test:
    strategy:
      matrix:
        name:
          - "Test HDF5 cache VOL connector"
          - "Test HDF5 cache VOL connector atop async VOL connector"
        async: [false, true]
        # Pair each job name with exactly one value of 'async'
        exclude:
          - name: "Test HDF5 cache VOL connector"
            async: true
          - name: "Test HDF5 cache VOL connector atop async VOL connector"
            async: false
    runs-on: ubuntu-latest
    steps:
      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt-get install automake autoconf libtool libtool-bin libopenmpi-dev

      - name: Checkout HDF5
        uses: actions/checkout@v4
        with:
          repository: HDFGroup/hdf5
          path: hdf5

      - name: Checkout Argobots
        uses: actions/checkout@v4
        with:
          repository: pmodels/argobots
          path: abt

      # Argobots builds and installs fairly quickly,
      # so no caching is currently performed here
      - name: Install Argobots
        working-directory: ${{ github.workspace }}/abt
        run: |
          ./autogen.sh
          ./configure --prefix=/usr/local
          make -j2
          sudo make -j2 install

      - name: Set environment variables for configuration (cache VOL only)
        if: ${{ ! matrix.async }}
        run: |
          echo "HDF5_VOL_CACHE_TEST_NAME=cache_ext config=$GITHUB_WORKSPACE/config1.cfg\;under_vol=0\;under_info={}\;" >> $GITHUB_ENV

      - name: Set environment variables for configuration (cache VOL atop async VOL)
        if: ${{ matrix.async }}
        run: |
          echo "HDF5_VOL_CACHE_TEST_NAME=cache_ext config=$GITHUB_WORKSPACE/config1.cfg\;under_vol=512\;under_info={under_vol=0\;under_info={}}\;" >> $GITHUB_ENV

      # Define ASYNC_INCLUDE_DIR, ASYNC_INCLUDE_DIRS and ASYNC_LIBRARIES to
      # patch around having the cache VOL find the async VOL when they're built
      # at the same time. Once the Async and Cache VOLs create CMake .config
      # files, this should no longer be needed with CMake 3.24 and newer (see
      # FetchContent's OVERRIDE_FIND_PACKAGE)
      - name: Configure HDF5 with cache VOL connector
        shell: bash
        run: |
          mkdir ${{ github.workspace }}/hdf5/build
          cd ${{ github.workspace }}/hdf5/build
          cmake -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }} \
            -DCMAKE_INSTALL_PREFIX=${{ runner.workspace }}/hdf5_build \
            -DBUILD_STATIC_LIBS=OFF \
            -DHDF5_TEST_API:BOOL=ON \
            -DHDF5_TEST_API_ENABLE_ASYNC:BOOL=ON \
            -DHDF5_ENABLE_PARALLEL:BOOL=ON \
            -DHDF5_ENABLE_THREADSAFE:BOOL=ON \
            -DALLOW_UNSUPPORTED:BOOL=ON \
            -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF \
            -DHDF5_VOL_ALLOW_EXTERNAL:STRING="GIT" \
            -DHDF5_VOL_URL01:STRING="https://github.com/hpc-io/vol-async.git" \
            -DHDF5_VOL_VOL-ASYNC_BRANCH:STRING="develop" \
            -DHDF5_VOL_VOL-ASYNC_NAME:STRING="async under_vol=0\;under_info={}" \
            -DHDF5_VOL_VOL-ASYNC_TEST_PARALLEL:BOOL=ON \
            -DHDF5_VOL_URL02:STRING="https://github.com/HDFGroup/vol-cache.git" \
            -DHDF5_VOL_VOL-CACHE_BRANCH:STRING="develop" \
            -DHDF5_VOL_VOL-CACHE_NAME:STRING="$HDF5_VOL_CACHE_TEST_NAME" \
            -DHDF5_VOL_VOL-CACHE_TEST_PARALLEL:BOOL=ON \
            -DASYNC_INCLUDE_DIR=${{ github.workspace }}/hdf5/build/_deps/vol-async-src/src \
            -DASYNC_INCLUDE_DIRS=${{ github.workspace }}/hdf5/build/_deps/vol-async-src/src \
            -DASYNC_LIBRARIES=${{ github.workspace }}/hdf5/build/bin/libasynchdf5.a\;${{ github.workspace }}/hdf5/build/bin/libh5async.so \
            ${{ github.workspace }}/hdf5
          cat src/libhdf5.settings

      - name: Build HDF5 and cache VOL connector
        shell: bash
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          cmake --build . --parallel 3 --config ${{ inputs.build_mode }}
          echo "LD_LIBRARY_PATH=/usr/local/lib:${{ github.workspace }}/hdf5/build/bin" >> $GITHUB_ENV

      - name: Create cache VOL connector configuration file for testing
        shell: bash
        run: |
          mkdir -p $GITHUB_WORKSPACE/scratch
          touch $GITHUB_WORKSPACE/config1.cfg
          echo "HDF5_CACHE_STORAGE_SCOPE: LOCAL" >> $GITHUB_WORKSPACE/config1.cfg
          echo "HDF5_CACHE_STORAGE_PATH: $GITHUB_WORKSPACE/scratch" >> $GITHUB_WORKSPACE/config1.cfg
          echo "HDF5_CACHE_STORAGE_SIZE: 4294967296" >> $GITHUB_WORKSPACE/config1.cfg
          echo "HDF5_CACHE_STORAGE_TYPE: SSD" >> $GITHUB_WORKSPACE/config1.cfg
          echo "HDF5_CACHE_REPLACEMENT_POLICY: LRU" >> $GITHUB_WORKSPACE/config1.cfg

      # Workaround for cache VOL CMake issue
      - name: Copy testing files
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          cp bin/test_file.exe ./_deps/vol-cache-build/tests
          cp bin/test_group.exe ./_deps/vol-cache-build/tests
          cp bin/test_dataset.exe ./_deps/vol-cache-build/tests
          cp bin/test_dataset_async_api.exe ./_deps/vol-cache-build/tests
          cp bin/test_write_multi.exe ./_deps/vol-cache-build/tests
          cp bin/test_multdset.exe ./_deps/vol-cache-build/tests

      - name: Set environment variables for external tests (cache VOL only)
        if: ${{ ! matrix.async }}
        run: |
          echo "HDF5_PLUGIN_PATH=${{ github.workspace }}/hdf5/build/bin/" >> $GITHUB_ENV
          echo "HDF5_VOL_CONNECTOR=cache_ext config=$GITHUB_WORKSPACE/config1.cfg;under_vol=0;under_info={};" >> $GITHUB_ENV

      - name: Set environment variables for external tests (cache VOL atop async VOL)
        if: ${{ matrix.async }}
        run: |
          echo "HDF5_PLUGIN_PATH=${{ github.workspace }}/hdf5/build/bin/" >> $GITHUB_ENV
          echo "HDF5_VOL_CONNECTOR=cache_ext config=$GITHUB_WORKSPACE/config1.cfg;under_vol=512;under_info={under_vol=0;under_info={}};" >> $GITHUB_ENV

      # Until cache VOL tests are namespaced properly, run them directly
      - name: Test HDF5 cache VOL connector with external tests
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          ctest --build-config ${{ inputs.build_mode }} -VV -R "^test_file$" .
          ctest --build-config ${{ inputs.build_mode }} -VV -R "^test_group$" .
          ctest --build-config ${{ inputs.build_mode }} -VV -R "^test_dataset$" .
          ctest --build-config ${{ inputs.build_mode }} -VV -R "^test_dataset_async_api$" .
          ctest --build-config ${{ inputs.build_mode }} -VV -R "^test_write_multi$" .
          ctest --build-config ${{ inputs.build_mode }} -VV -R "^test_multdset$" .

      - name: Test HDF5 cache VOL connector with HDF5 API tests
        working-directory: ${{ github.workspace }}/hdf5/build
        # Don't test the Cache VOL connector with the HDF5 API tests yet
        # when it's stacked on top of the Async connector, as it doesn't
        # currently pass all the tests due to the Async connector not passing
        # all the tests. Leave the step in, but skip it to leave an indication
        # that this should be re-enabled in the future.
        if: ${{ ! matrix.async }}
        run: |
          ctest --build-config ${{ inputs.build_mode }} -VV -R "HDF5_VOL_vol-cache" .

64
.github/workflows/vol_ext_passthru.yml vendored Normal file
View File

@ -0,0 +1,64 @@
name: Test HDF5 external pass-through VOL

on:
  workflow_call:
    inputs:
      build_mode:
        description: "CMake Build type"
        required: true
        type: string

permissions:
  contents: read

jobs:
  build_and_test:
    name: Test HDF5 external passthrough VOL connector
    runs-on: ubuntu-latest
    steps:
      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt-get install automake autoconf libtool libtool-bin libopenmpi-dev

      - name: Checkout HDF5
        uses: actions/checkout@v4
        with:
          repository: HDFGroup/hdf5
          path: hdf5

      - name: Checkout vol-external-passthrough
        uses: actions/checkout@v4
        with:
          repository: hpc-io/vol-external-passthrough
          path: vol-external-passthrough

      - name: Configure HDF5 with external passthrough VOL connector
        shell: bash
        run: |
          mkdir ${{ github.workspace }}/hdf5/build
          cd ${{ github.workspace }}/hdf5/build
          cmake -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }} \
            -DCMAKE_INSTALL_PREFIX=${{ runner.workspace }}/hdf5_build \
            -DBUILD_STATIC_LIBS=OFF \
            -DHDF5_TEST_API:BOOL=ON \
            -DHDF5_ENABLE_PARALLEL:BOOL=ON \
            -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF \
            -DHDF5_VOL_ALLOW_EXTERNAL:STRING="GIT" \
            -DHDF5_VOL_URL01:STRING="https://github.com/hpc-io/vol-external-passthrough.git" \
            -DHDF5_VOL_VOL-EXTERNAL-PASSTHROUGH_BRANCH:STRING="develop" \
            -DHDF5_VOL_VOL-EXTERNAL-PASSTHROUGH_NAME:STRING="pass_through_ext under_vol=0\;under_info={}\;" \
            -DHDF5_VOL_VOL-EXTERNAL-PASSTHROUGH_TEST_PARALLEL:BOOL=ON \
            ${{ github.workspace }}/hdf5
          cat src/libhdf5.settings

      - name: Build HDF5 and external passthrough VOL connector
        shell: bash
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          cmake --build . --parallel 3 --config ${{ inputs.build_mode }}

      - name: Test HDF5 external passthrough VOL connector with HDF5 API tests
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          ctest --build-config ${{ inputs.build_mode }} -VV -R "HDF5_VOL_vol-external-passthrough" .

89
.github/workflows/vol_log.yml vendored Normal file
View File

@ -0,0 +1,89 @@
name: Test HDF5 Log-based VOL

on:
  workflow_call:
    inputs:
      build_mode:
        description: "CMake Build type"
        required: true
        type: string

permissions:
  contents: read

jobs:
  build_and_test:
    name: Test HDF5 Log-based VOL connector
    runs-on: ubuntu-latest
    steps:
      - name: Install dependencies
        run: |
          sudo apt update
          sudo apt-get install automake autoconf libtool libtool-bin libopenmpi-dev zlib1g-dev
          #mpich

      - name: Checkout HDF5
        uses: actions/checkout@v4
        with:
          repository: HDFGroup/hdf5
          path: hdf5

      # Log-based VOL currently doesn't have CMake support
      - name: Configure HDF5
        shell: bash
        run: |
          mkdir ${{ github.workspace }}/hdf5/build
          cd ${{ github.workspace }}/hdf5/build
          cmake -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }} \
            -DCMAKE_INSTALL_PREFIX=${{ runner.workspace }}/hdf5_build \
            -DBUILD_STATIC_LIBS=OFF \
            -DHDF5_TEST_API:BOOL=ON \
            -DHDF5_TEST_API_ENABLE_ASYNC:BOOL=ON \
            -DHDF5_ENABLE_PARALLEL:BOOL=ON \
            -DHDF5_ENABLE_THREADSAFE:BOOL=ON \
            -DALLOW_UNSUPPORTED:BOOL=ON \
            -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF \
            ${{ github.workspace }}/hdf5
          cat src/libhdf5.settings

      - name: Build and install HDF5
        shell: bash
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          cmake --build . --parallel 3 --config ${{ inputs.build_mode }}
          cmake --install .
          echo "LD_LIBRARY_PATH=${{ github.workspace }}/hdf5/build/bin" >> $GITHUB_ENV
          echo "PATH=${{ runner.workspace }}/hdf5_build/bin:${PATH}" >> $GITHUB_ENV

      - name: Checkout Log-based VOL
        uses: actions/checkout@v4
        with:
          repository: DataLib-ECP/vol-log-based
          path: vol-log-based

      - name: Build HDF5 Log-based VOL connector and test with external tests
        env:
          CXX: mpic++
          CC: mpicc
          LD_LIBRARY_PATH: ${{ runner.workspace }}/hdf5_build/lib
        run: |
          cd vol-log-based
          autoreconf -i
          ./configure --prefix=${{ runner.workspace }}/vol-log-based-build --with-hdf5=${{ runner.workspace }}/hdf5_build/ --enable-shared --enable-zlib
          make -j2 && make install
          export HDF5_PLUGIN_PATH="${{ runner.workspace }}/vol-log-based-build/lib"
          export HDF5_VOL_CONNECTOR="LOG under_vol=0;under_info={}"
          make check
          echo "HDF5_PLUGIN_PATH=${HDF5_PLUGIN_PATH}" >> $GITHUB_ENV
          echo "HDF5_VOL_CONNECTOR=${HDF5_VOL_CONNECTOR}" >> $GITHUB_ENV

      # Skip parallel testing for now as it appears to hang
      - name: Test HDF5 Log-based VOL connector with HDF5 API tests
        working-directory: ${{ github.workspace }}/hdf5/build
        # Don't test the Log-based VOL connector with the HDF5 API tests yet,
        # as it doesn't currently pass all the tests. Leave the step in,
        # but skip it to leave an indication that this should be re-enabled
        # in the future.
        if: false
        run: |
          ctest --build-config ${{ inputs.build_mode }} -VV -R "h5_api" -E "parallel" .

195
.github/workflows/vol_rest.yml vendored Normal file
View File

@ -0,0 +1,195 @@
name: Test HDF5 REST VOL

on:
  workflow_call:
    inputs:
      build_mode:
        description: "CMake Build type"
        required: true
        type: string

permissions:
  contents: read

# Test credentials for the local HSDS instance (not secrets)
env:
  ADMIN_PASSWORD: admin
  ADMIN_USERNAME: admin
  USER_NAME: test_user1
  USER_PASSWORD: test
  USER2_NAME: test_user2
  USER2_PASSWORD: test
  HSDS_USERNAME: test_user1
  HSDS_PASSWORD: test
  HSDS_PATH: /home/test_user1/
  HDF5_API_TEST_PATH_PREFIX: /home/test_user1/
  HSDS_ENDPOINT: http+unix://%2Ftmp%2Fhs%2Fsn_1.sock
  HDF5_VOL_CONNECTOR: REST
  ROOT_DIR: ${{ github.workspace }}/hsdsdata
  BUCKET_NAME: hsdstest

jobs:
  build_and_test:
    name: Test HDF5 REST VOL connector
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: ["3.10"]
    steps:
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install automake autoconf libtool libtool-bin libcurl4-openssl-dev libyajl-dev

      - name: Checkout HDF5
        uses: actions/checkout@v4
        with:
          repository: HDFGroup/hdf5
          path: hdf5

      - name: Configure HDF5 with REST VOL connector
        shell: bash
        run: |
          mkdir ${{ github.workspace }}/hdf5/build
          cd ${{ github.workspace }}/hdf5/build
          cmake -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }} \
            -DCMAKE_INSTALL_PREFIX=${{ runner.workspace }}/hdf5_build \
            -DBUILD_STATIC_LIBS=OFF \
            -DHDF5_BUILD_HL_LIB:BOOL=ON \
            -DHDF5_TEST_API:BOOL=ON \
            -DALLOW_UNSUPPORTED:BOOL=ON \
            -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF \
            -DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF \
            -DHDF5_VOL_ALLOW_EXTERNAL:STRING="GIT" \
            -DHDF5_VOL_URL01:STRING="https://github.com/HDFGroup/vol-rest.git" \
            -DHDF5_VOL_VOL-REST_BRANCH:STRING="master" \
            -DHDF5_VOL_VOL-REST_NAME:STRING="REST" \
            -DHDF5_VOL_VOL-REST_TEST_PARALLEL:BOOL=OFF \
            -DHDF5_VOL_REST_ENABLE_EXAMPLES=ON \
            ${{ github.workspace }}/hdf5
          cat src/libhdf5.settings

      - name: Build and install HDF5 and REST VOL connector
        shell: bash
        working-directory: ${{ github.workspace }}/hdf5/build
        run: |
          cmake --build . --parallel 3 --config ${{ inputs.build_mode }}
          cmake --install .
          echo "LD_LIBRARY_PATH=${{ github.workspace }}/hdf5/build/bin" >> $GITHUB_ENV

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python-version }}

      - name: Checkout HSDS
        uses: actions/checkout@v4
        with:
          repository: HDFGroup/hsds
          path: ${{ github.workspace }}/hsds

      - name: Get HSDS HEAD commit SHA
        shell: bash
        working-directory: ${{ github.workspace }}/hsds
        run: |
          export HSDS_COMMIT=`git rev-parse HEAD`
          export HSDS_COMMIT_SHORT=`git rev-parse --short HEAD`
          echo "HSDS_COMMIT=${HSDS_COMMIT}" >> $GITHUB_ENV
          echo "HSDS_COMMIT_SHORT=${HSDS_COMMIT_SHORT}" >> $GITHUB_ENV

      # Note that we don't currently cache HSDS, as we would need
      # to pick a fixed commit/tag in order to generate a reasonable
      # key to use for caching/restoring from the cache
      #- name: Restore HSDS (${{ env.HSDS_COMMIT_SHORT }}) installation cache
      #  id: restore-hsds
      #  uses: actions/cache@v3
      #  with:
      #    path: ${{ runner.workspace }}/hsds-${{ env.HSDS_COMMIT_SHORT }}-install
      #    key: ${{ runner.os }}-${{ runner.arch }}-hsds-${{ env.HSDS_COMMIT }}-${{ inputs.build_mode }}-cache

      - name: Install HSDS (${{ env.HSDS_COMMIT_SHORT }}) dependencies
        shell: bash
        working-directory: ${{ github.workspace }}/hsds
        run: |
          python -m pip install --upgrade pip
          python -m pip install pytest
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi

      - name: Install HSDS (${{ env.HSDS_COMMIT_SHORT }}) package
        #if: ${{ ! steps.restore-hsds.outputs.cache-hit }}
        shell: bash
        run: |
          cd ${{ github.workspace }}/hsds
          pip install -e .

      #- name: Cache HSDS (${{ env.HSDS_COMMIT_SHORT }}) installation
      #  uses: actions/cache/save@v3
      #  if: ${{ ! steps.restore-hsds.outputs.cache-hit }}
      #  with:
      #    path: ${{ runner.workspace }}/hsds-${{ env.HSDS_COMMIT_SHORT }}-install
      #    key: ${{ runner.os }}-${{ runner.arch }}-hsds-${{ env.HSDS_COMMIT }}-${{ inputs.build_mode }}-cache

      - name: Run HSDS unit tests
        shell: bash
        working-directory: ${{ github.workspace }}/hsds
        run: |
          pytest

      - name: Start HSDS
        working-directory: ${{ github.workspace }}/hsds
        run: |
          mkdir ${{ github.workspace }}/hsdsdata &&
          mkdir ${{ github.workspace }}/hsdsdata/hsdstest &&
          cp admin/config/groups.default admin/config/groups.txt &&
          cp admin/config/passwd.default admin/config/passwd.txt &&
          cp admin/config/groups.default admin/config/groups.txt &&
          cp admin/config/passwd.default admin/config/passwd.txt
          ROOT_DIR=${{ github.workspace }}/hsdsdata ./runall.sh --no-docker 1 &
          sleep 10

      - name: Test HSDS
        working-directory: ${{ github.workspace }}/hsds
        run: |
          python tests/integ/setup_test.py

      # sudo drops the job-level env, so every variable is re-passed explicitly
      - name: Test HDF5 REST VOL connector with external tests
        working-directory: ${{ github.workspace }}/hdf5/build/
        run: |
          sudo \
          HDF5_PLUGIN_PATH="${{ runner.workspace }}/hdf5_build/lib" \
          HDF5_VOL_CONNECTOR=REST \
          ADMIN_USERNAME=admin ADMIN_PASSWORD=admin \
          USER_NAME=test_user1 USER_PASSWORD=test \
          USER2_NAME=test_user2 USER2_PASSWORD=test \
          HSDS_USERNAME=test_user1 HSDS_PASSWORD=test \
          HSDS_PATH=/home/test_user1/ HDF5_API_TEST_PATH_PREFIX=/home/test_user1/ \
          HSDS_ENDPOINT=http+unix://%2Ftmp%2Fhs%2Fsn_1.sock \
          ROOT_DIR=${{ github.workspace }}/hsdsdata \
          BUCKET_NAME=hsdstest \
          ctest --build-config ${{ inputs.build_mode }} -VV -R "test_rest_vol" .

      - name: Test HDF5 REST VOL connector with HDF5 API tests
        working-directory: ${{ github.workspace }}/hdf5/build/
        # Don't test the REST VOL connector with the HDF5 API tests yet,
        # as it doesn't currently pass all the tests. Leave the step in,
        # but skip it to leave an indication that this should be re-enabled
        # in the future.
        if: false
        run: |
          sudo \
          HDF5_PLUGIN_PATH="${{ runner.workspace }}/hdf5_build/lib" \
          HDF5_VOL_CONNECTOR=REST \
          ADMIN_USERNAME=admin ADMIN_PASSWORD=admin \
          USER_NAME=test_user1 USER_PASSWORD=test \
          USER2_NAME=test_user2 USER2_PASSWORD=test \
          HSDS_USERNAME=test_user1 HSDS_PASSWORD=test \
          HSDS_PATH=/home/test_user1/ HDF5_API_TEST_PATH_PREFIX=/home/test_user1/ \
          HSDS_ENDPOINT=http+unix://%2Ftmp%2Fhs%2Fsn_1.sock \
          ROOT_DIR=${{ github.workspace }}/hsdsdata \
          BUCKET_NAME=hsdstest \
          ctest --build-config ${{ inputs.build_mode }} -VV -R "h5_api" .

      - name: Stop HSDS
        working-directory: ${{ github.workspace }}/hsds
        run: |
          ./stopall.sh

View File

@ -26,14 +26,51 @@ function (get_generated_cmake_targets out_var dir)
set (${out_var} "${dir_targets}" PARENT_SCOPE)
endfunction ()
# For now, only support building of external VOL connectors with FetchContent
option (HDF5_VOL_ALLOW_EXTERNAL "Allow building of external HDF5 VOL connectors with FetchContent" "NO")
mark_as_advanced (HDF5_VOL_ALLOW_EXTERNAL)
if (HDF5_VOL_ALLOW_EXTERNAL)
if (HDF5_VOL_ALLOW_EXTERNAL MATCHES "NO" OR (NOT HDF5_VOL_ALLOW_EXTERNAL MATCHES "GIT" AND NOT HDF5_VOL_ALLOW_EXTERNAL MATCHES "LOCAL_DIR"))
message (FATAL_ERROR "HDF5_VOL_ALLOW_EXTERNAL must be set to 'GIT' or 'LOCAL_DIR' to allow building of external HDF5 VOL connectors")
endif()
# Function to apply connector-specify workarounds to build
# code once a connector has been populated through FetchContent
# Apply connector-specific build workarounds after a VOL connector's
# source has been populated through FetchContent.
#   connector_name - lowercase FetchContent name of the connector (e.g. "vol-cache")
#   source_dir     - directory the connector's source was populated into
function (apply_connector_workarounds connector_name source_dir)
# For the cache VOL, remove the call to find_package(ASYNC).
# Eventually, the FetchContent OVERRIDE_FIND_PACKAGE should be
# able to fulfill this dependency when building the cache VOL,
# but for now we have to hack around this until the async and
# cache VOLs create CMake .config files
if ("${connector_name}" MATCHES "vol-cache")
# Remove find_package(ASYNC) call from connector's CMake code
file (READ "${source_dir}/CMakeLists.txt" vol_cmake_contents)
string (REGEX REPLACE "[ \t]*find_package[ \t]*\\([ \t]*ASYNC[^\r\n\\)]*\\)[ \t]*[\r\n]+" "" vol_cmake_contents "${vol_cmake_contents}")
file (WRITE "${source_dir}/CMakeLists.txt" "${vol_cmake_contents}")
# Remove setting of HDF5_VOL_CONNECTOR and HDF5_PLUGIN_PATH
# in connector's external tests CMake code
file (STRINGS "${source_dir}/tests/CMakeLists.txt" file_lines)
# Rewrite tests/CMakeLists.txt line-by-line: each pattern below strips one
# piece of a multi-line set_tests_properties(... PROPERTIES ENVIRONMENT ...)
# call, leaving the remaining lines untouched. NOTE(review): presumably this
# lets the tests see the HDF5_VOL_CONNECTOR/HDF5_PLUGIN_PATH values set by
# this build instead of the connector's own — confirm against the cache VOL.
file (WRITE "${source_dir}/tests/CMakeLists.txt" "")
foreach (line IN LISTS file_lines)
set (stripped_line "${line}")
# Drop an opening "set_tests_properties(" line
string (REGEX MATCH "^[ \t]*set_tests_properties\\([ \t]*[\r\n]?" match_string "${line}")
if (NOT "${match_string}" STREQUAL "")
string (REGEX REPLACE "^[ \t]*set_tests_properties\\([ \t]*[\r\n]?" "" stripped_line "${line}")
endif ()
# Drop a lone "${test}"-style variable-reference line (the "." matches the sigil)
string (REGEX MATCH "^[ \t]*.\\{test\\}[ \t]*[\r\n]?" match_string "${line}")
if (NOT "${match_string}" STREQUAL "")
string (REGEX REPLACE "^[ \t]*.\\{[A-Za-z]*\\}[ \t]*[\r\n]?" "" stripped_line "${line}")
endif ()
# Drop a lone "PROPERTIES" keyword line
string (REGEX MATCH "^[ \t]*PROPERTIES[ \t]*[\r\n]?" match_string "${line}")
if (NOT "${match_string}" STREQUAL "")
string (REGEX REPLACE "^[ \t]*PROPERTIES[ \t]*[\r\n]?" "" stripped_line "${line}")
endif ()
# Drop an "ENVIRONMENT ..." line along with everything after the keyword
string (REGEX MATCH "^[ \t]*ENVIRONMENT[ \t]*.*[\r\n]?" match_string "${line}")
if (NOT "${match_string}" STREQUAL "")
string (REGEX REPLACE "^[ \t]*ENVIRONMENT[ \t]*.*[\r\n]?" "" stripped_line "${line}")
endif ()
file (APPEND "${source_dir}/tests/CMakeLists.txt" "${stripped_line}\n")
endforeach ()
endif ()
endfunction ()
set (HDF5_VOL_ALLOW_EXTERNAL "NO" CACHE STRING "Allow building of external HDF5 VOL connectors with FetchContent")
set_property (CACHE HDF5_VOL_ALLOW_EXTERNAL PROPERTY STRINGS NO GIT LOCAL_DIR)
mark_as_advanced (HDF5_VOL_ALLOW_EXTERNAL)
if (HDF5_VOL_ALLOW_EXTERNAL MATCHES "GIT" OR HDF5_VOL_ALLOW_EXTERNAL MATCHES "LOCAL_DIR")
# For compatibility, set some variables that projects would
# typically look for after calling find_package(HDF5)
set (HDF5_FOUND 1)
@ -103,6 +140,13 @@ if (HDF5_VOL_ALLOW_EXTERNAL)
mark_as_advanced ("HDF5_VOL_${hdf5_vol_name_upper}_BRANCH")
endif()
set ("HDF5_VOL_${hdf5_vol_name_upper}_CMAKE_PACKAGE_NAME"
"${hdf5_vol_name_lower}"
CACHE
STRING
"CMake package name used by find_package(...) calls for VOL connector '${hdf5_vol_name}'"
)
set ("HDF5_VOL_${hdf5_vol_name_upper}_NAME" "" CACHE STRING "Name of VOL connector to set for the HDF5_VOL_CONNECTOR environment variable")
option ("HDF5_VOL_${hdf5_vol_name_upper}_TEST_PARALLEL" "Whether to test VOL connector '${hdf5_vol_name}' against the parallel API tests" OFF)
@ -124,22 +168,40 @@ if (HDF5_VOL_ALLOW_EXTERNAL)
message (FATAL_ERROR "HDF5_VOL_PATH${vol_idx_fixed} must be an absolute path to a valid directory")
endif ()
# Set internal convenience variables for FetchContent dependency name
set (hdf5_vol_depname "${HDF5_VOL_${hdf5_vol_name_upper}_CMAKE_PACKAGE_NAME}")
string (TOLOWER "${hdf5_vol_depname}" hdf5_vol_depname_lower)
if (HDF5_VOL_ALLOW_EXTERNAL MATCHES "GIT")
FetchContent_Declare (HDF5_VOL_${hdf5_vol_name_lower}
GIT_REPOSITORY "${HDF5_VOL_SOURCE}"
GIT_TAG "${HDF5_VOL_${hdf5_vol_name_upper}_BRANCH}"
)
if (${CMAKE_VERSION} VERSION_GREATER_EQUAL "3.24")
FetchContent_Declare (${hdf5_vol_depname}
GIT_REPOSITORY "${HDF5_VOL_SOURCE}"
GIT_TAG "${HDF5_VOL_${hdf5_vol_name_upper}_BRANCH}"
OVERRIDE_FIND_PACKAGE
)
else ()
FetchContent_Declare (${hdf5_vol_depname}
GIT_REPOSITORY "${HDF5_VOL_SOURCE}"
GIT_TAG "${HDF5_VOL_${hdf5_vol_name_upper}_BRANCH}"
FIND_PACKAGE_ARGS NAMES ${hdf5_vol_name_lower}
)
endif ()
elseif(HDF5_VOL_ALLOW_EXTERNAL MATCHES "LOCAL_DIR")
FetchContent_Declare (HDF5_VOL_${hdf5_vol_name_lower}
SOURCE_DIR "${HDF5_VOL_SOURCE}"
FetchContent_Declare (${hdf5_vol_depname}
SOURCE_DIR "${HDF5_VOL_SOURCE}"
)
endif()
FetchContent_GetProperties(HDF5_VOL_${hdf5_vol_name_lower})
if (NOT hdf5_vol_${hdf5_vol_name_lower}_POPULATED)
FetchContent_Populate(HDF5_VOL_${hdf5_vol_name_lower})
FetchContent_GetProperties(${hdf5_vol_depname})
if (NOT ${hdf5_vol_depname}_POPULATED)
FetchContent_Populate(${hdf5_vol_depname})
if (NOT EXISTS "${hdf5_vol_${hdf5_vol_name_lower}_SOURCE_DIR}/CMakeLists.txt")
# Now that content has been populated, set other internal
# convenience variables for FetchContent dependency
set (hdf5_vol_depname_source_dir "${${hdf5_vol_depname_lower}_SOURCE_DIR}")
set (hdf5_vol_depname_binary_dir "${${hdf5_vol_depname_lower}_BINARY_DIR}")
if (NOT EXISTS "${hdf5_vol_depname_source_dir}/CMakeLists.txt")
if (HDF5_VOL_ALLOW_EXTERNAL MATCHES "GIT")
message (SEND_ERROR "The git repository branch '${HDF5_VOL_${hdf5_vol_name_upper}_BRANCH}' for VOL connector '${hdf5_vol_name}' does not appear to contain a CMakeLists.txt file")
elseif (HDF5_VOL_ALLOW_EXTERNAL MATCHES "LOCAL_DIR")
@ -150,21 +212,24 @@ if (HDF5_VOL_ALLOW_EXTERNAL)
# If there are any calls to find_package(HDF5) in the connector's
# CMakeLists.txt files, remove those since any found HDF5 targets
# will conflict with targets being generated by this build of HDF5
# Strip find_package(HDF5) calls from the connector's top-level CMakeLists.txt
# so its targets link against this in-tree build of HDF5 instead of a
# previously-installed copy
if (EXISTS "${hdf5_vol_depname_source_dir}/CMakeLists.txt")
  file (READ "${hdf5_vol_depname_source_dir}/CMakeLists.txt" vol_cmake_contents)
  string (REGEX REPLACE "[ \t]*find_package[ \t]*\\([ \t]*HDF5[^\r\n\\)]*\\)[ \t]*[\r\n]+" "" vol_cmake_contents "${vol_cmake_contents}")
  file (WRITE "${hdf5_vol_depname_source_dir}/CMakeLists.txt" "${vol_cmake_contents}")
endif ()
# Same treatment for the connector's src/CMakeLists.txt, which commonly
# repeats the find_package(HDF5) call
if (EXISTS "${hdf5_vol_depname_source_dir}/src/CMakeLists.txt")
  file (READ "${hdf5_vol_depname_source_dir}/src/CMakeLists.txt" vol_cmake_contents)
  string (REGEX REPLACE "[ \t]*find_package[ \t]*\\([ \t]*HDF5[^\r\n\\)]*\\)[ \t]*[\r\n]+" "" vol_cmake_contents "${vol_cmake_contents}")
  file (WRITE "${hdf5_vol_depname_source_dir}/src/CMakeLists.txt" "${vol_cmake_contents}")
endif ()
# Apply any connector-specific workarounds before configuring it
apply_connector_workarounds ("${hdf5_vol_name_lower}" "${hdf5_vol_depname_source_dir}")
# Bring the connector into this build
add_subdirectory (${hdf5_vol_depname_source_dir} ${hdf5_vol_depname_binary_dir})
# Get list of targets generated by build of connector
get_generated_cmake_targets (connector_targets ${hdf5_vol_depname_source_dir})
# Create a custom target for the connector to encompass all its
# targets and other custom properties set by us for later use
@ -217,8 +282,30 @@ if (HDF5_VOL_ALLOW_EXTERNAL)
HDF5_VOL_TEST_PARALLEL ${HDF5_VOL_${hdf5_vol_name_upper}_TEST_PARALLEL}
)
# Add this VOL connector's target to the list of external connector targets
list (APPEND HDF5_EXTERNAL_VOL_TARGETS "HDF5_VOL_${hdf5_vol_name_lower}")
# Get the list of library targets from this VOL connector
unset (connector_lib_targets)
foreach (connector_target ${connector_targets})
get_target_property (target_type ${connector_target} TYPE)
if (target_type STREQUAL "SHARED_LIBRARY" OR target_type STREQUAL "STATIC_LIBRARY")
list (APPEND connector_lib_targets "${connector_target}")
endif ()
endforeach ()
# Add all of the previous VOL connector's library targets as
# dependencies for the current VOL connector to ensure that
# VOL connectors get built serially in case there are dependencies
# NOTE(review): dependencies are attached to ALL of this connector's
# targets (not only its libraries) — appears intentional; confirm
if (DEFINED last_vol_lib_targets)
foreach (connector_target ${connector_targets})
add_dependencies (${connector_target} ${last_vol_lib_targets})
endforeach ()
endif ()
# Use this connector's library targets as dependencies
# for the next connector that is built
set (last_vol_lib_targets "${connector_lib_targets}")
endif ()
endif ()
endforeach ()

View File

@ -7,6 +7,7 @@ HDF5 version 1.15.0 currently under development
[![netCDF build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/netcdf.yml?branch=develop&label=netCDF)](https://github.com/HDFGroup/hdf5/actions/workflows/netcdf.yml?query=branch%3Adevelop)
[![h5py build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/h5py.yml?branch=develop&label=h5py)](https://github.com/HDFGroup/hdf5/actions/workflows/h5py.yml?query=branch%3Adevelop)
[![CVE regression](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/cve.yml?branch=develop&label=CVE)](https://github.com/HDFGroup/hdf5/actions/workflows/cve.yml?query=branch%3Adevelop)
[![HDF5 VOL connectors build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/vol.yml?branch=develop&label=HDF5-VOL)](https://github.com/HDFGroup/hdf5/actions/workflows/vol.yml?query=branch%3Adevelop)
[![1.14 build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/main.yml?branch=hdf5_1_14&label=1.14)](https://github.com/HDFGroup/hdf5/actions/workflows/main.yml?query=branch%3Ahdf5_1_14)
[![BSD](https://img.shields.io/badge/License-BSD-blue.svg)](https://github.com/HDFGroup/hdf5/blob/develop/COPYING)

View File

@ -97,12 +97,21 @@ After the VOL's internal name is generated, the following new variables get crea
variable must be set in order for the VOL connector to be testable with
HDF5's tests.
HDF5_VOL_<VOL name>_CMAKE_PACKAGE_NAME (Default: "<lowercased <VOL name>>")
This variable specifies the exact name that would be passed to CMake
find_package(...) calls for the VOL connector in question. It is used as
the dependency name when making CMake FetchContent calls to try to ensure
that any other VOL connectors to be built which depend on this VOL connector
can make find_package(...) calls for this VOL connector at configure time.
By default, this variable is set to a lowercased version of the internal
name generated for the VOL connector (described above).
HDF5_VOL_<VOL name>_TEST_PARALLEL (Default: OFF)
This variable determines whether the VOL connector with the CMake-internal
name '<VOL name>' should be tested against HDF5's parallel tests.
If the source was retrieved from a Git URL, then the following variable will additionally be created:
HDF5_VOL_<VOL name>_BRANCH (Default: "main")
This variable specifies the git branch name or tag to use when fetching
the source code for the VOL connector with the CMake-internal name
@ -111,9 +120,10 @@ If the source was retrieved from a Git URL, then the following variable will add
As an example, this would create the following variables for the
previously-mentioned VOL connector if it is retrieved from a URL:
HDF5_VOL_VOL-ASYNC_NAME ""
HDF5_VOL_VOL-ASYNC_CMAKE_PACKAGE_NAME "vol-async"
HDF5_VOL_VOL-ASYNC_BRANCH "main"
HDF5_VOL_VOL-ASYNC_TEST_PARALLEL OFF
**NOTE**
If a VOL connector requires extra information to be passed in its
@ -139,9 +149,10 @@ would typically be passed when building HDF5, such as `CMAKE_INSTALL_PREFIX`,
-DHDF5_TEST_API=ON
-DHDF5_VOL_ALLOW_EXTERNAL="GIT"
-DHDF5_VOL_URL01=https://github.com/hpc-io/vol-async.git
-DHDF5_VOL_VOL-ASYNC_BRANCH=develop
-DHDF5_VOL_VOL-ASYNC_NAME="async under_vol=0\;under_info={}"
-DHDF5_VOL_VOL-ASYNC_TEST_PARALLEL=ON
..
Here, we are specifying that:
@ -156,7 +167,8 @@ Here, we are specifying that:
variable should be set to "async under_vol=0\;under_info={}", which
specifies that the VOL connector with the canonical name "async" should
be loaded and it should be passed the string "under_vol=0;under_info={}"
for its configuration (note the backslash-escaping of semicolons in the string
provided)
* The Asynchronous I/O VOL connector should be tested against HDF5's parallel API tests
Note that this also assumes that the Asynchronous I/O VOL connector's

View File

@ -141,7 +141,7 @@ target_compile_options (
# The shared private-defs list already carries the Developer-config
# definitions, so no separate generator expression is needed here
target_compile_definitions (
  h5_api_test
  PRIVATE
  "${HDF5_TEST_COMPILE_DEFS_PRIVATE}"
)
# Always prefer linking the shared HDF5 library by default
if (BUILD_SHARED_LIBS)

View File

@ -136,15 +136,6 @@ main(int argc, char **argv)
}
}
#ifdef H5_HAVE_PARALLEL
/* If HDF5 was built with parallel enabled, go ahead and call MPI_Init before
* running these tests. Even though these are meant to be serial tests, they will
* likely be run using mpirun (or similar) and we cannot necessarily expect HDF5 or
* an HDF5 VOL connector to call MPI_Init.
*/
MPI_Init(&argc, &argv);
#endif
H5open();
n_tests_run_g = 0;
@ -304,9 +295,5 @@ done:
H5close();
#ifdef H5_HAVE_PARALLEL
MPI_Finalize();
#endif
exit(((err_occurred || n_tests_failed_g > 0) ? EXIT_FAILURE : EXIT_SUCCESS));
}

View File

@ -27,6 +27,15 @@ set (TEST_LIB_HEADERS
${HDF5_TEST_SOURCE_DIR}/swmr_common.h
)
#################################################################################
# Set private compile-time definitions added when
# compiling test source files
#################################################################################
set (HDF5_TEST_COMPILE_DEFS_PRIVATE
  "$<$<CONFIG:Developer>:${HDF5_DEVELOPER_DEFS}>"
  # BUG FIX: $<BOOL:HDF5_TEST_API> evaluated the literal string "HDF5_TEST_API"
  # (always true); the variable must be expanded so H5_HAVE_TEST_API is only
  # defined when API testing is actually enabled
  "$<$<BOOL:${HDF5_TEST_API}>:H5_HAVE_TEST_API>"
)
if (BUILD_STATIC_LIBS)
add_library (${HDF5_TEST_LIB_TARGET} STATIC ${TEST_LIB_SOURCES} ${TEST_LIB_HEADERS})
target_include_directories (${HDF5_TEST_LIB_TARGET}
@ -37,7 +46,7 @@ if (BUILD_STATIC_LIBS)
# Shared defs list already includes the Developer-config definitions
target_compile_definitions(${HDF5_TEST_LIB_TARGET}
  PRIVATE
  "H5_TEST_EXPRESS_LEVEL_DEFAULT=${H5_TEST_EXPRESS_LEVEL_DEFAULT}"
  "${HDF5_TEST_COMPILE_DEFS_PRIVATE}"
)
TARGET_C_PROPERTIES (${HDF5_TEST_LIB_TARGET} STATIC)
target_link_libraries (${HDF5_TEST_LIB_TARGET}
@ -79,7 +88,7 @@ if (BUILD_SHARED_LIBS)
"H5_BUILT_AS_DYNAMIC_LIB"
PRIVATE
"H5_TEST_EXPRESS_LEVEL_DEFAULT=${H5_TEST_EXPRESS_LEVEL_DEFAULT}"
"$<$<CONFIG:Developer>:${HDF5_DEVELOPER_DEFS}>"
"${HDF5_TEST_COMPILE_DEFS_PRIVATE}"
)
TARGET_C_PROPERTIES (${HDF5_TEST_LIBSH_TARGET} SHARED)
target_link_libraries (${HDF5_TEST_LIBSH_TARGET}
@ -431,10 +440,7 @@ macro (ADD_H5_EXE file)
add_executable (${file} ${HDF5_TEST_SOURCE_DIR}/${file}.c)
target_include_directories (${file} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
target_compile_options(${file} PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(${file} PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (${file} STATIC)
target_link_libraries (${file} PRIVATE ${HDF5_TEST_LIB_TARGET})
@ -475,10 +481,7 @@ endforeach ()
#-- Adding test for chunk_info
add_executable (chunk_info ${HDF5_TEST_SOURCE_DIR}/chunk_info.c)
target_compile_options(chunk_info PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(chunk_info PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (chunk_info PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (chunk_info STATIC)
@ -499,10 +502,7 @@ endif ()
#-- Adding test for direct_chunk
add_executable (direct_chunk ${HDF5_TEST_SOURCE_DIR}/direct_chunk.c)
target_compile_options(direct_chunk PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(direct_chunk PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (direct_chunk PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (direct_chunk STATIC)
@ -524,10 +524,7 @@ endif ()
#-- Adding test for testhdf5
add_executable (testhdf5 ${testhdf5_SOURCES})
target_compile_options(testhdf5 PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(testhdf5 PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (testhdf5 PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (testhdf5 STATIC)
@ -548,10 +545,7 @@ endif ()
#-- Adding test for cache_image
add_executable (cache_image ${cache_image_SOURCES})
target_compile_options(cache_image PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(cache_image PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (cache_image PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (cache_image STATIC)
@ -572,10 +566,7 @@ endif ()
#-- Adding test for ttsafe
add_executable (ttsafe ${ttsafe_SOURCES})
target_compile_options(ttsafe PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(ttsafe PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (ttsafe PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (ttsafe STATIC)
@ -602,10 +593,7 @@ endif ()
#-- Adding test for thread_id
add_executable (thread_id ${HDF5_TEST_SOURCE_DIR}/thread_id.c)
target_compile_options(thread_id PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(thread_id PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (thread_id PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (thread_id STATIC)
@ -712,10 +700,7 @@ macro (ADD_H5_VDS_EXE file)
add_executable (${file} ${HDF5_TEST_SOURCE_DIR}/${file}.c ${HDF5_TEST_SOURCE_DIR}/vds_swmr.h)
target_include_directories (${file} PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_BINARY_DIR};${HDF5_TEST_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
target_compile_options(${file} PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(${file} PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (${file} STATIC)
target_link_libraries (${file} PRIVATE ${HDF5_TEST_LIB_TARGET})
@ -742,10 +727,7 @@ endforeach ()
# and it can't be renamed (i.e., no <foo>-shared).
add_executable (accum_swmr_reader ${HDF5_TEST_SOURCE_DIR}/accum_swmr_reader.c)
target_compile_options(accum_swmr_reader PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(accum_swmr_reader PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (accum_swmr_reader PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (accum_swmr_reader STATIC)
@ -816,10 +798,7 @@ endif ()
set (use_append_chunk_SOURCES ${HDF5_TEST_SOURCE_DIR}/use_append_chunk.c ${HDF5_TEST_SOURCE_DIR}/use_common.c ${HDF5_TEST_SOURCE_DIR}/use.h)
add_executable (use_append_chunk ${use_append_chunk_SOURCES})
target_compile_options(use_append_chunk PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(use_append_chunk PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (use_append_chunk PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (use_append_chunk STATIC)
@ -841,10 +820,7 @@ if (HDF5_BUILD_UTILS) # requires mirror server
set (use_append_chunk_mirror_SOURCES ${HDF5_TEST_SOURCE_DIR}/use_append_chunk_mirror.c ${HDF5_TEST_SOURCE_DIR}/use_common.c ${HDF5_TEST_SOURCE_DIR}/use.h)
add_executable (use_append_chunk_mirror ${use_append_chunk_mirror_SOURCES})
target_compile_options(use_append_chunk_mirror PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(use_append_chunk_mirror PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (use_append_chunk_mirror PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (use_append_chunk_mirror STATIC)
@ -866,10 +842,7 @@ endif ()
set (use_append_mchunks_SOURCES ${HDF5_TEST_SOURCE_DIR}/use_append_mchunks.c ${HDF5_TEST_SOURCE_DIR}/use_common.c ${HDF5_TEST_SOURCE_DIR}/use.h)
add_executable (use_append_mchunks ${use_append_mchunks_SOURCES})
target_compile_options(use_append_mchunks PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(use_append_mchunks PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (use_append_mchunks PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (use_append_mchunks STATIC)
@ -890,10 +863,7 @@ endif ()
set (use_disable_mdc_flushes_SOURCES ${HDF5_TEST_SOURCE_DIR}/use_disable_mdc_flushes.c)
add_executable (use_disable_mdc_flushes ${use_disable_mdc_flushes_SOURCES})
target_compile_options(use_disable_mdc_flushes PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared defs list already includes the Developer defs
target_compile_definitions(use_disable_mdc_flushes PRIVATE "${HDF5_TEST_COMPILE_DEFS_PRIVATE}")
target_include_directories (use_disable_mdc_flushes PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>")
if (NOT BUILD_SHARED_LIBS)
TARGET_C_PROPERTIES (use_disable_mdc_flushes STATIC)

View File

@ -99,7 +99,7 @@ target_compile_options (
# The shared parallel defs list already carries the Developer-config
# definitions, so no separate generator expression is needed here
target_compile_definitions (
  h5_api_test_parallel
  PRIVATE
  "${HDF5_TESTPAR_COMPILE_DEFS_PRIVATE}"
)
# Always prefer linking the shared HDF5 library by default
if (BUILD_SHARED_LIBS)

View File

@ -21,13 +21,19 @@ set (testphdf5_SOURCES
${HDF5_TEST_PAR_SOURCE_DIR}/t_oflush.c
)
#################################################################################
# Set private compile-time definitions added when
# compiling test source files
#################################################################################
set (HDF5_TESTPAR_COMPILE_DEFS_PRIVATE
  "$<$<CONFIG:Developer>:${HDF5_DEVELOPER_DEFS}>"
  # BUG FIX: $<BOOL:HDF5_TEST_API> evaluated the literal string "HDF5_TEST_API"
  # (always true); the variable must be expanded so H5_HAVE_TEST_API is only
  # defined when API testing is actually enabled
  "$<$<BOOL:${HDF5_TEST_API}>:H5_HAVE_TEST_API>"
)
#-- Adding test for testhdf5
add_executable (testphdf5 ${testphdf5_SOURCES})
target_compile_options(testphdf5 PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared parallel defs list already includes the Developer defs
target_compile_definitions(testphdf5 PRIVATE "${HDF5_TESTPAR_COMPILE_DEFS_PRIVATE}")
target_include_directories (testphdf5
PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
)
@ -54,10 +60,7 @@ endif ()
macro (ADD_H5P_EXE file)
add_executable (${file} ${HDF5_TEST_PAR_SOURCE_DIR}/${file}.c)
target_compile_options(${file} PRIVATE "${HDF5_CMAKE_C_FLAGS}")
# Single call; the shared parallel defs list already includes the Developer defs
target_compile_definitions(${file} PRIVATE "${HDF5_TESTPAR_COMPILE_DEFS_PRIVATE}")
target_include_directories (${file}
PRIVATE "${HDF5_SRC_INCLUDE_DIRS};${HDF5_SRC_BINARY_DIR};$<$<BOOL:${HDF5_ENABLE_PARALLEL}>:${MPI_C_INCLUDE_DIRS}>"
)

View File

@ -1867,6 +1867,11 @@ main(int argc, char **argv)
/* Set the bigio processing limit to be 'newsize' bytes */
hsize_t oldsize = H5_mpi_set_bigio_count(newsize);
hid_t acc_plist = H5I_INVALID_HID;
#ifdef H5_HAVE_TEST_API
int required = MPI_THREAD_MULTIPLE;
int provided;
#endif
int mpi_code;
/* Having set the bigio handling to a size that is manageable,
* we'll set our 'bigcount' variable to be 2X that limit so
@ -1876,9 +1881,37 @@ main(int argc, char **argv)
if (newsize != oldsize)
bigcount = newsize * 2;
MPI_Init(&argc, &argv);
MPI_Comm_size(MPI_COMM_WORLD, &mpi_size_g);
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank_g);
#ifdef H5_HAVE_TEST_API
/* Attempt to initialize with MPI_THREAD_MULTIPLE if possible */
if (MPI_SUCCESS != (mpi_code = MPI_Init_thread(&argc, &argv, required, &provided))) {
printf("MPI_Init_thread failed with error code %d\n", mpi_code);
return -1;
}
#else
if (MPI_SUCCESS != (mpi_code = MPI_Init(&argc, &argv))) {
printf("MPI_Init failed with error code %d\n", mpi_code);
return -1;
}
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank_g))) {
printf("MPI_Comm_rank failed with error code %d\n", mpi_code);
MPI_Finalize();
return -1;
}
#ifdef H5_HAVE_TEST_API
/* Warn about missing MPI_THREAD_MULTIPLE support */
if ((provided < required) && MAIN_PROCESS)
printf("** MPI doesn't support MPI_Init_thread with MPI_THREAD_MULTIPLE **\n");
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Comm_size(MPI_COMM_WORLD, &mpi_size_g))) {
if (MAIN_PROCESS)
printf("MPI_Comm_size failed with error code %d\n", mpi_code);
MPI_Finalize();
return -1;
}
/* Attempt to turn off atexit post processing so that in case errors
* happen during the test and the process is aborted, it will not get

View File

@ -41,10 +41,43 @@ main(int argc, char **argv)
hsize_t stride[RANK];
hsize_t block[RANK];
DATATYPE *data_array = NULL; /* data buffer */
int mpi_code;
#ifdef H5_HAVE_TEST_API
int required = MPI_THREAD_MULTIPLE;
int provided;
#endif
MPI_Init(&argc, &argv);
MPI_Comm_size(comm, &mpi_size);
MPI_Comm_rank(comm, &mpi_rank);
#ifdef H5_HAVE_TEST_API
/* Attempt to initialize with MPI_THREAD_MULTIPLE if possible */
if (MPI_SUCCESS != (mpi_code = MPI_Init_thread(&argc, &argv, required, &provided))) {
printf("MPI_Init_thread failed with error code %d\n", mpi_code);
return -1;
}
#else
if (MPI_SUCCESS != (mpi_code = MPI_Init(&argc, &argv))) {
printf("MPI_Init failed with error code %d\n", mpi_code);
return -1;
}
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Comm_rank(comm, &mpi_rank))) {
printf("MPI_Comm_rank failed with error code %d\n", mpi_code);
MPI_Finalize();
return -1;
}
#ifdef H5_HAVE_TEST_API
/* Warn about missing MPI_THREAD_MULTIPLE support */
if ((provided < required) && MAINPROCESS)
printf("** MPI doesn't support MPI_Init_thread with MPI_THREAD_MULTIPLE **\n");
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Comm_size(comm, &mpi_size))) {
if (MAINPROCESS)
printf("MPI_Comm_size failed with error code %d\n", mpi_code);
MPI_Finalize();
return -1;
}
if (MAINPROCESS)
TESTING("proper shutdown of HDF5 library");

View File

@ -4271,6 +4271,11 @@ int
main(int argc, char **argv)
{
int mpi_size, mpi_rank; /* mpi variables */
int mpi_code;
#ifdef H5_HAVE_TEST_API
int required = MPI_THREAD_MULTIPLE;
int provided;
#endif
#ifndef H5_HAVE_WIN32_API
/* Un-buffer the stdout and stderr */
@ -4278,9 +4283,37 @@ main(int argc, char **argv)
HDsetbuf(stdout, NULL);
#endif
MPI_Init(&argc, &argv);
MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
#ifdef H5_HAVE_TEST_API
/* Attempt to initialize with MPI_THREAD_MULTIPLE if possible */
if (MPI_SUCCESS != (mpi_code = MPI_Init_thread(&argc, &argv, required, &provided))) {
printf("MPI_Init_thread failed with error code %d\n", mpi_code);
return -1;
}
#else
if (MPI_SUCCESS != (mpi_code = MPI_Init(&argc, &argv))) {
printf("MPI_Init failed with error code %d\n", mpi_code);
return -1;
}
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank))) {
printf("MPI_Comm_rank failed with error code %d\n", mpi_code);
MPI_Finalize();
return -1;
}
#ifdef H5_HAVE_TEST_API
/* Warn about missing MPI_THREAD_MULTIPLE support */
if ((provided < required) && MAINPROCESS)
printf("** MPI doesn't support MPI_Init_thread with MPI_THREAD_MULTIPLE **\n");
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Comm_size(MPI_COMM_WORLD, &mpi_size))) {
if (MAINPROCESS)
printf("MPI_Comm_size failed with error code %d\n", mpi_code);
MPI_Finalize();
return -1;
}
mpi_rank_framework_g = mpi_rank;

View File

@ -301,10 +301,15 @@ int
main(int argc, char **argv)
{
int mpi_size, mpi_rank; /* mpi variables */
int mpi_code;
H5Ptest_param_t ndsets_params, ngroups_params;
H5Ptest_param_t collngroups_params;
H5Ptest_param_t io_mode_confusion_params;
H5Ptest_param_t rr_obj_flush_confusion_params;
#ifdef H5_HAVE_TEST_API
int required = MPI_THREAD_MULTIPLE;
int provided;
#endif
#ifndef H5_HAVE_WIN32_API
/* Un-buffer the stdout and stderr */
@ -312,9 +317,37 @@ main(int argc, char **argv)
HDsetbuf(stdout, NULL);
#endif
MPI_Init(&argc, &argv);
MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);
MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
#ifdef H5_HAVE_TEST_API
/* Attempt to initialize with MPI_THREAD_MULTIPLE if possible */
if (MPI_SUCCESS != (mpi_code = MPI_Init_thread(&argc, &argv, required, &provided))) {
printf("MPI_Init_thread failed with error code %d\n", mpi_code);
return -1;
}
#else
if (MPI_SUCCESS != (mpi_code = MPI_Init(&argc, &argv))) {
printf("MPI_Init failed with error code %d\n", mpi_code);
return -1;
}
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank))) {
printf("MPI_Comm_rank failed with error code %d\n", mpi_code);
MPI_Finalize();
return -1;
}
#ifdef H5_HAVE_TEST_API
/* Warn about missing MPI_THREAD_MULTIPLE support */
if ((provided < required) && MAINPROCESS)
printf("** MPI doesn't support MPI_Init_thread with MPI_THREAD_MULTIPLE **\n");
#endif
if (MPI_SUCCESS != (mpi_code = MPI_Comm_size(MPI_COMM_WORLD, &mpi_size))) {
if (MAINPROCESS)
printf("MPI_Comm_size failed with error code %d\n", mpi_code);
MPI_Finalize();
return -1;
}
mpi_rank_framework_g = mpi_rank;