Convert develop to v2.0.0 (#5006)

Switches the previous 1.16/1.17/1.18 values to 2.0
Allen Byrne 2024-10-27 23:51:07 -05:00 committed by GitHub
parent b8a06b51f1
commit 5425a571e0
72 changed files with 288 additions and 367 deletions

View File

@ -51,7 +51,7 @@ jobs:
- name: Get published binary (Linux)
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: tgz-ubuntu-2204_gcc-binary
name: tgz-ubuntu-2404_gcc-binary
path: ${{ github.workspace }}
- name: List files for the space (Linux)
@ -59,7 +59,7 @@ jobs:
ls -l ${{ github.workspace }}
- name: Uncompress gh binary (Linux)
run: tar -zxvf ${{ github.workspace }}/${{ inputs.file_base }}-ubuntu-2204_gcc.tar.gz
run: tar -zxvf ${{ github.workspace }}/${{ inputs.file_base }}-ubuntu-2404_gcc.tar.gz
- name: Uncompress hdf5 binary (Linux)
run: |
@ -84,8 +84,8 @@ jobs:
run: |
mkdir "${{ github.workspace }}/hdf5R"
cd "${{ github.workspace }}/hdf5R"
wget -q https://github.com/HDFGroup/hdf5/releases/download/hdf5_${{ inputs.file_ref }}/hdf5-${{ steps.convert-hdf5lib-refname.outputs.HDF5R_DOTS }}-ubuntu-2204_gcc.tar.gz
tar zxf hdf5-${{ steps.convert-hdf5lib-refname.outputs.HDF5R_DOTS }}-ubuntu-2204_gcc.tar.gz
wget -q https://github.com/HDFGroup/hdf5/releases/download/hdf5_${{ inputs.file_ref }}/hdf5-${{ steps.convert-hdf5lib-refname.outputs.HDF5R_DOTS }}-ubuntu-2404_gcc.tar.gz
tar zxf hdf5-${{ steps.convert-hdf5lib-refname.outputs.HDF5R_DOTS }}-ubuntu-2404_gcc.tar.gz
- name: List files for the space (Linux)
run: |

View File

@ -109,7 +109,7 @@ jobs:
- name: Get published binary (Linux)
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: tgz-ubuntu-2204_gcc-${{ inputs.build_mode }}-binary
name: tgz-ubuntu-2404_gcc-${{ inputs.build_mode }}-binary
path: ${{ github.workspace }}
- name: Uncompress hdf5 binary (Linux)

View File

@ -266,21 +266,21 @@ jobs:
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-GNUC/README.md ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-GNUC/*.tar.gz ${{ runner.workspace }}/build/hdf5
cd "${{ runner.workspace }}/build"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz hdf5
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.tar.gz hdf5
shell: bash
- name: Publish deb binary (Linux)
id: publish-ctest-deb-binary
run: |
mkdir "${{ runner.workspace }}/builddeb"
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-GNUC/*.deb ${{ runner.workspace }}/builddeb/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-GNUC/*.deb ${{ runner.workspace }}/builddeb/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.deb
shell: bash
- name: Publish rpm binary (Linux)
id: publish-ctest-rpm-binary
run: |
mkdir "${{ runner.workspace }}/buildrpm"
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-GNUC/*.rpm ${{ runner.workspace }}/buildrpm/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-GNUC/*.rpm ${{ runner.workspace }}/buildrpm/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.rpm
shell: bash
- name: List files in the space (Linux)
@ -292,22 +292,22 @@ jobs:
- name: Save published binary (Linux)
uses: actions/upload-artifact@v4
with:
name: tgz-ubuntu-2204_gcc-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
name: tgz-ubuntu-2404_gcc-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.tar.gz
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
- name: Save published binary deb (Linux)
uses: actions/upload-artifact@v4
with:
name: deb-ubuntu-2204_gcc-binary
path: ${{ runner.workspace }}/builddeb/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
name: deb-ubuntu-2404_gcc-binary
path: ${{ runner.workspace }}/builddeb/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.deb
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
- name: Save published binary rpm (Linux)
uses: actions/upload-artifact@v4
with:
name: rpm-ubuntu-2204_gcc-binary
path: ${{ runner.workspace }}/buildrpm/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
name: rpm-ubuntu-2404_gcc-binary
path: ${{ runner.workspace }}/buildrpm/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.rpm
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
# Save doxygen files created by ctest script
@ -589,7 +589,7 @@ jobs:
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-GNUC-S3/README.md ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-GNUC-S3/*.tar.gz ${{ runner.workspace }}/build/hdf5
cd "${{ runner.workspace }}/build"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz hdf5
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc_s3.tar.gz hdf5
shell: bash
- name: List files in the space (Linux S3)
@ -601,8 +601,8 @@ jobs:
- name: Save published binary (Linux S3)
uses: actions/upload-artifact@v4
with:
name: tgz-ubuntu-2204_gcc_s3-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
name: tgz-ubuntu-2404_gcc_s3-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc_s3.tar.gz
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
####### intel builds
@ -812,7 +812,7 @@ jobs:
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Intel/README.md ${{ runner.workspace }}/build/hdf5
cp ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-Intel/*.tar.gz ${{ runner.workspace }}/build/hdf5
cd "${{ runner.workspace }}/build"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz hdf5
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_intel.tar.gz hdf5
shell: bash
- name: List files in the space (Linux_intel)
@ -824,6 +824,6 @@ jobs:
- name: Save published binary (Linux_intel)
uses: actions/upload-artifact@v4
with:
name: tgz-ubuntu-2204_intel-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
name: tgz-ubuntu-2404_intel-binary
path: ${{ runner.workspace }}/build/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2404_intel.tar.gz
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`

View File

@ -24,8 +24,8 @@ jobs:
sed -i 's/hdf5@1.10.6:1.14/hdf5@1.10.6:/g' \
./spack/var/spack/repos/builtin/packages/py-h5py/package.py
. ./spack/share/spack/setup-env.sh
./spack/bin/spack spec py-h5py@master+mpi ^hdf5@develop-1.17
./spack/bin/spack install py-h5py@master+mpi ^hdf5@develop-1.17
./spack/bin/spack spec py-h5py@master+mpi ^hdf5@develop-2.0
./spack/bin/spack install py-h5py@master+mpi ^hdf5@develop-2.0
./spack/bin/spack install py-pytest
./spack/bin/spack install py-ipython
./spack/bin/spack install py-pytest-mpi
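As an optional sanity check after the steps above, the resolved HDF5 and h5py installs can be listed from the same Spack checkout; this is only a sketch of one way to confirm that the develop-2.0 pin took effect, not part of the workflow change itself.
    $ . ./spack/share/spack/setup-env.sh
    $ ./spack/bin/spack find -v hdf5
    $ ./spack/bin/spack find py-h5py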

View File

@ -293,20 +293,20 @@ jobs:
run: ctest . --parallel 2 -C Debug -V
working-directory: ${{ runner.workspace }}/build
build_v1_16:
name: "gcc DBG v1.16 default API"
build_v2_0:
name: "gcc DBG v2.0.0 default API"
runs-on: ubuntu-latest
steps:
# SETUP
- name: Install Linux Dependencies
run: |
sudo apt-get update
sudo apt-get install ninja-build doxygen graphviz
sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev
sudo apt install gcc-12 g++-12 gfortran-12
echo "CC=gcc-12" >> $GITHUB_ENV
echo "CXX=g++-12" >> $GITHUB_ENV
echo "FC=gfortran-12" >> $GITHUB_ENV
sudo apt-get update
sudo apt-get install ninja-build doxygen graphviz
sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev
sudo apt install gcc-12 g++-12 gfortran-12
echo "CC=gcc-12" >> $GITHUB_ENV
echo "CXX=g++-12" >> $GITHUB_ENV
echo "FC=gfortran-12" >> $GITHUB_ENV
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- name: Get Sources
@ -333,7 +333,7 @@ jobs:
-DHDF5_ENABLE_DIRECT_VFD:BOOL=ON \
-DHDF5_ENABLE_ROS3_VFD:BOOL=ON \
-DH5_NO_DEPRECATED_SYMBOLS:BOOL=OFF \
-DHDF5_DEFAULT_API_VERSION:STRING=v116 \
-DHDF5_DEFAULT_API_VERSION:STRING=v200 \
$GITHUB_WORKSPACE
shell: bash
@ -354,13 +354,13 @@ jobs:
# SETUP
- name: Install Linux Dependencies
run: |
sudo apt-get update
sudo apt-get install ninja-build doxygen graphviz
sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev
sudo apt install gcc-12 g++-12 gfortran-12
echo "CC=gcc-12" >> $GITHUB_ENV
echo "CXX=g++-12" >> $GITHUB_ENV
echo "FC=gfortran-12" >> $GITHUB_ENV
sudo apt-get update
sudo apt-get install ninja-build doxygen graphviz
sudo apt install libssl3 libssl-dev libcurl4 libcurl4-openssl-dev
sudo apt install gcc-12 g++-12 gfortran-12
echo "CC=gcc-12" >> $GITHUB_ENV
echo "CXX=g++-12" >> $GITHUB_ENV
echo "FC=gfortran-12" >> $GITHUB_ENV
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- name: Get Sources

View File

@ -259,7 +259,7 @@ jobs:
- name: Save published binary (linux)
uses: actions/upload-artifact@v4
with:
name: tgz-ubuntu-2204_gcc-${{ inputs.build_mode }}-binary
name: tgz-ubuntu-2404_gcc-${{ inputs.build_mode }}-binary
path: ${{ runner.workspace }}/build/HDF5-*-Linux.tar.gz
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
if: ${{ (matrix.os == 'ubuntu-latest') && (inputs.thread_safety != 'TS') && ( inputs.build_mode != 'Debug') }}

View File

@ -121,25 +121,25 @@ jobs:
- name: Get published binary (Linux)
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: tgz-ubuntu-2204_gcc-binary
name: tgz-ubuntu-2404_gcc-binary
path: ${{ github.workspace }}
- name: Get published deb binary (Linux)
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: deb-ubuntu-2204_gcc-binary
name: deb-ubuntu-2404_gcc-binary
path: ${{ github.workspace }}
- name: Get published rpm binary (Linux)
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: rpm-ubuntu-2204_gcc-binary
name: rpm-ubuntu-2404_gcc-binary
path: ${{ github.workspace }}
- name: Get published binary (Linux S3)
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: tgz-ubuntu-2204_gcc_s3-binary
name: tgz-ubuntu-2404_gcc_s3-binary
path: ${{ github.workspace }}
- name: Get published binary (Windows_intel)
@ -157,7 +157,7 @@ jobs:
- name: Get published binary (Linux_intel)
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
with:
name: tgz-ubuntu-2204_intel-binary
name: tgz-ubuntu-2404_intel-binary
path: ${{ github.workspace }}
- name: Get published abi reports (Linux)
@ -187,13 +187,13 @@ jobs:
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.deb >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.rpm >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc_s3.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_intel.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.html.abi.reports.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
@ -233,13 +233,13 @@ jobs:
${{ steps.get-file-base.outputs.FILE_BASE }}.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi
${{ steps.get-file-base.outputs.FILE_BASE }}.html.abi.reports.tar.gz
@ -262,13 +262,13 @@ jobs:
hdf5.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi
${{ steps.get-file-base.outputs.FILE_BASE }}.html.abi.reports.tar.gz

View File

@ -52,12 +52,12 @@ jobs:
${{ steps.get-file-base.outputs.FILE_BASE }}.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2404_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi

View File

@ -858,8 +858,8 @@ if (HDF5_ENABLE_SUBFILING_VFD)
endif()
set (HDF5_DEFAULT_API_VERSION "v118" CACHE STRING "Enable v1.16 API (v16, v18, v110, v112, v114, v116, v118)")
set_property (CACHE HDF5_DEFAULT_API_VERSION PROPERTY STRINGS v16 v18 v110 v112 v114 v116 v118)
set (HDF5_DEFAULT_API_VERSION "v200" CACHE STRING "Enable v2.0 API (v16, v18, v110, v112, v114, v200)")
set_property (CACHE HDF5_DEFAULT_API_VERSION PROPERTY STRINGS v16 v18 v110 v112 v114 v200)
#-----------------------------------------------------------------------------
# Option to use 1.6.x API
#-----------------------------------------------------------------------------
@ -901,22 +901,14 @@ if (HDF5_DEFAULT_API_VERSION MATCHES "v114")
endif ()
#-----------------------------------------------------------------------------
# Option to use 1.16.x API
#-----------------------------------------------------------------------------
set (H5_USE_116_API_DEFAULT 0)
if (HDF5_DEFAULT_API_VERSION MATCHES "v116")
set (H5_USE_116_API_DEFAULT 1)
endif ()
#-----------------------------------------------------------------------------
# Option to use 1.18.x API
# Option to use 2.x.y API
#-----------------------------------------------------------------------------
set (H5_USE_200_API_DEFAULT 0)
if (NOT HDF5_DEFAULT_API_VERSION)
set (HDF5_DEFAULT_API_VERSION "v118")
set (HDF5_DEFAULT_API_VERSION "v200")
endif ()
set (H5_USE_118_API_DEFAULT 0)
if (HDF5_DEFAULT_API_VERSION MATCHES "v118")
set (H5_USE_118_API_DEFAULT 1)
if (DEFAULT_API_VERSION MATCHES "v200")
set (H5_USE_200_API_DEFAULT 1)
endif ()
#-----------------------------------------------------------------------------
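For illustration, a minimal out-of-source configuration that selects the new v200 default explicitly might look like the sketch below; the build directory, generator, and source path are arbitrary assumptions and not part of this change.
    $ mkdir build && cd build
    $ cmake -G Ninja -DHDF5_DEFAULT_API_VERSION:STRING=v200 /path/to/hdf5
    $ cmake --build .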

View File

@ -19,7 +19,7 @@ foreach (example_name ${common_examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -45,7 +45,7 @@ if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.8")
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -72,7 +72,7 @@ endif ()
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
# )
# if (H5_HAVE_PARALLEL)
# target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -112,14 +112,14 @@ endif ()
# endif ()
# endforeach ()
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
# foreach (example_name ${1_16_examples})
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# foreach (example_name ${2_0_examples})
# if (H5EX_BUILD_TESTING)
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# )
# endif ()
# endforeach ()
@ -192,12 +192,12 @@ if (HDF5_BUILD_TOOLS)
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
endif ()
@ -209,7 +209,7 @@ if (HDF5_BUILD_TOOLS)
# endforeach ()
# foreach (example_name ${1_14_examples})
# endforeach ()
# foreach (example_name ${1_16_examples})
# foreach (example_name ${2_0_examples})
# endforeach ()
endif ()

View File

@ -165,7 +165,7 @@ foreach (example ${dyn_examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_${example} PUBLIC ${MPI_C_INCLUDE_DIRS})

View File

@ -19,7 +19,7 @@ foreach (example_name ${common_examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -47,7 +47,7 @@ if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.8" AND NOT ${EXAMPLE_VARNAME}_US
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -74,7 +74,7 @@ endif ()
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
# )
# if (H5_HAVE_PARALLEL)
# target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -114,14 +114,14 @@ endif ()
# endif ()
# endforeach ()
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
# foreach (example_name ${1_16_examples})
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# foreach (example_name ${2_0_examples})
# if (H5EX_BUILD_TESTING)
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# )
# endif ()
# endforeach ()
@ -187,12 +187,12 @@ if (HDF5_BUILD_TOOLS)
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
# endforeach ()
@ -203,7 +203,7 @@ if (HDF5_BUILD_TOOLS)
# endforeach ()
# foreach (example_name ${1_14_examples})
# endforeach ()
# foreach (example_name ${1_16_examples})
# foreach (example_name ${2_0_examples})
# endforeach ()
endif ()

View File

@ -15,7 +15,7 @@ foreach (example_name ${examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
target_link_libraries (${EXAMPLE_VARNAME}_${example_name} ${H5EX_HDF5_LINK_LIBS})

View File

@ -19,7 +19,7 @@ foreach (example_name ${common_examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -45,7 +45,7 @@ endforeach ()
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
# )
# if (H5_HAVE_PARALLEL)
# target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -97,14 +97,14 @@ endforeach ()
# endif ()
# endforeach ()
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
# foreach (example_name ${1_16_examples})
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# foreach (example_name ${2_0_examples})
# if (H5EX_BUILD_TESTING)
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# )
# endif ()
# endforeach ()
@ -321,12 +321,12 @@ if (HDF5_BUILD_TOOLS)
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
# endforeach ()
@ -336,7 +336,7 @@ if (HDF5_BUILD_TOOLS)
# endforeach ()
# foreach (example_name ${1_14_examples})
# endforeach ()
# foreach (example_name ${1_16_examples})
# foreach (example_name ${2_0_examples})
# endforeach ()
endif ()

View File

@ -19,7 +19,7 @@ if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.10")
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -60,14 +60,14 @@ endif ()
# endif ()
# endforeach ()
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
# foreach (example_name ${1_16_examples})
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# foreach (example_name ${2_0_examples})
# if (H5EX_BUILD_TESTING)
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# )
# endif ()
# endforeach ()
@ -102,12 +102,12 @@ if (HDF5_BUILD_TOOLS)
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
endforeach ()
@ -116,7 +116,7 @@ if (HDF5_BUILD_TOOLS)
# endforeach ()
# foreach (example_name ${1_14_examples})
# endforeach ()
# foreach (example_name ${1_16_examples})
# foreach (example_name ${2_0_examples})
# endforeach ()
endif ()

View File

@ -16,7 +16,7 @@ foreach (example ${examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})

View File

@ -15,7 +15,7 @@ foreach (example_name ${examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_tutr_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})

View File

@ -16,7 +16,7 @@ if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.8")
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_cpp_ex_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})

View File

@ -16,7 +16,7 @@ if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.8")
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_cpp_ex_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})

View File

@ -28,7 +28,7 @@ foreach (example_name ${common_examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
@ -73,7 +73,7 @@ if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.10")
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
@ -116,14 +116,14 @@ endif ()
# endif ()
# endforeach ()
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
# foreach (example_name ${1_16_examples})
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# foreach (example_name ${2_0_examples})
# if (H5EX_BUILD_TESTING)
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_f90_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# )
# endif ()
# endforeach ()
@ -216,12 +216,12 @@ if (HDF5_BUILD_TOOLS)
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_f90_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
endif ()
@ -231,7 +231,7 @@ if (HDF5_BUILD_TOOLS)
# endforeach ()
# foreach (example_name ${1_14_examples})
# endforeach ()
# foreach (example_name ${1_16_examples})
# foreach (example_name ${2_0_examples})
# endforeach ()
endif ()
endif ()

View File

@ -28,7 +28,7 @@ foreach (example_name ${common_examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
@ -57,7 +57,7 @@ endforeach ()
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
# "$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
# )
# if (H5_HAVE_PARALLEL)
# target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_C_INCLUDE_DIRS})
@ -110,14 +110,14 @@ endforeach ()
# endif ()
# endforeach ()
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
# foreach (example_name ${1_16_examples})
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# foreach (example_name ${2_0_examples})
# if (H5EX_BUILD_TESTING)
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_f90_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# )
# endif ()
# endforeach ()
@ -165,7 +165,7 @@ endif ()
# endforeach ()
# foreach (example_name ${1_14_examples})
# endforeach ()
# foreach (example_name ${1_16_examples})
# foreach (example_name ${2_0_examples})
# endforeach ()
#endif ()

View File

@ -28,7 +28,7 @@ foreach (example_name ${examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
target_link_libraries (${EXAMPLE_VARNAME}_f90_${example_name} ${H5EX_LINK_Fortran_LIBS})

View File

@ -23,7 +23,7 @@ include (Fortran_sourcefiles.cmake)
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
@ -50,7 +50,7 @@ foreach (example_name ${common_examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_f90_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
@ -103,14 +103,14 @@ endforeach ()
# endif ()
# endforeach ()
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
# foreach (example_name ${1_16_examples})
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# foreach (example_name ${2_0_examples})
# if (H5EX_BUILD_TESTING)
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_f90_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.tst ${PROJECT_BINARY_DIR}/${example_name}.tst
# )
# endif ()
# endforeach ()
@ -280,12 +280,12 @@ if (HDF5_BUILD_TOOLS)
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/114/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "1.16")
#if (HDF5_VERSION_MAJOR VERSION_GREATER_EQUAL "2.0")
# add_custom_command (
# TARGET ${EXAMPLE_VARNAME}_f90_${example_name}
# POST_BUILD
# COMMAND ${CMAKE_COMMAND}
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/116/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# ARGS -E copy_if_different ${PROJECT_SOURCE_DIR}/tfiles/200/${example_name}.ddl ${PROJECT_BINARY_DIR}/${example_name}.ddl
# )
#endif ()
# endforeach ()
@ -293,7 +293,7 @@ if (HDF5_BUILD_TOOLS)
# endforeach ()
# foreach (example_name ${1_14_examples})
# endforeach ()
# foreach (example_name ${1_16_examples})
# foreach (example_name ${2_0_examples})
# endforeach ()
endif ()

View File

@ -23,7 +23,7 @@ include (Fortran_sourcefiles.cmake)
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_f90_tutr_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})
@ -42,7 +42,7 @@ foreach (example_name ${common_examples})
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_110_API}>:-DH5_USE_110_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_112_API}>:-DH5_USE_112_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_114_API}>:-DH5_USE_114_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_116_API}>:-DH5_USE_116_API>"
"$<$<BOOL:${${EXAMPLE_VARNAME}_USE_200_API}>:-DH5_USE_200_API>"
)
if (H5_HAVE_PARALLEL)
target_include_directories (${EXAMPLE_VARNAME}_f90_tutr_${example_name} PUBLIC ${MPI_Fortran_INCLUDE_DIRS})

View File

@ -356,10 +356,10 @@ macro (APIVersion version xyapi)
endif ()
#-----------------------------------------------------------------------------
# Option to use 1.16.x API
# Option to use 2.0.x API
#-----------------------------------------------------------------------------
option (${EXAMPLE_VARNAME}_USE_116_API "Use the HDF5 1.16.x API" OFF)
if (${EXAMPLE_VARNAME}_USE_116_API AND ${xyapi} GREATER 116)
set (${xyapi} "116")
option (${EXAMPLE_VARNAME}_USE_200_API "Use the HDF5 2.x.y API" OFF)
if (${EXAMPLE_VARNAME}_USE_200_API AND ${xyapi} GREATER 200)
set (${xyapi} "200")
endif ()
endmacro ()
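As a hedged illustration of the renamed option, an examples build could request the 2.x API surface roughly as follows; H5EX matches the H5EX_ prefix used elsewhere in these files but is assumed here as the expansion of ${EXAMPLE_VARNAME}, and the source path is a placeholder.
    $ cmake -DH5EX_USE_200_API:BOOL=ON /path/to/HDF5Examples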

View File

@ -8,7 +8,7 @@ use warnings;
# is added (like support for 1.4, etc), the min_sup_idx parameter will
# need to be decremented.)
# Max. library "index" (0 = v1.0, 1 = 1.2, 2 = 1.4, 3 = 1.6, 4 = 1.8, 5 = 1.10, 6 = 1.12, 7 = 1.14, 8 = 1.16, etc)
# Max. library "index" (0 = v1.0, 1 = 1.2, 2 = 1.4, 3 = 1.6, 4 = 1.8, 5 = 1.10, 6 = 1.12, 7 = 1.14, 8 = 2.0, etc)
$max_idx = 8;
# Min. supported previous library version "index" (0 = v1.0, 1 = 1.2, etc)

View File

@ -649,7 +649,7 @@ DSetCreatPropList::setFletcher32() const
/// which is a 32-bit signed long value on Windows, which limited
/// the valid offset that can be set to 2 GiB.
///
///\version 1.16.0 \p offset parameter type changed to HDoff_t from off_t.
///\version 2.0.0 \p offset parameter type changed to HDoff_t from off_t.
//--------------------------------------------------------------------------
void
DSetCreatPropList::setExternal(const char *name, HDoff_t offset, hsize_t size) const
@ -702,7 +702,7 @@ DSetCreatPropList::getExternalCount() const
/// which is a 32-bit signed long value on Windows, which limited
/// the valid offset that can be returned to 2 GiB.
///
///\version 1.16.0 \p offset parameter type changed to HDoff_t from off_t.
///\version 2.0.0 \p offset parameter type changed to HDoff_t from off_t.
//--------------------------------------------------------------------------
void
DSetCreatPropList::getExternal(unsigned idx, size_t name_size, char *name, HDoff_t &offset,

View File

@ -713,8 +713,7 @@ FileAccPropList::getFileLocking(hbool_t &use_file_locking, hbool_t &ignore_when_
/// \li \c H5F_LIBVER_110
/// \li \c H5F_LIBVER_112
/// \li \c H5F_LIBVER_114
/// \li \c H5F_LIBVER_116
/// \li \c H5F_LIBVER_118
/// \li \c H5F_LIBVER_200
/// \li \c H5F_LIBVER_LATEST
///
/// Valid values of \a libver_high are as follows:
@ -722,8 +721,7 @@ FileAccPropList::getFileLocking(hbool_t &use_file_locking, hbool_t &ignore_when_
/// \li \c H5F_LIBVER_110
/// \li \c H5F_LIBVER_112
/// \li \c H5F_LIBVER_114
/// \li \c H5F_LIBVER_116
/// \li \c H5F_LIBVER_118
/// \li \c H5F_LIBVER_200
/// \li \c H5F_LIBVER_LATEST (Default)
///
/// For more detail, please refer to the H5Pset_libver_bounds API in
@ -755,8 +753,7 @@ FileAccPropList::setLibverBounds(H5F_libver_t libver_low, H5F_libver_t libver_hi
/// \li \c H5F_LIBVER_110
/// \li \c H5F_LIBVER_112
/// \li \c H5F_LIBVER_114
/// \li \c H5F_LIBVER_116
/// \li \c H5F_LIBVER_118
/// \li \c H5F_LIBVER_200
/// \li \c H5F_LIBVER_LATEST
///
/// and \a libver_high:
@ -764,8 +761,7 @@ FileAccPropList::setLibverBounds(H5F_libver_t libver_low, H5F_libver_t libver_hi
/// \li \c H5F_LIBVER_110
/// \li \c H5F_LIBVER_112
/// \li \c H5F_LIBVER_114
/// \li \c H5F_LIBVER_116
/// \li \c H5F_LIBVER_118
/// \li \c H5F_LIBVER_200
/// \li \c H5F_LIBVER_LATEST
//--------------------------------------------------------------------------
void

View File

@ -607,11 +607,8 @@
/* Define using v1.14 public API symbols by default */
#cmakedefine H5_USE_114_API_DEFAULT @H5_USE_114_API_DEFAULT@
/* Define using v1.16 public API symbols by default */
#cmakedefine H5_USE_116_API_DEFAULT @H5_USE_116_API_DEFAULT@
/* Define using v1.18 public API symbols by default */
#cmakedefine H5_USE_118_API_DEFAULT @H5_USE_118_API_DEFAULT@
/* Define using v2.0 public API symbols by default */
#cmakedefine H5_USE_200_API_DEFAULT @H5_USE_200_API_DEFAULT@
/* Define if the library will use file locking */
#cmakedefine H5_USE_FILE_LOCKING @H5_USE_FILE_LOCKING@

View File

@ -36,10 +36,8 @@ elseif (HDF5_DEFAULT_API_VERSION MATCHES "v112")
set (H5_USE_112_API ON)
elseif (HDF5_DEFAULT_API_VERSION MATCHES "v114")
set (H5_USE_114_API ON)
elseif (HDF5_DEFAULT_API_VERSION MATCHES "v116")
set (H5_USE_116_API ON)
elseif (HDF5_DEFAULT_API_VERSION MATCHES "v118")
set (H5_USE_118_API ON)
elseif (HDF5_DEFAULT_API_VERSION MATCHES "v200")
set (H5_USE_200_API ON)
endif ()
message (STATUS "HDF5 H5_LIBVER_DIR: ${H5_LIBVER_DIR} HDF5_API_VERSION: ${HDF5_DEFAULT_API_VERSION}")

View File

@ -38,15 +38,15 @@ cmake_minimum_required (VERSION 3.18)
# CTEST_SOURCE_NAME - source folder
##############################################################################
set (CTEST_SOURCE_VERSION "1.17.0")
set (CTEST_SOURCE_VERSION "2.0.0")
set (CTEST_SOURCE_VERSEXT "")
##############################################################################
# handle input parameters to script.
#BUILD_GENERATOR - which CMake generator to use, required
#INSTALLDIR - HDF5-1.17.x root folder
#INSTALLDIR - HDF5-2.0.x root folder
#CTEST_CONFIGURATION_TYPE - Release, Debug, RelWithDebInfo
#CTEST_SOURCE_NAME - name of source folder; HDF5-1.17.x
#CTEST_SOURCE_NAME - name of source folder; HDF5-2.0.x
#MODEL - CDash group name
#HPC - run alternate configurations for HPC machines; sbatch, bsub, raybsub, qsub
#MPI - enable MPI
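A typical driver invocation for a ctest script that consumes these parameters might look like the sketch below; the HDF5config.cmake entry point, generator, and log name are assumptions and not part of this diff.
    $ ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log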

View File

@ -22,7 +22,7 @@ AC_PREREQ([2.71])
## NOTE: Do not forget to change the version number here when we do a
## release!!!
##
AC_INIT([HDF5], [1.17.0], [help@hdfgroup.org])
AC_INIT([HDF5], [2.0.0], [help@hdfgroup.org])
AC_CONFIG_SRCDIR([src/H5.c])
AC_CONFIG_HEADERS([src/H5config.h])
@ -4213,10 +4213,10 @@ esac
AC_SUBST([DEFAULT_API_VERSION])
AC_MSG_CHECKING([which version of public symbols to use by default])
AC_ARG_WITH([default-api-version],
[AS_HELP_STRING([--with-default-api-version=(default|v16|v18|v110|v112|v114|v116|v118)],
[AS_HELP_STRING([--with-default-api-version=(default|v16|v18|v110|v112|v114|v200)],
[Specify default release version of public symbols
[default=v118]])],,
[withval=v118])
[default=v200]])],,
[withval=v200])
## Allowing "default" allows the GitHub CI to check that we didn't forget
## to change the defaults when creating a new major version
@ -4245,16 +4245,11 @@ elif test "X$withval" = "Xv114"; then
DEFAULT_API_VERSION=v114
AC_DEFINE([USE_114_API_DEFAULT], [1],
[Define using v1.14 public API symbols by default])
elif test "X$withval" = "Xv116"; then
AC_MSG_RESULT([v116])
DEFAULT_API_VERSION=v116
AC_DEFINE([USE_116_API_DEFAULT], [1],
[Define using v1.16 public API symbols by default])
elif test "X$withval" = "Xv118" -o "X$withval" = "Xdefault"; then
AC_MSG_RESULT([v118])
DEFAULT_API_VERSION=v118
AC_DEFINE([USE_118_API_DEFAULT], [1],
[Define using v1.18 public API symbols by default])
elif test "X$withval" = "Xv200" -o "X$withval" = "Xdefault"; then
AC_MSG_RESULT([v200])
DEFAULT_API_VERSION=v200
AC_DEFINE([USE_200_API_DEFAULT], [1],
[Define using v2.0 public API symbols by default])
else
AC_MSG_ERROR([invalid version of public symbols given])
fi
@ -4264,7 +4259,7 @@ fi
## if the user insists on doing this via the --enable-unsupported configure
## flag, we'll let them.
if test "X${ALLOW_UNSUPPORTED}" != "Xyes"; then
if test "X${DEFAULT_API_VERSION}" != "Xv118" -a "X${DEPRECATED_SYMBOLS}" = "Xno" ; then
if test "X${DEFAULT_API_VERSION}" != "Xv200" -a "X${DEPRECATED_SYMBOLS}" = "Xno" ; then
AC_MSG_ERROR([Removing old public API symbols not allowed when using them as default public API symbols. Use --enable-unsupported to override this error.])
fi
fi
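For example, building the 2.0.0 sources with an older default API surface through the Autotools path might look like this sketch (the install prefix is arbitrary):
    $ ./configure --prefix=/usr/local/hdf5 --with-default-api-version=v114
    $ make && make check && make install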

View File

@ -539,8 +539,7 @@ h5init_flags_c(int_f *h5d_flags, size_t_f *h5d_size_flags, int_f *h5e_flags, hid
h5f_flags[27] = (int_f)H5F_LIBVER_V110;
h5f_flags[28] = (int_f)H5F_LIBVER_V112;
h5f_flags[29] = (int_f)H5F_LIBVER_V114;
h5f_flags[30] = (int_f)H5F_LIBVER_V116;
h5f_flags[31] = (int_f)H5F_LIBVER_V118;
h5f_flags[30] = (int_f)H5F_LIBVER_V200;
/*
* H5FD flags

View File

@ -56,7 +56,7 @@ MODULE H5LIB
!
! H5F flags declaration
!
INTEGER, PARAMETER :: H5F_FLAGS_LEN = 32
INTEGER, PARAMETER :: H5F_FLAGS_LEN = 31
INTEGER, DIMENSION(1:H5F_FLAGS_LEN) :: H5F_flags
!
! H5generic flags declaration
@ -379,8 +379,7 @@ CONTAINS
H5F_LIBVER_V110_F = H5F_flags(28)
H5F_LIBVER_V112_F = H5F_flags(29)
H5F_LIBVER_V114_F = H5F_flags(30)
H5F_LIBVER_V116_F = H5F_flags(31)
H5F_LIBVER_V118_F = H5F_flags(32)
H5F_LIBVER_V200_F = H5F_flags(31)
!
! H5generic flags
!

View File

@ -236,8 +236,7 @@ MODULE H5GLOBAL
!DEC$ATTRIBUTES DLLEXPORT :: H5F_LIBVER_V110_F
!DEC$ATTRIBUTES DLLEXPORT :: H5F_LIBVER_V112_F
!DEC$ATTRIBUTES DLLEXPORT :: H5F_LIBVER_V114_F
!DEC$ATTRIBUTES DLLEXPORT :: H5F_LIBVER_V116_F
!DEC$ATTRIBUTES DLLEXPORT :: H5F_LIBVER_V118_F
!DEC$ATTRIBUTES DLLEXPORT :: H5F_LIBVER_V200_F
!DEC$ATTRIBUTES DLLEXPORT :: H5F_FSPACE_STRATEGY_FSM_AGGR_F
!DEC$ATTRIBUTES DLLEXPORT :: H5F_FSPACE_STRATEGY_PAGE_F
!DEC$ATTRIBUTES DLLEXPORT :: H5F_FSPACE_STRATEGY_AGGR_F
@ -271,8 +270,7 @@ MODULE H5GLOBAL
INTEGER :: H5F_LIBVER_V110_F !< H5F_LIBVER_V110
INTEGER :: H5F_LIBVER_V112_F !< H5F_LIBVER_V112
INTEGER :: H5F_LIBVER_V114_F !< H5F_LIBVER_V114
INTEGER :: H5F_LIBVER_V116_F !< H5F_LIBVER_V116
INTEGER :: H5F_LIBVER_V118_F !< H5F_LIBVER_V118
INTEGER :: H5F_LIBVER_V200_F !< H5F_LIBVER_V200
INTEGER :: H5F_FSPACE_STRATEGY_FSM_AGGR_F !< H5F_FSPACE_STRATEGY_FSM_AGGR
INTEGER :: H5F_FSPACE_STRATEGY_PAGE_F !< H5F_FSPACE_STRATEGY_PAGE
INTEGER :: H5F_FSPACE_STRATEGY_AGGR_F !< H5F_FSPACE_STRATEGY_AGGR

View File

@ -228,19 +228,12 @@ SUBROUTINE test_create(total_error)
CALL VERIFY("***ERROR: Returned wrong low libver_bounds", low, H5F_LIBVER_V114_F, total_error)
CALL VERIFY("***ERROR: Returned wrong high libver_bounds", high, H5F_LIBVER_V114_F, total_error)
CALL h5pset_libver_bounds_f(fapl, H5F_LIBVER_V116_F, H5F_LIBVER_V116_F, error)
CALL h5pset_libver_bounds_f(fapl, H5F_LIBVER_V200_F, H5F_LIBVER_V200_F, error)
CALL check("h5pset_libver_bounds_f",error, total_error)
CALL h5pget_libver_bounds_f(fapl, low, high, error)
CALL check("h5pget_libver_bounds_f",error, total_error)
CALL VERIFY("***ERROR: Returned wrong low libver_bounds", low, H5F_LIBVER_V116_F, total_error)
CALL VERIFY("***ERROR: Returned wrong high libver_bounds", high, H5F_LIBVER_V116_F, total_error)
CALL h5pset_libver_bounds_f(fapl, H5F_LIBVER_V118_F, H5F_LIBVER_V118_F, error)
CALL check("h5pset_libver_bounds_f",error, total_error)
CALL h5pget_libver_bounds_f(fapl, low, high, error)
CALL check("h5pget_libver_bounds_f",error, total_error)
CALL VERIFY("***ERROR: Returned wrong low libver_bounds", low, H5F_LIBVER_V118_F, total_error)
CALL VERIFY("***ERROR: Returned wrong high libver_bounds", high, H5F_LIBVER_V118_F, total_error)
CALL VERIFY("***ERROR: Returned wrong low libver_bounds", low, H5F_LIBVER_V200_F, total_error)
CALL VERIFY("***ERROR: Returned wrong high libver_bounds", high, H5F_LIBVER_V200_F, total_error)
CALL H5Pset_libver_bounds_f(fapl, H5F_LIBVER_LATEST_F, H5F_LIBVER_LATEST_F, error)
CALL check("H5Pset_libver_bounds_f",error, total_error)

View File

@ -231,7 +231,7 @@ import org.slf4j.LoggerFactory;
* which prints out the HDF5 error stack, as described in the HDF5 C API <i><b>@ref H5Eprint()</b>.</i> This
* may be used by Java exception handlers to print out the HDF5 error stack. <hr>
*
* @version HDF5 1.17.0 <BR>
* @version HDF5 2.0.0 <BR>
* <b>See also: </b>
* @ref HDFARRAY hdf.hdf5lib.HDFArray<br />
* @ref HDF5CONST hdf.hdf5lib.HDF5Constants<br />
@ -273,7 +273,7 @@ public class H5 implements java.io.Serializable {
* </ul>
* Make sure to update the versions number when a different library is used.
*/
public final static int LIB_VERSION[] = {1, 17, 0};
public final static int LIB_VERSION[] = {2, 0, 0};
/**
* @ingroup JH5

View File

@ -573,9 +573,7 @@ public class HDF5Constants {
/** */
public static final int H5F_LIBVER_V114 = H5F_LIBVER_V114();
/** */
public static final int H5F_LIBVER_V116 = H5F_LIBVER_V116();
/** */
public static final int H5F_LIBVER_V118 = H5F_LIBVER_V118();
public static final int H5F_LIBVER_V200 = H5F_LIBVER_V200();
/** */
public static final int H5F_LIBVER_NBOUNDS = H5F_LIBVER_NBOUNDS();
/** */
@ -2060,9 +2058,7 @@ public class HDF5Constants {
private static native final int H5F_LIBVER_V114();
private static native final int H5F_LIBVER_V116();
private static native final int H5F_LIBVER_V118();
private static native final int H5F_LIBVER_V200();
private static native final int H5F_LIBVER_NBOUNDS();

View File

@ -1304,14 +1304,9 @@ Java_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1V114(JNIEnv *env, jclass cls)
return H5F_LIBVER_V114;
}
JNIEXPORT jint JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1V116(JNIEnv *env, jclass cls)
Java_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1V200(JNIEnv *env, jclass cls)
{
return H5F_LIBVER_V116;
}
JNIEXPORT jint JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1V118(JNIEnv *env, jclass cls)
{
return H5F_LIBVER_V118;
return H5F_LIBVER_V200;
}
JNIEXPORT jint JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5F_1LIBVER_1NBOUNDS(JNIEnv *env, jclass cls)

View File

@ -313,7 +313,7 @@ public class TestH5 {
@Test
public void testH5get_libversion()
{
int libversion[] = {1, 17, 0};
int libversion[] = {2, 0, 0};
try {
H5.H5get_libversion(libversion);
@ -322,11 +322,8 @@ public class TestH5 {
fail("H5.H5get_libversion: " + err);
}
for (int i = 0; i < 2; i++)
for (int i = 0; i < 3; i++)
assertEquals(H5.LIB_VERSION[i], libversion[i]);
for (int i = 0; i < 2; i++)
assertFalse(libversion[i] == 0);
}
/**
@ -354,7 +351,7 @@ public class TestH5 {
@Test
public void testH5check_version()
{
int majnum = 1, minnum = 17, relnum = 0;
int majnum = 2, minnum = 0, relnum = 0;
try {
H5.H5check_version(majnum, minnum, relnum);

View File

@ -27,10 +27,10 @@ Obtaining HDF5 source code
2. Obtain HDF5 source from Github
development branch: https://github.com/HDFGroup/hdf5
last release: https://github.com/HDFGroup/hdf5/releases/latest
hdf5-1_17_"X".tar.gz or hdf5-1_17_"X".zip
hdf5-2_"X"_"Y".tar.gz or hdf5-2_"X"_"Y".zip
and put it in "myhdfstuff".
Uncompress the file. There should be a hdf5-1.17."X" folder.
Uncompress the file. There should be a hdf5-2."X"."Y" folder.
========================================================================
@ -47,16 +47,16 @@ Before You Start:
2. Optional: Install the Szip version 2.1 library (you may use
Szip 2.0 binaries).
3. Extract the source from the hdf5-X.Y.Z.tar file and change
directory to hdf5-X.Y.Z.
3. Extract the source from the hdf5-2.X.Y.tar file and change
directory to hdf5-2.X.Y.
4. Quick installation
For those who don't like to read ;-) the following steps can be used
to configure, build, test, and install the HDF5 library, header files,
and support programs. For example, to install HDF5 version X.Y.Z at
and support programs. For example, to install HDF5 version 2.X.Y at
location /usr/local/hdf5, use the following steps.
$ cd hdf5-X.Y.Z
$ cd hdf5-2.X.Y
$ ./configure --prefix=/usr/local/hdf5 <more configure_flags>
$ make
$ make check # run test suite.
@ -81,23 +81,23 @@ III. Full installation instructions for source distributions
1. Unpacking the distribution
The HDF5 source code is distributed in a variety of formats which
can be unpacked with the following commands, each of which creates an
'hdf5-X.Y.Z' directory, where X.Y.Z is the HDF5 version numbers.
'hdf5-2.X.Y' directory, where 2.X.Y is the HDF5 version numbers.
1.1. Non-compressed tar archive (*.tar)
$ tar xf hdf5-X.Y.Z.tar
$ tar xf hdf5-2.X.Y.tar
1.2. Gzip'd tar archive (*.tar.gz)
$ gunzip < hdf5-X.Y.Z.tar.gz | tar xf -
$ gunzip < hdf5-2.X.Y.tar.gz | tar xf -
Or
$ tar zxf hdf5-X.Y.Z.tar.gz
$ tar zxf hdf5-2.X.Y.tar.gz
1.3. Bzip'd tar archive (*.tar.bz2)
$ bunzip2 < hdf5-X.Y.Z.tar.bz2 | tar xf -
$ bunzip2 < hdf5-2.X.Y.tar.bz2 | tar xf -
Or
$ tar jxf hdf5-X.Y.Z.tar.bz2
$ tar jxf hdf5-2.X.Y.tar.bz2
2. Source versus build directories
On most systems the build can occur in a directory other than the
@ -108,7 +108,7 @@ III. Full installation instructions for source distributions
details are below). For example,
$ mkdir build-fortran
$ cd build-fortran
$ ../hdf5-X.Y.Z/configure --enable-fortran ...
$ ../hdf5-2.X.Y/configure --enable-fortran ...
3. Configuring
@ -345,10 +345,8 @@ III. Full installation instructions for source distributions
3.11. Backward compatibility
The 1.17 version of the HDF5 library can be configured to operate
identically to the v1.16 library with the
--with-default-api-version=v116
configure flag, or identically to the v1.14 library with the
The 2.0.0 version of the HDF5 library can be configured to operate
identically to the v1.14 library with the
--with-default-api-version=v114
configure flag, or identically to the v1.12 library with the
--with-default-api-version=v112
@ -359,7 +357,7 @@ III. Full installation instructions for source distributions
configure flag, or identically to the v1.6 library with the
--with-default-api-version=v16
configure flag. This allows existing code to be compiled with the
v1.17 library without requiring immediate changes to the application
v2.0 library without requiring immediate changes to the application
source code. For additional configuration options and other details,
see "API Compatibility Macros":

View File

@ -29,15 +29,15 @@ Obtaining HDF5 source code
2. Obtain HDF5 source from Github
development branch: https://github.com/HDFGroup/hdf5
last release: https://github.com/HDFGroup/hdf5/releases/latest
hdf5-1_15_"X".tar.gz or hdf5-1_15_"X".zip
hdf5-2_"X"_"Y".tar.gz or hdf5-2_"X"_"Y".zip
and put it in "myhdfstuff".
Uncompress the file. There should be a hdf5-1.17."X" folder.
Uncompress the file. There should be a hdf5-2."X"."Y" folder.
CMake version
1. We suggest you obtain the latest CMake from the Kitware web site.
The HDF5 1.17."X" product requires a minimum CMake version 3.18,
where "X" is the current HDF5 release version. If you are using
The HDF5 2."X"."Y" product requires a minimum CMake version 3.18,
where "X"."Y" is the current HDF5 release version. If you are using
VS2022, the minimum version is 3.21.
Note:
@ -85,7 +85,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
2. Download/copy the individual files mentioned above to "myhdfstuff".
Do not uncompress the tar.gz files.
3. Change to the source directory "hdf5-1.17.x".
3. Change to the source directory "hdf5-2.x.y".
CTestScript.cmake file should not be modified.
4. Edit the platform configuration file, HDF5options.cmake, if you want to change
@ -113,7 +113,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
The command above will configure, build, test, and create an install
package in the myhdfstuff folder. It will have the format:
HDF5-1.17.NN-<platform>.<zip or tar.gz>
HDF5-2.X.Y-<platform>.<zip or tar.gz>
On Unix, <platform> will be "Linux". A similar .sh file will also be created.
On Windows, <platform> will be "win64" or "win32". If you have an
@ -134,13 +134,13 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
6. To install, "X"."Y" is the current release version
On Windows (with WiX installed), execute:
HDF5-1.17."X"-win32.msi or HDF5-1.17."X"-win64.msi
HDF5-2."X"."Y"-win32.msi or HDF5-2."X"."Y"-win64.msi
By default this program will install the hdf5 library into the
"C:\Program Files" directory and will create the following
directory structure:
HDF_Group
--HDF5
----1.17."X"
----2."X"."Y"
------bin
------include
------lib
@ -149,29 +149,29 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
On Linux, change to the install destination directory
(create it if it doesn't exist) and execute:
<path-to>/myhdfstuff/HDF5-1.17."X"-Linux.sh
<path-to>/myhdfstuff/HDF5-2."X"."Y"-Linux.sh
After accepting the license, the script will prompt:
By default the HDF5 will be installed in:
"<current directory>/HDF5-1.17."X"-Linux"
Do you want to include the subdirectory HDF5-1.17."X"-Linux?
"<current directory>/HDF5-2."X"."Y"-Linux"
Do you want to include the subdirectory HDF5-2."X"."Y"-Linux?
Saying no will install in: "<current directory>" [Yn]:
Note that the script will create the following directory structure
relative to the install point:
HDF_Group
--HDF5
----1.17."X"
----2."X"."Y"
------bin
------include
------lib
--------plugins
------share
On Mac you will find HDF5-1.17."X"-Darwin.dmg in the myhdfstuff folder. Click
On Mac you will find HDF5-2."X"."Y"-Darwin.dmg in the myhdfstuff folder. Click
on the dmg file to proceed with installation. After accepting the license,
there will be a folder with the following structure:
HDF_Group
--HDF5
----1.17."X"
----2."X"."Y"
------bin
------include
------lib
@ -179,12 +179,12 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
------share
By default the installation will create the bin, include, lib and cmake
folders in the <install destination directory>/HDF_Group/HDF5/1.17."X"
folders in the <install destination directory>/HDF_Group/HDF5/2."X"."Y"
The <install destination directory> depends on the build platform;
Windows will set the default to:
C:/Program Files/HDF_Group/HDF5/1.17."X"
C:/Program Files/HDF_Group/HDF5/2."X"."Y"
Linux will set the default to:
"myhdfstuff/HDF_Group/HDF5/1.17."X"
"myhdfstuff/HDF_Group/HDF5/2."X"."Y"
The default can be changed by adding ",INSTALLDIR=<my new dir>" to the
"ctest -S HDF5config.cmake..." command. For example on linux:
ctest -S HDF5config.cmake,INSTALLDIR=/usr/local/myhdf5,BUILD_GENERATOR=Unix -C Release -VV -O hdf5.log
@ -211,13 +211,13 @@ Notes: This short set of instructions is written for users who want to
5. Configure the C library, tools and tests with one of the following commands:
On Windows 32 bit
cmake -G "Visual Studio 16 2019" -A Win32 -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.17."X"
cmake -G "Visual Studio 16 2019" -A Win32 -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-2."X"."Y"
On Windows 64 bit
cmake -G "Visual Studio 16 2019" -A x64 -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.17."X"
cmake -G "Visual Studio 16 2019" -A x64 -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-2."X"."Y"
On Linux and Mac
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ../hdf5-1.17."X"
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ../hdf5-2."X"."Y"
where "X" is the current release version.
@ -232,13 +232,13 @@ Notes: This short set of instructions is written for users who want to
9. To install
On Windows (with WiX installed), execute:
HDF5-1.17."X"-win32.msi or HDF5-1.17."X"-win64.msi
HDF5-2."X"."Y"-win32.msi or HDF5-2."X"."Y"-win64.msi
By default this program will install the hdf5 library into the
"C:\Program Files" directory and will create the following
directory structure:
HDF_Group
--HDF5
----1.17."X"
----2."X"."Y"
------bin
------include
------lib
@ -247,29 +247,29 @@ Notes: This short set of instructions is written for users who want to
On Linux, change to the install destination directory
(create it if it doesn't exist) and execute:
<path-to>/myhdfstuff/build/HDF5-1.17."X"-Linux.sh
<path-to>/myhdfstuff/build/HDF5-2."X"."Y"-Linux.sh
After accepting the license, the script will prompt:
By default the HDF5 will be installed in:
"<current directory>/HDF5-1.17."X"-Linux"
Do you want to include the subdirectory HDF5-1.17."X"-Linux?
"<current directory>/HDF5-2."X"."Y"-Linux"
Do you want to include the subdirectory HDF5-2."X"."Y"-Linux?
Saying no will install in: "<current directory>" [Yn]:
Note that the script will create the following directory structure
relative to the install point:
HDF_Group
--HDF5
----1.17."X"
----2."X"."Y"
------bin
------include
------lib
--------plugins
------share
On Mac you will find HDF5-1.17."X"-Darwin.dmg in the build folder. Click
On Mac you will find HDF5-2."X"."Y"-Darwin.dmg in the build folder. Click
on the dmg file to proceed with installation. After accepting the license,
there will be a folder with the following structure:
HDF_Group
--HDF5
----1.17."X"
----2."X"."Y"
------bin
------include
------lib
@ -282,7 +282,7 @@ IV. Further considerations
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
web site. The HDF5 1.17."X" product requires a minimum CMake version 3.18.
web site. The HDF5 2."X"."Y" product requires a minimum CMake version 3.18.
If you are using VS2022, the CMake minimum version is 3.21.
2. If you plan to use Zlib or Szip:
@ -888,7 +888,7 @@ HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks"
HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON
HDF5_WANT_DCONV_EXCEPTION "exception handling functions is checked during data conversions" ON
HDF5_DEFAULT_API_VERSION "Enable default API (v16, v18, v110, v112, v114, v116, v118)" "v118"
HDF5_DEFAULT_API_VERSION "Enable default API (v16, v18, v110, v112, v114, v200)" "v200"
HDF5_USE_FOLDERS "Enable folder grouping of projects in IDEs." ON
HDF5_MSVC_NAMING_CONVENTION "Use MSVC Naming conventions for Shared Libraries" OFF
HDF5_MINGW_STATIC_GCC_LIBS "Statically link libgcc/libstdc++" OFF

View File

@ -94,19 +94,19 @@ Build, Test and Install HDF5 on Cygwin
The HDF5 source code is distributed in a variety of formats which
can be unpacked with the following commands, each of which creates
an `hdf5-1.17.x' directory.
an `hdf5-2.0.x' directory.
2.1 Non-compressed tar archive (*.tar)
$ tar xf hdf5-1.17.x.tar
$ tar xf hdf5-2.0.x.tar
2.2 Gzip'd tar archive (*.tar.gz)
$ gunzip < hdf5-1.17.x.tar.gz | tar xf -
$ gunzip < hdf5-2.0.x.tar.gz | tar xf -
2.3 Bzip'd tar archive (*.tar.bz2)
$ bunzip2 < hdf5-1.17.x.tar.bz2 | tar xf -
$ bunzip2 < hdf5-2.0.x.tar.bz2 | tar xf -
2. Setup Environment

View File

@ -1,4 +1,4 @@
HDF5 version 1.17.0 currently under development
HDF5 version 2.0.0 currently under development
Features included for the next major release:
----------------------------------------------------------------------------

View File

@ -1,4 +1,4 @@
HDF5 version 1.17.0 currently under development
HDF5 version 2.0.0 currently under development
================================================================================
@ -21,7 +21,7 @@ The official HDF5 releases can be obtained from:
https://support.hdfgroup.org/downloads/index.html
Changes from Release to Release and New Features in the HDF5-1.16.x release series
Changes from Release to Release and New Features in the HDF5-2.x.y release series
can be found at:
https://support.hdfgroup.org/releases/hdf5/documentation/release_specific_info.md
@ -36,7 +36,7 @@ CONTENTS
- New Features
- Support for new platforms and languages
- Bug Fixes since HDF5-1.16.0
- Bug Fixes since HDF5-2.0.0
- Platforms Tested
- Known Problems
- CMake vs. Autotools installations
@ -228,7 +228,7 @@ Support for new platforms, languages and compilers
==================================================
-
Bug Fixes since HDF5-1.16.0 release
Bug Fixes since HDF5-2.0.0 release
===================================
Library
-------

View File

@ -216,5 +216,5 @@ For more information on the HDF5 versioning and backward and forward compatibili
[u11]: https://github.com/HDFGroup/hdf5/blob/develop/src/CMakeLists.txt
[u12]: https://github.com/HDFGroup/hdf5/blob/develop/configure.ac
[u13]: https://hdfgroup.github.io/hdf5/develop/api-compat-macros.html
[u14]: https://github.com/HDFGroup/hdf5/releases/tag/snapshot-1.16
[u14]: https://github.com/HDFGroup/hdf5/releases/tag/snapshot-2.0.0
[u15]: https://github.com/HDFGroup/hdf5/releases/tag/snapshot

View File

@ -21,7 +21,7 @@ I. Preconditions
========================================================================
1. We suggest you obtain the latest CMake for your platform from the Kitware
web site. The HDF5 1.17.x product requires a minimum CMake version
web site. The HDF5 2.0.x product requires a minimum CMake version
of 3.18. If you are using VS2022, the minimum CMake version is 3.21.
2. You have installed the HDF5 library built with CMake, by executing

View File

@ -38,7 +38,7 @@ I. Preconditions
========================================================================
1. We suggest you obtain the latest CMake for your platform from the Kitware
web site. The HDF5 1.17.x product requires a minimum CMake version
web site. The HDF5 2.0.x product requires a minimum CMake version
of 3.18. If you are using VS2022, the minimum CMake version is 3.21.
2. You have installed the HDF5 library built with CMake, by executing
@ -50,24 +50,24 @@ I. Preconditions
or environment variable, set(ENV{HDF5_ROOT} "<install_path>")
to the installed location of HDF5.
On Windows:
HDF5_ROOT=C:/Program Files/HDF_Group/HDF5/1.17.x/
HDF5_ROOT=C:/Program Files/HDF_Group/HDF5/2.0.x/
On unix:
HDF5_ROOT=<install root folder>/HDF_Group/HDF5/1.17.x/
HDF5_ROOT=<install root folder>/HDF_Group/HDF5/2.0.x/
If you are using shared libraries, you may need to add to the path
environment variable. Set the path environment variable to the
installed location of the library files for HDF5.
On Windows (*.dll):
PATH=%PATH%;C:/Program Files/HDF_Group/HDF5/1.17.x/bin
PATH=%PATH%;C:/Program Files/HDF_Group/HDF5/2.0.x/bin
On unix (*.so):
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:<install root folder>/HDF_Group/HDF5/1.17.x/lib
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:<install root folder>/HDF_Group/HDF5/2.0.x/lib
If you are using filter plugin libraries, you will need to set the
HDF5_PLUGIN_PATH environment variable.
On Windows:
HDF5_PLUGIN_PATH=C:/Program Files/HDF_Group/HDF5/1.17.x/lib/plugin
HDF5_PLUGIN_PATH=C:/Program Files/HDF_Group/HDF5/2.0.x/lib/plugin
On unix:
HDF5_PLUGIN_PATH=<install root folder>/HDF_Group/HDF5/1.17.x/lib/plugin
HDF5_PLUGIN_PATH=<install root folder>/HDF_Group/HDF5/2.0.x/lib/plugin
(Note there are no quote characters used on Windows and all platforms
use forward slashes)

View File

@ -62,11 +62,11 @@ Using Visual Studio 2008 with HDF5 Libraries built with Visual Studio 2008
and select "x64".
2.2 Find the box "Show directories for", choose "Include files", add the
header path (i.e. c:\Program Files\HDF_Group\HDF5\1.17.x\include)
header path (i.e. c:\Program Files\HDF_Group\HDF5\2.0.x\include)
to the included directories.
2.3 Find the box "Show directories for", choose "Library files", add the
library path (i.e. c:\Program Files\HDF_Group\HDF5\1.17.x\lib)
library path (i.e. c:\Program Files\HDF_Group\HDF5\2.0.x\lib)
to the library directories.
2.4 If using Fortran libraries, you will also need to setup the path

View File

@ -98,8 +98,7 @@ const unsigned H5O_attr_ver_bounds[] = {
H5O_ATTR_VERSION_3, /* H5F_LIBVER_V110 */
H5O_ATTR_VERSION_3, /* H5F_LIBVER_V112 */
H5O_ATTR_VERSION_3, /* H5F_LIBVER_V114 */
H5O_ATTR_VERSION_3, /* H5F_LIBVER_V116 */
H5O_ATTR_VERSION_3, /* H5F_LIBVER_V118 */
H5O_ATTR_VERSION_3, /* H5F_LIBVER_V200 */
H5O_ATTR_VERSION_LATEST /* H5F_LIBVER_LATEST */
};
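
The same one-line substitution repeats in every format-version bounds table that follows: the separate V116 and V118 rows collapse into a single V200 row, keeping one entry per H5F_libver_t value. A self-contained sketch of the indexing pattern these tables support; the table name and values here are illustrative, not the library's internal constants:

    #include "hdf5.h"   /* H5F_libver_t */

    /* Illustrative bounds table: one entry per named bound, indexed by the
     * H5F_libver_t value itself (EARLIEST .. V200). Values are examples only. */
    static const unsigned example_msg_ver_bounds[] = {
        1, /* H5F_LIBVER_EARLIEST */
        2, /* H5F_LIBVER_V18      */
        3, /* H5F_LIBVER_V110     */
        3, /* H5F_LIBVER_V112     */
        3, /* H5F_LIBVER_V114     */
        3  /* H5F_LIBVER_V200     */
    };

    /* Newest message version permitted under a given upper bound. */
    unsigned example_max_msg_version(H5F_libver_t high_bound)
    {
        return example_msg_ver_bounds[high_bound];
    }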

View File

@ -48,8 +48,7 @@ const unsigned H5O_layout_ver_bounds[] = {
H5O_LAYOUT_VERSION_4, /* H5F_LIBVER_V110 */
H5O_LAYOUT_VERSION_4, /* H5F_LIBVER_V112 */
H5O_LAYOUT_VERSION_4, /* H5F_LIBVER_V114 */
H5O_LAYOUT_VERSION_4, /* H5F_LIBVER_V116 */
H5O_LAYOUT_VERSION_4, /* H5F_LIBVER_V118 */
H5O_LAYOUT_VERSION_4, /* H5F_LIBVER_V200 */
H5O_LAYOUT_VERSION_LATEST /* H5F_LIBVER_LATEST */
};

View File

@ -168,12 +168,11 @@ typedef enum H5F_libver_t {
H5F_LIBVER_V110 = 2, /**< Use the latest v110 format for storing objects */
H5F_LIBVER_V112 = 3, /**< Use the latest v112 format for storing objects */
H5F_LIBVER_V114 = 4, /**< Use the latest v114 format for storing objects */
H5F_LIBVER_V116 = 5, /**< Use the latest v116 format for storing objects */
H5F_LIBVER_V118 = 6, /**< Use the latest v118 format for storing objects */
H5F_LIBVER_V200 = 5, /**< Use the latest v200 format for storing objects */
H5F_LIBVER_NBOUNDS /**< Sentinel */
} H5F_libver_t;
#define H5F_LIBVER_LATEST H5F_LIBVER_V118
#define H5F_LIBVER_LATEST H5F_LIBVER_V200
/**
* File space handling strategy
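
With the enum renumbered, an application opts into the new bound through the usual file access property list. A minimal sketch (the file name is illustrative and error checks are omitted):

    #include "hdf5.h"

    int main(void)
    {
        hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
        hid_t file;

        /* Objects written to this file use formats no newer than the 2.0 release. */
        H5Pset_libver_bounds(fapl, H5F_LIBVER_V18, H5F_LIBVER_V200);
        file = H5Fcreate("bounds_v200.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);

        /* ... create groups, datasets, attributes ... */

        H5Fclose(file);
        H5Pclose(fapl);
        return 0;
    }

The low bound of H5F_LIBVER_V18 is only an example; any pair with low less than or equal to high is accepted.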

View File

@ -71,8 +71,7 @@ static const unsigned HDF5_superblock_ver_bounds[] = {
HDF5_SUPERBLOCK_VERSION_3, /* H5F_LIBVER_V110 */
HDF5_SUPERBLOCK_VERSION_3, /* H5F_LIBVER_V112 */
HDF5_SUPERBLOCK_VERSION_3, /* H5F_LIBVER_V114 */
HDF5_SUPERBLOCK_VERSION_3, /* H5F_LIBVER_V116 */
HDF5_SUPERBLOCK_VERSION_3, /* H5F_LIBVER_V118 */
HDF5_SUPERBLOCK_VERSION_3, /* H5F_LIBVER_V200 */
HDF5_SUPERBLOCK_VERSION_LATEST /* H5F_LIBVER_LATEST */
};

View File

@ -153,8 +153,7 @@ const unsigned H5O_fill_ver_bounds[] = {
H5O_FILL_VERSION_3, /* H5F_LIBVER_V110 */
H5O_FILL_VERSION_3, /* H5F_LIBVER_V112 */
H5O_FILL_VERSION_3, /* H5F_LIBVER_V114 */
H5O_FILL_VERSION_3, /* H5F_LIBVER_V116 */
H5O_FILL_VERSION_3, /* H5F_LIBVER_V118 */
H5O_FILL_VERSION_3, /* H5F_LIBVER_V200 */
H5O_FILL_VERSION_LATEST /* H5F_LIBVER_LATEST */
};

View File

@ -69,8 +69,7 @@ static const unsigned H5O_fsinfo_ver_bounds[] = {
H5O_FSINFO_VERSION_1, /* H5F_LIBVER_V110 */
H5O_FSINFO_VERSION_1, /* H5F_LIBVER_V112 */
H5O_FSINFO_VERSION_1, /* H5F_LIBVER_V114 */
H5O_FSINFO_VERSION_1, /* H5F_LIBVER_V116 */
H5O_FSINFO_VERSION_1, /* H5F_LIBVER_V118 */
H5O_FSINFO_VERSION_1, /* H5F_LIBVER_V200 */
H5O_FSINFO_VERSION_LATEST /* H5F_LIBVER_LATEST */
};
#define N_FSINFO_VERSION_BOUNDS H5F_LIBVER_NBOUNDS

View File

@ -130,8 +130,7 @@ const unsigned H5O_obj_ver_bounds[] = {
H5O_VERSION_2, /* H5F_LIBVER_V110 */
H5O_VERSION_2, /* H5F_LIBVER_V112 */
H5O_VERSION_2, /* H5F_LIBVER_V114 */
H5O_VERSION_2, /* H5F_LIBVER_V116 */
H5O_VERSION_2, /* H5F_LIBVER_V118 */
H5O_VERSION_2, /* H5F_LIBVER_V200 */
H5O_VERSION_LATEST /* H5F_LIBVER_LATEST */
};

View File

@ -88,8 +88,7 @@ const unsigned H5O_pline_ver_bounds[] = {
H5O_PLINE_VERSION_2, /* H5F_LIBVER_V110 */
H5O_PLINE_VERSION_2, /* H5F_LIBVER_V112 */
H5O_PLINE_VERSION_2, /* H5F_LIBVER_V114 */
H5O_PLINE_VERSION_2, /* H5F_LIBVER_V116 */
H5O_PLINE_VERSION_2, /* H5F_LIBVER_V118 */
H5O_PLINE_VERSION_2, /* H5F_LIBVER_V200 */
H5O_PLINE_VERSION_LATEST /* H5F_LIBVER_LATEST */
};

View File

@ -5053,26 +5053,11 @@ H5_DLL herr_t H5Pset_gc_references(hid_t fapl_id, unsigned gc_ref);
* objects created with this setting.</td>
* </tr>
* <tr>
* <td>\p low=#H5F_LIBVER_V116<br />
* <td>\p low=#H5F_LIBVER_V200<br />
* \p high=<any version higher than \p low but not #H5F_LIBVER_LATEST></td>
* <td>
* \li The library will create objects with the latest format
* versions available to library release 1.16.x.
* \li The library will allow objects to be created with the latest
* format versions available to library release specified
* in the \p high value.
* \li API calls that create objects or features that are available
* to versions of the library greater than version specified in
* \p high will fail.
* \li Earlier versions of the library may not be able to access
* objects created with this setting.</td>
* </tr>
* <tr>
* <td>\p low=#H5F_LIBVER_V118<br />
* \p high=<any version higher than \p low but not #H5F_LIBVER_LATEST></td>
* <td>
* \li The library will create objects with the latest format
* versions available to library release 1.18.x.
* versions available to library release 2.0.x.
* \li The library will allow objects to be created with the latest
* format versions available to library release specified
* in the \p high value.
@ -5142,10 +5127,10 @@ H5_DLL herr_t H5Pset_gc_references(hid_t fapl_id, unsigned gc_ref);
* </table>
*
* \note *H5F_LIBVER_LATEST*:<br />
* Since 1.16.x is also #H5F_LIBVER_LATEST, there is no upper
* Since 2.0.x is also #H5F_LIBVER_LATEST, there is no upper
* limit on the format versions to use. That is, if a
* newer format version is required to support a feature
* in 1.16.x series, this setting will allow the object to be
* in 2.0.x series, this setting will allow the object to be
* created.
*
* \version 1.10.2 #H5F_LIBVER_V18 added to the enumerated defines in
@ -6059,7 +6044,7 @@ H5_DLL herr_t H5Pget_dset_no_attrs_hint(hid_t dcpl_id, hbool_t *minimize);
* which is a 32-bit signed long value on Windows, which limited
* the valid offset that can be returned to 2 GiB.
*
* \version 1.16.0 \p offset parameter type changed to HDoff_t from off_t.
* \version 2.0.0 \p offset parameter type changed to HDoff_t from off_t.
* \version 1.6.4 \p idx parameter type changed to unsigned.
* \since 1.0.0
*
@ -6556,7 +6541,7 @@ H5_DLL herr_t H5Pset_dset_no_attrs_hint(hid_t dcpl_id, hbool_t minimize);
* which is a 32-bit signed long value on Windows, which limited
* the valid offset that can be set to 2 GiB.
*
* \version 1.16.0 \p offset parameter type changed to HDoff_t from off_t.
* \version 2.0.0 \p offset parameter type changed to HDoff_t from off_t.
* \since 1.0.0
*
*/
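
Because H5F_LIBVER_LATEST is now an alias for H5F_LIBVER_V200, setting the high bound to LATEST and reading it back yields the V200 value. A short sketch of that round trip:

    #include <stdio.h>
    #include "hdf5.h"

    int main(void)
    {
        hid_t        fapl = H5Pcreate(H5P_FILE_ACCESS);
        H5F_libver_t low, high;

        H5Pset_libver_bounds(fapl, H5F_LIBVER_V200, H5F_LIBVER_LATEST);
        H5Pget_libver_bounds(fapl, &low, &high);
        printf("low == V200: %d, high == V200: %d\n",
               (int)(low == H5F_LIBVER_V200), (int)(high == H5F_LIBVER_V200));

        H5Pclose(fapl);
        return 0;
    }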

View File

@ -64,8 +64,7 @@ const unsigned H5O_sdspace_ver_bounds[] = {
H5O_SDSPACE_VERSION_2, /* H5F_LIBVER_V110 */
H5O_SDSPACE_VERSION_2, /* H5F_LIBVER_V112 */
H5O_SDSPACE_VERSION_2, /* H5F_LIBVER_V114 */
H5O_SDSPACE_VERSION_2, /* H5F_LIBVER_V116 */
H5O_SDSPACE_VERSION_2, /* H5F_LIBVER_V118 */
H5O_SDSPACE_VERSION_2, /* H5F_LIBVER_V200 */
H5O_SDSPACE_VERSION_LATEST /* H5F_LIBVER_LATEST */
};

View File

@ -241,8 +241,7 @@ static const unsigned H5O_sds_hyper_ver_bounds[] = {
H5S_HYPER_VERSION_2, /* H5F_LIBVER_V110 */
H5S_HYPER_VERSION_3, /* H5F_LIBVER_V112 */
H5S_HYPER_VERSION_3, /* H5F_LIBVER_V114 */
H5S_HYPER_VERSION_3, /* H5F_LIBVER_V116 */
H5S_HYPER_VERSION_3, /* H5F_LIBVER_V118 */
H5S_HYPER_VERSION_3, /* H5F_LIBVER_V200 */
H5S_HYPER_VERSION_3 /* H5F_LIBVER_LATEST */
};

View File

@ -126,8 +126,7 @@ static const unsigned H5O_sds_point_ver_bounds[] = {
H5S_POINT_VERSION_1, /* H5F_LIBVER_V110 */
H5S_POINT_VERSION_2, /* H5F_LIBVER_V112 */
H5S_POINT_VERSION_2, /* H5F_LIBVER_V114 */
H5S_POINT_VERSION_2, /* H5F_LIBVER_V116 */
H5S_POINT_VERSION_2, /* H5F_LIBVER_V118 */
H5S_POINT_VERSION_2, /* H5F_LIBVER_V200 */
H5S_POINT_VERSION_2 /* H5F_LIBVER_LATEST */
};

View File

@ -612,8 +612,7 @@ const unsigned H5O_dtype_ver_bounds[] = {
H5O_DTYPE_VERSION_3, /* H5F_LIBVER_V110 */
H5O_DTYPE_VERSION_4, /* H5F_LIBVER_V112 */
H5O_DTYPE_VERSION_4, /* H5F_LIBVER_V114 */
H5O_DTYPE_VERSION_4, /* H5F_LIBVER_V116 */
H5O_DTYPE_VERSION_4, /* H5F_LIBVER_V118 */
H5O_DTYPE_VERSION_4, /* H5F_LIBVER_V200 */
H5O_DTYPE_VERSION_LATEST /* H5F_LIBVER_LATEST */
};

View File

@ -75,11 +75,11 @@
/**
* For major interface/format changes
*/
#define H5_VERS_MAJOR 1
#define H5_VERS_MAJOR 2
/**
* For minor interface/format changes
*/
#define H5_VERS_MINOR 17
#define H5_VERS_MINOR 0
/**
* For tweaks, bug-fixes, or development
*/
@ -91,11 +91,11 @@
/**
* Short version string
*/
#define H5_VERS_STR "1.17.0"
#define H5_VERS_STR "2.0.0"
/**
* Full version string
*/
#define H5_VERS_INFO "HDF5 library version: 1.17.0"
#define H5_VERS_INFO "HDF5 library version: 2.0.0"
#define H5check() H5check_version(H5_VERS_MAJOR, H5_VERS_MINOR, H5_VERS_RELEASE)
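
After this change the compile-time macros and the runtime query should both report 2.0.0. A small sketch that prints both, using H5check() as the usual header/library consistency check:

    #include <stdio.h>
    #include "hdf5.h"

    int main(void)
    {
        unsigned maj, min, rel;

        H5check();                              /* header vs. library consistency */
        H5get_libversion(&maj, &min, &rel);
        printf("headers: %s\n", H5_VERS_INFO);  /* "HDF5 library version: 2.0.0" */
        printf("library: %u.%u.%u (expect %d.%d.%d)\n",
               maj, min, rel, H5_VERS_MAJOR, H5_VERS_MINOR, H5_VERS_RELEASE);
        return 0;
    }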

View File

@ -1452,12 +1452,8 @@ H5_trace_args(H5RS_str_t *rs, const char *type, va_list ap)
H5RS_acat(rs, "H5F_LIBVER_V114");
break;
case H5F_LIBVER_V116:
H5RS_acat(rs, "H5F_LIBVER_V116");
break;
case H5F_LIBVER_V118:
HDcompile_assert(H5F_LIBVER_LATEST == H5F_LIBVER_V118);
case H5F_LIBVER_V200:
HDcompile_assert(H5F_LIBVER_LATEST == H5F_LIBVER_V200);
H5RS_acat(rs, "H5F_LIBVER_LATEST");
break;
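
The HDcompile_assert above is library-internal. Application or binding code that switches over H5F_libver_t can keep an equivalent guard with standard C11, assuming a C11 compiler:

    #include "hdf5.h"

    /* Fails to compile as soon as a release adds a bound newer than V200,
     * flagging every switch statement that needs a new case. */
    _Static_assert(H5F_LIBVER_LATEST == H5F_LIBVER_V200,
                   "update libver handling for the new H5F_libver_t bound");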

View File

@ -49,9 +49,13 @@
#endif
/* Test file names, using H5F_libver_t as indices */
static const char *FILENAME[] = {
"tchunk_info_earliest", "tchunk_info_v18", "tchunk_info_v110", "tchunk_info_v112",
"tchunk_info_v114", "tchunk_info_v116", "tchunk_info_v118", NULL};
static const char *FILENAME[] = {"tchunk_info_earliest",
"tchunk_info_v18",
"tchunk_info_v110",
"tchunk_info_v112",
"tchunk_info_v114",
"tchunk_info_v200",
NULL};
/* File to be used in test_failed_attempts */
#define FILTERMASK_FILE "tflt_msk"

View File

@ -10126,7 +10126,7 @@ test_versionbounds(void)
hsize_t arr_dim[] = {ARRAY_LEN}; /* Length of the array */
int low, high; /* Indices for iterating over versions */
H5F_libver_t versions[] = {H5F_LIBVER_EARLIEST, H5F_LIBVER_V18, H5F_LIBVER_V110,
H5F_LIBVER_V112, H5F_LIBVER_V114, H5F_LIBVER_V116};
H5F_LIBVER_V112, H5F_LIBVER_V114, H5F_LIBVER_V200};
int versions_count = 6; /* Number of version bounds in the array */
unsigned highest_version; /* Highest version in nested datatypes */
color_t enum_val; /* Enum type index */
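
For reference, the sweep that this array drives looks roughly like the following. This is a condensed sketch, not the test itself; invalid low/high pairs are simply skipped and the object-version checks are elided:

    #include "hdf5.h"

    static void sweep_version_bounds(void)
    {
        const H5F_libver_t versions[] = {H5F_LIBVER_EARLIEST, H5F_LIBVER_V18, H5F_LIBVER_V110,
                                         H5F_LIBVER_V112, H5F_LIBVER_V114, H5F_LIBVER_V200};
        const int          n    = 6;
        hid_t              fapl = H5Pcreate(H5P_FILE_ACCESS);

        H5Eset_auto2(H5E_DEFAULT, NULL, NULL); /* silence expected failures */
        for (int low = 0; low < n; low++)
            for (int high = 0; high < n; high++) {
                if (H5Pset_libver_bounds(fapl, versions[low], versions[high]) < 0)
                    continue; /* e.g. low > high is rejected */
                /* ... create a file and objects, then verify their format versions ... */
            }
        H5Pclose(fapl);
    }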

View File

@ -106,8 +106,7 @@ const char *LIBVER_NAMES[] = {"earliest", /* H5F_LIBVER_EARLIEST = 0 */
"v110", /* H5F_LIBVER_V110 = 2 */
"v112", /* H5F_LIBVER_V112 = 3 */
"v114", /* H5F_LIBVER_V114 = 4 */
"v116", /* H5F_LIBVER_V116 = 5 */
"v118", /* H5F_LIBVER_V118 = 6 */
"v200", /* H5F_LIBVER_V200 = 5 */
"latest", /* H5F_LIBVER_LATEST */
NULL};

View File

@ -6189,8 +6189,7 @@ test_libver_bounds_super_create(hid_t fapl, hid_t fcpl, htri_t is_swmr, htri_t n
case H5F_LIBVER_V110:
case H5F_LIBVER_V112:
case H5F_LIBVER_V114:
case H5F_LIBVER_V116:
case H5F_LIBVER_V118:
case H5F_LIBVER_V200:
ok = (f->shared->sblock->super_vers == HDF5_SUPERBLOCK_VERSION_3);
VERIFY(ok, true, "HDF5_superblock_ver_bounds");
break;
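
The same invariant is visible from outside the test harness: any bound from V110 up through the new V200 still yields a version 3 superblock, which can be confirmed with the public file-info query. A hedged sketch (the file name is illustrative):

    #include <stdio.h>
    #include "hdf5.h"

    int main(void)
    {
        hid_t       fapl = H5Pcreate(H5P_FILE_ACCESS);
        hid_t       file;
        H5F_info2_t finfo;

        H5Pset_libver_bounds(fapl, H5F_LIBVER_V200, H5F_LIBVER_LATEST);
        file = H5Fcreate("super_v200.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
        H5Fget_info2(file, &finfo);
        printf("superblock version = %u\n", finfo.super.version); /* expect 3 */

        H5Fclose(file);
        H5Pclose(fapl);
        return 0;
    }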

View File

@ -118,9 +118,8 @@
* \li <strong>2</strong> This is #H5F_LIBVER_V110 in #H5F_libver_t struct
* \li <strong>3</strong> This is #H5F_LIBVER_V112 in #H5F_libver_t struct
* \li <strong>4</strong> This is #H5F_LIBVER_V114 in #H5F_libver_t struct
* \li <strong>5</strong> This is #H5F_LIBVER_V116 in #H5F_libver_t struct
* \li <strong>6</strong> This is #H5F_LIBVER_V118 in #H5F_libver_t struct
* \li #H5F_LIBVER_LATEST is aliased to #H5F_LIBVER_V118 for this release
* \li <strong>5</strong> This is #H5F_LIBVER_V200 in #H5F_libver_t struct
* \li #H5F_LIBVER_LATEST is aliased to #H5F_LIBVER_V200 for this release
*
* \subsubsection subsubsec_cltools_h5repack_options_fs File Strategy Settings
* <strong>FS_STRATEGY</strong> is a string indicating the file space strategy used:

View File

@ -228,10 +228,9 @@ usage(const char *prog)
PRINTVALSTREAM(rawoutstream, " 2: This is H5F_LIBVER_V110 in H5F_libver_t struct\n");
PRINTVALSTREAM(rawoutstream, " 3: This is H5F_LIBVER_V112 in H5F_libver_t struct\n");
PRINTVALSTREAM(rawoutstream, " 4: This is H5F_LIBVER_V114 in H5F_libver_t struct\n");
PRINTVALSTREAM(rawoutstream, " 5: This is H5F_LIBVER_V116 in H5F_libver_t struct\n");
PRINTVALSTREAM(rawoutstream, " 6: This is H5F_LIBVER_V118 in H5F_libver_t struct\n");
PRINTVALSTREAM(rawoutstream, " 5: This is H5F_LIBVER_V200 in H5F_libver_t struct\n");
PRINTVALSTREAM(rawoutstream,
" (H5F_LIBVER_LATEST is aliased to H5F_LIBVER_V118 for this release\n");
" (H5F_LIBVER_LATEST is aliased to H5F_LIBVER_V200 for this release\n");
PRINTVALSTREAM(rawoutstream, "\n");
PRINTVALSTREAM(rawoutstream, " FS_STRATEGY is a string indicating the file space strategy used:\n");
PRINTVALSTREAM(rawoutstream, " FSM_AGGR:\n");

View File

@ -92,9 +92,8 @@ usage: h5repack [OPTIONS] file1 file2
2: This is H5F_LIBVER_V110 in H5F_libver_t struct
3: This is H5F_LIBVER_V112 in H5F_libver_t struct
4: This is H5F_LIBVER_V114 in H5F_libver_t struct
5: This is H5F_LIBVER_V116 in H5F_libver_t struct
6: This is H5F_LIBVER_V118 in H5F_libver_t struct
(H5F_LIBVER_LATEST is aliased to H5F_LIBVER_V118 for this release
5: This is H5F_LIBVER_V200 in H5F_libver_t struct
(H5F_LIBVER_LATEST is aliased to H5F_LIBVER_V200 for this release
FS_STRATEGY is a string indicating the file space strategy used:
FSM_AGGR: