Merge changes from dev CMake and documentation (#4828)

Allen Byrne 2024-09-13 10:47:29 -05:00 committed by GitHub
parent 4d135a0473
commit 6c5d47c95e
45 changed files with 153 additions and 202 deletions

View File

@ -172,7 +172,7 @@ jobs:
Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/build114/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/build114/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/README.md -Destination ${{ runner.workspace }}/build114/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace}}/build114/hdf5/ -Include *.zip
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/build114/hdf5/ -Include *.zip
cd "${{ runner.workspace }}/build114"
7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip hdf5
shell: pwsh
@ -181,13 +181,7 @@ jobs:
id: publish-ctest-msi-binary
run: |
mkdir "${{ runner.workspace }}/buildmsi"
mkdir "${{ runner.workspace }}/buildmsi/hdf5"
Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/buildmsi/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/buildmsi/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/README.md -Destination ${{ runner.workspace }}/buildmsi/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/buildmsi/hdf5/ -Include *.msi
cd "${{ runner.workspace }}/buildmsi"
7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip hdf5
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-MSVC/* -Destination ${{ runner.workspace }}/buildmsi/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi -Include *.msi
shell: pwsh
- name: List files in the space (Windows)
@ -208,7 +202,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: msi-vs2022_cl-binary
path: ${{ runner.workspace }}/buildmsi/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip
path: ${{ runner.workspace }}/buildmsi/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
build_and_test_linux:
@ -279,26 +273,14 @@ jobs:
id: publish-ctest-deb-binary
run: |
mkdir "${{ runner.workspace }}/builddeb"
mkdir "${{ runner.workspace }}/builddeb/hdf5"
cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING ${{ runner.workspace }}/builddeb/hdf5
cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 ${{ runner.workspace }}/builddeb/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/README.md ${{ runner.workspace }}/builddeb/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/*.deb ${{ runner.workspace }}/builddeb/hdf5
cd "${{ runner.workspace }}/builddeb"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz hdf5
shell: bash
- name: Publish rpm binary (Linux)
id: publish-ctest-rpm-binary
run: |
mkdir "${{ runner.workspace }}/buildrpm"
mkdir "${{ runner.workspace }}/buildrpm/hdf5"
cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING ${{ runner.workspace }}/buildrpm/hdf5
cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 ${{ runner.workspace }}/buildrpm/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/README.md ${{ runner.workspace }}/buildrpm/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/*.rpm ${{ runner.workspace }}/buildrpm/hdf5
cd "${{ runner.workspace }}/buildrpm"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-GNUC/*.rpm ${{ runner.workspace }}/buildrpm/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
shell: bash
- name: List files in the space (Linux)
@ -318,14 +300,14 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: deb-ubuntu-2204_gcc-binary
path: ${{ runner.workspace }}/builddeb/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz
path: ${{ runner.workspace }}/builddeb/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
- name: Save published binary rpm (Linux)
uses: actions/upload-artifact@v4
with:
name: rpm-ubuntu-2204_gcc-binary
path: ${{ runner.workspace }}/buildrpm/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz
path: ${{ runner.workspace }}/buildrpm/${{ steps.set-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
# Save doxygen files created by ctest script
@ -518,8 +500,8 @@ jobs:
mkdir "${{ runner.workspace }}/build114/hdf5"
cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/README.md ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-macos-Clang/README.md ${{ runner.workspace }}/build114/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-macos-Clang/*.tar.gz ${{ runner.workspace }}/build114/hdf5
cd "${{ runner.workspace }}/build114"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz hdf5
shell: bash
@ -528,13 +510,7 @@ jobs:
id: publish-ctest-dmg-binary
run: |
mkdir "${{ runner.workspace }}/builddmg"
mkdir "${{ runner.workspace }}/builddmg/hdf5"
cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING ${{ runner.workspace }}/builddmg/hdf5
cp ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 ${{ runner.workspace }}/builddmg/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/README.md ${{ runner.workspace }}/builddmg/hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/*.dmg ${{ runner.workspace }}/builddmg/hdf5
cd "${{ runner.workspace }}/builddmg"
tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz hdf5
cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-macos-Clang/*.dmg ${{ runner.workspace }}/builddmg/${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.dmg
shell: bash
- name: List files in the space (MacOS_latest)
@ -554,7 +530,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: tgz-macos14_clang-dmg-binary
path: ${{ runner.workspace }}/builddmg/${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz
path: ${{ runner.workspace }}/builddmg/${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.dmg
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
build_and_test_S3_linux:
@ -745,13 +721,7 @@ jobs:
id: publish-ctest-msi-binary
run: |
mkdir "${{ runner.workspace }}/buildmsi"
mkdir "${{ runner.workspace }}/buildmsi/hdf5"
Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING -Destination ${{ runner.workspace }}/buildmsi/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/COPYING_LBNL_HDF5 -Destination ${{ runner.workspace }}/buildmsi/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Intel/README.md -Destination ${{ runner.workspace }}/buildmsi/hdf5/
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Intel/* -Destination ${{ runner.workspace }}/buildmsi/hdf5/ -Include *.msi
cd "${{ runner.workspace }}/buildmsi"
7z a -tzip ${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip hdf5
Copy-Item -Path ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Intel/* -Destination ${{ runner.workspace }}/buildmsi/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi -Include *.msi
shell: pwsh
- name: List files in the space (Windows_intel)
@ -772,7 +742,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: msi-vs2022_intel-binary
path: ${{ runner.workspace }}/buildmsi/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip
path: ${{ runner.workspace }}/buildmsi/${{ steps.set-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
build_and_test_linux_intel:

View File

@ -1,4 +1,4 @@
name: Check Markdown links
name: Check 1.14 Markdown links
# Triggers the workflow on push or pull request or on demand
on:

View File

@ -186,16 +186,16 @@ jobs:
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.html.abi.reports.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
- name: Create sha256 sums for files for nonversioned files
@ -232,16 +232,16 @@ jobs:
${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi
${{ steps.get-file-base.outputs.FILE_BASE }}.html.abi.reports.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`
@ -261,16 +261,16 @@ jobs:
hdf5.tar.gz
hdf5.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi
${{ steps.get-file-base.outputs.FILE_BASE }}.html.abi.reports.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt
if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn`

View File

@ -51,13 +51,13 @@ jobs:
${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc_s3.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_cl.msi
${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_intel.tar.gz
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi.zip
${{ steps.get-file-base.outputs.FILE_BASE }}-win-vs2022_intel.msi

View File

@ -12,7 +12,7 @@ on:
schedule:
- cron: '36 22 * * 4'
push:
branches: [ "develop" ]
branches: [ "hdf5_1_14" ]
# Declare default permissions as read only.
permissions: read-all

View File

@ -9,4 +9,4 @@ authors:
website: 'https://www.hdfgroup.org'
repository-code: 'https://github.com/HDFGroup/hdf5'
url: 'https://www.hdfgroup.org/HDF5/'
repository-artifact: 'https://support.hdfgroup.org/downloads/HDF5'
repository-artifact: 'https://support.hdfgroup.org/downloads/index.html'

View File

@ -41,7 +41,7 @@ Once a pull request is correctly formatted and passes **ALL** CI tests, it will
community members who can approve pull requests. The HDF Group developers will work with you to ensure that the pull request satisfies the acceptance
criteria described in the next section.
<h2 id="criteria">Acceptance criteria for a pull request</h2>
<h2 id="workflow">Workflow</h2>
We appreciate every contribution we receive, but we may not accept them all. Those that we *do* accept satisfy the following criteria:

View File

@ -9,9 +9,7 @@ in science, engineering, and research communities worldwide.
The HDF Group is the developer, maintainer, and steward of HDF5 software. Find more
information about The HDF Group, the HDF5 Community, and other HDF5 software projects,
tools, and services at The HDF Group's website.
https://www.hdfgroup.org/
tools, and services at [The HDF Group's website](https://www.hdfgroup.org/).
@ -19,44 +17,44 @@ HELP AND SUPPORT
----------------
Information regarding Help Desk and Support services is available at
https://hdfgroup.atlassian.net/servicedesk/customer/portals
https://help.hdfgroup.org
FORUM and NEWS
--------------
The following public forums are provided for public announcements and discussions
The [HDF Forum](https://forum.hdfgroup.org) is provided for public announcements and discussions
of interest to the general HDF5 Community.
- Homepage of the Forum
https://forum.hdfgroup.org
- News and Announcement
- News and Announcements
https://forum.hdfgroup.org/c/news-and-announcements-from-the-hdf-group
- HDF5 and HDF4 Topics
- HDF5 Topics
https://forum.hdfgroup.org/c/hdf5
These forums are provided as an open and public service for searching and reading.
Posting requires completing a simple registration and allows one to join in the
conversation. Please read the following instructions pertaining to the Forum's
use and configuration
https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum
conversation. Please read the [instructions](https://forum.hdfgroup.org/t/quickstart-guide-welcome-to-the-new-hdf-forum
) pertaining to the Forum's use and configuration.
HDF5 SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE
--------------------------------------------
Full Documentation and Programming Resources for this HDF5 can be found at
https://support.hdfgroup.org/documentation/hdf5/index.html
https://support.hdfgroup.org/documentation/index.html
Periodically, development code snapshots are provided at the following URL:
https://github.com/HDFGroup/hdf5/releases
https://github.com/HDFGroup/hdf5/releases/tag/snapshot-1.14
Source packages for current and previous releases are located at:
https://support.hdfgroup.org/releases/hdf5/downloads/
hdf5 1.14 releases:
https://support.hdfgroup.org/releases/hdf5/v1_14/index.html
Archived releases:
https://support.hdfgroup.org/archive/support/ftp/HDF5/releases/index.html
Development code is available at our Github location:

View File

@ -6,6 +6,8 @@ HDF5 version 1.14.5-1 currently under development
[![1.14 autotools build status](https://img.shields.io/github/actions/workflow/status/HDFGroup/hdf5/autotools.yml?branch=hdf5_1_14&label=HDF5%201.14%20Autotools%20CI)](https://github.com/HDFGroup/hdf5/actions/workflows/autotools.yml?query=branch%3Ahdf5_1_14)
[![BSD](https://img.shields.io/badge/License-BSD-blue.svg)](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/COPYING)
[HPC configure/build/test results](https://my.cdash.org/index.php?project=HDF5)
*Please refer to the release_docs/INSTALL file for installation instructions.*
This repository contains a high-performance library's source code and a file format
@ -21,11 +23,11 @@ DOCUMENTATION
-------------
This release is fully functional for the API described in the documentation.
[HDF5 C API](https://hdfgroup.github.io/hdf5/v1_14/_l_b_a_p_i.html)
https://hdfgroup.github.io/hdf5/v1_14/_l_b_a_p_i.html
Full Documentation and Programming Resources for this release can be found at
[Full Documentation](https://hdfgroup.github.io/hdf5/v1_14/index.html)
https://hdfgroup.github.io/hdf5/v1_14/index.html
The latest doxygen documentation generated on changes to HDF5 1.14.x is available at:
@ -72,14 +74,18 @@ conversation. Please read the [instructions](https://forum.hdfgroup.org/t/quick
SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE
--------------------------------------------
Periodically, development code snapshots are provided at the following URL:
https://github.com/HDFGroup/hdf5/releases/tag/snapshot-1.14
Source packages for current and previous releases are located at:
https://support.hdfgroup.org/downloads/hdf5
hdf5 1.14 releases:
https://support.hdfgroup.org/releases/hdf5/v1_14/index.html
Archived releases:
https://support.hdfgroup.org/archive/support/ftp/HDF5/releases/index.html
Development code is available at our Github location:
https://github.com/HDFGroup/hdf5.git

View File

@ -18,7 +18,7 @@
<string>????</string>
<!-- See https://developer.apple.com/app-store/categories/ for list of AppStore categories -->
<key>LSApplicationCategoryType</key>
<string>public.app-category.utilities</string>
<string>public.app-category.developer-tools</string>
<key>CFBundleVersion</key>
<string>@CPACK_PACKAGE_VERSION@</string>
<key>CFBundleShortVersionString</key>

View File

@ -7,8 +7,8 @@
# HDFS_LIBRARIES, location of libhdfs.so
# HDFS_FOUND, whether HDFS is found.
exec_program($ENV{HADOOP_HOME}/bin/hadoop ARGS version OUTPUT_VARIABLE Hadoop_VERSION
RETURN_VALUE Hadoop_RETURN)
execute_process(COMMAND $ENV{HADOOP_HOME}/bin/hadoop version OUTPUT_VARIABLE Hadoop_VERSION
RESULT_VARIABLE Hadoop_RETURN)
# currently only looking in HADOOP_HOME
find_path(HDFS_INCLUDE_DIR hdfs.h PATHS

View File

@ -75,6 +75,6 @@ For more information see USING_CMake_Examples.txt in the install folder.
===========================================================================
Documentation for this release can be found at the following URL:
https://support.hdfgroup.org/hdf5/@HDF5_PACKAGE_NAME@-@HDF5_PACKAGE_VERSION@/documentation/doxygen/index.html
https://support.hdfgroup.org/releases/hdf5/@${H5_VERS_MAJOR}@_@${H5_VERS_MINOR}@/@${H5_VERS_MAJOR}@_@${H5_VERS_MINOR}@_@${H5_VERS_RELEASE}@/documentation/doxygen/index.html
Bugs should be reported to help@hdfgroup.org.

View File

@ -87,7 +87,7 @@ set (PLUGIN_GIT_URL "https://github.com/HDFGroup/hdf5_plugins.git" CACHE STRING
set (PLUGIN_GIT_BRANCH "master" CACHE STRING "" FORCE)
set (H5PL_VERS_MAJOR "1" CACHE STRING "Major version of hdf5 package for PLUGIN package" FORCE)
set (H5PL_VERS_MINOR "14" CACHE STRING "Minor version of hdf5 package for PLUGIN package" FORCE)
set (H5PL_VERS_RELEASE "4" CACHE STRING "Release version of hdf5 package for PLUGIN package" FORCE)
set (H5PL_VERS_RELEASE "5" CACHE STRING "Release version of hdf5 package for PLUGIN package" FORCE)
#############
# bitshuffle

View File

@ -335,17 +335,17 @@ hid_t file_id = H5Fcreate("file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id);
[u1]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/HDF5Examples/C/H5PAR/ph5_filtered_writes.c
[u2]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c
[u3]: https://hdfgroup.github.io/hdf5/develop/group___h5_d.html#gaf6213bf3a876c1741810037ff2bb85d8
[u4]: https://hdfgroup.github.io/hdf5/develop/group___h5_d.html#ga8eb1c838aff79a17de385d0707709915
[u5]: https://hdfgroup.github.io/hdf5/develop/group___d_c_p_l.html#ga85faefca58387bba409b65c470d7d851
[u6]: https://hdfgroup.github.io/hdf5/develop/group___d_c_p_l.html#ga4335bb45b35386daa837b4ff1b9cd4a4
[u7]: https://hdfgroup.github.io/hdf5/develop/group___d_c_p_l.html#ga6bd822266b31f86551a9a1d79601b6a2
[u8]: https://support.hdfgroup.org/documentation/hdf5/parallel-compression-improvements-in-hdf5-1-13-1
[u9]: https://support.hdfgroup.org/documentation/hdf5/chunking_in_hdf5.html
[u10]: https://support.hdfgroup.org/documentation/hdf5/technotes/TechNote-HDF5-ImprovingIOPerformanceCompressedDatasets.pdf
[u11]: https://hdfgroup.github.io/hdf5/develop/group___f_a_p_l.html#gab99d5af749aeb3896fd9e3ceb273677a
[u12]: https://hdfgroup.github.io/hdf5/develop/group___f_c_p_l.html#ga167ff65f392ca3b7f1933b1cee1b9f70
[u13]: https://hdfgroup.github.io/hdf5/develop/group___f_c_p_l.html#gad012d7f3c2f1e1999eb1770aae3a4963
[u14]: https://hdfgroup.github.io/hdf5/develop/group___d_x_p_l.html#ga001a22b64f60b815abf5de8b4776f09e
[u15]: https://hdfgroup.github.io/hdf5/develop/group___d_x_p_l.html#gacb30d14d1791ec7ff9ee73aa148a51a3
[u16]: https://hdfgroup.github.io/hdf5/develop/group___f_a_p_l.html#gacbe1724e7f70cd17ed687417a1d2a910
[u3]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___h5_d.html#gaf6213bf3a876c1741810037ff2bb85d8
[u4]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___h5_d.html#ga8eb1c838aff79a17de385d0707709915
[u5]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___d_c_p_l.html#ga85faefca58387bba409b65c470d7d851
[u6]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___d_c_p_l.html#ga4335bb45b35386daa837b4ff1b9cd4a4
[u7]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___d_c_p_l.html#ga6bd822266b31f86551a9a1d79601b6a2
[u8]: https://www.hdfgroup.org/2022/03/04/parallel-compression-improvements-in-hdf5-1-13-1/
[u9]: https://support.hdfgroup.org/releases/hdf5/documentation/advanced_topics/chunking_in_hdf5.md
[u10]: https://support.hdfgroup.org/releases/hdf5/documentation/hdf5_topics/HDF5ImprovingIOPerformanceCompressedDatasets.pdf
[u11]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___f_a_p_l.html#gab99d5af749aeb3896fd9e3ceb273677a
[u12]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___f_c_p_l.html#ga167ff65f392ca3b7f1933b1cee1b9f70
[u13]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___f_c_p_l.html#gad012d7f3c2f1e1999eb1770aae3a4963
[u14]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___d_x_p_l.html#ga001a22b64f60b815abf5de8b4776f09e
[u15]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___d_x_p_l.html#gacb30d14d1791ec7ff9ee73aa148a51a3
[u16]: https://hdfgroup.github.io/hdf5/hdf5_1_14/group___f_a_p_l.html#gacbe1724e7f70cd17ed687417a1d2a910
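For orientation, a minimal C sketch of the parallel filtered-write pattern these links document. The names and sizes are illustrative only, per-rank hyperslab selection and error checks are omitted, and it assumes a parallel HDF5 build with `file` created through an MPI-IO file access property list:

#include "hdf5.h"

herr_t filtered_write(hid_t file, const int *buf, hsize_t n)
{
    hsize_t chunk = 1024;
    hid_t   dcpl  = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 1, &chunk);                 /* filters require chunking */
    H5Pset_deflate(dcpl, 6);                       /* gzip the chunks */
    hid_t space = H5Screate_simple(1, &n, NULL);
    hid_t dset  = H5Dcreate2(file, "data", H5T_NATIVE_INT, space,
                             H5P_DEFAULT, dcpl, H5P_DEFAULT);
    hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
    H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);  /* collective I/O is required
                                                      for parallel filtered writes */
    herr_t ret = H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, dxpl, buf);
    H5Pclose(dxpl); H5Pclose(dcpl); H5Sclose(space); H5Dclose(dset);
    return ret;
}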

View File

@ -258,13 +258,13 @@ ALIASES += sa_metadata_ops="\sa \li H5Pget_all_coll_metadata_ops() \li H5Pget_co
ALIASES += ref_cons_semantics="<a href=\"https://\RFCURL/RFC%20PHDF5%20Consistency%20Semantics%20MC%20120328.docx.pdf\">Enabling a Strict Consistency Semantics Model in Parallel HDF5</a>"
ALIASES += ref_file_image_ops="<a href=\"https://\RFCURL/HDF5FileImageOperations.pdf\">HDF5 File Image Operations</a>"
ALIASES += ref_filter_pipe="<a href=\"https://\DOCURL/HDF5+Data+Flow+Pipeline+for+H5Dread\">Data Flow Pipeline for H5Dread()</a>"
ALIASES += ref_filter_pipe="<a href=\"https://\DOCURL/advanced_topics/data_flow_pline_H5Dread.md\">Data Flow Pipeline for H5Dread()</a>"
ALIASES += ref_group_impls="<a href=\"https://\DOXURL/group___h5_g.html\">Group implementations in HDF5</a>"
ALIASES += ref_h5lib_relver="<a href=\"https://\ARCURL/TechNotes/Version.html\">HDF5 Library Release Version Numbers</a>"
ALIASES += ref_mdc_in_hdf5="<a href=\"https://\DOCURL/Metadata+Caching+in+HDF5\">Metadata Caching in HDF5</a>"
ALIASES += ref_mdc_logging="<a href=\"https://\DOCURL/H5F_START_MDC_LOGGING\">Metadata Cache Logging</a>"
ALIASES += ref_news_112="<a href=\"https://\DOCURL/release_specifics/new_features_1_12.html\">New Features in HDF5 Release 1.12</a>"
ALIASES += ref_h5ocopy="<a href=\"https://\DOCURL/Copying+Committed+Datatypes+with+H5Ocopy\">Copying Committed Datatypes with H5Ocopy()</a>"
ALIASES += ref_mdc_in_hdf5="<a href=\"https://\DOCURL/advanced_topics/FineTuningMetadataCache.md\">Metadata Caching in HDF5</a>"
ALIASES += ref_mdc_logging="<a href=\"https://\RFCURL/Design-MetadataCache-Logging-THG20140224-v4.pdf\">Metadata Cache Logging</a>"
ALIASES += ref_news_112="<a href=\"https://\DOCURL/release_specifics/new_features_1_12.md\">New Features in HDF5 Release 1.12</a>"
ALIASES += ref_h5ocopy="<a href=\"https://\DOCURL/advanced_topics/CopyingCommittedDatatypesWithH5Ocopy.pdf\">Copying Committed Datatypes with H5Ocopy()</a>"
ALIASES += ref_sencode_fmt_change="<a href=\"https://\RFCURL/H5Sencode_format.docx.pdf\">RFC H5Sencode() / H5Sdecode() Format Change</a>"
ALIASES += ref_vlen_strings="\Emph{Creating variable-length string datatypes}"
ALIASES += ref_vol_doc="VOL documentation"

View File

@ -617,7 +617,6 @@ on the <a href="http://hdfeos.org/">HDF-EOS Tools and Information Center</a> pag
\li \ref LBExamples
\li \ref ExAPI
\li <a href="https://\SRCURL/HDF5Examples">Examples in the Source Code</a>
\li <a href="https://\DOCURL/Other+Examples">Other Examples</a>
\section secHDF5ExamplesCompile How To Compile
For information on compiling in C, C++ and Fortran, see: \ref LBCompiling

View File

@ -181,9 +181,9 @@ created the dataset layout cannot be changed. The h5repack utility can be used t
to a new file with a new layout.
\section secLBDsetLayoutSource Sources of Information
<a href="https://\DOCURL/chunking_in_hdf5.html">Chunking in HDF5</a>
(See the documentation on <a href="https://\DOCURL/advanced_topics_list.html">Advanced Topics in HDF5</a>)
see \ref sec_plist in the HDF5 \ref UG.
<a href="https://\DOCURL/advanced_topics/chunking_in_hdf5.md">Chunking in HDF5</a>
(See the documentation on <a href="https://\DOCURL/advanced_topics_list.md">Advanced Topics in HDF5</a>)
\see \ref sec_plist in the HDF5 \ref UG.
<hr>
Previous Chapter \ref LBPropsList - Next Chapter \ref LBExtDset
@ -201,7 +201,7 @@ certain initial dimensions, then to later increase the size of any of the initia
HDF5 requires you to use chunking to define extendible datasets. This makes it possible to extend
datasets efficiently without having to excessively reorganize storage. (To use chunking efficiently,
be sure to see the advanced topic, <a href="https://\DOCURL/chunking_in_hdf5.html">Chunking in HDF5</a>.)
be sure to see the advanced topic, <a href="https://\DOCURL/advanced_topics/chunking_in_hdf5.md">Chunking in HDF5</a>.)
The following operations are required in order to extend a dataset:
\li Declare the dataspace of the dataset to have unlimited dimensions for all dimensions that might eventually be extended.
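A minimal C sketch of these steps (hypothetical file and dataset names, error checks omitted): chunking plus an unlimited maximum dimension makes the dataset extendible with H5Dset_extent().

#include "hdf5.h"

int make_extendible(void)
{
    hsize_t dims[1]    = {10};
    hsize_t maxdims[1] = {H5S_UNLIMITED};   /* unlimited maximum dimension */
    hsize_t chunk[1]   = {1024};
    hsize_t newsize[1] = {10000};

    hid_t file  = H5Fcreate("extend.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    hid_t space = H5Screate_simple(1, dims, maxdims);
    hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 1, chunk);           /* chunking is required */
    hid_t dset  = H5Dcreate2(file, "data", H5T_NATIVE_INT, space,
                             H5P_DEFAULT, dcpl, H5P_DEFAULT);
    H5Dset_extent(dset, newsize);           /* extend the dataset */
    H5Pclose(dcpl); H5Sclose(space); H5Dclose(dset); H5Fclose(file);
    return 0;
}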
@ -243,7 +243,7 @@ Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
\section secLBComDsetCreate Creating a Compressed Dataset
HDF5 requires you to use chunking to create a compressed dataset. (To use chunking efficiently,
be sure to see the advanced topic, <a href="https://\DOCURL/chunking_in_hdf5.html">Chunking in HDF5</a>.)
be sure to see the advanced topic, <a href="https://\DOCURL/advanced_topics/chunking_in_hdf5.md">Chunking in HDF5</a>.)
The following operations are required in order to create a compressed dataset:
\li Create a dataset creation property list.
@ -251,7 +251,7 @@ The following operations are required in order to create a compressed dataset:
\li Create the dataset.
\li Close the dataset creation property list and dataset.
For more information on compression, see the FAQ question on <a href="https://\DOCURL/Using+Compression+in+HDF5">Using Compression in HDF5</a>.
For more information on compression, see the FAQ question on <a href="https://\DOCURL/hdf5_topics/UsingCompressionInHDF5.md">Using Compression in HDF5</a>.
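A minimal C sketch of the operations listed above (hypothetical names and sizes, error checks omitted), using the built-in deflate filter:

#include "hdf5.h"

int make_compressed(hid_t file)
{
    hsize_t dims[2]  = {1000, 1000};
    hsize_t chunk[2] = {100, 100};

    hid_t space = H5Screate_simple(2, dims, NULL);
    hid_t dcpl  = H5Pcreate(H5P_DATASET_CREATE);   /* dataset creation plist */
    H5Pset_chunk(dcpl, 2, chunk);                  /* chunking enables filters */
    H5Pset_deflate(dcpl, 6);                       /* gzip, compression level 6 */
    hid_t dset = H5Dcreate2(file, "compressed", H5T_NATIVE_INT, space,
                            H5P_DEFAULT, dcpl, H5P_DEFAULT);
    H5Pclose(dcpl); H5Sclose(space); H5Dclose(dset);
    return 0;
}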
\section secLBComDsetProg Programming Example
@ -968,8 +968,7 @@ or on WINDOWS you may need to add the path to the bin folder to PATH.
\section secLBCompilingCMake Compiling an Application with CMake
\subsection subsecLBCompilingCMakeScripts CMake Scripts for Building Applications
Simple scripts are provided for building applications with different languages and options.
See <a href="https://\DOCURL/CMake+Scripts+for+Building+Applications">CMake Scripts for Building Applications</a>.
See <a href="https://\SRCURL/release_docs/USING_CMake_examples.txt">Using CMake to Build Applications</a> to build applications with different languages and options.
For a more complete script (and to help resolve issues) see the script provided with the HDF5 Examples project.
@ -977,7 +976,7 @@ For a more complete script (and to help resolve issues) see the script provided
The installed HDF5 can be verified by compiling the HDF5 Examples project, included with the CMake built HDF5 binaries
in the share folder or you can go to the <a href="https://\SRCURL/HDF5Examples">HDF5 Examples</a> in the HDF5 github repository.
Go into the share directory and follow the instructions in USING_CMake_examples.txt to build the examples.
Go into the share directory and follow the instructions in <a href="https://\SRCURL/release_docs/USING_CMake_examples.txt">Using CMake to Build Examples</a> to build the examples.
In general, users must first set the HDF5_ROOT environment variable to the installed location of the CMake
configuration files for HDF5. For example, on Windows the following path might be set:
@ -1031,17 +1030,8 @@ For example, on Unix the log files will be in:
There are log files for the configure, test, and build.
<hr>
Previous Chapter \ref LBQuizAnswers - Next Chapter \ref LBTraining
Previous Chapter \ref LBQuizAnswers
Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
@page LBTraining Training Videos
Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
<a href="https://\DOCURL/Training+Videos">Training Videos</a>
<hr>
Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
*/

View File

@ -246,7 +246,7 @@ in the file).
Please note that the chunk sizes used in this topic are for demonstration purposes only. For
information on chunking and specifying an appropriate chunk size, see the
<a href="https://\DOCURL/advanced_topics/chunking_in_hdf5.html">Chunking in HDF5</a> documentation.
<a href="https://\DOCURL/advanced_topics/chunking_in_hdf5.md">Chunking in HDF5</a> documentation.
Also see the HDF5 Tutorial topic on \ref secLBComDsetCreate.
<ul>

View File

@ -92,7 +92,7 @@ Public header Files you will need to be familiar with include:
</table>
Many VOL connectors are listed on The HDF Group's VOL plugin registration page, located at:
<a href="https://\DOCURL/registered_vol_connectors.html">Registered VOL Connectors</a>.
<a href="https://\DOCURL/registered_vol_connectors.md">Registered VOL Connectors</a>.
Not all of these VOL connectors are supported by The HDF Group and the level of completeness varies, but the
connectors found there can serve as examples of working implementations
@ -195,7 +195,7 @@ contact <a href="help@hdfgroup.org">help@hdfgroup.org</a> for help with this. We
name you've chosen will appear on the registered VOL connectors page.
As noted above, registered VOL connectors will be listed at:
<a href="https://\DOCURL/registered_vol_connectors.html">Registered VOL Connectors</a>
<a href="https://\DOCURL/registered_vol_connectors.md">Registered VOL Connectors</a>
A new \b conn_version field has been added to the class struct for 1.13. This field is currently not used by
the library so its use is determined by the connector author. Best practices for this field will be determined

View File

@ -5,12 +5,8 @@
<tab type="user" url="@ref GettingStarted" title="Getting started" />
<tab type="user" url="@ref UG" title="User Guide" />
<tab type="user" url="@ref RM" title="Reference Manual" />
<!-- <tab type="user" url="https://support.hdfgroup.org/documentation/hdf5/HDF5+Application+Developer%27s+Guide" title="Application Developer's Guide" />
<tab type="user" url="https://support.hdfgroup.org/documentation/hdf5/An_Overview_of_the_HDF5_Library_Architecture.v2.pdf" title="HDF5 Library Architecture Overview" /> -->
<tab type="user" url="@ref Cookbook" title="Cookbook" />
<tab type="user" url="@ref TN" title="Technical Notes" />
<!-- <tab type="user" url="@ref VOL_Connector" title="HDF5 VOL Connector Author Guide" />
<tab type="user" url="https://support.hdfgroup.org/documentation/hdf5/HDF5+VOL+User%27s+Guide" title="HDF5 VOL User's Guide" /> -->
<tab type="user" url="@ref RFC" title="RFCs" />
<tab type="user" url="@ref SPEC" title="Specifications" />
<tab type="user" url="@ref GLS" title="Glossary" />

View File

@ -161,7 +161,7 @@ H5_HLDLL herr_t H5DOappend(hid_t dset_id, hid_t dxpl_id, unsigned axis, size_t e
* from one datatype to another, and the filter pipeline to write the chunk.
* Developers should have experience with these processes before
* using this function. Please see
* <a href="https://\DOCURL/Using+the+Direct+Chunk+Write+Function">
* <a href="https://\DOCURL/advanced_topics/UsingDirectChunkWrite.pdf">
* Using the Direct Chunk Write Function</a>
* for more information.
*
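For context, a minimal C sketch of a direct chunk write (hypothetical names; H5Dwrite_chunk() is the library-level equivalent of this high-level routine). The buffer is assumed to be already compressed to match the dataset's filter pipeline:

#include "hdf5.h"

herr_t write_raw_chunk(hid_t dset, const void *comp_buf, size_t comp_size)
{
    hsize_t  offset[2]   = {0, 0}; /* logical position of the chunk */
    uint32_t filter_mask = 0;      /* 0 = record all filters as applied */

    /* Bypasses datatype conversion and the filter pipeline entirely */
    return H5Dwrite_chunk(dset, H5P_DEFAULT, filter_mask, offset,
                          comp_size, comp_buf);
}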

View File

@ -147,7 +147,7 @@ H5_HLDLL herr_t H5DSattach_scale(hid_t did, hid_t dsid, unsigned int idx);
* dimension \p idx of dataset \p did. This deletes the entries in the
* #DIMENSION_LIST and #REFERENCE_LIST attributes,
* as defined in section 4.2 of
* <a href="https://support.hdfgroup.org/documentation/HDF5/HL/H5DS_Spec.pdf">
* <a href="https://support.hdfgroup.org/releases/hdf5/documentation/hdf5_topics/H5DS_Spec.pdf">
* HDF5 Dimension Scale Specification</a>.
*
* Fails if:
@ -180,7 +180,7 @@ H5_HLDLL herr_t H5DSdetach_scale(hid_t did, hid_t dsid, unsigned int idx);
* as defined above. Creates the CLASS attribute, set to the value
* "DIMENSION_SCALE" and an empty #REFERENCE_LIST attribute,
* as described in
* <a href="https://support.hdfgroup.org/documentation/HDF5/HL/H5DS_Spec.pdf">
* <a href="https://support.hdfgroup.org/releases/hdf5/documentation/hdf5_topics/H5DS_Spec.pdf">
* HDF5 Dimension Scale Specification</a>.
* (PDF, see section 4.2).
*
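A minimal C sketch of the attach/detach cycle these attributes support (hypothetical handles and scale name, error handling abbreviated):

#include "hdf5_hl.h"

herr_t demo_scales(hid_t dset, hid_t scale)
{
    /* Mark `scale` as a dimension scale (sets the CLASS attribute) */
    if (H5DSset_scale(scale, "time") < 0)
        return -1;
    /* Attach to dimension 0 of `dset`; updates DIMENSION_LIST/REFERENCE_LIST */
    if (H5DSattach_scale(dset, scale, 0) < 0)
        return -1;
    /* Detach again; removes the corresponding attribute entries */
    return H5DSdetach_scale(dset, scale, 0);
}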

View File

@ -1625,7 +1625,7 @@ H5_HLDLL htri_t H5LTpath_valid(hid_t loc_id, const char *path, hbool_t check_obj
* \note **Recommended Reading:**
* \note This function is part of the file image operations feature set.
* It is highly recommended to study the guide
* <a href="https://\DOCURL/HDF5+File+Image+Operations">
* <a href="https://\RFCURL/HDF5FileImageOperations.pdf">
* HDF5 File Image Operations</a> before using this feature set.\n
* See the See Also section below for links to other elements of
* HDF5 file image operations.
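A minimal C sketch of opening a file image held in memory (hypothetical wrapper; with flags of 0 the library copies the buffer, while H5LT_FILE_IMAGE_DONT_COPY would make it use the caller's buffer directly):

#include "hdf5_hl.h"

hid_t open_image(void *buf, size_t size)
{
    /* flags = 0: the library takes its own copy of the image */
    return H5LTopen_file_image(buf, size, 0);
}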

View File

@ -91,6 +91,6 @@ The <i><b>H5</b> </i>class automatically loads the native method implementations
and the HDF5 library.
<h3>To Obtain</h3>
The JHI5 is included with the <a href="https://support.hdfgroup.org/downloads/HDF5/index.html">HDF5</a> library.
The JHI5 is included with the <a href="https://support.hdfgroup.org/downloads/index.html">HDF5</a> library.
</body>

View File

@ -49,7 +49,7 @@ CONTENTS
include the Szip library with the encoder enabled. These can be found
here:
https://support.hdfgroup.org/downloads/HDF5
https://support.hdfgroup.org/downloads/index.html
Please notice that if HDF5 configure cannot find a valid Szip library,
configure will not fail; in this case, the compression filter will

View File

@ -334,7 +334,7 @@ III. Full installation instructions for source distributions
(or '--with-pthread=DIR') flag to the configure script.
For further information, see:
https://support.hdfgroup.org/documentation/HDF5/Questions+about+thread-safety+and+concurrent+access
https://support.hdfgroup.org/releases/hdf5/documentation/gen_topics/Questions+about+thread-safety+and+concurrent+access
The high-level, C++, Fortran and Java interfaces are not compatible
with the thread-safety option because the lock is not hoisted
@ -490,7 +490,7 @@ IV. Using the Library
For information on using HDF5 see the documentation, tutorials and examples
found here:
https://support.hdfgroup.org/documentation/HDF5/index.html
https://support.hdfgroup.org/documentation/index.html
A summary of the features included in the built HDF5 installation can be found
in the libhdf5.settings file in the same directory as the static and/or

View File

@ -29,7 +29,7 @@ Obtaining HDF5 source code
2. Obtain HDF5 source from Github
development branch: https://github.com/HDFGroup/hdf5
last release: https://github.com/HDFGroup/hdf5/releases/latest
hdf5-1_14_"X".tar.gz or hdf5-1_1
hdf5-1_14_"X".tar.gz or hdf5-1_14_"X".zip
and put it in "myhdfstuff".
Uncompress the file. There should be a hdf5-1.14."X" folder.

View File

@ -90,7 +90,7 @@ nodes. They would probably work for other Cray systems but have
not been verified.
Obtain the HDF5 source code:
https://support.hdfgroup.org/downloads/HDF5
https://support.hdfgroup.org/downloads/index.html
The entire build process should be done on a MOM node in an interactive allocation and on a file system accessible by all compute nodes.
Request an interactive allocation with qsub:

View File

@ -15,16 +15,16 @@ final release.
Links to HDF5 documentation can be found on:
https://support.hdfgroup.org/documentation/HDF5
https://support.hdfgroup.org/releases/hdf5/latest-docs.html
The official HDF5 releases can be obtained from:
https://support.hdfgroup.org/downloads/HDF5/
https://support.hdfgroup.org/downloads/index.html
Changes from release to release and new features in the HDF5-1.14.x release series
Changes from Release to Release and New Features in the HDF5-1.14.x release series
can be found at:
https://support.hdfgroup.org/documentation/HDF5/release_specific_info.html
https://support.hdfgroup.org/releases/hdf5/documentation/release_specific_info.md
If you have any questions or comments, please send them to the HDF Help Desk:

View File

@ -62,7 +62,7 @@ For more information on the HDF5 versioning and backward and forward compatibili
### 5. Update Interface Version (Release Manager | Product Manager)
1. Verify interface additions, changes, and removals, and update the shared library interface version number.
2. Execute the CI snapshot workflow.
- Actions - "[hdf5 release build][u8]" workflow and use the defaults.
- Actions - “[hdf5 release build][u8]” workflow and use the defaults.
3. Download and inspect release build source and binary files. Downloaded source files should build correctly, and one or more binaries should install and run correctly. There should be nothing missing nor any extraneous files that aren't meant for release.
4. Verify the interface compatibility reports between the current source and the previous release on the Github [Snapshots][u14] page.
- The compatibility reports are produced by the CI and are viewable in the Github [Releases/snapshot][u15] section.
@ -95,9 +95,9 @@ For more information on the HDF5 versioning and backward and forward compatibili
- `$ git push`
7. Update default configuration mode
- `$ git checkout hdf5_X_Y_Z;` and `$ bin/switch_maint_mode -disable ./configure.ac` to disable `AM_MAINTAINER_MODE`.
- Need to set option `HDF5_GENERATE_HEADERS` to `OFF`, currently in line 996 of [src/CMakeLists.txt][11].
- Need to set option `HDF5_GENERATE_HEADERS` to `OFF`, currently in line 996 of [src/CMakeLists.txt][u11].
- Change the **release preparation branch**'s (i.e. hdf5_X_Y_Z) default configuration mode from development to production in [configure.ac][u12].
- Find "Determine build mode" in [configure.ac][u12].
- Find “Determine build mode” in [configure.ac][u12].
- Change `default=debug` to `default=production` at the bottom of the `AS_HELP_STRING` for `--enable-build-mode`.
- Under `if test "X-$BUILD_MODE" = X- ; then` change `BUILD_MODE=debug` to `BUILD_MODE=production`.
- Run `sh ./autogen.sh` to regenerate the UNIX build system files and commit the changes. (use `git status --ignored` to see the changes and `git add -f` to add all files. First delete any new files not to be committed, notably `src/H5public.h~` and `autom4te.cache/`.)
@ -137,7 +137,7 @@ For more information on the HDF5 versioning and backward and forward compatibili
### 8. Finalize Release Notes (Release Manager)
1. Perform a final review of release notes and ensure that any new changes made to the source, any new known issues discovered, and any additional tests run since the code freeze have been reflected in RELEASE.txt and other appropriate in-source documentation files (INSTALL_*, etc.). (Refer to the sub-steps of step 3 for what to check).
2. Update the [RELEASE.txt][u1] in the **support** branch (i.e. hdf5_X_Y) to remove entries in "Bugs fixed" and "New Features" sections and increment the version number for the following release ("Bug fixes since X.Y.Z" - occurs twice).
2. Update the [RELEASE.txt][u1] in the **support** branch (i.e. hdf5_X_Y) to remove entries in “Bugs fixed” and “New Features” sections and increment the version number for the following release (“Bug fixes since X.Y.Z” - occurs twice).
- `$ git checkout hdf5_X_Y`
- `$ vi RELEASE.txt # update RELEASE.txt to clear it out`
- `$ git commit -m "Reset RELEASE.txt in preparation for the next release."`
@ -174,6 +174,6 @@ For more information on the HDF5 versioning and backward and forward compatibili
[u10]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/bin/h5vers
[u11]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/src/CMakeLists.txt
[u12]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/configure.ac
[u13]: https://support.hdfgroup.org/documentation/hdf5/v1_14/v1_14_4/api-compat-macros.html
[u13]: https://hdfgroup.github.io/hdf5/v1_14/api-compat-macros.html
[u14]: https://github.com/HDFGroup/hdf5/releases/tag/snapshot-1.14
[u15]: https://github.com/HDFGroup/hdf5/releases/tag/snapshot

View File

@ -22,7 +22,7 @@ I. Preconditions
1. We suggest you obtain the latest CMake for your platform from the Kitware
web site. The HDF5 1.14.x product requires a minimum CMake version
of 3.18. If you are using VS2022, the minimum version is 3.21.
of 3.18. If you are using VS2022, the minimum CMake version is 3.21.
2. You have installed the HDF5 library built with CMake, by executing
the HDF Install Utility (the *.msi file in the binary package for

View File

@ -3708,7 +3708,7 @@ H5_DLL herr_t H5Pget_fclose_degree(hid_t fapl_id, H5F_close_degree_t *degree);
* \see H5LTopen_file_image(), H5Fget_file_image(), H5Pset_file_image(),
* H5Pset_file_image_callbacks(), H5Pget_file_image_callbacks(),
* \ref H5FD_file_image_callbacks_t, \ref H5FD_file_image_op_t,
* <a href="https://\DOCURL/advanced_topics/file_image_ops.html">
* <a href="https://\DOCURL/advanced_topics/file_image_ops.md">
* HDF5 File Image Operations</a>.
*
*
@ -3748,7 +3748,7 @@ H5_DLL herr_t H5Pget_file_image(hid_t fapl_id, void **buf_ptr_ptr, size_t *buf_l
* \see H5LTopen_file_image(), H5Fget_file_image(), H5Pset_file_image(),
* H5Pset_file_image_callbacks(), H5Pget_file_image_callbacks(),
* \ref H5FD_file_image_callbacks_t, \ref H5FD_file_image_op_t,
* <a href="https://\DOCURL/advanced_topics/file_image_ops.html">
* <a href="https://\DOCURL/advanced_topics/file_image_ops.md">
* HDF5 File Image Operations</a>.
*
* \since 1.8.9
@ -4692,7 +4692,7 @@ H5_DLL herr_t H5Pset_fclose_degree(hid_t fapl_id, H5F_close_degree_t degree);
* This function is part of the file image
* operations feature set. It is highly recommended to study the guide
* [<em>HDF5 File Image Operations</em>]
* (https://\DOCURL/advanced_topics/file_image_ops.html
* (https://\DOCURL/advanced_topics/file_image_ops.md
* ) before using this feature set. See the See Also section below
* for links to other elements of HDF5 file image operations.
*
@ -4704,9 +4704,9 @@ H5_DLL herr_t H5Pset_fclose_degree(hid_t fapl_id, H5F_close_degree_t degree);
* \li H5Pget_file_image_callbacks()
*
* \li [HDF5 File Image Operations]
* (https://\DOCURL/advanced_topics/file_image_ops.html)
* (https://\DOCURL/advanced_topics/file_image_ops.md)
* in [Advanced Topics in HDF5]
* (https://\DOCURL/advanced_topics_list.html)
* (https://\DOCURL/advanced_topics_list.md)
*
* \li Within H5Pset_file_image_callbacks():
* \li Callback #H5FD_file_image_callbacks_t
@ -4729,7 +4729,7 @@ H5_DLL herr_t H5Pset_file_image(hid_t fapl_id, void *buf_ptr, size_t buf_len);
* **Recommended Reading:** This function is part of the file
* image operations feature set. It is highly recommended to study
* the guide [HDF5 File Image Operations]
* (https://\DOCURL/advanced_topics/file_image_ops.html
* (https://\DOCURL/advanced_topics/file_image_ops.md
* ) before using this feature set. See the See Also section below
* for links to other elements of HDF5 file image operations.
*
@ -5205,7 +5205,7 @@ H5_DLL herr_t H5Pset_mdc_config(hid_t plist_id, H5AC_cache_config_t *config_ptr)
* current state of the logging flags.
*
* The log format is described in [<em>Metadata Cache Logging</em>]
* (https://\DOCURL/advanced_topics/Fine-tuning+the+Metadata+Cache).
* (https://\DOCURL/advanced_topics/FineTuningMetadataCache.md).
*
* \since 1.10.0
*
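For context, a minimal C sketch of the property-list side of file image operations (hypothetical names, error checks omitted), pairing the image with the core (in-memory) driver so the subsequent open reads from the buffer rather than disk:

#include "hdf5.h"

hid_t open_from_image(void *buf, size_t len)
{
    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_core(fapl, (size_t)1 << 20, 0); /* in-memory driver, no backing store */
    H5Pset_file_image(fapl, buf, len);          /* library copies buf by default */
    hid_t file = H5Fopen("image.h5", H5F_ACC_RDONLY, fapl);
    H5Pclose(fapl);
    return file;
}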

View File

@ -83,7 +83,7 @@
* to be much more common than internal implementations.
*
* A list of VOL connectors can be found here:
* <a href="https://\DOCURL/registered_vol_connectors.html">
* <a href="https://\DOCURL/registered_vol_connectors.md">
* Registered VOL Connectors</a>
*
* This list is incomplete and only includes the VOL connectors that have been registered with

View File

@ -782,7 +782,7 @@
* item must be closed separately.
*
* For more information,
* @see <a href="http://\ARCURL/Advanced/UsingIdentifiers/index.html">Using Identifiers</a>
* @see <a href="http://\DOCURL/hdf5_topics/UsingIdentifiers.md">Using Identifiers</a>
* in the HDF5 Application Developer's Guide under General Topics in HDF5.
*
* <h4>How Closing a File Affects Other Open Structural Elements</h4>
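A minimal C sketch of that behavior (hypothetical file and dataset names): closing the file identifier does not invalidate other open identifiers into the same file, and the file only truly closes when the last of them is released.

#include "hdf5.h"

void close_order(void)
{
    hid_t file = H5Fopen("example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    hid_t dset = H5Dopen2(file, "/data", H5P_DEFAULT);

    H5Fclose(file);   /* releases the file id; the file itself stays open */
    /* ... dset remains usable here ... */
    H5Dclose(dset);   /* last open id released; the file actually closes */
}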

View File

@ -333,10 +333,9 @@ usage(const char *prog)
PRINTVALSTREAM(
rawoutstream,
" D - is the file driver to use in opening the file. Acceptable values are available from\n");
PRINTVALSTREAM(
rawoutstream,
" "
"https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.\n");
PRINTVALSTREAM(rawoutstream, " "
"https://support.hdfgroup.org/releases/hdf5/documentation/"
"registered_virtual_file_drivers_vfds.md.\n");
PRINTVALSTREAM(rawoutstream,
" Without the file driver flag, the file will be opened with each driver in\n");
PRINTVALSTREAM(rawoutstream, " turn and in the order specified above until one driver succeeds\n");

View File

@ -135,9 +135,9 @@
*
* \subsubsection subsubsec_cltools_h5dump_options_args Option Argument Conventions
* \li <strong>D</strong> - is the file driver to use in opening the file. Acceptable values are available
* from https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without
* the file driver flag, the file will be opened with each driver in turn and in the order specified above
* until one driver succeeds in opening the file. See examples below for family, split, and multi driver
* from https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
* Without the file driver flag, the file will be opened with each driver in turn and in the order specified
* above until one driver succeeds in opening the file. See examples below for family, split, and multi driver
* special file name usage.
*
* \li <strong>F</strong> - is a filename.

View File

@ -65,20 +65,20 @@ usage(const char *progname)
/*-------------------------------------------------------------------------
* Function: get_size
*
* Purpose: Reads a size option of the form `-XNS' where `X' is any
* letter, `N' is a multi-character positive decimal number, and
* `S' is an optional suffix letter in the set [GgMmk]. The
* option may also be split among two arguments as: `-X NS'.
* The input value of ARGNO is the argument number for the
* switch in the ARGV vector and ARGC is the number of entries
* in that vector.
* Purpose: Reads a size option of the form `-XNS' where `X' is any
* letter, `N' is a multi-character positive decimal number, and
* `S' is an optional suffix letter in the set [GgMmk]. The
* option may also be split among two arguments as: `-X NS'.
* The input value of ARGNO is the argument number for the
* switch in the ARGV vector and ARGC is the number of entries
* in that vector.
*
* Return: Success: The value N multiplied according to the
* suffix S. On return ARGNO will be the number
* of the next argument to process.
* Return: Success: The value N multiplied according to the
* suffix S. On return ARGNO will be the number
* of the next argument to process.
*
* Failure: Calls usage() which exits with a non-zero
* status.
* Failure: Calls usage() which exits with a non-zero
* status.
*
*-------------------------------------------------------------------------
*/
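An illustrative C re-implementation of the `-XNS' convention described in this comment (not the tool's actual code; error handling reduced to returning 0 on a malformed suffix):

#include <stdlib.h>

static long parse_size(const char *ns)
{
    char *suffix = NULL;
    long  n = strtol(ns, &suffix, 10);  /* N: positive decimal number */

    switch (*suffix) {                  /* S: optional suffix in [GgMmk] */
        case '\0':          return n;
        case 'k':           return n * 1024L;
        case 'M': case 'm': return n * 1024L * 1024L;
        case 'G': case 'g': return n * 1024L * 1024L * 1024L;
        default:            return 0;
    }
}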
@ -126,13 +126,6 @@ get_size(const char *progname, int *argno, int argc, char *argv[])
/*-------------------------------------------------------------------------
* Function: main
*
* Purpose: Split an hdf5 file
*
* Return: Success:
*
* Failure:
*
*-------------------------------------------------------------------------
*/
H5_GCC_CLANG_DIAG_OFF("format-nonliteral")

View File

@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files
--------------- Option Argument Conventions ---------------
D - is the file driver to use in opening the file. Acceptable values are available from
https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.
https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
Without the file driver flag, the file will be opened with each driver in
turn and in the order specified above until one driver succeeds
in opening the file.

View File

@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files
--------------- Option Argument Conventions ---------------
D - is the file driver to use in opening the file. Acceptable values are available from
https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.
https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
Without the file driver flag, the file will be opened with each driver in
turn and in the order specified above until one driver succeeds
in opening the file.

View File

@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files
--------------- Option Argument Conventions ---------------
D - is the file driver to use in opening the file. Acceptable values are available from
https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.
https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
Without the file driver flag, the file will be opened with each driver in
turn and in the order specified above until one driver succeeds
in opening the file.

View File

@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files
--------------- Option Argument Conventions ---------------
D - is the file driver to use in opening the file. Acceptable values are available from
https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.
https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
Without the file driver flag, the file will be opened with each driver in
turn and in the order specified above until one driver succeeds
in opening the file.

View File

@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files
--------------- Option Argument Conventions ---------------
D - is the file driver to use in opening the file. Acceptable values are available from
https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.
https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
Without the file driver flag, the file will be opened with each driver in
turn and in the order specified above until one driver succeeds
in opening the file.

View File

@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files
--------------- Option Argument Conventions ---------------
D - is the file driver to use in opening the file. Acceptable values are available from
https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.
https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
Without the file driver flag, the file will be opened with each driver in
turn and in the order specified above until one driver succeeds
in opening the file.

View File

@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files
--------------- Option Argument Conventions ---------------
D - is the file driver to use in opening the file. Acceptable values are available from
https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.
https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
Without the file driver flag, the file will be opened with each driver in
turn and in the order specified above until one driver succeeds
in opening the file.

View File

@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files
--------------- Option Argument Conventions ---------------
D - is the file driver to use in opening the file. Acceptable values are available from
https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.
https://support.hdfgroup.org/releases/hdf5/documentation/registered_virtual_file_drivers_vfds.md.
Without the file driver flag, the file will be opened with each driver in
turn and in the order specified above until one driver succeeds
in opening the file.