Update examples and fix old version references.

Allen Byrne 2020-02-26 13:54:34 -06:00
parent f53220dc20
commit 13f5b3aee2
15 changed files with 248 additions and 258 deletions

View File

@@ -233,7 +233,7 @@ tar2cmakezip()
# step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.0-Source.zip $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.1-Source.zip $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpsubdir
@@ -328,7 +328,7 @@ tar2cmaketgz()
# step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.0-Source.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.1-Source.tar.gz $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpsubdir
@@ -411,7 +411,7 @@ tar2hpccmaketgz()
# step 3: add SZIP.tar.gz, ZLib.tar.gz and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.0-Source.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.14.1-Source.tar.gz $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir

View File

@@ -77,7 +77,7 @@ set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DSITE:STRING=${CTEST_SITE} -DBUILDN
#TAR_SOURCE - name of tarfile
#if(NOT DEFINED TAR_SOURCE)
# set(CTEST_USE_TAR_SOURCE "HDF5Examples-1.14.0-Source")
# set(CTEST_USE_TAR_SOURCE "HDF5Examples-1.14.1-Source")
#endif()
###############################################################################################################

View File

@@ -284,11 +284,13 @@ endif ()
# dump the output unless nodisplay option is set
if (TEST_SKIP_COMPARE AND NOT TEST_NO_DISPLAY)
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
execute_process (
COMMAND ${CMAKE_COMMAND} -E echo ${TEST_STREAM}
RESULT_VARIABLE TEST_RESULT
)
if (EXISTS "${TEST_FOLDER}/${TEST_OUTPUT}")
file (READ ${TEST_FOLDER}/${TEST_OUTPUT} TEST_STREAM)
execute_process (
COMMAND ${CMAKE_COMMAND} -E echo ${TEST_STREAM}
RESULT_VARIABLE TEST_RESULT
)
endif ()
endif ()
# everything went fine...

View File

@@ -42,9 +42,9 @@ set (CTEST_SOURCE_VERSEXT "")
##############################################################################
# handle input parameters to script.
#BUILD_GENERATOR - which CMake generator to use, required
#INSTALLDIR - HDF5-1.13.0 root folder
#INSTALLDIR - HDF5-1.13.x root folder
#CTEST_CONFIGURATION_TYPE - Release, Debug, RelWithDebInfo
#CTEST_SOURCE_NAME - name of source folder; HDF5-1.13.0
#CTEST_SOURCE_NAME - name of source folder; HDF5-1.13.x
#MODEL - CDash group name
#HPC - run alternate configurations for HPC machines; sbatch, bsub, raybsub, qsub
#MPI - enable MPI
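These options are passed as comma-separated key=value pairs appended to the script name on the ctest command line. A typical invocation (the MODEL value shown is only illustrative) is:

  ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix,INSTALLDIR=/usr/local/myhdf5,MODEL=Experimental -C Release -VV -O hdf5.log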
@@ -188,8 +188,6 @@ else ()
endif ()
###################################################################
###################################################################
###################################################################
##### Following controls CDash submission #####
#set (LOCAL_SUBMIT "TRUE")

View File

@@ -67,7 +67,7 @@ set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ALLOW_EXTERNAL_SUPPORT:STRIN
#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_ENABLE_SZIP_ENCODING:BOOL=OFF")
#### package examples ####
#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-1.14.0-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}")
#set (ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_PACK_EXAMPLES:BOOL=ON -DHDF5_EXAMPLES_COMPRESSED:STRING=HDF5Examples-1.14.1-Source.tar.gz -DHDF5_EXAMPLES_COMPRESSED_DIR:PATH=${CTEST_SCRIPT_DIRECTORY}")
#############################################################################################
### enable parallel builds

View File

@@ -7,7 +7,7 @@ The <b>Java HD5 Interface (JHI5)</b> is a Java package
(<a href="../../hdf-java-html/javadocs/hdf/hdf5lib/package-summary.html">hdf.hdf5lib</a>)
that ``wraps around'' the HDF5 library.
<p>There are a large number of functions in the HDF5
library (version 1.10). Some of the functions are not supported in JHI5. Most
library (version 1.13). Some of the functions are not supported in JHI5. Most
of the unsupported functions have C function pointers, which is not currently
implemented in JHI5.</p>

View File

@@ -395,29 +395,23 @@ Java_hdf_hdf5lib_H5_H5Pget_1fapl_1hdfs
if (H5Pget_fapl_hdfs((hid_t)fapl_id, &fa) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (HDstrlen(fa.namenode_name) > 0) {
if (NULL == (j_namenode_name = ENVPTR->NewStringUTF(ENVONLY, fa.namenode_name))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create namenode_name string");
}
if (NULL == (j_namenode_name = ENVPTR->NewStringUTF(ENVONLY, fa.namenode_name))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create namenode_name string");
}
args[0].l = j_namenode_name;
args[1].i = (jint)fa.namenode_port;
if (HDstrlen(fa.user_name) > 0) {
if (NULL == (j_user_name = ENVPTR->NewStringUTF(ENVONLY, fa.user_name))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create user_name string");
}
if (NULL == (j_user_name = ENVPTR->NewStringUTF(ENVONLY, fa.user_name))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create user_name string");
}
args[2].l = j_user_name;
if (HDstrlen(fa.kerberos_ticket_cache) > 0) {
if (NULL == (j_kerb_cache_path = ENVPTR->NewStringUTF(ENVONLY, fa.kerberos_ticket_cache))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create kerberos_ticket_cache string");
}
if (NULL == (j_kerb_cache_path = ENVPTR->NewStringUTF(ENVONLY, fa.kerberos_ticket_cache))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create kerberos_ticket_cache string");
}
args[3].l = j_kerb_cache_path;
@@ -820,27 +814,21 @@ Java_hdf_hdf5lib_H5_H5Pget_1fapl_1ros3
if (H5Pget_fapl_ros3((hid_t)fapl_id, &fa) < 0)
H5_LIBRARY_ERROR(ENVONLY);
if (HDstrlen(fa.aws_region) > 0) {
if (NULL == (j_aws = ENVPTR->NewStringUTF(ENVONLY, fa.aws_region))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create aws_region string");
}
if (NULL == (j_aws = ENVPTR->NewStringUTF(ENVONLY, fa.aws_region))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create aws_region string");
}
args[0].l = j_aws;
if (HDstrlen(fa.secret_id) > 0) {
if (NULL == (j_id = ENVPTR->NewStringUTF(ENVONLY, fa.secret_id))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_id string");
}
if (NULL == (j_id = ENVPTR->NewStringUTF(ENVONLY, fa.secret_id))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_id string");
}
args[1].l = j_id;
if (HDstrlen(fa.secret_key) > 0) {
if (NULL == (j_key = ENVPTR->NewStringUTF(ENVONLY, fa.secret_key))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_key string");
}
if (NULL == (j_key = ENVPTR->NewStringUTF(ENVONLY, fa.secret_key))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_key string");
}
args[2].l = j_key;
@@ -1685,11 +1673,9 @@ Java_hdf_hdf5lib_H5_H5Pget_1mdc_1config
args[2].z = cacheinfo.open_trace_file;
args[3].z = cacheinfo.close_trace_file;
if (HDstrlen(cacheinfo.trace_file_name) > 0) {
if (NULL == (j_str = ENVPTR->NewStringUTF(ENVONLY, cacheinfo.trace_file_name))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_mdc_config: out of memory - unable to construct string from UTF characters");
}
if (NULL == (j_str = ENVPTR->NewStringUTF(ENVONLY, cacheinfo.trace_file_name))) {
CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE);
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "H5Pget_mdc_config: out of memory - unable to construct string from UTF characters");
}
args[4].l = j_str;

View File

@@ -414,13 +414,17 @@ CONTENTS
4.3.11. Backward compatibility
The 1.10 version of the HDF5 library can be configured to operate
identically to the v1.8 library with the
The 1.13 version of the HDF5 library can be configured to operate
identically to the v1.12 library with the
--with-default-api-version=v112
configure flag, or identically to the v1.10 library with the
--with-default-api-version=v110
configure flag, or identically to the v1.8 library with the
--with-default-api-version=v18
configure flag, or identically to the v1.6 library with the
--with-default-api-version=v16
configure flag. This allows existing code to be compiled with the
v1.10 library without requiring immediate changes to the application
v1.13 library without requiring immediate changes to the application
source code. For additional configuration options and other details,
see "API Compatibility Macros":

View File

@@ -26,11 +26,11 @@ Obtaining HDF5 source code
2. Obtain compressed (*.tar or *.zip) HDF5 source from
https://portal.hdfgroup.org/display/support/Building+HDF5+with+CMake
and put it in "myhdfstuff".
Uncompress the file. There should be a hdf5-1.10."X" folder.
Uncompress the file. There should be a hdf5-1.13."X" folder.
CMake version
1. We suggest you obtain the latest CMake from the Kitware web site.
The HDF5 1.10."X" product requires a minimum CMake version 3.10,
The HDF5 1.13."X" product requires a minimum CMake version 3.12,
where "X" is the current HDF5 release version. If you are using
VS2019, the minimum version is 3.15.
@@ -53,7 +53,7 @@ The following files referenced below are available at the HDF web site:
https://portal.hdfgroup.org/display/support/Building+HDF5+with+CMake
Single compressed file with all the files needed, including source:
CMake-hdf5-1.10.X.zip or CMake-hdf5-1.10.X.tar.gz
CMake-hdf5-1.13.X.zip or CMake-hdf5-1.13.X.tar.gz
Individual files included in the above mentioned compressed files
-----------------------------------------------
@@ -65,7 +65,7 @@ External compression szip and zlib libraries:
ZLib.tar.gz
Examples Source package:
HDF5Examples-1.10.x-Source.tar.gz
HDF5Examples-1.14.x-Source.tar.gz
Configuration files:
HDF5config.cmake
@@ -78,10 +78,10 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
1. Change to the development directory "myhdfstuff".
2. Download the CMake-hdf5-1.10.X.zip(.tar.gz) file to "myhdfstuff".
2. Download the CMake-hdf5-1.13.X.zip(.tar.gz) file to "myhdfstuff".
Uncompress the file.
3. Change to the source directory "hdf5-1.10.x".
3. Change to the source directory "hdf5-1.13.x".
CTestScript.cmake file should not be modified.
4. Edit the platform configuration file, HDF5options.cmake, if you want to change
@@ -109,7 +109,7 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
The command above will configure, build, test, and create an install
package in the myhdfstuff folder. It will have the format:
HDF5-1.10.NN-<platform>.<zip or tar.gz>
HDF5-1.13.NN-<platform>.<zip or tar.gz>
On Unix, <platform> will be "Linux". A similar .sh file will also be created.
On Windows, <platform> will be "win64" or "win32". If you have an
@@ -130,13 +130,13 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
6. To install, "X" is the current release version
On Windows (with WiX installed), execute:
HDF5-1.10."X"-win32.msi or HDF5-1.10."X"-win64.msi
HDF5-1.13."X"-win32.msi or HDF5-1.13."X"-win64.msi
By default this program will install the hdf5 library into the
"C:\Program Files" directory and will create the following
directory structure:
HDF_Group
--HDF5
----1.10."X"
----1.13."X"
------bin
------include
------lib
@@ -144,40 +144,40 @@ To build HDF5 with the SZIP and ZLIB external libraries you will need to:
On Linux, change to the install destination directory
(create it if doesn't exist) and execute:
<path-to>/myhdfstuff/HDF5-1.10."X"-Linux.sh
<path-to>/myhdfstuff/HDF5-1.13."X"-Linux.sh
After accepting the license, the script will prompt:
By default the HDF5 will be installed in:
"<current directory>/HDF5-1.10."X"-Linux"
Do you want to include the subdirectory HDF5-1.10."X"-Linux?
"<current directory>/HDF5-1.13."X"-Linux"
Do you want to include the subdirectory HDF5-1.13."X"-Linux?
Saying no will install in: "<current directory>" [Yn]:
Note that the script will create the following directory structure
relative to the install point:
HDF_Group
--HDF5
----1.10."X"
----1.13."X"
------bin
------include
------lib
------share
On Mac you will find HDF5-1.10."X"-Darwin.dmg in the myhdfstuff folder. Click
On Mac you will find HDF5-1.13."X"-Darwin.dmg in the myhdfstuff folder. Click
on the dmg file to proceed with installation. After accepting the license,
there will be a folder with the following structure:
HDF_Group
--HDF5
----1.10."X"
----1.13."X"
------bin
------include
------lib
------share
By default the installation will create the bin, include, lib and cmake
folders in the <install destination directory>/HDF_Group/HDF5/1.10."X"
folders in the <install destination directory>/HDF_Group/HDF5/1.13."X"
The <install destination directory> depends on the build platform;
Windows will set the default to:
C:/Program Files/HDF_Group/HDF5/1.10."X"
C:/Program Files/HDF_Group/HDF5/1.13."X"
Linux will set the default to:
"myhdfstuff/HDF_Group/HDF5/1.10."X"
"myhdfstuff/HDF_Group/HDF5/1.13."X"
The default can be changed by adding ",INSTALLDIR=<my new dir>" to the
"ctest -S HDF5config.cmake..." command. For example on linux:
ctest -S HDF5config.cmake,INSTALLDIR=/usr/local/myhdf5,BUILD_GENERATOR=Unix -C Release -VV -O hdf5.log
@@ -204,13 +204,13 @@ Notes: This short set of instructions is written for users who want to
5. Configure the C library, tools and tests with one of the following commands:
On Windows 32 bit
cmake -G "Visual Studio 12 2013" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.10."X"
cmake -G "Visual Studio 12 2013" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.13."X"
On Windows 64 bit
cmake -G "Visual Studio 12 2013 Win64" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.10."X"
cmake -G "Visual Studio 12 2013 Win64" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ..\hdf5-1.13."X"
On Linux and Mac
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ../hdf5-1.10."X"
cmake -G "Unix Makefiles" -DCMAKE_BUILD_TYPE:STRING=Release -DBUILD_SHARED_LIBS:BOOL=OFF -DBUILD_TESTING:BOOL=ON -DHDF5_BUILD_TOOLS:BOOL=ON ../hdf5-1.13."X"
where "X" is the current release version.
@@ -225,13 +225,13 @@ Notes: This short set of instructions is written for users who want to
9. To install
On Windows (with WiX installed), execute:
HDF5-1.10."X"-win32.msi or HDF5-1.10."X"-win64.msi
HDF5-1.13."X"-win32.msi or HDF5-1.13."X"-win64.msi
By default this program will install the hdf5 library into the
"C:\Program Files" directory and will create the following
directory structure:
HDF_Group
--HDF5
----1.10."X"
----1.13."X"
------bin
------include
------lib
@@ -239,28 +239,28 @@ Notes: This short set of instructions is written for users who want to
On Linux, change to the install destination directory
(create if doesn't exist) and execute:
<path-to>/myhdfstuff/build/HDF5-1.10."X"-Linux.sh
<path-to>/myhdfstuff/build/HDF5-1.13."X"-Linux.sh
After accepting the license, the script will prompt:
By default the HDF5 will be installed in:
"<current directory>/HDF5-1.10."X"-Linux"
Do you want to include the subdirectory HDF5-1.10."X"-Linux?
"<current directory>/HDF5-1.13."X"-Linux"
Do you want to include the subdirectory HDF5-1.13."X"-Linux?
Saying no will install in: "<current directory>" [Yn]:
Note that the script will create the following directory structure
relative to the install point:
HDF_Group
--HDF5
----1.10."X"
----1.13."X"
------bin
------include
------lib
------share
On Mac you will find HDF5-1.10."X"-Darwin.dmg in the build folder. Click
On Mac you will find HDF5-1.13."X"-Darwin.dmg in the build folder. Click
on the dmg file to proceed with installation. After accepting the license,
there will be a folder with the following structure:
HDF_Group
--HDF5
----1.10."X"
----1.13."X"
------bin
------include
------lib
@@ -272,7 +272,7 @@ IV. Further considerations
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
web site. The HDF5 1.10."X" product requires a minimum CMake version 3.10.
web site. The HDF5 1.13."X" product requires a minimum CMake version 3.12.
2. If you plan to use Zlib or Szip:
A. Download the binary packages and install them in a central location.
@@ -656,7 +656,7 @@ HDF5_STRICT_FORMAT_CHECKS "Whether to perform strict file format checks"
HDF_TEST_EXPRESS "Control testing framework (0-3)" "0"
HDF5_TEST_VFD "Execute tests with different VFDs" OFF
HDF5_TEST_PASSTHROUGH_VOL "Execute tests with different passthrough VOL connectors" OFF
DEFAULT_API_VERSION "Enable default API (v16, v18, v110, v112)" "v112"
DEFAULT_API_VERSION "Enable default API (v16, v18, v110, v112, v114)" "v114"
HDF5_USE_FOLDERS "Enable folder grouping of projects in IDEs." ON
HDF5_WANT_DATA_ACCURACY "IF data accuracy is guaranteed during data conversions" ON
HDF5_WANT_DCONV_EXCEPTION "exception handling functions is checked during data conversions" ON
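For example (generator and source path as in the configure examples above), selecting the v1.8 default API at configure time:

  cmake -G "Unix Makefiles" -DDEFAULT_API_VERSION:STRING=v18 ../hdf5-1.13."X"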

View File

@@ -98,19 +98,19 @@ Build, Test and Install HDF5 on Cygwin
The HDF5 source code is distributed in a variety of formats which
can be unpacked with the following commands, each of which creates
an `hdf5-1.10.x' directory.
an `hdf5-1.13.x' directory.
2.1 Non-compressed tar archive (*.tar)
$ tar xf hdf5-1.10.x.tar
$ tar xf hdf5-1.13.x.tar
2.2 Gzip'd tar archive (*.tar.gz)
$ gunzip < hdf5-1.10.x.tar.gz | tar xf -
$ gunzip < hdf5-1.13.x.tar.gz | tar xf -
2.3 Bzip'd tar archive (*.tar.bz2)
$ bunzip2 < hdf5-1.10.x.tar.bz2 | tar xf -
$ bunzip2 < hdf5-1.13.x.tar.bz2 | tar xf -
2. Setup Environment

View File

@@ -22,7 +22,7 @@ I. Prerequisites
from HDF5 source in a 'home' directory typically results in test
failures and should be avoided.
2. Load modules for desired compilers, module for cmake version 3.10 or greater,
2. Load modules for desired compilers, module for cmake version 3.12 or greater,
and set any needed environment variables for compilers (i.e., CC, FC, CXX).
Unload any problematic modules (i.e., craype-hugepages2M).
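A minimal sketch for a Cray system (module and compiler-wrapper names are site specific and shown only as examples):

  $ module load cmake/3.12
  $ module unload craype-hugepages2M
  $ export CC=cc FC=ftn CXX=CC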
@@ -37,11 +37,11 @@ from a release tar file in a working directory:
If no branch is specified, then the 'develop' version will be checked out.
If no source directory is specified, then the source will be located in the
'hdf5' directory. The Cmake scripts expect the source to be in a directory
'hdf5' directory. The CMake scripts expect the source to be in a directory
named hdf5-<version string>, where 'version string' uses the format '1.xx.xx'.
For example, for the current 'develop' version, the "hdf5" directory should
be renamed "hdf5-1.11.4", or for the first hdf5_1_10_5 pre-release version,
it should be renamed "hdf5-1.10.5-pre1".
be renamed "hdf5-1.13.0", or for the first hdf5_1_12_0 pre-release version,
it should be renamed "hdf5-1.12.0-5".
If the version number is not known a priori, the version string
can be obtained by running bin/h5vers in the top level directory of the source clone, and
@@ -66,14 +66,14 @@ a cross-compiling emulator. The setup steps will make default settings for
parallel or serial only builds available to the CMake command.
1. For the current 'develop' version the "hdf5" directory should be renamed
"hdf5-1.11.4".
"hdf5-1.13.0".
2. Three cmake script files need to be copied to the working directory, or
have symbolic links to them, created in the working directory:
hdf5-1.11.4/config/cmake/scripts/HDF5config.cmake
hdf5-1.11.4/config/cmake/scripts/CTestScript.cmake
hdf5-1.11.4/config/cmake/scripts/HDF5options.cmake
hdf5-1.13.0/config/cmake/scripts/HDF5config.cmake
hdf5-1.13.0/config/cmake/scripts/CTestScript.cmake
hdf5-1.13.0/config/cmake/scripts/HDF5options.cmake
should be copied to the working directory.
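As a sketch (using the version string from the example above):

  $ cd <working directory>
  $ cp hdf5-1.13.0/config/cmake/scripts/HDF5config.cmake .
  $ cp hdf5-1.13.0/config/cmake/scripts/CTestScript.cmake .
  $ cp hdf5-1.13.0/config/cmake/scripts/HDF5options.cmake .

Symbolic links (ln -s) to the three files work equally well.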
@@ -82,7 +82,7 @@ parallel or serial only builds available to the CMake command.
CTestScript.cmake
HDF5config.cmake
HDF5options.cmake
hdf5-1.11.4
hdf5-1.13.0
Additionally, when the ctest command runs [1], it will add a build directory
in the working directory.
@@ -145,7 +145,7 @@ cori, another CrayXC40, that line is replaced by "#SBATCH -C knl,quad,cache".
For cori (and other machines), the values in LOCAL_BATCH_SCRIPT_NAME and
LOCAL_BATCH_SCRIPT_PARALLEL_NAME in the config/cmake/scripts/HPC/sbatch-HDF5options.cmake
file can be replaced by cori_knl_ctestS.sl and cori_knl_ctestS.sl, or the lines
can be edited in the batch files in hdf5-1.11.4/bin/batch.
can be edited in the batch files in hdf5-1.13.0/bin/batch.
========================================================================
V. Manual alternatives
@@ -153,11 +153,11 @@ V. Manual alternatives
If using ctest is undesirable, one can create a build directory and run the cmake
configure command, for example
"/projects/Mutrino/hpcsoft/cle6.0/common/cmake/3.10.2/bin/cmake"
-C "<working directory>/hdf5-1.11.4/config/cmake/cacheinit.cmake"
"/projects/Mutrino/hpcsoft/cle6.0/common/cmake/3.12/bin/cmake"
-C "<working directory>/hdf5-1.13.0/config/cmake/cacheinit.cmake"
-DCMAKE_BUILD_TYPE:STRING=Release -DHDF5_BUILD_FORTRAN:BOOL=ON
-DHDF5_BUILD_JAVA:BOOL=OFF
-DCMAKE_INSTALL_PREFIX:PATH=<working directory>/HDF_Group/HDF5/1.11.4
-DCMAKE_INSTALL_PREFIX:PATH=<working directory>/HDF_Group/HDF5/1.13.0
-DHDF5_ENABLE_Z_LIB_SUPPORT:BOOL=OFF -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF
-DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_BUILD_CPP_LIB:BOOL=OFF
-DHDF5_BUILD_JAVA:BOOL=OFF -DHDF5_ENABLE_THREADSAFE:BOOL=OFF
@@ -168,7 +168,7 @@ configure command, for example
-DLOCAL_BATCH_SCRIPT_NAME:STRING=knl_ctestS.sl
-DLOCAL_BATCH_SCRIPT_PARALLEL_NAME:STRING=knl_ctestP.sl -DSITE:STRING=mutrino
-DBUILDNAME:STRING=par-knl_GCC493-SHARED-Linux-4.4.156-94.61.1.16335.0.PTF.1107299-default-x86_64
"-GUnix Makefiles" "" "<working directory>/hdf5-1.11.4"
"-GUnix Makefiles" "" "<working directory>/hdf5-1.13.0"
followed by make and batch jobs to run tests.
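As a sketch (job count is illustrative; the batch script names are those given in the options above and are submitted with the site's batch command, sbatch here):

  $ make -j 8
  $ sbatch knl_ctestS.sl     # serial tests
  $ sbatch knl_ctestP.sl     # parallel tests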
@@ -195,12 +195,12 @@ files which do not seem to be necessary with the Cray PrgEnv-* modules
1. HDF5_USE_PREGEN. This option, along with the HDF5_USE_PREGEN_DIR CMake
variable would allow the use of an appropriate H5Tinit.c file with type
information generated on a compute node to be used when cross compiling
for those compute nodes. The use of the variables in lines 110 and 111
of HDF5options.cmake file seem to preclude needing this option with the
available Cray modules and CMake option.
information generated on a compute node to be used when cross compiling
for those compute nodes. The use of the variables in lines 110 and 111
of HDF5options.cmake file seem to preclude needing this option with the
available Cray modules and CMake option.
2. HDF5_BATCH_H5DETECT and associated CMake variables. This option when
properly configured will run H5detect in a batch job on a compute node
at the beginning of the CMake build process. It was also found to be
unnecessary with the available Cray modules and CMake options.
at the beginning of the CMake build process. It was also found to be
unnecessary with the available Cray modules and CMake options.
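If either option were needed, it would be added to the configure line in the usual -D form; for example (variable types and path here are assumptions for illustration only):

  -DHDF5_USE_PREGEN:BOOL=ON -DHDF5_USE_PREGEN_DIR:PATH=<working directory>/pregen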

View File

@@ -20,7 +20,7 @@ The official HDF5 releases can be obtained from:
https://www.hdfgroup.org/downloads/hdf5/
Changes from Release to Release and New Features in the HDF5-1.10.x release series
Changes from Release to Release and New Features in the HDF5-1.13.x release series
can be found at:
https://portal.hdfgroup.org/display/HDF5/HDF5+Application+Developer%27s+Guide

View File

@@ -21,8 +21,8 @@ I. Preconditions
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
web site. The HDF5 1.10.x product requires a minimum CMake version
of 3.10.2. If you are using VS2019, the minimum version is 3.15.
web site. The HDF5 1.13.x product requires a minimum CMake version
of 3.12. If you are using VS2019, the minimum version is 3.15.
2. You have installed the HDF5 library built with CMake, by executing
the HDF Install Utility (the *.msi file in the binary package for

View File

@@ -36,8 +36,8 @@ I. Preconditions
========================================================================
1. We suggest you obtain the latest CMake for windows from the Kitware
web site. The HDF5 1.10.x product requires a minimum CMake version
of 3.10.1.
web site. The HDF5 1.13.x product requires a minimum CMake version
of 3.12.
2. You have installed the HDF5 library built with CMake, by executing
the HDF Install Utility (the *.msi file in the binary package for
@@ -47,7 +47,7 @@ I. Preconditions
3. Set the environment variable HDF5_DIR to the installed location of
the config files for HDF5. On Windows:
HDF5_DIR=C:/Program Files/HDF_Group/HDF5/1.10.x/cmake
HDF5_DIR=C:/Program Files/HDF_Group/HDF5/1.13.x/cmake
(Note there are no quote characters used on Windows and all platforms
use forward slashes)
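On Linux or Mac the equivalent (install prefix illustrative; the directory must be the one containing the HDF5 CMake config files) is:

  export HDF5_DIR=/usr/local/HDF_Group/HDF5/1.13.x/cmake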

View File

@@ -54,11 +54,11 @@ Using Visual Studio 2008 with HDF5 Libraries built with Visual Studio 2008
and select "x64".
2.2 Find the box "Show directories for", choose "Include files", add the
header path (i.e. c:\Program Files\HDF_Group\HDF5\1.10.x\include)
header path (i.e. c:\Program Files\HDF_Group\HDF5\1.13.x\include)
to the included directories.
2.3 Find the box "Show directories for", choose "Library files", add the
library path (i.e. c:\Program Files\HDF_Group\HDF5\1.10.x\lib)
library path (i.e. c:\Program Files\HDF_Group\HDF5\1.13.x\lib)
to the library directories.
2.4 If using Fortran libraries, you will also need to setup the path