handle missing H5Dread_chunk

Dennis Heimbigner 2020-05-14 12:59:00 -06:00
parent c4faca3bcc
commit a9cede7647
3 changed files with 20 additions and 15 deletions
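
The change replaces the old check, which went straight to HDF5_SUPPORTS_PAR_FILTERS, with an explicit probe for H5Dread_chunk; that probe then drives both HDF5_SUPPORTS_PAR_FILTERS and a new ENABLE_NCDUMPCHUNKS flag, so builds against HDF5 releases that lack the function simply skip the dependent code instead of failing. As a rough illustration (not code from this repository), C code gated by the HDF5_SUPPORTS_PAR_FILTERS define that configure sets could degrade like this; the helper name read_raw_chunk and the config.h include are assumptions:

    /* Rough sketch, not netcdf-c source: call H5Dread_chunk only when the
     * configure/CMake checks in this commit report it, and fall back
     * cleanly otherwise.  Assumes a generated config.h that defines
     * HDF5_SUPPORTS_PAR_FILTERS, as AC_DEFINE'd below. */
    #include "config.h"      /* assumed generated header */
    #include <hdf5.h>
    #include <stdint.h>
    #include <stdio.h>

    static int read_raw_chunk(hid_t dset, const hsize_t *offset, void *buf)
    {
    #ifdef HDF5_SUPPORTS_PAR_FILTERS
        uint32_t filter_mask = 0;
        /* H5Dread_chunk is available in HDF5 1.10.3 and later */
        return H5Dread_chunk(dset, H5P_DEFAULT, offset, &filter_mask, buf) < 0 ? -1 : 0;
    #else
        (void)dset; (void)offset; (void)buf;
        fprintf(stderr, "H5Dread_chunk is not available in this HDF5 build\n");
        return -1;
    #endif
    }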

View File

@@ -744,8 +744,15 @@ IF(USE_HDF5 OR ENABLE_NETCDF_4)
   SET(HDF5_CC h5cc)
 ENDIF()
+# Check to see if H5Dread_chunk is available
+CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Dread_chunk "" HAS_READCHUNKS)
 # Check to see if this is hdf5-1.10.3 or later.
-CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Dread_chunk "" HDF5_SUPPORTS_PAR_FILTERS)
+IF(HAS_READCHUNKS)
+  SET(HDF5_SUPPORTS_PAR_FILTERS ON)
+  SET(ENABLE_NCDUMPCHUNKS ON)
+ENDIF()
 IF (HDF5_SUPPORTS_PAR_FILTERS)
   SET(HDF5_HAS_PAR_FILTERS TRUE CACHE BOOL "")
   SET(HAS_PAR_FILTERS yes CACHE STRING "")

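Both the CHECK_LIBRARY_EXISTS call above and the AC_SEARCH_LIBS call in configure.ac below boil down to a link test: a stub translation unit that references the symbol is compiled and linked against the HDF5 libraries, and the result sets HAS_READCHUNKS / has_readchunks. The probe source the two tools generate differs in detail, but its shape is roughly:

    /* Approximate shape of the generated probe: only linking matters,
     * the call is never executed with meaningful arguments. */
    char H5Dread_chunk(void);   /* dummy declaration, real signature ignored */

    int main(void)
    {
        /* If the linker resolves H5Dread_chunk in the HDF5 library,
         * the check succeeds. */
        return (int)H5Dread_chunk();
    }
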
View File

@@ -1065,7 +1065,7 @@ if test "x$enable_hdf5" = xyes; then
 # H5Pset_fapl_mpiposix and H5Pget_fapl_mpiposix have been removed since HDF5 1.8.12.
 # Use H5Pset_fapl_mpio and H5Pget_fapl_mpio, instead.
-AC_CHECK_FUNCS([H5Pget_fapl_mpio H5Pset_deflate H5Z_SZIP H5free_memory H5resize_memory H5allocate_memory H5Pset_libver_bounds H5Pset_all_coll_metadata_ops H5Dread_chunk])
+AC_CHECK_FUNCS([H5Pget_fapl_mpio H5Pset_deflate H5Z_SZIP H5free_memory H5resize_memory H5allocate_memory H5Pset_libver_bounds H5Pset_all_coll_metadata_ops])
 # Check to see if HDF5 library has collective metadata APIs, (HDF5 >= 1.10.0)
 if test "x$ac_cv_func_H5Pset_all_coll_metadata_ops" = xyes; then
@@ -1076,19 +1076,21 @@ if test "x$enable_hdf5" = xyes; then
 if test "x$ac_cv_func_H5Pget_fapl_mpio" = xyes -o "x$ac_cv_func_H5Pget_fapl_mpiposix" = xyes; then
   hdf5_parallel=yes
 fi
 AC_MSG_CHECKING([whether parallel io is enabled in hdf5])
 AC_MSG_RESULT([$hdf5_parallel])
+# See if H5Dread_chunk is available
+AC_SEARCH_LIBS([H5Dread_chunk],[hdf5_hldll hdf5_hl], [has_readchunks=yes], [has_readchunks=no])
 # Check to see if HDF5 library is 1.10.3 or greater. If so, allows
 # parallel I/O with filters. This allows zlib/szip compression to
 # be used with parallel I/O, which is very helpful to HPC users.
-if test "x$ac_cv_func_H5Dread_chunk" = xyes; then
+if test "x$has_readchunks" = xyes; then
   AC_DEFINE([HDF5_SUPPORTS_PAR_FILTERS], [1], [if true, HDF5 is at least version 1.10.3 and allows parallel I/O with zip])
   hdf5_supports_par_filters=yes
 fi
 AC_MSG_CHECKING([whether HDF5 allows parallel filters])
-AC_MSG_RESULT([$ac_cv_func_H5Dread_chunk])
+AC_MSG_RESULT([$has_readchunks])
 # Check to see if user asked for parallel build, but HDF5 does not support it.
 if test "x$hdf5_parallel" = "xno"; then
@@ -1106,14 +1108,8 @@ if test "x$enable_hdf5" = xyes; then
 fi
 AC_MSG_RESULT([$enable_szlib])
-if test "x$nc_build_utilities" = xyes ; then
-  # See if H5Dread_chunk is available
-  AC_SEARCH_LIBS([H5Dread_chunk],[hdf5_hldll hdf5_hl], [enable_readchunks=yes], [enable_readdhunks=no])
-else
-  enable_readchunks=no
-fi
 fi
-AM_CONDITIONAL(ENABLE_NCDUMPCHUNKS, [test "x$enable_readchunks" = xyes ])
+AM_CONDITIONAL(ENABLE_NCDUMPCHUNKS, [test "x$has_readchunks" = xyes ])
 # If the user wants hdf4 built in, check it out.
 if test "x$enable_hdf4" = xyes; then

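The configure comment in the hunk above gives the motivation for the threshold netcdf-c checks for: with HDF5 1.10.3 or later, filters such as zlib and szip can be combined with parallel I/O. At the netCDF API level, what HDF5_SUPPORTS_PAR_FILTERS ultimately enables looks roughly like the sketch below; the file name, sizes, and deflate level are arbitrary, it assumes a parallel netCDF-4 build against a new-enough HDF5, and error handling is minimal.

    /* Sketch: write a zlib-compressed variable with parallel I/O.
     * Compressed parallel writes require collective access. */
    #include <mpi.h>
    #include <netcdf.h>
    #include <netcdf_par.h>
    #include <stdio.h>

    #define CHK(e) do { int s = (e); if (s != NC_NOERR) { \
        fprintf(stderr, "%s\n", nc_strerror(s)); MPI_Abort(MPI_COMM_WORLD, 1); } } while (0)

    int main(int argc, char **argv)
    {
        int rank, nprocs, ncid, dimid, varid;
        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        MPI_Comm_size(MPI_COMM_WORLD, &nprocs);

        CHK(nc_create_par("par_deflate.nc", NC_NETCDF4, MPI_COMM_WORLD,
                          MPI_INFO_NULL, &ncid));
        CHK(nc_def_dim(ncid, "x", (size_t)nprocs * 100, &dimid));
        CHK(nc_def_var(ncid, "v", NC_FLOAT, 1, &dimid, &varid));
        CHK(nc_def_var_deflate(ncid, varid, 0, 1, 4));      /* zlib level 4 */
        CHK(nc_var_par_access(ncid, varid, NC_COLLECTIVE)); /* filters need collective I/O */
        CHK(nc_enddef(ncid));

        /* Each rank writes its own 100-element slab. */
        float data[100];
        for (int i = 0; i < 100; i++) data[i] = (float)rank;
        size_t start = (size_t)rank * 100, count = 100;
        CHK(nc_put_vara_float(ncid, varid, &start, &count, data));

        CHK(nc_close(ncid));
        MPI_Finalize();
        return 0;
    }
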
View File

@@ -35,9 +35,10 @@ TARGET_LINK_LIBRARIES(ncgen netcdf ${ALL_TLL_LIBS})
 # Given a netcdf4 file, dump the actual chunk contents.
 # Used to validate nczarr chunking code.
 IF(USE_NETCDF4)
-  SET(ncdumpchunks_FILES ncdumpchunks.c)
-  ADD_EXECUTABLE(ncdumpchunks ${ncdumpchunks_FILES})
-  TARGET_LINK_LIBRARIES(ncdumpchunks netcdf ${ALL_TLL_LIBS})
+  IF(ENABLE_NCDUMPCHUNKS)
+    SET(ncdumpchunks_FILES ncdumpchunks.c)
+    ADD_EXECUTABLE(ncdumpchunks ${ncdumpchunks_FILES})
+    TARGET_LINK_LIBRARIES(ncdumpchunks netcdf ${ALL_TLL_LIBS})
   SET_TARGET_PROPERTIES(ncdumpchunks PROPERTIES RUNTIME_OUTPUT_DIRECTORY
     ${CMAKE_CURRENT_BINARY_DIR})
   SET_TARGET_PROPERTIES(ncdumpchunks PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG
@@ -50,6 +51,7 @@ IF(MSVC)
     )
   ENDIF()
 ENDIF()
+ENDIF()
 ####
 # We have to do a little tweaking