## This is a CMake file, part of Unidata's netCDF package.
# Copyright 2012-2018, see the COPYRIGHT file for more information.
#

##################################
# Set Project Properties
##################################

#Minimum required CMake Version
cmake_minimum_required(VERSION 3.6.1)

#Project Name
project(netCDF LANGUAGES C CXX)
set(PACKAGE "netCDF" CACHE STRING "")

#####
# Version Info:
#
# Release Version
# Library Version
# SO Version
#
# SO Version is computed from library version. See:
# http://www.gnu.org/software/libtool/manual/libtool.html#Libtool-versioning
#####
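
# Illustrative note on the libtool convention referenced above (not a
# netCDF-specific rule): a libtool version triple current:revision:age maps to
# an SONAME major number of (current - age), so e.g. -version-info 19:0:1
# would produce libnetcdf.so.18.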

SET(NC_VERSION_MAJOR 4)
SET(NC_VERSION_MINOR 8)
SET(NC_VERSION_PATCH 0)
SET(NC_VERSION_NOTE "-development")
SET(netCDF_VERSION ${NC_VERSION_MAJOR}.${NC_VERSION_MINOR}.${NC_VERSION_PATCH}${NC_VERSION_NOTE})
SET(VERSION ${netCDF_VERSION})
SET(NC_VERSION ${netCDF_VERSION})
SET(netCDF_LIB_VERSION 18)
SET(netCDF_SO_VERSION 18)
SET(PACKAGE_VERSION ${VERSION})

# Version of the dispatch table, in case we change it.
SET(NC_DISPATCH_VERSION 1)

# Get system configuration. Use it to determine osname, os release, cpu. These
# will be used when committing to CDash.
find_program(UNAME NAMES uname)
IF(UNAME)
  macro(getuname name flag)
    exec_program("${UNAME}" ARGS "${flag}" OUTPUT_VARIABLE "${name}")
  endmacro(getuname)
  getuname(osname -s)
  getuname(osrel -r)
  getuname(cpu -m)
  set(TMP_BUILDNAME "${osname}-${osrel}-${cpu}")
ENDIF()

###
# Allow for some customization of the buildname.
# This will make it easier to identify different builds,
# based on values passed from command line/shell scripts.
#
# For ctest scripts, we can use CTEST_BUILD_NAME.
###

SET(BUILDNAME_PREFIX "" CACHE STRING "")
SET(BUILDNAME_SUFFIX "" CACHE STRING "")

IF(BUILDNAME_PREFIX)
  SET(TMP_BUILDNAME "${BUILDNAME_PREFIX}-${TMP_BUILDNAME}")
ENDIF()

IF(BUILDNAME_SUFFIX)
  SET(TMP_BUILDNAME "${TMP_BUILDNAME}-${BUILDNAME_SUFFIX}")
ENDIF()

IF(NOT BUILDNAME)
  SET(BUILDNAME "${TMP_BUILDNAME}" CACHE STRING "Build name variable for CDash")
ENDIF()
###
# End BUILDNAME customization.
###
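
# Example (the prefix/suffix values are hypothetical): configuring with
#   cmake -DBUILDNAME_PREFIX=nightly -DBUILDNAME_SUFFIX=gcc9 /path/to/netcdf-c
# on a Linux x86_64 host would report a CDash build name such as
#   nightly-Linux-5.4.0-x86_64-gcc9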

# For CMAKE_INSTALL_LIBDIR
INCLUDE(GNUInstallDirs)

IF(MSVC)
  SET_PROPERTY(GLOBAL PROPERTY USE_FOLDERS ON)
ENDIF()

#Add custom CMake Module
SET(CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules/"
  CACHE INTERNAL "Location of our custom CMake modules.")

# auto-configure style checks, other CMake modules.
INCLUDE(CheckLibraryExists)
INCLUDE(CheckIncludeFile)
INCLUDE(CheckIncludeFiles)
INCLUDE(CheckTypeSize)
INCLUDE(CheckFunctionExists)
INCLUDE(CheckCXXSourceCompiles)
INCLUDE(CheckCSourceCompiles)
INCLUDE(TestBigEndian)
INCLUDE(CheckSymbolExists)
INCLUDE(GetPrerequisites)
INCLUDE(CheckCCompilerFlag)
FIND_PACKAGE(PkgConfig QUIET)

# A check to see if the system is big endian
TEST_BIG_ENDIAN(BIGENDIAN)
IF(${BIGENDIAN})
  SET(WORDS_BIGENDIAN "1")
ENDIF(${BIGENDIAN})

# A macro to check if a C linker supports a particular flag.
MACRO(CHECK_C_LINKER_FLAG M_FLAG M_RESULT)
  SET(T_REQ_FLAG "${CMAKE_REQUIRED_FLAGS}")
  SET(CMAKE_REQUIRED_FLAGS "${M_FLAG}")
  CHECK_C_SOURCE_COMPILES("int main() {return 0;}" ${M_RESULT})
  SET(CMAKE_REQUIRED_FLAGS "${T_REQ_FLAG}")
ENDMACRO()
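
# Example usage of the macro above (the flag shown is only illustrative): the
# result variable is set to a true value when a minimal program links with the
# flag added to CMAKE_REQUIRED_FLAGS.
#   CHECK_C_LINKER_FLAG("-Wl,--as-needed" LINKER_HAS_AS_NEEDED)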

# Enable 'dist and distcheck'.
# File adapted from http://ensc.de/cmake/FindMakeDist.cmake
FIND_PACKAGE(MakeDist)
# End 'enable dist and distcheck'

# Set the build type.
IF(NOT CMAKE_BUILD_TYPE)
  SET(CMAKE_BUILD_TYPE DEBUG CACHE STRING "Choose the type of build, options are: None, Debug, Release."
    FORCE)
ENDIF()

# Set build type uppercase
STRING(TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE)

# Determine the configure date.
IF(DEFINED ENV{SOURCE_DATE_EPOCH})
  EXECUTE_PROCESS(
    COMMAND "date" "-u" "-d" "@$ENV{SOURCE_DATE_EPOCH}"
    OUTPUT_VARIABLE CONFIG_DATE
    )
ELSE()
  EXECUTE_PROCESS(
    COMMAND date
    OUTPUT_VARIABLE CONFIG_DATE
    )
ENDIF()
IF(CONFIG_DATE)
  string(STRIP ${CONFIG_DATE} CONFIG_DATE)
ENDIF()
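
# Reproducible-build example (the epoch value is illustrative): exporting the
# standard SOURCE_DATE_EPOCH environment variable pins CONFIG_DATE, e.g.
#   SOURCE_DATE_EPOCH=1577836800 cmake /path/to/netcdf-c
# records "Wed Jan  1 00:00:00 UTC 2020" instead of the current time.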

##
# Allow for extra dependencies.
##

SET(EXTRA_DEPS "")

################################
# End Project Properties
################################

################################
# Set CTest Properties
################################

ENABLE_TESTING()
INCLUDE(CTest)

# Copy the CTest customization file into binary directory, as required.
FILE(COPY ${CMAKE_CURRENT_SOURCE_DIR}/CTestCustom.cmake DESTINATION ${CMAKE_CURRENT_BINARY_DIR})

# Set Memory test program for non-MSVC based builds.
# Assume valgrind for now.
IF(NOT MSVC)
  SET(CTEST_MEMORYCHECK_COMMAND valgrind CACHE STRING "")
ENDIF()

# Set variable to define the build type.
INCLUDE(GenerateExportHeader)

################################
# End CTest Properties
################################

################################
# Compiler and Linker Configuration
################################

##
# Default building shared libraries.
# BUILD_SHARED_LIBS is provided by/used by
# CMake directly.
##
OPTION(BUILD_SHARED_LIBS "Configure netCDF as a shared library." ON)
IF(BUILD_SHARED_LIBS)
  SET(CMAKE_POSITION_INDEPENDENT_CODE ON)
ENDIF()

OPTION(NC_FIND_SHARED_LIBS "Find dynamically-built versions of dependent libraries" ${BUILD_SHARED_LIBS})

##
# We've had a request to allow for non-versioned shared libraries.
# This seems reasonable enough to accommodate. See
# https://github.com/Unidata/netcdf-c/issues/228 for more info.
##
OPTION(ENABLE_SHARED_LIBRARY_VERSION "Encode the library SO version in the file name of the generated library file." ON)

# Set some default linux gcc & apple compiler options for
# debug builds.
IF(CMAKE_COMPILER_IS_GNUCC OR APPLE)
  OPTION(ENABLE_COVERAGE_TESTS "Enable compiler flags needed to perform coverage tests." OFF)
  OPTION(ENABLE_CONVERSION_WARNINGS "Enable warnings for implicit conversion from 64 to 32-bit datatypes." ON)
  OPTION(ENABLE_LARGE_FILE_TESTS "Enable large file tests." OFF)

  # Debugging flags
  SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wall")

  # Check to see if -Wl,--no-undefined is supported.
  CHECK_C_LINKER_FLAG("-Wl,--no-undefined" LIBTOOL_HAS_NO_UNDEFINED)

  IF(LIBTOOL_HAS_NO_UNDEFINED)
    SET(CMAKE_SHARED_LINKER_FLAGS_DEBUG "${CMAKE_SHARED_LINKER_FLAGS_DEBUG} -Wl,--no-undefined")
  ENDIF()
  SET(CMAKE_REQUIRED_FLAGS "${TMP_CMAKE_REQUIRED_FLAGS}")

  # Coverage tests need to have optimization turned off.
  IF(ENABLE_COVERAGE_TESTS)
    SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
    SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage")
    MESSAGE(STATUS "Coverage Tests: On.")
  ENDIF()

  # Warnings for 64-to-32 bit conversions.
  IF(ENABLE_CONVERSION_WARNINGS)
    CHECK_C_COMPILER_FLAG(-Wconversion CC_HAS_WCONVERSION)
    CHECK_C_COMPILER_FLAG(-Wshorten-64-to-32 CC_HAS_SHORTEN_64_32)

    IF(CC_HAS_SHORTEN_64_32)
      SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wshorten-64-to-32")
    ENDIF()
    IF(CC_HAS_WCONVERSION)
      SET(CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG} -Wconversion")
    ENDIF()

  ENDIF(ENABLE_CONVERSION_WARNINGS)

ENDIF(CMAKE_COMPILER_IS_GNUCC OR APPLE)

# End default linux gcc & apple compiler options.

ADD_DEFINITIONS()

# Suppress CRT Warnings.
# Only necessary for Windows
IF(MSVC)
  ADD_DEFINITIONS(-D_CRT_SECURE_NO_WARNINGS)
ENDIF()

#####
# System inspection checks
#####
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/include)
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/oc2)
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}/libsrc)
SET(CMAKE_REQUIRED_INCLUDES ${CMAKE_CURRENT_SOURCE_DIR}/libsrc)

################################
# End Compiler Configuration
################################

##
# Configuration for post-install RPath
# Adapted from http://www.cmake.org/Wiki/CMake_RPATH_handling
##
IF(NOT MSVC AND BUILD_SHARED_LIBS)
  # use, i.e. don't skip the full RPATH for the build tree
  SET(CMAKE_SKIP_BUILD_RPATH FALSE)

  # when building, don't use the install RPATH already
  # (but later on when installing)
  SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)

  if(APPLE)
    set(CMAKE_MACOSX_RPATH ON)
  endif(APPLE)

  # add the automatically determined parts of the RPATH
  # which point to directories outside the build tree to the install RPATH
  SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)

  # the RPATH to be used when installing,
  # but only if it's not a system directory
  LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}" isSystemDir)
  IF("${isSystemDir}" STREQUAL "-1")
    SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR}")
  ENDIF("${isSystemDir}" STREQUAL "-1")

ENDIF()

##
# End configuration for post-install RPath
##
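
# Illustrative outcome of the RPath settings above (paths are examples only):
# with CMAKE_INSTALL_PREFIX=/opt/netcdf and CMAKE_INSTALL_LIBDIR=lib, installed
# binaries receive an install RPATH of /opt/netcdf/lib unless that directory is
# already an implicit system link directory.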

################################
# Option checks
################################

# HDF5 cache variables.
SET(DEFAULT_CHUNK_SIZE 16777216 CACHE STRING "Default Chunk Cache Size.")
SET(DEFAULT_CHUNKS_IN_CACHE 10 CACHE STRING "Default number of chunks in cache.")
SET(CHUNK_CACHE_SIZE 16777216 CACHE STRING "Default Chunk Cache Size.")
SET(CHUNK_CACHE_NELEMS 4133 CACHE STRING "Default maximum number of elements in cache.")
SET(CHUNK_CACHE_PREEMPTION 0.75 CACHE STRING "Default file chunk cache preemption policy for HDF5 files (a number between 0 and 1, inclusive).")
SET(MAX_DEFAULT_CACHE_SIZE 67108864 CACHE STRING "Default maximum cache size.")
SET(NETCDF_LIB_NAME "" CACHE STRING "Default name of the netcdf library.")
SET(TEMP_LARGE "." CACHE STRING "Where to put large temp files if large file tests are run.")
SET(NCPROPERTIES_EXTRA "" CACHE STRING "Specify extra pairs for _NCProperties.")
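
# Example override at configure time (the values are illustrative, not
# recommendations); any of the cache variables above can be set the same way:
#   cmake -DCHUNK_CACHE_SIZE=33554432 -DCHUNK_CACHE_NELEMS=1009 \
#         -DCHUNK_CACHE_PREEMPTION=0.5 /path/to/netcdf-c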

IF(NOT NETCDF_LIB_NAME STREQUAL "")
  SET(MOD_NETCDF_NAME ON)
ENDIF()

# Set the appropriate compiler/architecture for universal OSX binaries.
IF(${CMAKE_SYSTEM_NAME} EQUAL "Darwin")
  SET(CMAKE_OSX_ARCHITECTURES i386;x86_64)
ENDIF(${CMAKE_SYSTEM_NAME} EQUAL "Darwin")

# Macro for replacing '/MD' with '/MT'.
# Used only on Windows; /MD tells VS to use the shared
# CRT libs, /MT tells VS to use the static CRT libs.
#
# Taken From:
# http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F
#
MACRO(specify_static_crt_flag)
  SET(vars
    CMAKE_C_FLAGS
    CMAKE_C_FLAGS_DEBUG
    CMAKE_C_FLAGS_RELEASE
    CMAKE_C_FLAGS_MINSIZEREL
    CMAKE_C_FLAGS_RELWITHDEBINFO
    CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG
    CMAKE_CXX_FLAGS_RELEASE CMAKE_CXX_FLAGS_MINSIZEREL
    CMAKE_CXX_FLAGS_RELWITHDEBINFO)

  FOREACH(flag_var ${vars})
    IF(${flag_var} MATCHES "/MD")
      STRING(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
    ENDIF()
  ENDFOREACH()

  FOREACH(flag_var ${vars})
    MESSAGE(STATUS " '${flag_var}': ${${flag_var}}")
  ENDFOREACH()
  MESSAGE(STATUS "")
ENDMACRO()
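
# Illustrative effect of the macro above (flag values are examples only): a
# cached CMAKE_C_FLAGS_RELEASE of "/MD /O2 /Ob2 /DNDEBUG" becomes
# "/MT /O2 /Ob2 /DNDEBUG", so release builds link the static CRT instead of
# the shared CRT.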

# Option to use Static Runtimes in MSVC
IF(MSVC)
  OPTION(NC_USE_STATIC_CRT "Use static CRT Libraries ('/MT')." OFF)
  IF(NC_USE_STATIC_CRT)
    SET(USE_STATIC_CRT ON)
    specify_static_crt_flag()
  ENDIF()
ENDIF()

# Option to build netCDF Version 2
OPTION (ENABLE_V2_API "Build netCDF Version 2." ON)
SET(BUILD_V2 ${ENABLE_V2_API})
IF(NOT ENABLE_V2_API)
  SET(NO_NETCDF_2 ON)
ELSE(NOT ENABLE_V2_API)
  SET(USE_NETCDF_2 TRUE)
ENDIF(NOT ENABLE_V2_API)

# Option to build utilities
OPTION(BUILD_UTILITIES "Build ncgen, ncgen3, ncdump." ON)

# Option to use MMAP
OPTION(ENABLE_MMAP "Use MMAP." ON)

# Option to use examples.
OPTION(ENABLE_EXAMPLES "Build Examples" ON)

# Option to automatically build netcdf-fortran.
IF(NOT MSVC)
  OPTION(ENABLE_REMOTE_FORTRAN_BOOTSTRAP "Download and build netcdf-fortran automatically (EXPERIMENTAL)." OFF)
  IF(ENABLE_REMOTE_FORTRAN_BOOTSTRAP)
    SET(BUILD_FORTRAN ON)
  ENDIF()
  IF(BUILD_FORTRAN)
    CONFIGURE_FILE("${CMAKE_SOURCE_DIR}/postinstall.sh.in"
      "${CMAKE_BINARY_DIR}/postinstall.sh"
      @ONLY)

    ADD_CUSTOM_TARGET(build-netcdf-fortran
      COMMAND sh -c "${CMAKE_BINARY_DIR}/postinstall.sh -t cmake -a build"
      DEPENDS netcdf
      )

    ADD_CUSTOM_TARGET(install-netcdf-fortran
      COMMAND sh -c "${CMAKE_BINARY_DIR}/postinstall.sh -t cmake -a install"
      DEPENDS build-netcdf-fortran
      )

  ENDIF(BUILD_FORTRAN)
ENDIF()
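
# Example workflow (a sketch; only the two custom targets defined above are
# real, the surrounding sequence is an assumed typical usage):
#   cmake -DENABLE_REMOTE_FORTRAN_BOOTSTRAP=ON /path/to/netcdf-c
#   make && make install
#   make build-netcdf-fortran
#   make install-netcdf-fortran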

###
# Allow the user to specify libraries
# to link against, similar to automake's 'LIBS' variable.
###
SET(NC_EXTRA_DEPS "" CACHE STRING "Additional libraries to link against.")
IF(NC_EXTRA_DEPS)
  STRING(REPLACE " " ";" DEPS_LIST ${NC_EXTRA_DEPS})
  FOREACH(_DEP ${DEPS_LIST})
    STRING(REGEX REPLACE "^-l" "" _LIB ${_DEP})
    FIND_LIBRARY("${_LIB}_DEP" NAMES "${_LIB}" "lib${_LIB}")
    MESSAGE(${${_LIB}_DEP})
    IF("${${_LIB}_DEP}" STREQUAL "${_LIB}_DEP-NOTFOUND")
      MESSAGE(FATAL_ERROR "Error finding ${_LIB}.")
    ELSE()
      MESSAGE(STATUS "Found ${_LIB}: ${${_LIB}_DEP}")
    ENDIF()
    SET(EXTRA_DEPS ${EXTRA_DEPS} "${${_LIB}_DEP}")
  ENDFOREACH()
  MESSAGE("Extra deps: ${EXTRA_DEPS}")
  LIST(REMOVE_DUPLICATES EXTRA_DEPS)
  SET(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${EXTRA_DEPS})
ENDIF()
###
# End user-specified dependent libraries.
###
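
# Example (the library names are purely illustrative): dependencies are given
# as a space-separated list, with or without the "-l" prefix; each entry is
# resolved with FIND_LIBRARY and appended to EXTRA_DEPS.
#   cmake -DNC_EXTRA_DEPS="-lsz -laec" /path/to/netcdf-c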

# Option to use HDF4
OPTION(ENABLE_HDF4 "Build netCDF-4 with HDF4 read capability (HDF4, HDF5 and Zlib required)." OFF)
IF(ENABLE_HDF4)
  SET(USE_HDF4 ON)
  # Check for include files, libraries.

  FIND_PATH(MFHDF_H_INCLUDE_DIR mfhdf.h)
  IF(NOT MFHDF_H_INCLUDE_DIR)
    MESSAGE(FATAL_ERROR "HDF4 Support specified, cannot find file mfhdf.h")
  ELSE()
    INCLUDE_DIRECTORIES(${MFHDF_H_INCLUDE_DIR})
  ENDIF()

  FIND_LIBRARY(HDF4_DF_LIB NAMES df libdf hdf)
  IF(NOT HDF4_DF_LIB)
    MESSAGE(FATAL_ERROR "Can't find or link to the hdf4 df library.")
  ENDIF()

  FIND_LIBRARY(HDF4_MFHDF_LIB NAMES mfhdf libmfhdf)
  IF(NOT HDF4_MFHDF_LIB)
    MESSAGE(FATAL_ERROR "Can't find or link to the hdf4 mfhdf library.")
  ENDIF()

  SET(HAVE_LIBMFHDF TRUE)

  SET(HDF4_LIBRARIES ${HDF4_DF_LIB} ${HDF4_MFHDF_LIB})
  # End include files, libraries.
  MESSAGE(STATUS "HDF4 libraries: ${HDF4_DF_LIB}, ${HDF4_MFHDF_LIB}")

  MESSAGE(STATUS "Seeking HDF4 jpeg dependency.")

  # Look for the jpeglib.h header file.
  FIND_PATH(JPEGLIB_H_INCLUDE_DIR jpeglib.h)
  IF(NOT JPEGLIB_H_INCLUDE_DIR)
    MESSAGE(FATAL_ERROR "HDF4 Support enabled but cannot find jpeglib.h")
  ELSE()
    SET(HAVE_JPEGLIB_H ON CACHE BOOL "")
    SET(HAVE_LIBJPEG TRUE)
    INCLUDE_DIRECTORIES(${JPEGLIB_H_INCLUDE_DIR})
  ENDIF()

  FIND_LIBRARY(JPEG_LIB NAMES jpeg libjpeg)
  IF(NOT JPEG_LIB)
    MESSAGE(FATAL_ERROR "HDF4 Support enabled but cannot find libjpeg")
  ENDIF()
  SET(HDF4_LIBRARIES ${JPEG_LIB} ${HDF4_LIBRARIES})
  MESSAGE(STATUS "Found JPEG libraries: ${JPEG_LIB}")

  # Option to enable HDF4 file tests.
  OPTION(ENABLE_HDF4_FILE_TESTS "Run HDF4 file tests. This fetches sample HDF4 files from the Unidata ftp site to test with (requires curl)." ON)
  IF(ENABLE_HDF4_FILE_TESTS)
    FIND_PROGRAM(PROG_CURL NAMES curl)
    IF(PROG_CURL)
      SET(USE_HDF4_FILE_TESTS ON)
    ELSE()
      MESSAGE(STATUS "Unable to locate 'curl'. Disabling hdf4 file tests.")
      SET(USE_HDF4_FILE_TESTS OFF)
    ENDIF()
  ENDIF()
ENDIF()

# Option to Build DLL
IF(WIN32)
  OPTION(ENABLE_DLL "Build a Windows DLL." ${BUILD_SHARED_LIBS})
  IF(ENABLE_DLL)
    SET(BUILD_DLL ON CACHE BOOL "")
    ADD_DEFINITIONS(-DDLL_NETCDF)
    ADD_DEFINITIONS(-DDLL_EXPORT)
  ENDIF()
ENDIF()

# Did the user specify a default minimum blocksize for posixio?
SET(NCIO_MINBLOCKSIZE 256 CACHE STRING "Minimum I/O Blocksize for netCDF classic and 64-bit offset format files.")

# Build netCDF4
OPTION(ENABLE_NETCDF_4 "Enable netCDF-4" ON)
IF(ENABLE_NETCDF_4)
  SET(USE_NETCDF4 ON CACHE BOOL "")
  SET(ENABLE_NETCDF_4 ON CACHE BOOL "")
  SET(ENABLE_NETCDF4 ON CACHE BOOL "")
ELSE()
  SET(USE_HDF4_FILE_TESTS OFF)
  SET(USE_HDF4 OFF)
  SET(ENABLE_HDF4_FILE_TESTS OFF)
  SET(ENABLE_HDF4 OFF)
ENDIF()

# Option for logging; only valid for netCDF-4.
OPTION(ENABLE_LOGGING "Enable Logging." OFF)
IF(NOT ENABLE_NETCDF_4)
  SET(ENABLE_LOGGING OFF)
ENDIF()
IF(ENABLE_LOGGING)
  ADD_DEFINITIONS(-DLOGGING)
  ADD_DEFINITIONS(-DENABLE_SET_LOG_LEVEL)
  SET(LOGGING ON)
  SET(ENABLE_SET_LOG_LEVEL ON)
ENDIF()
OPTION(ENABLE_SET_LOG_LEVEL_FUNC "Enable definition of nc_set_log_level()." ON)
IF(ENABLE_NETCDF_4 AND NOT ENABLE_LOGGING AND ENABLE_SET_LOG_LEVEL_FUNC)
  ADD_DEFINITIONS(-DENABLE_SET_LOG_LEVEL)
  SET(ENABLE_SET_LOG_LEVEL ON)
ENDIF()

# Option to allow for strict null file padding.
# See https://github.com/Unidata/netcdf-c/issues/657 for more information
OPTION(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING "Enable strict null byte header padding." OFF)

IF(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING)
  SET(USE_STRICT_NULL_BYTE_HEADER_PADDING ON CACHE BOOL "")
ENDIF(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING)

# Option for building RPC
OPTION(ENABLE_RPC "Enable RPC Client and Server." OFF)
IF(ENABLE_RPC)
  SET(BUILD_RPC ON CACHE BOOL "")
ENDIF()

##
# Option to Enable HDF5
#
# The HDF5 cmake variables differ between platforms (linux/osx and Windows),
# as well as between HDF5 versions. As a result, this section is a bit convoluted.
#
# Note that the behavior seems much more stable across HDF5 versions under linux,
# so we do not have to do as much version-based tweaking.
#
# At the end of it, we should have the following defined:
#
# * HDF5_C_LIBRARY
# * HDF5_HL_LIBRARY
# * HDF5_LIBRARIES
# * HDF5_INCLUDE_DIR
##
OPTION(USE_HDF5 "Use HDF5." ${ENABLE_NETCDF_4})
IF(USE_HDF5 OR ENABLE_NETCDF_4)
  SET(USE_HDF5 ON)
  SET(USE_NETCDF4 ON)
  ##
  # Accommodate developers who have hdf5 libraries and
  # headers on their system, but do not have the hdf5
  # .cmake files. If this is the case, they should
  # specify HDF5_C_LIBRARY, HDF5_HL_LIBRARY, and HDF5_INCLUDE_DIR manually.
  ##
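  # Example manual specification (the paths are illustrative only):
  #   cmake -DHDF5_C_LIBRARY=/opt/hdf5/lib/libhdf5.so \
  #         -DHDF5_HL_LIBRARY=/opt/hdf5/lib/libhdf5_hl.so \
  #         -DHDF5_INCLUDE_DIR=/opt/hdf5/include /path/to/netcdf-c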

  IF(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR)
    SET(HDF5_LIBRARIES ${HDF5_C_LIBRARY} ${HDF5_HL_LIBRARY})
    SET(HDF5_C_LIBRARIES ${HDF5_C_LIBRARY})
    SET(HDF5_C_LIBRARY_hdf5 ${HDF5_C_LIBRARY})
    SET(HDF5_HL_LIBRARIES ${HDF5_HL_LIBRARY})
    INCLUDE_DIRECTORIES(${HDF5_INCLUDE_DIR})
    MESSAGE(STATUS "Using HDF5 C Library: ${HDF5_C_LIBRARY}")
    MESSAGE(STATUS "Using HDF5 HL Library: ${HDF5_HL_LIBRARY}")
  ELSE(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR) # We are seeking out HDF5 with Find Package.
    ###
    # For now we assume that if we are building netcdf
    # as a shared library, we will use hdf5 as a shared
    # library. If we are building netcdf statically,
    # we will use a static library. This can be toggled
    # by explicitly modifying NC_FIND_SHARED_LIBS.
    ##
    IF(NC_FIND_SHARED_LIBS)
      SET(NC_HDF5_LINK_TYPE "shared")
      SET(NC_HDF5_LINK_TYPE_UPPER "SHARED")
      ADD_DEFINITIONS(-DH5_BUILT_AS_DYNAMIC_LIB)
    ELSE(NC_FIND_SHARED_LIBS)
      SET(NC_HDF5_LINK_TYPE "static")
      SET(NC_HDF5_LINK_TYPE_UPPER "STATIC")
      ADD_DEFINITIONS(-DH5_BUILT_AS_STATIC_LIB)
    ENDIF(NC_FIND_SHARED_LIBS)

    #####
    # First, find the C and HL libraries.
    #
    # This has been updated to reflect what is in the hdf5
    # examples, even though the previous version of what we
    # had worked.
    #####
    IF(MSVC)
      SET(SEARCH_PACKAGE_NAME ${HDF5_PACKAGE_NAME})
      FIND_PACKAGE(HDF5 NAMES ${SEARCH_PACKAGE_NAME} COMPONENTS C HL CONFIG REQUIRED ${NC_HDF5_LINK_TYPE})
    ELSE(MSVC)
      FIND_PACKAGE(HDF5 COMPONENTS C HL REQUIRED)
    ENDIF(MSVC)

    ##
    # Next, check the HDF5 version. This will inform which
    # HDF5 variables we need to munge.
    ##

    ##
    # Assert HDF5 version meets minimum required version.
    ##
    SET(HDF5_VERSION_REQUIRED 1.8.10)

    IF(HDF5_VERSION_STRING AND NOT HDF5_VERSION)
      SET(HDF5_VERSION ${HDF5_VERSION_STRING})
    ENDIF()

    IF("${HDF5_VERSION}" STREQUAL "")
      MESSAGE(STATUS "Unable to determine hdf5 version. NetCDF requires at least version ${HDF5_VERSION_REQUIRED}")
    ELSE()
      IF(${HDF5_VERSION} VERSION_LESS ${HDF5_VERSION_REQUIRED})
        MESSAGE(FATAL_ERROR
          "netCDF requires at least HDF5 ${HDF5_VERSION_REQUIRED}. Found ${HDF5_VERSION}.")
      ELSE()
        MESSAGE(STATUS "Found HDF5 libraries version ${HDF5_VERSION}")
      ENDIF()
    ENDIF()

    ##
    # Include the HDF5 include directory.
    ##
    IF(HDF5_INCLUDE_DIRS AND NOT HDF5_INCLUDE_DIR)
      SET(HDF5_INCLUDE_DIR ${HDF5_INCLUDE_DIRS})
    ENDIF()
    MESSAGE(STATUS "Using HDF5 include dir: ${HDF5_INCLUDE_DIR}")
    INCLUDE_DIRECTORIES(${HDF5_INCLUDE_DIR})

    ###
    # This is the block where we figure out what the appropriate
    # variables are, and we ensure that we end up with
    # HDF5_C_LIBRARY, HDF5_HL_LIBRARY and HDF5_LIBRARIES.
    ###
    IF(MSVC)
      ##
      # HDF5 1.8.15 defined HDF5_LIBRARIES.
      ##
      IF(${HDF5_VERSION} VERSION_LESS "1.8.16")
        SET(HDF5_C_LIBRARY hdf5)
        SET(HDF5_C_LIBRARY_hdf5 hdf5)
      ENDIF(${HDF5_VERSION} VERSION_LESS "1.8.16")

      IF(${HDF5_VERSION} VERSION_GREATER "1.8.15")
        IF(NOT HDF5_LIBRARIES AND HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY AND HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY)
          SET(HDF5_C_LIBRARY ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY})
          SET(HDF5_C_LIBRARY_hdf5 ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY})
          SET(HDF5_HL_LIBRARY ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY})

          SET(HDF5_LIBRARIES ${HDF5_C_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY} ${HDF5_HL_${NC_HDF5_LINK_TYPE_UPPER}_LIBRARY})
        ENDIF()
      ENDIF(${HDF5_VERSION} VERSION_GREATER "1.8.15")

    ELSE(MSVC)

      # Depending on the install, either HDF5_hdf5_LIBRARY or
      # HDF5_C_LIBRARIES may be defined. We must check for either.
      IF(HDF5_C_LIBRARIES AND NOT HDF5_hdf5_LIBRARY)
        SET(HDF5_hdf5_LIBRARY ${HDF5_C_LIBRARIES})
      ENDIF()

    ENDIF(MSVC)
    IF(NOT HDF5_C_LIBRARY)
      SET(HDF5_C_LIBRARY hdf5)
    ENDIF()
  ENDIF(HDF5_C_LIBRARY AND HDF5_HL_LIBRARY AND HDF5_INCLUDE_DIR)

  FIND_PACKAGE(Threads)

  # There is a missing case in the above code so default it
  IF(NOT HDF5_C_LIBRARY_HDF5 OR "${HDF5_C_LIBRARY_hdf5}" STREQUAL "" )
    SET(HDF5_C_LIBRARY_hdf5 "${HDF5_C_LIBRARY}")
  ENDIF()

  # Find out if HDF5 was built with parallel support.
  # Do that by checking for the targets H5Pget_fapl_mpiposix and
  # H5Pget_fapl_mpio in ${HDF5_LIB}.

  # H5Pset_fapl_mpiposix and H5Pget_fapl_mpiposix have been removed since HDF5 1.8.12.
  # Use H5Pset_fapl_mpio and H5Pget_fapl_mpio, instead.
  # CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pget_fapl_mpiposix "" HDF5_IS_PARALLEL_MPIPOSIX)

  CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pget_fapl_mpio "" HDF5_IS_PARALLEL_MPIO)
  IF(HDF5_IS_PARALLEL_MPIO)
    SET(HDF5_PARALLEL ON)
  ELSE()
    SET(HDF5_PARALLEL OFF)
  ENDIF()

  # Check to see if the HDF5 library has the collective metadata APIs (HDF5 >= 1.10.0).
  CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pset_all_coll_metadata_ops "" HDF5_HAS_COLL_METADATA_OPS)

  # Check to see if H5Z_SZIP exists in HDF5_Libraries. If so, we must use szip.
  CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Z_SZIP "" USE_SZIP)
  IF(USE_SZIP)
    FIND_LIBRARY(SZIP NAMES szip sz)
    IF(SZIP)
      SET(HAVE_H5Z_SZIP 1)
      SET(SZIP_LIBRARY ${SZIP})
      SET(CMAKE_REQUIRED_LIBRARIES ${CMAKE_REQUIRED_LIBRARIES} ${SZIP})
      MESSAGE(STATUS "HDF5 has szip.")
    ELSE()
      MESSAGE(FATAL_ERROR "HDF5 Requires SZIP, but cannot find libszip or libsz.")
    ENDIF()
  ENDIF()

  CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Pset_libver_bounds "" HAVE_H5PSET_LIBVER_BOUNDS)
  CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5free_memory "" HAVE_H5FREE_MEMORY)
  CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5allocate_memory "" HAVE_H5ALLOCATE_MEMORY)
  CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5resize_memory "" HAVE_H5RESIZE_MEMORY)

  IF(HDF5_PARALLEL)
    SET(HDF5_CC h5pcc)
  ELSE()
    SET(HDF5_CC h5cc)
  ENDIF()

  # Check to see if this is hdf5-1.10.3 or later.
  CHECK_LIBRARY_EXISTS(${HDF5_C_LIBRARY_hdf5} H5Dread_chunk "" HDF5_SUPPORTS_PAR_FILTERS)
  IF (HDF5_SUPPORTS_PAR_FILTERS)
    SET(HDF5_HAS_PAR_FILTERS TRUE CACHE BOOL "")
    SET(HAS_PAR_FILTERS yes CACHE STRING "")
  ELSE()
    SET(HDF5_HAS_PAR_FILTERS FALSE CACHE BOOL "")
    SET(HAS_PAR_FILTERS no CACHE STRING "")
  ENDIF()

  SET(H5_USE_16_API 1)
  OPTION(NC_ENABLE_HDF_16_API "Enable HDF5 1.6.x Compatibility(Required)" ON)
  IF(NOT NC_ENABLE_HDF_16_API)
    SET(H5_USE_16_API 0)
  ENDIF()

  FIND_PATH(HAVE_HDF5_H hdf5.h PATHS ${HDF5_INCLUDE_DIR} NO_DEFAULT_PATH)
  FIND_PATH(HAVE_HDF5_H hdf5.h)
  IF(NOT HAVE_HDF5_H)
    MESSAGE(FATAL_ERROR "Compiling a test with hdf5 failed. Either hdf5.h cannot be found, or the log messages should be checked for another reason.")
  ELSE(NOT HAVE_HDF5_H)
    INCLUDE_DIRECTORIES(${HAVE_HDF5_H})
  ENDIF(NOT HAVE_HDF5_H)

  # Check to ensure that HDF5 was built with zlib.
  set (CMAKE_REQUIRED_INCLUDES ${HAVE_HDF5_H})
  CHECK_C_SOURCE_COMPILES("#include <H5public.h>
#if !H5_HAVE_ZLIB_H
#error
#endif
int main() {
int x = 1;}" HAVE_HDF5_ZLIB)
  IF(NOT HAVE_HDF5_ZLIB)
    MESSAGE(FATAL_ERROR "HDF5 was built without zlib. Rebuild HDF5 with zlib.")
  ENDIF()

  # Option to include the HDF5 High Level header file (hdf5_hl.h) in case we are not doing a make install.
  INCLUDE_DIRECTORIES(${HDF5_HL_INCLUDE_DIR})

ENDIF(USE_HDF5 OR ENABLE_NETCDF_4)

# See if we have libcurl
FIND_PACKAGE(CURL)
ADD_DEFINITIONS(-DCURL_STATICLIB=1)
INCLUDE_DIRECTORIES(${CURL_INCLUDE_DIRS})

# Check to see if CURLOPT_USERNAME is defined.
# It is present starting version 7.19.1.
CHECK_C_SOURCE_COMPILES("
#include <curl/curl.h>
int main() {int x = CURLOPT_USERNAME;}" HAVE_CURLOPT_USERNAME)

# Check to see if CURLOPT_PASSWORD is defined.
# It is present starting version 7.19.1.
CHECK_C_SOURCE_COMPILES("
#include <curl/curl.h>
int main() {int x = CURLOPT_PASSWORD;}" HAVE_CURLOPT_PASSWORD)

# Check to see if CURLOPT_KEYPASSWD is defined.
# It is present starting version 7.16.4.
CHECK_C_SOURCE_COMPILES("
#include <curl/curl.h>
int main() {int x = CURLOPT_KEYPASSWD;}" HAVE_CURLOPT_KEYPASSWD)

# Check to see if CURLINFO_RESPONSE_CODE is defined.
# It showed up in curl 7.10.7.
CHECK_C_SOURCE_COMPILES("
#include <curl/curl.h>
int main() {int x = CURLINFO_RESPONSE_CODE;}" HAVE_CURLINFO_RESPONSE_CODE)

# Check to see if CURLINFO_HTTP_CONNECTCODE is defined.
# It showed up in curl 7.10.7.
CHECK_C_SOURCE_COMPILES("
#include <curl/curl.h>
int main() {int x = CURLINFO_HTTP_CONNECTCODE;}" HAVE_CURLINFO_HTTP_CONNECTCODE)

# Check to see if CURLOPT_BUFFERSIZE is defined.
# It is present starting version 7.59
CHECK_C_SOURCE_COMPILES("
#include <curl/curl.h>
int main() {int x = CURLOPT_BUFFERSIZE;}" HAVE_CURLOPT_BUFFERSIZE)

# Check to see if CURLOPT_TCP_KEEPALIVE is defined.
# It is present starting version 7.25
CHECK_C_SOURCE_COMPILES("
#include <curl/curl.h>
int main() {int x = CURLOPT_TCP_KEEPALIVE;}" HAVE_CURLOPT_KEEPALIVE)

# Check to see if we have libcurl 7.66 or later
CHECK_C_SOURCE_COMPILES("
#include <curl/curl.h>
int main() {
#if (LIBCURL_VERSION_MAJOR*1000 + LIBCURL_VERSION_MINOR < 7066)
choke me;
#endif
}" HAVE_LIBCURL_766)

# Option to Build DAP2+DAP4 Clients
OPTION(ENABLE_DAP "Enable DAP2 and DAP4 Client." ON)
IF(ENABLE_DAP)
  SET(USE_DAP ON CACHE BOOL "")
  SET(ENABLE_DAP2 ON CACHE BOOL "")

  IF(ENABLE_NETCDF_4)
    SET(ENABLE_DAP4 ON CACHE BOOL "")
  ELSE(ENABLE_NETCDF_4)
    SET(ENABLE_DAP4 OFF CACHE BOOL "")
  ENDIF(ENABLE_NETCDF_4)

ELSE()
  SET(ENABLE_DAP2 OFF)
  SET(ENABLE_DAP4 OFF)
ENDIF()

# Option to support byte-range reading of remote datasets
OPTION(ENABLE_BYTERANGE "Enable byte-range access to remote datasets." OFF)

IF(NOT CURL_LIBRARIES AND NOT CURL_LIBRARY)
  IF(ENABLE_BYTERANGE)
    MESSAGE(FATAL_ERROR "Byte-range support specified, CURL libraries are not found.")
  ENDIF()

  IF(ENABLE_DAP2 OR ENABLE_DAP4)
    MESSAGE(FATAL_ERROR "DAP support specified, CURL libraries are not found.")
  ENDIF()
ENDIF()

# Check for the math library so it can be explicitly linked.
IF(NOT WIN32)
  FIND_LIBRARY(HAVE_LIBM NAMES math m libm)
  MESSAGE(STATUS "Found Math library: ${HAVE_LIBM}")
  IF(NOT HAVE_LIBM)
    MESSAGE(FATAL_ERROR "Unable to find the math library.")
  ENDIF()
ENDIF()

# Option to Enable DAP long tests, remote tests.
OPTION(ENABLE_DAP_LONG_TESTS "Enable DAP long tests." OFF)
OPTION(ENABLE_DAP_REMOTE_TESTS "Enable DAP remote tests." ON)
SET(REMOTETESTSERVERS "remotetest.unidata.ucar.edu" CACHE STRING "test servers to use for remote test")
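
# Example override (the host name below is hypothetical): point the remote DAP
# tests at an alternate test server.
#   cmake -DENABLE_DAP_REMOTE_TESTS=ON \
#         -DREMOTETESTSERVERS="remotetest.example.org" /path/to/netcdf-c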
|
2014-03-08 04:58:09 +08:00
|
|
|
|
2013-01-16 06:43:09 +08:00
|
|
|
# Enable some developer-only tests
|
2015-10-09 01:02:31 +08:00
|
|
|
OPTION(ENABLE_EXTRA_TESTS "Enable Extra tests. Some may not work because of known issues. Developers only." OFF)
|
2013-01-16 06:43:09 +08:00
|
|
|
IF(ENABLE_EXTRA_TESTS)
|
2014-03-07 23:46:26 +08:00
|
|
|
SET(EXTRA_TESTS ON)
|
2013-01-16 06:43:09 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2012-10-03 04:56:46 +08:00
|
|
|
# Option to use bundled XGetopt in place of getopt(). This is mostly useful
|
2020-05-19 09:36:28 +08:00
|
|
|
# for MSVC builds. If not building utilities or some tests,
|
|
|
|
# getopt() isn't required at all.
|
2012-10-03 04:56:46 +08:00
|
|
|
IF(MSVC)
|
2014-03-07 23:46:26 +08:00
|
|
|
OPTION(ENABLE_XGETOPT "Enable bundled XGetOpt instead of external getopt()." ON)
|
|
|
|
IF(ENABLE_XGETOPT)
|
|
|
|
SET(USE_X_GETOPT ON CACHE BOOL "")
|
2020-05-19 09:36:28 +08:00
|
|
|
# Copy XGetopt.c to everywhere it is needed. Avoids
|
|
|
|
# inconsistent code
|
|
|
|
FILE(COPY ${netCDF_SOURCE_DIR}/libsrc/XGetopt.c
|
|
|
|
DESTINATION ${netCDF_BINARY_DIR}/ncgen3/)
|
|
|
|
FILE(COPY ${netCDF_SOURCE_DIR}/libsrc/XGetopt.c
|
|
|
|
DESTINATION ${netCDF_BINARY_DIR}/ncgen/)
|
|
|
|
FILE(COPY ${netCDF_SOURCE_DIR}/libsrc/XGetopt.c
|
|
|
|
DESTINATION ${netCDF_BINARY_DIR}/ncdump/)
|
2020-06-29 08:02:47 +08:00
|
|
|
FILE(COPY ${netCDF_SOURCE_DIR}/libsrc/XGetopt.c
|
|
|
|
DESTINATION ${netCDF_BINARY_DIR}/nczarr_test/)
|
2014-03-07 23:46:26 +08:00
|
|
|
ENDIF()
|
2012-10-03 04:56:46 +08:00
|
|
|
ENDIF()
|
|
|
|
|
|
|
|
SET(MATH "")
|
|
|
|
IF(NOT WIN32)
|
2015-10-10 03:06:32 +08:00
|
|
|
|
|
|
|
# STDIO instead of posixio.
|
|
|
|
OPTION(ENABLE_STDIO "If true, use stdio instead of posixio (ex. on the Cray)" OFF)
|
|
|
|
IF(ENABLE_STDIO)
|
|
|
|
SET(USE_STDIO ON CACHE BOOL "")
|
|
|
|
ENDIF()
|
|
|
|
|
2014-03-07 23:46:26 +08:00
|
|
|
# FFIO instead of posixio.
|
|
|
|
OPTION(ENABLE_FFIO "If true, use ffio instead of posixio" OFF)
|
|
|
|
IF(ENABLE_FFIO)
|
|
|
|
SET(USE_FFIO ON CACHE BOOL "")
|
|
|
|
ENDIF()
|
2012-10-03 04:56:46 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2020-06-29 08:02:47 +08:00
|
|
|
# Option to Build NCZarr clients
|
|
|
|
OPTION(ENABLE_NCZARR "Enable NCZarr Client." OFF)
|
|
|
|
OPTION(ENABLE_S3_SDK "Enable NCZarr S3 support." ON)
|
|
|
|
|
|
|
|
# See if aws-s3-sdk is available
|
|
|
|
FIND_LIBRARY(AWS_CPP_SDK_CORE_LIB NAMES aws-cpp-sdk-core)
|
|
|
|
FIND_LIBRARY(AWS_CPP_SDK_S3_LIB NAMES aws-cpp-sdk-s3)
|
|
|
|
FIND_LIBRARY(AWS_C_COMMON_LIB NAMES aws-c-common)
|
|
|
|
|
|
|
|
IF(NOT AWS_CPP_SDK_CORE_LIB)
|
|
|
|
SET(AWS_CPP_SDK_CORE_LIB OFF)
|
|
|
|
ENDIF()
|
|
|
|
IF(NOT AWS_CPP_SDK_S3_LIB)
|
|
|
|
SET(AWS_CPP_SDK_S3_LIB OFF)
|
|
|
|
ENDIF()
|
|
|
|
IF(NOT AWS_C_COMMON_LIB)
|
|
|
|
SET(AWS_C_COMMON_LIB OFF)
|
|
|
|
ENDIF()
|
|
|
|
|
|
|
|
IF(ENABLE_S3_SDK)
|
|
|
|
IF(NOT AWS_CPP_SDK_CORE_LIB
|
|
|
|
OR NOT AWS_CPP_SDK_S3_LIB
|
|
|
|
OR NOT AWS_C_COMMON_LIB)
|
|
|
|
MESSAGE(WARNING "One of aws-c-common, aws-cpp-sdk-s3, aws-cpp-sdk-core libraries is not available: Turning off S3 support")
|
|
|
|
SET(ENABLE_S3_SDK OFF)
|
|
|
|
ELSE()
|
|
|
|
MESSAGE(STATUS "Found aws sdk libraries")
|
|
|
|
ENDIF()
|
|
|
|
ENDIF()
|
|
|
|
|
|
|
|
IF(ENABLE_S3_SDK)
|
|
|
|
IF(CMAKE_COMPILER_IS_GNUCC OR APPLE)
|
|
|
|
# SET(LDFLAGS "${LDFLAGS} ${AWS_CPP_SDK_CORE_LIB} ${AWS_CPP_SDK_S3_LIB} ${AWS_C_COMMON_LIB}")
|
|
|
|
# SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -lstdc++")
|
|
|
|
ENDIF(CMAKE_COMPILER_IS_GNUCC OR APPLE)
|
|
|
|
ENDIF(ENABLE_S3_SDK)
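# A hedged how-to sketch (kept as a comment, not executed): assuming the
# aws-cpp-sdk libraries are installed where FIND_LIBRARY can locate them,
# NCZarr with S3 support would be requested at configure time with:
#   cmake .. -DENABLE_NCZARR=ON -DENABLE_S3_SDK=ON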
|
|
|
|
|
2015-04-30 01:04:26 +08:00
|
|
|
##
|
2012-10-03 04:56:46 +08:00
|
|
|
# Enable Tests
|
2015-04-30 01:04:26 +08:00
|
|
|
##
|
2014-04-22 06:11:48 +08:00
|
|
|
OPTION(ENABLE_TESTS "Enable basic tests, run with 'make test'." ON)
|
2014-03-07 23:40:50 +08:00
|
|
|
IF(ENABLE_TESTS)
|
2014-04-22 01:15:33 +08:00
|
|
|
SET(BUILD_TESTSETS ON CACHE BOOL "")
|
|
|
|
# Options for CTest-based tests, dashboards.
|
|
|
|
SET(NC_CTEST_PROJECT_NAME "netcdf-c" CACHE STRING "Project Name for CTest-based testing purposes.")
|
2019-11-19 01:58:44 +08:00
|
|
|
SET(NC_CTEST_DROP_SITE "cdash.unidata.ucar.edu:443" CACHE STRING "Dashboard location for CTest-based testing purposes.")
|
2014-04-22 01:15:33 +08:00
|
|
|
SET(NC_CTEST_DROP_LOC_PREFIX "" CACHE STRING "Prefix for Dashboard location on remote server when using CTest-based testing.")
|
2019-11-19 03:07:01 +08:00
|
|
|
SET(SUBMIT_URL "https://cdash.unidata.ucar.edu:443")
|
2015-02-03 06:14:22 +08:00
|
|
|
FIND_PROGRAM(HOSTNAME_CMD NAMES hostname)
|
2015-02-05 00:11:27 +08:00
|
|
|
IF(NOT MSVC)
|
|
|
|
SET(HOSTNAME_ARG "-s")
|
2015-02-13 04:16:54 +08:00
|
|
|
ENDIF()
|
2015-02-03 06:14:22 +08:00
|
|
|
IF(HOSTNAME_CMD)
|
2015-02-05 00:11:27 +08:00
|
|
|
EXEC_PROGRAM(${HOSTNAME_CMD} ARGS "${HOSTNAME_ARG}" OUTPUT_VARIABLE HOSTNAME)
|
2015-02-03 06:14:22 +08:00
|
|
|
SET(NC_CTEST_SITE "${HOSTNAME}" CACHE STRING "Hostname of test machine.")
|
|
|
|
ENDIF()
|
|
|
|
|
|
|
|
IF(NC_CTEST_SITE)
|
|
|
|
SET(SITE "${NC_CTEST_SITE}" CACHE STRING "")
|
|
|
|
ENDIF()
|
|
|
|
|
2015-04-30 01:04:26 +08:00
|
|
|
###
|
|
|
|
# This option dictates whether or not to turn on
|
|
|
|
# tests which are known to fail. This is not the
|
|
|
|
# same thing as an 'expected failure'. Rather, these
|
|
|
|
# are tests that will need to be fixed eventually.
|
|
|
|
#
|
2019-09-18 10:27:43 +08:00
|
|
|
# By placing them here, we can occasionally turn this
|
2015-04-30 01:04:26 +08:00
|
|
|
# flag on and see if any known failures have been
|
|
|
|
# fixed in the course of code improvement/other bug
|
|
|
|
# fixes.
|
|
|
|
#
|
|
|
|
# To use this, simply add it as a fencepost around tests
|
|
|
|
# which are known to fail.
|
|
|
|
###
|
|
|
|
|
|
|
|
OPTION(ENABLE_FAILING_TESTS "Run tests which are known to fail, check to see if any have been fixed." OFF)
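# A hedged illustration of the fencepost described above (kept as a comment;
# the test name is hypothetical and this is not an actual test registration):
#   IF(ENABLE_FAILING_TESTS)
#     add_bin_test(nc_test tst_known_failure)
#   ENDIF(ENABLE_FAILING_TESTS)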
|
|
|
|
|
2020-06-29 08:02:47 +08:00
|
|
|
# There is a special class of tests that currently fail because of memory leaks.
|
|
|
|
# As with failing tests, they can be enabled so that developers can work on fixing them.
|
|
|
|
OPTION(ENABLE_UNFIXED_MEMORY_LEAKS "Run tests with unfixed memory leaks" OFF)
|
|
|
|
|
2019-08-22 04:50:09 +08:00
|
|
|
###
|
2019-11-16 22:20:45 +08:00
|
|
|
# Option to turn on unit testing. See
|
|
|
|
# https://github.com/Unidata/netcdf-c/pull/1472 for more
|
|
|
|
# information. Currently (August 21, 2019): Will not work with
|
|
|
|
# Visual Studio. The unit tests are for internal netCDF functions,
|
|
|
|
# so we don't want to make them external, which would be required to
|
|
|
|
# run on Windows.
|
2019-08-22 04:50:09 +08:00
|
|
|
###
|
|
|
|
IF(NOT MSVC)
|
|
|
|
OPTION(ENABLE_UNIT_TESTS "Run Unit Tests." ON)
|
|
|
|
ENDIF(NOT MSVC)
|
2015-04-30 01:04:26 +08:00
|
|
|
###
|
|
|
|
# End known-failures.
|
|
|
|
###
|
|
|
|
MARK_AS_ADVANCED(ENABLE_FAILING_TESTS)
|
2012-12-21 05:50:45 +08:00
|
|
|
ENDIF()
|
2012-10-03 04:56:46 +08:00
|
|
|
|
2016-09-28 01:20:41 +08:00
|
|
|
###
|
|
|
|
# Option to enable extreme numbers during testing.
|
|
|
|
###
|
|
|
|
OPTION(ENABLE_EXTREME_NUMBERS "Enable extreme numbers during testing, such as MAX_INT-1" ON)
|
|
|
|
IF(ENABLE_EXTREME_NUMBERS)
|
|
|
|
SET(USE_EXTREME_NUMBERS ON)
|
|
|
|
ENDIF()
|
|
|
|
|
2012-10-03 04:56:46 +08:00
|
|
|
# Enable Large file tests
|
|
|
|
IF(ENABLE_LARGE_FILE_TESTS)
|
2012-12-21 05:50:45 +08:00
|
|
|
SET(LARGE_FILE_TESTS ON)
|
2012-10-03 04:56:46 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2018-07-11 00:58:52 +08:00
|
|
|
OPTION(ENABLE_METADATA_PERF_TESTS "Enable test of metadata performance." OFF)
|
2018-07-10 23:25:09 +08:00
|
|
|
IF(ENABLE_METADATA_PERF_TESTS)
|
|
|
|
SET(ENABLE_METADATA_PERF ON)
|
|
|
|
ENDIF()
|
|
|
|
|
2012-12-06 02:35:42 +08:00
|
|
|
# Location for large file tests.
|
|
|
|
SET(TEMP_LARGE "." CACHE STRING "Location to store large file tests.")
|
|
|
|
|
2014-04-22 06:11:48 +08:00
|
|
|
OPTION(ENABLE_FSYNC "Enable experimental fsync code." OFF)
|
2012-10-03 04:56:46 +08:00
|
|
|
IF(ENABLE_FSYNC)
|
2014-03-07 23:46:26 +08:00
|
|
|
SET(USE_FSYNC ON)
|
2012-10-03 04:56:46 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2014-06-19 04:33:49 +08:00
|
|
|
# Temporary
|
2014-04-10 05:20:16 +08:00
|
|
|
OPTION (ENABLE_JNA "Enable jna bug fix code." OFF)
|
|
|
|
IF(ENABLE_JNA)
|
|
|
|
SET(JNA ON)
|
|
|
|
ENDIF()
|
|
|
|
|
2012-12-21 05:50:45 +08:00
|
|
|
# Linux specific large file support flags.
|
|
|
|
# Modelled after check in CMakeLists.txt for hdf5.
|
2013-10-17 06:44:11 +08:00
|
|
|
OPTION(ENABLE_LARGE_FILE_SUPPORT "Enable large file support." ON)
|
|
|
|
IF(ENABLE_LARGE_FILE_SUPPORT)
|
|
|
|
IF(MSVC)
|
|
|
|
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /LARGEADDRESSAWARE")
|
|
|
|
SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /LARGEADDRESSAWARE")
|
|
|
|
SET(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /LARGEADDRESSAWARE")
|
|
|
|
ELSE()
|
2017-07-14 07:15:56 +08:00
|
|
|
SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_LARGEFILE64_SOURCE -D_FILE_OFFSET_BITS=64")
|
2012-12-21 05:50:45 +08:00
|
|
|
ENDIF()
|
|
|
|
ENDIF()
|
|
|
|
|
2014-04-22 06:11:48 +08:00
|
|
|
OPTION(ENABLE_EXAMPLE_TESTS "Run extra example tests. Requires GNU Sed. Ignored if netCDF-4 is not enabled." OFF)
|
2012-10-03 04:56:46 +08:00
|
|
|
IF(NOT ENABLE_NETCDF_4 AND ENABLE_EXAMPLE_TESTS)
|
2014-04-22 06:11:48 +08:00
|
|
|
SET(ENABLE_EXAMPLE_TESTS OFF)
|
2012-10-03 04:56:46 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2018-08-16 21:33:04 +08:00
|
|
|
# Enable Parallel IO with netCDF-4/HDF5 files using HDF5 parallel I/O.
|
2013-02-13 06:22:54 +08:00
|
|
|
SET(STATUS_PARALLEL "OFF")
|
2015-11-30 11:38:12 +08:00
|
|
|
OPTION(ENABLE_PARALLEL4 "Build netCDF-4 with parallel IO" "${HDF5_PARALLEL}")
|
2015-10-10 04:13:50 +08:00
|
|
|
IF(ENABLE_PARALLEL4 AND ENABLE_NETCDF_4)
|
2015-11-30 11:38:12 +08:00
|
|
|
IF(NOT HDF5_PARALLEL)
|
2014-03-07 23:46:26 +08:00
|
|
|
SET(USE_PARALLEL OFF CACHE BOOL "")
|
|
|
|
MESSAGE(STATUS "Cannot find HDF5 library built with parallel support. Disabling parallel build.")
|
2014-03-07 23:40:50 +08:00
|
|
|
ELSE()
|
2019-12-19 00:48:40 +08:00
|
|
|
FIND_PACKAGE(MPI REQUIRED)
|
2015-11-30 11:38:12 +08:00
|
|
|
SET(HDF5_PARALLEL ON CACHE BOOL "")
|
2014-04-04 06:57:17 +08:00
|
|
|
SET(USE_PARALLEL ON CACHE BOOL "")
|
2015-10-10 04:13:50 +08:00
|
|
|
SET(USE_PARALLEL4 ON CACHE BOOL "")
|
2014-03-07 23:46:26 +08:00
|
|
|
SET(STATUS_PARALLEL "ON")
|
2018-08-16 21:33:04 +08:00
|
|
|
configure_file("${netCDF_SOURCE_DIR}/nc_test4/run_par_test.sh.in"
|
2018-08-17 22:52:34 +08:00
|
|
|
"${netCDF_BINARY_DIR}/tmp/run_par_test.sh" @ONLY NEWLINE_STYLE LF)
|
|
|
|
FILE(COPY "${netCDF_BINARY_DIR}/tmp/run_par_test.sh"
|
|
|
|
DESTINATION ${netCDF_BINARY_DIR}/nc_test4
|
|
|
|
FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
|
2020-02-28 05:06:45 +08:00
|
|
|
configure_file("${netCDF_SOURCE_DIR}/h5_test/run_par_tests.sh.in"
|
|
|
|
"${netCDF_BINARY_DIR}/tmp/run_par_tests.sh" @ONLY NEWLINE_STYLE LF)
|
|
|
|
FILE(COPY "${netCDF_BINARY_DIR}/tmp/run_par_tests.sh"
|
|
|
|
DESTINATION ${netCDF_BINARY_DIR}/h5_test
|
|
|
|
FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
|
2014-03-07 23:46:26 +08:00
|
|
|
ENDIF()
|
2013-02-13 06:22:54 +08:00
|
|
|
ENDIF()
|
2013-02-07 07:09:19 +08:00
|
|
|
|
2018-09-21 00:45:25 +08:00
|
|
|
# Options to enable parallel IO for classic formats with PnetCDF library.
|
2013-02-13 06:22:54 +08:00
|
|
|
SET(STATUS_PNETCDF "OFF")
|
2018-09-18 00:47:40 +08:00
|
|
|
OPTION(ENABLE_PNETCDF "Build with parallel I/O for CDF-1, 2, and 5 files using PnetCDF." OFF)
|
2015-08-16 06:26:35 +08:00
|
|
|
|
2013-02-13 06:22:54 +08:00
|
|
|
IF(ENABLE_PNETCDF)
|
2014-03-07 23:46:26 +08:00
|
|
|
# Check for ncmpi_create in libpnetcdf, define USE_PNETCDF
|
2018-09-18 00:47:40 +08:00
|
|
|
# Does the user want to turn on PnetCDF read ability?
|
2014-03-07 23:46:26 +08:00
|
|
|
SET(USE_PNETCDF ON CACHE BOOL "")
|
|
|
|
FIND_LIBRARY(PNETCDF NAMES pnetcdf)
|
|
|
|
FIND_PATH(PNETCDF_INCLUDE_DIR pnetcdf.h)
|
|
|
|
IF(NOT PNETCDF)
|
2016-11-13 13:58:09 +08:00
|
|
|
MESSAGE(STATUS "Cannot find PnetCDF library. Disabling PnetCDF support.")
|
2014-03-07 23:46:26 +08:00
|
|
|
SET(USE_PNETCDF OFF CACHE BOOL "")
|
2015-11-10 02:21:04 +08:00
|
|
|
ELSE(NOT PNETCDF)
|
|
|
|
SET(USE_PARALLEL ON CACHE BOOL "")
|
2016-02-09 22:23:09 +08:00
|
|
|
|
2016-11-13 13:58:09 +08:00
|
|
|
# Check the PnetCDF version. Must be greater than 1.6.0 (i.e. 1.6.1 or later).
|
2016-02-09 22:23:09 +08:00
|
|
|
set(pnetcdf_h "${PNETCDF_INCLUDE_DIR}/pnetcdf.h" )
|
2016-11-13 13:58:09 +08:00
|
|
|
message(STATUS "PnetCDF include file ${pnetcdf_h} will be searched for version")
|
2016-02-09 22:23:09 +08:00
|
|
|
file(STRINGS "${pnetcdf_h}" pnetcdf_major_string REGEX "^#define PNETCDF_VERSION_MAJOR")
|
|
|
|
string(REGEX REPLACE "[^0-9]" "" pnetcdf_major "${pnetcdf_major_string}")
|
|
|
|
file(STRINGS "${pnetcdf_h}" pnetcdf_minor_string REGEX "^#define PNETCDF_VERSION_MINOR")
|
|
|
|
string(REGEX REPLACE "[^0-9]" "" pnetcdf_minor "${pnetcdf_minor_string}")
|
|
|
|
file(STRINGS "${pnetcdf_h}" pnetcdf_sub_string REGEX "^#define PNETCDF_VERSION_SUB")
|
|
|
|
string(REGEX REPLACE "[^0-9]" "" pnetcdf_sub "${pnetcdf_sub_string}")
|
|
|
|
set(pnetcdf_version "${pnetcdf_major}.${pnetcdf_minor}.${pnetcdf_sub}")
|
2016-11-13 13:58:09 +08:00
|
|
|
message(STATUS "Found PnetCDF version ${pnetcdf_version}")
|
2016-02-09 22:23:09 +08:00
|
|
|
|
2016-02-10 02:23:33 +08:00
|
|
|
if(${pnetcdf_version} VERSION_GREATER "1.6.0")
|
2016-02-09 22:23:09 +08:00
|
|
|
SET(STATUS_PNETCDF "ON")
|
|
|
|
INCLUDE_DIRECTORIES(${PNETCDF_INCLUDE_DIR})
|
|
|
|
SET(HAVE_LIBPNETCDF ON)
|
2018-09-18 00:47:40 +08:00
|
|
|
# PnetCDF => parallel
|
2016-02-09 22:23:09 +08:00
|
|
|
SET(STATUS_PARALLEL ON)
|
|
|
|
SET(USE_PARALLEL ON)
|
2016-11-13 13:58:09 +08:00
|
|
|
MESSAGE(STATUS "Using PnetCDF Library: ${PNETCDF}")
|
2016-02-09 22:23:09 +08:00
|
|
|
ELSE()
|
2016-11-13 13:58:09 +08:00
|
|
|
MESSAGE(WARNING "ENABLE_PNETCDF requires version 1.6.1 or later; found version ${pnetcdf_version}. PnetCDF is disabled")
|
2016-02-24 02:12:16 +08:00
|
|
|
ENDIF()
|
2015-09-18 04:02:15 +08:00
|
|
|
ENDIF(NOT PNETCDF)
|
2013-02-07 07:09:19 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2018-03-16 22:38:40 +08:00
|
|
|
# Options to enable use of fill values for elements causing NC_ERANGE
|
2018-07-17 03:34:22 +08:00
|
|
|
SET(ENABLE_ERANGE_FILL AUTO CACHE STRING "AUTO")
|
|
|
|
OPTION(ENABLE_ERANGE_FILL "Enable use of fill value when out-of-range type conversion causes NC_ERANGE error." OFF)
|
|
|
|
IF(ENABLE_ERANGE_FILL) # enable or auto
|
|
|
|
STRING(TOUPPER ${ENABLE_ERANGE_FILL} ENABLE_ERANGE_FILL)
|
|
|
|
IF(ENABLE_ERANGE_FILL AND NOT ENABLE_ERANGE_FILL STREQUAL "AUTO")
|
|
|
|
# explicitly enabled
|
|
|
|
SET(ENABLE_ERANGE_FILL ON)
|
|
|
|
ELSE()
|
|
|
|
IF(NOT ENABLE_ERANGE_FILL STREQUAL "AUTO")
|
|
|
|
SET(ENABLE_ERANGE_FILL OFF)
|
|
|
|
ENDIF()
|
|
|
|
ENDIF()
|
|
|
|
ENDIF(ENABLE_ERANGE_FILL)
|
|
|
|
# Now ENABLE_ERANGE_FILL is either AUTO, ON, or OFF
|
2016-11-15 00:41:10 +08:00
|
|
|
|
2019-11-26 21:59:03 +08:00
|
|
|
# More relaxed coordinate check is now mandatory for all builds.
|
2019-11-26 20:40:03 +08:00
|
|
|
SET(ENABLE_ZERO_LENGTH_COORD_BOUND ON)
|
2016-11-15 00:41:10 +08:00
|
|
|
|
|
|
|
# check and conform with PnetCDF settings on ERANGE_FILL and RELAX_COORD_BOUND
|
2016-11-13 13:58:09 +08:00
|
|
|
IF(STATUS_PNETCDF)
|
2016-11-15 00:41:10 +08:00
|
|
|
file(STRINGS "${pnetcdf_h}" enable_erange_fill_pnetcdf REGEX "^#define PNETCDF_ERANGE_FILL")
|
|
|
|
string(REGEX REPLACE "[^0-9]" "" erange_fill_pnetcdf "${enable_erange_fill_pnetcdf}")
|
|
|
|
IF("x${erange_fill_pnetcdf}" STREQUAL "x1")
|
2018-07-17 03:34:22 +08:00
|
|
|
SET(erange_fill_pnetcdf "ON")
|
|
|
|
ELSE()
|
|
|
|
SET(erange_fill_pnetcdf "OFF")
|
|
|
|
ENDIF()
|
|
|
|
IF(ENABLE_ERANGE_FILL STREQUAL "AUTO") # not set on command line
|
|
|
|
SET(ENABLE_ERANGE_FILL "${erange_fill_pnetcdf}")
|
2016-11-15 00:41:10 +08:00
|
|
|
ELSE()
|
2018-07-17 03:34:22 +08:00
|
|
|
# user explicitly set this option on command line
|
|
|
|
IF(NOT ENABLE_ERANGE_FILL STREQUAL "${erange_fill_pnetcdf}")
|
|
|
|
IF(ENABLE_ERANGE_FILL)
|
|
|
|
MESSAGE(FATAL_ERROR "Enabling erange-fill conflicts with PnetCDF setting")
|
|
|
|
ELSE()
|
|
|
|
MESSAGE(FATAL_ERROR "Disabling erange-fill conflicts with PnetCDF setting")
|
|
|
|
ENDIF()
|
2016-11-15 00:41:10 +08:00
|
|
|
ENDIF()
|
|
|
|
ENDIF()
|
2018-07-17 03:34:22 +08:00
|
|
|
|
2016-11-13 13:58:09 +08:00
|
|
|
file(STRINGS "${pnetcdf_h}" relax_coord_bound_pnetcdf REGEX "^#define PNETCDF_RELAX_COORD_BOUND")
|
|
|
|
string(REGEX REPLACE "[^0-9]" "" relax_coord_bound "${relax_coord_bound_pnetcdf}")
|
2016-11-15 00:41:10 +08:00
|
|
|
IF("x${relax_coord_bound}" STREQUAL "x1")
|
2018-07-17 03:34:22 +08:00
|
|
|
SET(relax_coord_bound_pnetcdf "ON")
|
2016-11-13 13:58:09 +08:00
|
|
|
ELSE()
|
2018-07-17 03:34:22 +08:00
|
|
|
SET(relax_coord_bound_pnetcdf "OFF")
|
|
|
|
ENDIF()
|
2019-11-26 21:59:03 +08:00
|
|
|
# pnetcdf must have relaxed coord bounds to build with netCDF-4
|
2019-11-26 20:40:03 +08:00
|
|
|
IF(NOT ENABLE_ZERO_LENGTH_COORD_BOUND STREQUAL "${relax_coord_bound_pnetcdf}")
|
2019-11-26 21:59:03 +08:00
|
|
|
MESSAGE(FATAL_ERROR "Pnetcdf must be built with relax-coord-bound enabled")
|
2016-11-13 13:58:09 +08:00
|
|
|
ENDIF()
|
|
|
|
ENDIF()
|
2016-11-15 00:41:10 +08:00
|
|
|
|
2018-07-17 03:34:22 +08:00
|
|
|
IF(ENABLE_ERANGE_FILL)
|
|
|
|
MESSAGE(STATUS "Enabling use of fill value when NC_ERANGE")
|
2016-11-15 00:41:10 +08:00
|
|
|
SET(M4FLAGS "-DERANGE_FILL" CACHE STRING "")
|
|
|
|
ENDIF()
|
|
|
|
|
2018-08-28 06:57:07 +08:00
|
|
|
IF(ENABLE_ZERO_LENGTH_COORD_BOUND)
|
2018-07-29 05:10:07 +08:00
|
|
|
MESSAGE(STATUS "Enabling a more relaxed check for NC_EINVALCOORDS")
|
2016-11-13 13:58:09 +08:00
|
|
|
ADD_DEFINITIONS(-DRELAX_COORD_BOUND)
|
|
|
|
ENDIF()
|
|
|
|
|
2013-10-01 05:51:34 +08:00
|
|
|
# Enable Parallel Tests.
|
2020-04-02 06:20:33 +08:00
|
|
|
OPTION(ENABLE_PARALLEL_TESTS "Enable Parallel IO Tests. Requires HDF5/NetCDF4 with parallel I/O Support." "${HDF5_PARALLEL}")
|
2013-02-13 06:22:54 +08:00
|
|
|
IF(ENABLE_PARALLEL_TESTS AND USE_PARALLEL)
|
2014-03-07 23:46:26 +08:00
|
|
|
SET(TEST_PARALLEL ON CACHE BOOL "")
|
2016-06-10 03:21:28 +08:00
|
|
|
IF(USE_NETCDF4)
|
|
|
|
SET(TEST_PARALLEL4 ON CACHE BOOL "")
|
|
|
|
ENDIF()
|
2018-02-27 08:16:15 +08:00
|
|
|
ENDIF()
|
|
|
|
|
|
|
|
IF (ENABLE_PARALLEL_TESTS AND NOT USE_PARALLEL)
|
2018-02-27 08:08:26 +08:00
|
|
|
MESSAGE(FATAL_ERROR "Parallel tests requested, but no parallel HDF5 installation detected.")
|
2013-02-13 06:22:54 +08:00
|
|
|
ENDIF()
|
2012-10-03 04:56:46 +08:00
|
|
|
|
2017-11-24 01:55:24 +08:00
|
|
|
# Enable special filter test; experimental when using cmake.
|
2019-01-09 07:37:33 +08:00
|
|
|
OPTION(ENABLE_FILTER_TESTING "Enable filter testing. Ignored if shared libraries or netCDF4 are not enabled" ${ENABLE_NETCDF_4})
|
2018-12-08 05:44:47 +08:00
|
|
|
IF(ENABLE_FILTER_TESTING AND NOT ENABLE_NETCDF_4)
|
2018-01-17 02:00:09 +08:00
|
|
|
MESSAGE(WARNING "ENABLE_FILTER_TESTING requires netCDF-4. Disabling.")
|
2019-01-02 09:27:36 +08:00
|
|
|
SET(ENABLE_FILTER_TESTING OFF)
|
2018-01-17 02:00:09 +08:00
|
|
|
ENDIF()
|
|
|
|
IF(NOT BUILD_SHARED_LIBS)
|
|
|
|
MESSAGE(WARNING "ENABLE_FILTER_TESTING requires shared libraries. Disabling.")
|
2019-01-02 09:27:36 +08:00
|
|
|
SET(ENABLE_FILTER_TESTING OFF)
|
2017-11-24 01:55:24 +08:00
|
|
|
ENDIF()
|
2020-05-11 23:42:31 +08:00
|
|
|
OPTION(ENABLE_CLIENTSIDE_FILTERS "Enable client-side filter registration." OFF)
|
|
|
|
IF(NOT ENABLE_FILTER_TESTING)
|
|
|
|
SET(ENABLE_CLIENTSIDE_FILTERS OFF)
|
|
|
|
ENDIF()
|
|
|
|
|
2017-11-24 01:55:24 +08:00
|
|
|
|
2013-10-01 05:51:34 +08:00
|
|
|
# Determine whether or not to generate documentation.
|
2014-06-19 03:46:22 +08:00
|
|
|
OPTION(ENABLE_DOXYGEN "Enable generation of doxygen-based documentation." OFF)
|
2012-10-03 04:56:46 +08:00
|
|
|
IF(ENABLE_DOXYGEN)
|
2014-07-16 16:25:27 +08:00
|
|
|
FIND_PACKAGE(Doxygen REQUIRED)
|
|
|
|
# Offer the option to build internal documentation.
|
|
|
|
OPTION(ENABLE_INTERNAL_DOCS "Build internal documentation. This is of interest to developers only." OFF)
|
|
|
|
IF(ENABLE_INTERNAL_DOCS)
|
|
|
|
SET(BUILD_INTERNAL_DOCS YES CACHE STRING "")
|
|
|
|
ELSE()
|
|
|
|
SET(BUILD_INTERNAL_DOCS NO CACHE STRING "")
|
|
|
|
ENDIF()
|
2014-06-19 04:33:49 +08:00
|
|
|
|
2015-08-18 00:59:32 +08:00
|
|
|
###
|
|
|
|
#
|
|
|
|
# If we are building release documentation, we need to set some
|
|
|
|
# variables that will be used in the Doxygen.in template.
|
|
|
|
###
|
|
|
|
OPTION(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS "Build release documentation. This is of interest only to the netCDF developers." OFF)
|
|
|
|
IF(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS)
|
|
|
|
SET(DOXYGEN_CSS_FILE "${CMAKE_SOURCE_DIR}/docs/release.css" CACHE STRING "")
|
|
|
|
SET(DOXYGEN_HEADER_FILE "${CMAKE_SOURCE_DIR}/docs/release_header.html" CACHE STRING "")
|
|
|
|
SET(DOXYGEN_SEARCHENGINE "NO" CACHE STRING "")
|
2015-11-03 03:46:09 +08:00
|
|
|
SET(ENABLE_DOXYGEN_SERVER_BASED_SEARCH NO CACHE STRING "")
|
2014-09-19 05:05:08 +08:00
|
|
|
ELSE()
|
2015-08-18 00:59:32 +08:00
|
|
|
SET(DOXYGEN_CSS_FILE "" CACHE STRING "")
|
|
|
|
SET(DOXYGEN_HEADER_FILE "" CACHE STRING "")
|
|
|
|
SET(DOXYGEN_SEARCHENGINE "YES" CACHE STRING "")
|
2015-11-03 03:46:09 +08:00
|
|
|
|
|
|
|
# If not using release document configuration,
|
|
|
|
# provide an option for server-based search.
|
|
|
|
OPTION(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH "Configure Doxygen with server-based search." OFF)
|
|
|
|
IF(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH)
|
2015-11-03 03:58:29 +08:00
|
|
|
SET(DOXYGEN_SERVER_BASED_SEARCH "YES" CACHE STRING "")
|
2015-11-03 03:46:09 +08:00
|
|
|
ELSE()
|
2015-11-03 03:58:29 +08:00
|
|
|
SET(DOXYGEN_SERVER_BASED_SEARCH "NO" CACHE STRING "")
|
2015-11-03 03:46:09 +08:00
|
|
|
ENDIF(ENABLE_DOXYGEN_SERVER_SIDE_SEARCH)
|
|
|
|
|
|
|
|
|
2015-08-18 00:59:32 +08:00
|
|
|
ENDIF(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS)
|
2014-07-16 16:25:27 +08:00
|
|
|
# Option to turn on the TODO list in the doxygen-generated documentation.
|
2014-09-04 02:52:10 +08:00
|
|
|
OPTION(DOXYGEN_ENABLE_TASKS "Turn on test, todo, bug lists in documentation. This is of interest to developers only." OFF)
|
|
|
|
IF(DOXYGEN_ENABLE_TASKS)
|
2014-07-16 16:25:27 +08:00
|
|
|
SET(SHOW_DOXYGEN_TAG_LIST YES CACHE STRING "")
|
2014-09-04 02:52:10 +08:00
|
|
|
ELSE(DOXYGEN_ENABLE_TASKS)
|
2014-07-16 16:25:27 +08:00
|
|
|
SET(SHOW_DOXYGEN_TODO_LIST NO CACHE STRING "")
|
2014-09-04 02:52:10 +08:00
|
|
|
ENDIF(DOXYGEN_ENABLE_TASKS)
|
2014-02-28 05:19:28 +08:00
|
|
|
|
2014-07-16 16:25:27 +08:00
|
|
|
OPTION(ENABLE_DOXYGEN_PDF_OUTPUT "[EXPERIMENTAL] Turn on PDF output for Doxygen-generated documentation." OFF)
|
2014-06-19 04:33:49 +08:00
|
|
|
|
2014-07-16 16:25:27 +08:00
|
|
|
IF(ENABLE_DOXYGEN_PDF_OUTPUT)
|
|
|
|
SET(NC_ENABLE_DOXYGEN_PDF_OUTPUT "YES" CACHE STRING "")
|
|
|
|
ELSE()
|
|
|
|
SET(NC_ENABLE_DOXYGEN_PDF_OUTPUT "NO" CACHE STRING "")
|
|
|
|
ENDIF()
|
2014-06-19 04:33:49 +08:00
|
|
|
|
2014-08-13 06:34:11 +08:00
|
|
|
FIND_PROGRAM(NC_DOT NAMES dot)
|
2014-07-16 16:25:27 +08:00
|
|
|
# Specify whether or not 'dot' was found on the system path.
|
|
|
|
IF(NC_DOT)
|
|
|
|
SET(HAVE_DOT YES CACHE STRING "")
|
|
|
|
ELSE(NC_DOT)
|
|
|
|
SET(HAVE_DOT NO CACHE STRING "")
|
|
|
|
ENDIF(NC_DOT)
|
2012-10-03 04:56:46 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2018-05-12 05:30:19 +08:00
|
|
|
# Always enable DISKLESS
|
|
|
|
OPTION(ENABLE_DISKLESS "Enable in-memory files" ON)
|
|
|
|
|
|
|
|
|
2013-10-10 06:02:13 +08:00
|
|
|
# By default, MSVC has a stack size of 1000000.
|
|
|
|
# Allow a user to override this.
|
|
|
|
IF(MSVC)
|
2013-11-01 01:10:20 +08:00
|
|
|
SET(NC_MSVC_STACK_SIZE 40000000 CACHE STRING "Default stack size for MSVC-based projects.")
|
2013-10-10 06:05:50 +08:00
|
|
|
# By default, CMake sets the stack to 1000000.
|
|
|
|
# Remove this limitation.
|
|
|
|
# See here for more details:
|
|
|
|
# http://www.cmake.org/pipermail/cmake/2009-April/028710.html
|
2013-10-10 06:02:13 +08:00
|
|
|
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}")
|
|
|
|
SET(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}")
|
|
|
|
SET(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} /STACK:${NC_MSVC_STACK_SIZE}")
|
|
|
|
ENDIF()
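# A hedged how-to note (kept as a comment, not executed): a user building with
# Visual Studio can override the default stack reservation at configure time:
#   cmake .. -DNC_MSVC_STACK_SIZE=80000000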
|
|
|
|
|
2012-10-03 04:56:46 +08:00
|
|
|
# Set some of the options as advanced.
|
2015-11-03 03:58:29 +08:00
|
|
|
MARK_AS_ADVANCED(ENABLE_INTERNAL_DOCS VALGRIND_TESTS ENABLE_COVERAGE_TESTS )
|
2012-11-20 05:43:12 +08:00
|
|
|
MARK_AS_ADVANCED(ENABLE_DAP_REMOTE_TESTS ENABLE_DAP_LONG_TESTS USE_REMOTE_CDASH)
|
2015-11-03 03:58:29 +08:00
|
|
|
MARK_AS_ADVANCED(ENABLE_DOXYGEN_BUILD_RELEASE_DOCS DOXYGEN_ENABLE_TASKS ENABLE_DOXYGEN_SERVER_SIDE_SEARCH)
|
2016-03-08 05:04:52 +08:00
|
|
|
MARK_AS_ADVANCED(ENABLE_SHARED_LIBRARY_VERSION)
|
2016-05-04 11:17:06 +08:00
|
|
|
|
2013-10-01 05:51:34 +08:00
|
|
|
################################
|
|
|
|
# Option checks
|
|
|
|
################################
|
|
|
|
|
2012-10-03 04:56:46 +08:00
|
|
|
# Library include checks
|
|
|
|
CHECK_INCLUDE_FILE("math.h" HAVE_MATH_H)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("unistd.h" HAVE_UNISTD_H)
|
2013-01-16 06:43:09 +08:00
|
|
|
# Solve a compatibility issue in ncgen/, which checks
|
|
|
|
# for YY_NO_UNISTD_H
|
|
|
|
IF(NOT HAVE_UNISTD_H)
|
2014-03-07 23:46:26 +08:00
|
|
|
SET(YY_NO_UNISTD_H TRUE)
|
2013-01-16 06:43:09 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("alloca.h" HAVE_ALLOCA_H)
|
2013-03-28 03:15:00 +08:00
|
|
|
CHECK_INCLUDE_FILE("malloc.h" HAVE_MALLOC_H)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("fcntl.h" HAVE_FCNTL_H)
|
|
|
|
CHECK_INCLUDE_FILE("getopt.h" HAVE_GETOPT_H)
|
2017-01-28 08:41:03 +08:00
|
|
|
CHECK_INCLUDE_FILE("locale.h" HAVE_LOCALE_H)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("stdint.h" HAVE_STDINT_H)
|
|
|
|
CHECK_INCLUDE_FILE("stdio.h" HAVE_STDIO_H)
|
2017-03-17 04:34:33 +08:00
|
|
|
IF(MSVC)
|
|
|
|
CHECK_INCLUDE_FILE("io.h" HAVE_IO_H)
|
|
|
|
ENDIF(MSVC)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("stdlib.h" HAVE_STDLIB_H)
|
2012-12-21 05:50:45 +08:00
|
|
|
CHECK_INCLUDE_FILE("stdarg.h" HAVE_STDARG_H)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("strings.h" HAVE_STRINGS_H)
|
|
|
|
CHECK_INCLUDE_FILE("signal.h" HAVE_SIGNAL_H)
|
2012-10-03 04:56:46 +08:00
|
|
|
CHECK_INCLUDE_FILE("sys/param.h" HAVE_SYS_PARAM_H)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("sys/stat.h" HAVE_SYS_STAT_H)
|
|
|
|
CHECK_INCLUDE_FILE("sys/time.h" HAVE_SYS_TIME_H)
|
2012-10-03 04:56:46 +08:00
|
|
|
CHECK_INCLUDE_FILE("sys/types.h" HAVE_SYS_TYPES_H)
|
2017-01-28 08:41:03 +08:00
|
|
|
CHECK_INCLUDE_FILE("sys/mman.h" HAVE_SYS_MMAN_H)
|
2012-10-03 04:56:46 +08:00
|
|
|
CHECK_INCLUDE_FILE("sys/resource.h" HAVE_SYS_RESOURCE_H)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("fcntl.h" HAVE_FCNTL_H)
|
2012-10-03 04:56:46 +08:00
|
|
|
CHECK_INCLUDE_FILE("inttypes.h" HAVE_INTTYPES_H)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_INCLUDE_FILE("pstdint.h" HAVE_PSTDINT_H)
|
|
|
|
CHECK_INCLUDE_FILE("endian.h" HAVE_ENDIAN_H)
|
2013-04-03 06:09:31 +08:00
|
|
|
CHECK_INCLUDE_FILE("BaseTsd.h" HAVE_BASETSD_H)
|
2014-02-08 00:51:47 +08:00
|
|
|
CHECK_INCLUDE_FILE("stddef.h" HAVE_STDDEF_H)
|
2017-01-28 08:41:03 +08:00
|
|
|
CHECK_INCLUDE_FILE("string.h" HAVE_STRING_H)
|
2017-07-27 03:40:03 +08:00
|
|
|
CHECK_INCLUDE_FILE("winsock2.h" HAVE_WINSOCK2_H)
|
2017-07-06 00:03:48 +08:00
|
|
|
CHECK_INCLUDE_FILE("ftw.h" HAVE_FTW_H)
|
2018-02-03 11:27:06 +08:00
|
|
|
CHECK_INCLUDE_FILE("libgen.h" HAVE_LIBGEN_H)
|
2020-06-29 08:02:47 +08:00
|
|
|
CHECK_INCLUDE_FILE("execinfo.h" HAVE_EXECINFO_H)
|
|
|
|
CHECK_INCLUDE_FILE("dirent.h" HAVE_DIRENT_H)
|
2017-01-28 08:41:03 +08:00
|
|
|
|
|
|
|
# Symbol Exists
|
|
|
|
CHECK_SYMBOL_EXISTS(isfinite "math.h" HAVE_DECL_ISFINITE)
|
|
|
|
CHECK_SYMBOL_EXISTS(isnan "math.h" HAVE_DECL_ISNAN)
|
|
|
|
CHECK_SYMBOL_EXISTS(isinf "math.h" HAVE_DECL_ISINF)
|
|
|
|
CHECK_SYMBOL_EXISTS(st_blksize "sys/stat.h" HAVE_STRUCT_STAT_ST_BLKSIZE)
|
|
|
|
CHECK_SYMBOL_EXISTS(alloca "alloca.h" HAVE_ALLOCA)
|
2017-01-31 05:54:00 +08:00
|
|
|
CHECK_SYMBOL_EXISTS(snprintf "stdio.h" HAVE_SNPRINTF)
|
2012-10-03 04:56:46 +08:00
|
|
|
|
|
|
|
# Type checks
|
2017-04-04 11:39:44 +08:00
|
|
|
# Aliases for automake consistency
|
|
|
|
SET(SIZEOF_VOIDSTAR ${CMAKE_SIZEOF_VOID_P})
|
2017-03-09 08:01:10 +08:00
|
|
|
SET(SIZEOF_VOIDP ${SIZEOF_VOIDSTAR})
|
2014-03-29 07:11:26 +08:00
|
|
|
CHECK_TYPE_SIZE("char" SIZEOF_CHAR)
|
|
|
|
CHECK_TYPE_SIZE("double" SIZEOF_DOUBLE)
|
|
|
|
CHECK_TYPE_SIZE("float" SIZEOF_FLOAT)
|
|
|
|
CHECK_TYPE_SIZE("int" SIZEOF_INT)
|
2017-01-28 08:41:03 +08:00
|
|
|
CHECK_TYPE_SIZE("uint" SIZEOF_UINT)
|
|
|
|
IF(SIZEOF_UINT)
|
|
|
|
SET(HAVE_UINT TRUE)
|
|
|
|
ENDIF(SIZEOF_UINT)
|
|
|
|
|
2017-12-21 10:53:30 +08:00
|
|
|
CHECK_TYPE_SIZE("schar" SIZEOF_SCHAR)
|
|
|
|
IF(SIZEOF_SCHAR)
|
|
|
|
SET(HAVE_SCHAR TRUE)
|
|
|
|
ENDIF(SIZEOF_SCHAR)
|
|
|
|
|
2014-03-29 07:11:26 +08:00
|
|
|
CHECK_TYPE_SIZE("long" SIZEOF_LONG)
|
|
|
|
CHECK_TYPE_SIZE("long long" SIZEOF_LONG_LONG)
|
2017-01-28 08:41:03 +08:00
|
|
|
IF(SIZEOF_LONG_LONG)
|
|
|
|
SET(HAVE_LONG_LONG_INT TRUE)
|
|
|
|
ENDIF(SIZEOF_LONG_LONG)
|
|
|
|
|
|
|
|
CHECK_TYPE_SIZE("unsigned long long" SIZEOF_UNSIGNED_LONG_LONG)
|
|
|
|
|
2014-03-29 07:11:26 +08:00
|
|
|
CHECK_TYPE_SIZE("off_t" SIZEOF_OFF_T)
|
|
|
|
CHECK_TYPE_SIZE("off64_t" SIZEOF_OFF64_T)
|
|
|
|
CHECK_TYPE_SIZE("short" SIZEOF_SHORT)
|
2017-01-28 08:41:03 +08:00
|
|
|
CHECK_TYPE_SIZE("ushort" SIZEOF_USHORT)
|
|
|
|
IF(SIZEOF_USHORT)
|
|
|
|
SET(HAVE_USHORT TRUE)
|
|
|
|
ENDIF(SIZEOF_USHORT)
|
|
|
|
|
|
|
|
CHECK_TYPE_SIZE("_Bool" SIZEOF__BOOL)
|
|
|
|
|
2014-03-29 07:11:26 +08:00
|
|
|
CHECK_TYPE_SIZE("size_t" SIZEOF_SIZE_T)
|
2017-01-28 08:41:03 +08:00
|
|
|
|
2018-07-02 08:42:03 +08:00
|
|
|
# Check whether to turn on or off CDF5 support.
|
|
|
|
SET(ENABLE_CDF5 AUTO CACHE STRING "AUTO")
|
|
|
|
OPTION(ENABLE_CDF5 "Enable CDF5 support" ON)
|
|
|
|
IF(SIZEOF_SIZE_T EQUAL 4)
|
|
|
|
IF(ENABLE_CDF5) # enable or auto
|
|
|
|
STRING(TOUPPER ${ENABLE_CDF5} ENABLE_CDF5)
|
|
|
|
IF(ENABLE_CDF5 AND NOT ENABLE_CDF5 STREQUAL "AUTO") # explicitly enabled
|
|
|
|
MESSAGE(FATAL_ERROR "Unable to support CDF5 feature because size_t is less than 8 bytes")
|
|
|
|
ENDIF(ENABLE_CDF5 AND NOT ENABLE_CDF5 STREQUAL "AUTO")
|
|
|
|
SET(ENABLE_CDF5 OFF) # cannot support CDF5
|
|
|
|
SET(USE_CDF5 OFF CACHE BOOL "") # cannot support CDF5
|
|
|
|
ENDIF(ENABLE_CDF5)
|
|
|
|
ELSE(SIZEOF_SIZE_T EQUAL 4)
|
|
|
|
IF(ENABLE_CDF5) # explicitly set by user or not set
|
|
|
|
SET(USE_CDF5 ON CACHE BOOL "")
|
|
|
|
ELSE(ENABLE_CDF5) # explicitly disabled by user
|
|
|
|
SET(USE_CDF5 OFF CACHE BOOL "")
|
|
|
|
ENDIF(ENABLE_CDF5)
|
|
|
|
ENDIF(SIZEOF_SIZE_T EQUAL 4)
|
|
|
|
|
2018-03-17 01:46:18 +08:00
|
|
|
CHECK_TYPE_SIZE("ssize_t" SIZEOF_SSIZE_T)
|
|
|
|
IF(SIZEOF_SSIZE_T)
|
|
|
|
SET(HAVE_SSIZE_T TRUE)
|
|
|
|
ENDIF(SIZEOF_SSIZE_T)
|
|
|
|
CHECK_TYPE_SIZE("ptrdiff_t" SIZEOF_PTRDIFF_T)
|
|
|
|
IF(SIZEOF_PTRDIFF_T)
|
|
|
|
SET(HAVE_PTRDIFF_T TRUE)
|
|
|
|
ENDIF(SIZEOF_PTRDIFF_T)
|
|
|
|
CHECK_TYPE_SIZE("uintptr_t" SIZEOF_UINTPTR_T)
|
|
|
|
IF(SIZEOF_UINTPTR_T)
|
|
|
|
SET(HAVE_UINTPTR_T TRUE)
|
|
|
|
ENDIF(SIZEOF_UINTPTR_T)
|
2017-01-28 08:41:03 +08:00
|
|
|
|
2013-08-31 05:16:17 +08:00
|
|
|
# __int64 is used on Windows for large file support.
|
2014-03-29 07:11:26 +08:00
|
|
|
CHECK_TYPE_SIZE("__int64" SIZEOF___INT_64)
|
|
|
|
CHECK_TYPE_SIZE("int64_t" SIZEOF_INT64_T)
|
|
|
|
CHECK_TYPE_SIZE("uint64_t" SIZEOF_UINT64_T)
|
2016-10-29 07:16:49 +08:00
|
|
|
CHECK_TYPE_SIZE("unsigned char" SIZEOF_UCHAR)
|
2017-03-14 05:12:47 +08:00
|
|
|
CHECK_TYPE_SIZE("unsigned short int" SIZEOF_UNSIGNED_SHORT_INT)
|
|
|
|
CHECK_TYPE_SIZE("unsigned int" SIZEOF_UNSIGNED_INT)
|
2016-10-29 07:16:49 +08:00
|
|
|
CHECK_TYPE_SIZE("long long" SIZEOF_LONGLONG)
|
|
|
|
CHECK_TYPE_SIZE("unsigned long long" SIZEOF_ULONGLONG)
|
2012-10-03 04:56:46 +08:00
|
|
|
|
2013-08-31 05:25:28 +08:00
|
|
|
# On windows systems, we redefine off_t as __int64
|
|
|
|
# to enable LFS. This is true on both 32 and 64 bit systems.
|
|
|
|
# We must redefine SIZEOF_OFF_T to match.
|
2013-08-31 05:16:17 +08:00
|
|
|
IF(MSVC AND SIZEOF___INT_64)
|
2013-08-31 05:25:28 +08:00
|
|
|
SET(SIZEOF_OFF_T ${SIZEOF___INT_64})
|
2013-08-31 05:16:17 +08:00
|
|
|
ENDIF()
|
|
|
|
|
2014-03-07 23:40:50 +08:00
|
|
|
# Check for various functions.
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(fsync HAVE_FSYNC)
|
|
|
|
CHECK_FUNCTION_EXISTS(strlcat HAVE_STRLCAT)
|
|
|
|
CHECK_FUNCTION_EXISTS(strdup HAVE_STRDUP)
|
2014-08-08 07:03:27 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(strndup HAVE_STRNDUP)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(strtoll HAVE_STRTOLL)
|
2017-01-28 08:41:03 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(strtoull HAVE_STRTOULL)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(mkstemp HAVE_MKSTEMP)
|
2017-09-03 08:09:36 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(mktemp HAVE_MKTEMP)
|
2016-04-12 06:07:27 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(random HAVE_RANDOM)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(gettimeofday HAVE_GETTIMEOFDAY)
|
2017-01-28 08:41:03 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(MPI_Comm_f2c HAVE_MPI_COMM_F2C)
|
2018-09-22 06:32:36 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(MPI_Info_f2c HAVE_MPI_INFO_F2C)
|
2014-03-07 23:46:26 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(memmove HAVE_MEMMOVE)
|
|
|
|
CHECK_FUNCTION_EXISTS(getpagesize HAVE_GETPAGESIZE)
|
|
|
|
CHECK_FUNCTION_EXISTS(sysconf HAVE_SYSCONF)
|
|
|
|
CHECK_FUNCTION_EXISTS(getrlimit HAVE_GETRLIMIT)
|
2013-11-05 03:58:33 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(_filelengthi64 HAVE_FILE_LENGTH_I64)
|
2018-10-16 21:44:20 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(mmap HAVE_MMAP)
|
2017-01-28 08:41:03 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(mremap HAVE_MREMAP)
|
2019-03-23 05:16:47 +08:00
|
|
|
CHECK_FUNCTION_EXISTS(fileno HAVE_FILENO)
|
2017-01-28 08:41:03 +08:00
|
|
|
|
2018-12-06 10:20:43 +08:00
|
|
|
# Check to see if MAP_ANONYMOUS is defined.
|
2019-04-20 10:32:26 +08:00
|
|
|
IF(MSVC)
|
|
|
|
MESSAGE(WARNING "mmap not supported under visual studio: disabling MMAP support.")
|
|
|
|
SET(ENABLE_MMAP OFF)
|
|
|
|
ELSE()
|
2018-12-06 10:20:43 +08:00
|
|
|
CHECK_C_SOURCE_COMPILES("
|
2019-04-20 10:32:26 +08:00
|
|
|
#include <sys/mman.h>
|
2018-12-06 10:20:43 +08:00
|
|
|
int main() {int x = MAP_ANONYMOUS;}" HAVE_MAPANON)
|
2019-04-20 10:32:26 +08:00
|
|
|
IF(NOT HAVE_MMAP OR NOT HAVE_MAPANON)
|
|
|
|
MESSAGE(WARNING "mmap or MAP_ANONYMOUS not found: disabling MMAP support.")
|
2018-10-11 03:32:17 +08:00
|
|
|
SET(ENABLE_MMAP OFF)
|
|
|
|
ENDIF()
|
2019-04-20 10:32:26 +08:00
|
|
|
ENDIF()
|
2018-10-11 03:32:17 +08:00
|
|
|
|
2017-01-28 08:41:03 +08:00
|
|
|
IF(ENABLE_MMAP)
|
2019-04-20 10:32:26 +08:00
|
|
|
# Aliases
|
2018-10-11 03:32:17 +08:00
|
|
|
SET(BUILD_MMAP ON)
|
|
|
|
SET(USE_MMAP ON)
|
2017-01-28 08:41:03 +08:00
|
|
|
ENDIF(ENABLE_MMAP)
|
2013-03-28 03:15:00 +08:00
|
|
|
|
2017-01-28 08:41:03 +08:00
|
|
|
#CHECK_FUNCTION_EXISTS(alloca HAVE_ALLOCA)
|
2012-10-03 04:56:46 +08:00
|
|
|
#####
|
|
|
|
# End system inspection checks.
|
|
|
|
#####
|
|
|
|
|
2014-10-08 18:47:58 +08:00
|
|
|
################################
|
|
|
|
# Define Utility Macros
|
|
|
|
################################
|
|
|
|
|
|
|
|
# Macro to append files to the EXTRA_DIST files.
|
2017-03-09 08:01:10 +08:00
|
|
|
# Note: can only be used in subdirectories because of the use of PARENT_SCOPE
|
2014-10-08 18:47:58 +08:00
|
|
|
SET(EXTRA_DIST "")
|
|
|
|
MACRO(ADD_EXTRA_DIST files)
|
|
|
|
FOREACH(F ${files})
|
|
|
|
SET(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/${F})
|
|
|
|
SET(EXTRA_DIST ${EXTRA_DIST} PARENT_SCOPE)
|
|
|
|
ENDFOREACH()
|
|
|
|
ENDMACRO()
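# A hedged usage sketch (kept as a comment; the file names are hypothetical):
# call from a subdirectory CMakeLists.txt so that PARENT_SCOPE can propagate
# the accumulated list upward:
#   ADD_EXTRA_DIST("run_example.sh;ref_output.cdl")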
|
|
|
|
|
|
|
|
# A basic script used to convert m4 files
|
2018-04-26 04:23:25 +08:00
|
|
|
FIND_PROGRAM(NC_M4 NAMES m4 m4.exe)
|
2015-06-12 04:28:39 +08:00
|
|
|
IF(NC_M4)
|
2018-04-26 04:02:10 +08:00
|
|
|
MESSAGE(STATUS "Found m4: ${NC_M4}")
|
|
|
|
SET(HAVE_M4 TRUE)
|
|
|
|
ELSE()
|
2018-04-26 04:13:45 +08:00
|
|
|
MESSAGE(STATUS "m4 not found.")
|
2015-06-12 04:28:39 +08:00
|
|
|
ENDIF()
|
2015-01-13 01:42:33 +08:00
|
|
|
|
2014-10-08 18:47:58 +08:00
|
|
|
MACRO(GEN_m4 filename)
|
2016-01-23 03:03:28 +08:00
|
|
|
|
2018-03-01 03:18:20 +08:00
|
|
|
IF(HAVE_M4)
|
|
|
|
|
2016-01-23 03:03:28 +08:00
|
|
|
# If m4 is available, remove generated file if it exists.
|
|
|
|
IF(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${filename}.c)
|
|
|
|
FILE(REMOVE ${CMAKE_CURRENT_SOURCE_DIR}/${filename}.c)
|
|
|
|
ENDIF()
|
|
|
|
|
2015-06-04 05:15:00 +08:00
|
|
|
ADD_CUSTOM_COMMAND(
|
|
|
|
OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/${filename}.c
|
|
|
|
COMMAND ${NC_M4}
|
2016-11-04 04:14:14 +08:00
|
|
|
ARGS ${M4FLAGS} ${CMAKE_CURRENT_SOURCE_DIR}/${filename}.m4 > ${CMAKE_CURRENT_SOURCE_DIR}/${filename}.c
|
2015-06-04 05:15:00 +08:00
|
|
|
VERBATIM
|
|
|
|
)
|
2015-06-12 04:28:39 +08:00
|
|
|
ENDIF(HAVE_M4)
|
2014-10-08 18:47:58 +08:00
|
|
|
ENDMACRO(GEN_m4)
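# A hedged usage sketch (kept as a comment; the file name is hypothetical):
#   GEN_m4(my_generated_source)
# would register a rule that regenerates my_generated_source.c from
# my_generated_source.m4 whenever m4 is available and a target lists the
# generated .c file among its sources.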
|
|
|
|
|
|
|
|
# Binary tests, but ones which depend on value of 'TEMP_LARGE' being defined.
|
|
|
|
MACRO(add_bin_env_temp_large_test prefix F)
|
|
|
|
ADD_EXECUTABLE(${prefix}_${F} ${F}.c)
|
|
|
|
TARGET_LINK_LIBRARIES(${prefix}_${F} netcdf)
|
|
|
|
IF(MSVC)
|
|
|
|
SET_TARGET_PROPERTIES(${prefix}_${F}
|
|
|
|
PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT"
|
|
|
|
)
|
|
|
|
ENDIF()
|
|
|
|
|
|
|
|
ADD_TEST(${prefix}_${F} bash "-c" "TEMP_LARGE=${CMAKE_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}")
|
|
|
|
IF(MSVC)
|
|
|
|
SET_PROPERTY(TARGET ${prefix}_${F} PROPERTY FOLDER "tests")
|
2015-02-03 06:14:22 +08:00
|
|
|
SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
|
2015-01-31 03:55:46 +08:00
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
2015-02-03 06:14:22 +08:00
|
|
|
SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG
|
2015-01-31 03:55:46 +08:00
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
|
|
|
SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE
|
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
2016-03-03 04:03:29 +08:00
|
|
|
SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELWITHDEBINFO
|
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
2014-10-08 18:47:58 +08:00
|
|
|
ENDIF()
|
|
|
|
ENDMACRO()
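# A hedged usage sketch (kept as a comment; the source file tst_big_var.c is
# hypothetical): builds <prefix>_tst_big_var and runs it with the TEMP_LARGE
# environment variable set as coded above:
#   add_bin_env_temp_large_test(nc_test tst_big_var)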
|
|
|
|
|
|
|
|
|
|
|
|
# Tests which are binary, but depend on a particular environment variable.
|
|
|
|
MACRO(add_bin_env_test prefix F)
|
|
|
|
ADD_EXECUTABLE(${prefix}_${F} ${F}.c)
|
|
|
|
TARGET_LINK_LIBRARIES(${prefix}_${F} netcdf)
|
|
|
|
IF(MSVC)
|
|
|
|
SET_TARGET_PROPERTIES(${prefix}_${F}
|
|
|
|
PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT"
|
|
|
|
)
|
|
|
|
ENDIF()
|
|
|
|
|
2019-11-07 00:43:49 +08:00
|
|
|
ADD_TEST(${prefix}_${F} bash "-c" "TOPSRCDIR=${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR}/${prefix}_${F}")
|
2014-10-08 18:47:58 +08:00
|
|
|
IF(MSVC)
|
|
|
|
SET_PROPERTY(TARGET ${prefix}_${F} PROPERTY FOLDER "tests")
|
|
|
|
ENDIF()
|
|
|
|
ENDMACRO()
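# A hedged usage sketch (kept as a comment; tst_env_driven.c is hypothetical):
# the resulting test runs with TOPSRCDIR pointing at the current source directory:
#   add_bin_env_test(ncdump tst_env_driven)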
|
|
|
|
|
|
|
|
# Build a binary used by a script, but don't make a test out of it.
|
|
|
|
MACRO(build_bin_test F)
|
|
|
|
ADD_EXECUTABLE(${F} ${F}.c)
|
2015-05-29 07:13:15 +08:00
|
|
|
TARGET_LINK_LIBRARIES(${F} netcdf ${ALL_TLL_LIBS})
|
2014-10-08 18:47:58 +08:00
|
|
|
IF(MSVC)
|
|
|
|
SET_TARGET_PROPERTIES(${F}
|
|
|
|
PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT"
|
|
|
|
)
|
2015-02-03 06:14:22 +08:00
|
|
|
SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
|
2015-01-31 03:55:46 +08:00
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
2015-02-03 06:14:22 +08:00
|
|
|
SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG
|
2015-01-31 03:55:46 +08:00
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
|
|
|
SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE
|
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
2014-10-08 18:47:58 +08:00
|
|
|
ENDIF()
|
|
|
|
ENDMACRO()
|
|
|
|
|
|
|
|
# Binary tests which are used by a script looking for a specific name.
|
|
|
|
MACRO(add_bin_test_no_prefix F)
|
|
|
|
build_bin_test(${F})
|
|
|
|
ADD_TEST(${F} ${EXECUTABLE_OUTPUT_PATH}/${F})
|
|
|
|
IF(MSVC)
|
|
|
|
SET_PROPERTY(TEST ${F} PROPERTY FOLDER "tests/")
|
2015-02-03 06:14:22 +08:00
|
|
|
SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
|
2015-01-31 03:55:46 +08:00
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
2015-02-03 06:14:22 +08:00
|
|
|
SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG
|
2015-01-31 03:55:46 +08:00
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
|
|
|
SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE
|
2015-02-03 06:14:22 +08:00
|
|
|
${CMAKE_CURRENT_BINARY_DIR})
|
2014-10-08 18:47:58 +08:00
|
|
|
ENDIF()
|
|
|
|
ENDMACRO()
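# A hedged usage sketch (kept as a comment; tst_helper.c is hypothetical): the
# binary keeps its unprefixed name so a companion shell script can invoke it:
#   add_bin_test_no_prefix(tst_helper)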

# Build a binary, used by a script looking for a specific name, without
# registering it as a test.
MACRO(build_bin_test_no_prefix F)
  build_bin_test(${F})
  IF(MSVC)
    #SET_PROPERTY(TEST ${F} PROPERTY FOLDER "tests/")
    SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
      ${CMAKE_CURRENT_BINARY_DIR})
    SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG
      ${CMAKE_CURRENT_BINARY_DIR})
    SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE
      ${CMAKE_CURRENT_BINARY_DIR})
  ENDIF()
ENDMACRO()

MACRO(add_bin_test prefix F)
  ADD_EXECUTABLE(${prefix}_${F} ${F}.c)
  TARGET_LINK_LIBRARIES(${prefix}_${F}
    ${ALL_TLL_LIBS}
    netcdf
    )
  IF(MSVC)
    SET_TARGET_PROPERTIES(${prefix}_${F}
      PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT"
      )
  ENDIF()
  ADD_TEST(${prefix}_${F}
    ${EXECUTABLE_OUTPUT_PATH}/${prefix}_${F}
    )
  IF(MSVC)
    SET_PROPERTY(TEST ${prefix}_${F} PROPERTY FOLDER "tests/")
    SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
      ${CMAKE_CURRENT_BINARY_DIR})
    SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG
      ${CMAKE_CURRENT_BINARY_DIR})
    SET_TARGET_PROPERTIES(${prefix}_${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE
      ${CMAKE_CURRENT_BINARY_DIR})
  ENDIF()
ENDMACRO()
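
# Illustrative usage only (not part of the build), assuming a hypothetical
# source tst_simple.c:
#   add_bin_test(nc_test tst_simple)
# This compiles tst_simple.c, links it against netcdf and ${ALL_TLL_LIBS},
# and registers the CTest case nc_test_tst_simple.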

# Build a binary, with additional sources, used by a script looking for a
# specific name.
MACRO(build_bin_test_sourced F)
  ADD_EXECUTABLE(${F} ${F}.c ${ARGN})
  TARGET_LINK_LIBRARIES(${F} netcdf ${ALL_TLL_LIBS})
  IF(MSVC)
    SET_TARGET_PROPERTIES(${F}
      PROPERTIES LINK_FLAGS_DEBUG " /NODEFAULTLIB:MSVCRT"
      )
    SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY
      ${CMAKE_CURRENT_BINARY_DIR})
    SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG
      ${CMAKE_CURRENT_BINARY_DIR})
    SET_TARGET_PROPERTIES(${F} PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE
      ${CMAKE_CURRENT_BINARY_DIR})
  ENDIF()
ENDMACRO()
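
# Illustrative usage only (not part of the build), assuming hypothetical
# sources tst_multi.c and extra_helpers.c:
#   build_bin_test_sourced(tst_multi extra_helpers.c)
# The trailing arguments are forwarded through ${ARGN} as additional sources
# for the tst_multi executable.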

# A cmake script to print out information at the end of the configuration step.
MACRO(print_conf_summary)
  MESSAGE("")
  MESSAGE("")
  MESSAGE("Configuration Summary:")
  MESSAGE("")
  MESSAGE(STATUS "Building Shared Libraries: ${BUILD_SHARED_LIBS}")
  MESSAGE(STATUS "Building netCDF-4: ${ENABLE_NETCDF_4}")
  MESSAGE(STATUS "Building DAP2 Support: ${ENABLE_DAP2}")
  MESSAGE(STATUS "Building DAP4 Support: ${ENABLE_DAP4}")
  MESSAGE(STATUS "Building Byte-range Support: ${ENABLE_BYTERANGE}")
  MESSAGE(STATUS "Building Utilities: ${BUILD_UTILITIES}")
  IF(CMAKE_PREFIX_PATH)
    MESSAGE(STATUS "CMake Prefix Path: ${CMAKE_PREFIX_PATH}")
  ENDIF()
  MESSAGE("")

  IF(${STATUS_PNETCDF} OR ${STATUS_PARALLEL})
    MESSAGE("Building Parallel NetCDF")
    MESSAGE(STATUS "Using PnetCDF: ${STATUS_PNETCDF}")
    MESSAGE(STATUS "Using Parallel IO: ${STATUS_PARALLEL}")
    MESSAGE("")
  ENDIF()

  MESSAGE("Tests Enabled: ${ENABLE_TESTS}")
  IF(ENABLE_TESTS)
    MESSAGE(STATUS "DAP Remote Tests: ${ENABLE_DAP_REMOTE_TESTS}")
    MESSAGE(STATUS "Extra Tests: ${ENABLE_EXTRA_TESTS}")
    MESSAGE(STATUS "Coverage Tests: ${ENABLE_COVERAGE_TESTS}")
    MESSAGE(STATUS "Parallel Tests: ${ENABLE_PARALLEL_TESTS}")
    MESSAGE(STATUS "Large File Tests: ${ENABLE_LARGE_FILE_TESTS}")
    MESSAGE(STATUS "Extreme Numbers: ${ENABLE_EXTREME_NUMBERS}")
    MESSAGE(STATUS "Unit Tests: ${ENABLE_UNIT_TESTS}")
  ENDIF()

  MESSAGE("")
  MESSAGE("Compiler:")
  MESSAGE("")
  MESSAGE(STATUS "Build Type: ${CMAKE_BUILD_TYPE}")
  MESSAGE(STATUS "CMAKE_C_COMPILER: ${CMAKE_C_COMPILER}")
  MESSAGE(STATUS "CMAKE_C_FLAGS: ${CMAKE_C_FLAGS}")
  IF("${CMAKE_BUILD_TYPE}" STREQUAL "DEBUG")
    MESSAGE(STATUS "CMAKE_C_FLAGS_DEBUG: ${CMAKE_C_FLAGS_DEBUG}")
  ENDIF()
  IF("${CMAKE_BUILD_TYPE}" STREQUAL "RELEASE")
    MESSAGE(STATUS "CMAKE_C_FLAGS_RELEASE: ${CMAKE_C_FLAGS_RELEASE}")
  ENDIF()

  MESSAGE(STATUS "Linking against: ${ALL_TLL_LIBS}")

  MESSAGE("")
ENDMACRO()

##
# Shell script Macro
##
# Determine if 'bash' is on the system.
##

OPTION(ENABLE_BASH_SCRIPT_TESTING "Detection is typically automatic, but this option can be used to force enable/disable bash-script based tests." ON)

IF(ENABLE_BASH_SCRIPT_TESTING)
  FIND_PROGRAM(HAVE_BASH bash)
  IF(HAVE_BASH)
    STRING(COMPARE EQUAL "${HAVE_BASH}" "C:/Windows/System32/bash.exe" IS_BASH_EXE)
    IF(NOT IS_BASH_EXE)
      MESSAGE(STATUS "Found bash: ${HAVE_BASH}")
    ELSE()
      MESSAGE(STATUS "Ignoring ${HAVE_BASH}")
      SET(HAVE_BASH "")
    ENDIF()
  ELSE()
    MESSAGE(STATUS "Bash shell not found; disabling shell script tests.")
  ENDIF()
ELSE(ENABLE_BASH_SCRIPT_TESTING)
  SET(HAVE_BASH "")
ENDIF(ENABLE_BASH_SCRIPT_TESTING)
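
# Illustrative configure-time usage only: bash-script based tests can be
# forced off regardless of detection with, e.g.:
#   cmake -DENABLE_BASH_SCRIPT_TESTING=OFF <path-to-source>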

MACRO(add_sh_test prefix F)
  IF(HAVE_BASH)
    ADD_TEST(${prefix}_${F} bash "-c" "export srcdir=${CMAKE_CURRENT_SOURCE_DIR};export TOPSRCDIR=${CMAKE_SOURCE_DIR};${CMAKE_CURRENT_BINARY_DIR}/${F}.sh")
  ENDIF()
ENDMACRO()
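
# Illustrative usage only (not part of the build), assuming a hypothetical
# script that ends up in the build tree as run_formats.sh:
#   add_sh_test(nc_test run_formats)
# This registers nc_test_run_formats, which runs the script under bash with
# srcdir and TOPSRCDIR exported; it is a no-op when bash was not found.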

# A function used to create autotools-style 'yes/no' definitions.
# If a variable is set, 'yes' is returned. Otherwise, 'no' is
# returned.
#
# Also creates a version of ret_val prefixed with 'NC_', set to 1 when the
# feature is enabled and 0 otherwise, which is used to generate netcdf_meta.h.
FUNCTION(is_enabled feature ret_val)
  IF(${feature})
    SET(${ret_val} "yes" PARENT_SCOPE)
    SET("NC_${ret_val}" 1 PARENT_SCOPE)
  ELSE()
    SET(${ret_val} "no" PARENT_SCOPE)
    SET("NC_${ret_val}" 0 PARENT_SCOPE)
  ENDIF(${feature})
ENDFUNCTION()

# A function used to create autotools-style 'yes/no' definitions.
# If a variable is set, 'no' is returned. Otherwise, 'yes' is
# returned.
#
# Also creates a version of ret_val prefixed with 'NC_', set to 1 when the
# feature is disabled, which is used to generate netcdf_meta.h.
FUNCTION(is_disabled feature ret_val)
  IF(${feature})
    SET(${ret_val} "no" PARENT_SCOPE)
  ELSE()
    SET(${ret_val} "yes" PARENT_SCOPE)
    SET("NC_${ret_val}" 1 PARENT_SCOPE)
  ENDIF(${feature})
ENDFUNCTION()
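
# Illustrative usage only (not part of the build), assuming a hypothetical
# option ENABLE_FOO:
#   is_enabled(ENABLE_FOO HAS_FOO)        # HAS_FOO -> "yes"/"no", NC_HAS_FOO -> 1/0
#   is_disabled(ENABLE_FOO FOO_DISABLED)  # FOO_DISABLED -> "no" when ENABLE_FOO is set, else "yes"
# The resulting strings are later substituted into libnetcdf.settings.in and
# netcdf_meta.h.in via configure_file().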

################################
# End Macro Definitions
################################

# Create config.h file.
configure_file("${netCDF_SOURCE_DIR}/config.h.cmake.in"
  "${netCDF_BINARY_DIR}/config.h")
ADD_DEFINITIONS(-DHAVE_CONFIG_H)
INCLUDE_DIRECTORIES(${netCDF_BINARY_DIR})
# End autotools-style checks for config.h

#####
# Set core names of the libraries.
#####
SET(netCDF_LIB_CORENAME "netcdf")

#####
# Set the true names of all the libraries, if customized by external project
#####
# Recurse into other subdirectories.
add_subdirectory("include")
add_subdirectory(libdispatch)
add_subdirectory(libsrc)

IF(USE_PNETCDF)
  add_subdirectory(libsrcp)
ENDIF(USE_PNETCDF)

IF(USE_HDF5)
  add_subdirectory(libsrc4)
  add_subdirectory(libhdf5)
ENDIF(USE_HDF5)

IF(USE_HDF4)
  add_subdirectory(libhdf4)
  add_subdirectory(hdf4_test)
ENDIF(USE_HDF4)

IF(ENABLE_DAP2)
  ADD_SUBDIRECTORY(oc2)
  ADD_SUBDIRECTORY(libdap2)
ENDIF()

IF(ENABLE_DAP4)
  ADD_SUBDIRECTORY(libdap4)
ENDIF()
IF(ENABLE_NCZARR)
  ADD_SUBDIRECTORY(libnczarr)
ENDIF()

add_subdirectory(liblib)

IF(ENABLE_FILTER_TESTING)
  CONFIGURE_FILE(plugins/H5Znoop.c ${CMAKE_SOURCE_DIR}/plugins/H5Znoop1.c COPYONLY)
  add_subdirectory(plugins)
ENDIF()

# For tests and utilities, we are no longer
# exporting symbols but rather importing them.
IF(BUILD_DLL)
  REMOVE_DEFINITIONS(-DDLL_EXPORT)
ENDIF()

# Enable Utilities.
IF(BUILD_UTILITIES)
  INCLUDE_DIRECTORIES(ncdump)
  ADD_SUBDIRECTORY(ncgen)
  ADD_SUBDIRECTORY(ncgen3)
  ADD_SUBDIRECTORY(ncdump)
ENDIF()

# Enable tests
IF(ENABLE_TESTS)
  IF(ENABLE_V2_API)
    ADD_SUBDIRECTORY(nctest)
  ENDIF()
  ADD_SUBDIRECTORY(nc_test)
  IF(USE_NETCDF4)
    INCLUDE_DIRECTORIES(h5_test)
    ADD_SUBDIRECTORY(nc_test4)
    IF(BUILD_BENCHMARKS)
      ADD_SUBDIRECTORY(nc_perf)
    ENDIF()
    ADD_SUBDIRECTORY(h5_test)
  ENDIF()
  IF(ENABLE_DAP2)
    ADD_SUBDIRECTORY(ncdap_test)
  ENDIF()
  IF(ENABLE_DAP4)
    ADD_SUBDIRECTORY(dap4_test)
  ENDIF()
  IF(ENABLE_NCZARR)
    ADD_SUBDIRECTORY(nczarr_test)
  ENDIF()
  IF(ENABLE_EXAMPLES)
    ADD_SUBDIRECTORY(examples)
  ENDIF()
  IF(ENABLE_UNIT_TESTS)
    ADD_SUBDIRECTORY(unit_test)
  ENDIF(ENABLE_UNIT_TESTS)
ENDIF()

# Code to generate an export header
#GENERATE_EXPORT_HEADER(netcdf
#  BASE_NAME netcdf
#  EXPORT_MACRO_NAME netcdf_EXPORT
#  EXPORT_FILE_NAME netcdf_Export.h
#  STATIC_DEFINE netcdf_BUILT_AS_STATIC
#)

#####
# Build doxygen documentation, if need be.
#####
ADD_SUBDIRECTORY(docs)

##
# Brute force, grab all of the dlls from the dependency directory,
# install them in the binary dir. Grab all of the .libs, put them
# in the libdir.
##
IF(MSVC)
  FILE(GLOB COPY_FILES ${CMAKE_PREFIX_PATH}/lib/*.lib)
  INSTALL(FILES ${COPY_FILES}
    DESTINATION ${CMAKE_INSTALL_LIBDIR}
    COMPONENT dependencies)

  FILE(GLOB COPY_FILES ${CMAKE_PREFIX_PATH}/bin/*.dll)
  STRING(REGEX REPLACE "msv[.*].dll" "" COPY_FILES "${COPY_FILES}")
  INSTALL(FILES ${COPY_FILES}
    DESTINATION ${CMAKE_INSTALL_BINDIR}
    COMPONENT dependencies)
ENDIF()

# Subdirectory CMakeLists.txt files should specify their own
# 'install' files.
# Including 'CPack' kicks everything off.
INCLUDE(InstallRequiredSystemLibraries)
CONFIGURE_FILE(
  ${CMAKE_CURRENT_SOURCE_DIR}/FixBundle.cmake.in
  ${CMAKE_CURRENT_BINARY_DIR}/FixBundle.cmake
  @ONLY
  )

###
# Create pkgconfig files.
###

IF(NOT DEFINED CMAKE_INSTALL_LIBDIR)
  SET(CMAKE_INSTALL_LIBDIR lib)
ENDIF(NOT DEFINED CMAKE_INSTALL_LIBDIR)

# Set pkg-config substitution variables.
SET(prefix ${CMAKE_INSTALL_PREFIX})
SET(exec_prefix ${CMAKE_INSTALL_PREFIX})
SET(libdir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR})
SET(includedir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_INCLUDEDIR})
SET(CC ${CMAKE_C_COMPILER})

# Process all dependency libraries and create a string
# used when parsing netcdf.pc.in

SET(NC_LIBS "")

FOREACH(_LIB ${ALL_TLL_LIBS})
  GET_FILENAME_COMPONENT(_LIB_NAME ${_LIB} NAME_WE)
  STRING(REGEX REPLACE "^lib" "" _NAME ${_LIB_NAME})
  LIST(APPEND NC_LIBS "-l${_NAME}")
  GET_FILENAME_COMPONENT(_LIB_DIR ${_LIB} PATH)
  LIST(APPEND LINKFLAGS "-L${_LIB_DIR}")
ENDFOREACH()
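
# Illustrative result only, for a hypothetical dependency /usr/lib/libhdf5_hl.so:
# NAME_WE yields "libhdf5_hl", the leading "lib" is stripped, and the loop
# appends "-lhdf5_hl" to NC_LIBS and "-L/usr/lib" to LINKFLAGS; the collected
# strings are later substituted into netcdf.pc.in.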

#SET(NC_LIBS "-lnetcdf ${NC_LIBS}")
IF(NC_LIBS)
  STRING(REPLACE ";" " " NC_LIBS "${NC_LIBS}")
  STRING(REPLACE "-lhdf5::hdf5-shared" "-lhdf5" NC_LIBS ${NC_LIBS})
  STRING(REPLACE "-lhdf5::hdf5_hl-shared" "-lhdf5_hl" NC_LIBS ${NC_LIBS})
  STRING(REPLACE "-lhdf5::hdf5-static" "-lhdf5" NC_LIBS ${NC_LIBS})
  STRING(REPLACE "-lhdf5::hdf5_hl-static" "-lhdf5_hl" NC_LIBS ${NC_LIBS})
ENDIF()

STRING(REPLACE ";" " " LINKFLAGS "${LINKFLAGS}")

LIST(REMOVE_DUPLICATES NC_LIBS)
LIST(REMOVE_DUPLICATES LINKFLAGS)

SET(LIBS ${NC_LIBS})
SET(NC_LIBS "-lnetcdf")

configure_file(
  ${netCDF_SOURCE_DIR}/netcdf.pc.in
  ${netCDF_BINARY_DIR}/netcdf.pc @ONLY)

IF(NOT IS_DIRECTORY ${netCDF_BINARY_DIR}/tmp)
  FILE(MAKE_DIRECTORY ${netCDF_BINARY_DIR}/tmp)
ENDIF()

configure_file("${netCDF_SOURCE_DIR}/nc-config.cmake.in"
  "${netCDF_BINARY_DIR}/tmp/nc-config" @ONLY
  NEWLINE_STYLE LF)
FILE(COPY "${netCDF_BINARY_DIR}/tmp/nc-config"
  DESTINATION ${netCDF_BINARY_DIR}/
  FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)

INSTALL(FILES ${netCDF_BINARY_DIR}/netcdf.pc
  DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig
  COMPONENT utilities)

INSTALL(PROGRAMS ${netCDF_BINARY_DIR}/nc-config
  DESTINATION ${CMAKE_INSTALL_BINDIR}
  COMPONENT utilities)

###
# End pkgconfig, nc-config file creation.
###

##
# Print the configuration summary
##
print_conf_summary()

# Enable Makedist files.
ADD_MAKEDIST()
ENABLE_MAKEDIST(README.md COPYRIGHT RELEASE_NOTES.md INSTALL INSTALL.cmake test_prog.c lib_flags.am cmake CMakeLists.txt COMPILE.cmake.txt config.h.cmake.in cmake_uninstall.cmake.in netcdf-config-version.cmake.in netcdf-config.cmake.in FixBundle.cmake.in nc-config.cmake.in configure configure.ac install-sh config.h.in config.sub CTestConfig.cmake.in)

#####
# Configure and print the libnetcdf.settings file.
#####

# Set variables to mirror those used by autoconf.
# This way we don't need to maintain two separate template
# files.
SET(host_cpu "${cpu}")
SET(host_vendor "${osname}")
SET(host_os "${osrel}")
SET(abs_top_builddir "${CMAKE_CURRENT_BINARY_DIR}")
SET(abs_top_srcdir "${CMAKE_CURRENT_SOURCE_DIR}")

SET(CC_VERSION "${CMAKE_C_COMPILER}")

# Build *FLAGS for libnetcdf.settings.
SET(CFLAGS "${CMAKE_C_FLAGS} ${CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE}}")
SET(CPPFLAGS "${CMAKE_CPP_FLAGS} ${CMAKE_CPP_FLAGS_${CMAKE_BUILD_TYPE}}")
SET(LDFLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE}}")

is_disabled(BUILD_SHARED_LIBS enable_static)
is_enabled(BUILD_SHARED_LIBS enable_shared)

is_enabled(ENABLE_V2_API HAS_NC2)
is_enabled(ENABLE_NETCDF_4 HAS_NC4)
is_enabled(ENABLE_HDF4 HAS_HDF4)
is_enabled(ENABLE_NETCDF_4 HAS_HDF5)
is_enabled(USE_SZIP HAS_SZIP)
is_enabled(USE_SZIP HAS_SZIP_WRITE)
is_enabled(STATUS_PNETCDF HAS_PNETCDF)
is_enabled(STATUS_PARALLEL HAS_PARALLEL)
is_enabled(ENABLE_PARALLEL4 HAS_PARALLEL4)
is_enabled(ENABLE_DAP HAS_DAP)
is_enabled(ENABLE_DAP HAS_DAP2)
is_enabled(ENABLE_DAP4 HAS_DAP4)
is_enabled(ENABLE_BYTERANGE HAS_BYTERANGE)
is_enabled(ENABLE_DISKLESS HAS_DISKLESS)
is_enabled(USE_MMAP HAS_MMAP)
is_enabled(JNA HAS_JNA)
is_enabled(ENABLE_ZERO_LENGTH_COORD_BOUND RELAX_COORD_BOUND)
is_enabled(USE_CDF5 HAS_CDF5)
is_enabled(ENABLE_ERANGE_FILL HAS_ERANGE_FILL)
is_enabled(HAVE_H5Z_SZIP HAS_SZLIB)
is_enabled(HDF5_HAS_PAR_FILTERS HAS_PAR_FILTERS)
is_enabled(ENABLE_NCZARR HAS_NCZARR)
is_enabled(ENABLE_S3_SDK HAS_S3_SDK)

# Generate file from template.
CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/libnetcdf.settings.in"
  "${CMAKE_CURRENT_BINARY_DIR}/libnetcdf.settings"
  @ONLY)

# Read in settings file, print out.
# Avoid using system-specific calls so that this
# might also work on Windows.
FILE(READ "${CMAKE_CURRENT_BINARY_DIR}/libnetcdf.settings"
  LIBNETCDF_SETTINGS)
MESSAGE(STATUS ${LIBNETCDF_SETTINGS})

# Install libnetcdf.settings file into same location
# as the libraries.
INSTALL(FILES "${netCDF_BINARY_DIR}/libnetcdf.settings"
  DESTINATION "${CMAKE_INSTALL_LIBDIR}"
  COMPONENT libraries)

#####
# End libnetcdf.settings section.
#####

#####
# Create 'netcdf_meta.h' include file.
#####
configure_file(
  ${netCDF_SOURCE_DIR}/include/netcdf_meta.h.in
  ${netCDF_SOURCE_DIR}/include/netcdf_meta.h @ONLY)

FILE(COPY ${netCDF_SOURCE_DIR}/include/netcdf_meta.h
  DESTINATION ${netCDF_BINARY_DIR}/include/)

#####
# Build test_common.sh
#####
SET(EXTRA_DIST ${EXTRA_DIST} ${CMAKE_CURRENT_SOURCE_DIR}/test_common.in)
SET(TOPSRCDIR "${CMAKE_CURRENT_SOURCE_DIR}")
SET(TOPBUILDDIR "${CMAKE_CURRENT_BINARY_DIR}")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/test_common.in ${CMAKE_CURRENT_BINARY_DIR}/test_common.sh @ONLY NEWLINE_STYLE LF)

#####
# Build nc_test4/findplugin.sh
#####
SET(ISCMAKE "1")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_test4/findplugin.in ${CMAKE_CURRENT_BINARY_DIR}/nc_test4/findplugin.sh @ONLY NEWLINE_STYLE LF)

IF(ENABLE_EXAMPLES)
  configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_test4/findplugin.in ${CMAKE_CURRENT_BINARY_DIR}/examples/C/findplugin.sh @ONLY NEWLINE_STYLE LF)
ENDIF()

#####
# Build ncdap_test|dap4_test/findtestserver[4].c
#####
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/ncdap_test/findtestserver.c.in ${CMAKE_CURRENT_SOURCE_DIR}/ncdap_test/findtestserver.c @ONLY NEWLINE_STYLE LF)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/ncdap_test/findtestserver.c.in ${CMAKE_CURRENT_SOURCE_DIR}/dap4_test/findtestserver4.c @ONLY NEWLINE_STYLE LF)

#####
# Build dap4_test/pingurl4.c
#####
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/ncdap_test/pingurl.c ${CMAKE_CURRENT_SOURCE_DIR}/dap4_test/pingurl4.c @ONLY NEWLINE_STYLE LF)

####
# Export files
####

# Create CMake package configuration files. With these, other packages using
# cmake should be able to find netcdf using find_package and find_library.
# The EXPORT call is paired with one in liblib.
set(ConfigPackageLocation ${CMAKE_INSTALL_LIBDIR}/cmake/netCDF)

install(EXPORT netCDFTargets
  DESTINATION ${ConfigPackageLocation}
  COMPONENT headers
  NAMESPACE netCDF::
  )

include(CMakePackageConfigHelpers)
CONFIGURE_PACKAGE_CONFIG_FILE(
  "${CMAKE_CURRENT_SOURCE_DIR}/netCDFConfig.cmake.in"
  "${CMAKE_CURRENT_BINARY_DIR}/netCDFConfig.cmake"
  INSTALL_DESTINATION "${ConfigPackageLocation}"
  NO_CHECK_REQUIRED_COMPONENTS_MACRO
  PATH_VARS
    CMAKE_INSTALL_PREFIX
    CMAKE_INSTALL_INCLUDEDIR
    CMAKE_INSTALL_LIBDIR
  )

INSTALL(
  FILES "${CMAKE_CURRENT_BINARY_DIR}/netCDFConfig.cmake"
  DESTINATION "${ConfigPackageLocation}"
  COMPONENT headers
  )

add_library(netCDF::netcdf ALIAS netcdf)
target_include_directories(netcdf
  PUBLIC
    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/include>
    $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
  )
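
# Illustrative downstream usage only (not part of this build): once installed,
# a consuming project (with a hypothetical target 'myapp') can locate and link
# the library via the exported package configuration:
#   find_package(netCDF CONFIG REQUIRED)
#   target_link_libraries(myapp PRIVATE netCDF::netcdf)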

# Create export configuration
write_basic_package_version_file(
  "${CMAKE_CURRENT_BINARY_DIR}/netCDF/netCDFConfigVersion.cmake"
  VERSION ${netCDF_VERSION}
  COMPATIBILITY SameMajorVersion
  )

install(
  FILES
    "${CMAKE_CURRENT_BINARY_DIR}/netCDF/netCDFConfigVersion.cmake"
  DESTINATION ${ConfigPackageLocation}
  COMPONENT headers
  )

####
# End export files
####

# CPack inclusion must come last.
# INCLUDE(CPack)
INCLUDE(CMakeInstallation.cmake)