netcdf-c/config.h.cmake.in

/*! \file
Copyright 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014,
2015, 2016, 2017, 2018
University Corporation for Atmospheric Research/Unidata.
See \ref copyright file for more info.
*/
#ifndef CONFIG_H
#define CONFIG_H
#ifdef _MSC_VER
/* Prevent an issue where there is a circular inclusion
of winsock.h/windows.h. This weird state occurs with
libdap4 and hdf4 support. The solution comes from the
following URL, found after a bit of research.
Added in support of the 4.5.0-rc1. Hello, future generations.
* https://stackoverflow.com/questions/1372480/c-redefinition-header-files-winsock2-h
*/
/* #cmakedefine HAVE_WINSOCK2_H
#ifdef HAVE_WINSOCK2_H
#define _WINSOCKAPI_
#endif
*/
#if _MSC_VER>=1900
#define STDC99
#endif
/* Define O_BINARY so that the appropriate flags
are set when opening a binary file on Windows. */
/* Disable a few warnings under Visual Studio, for the
time being. */
#include <io.h>
#pragma warning( disable: 4018 4996 4244 4305 )
#define unlink _unlink
#define open _open
#define close _close
#define read _read
#define lseek _lseeki64
#ifndef __clang__
#define fstat _fstat64
#endif
#define off_t __int64
#define _off_t __int64
#ifndef _OFF_T_DEFINED
#define _OFF_T_DEFINED
#endif
#define strdup _strdup
#define fdopen _fdopen
#define write _write
#define strtoll _strtoi64
#endif /*_MSC_VER */
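/* Illustrative sketch (not part of the original template): with the macro
   mappings above, portable POSIX-style I/O code compiles unchanged under
   MSVC, because open/read/close/lseek expand to the underscore-prefixed CRT
   functions declared in <io.h>. Assuming <fcntl.h> is included for O_RDONLY
   and O_BINARY, and "data.nc" is a hypothetical file:

     int fd = open("data.nc", O_RDONLY | O_BINARY);
     if (fd >= 0) {
         char buf[512];
         read(fd, buf, sizeof(buf));
         close(fd);
     }

   each call above expands to its underscore-prefixed CRT equivalent
   (_open, _read, _close). */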
#cmakedefine const
#ifndef _FILE_OFFSET_BITS
#cmakedefine _FILE_OFFSET_BITS ${_FILE_OFFSET_BITS}
#cmakedefine _LARGEFILE64_SOURCE
#cmakedefine _LARGEFILE_SOURCE
#endif
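/* Illustrative note (assumption, not part of the original template): when
   CMake sets _FILE_OFFSET_BITS to 64 on a 32-bit POSIX system, off_t becomes
   a 64-bit type, so seeking past 2 GiB works with the ordinary call, e.g.
   for a hypothetical open descriptor fd:

     off_t pos = lseek(fd, (off_t)3 << 30, SEEK_SET);

   which positions the file 3 GiB from its start. */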
/* Define if building universal (internal helper macro) */
#cmakedefine AC_APPLE_UNIVERSAL_BUILD 1
/* If true, will attempt to download and build netcdf-fortran. */
#cmakedefine BUILD_FORTRAN 1
/* default file chunk cache nelems. */
#cmakedefine CHUNK_CACHE_NELEMS ${CHUNK_CACHE_NELEMS}
/* default file chunk cache preemption policy. */
#cmakedefine CHUNK_CACHE_PREEMPTION ${CHUNK_CACHE_PREEMPTION}
/* default file chunk cache size in bytes. */
#cmakedefine CHUNK_CACHE_SIZE ${CHUNK_CACHE_SIZE}
/* default nczarr chunk cache size in bytes. */
#cmakedefine CHUNK_CACHE_SIZE_NCZARR ${CHUNK_CACHE_SIZE_NCZARR}
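/* Usage sketch (illustrative, not part of the original template): the
   CHUNK_CACHE_SIZE / CHUNK_CACHE_NELEMS / CHUNK_CACHE_PREEMPTION values
   above seed the library-wide chunk cache defaults; an application can still
   override them at run time through the public API, e.g.

     nc_set_chunk_cache(CHUNK_CACHE_SIZE, CHUNK_CACHE_NELEMS,
                        CHUNK_CACHE_PREEMPTION);
*/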
/* Define to one of `_getb67', `GETB67', `getb67' for Cray-2 and Cray-YMP
systems. This function is required for `alloca.c' support on those systems.
*/
#cmakedefine CRAY_STACKSEG_END
/* Define to 1 if using `alloca.c'. */
#cmakedefine C_ALLOCA 1
/* num chunks in default per-var chunk cache. */
#cmakedefine DEFAULT_CHUNKS_IN_CACHE ${DEFAULT_CHUNKS_IN_CACHE}
/* default chunk size in bytes */
#cmakedefine DEFAULT_CHUNK_SIZE ${DEFAULT_CHUNK_SIZE}
/* set this only when building a DLL under MinGW */
#cmakedefine DLL_EXPORT 1
/* set this only when building a DLL under MinGW */
#cmakedefine DLL_NETCDF 1
/* if true, use atexit */
#cmakedefine ENABLE_ATEXIT_FINALIZE 1
/* if true, build byte-range Client */
#cmakedefine ENABLE_BYTERANGE 1
/* if true, use hdf5 S3 virtual file reader */
#cmakedefine ENABLE_HDF5_ROS3 1
/* if true, enable CDF5 Support */
#cmakedefine ENABLE_CDF5 1
/* if true, enable client side filters */
#cmakedefine ENABLE_CLIENT_FILTERS 1
/* if true, enable strict null byte header padding. */
#cmakedefine USE_STRICT_NULL_BYTE_HEADER_PADDING 1
/* if true, build DAP2 and DAP4 Client */
#cmakedefine ENABLE_DAP 1
/* if true, build DAP4 Client */
#cmakedefine ENABLE_DAP4 1
/* if true, do remote tests */
#cmakedefine ENABLE_DAP_REMOTE_TESTS 1
/* if true, enable NCZARR */
#cmakedefine ENABLE_NCZARR 1
/* if true, enable nczarr filter support */
#cmakedefine ENABLE_NCZARR_FILTERS 1
/* if true, enable S3 testing*/
#cmakedefine ENABLE_NCZARR_S3_TESTS 1
/* if true, enable nczarr zip support */
#cmakedefine ENABLE_NCZARR_ZIP 1
/* if true, Allow dynamically loaded plugins */
#cmakedefine ENABLE_PLUGINS 1
/* if true, enable S3 support */
#cmakedefine ENABLE_S3 1
/* if true, AWS S3 SDK is available */
#cmakedefine ENABLE_S3_AWS 1
/* if true, Force use of S3 internal library */
#cmakedefine ENABLE_S3_INTERNAL 1
/* if true, run extra tests which may not work yet */
#cmakedefine EXTRA_TESTS 1
/* use HDF5 1.6 API */
#cmakedefine H5_USE_16_API 1
/* Define to 1 if you have `alloca', as a function or macro. */
#cmakedefine HAVE_ALLOCA 1
/* Define to 1 if you have <alloca.h> and it should be used (not on Ultrix). */
#cmakedefine HAVE_ALLOCA_H 1
/* Define to 1 if you have the `atexit' function. */
#cmakedefine HAVE_ATEXIT 1
/* Define to 1 if bzip2 library available. */
#cmakedefine HAVE_BZ2 1
/* Define to 1 if zstd library available. */
#cmakedefine HAVE_ZSTD 1
/* Define to 1 if blosc library available. */
#cmakedefine HAVE_BLOSC 1
/* Define to 1 if you have hdf5_coll_metadata_ops */
#cmakedefine HDF5_HAS_COLL_METADATA_OPS 1
/* Is CURLINFO_RESPONSE_CODE defined */
#cmakedefine HAVE_CURLINFO_RESPONSE_CODE 1
/* Is CURLINFO_HTTP_CONNECTCODE defined */
#cmakedefine HAVE_CURLINFO_HTTP_CONNECTCODE 1
/* Is CURLOPT_BUFFERSIZE defined */
#cmakedefine HAVE_CURLOPT_BUFFERSIZE 1
/* Is CURLOPT_TCP_KEEPALIVE defined */
#cmakedefine HAVE_CURLOPT_KEEPALIVE 1
/* Is CURLOPT_KEYPASSWD defined */
#cmakedefine HAVE_CURLOPT_KEYPASSWD 1
/* Is CURLOPT_PASSWORD defined */
#cmakedefine HAVE_CURLOPT_PASSWORD 1
/* Is CURLOPT_USERNAME defined */
#cmakedefine HAVE_CURLOPT_USERNAME 1
/* Is LIBCURL version >= 7.66 */
#cmakedefine HAVE_LIBCURL_766 1
/* Define to 1 if you have the declaration of `isfinite', and to 0 if you
don't. */
#cmakedefine HAVE_DECL_ISFINITE 1
/* Define to 1 if you have the declaration of `isinf', and to 0 if you don't.
*/
#cmakedefine HAVE_DECL_ISINF 1
/* Define to 1 if you have the declaration of `isnan', and to 0 if you don't.
*/
#cmakedefine HAVE_DECL_ISNAN 1
/* Define to 1 if you have the <dirent.h> header file. */
#cmakedefine HAVE_DIRENT_H 1
/* Define to 1 if you have the <dlfcn.h> header file. */
#cmakedefine HAVE_DLFCN_H 1
/* Define to 1 if you have the <fcntl.h> header file. */
#cmakedefine HAVE_FCNTL_H 1
/* Define to 1 if you have the BaseTsd.h header file. */
#cmakedefine HAVE_BASETSD_H @HAVE_BASETSD_H@
/* Define if we have filelengthi64. */
#cmakedefine HAVE_FILE_LENGTH_I64 @HAVE_FILE_LENGTH_I64@
/* Define to 1 if you have the `fileno' function. */
#cmakedefine HAVE_FILENO 1
/* Define to 1 if you have the `fsync' function. */
#cmakedefine HAVE_FSYNC 1
/* Define to 1 if you have the <getopt.h> header file. */
#cmakedefine HAVE_GETOPT_H 1
/* Define to 1 if you have the `getpagesize' function. */
#cmakedefine HAVE_GETPAGESIZE 1
/* Define to 1 if you have the `getrlimit' function. */
#cmakedefine HAVE_GETRLIMIT 1
/* Define to 1 if you have the `gettimeofday' function. */
#cmakedefine HAVE_GETTIMEOFDAY 1
/* Define to 1 if you have the `clock_gettime' function. */
#cmakedefine HAVE_CLOCK_GETTIME 1
/* Define to 1 if the system has the type `struct timespec'. */
#cmakedefine HAVE_STRUCT_TIMESPEC 1
/* Define to 1 if HDF5 provides the H5Z_SZIP filter. */
#cmakedefine HAVE_H5Z_SZIP 1
/* Define to 1 if you have libsz */
#cmakedefine HAVE_SZ 1
/* Define to 1 if you have the <hdf5.h> header file. */
#cmakedefine HAVE_HDF5_H 1
/* Define to 1 if you have the <hdf5_hl.h> header file. */
#cmakedefine HAVE_HDF5_HL_H 1
/* Define to 1 if the system has the type `int64'. */
#cmakedefine HAVE_INT64 1
/* Define to 1 if you have the <inttypes.h> header file. */
#cmakedefine HAVE_INTTYPES_H 1
/* Define to 1 if you have the `dl' library (-ldl). */
#cmakedefine HAVE_LIBDL 1
/* Define to 1 if you have the `jpeg' library (-ljpeg). */
#cmakedefine HAVE_LIBJPEG 1
/* Define to 1 if you have the `m' library (-lm). */
#cmakedefine HAVE_LIBM 1
/* Define to 1 if you have the `mfhdf' library (-lmfhdf). */
#cmakedefine HAVE_LIBMFHDF 1
/* Define to 1 if you have the `pnetcdf' library (-lpnetcdf). */
#cmakedefine HAVE_LIBPNETCDF 1
/* Define to 1 if you have the libxml2 library. */
#cmakedefine ENABLE_LIBXML2 1
/* Define to 1 if you have the <locale.h> header file. */
#cmakedefine HAVE_LOCALE_H 1
/* Define to 1 if the system has the type `longlong'. */
#cmakedefine HAVE_LONGLONG 1
/* Define to 1 if the system has the type 'long long int'. */
#cmakedefine HAVE_LONG_LONG_INT 1
/* Define to 1 if you have the <malloc.h> header file. */
#cmakedefine HAVE_MALLOC_H 1
/* Define to 1 if you have the `memmove' function. */
#cmakedefine HAVE_MEMMOVE 1
/* Define to 1 if you have the `mkstemp' function. */
#cmakedefine HAVE_MKSTEMP 1
/* Define to 1 if you have the `mktemp' function. */
#cmakedefine HAVE_MKTEMP 1
/* Define to 1 if you have the `MPI_Comm_f2c' function. */
#cmakedefine HAVE_MPI_COMM_F2C 1
/* Define to 1 if you have the `MPI_Info_f2c' function. */
#cmakedefine HAVE_MPI_INFO_F2C 1
/* Define to 1 if you have the `mremap' function. */
#cmakedefine HAVE_MREMAP 1
/* Define to 1 if you have the `random' function. */
#cmakedefine HAVE_RANDOM 1
/* Define to 1 if you have the `snprintf' function. */
#cmakedefine HAVE_SNPRINTF 1
/* Define to 1 if the system has the type `ssize_t'. */
#cmakedefine HAVE_SSIZE_T 1
/* Define to 1 if the system has the type `ptrdiff_t'. */
#cmakedefine HAVE_PTRDIFF_T 1
/* Define to 1 if the system has the type `uintptr_t'. */
#cmakedefine HAVE_UINTPTR_T 1
/* Define to 1 if you have the <stdarg.h> header file. */
#cmakedefine HAVE_STDARG_H 1
/* Define to 1 if you have the <stdint.h> header file. */
#cmakedefine HAVE_STDINT_H 1
/* Define to 1 if you have the <stdio.h> header file. */
#cmakedefine HAVE_STDIO_H 1
/* Define to 1 if you have the <stdlib.h> header file. */
#cmakedefine HAVE_STDLIB_H 1
/* Define to 1 if you have the <strings.h> header file. */
#cmakedefine HAVE_STRINGS_H 1
/* Define to 1 if you have the <signal.h> header file. */
#cmakedefine HAVE_SIGNAL_H 1
/* Define to 1 if you have the <string.h> header file. */
#cmakedefine HAVE_STRING_H 1
/* Define to 1 if you have the <ftw.h> header file. */
#cmakedefine HAVE_FTW_H 1
/* Define to 1 if you have the <libgen.h> header file. */
#cmakedefine HAVE_LIBGEN_H 1
/* Define to 1 if you have the `strdup' function. */
#cmakedefine HAVE_STRDUP 1
/* Define to 1 if you have the `strndup` function. */
#cmakedefine HAVE_STRNDUP
/* Define to 1 if you have the `strcasecmp` function. */
#cmakedefine HAVE_STRCASECMP
/* Define to 1 if you have the `strlcat' function. */
#cmakedefine HAVE_STRLCAT 1
/* Define to 1 if you have the `strlcpy' function. */
#cmakedefine HAVE_STRLCPY 1
/* Define to 1 if you have the `strtoll' function. */
#cmakedefine HAVE_STRTOLL 1
/* Define to 1 if you have the `strtoull' function. */
#cmakedefine HAVE_STRTOULL 1
/* Define to 1 if you have the `stroull' function. */
#cmakedefine HAVE_STROULL 1
/* Define to 1 if `st_blksize' is a member of `struct stat'. */
#cmakedefine HAVE_STRUCT_STAT_ST_BLKSIZE 1
/* Define to 1 if you have the `sysconf' function. */
#cmakedefine HAVE_SYSCONF 1
/* Define to 1 if you have the <sys/param.h> header file. */
#cmakedefine HAVE_SYS_PARAM_H 1
/* Define to 1 if you have the <sys/resource.h> header file. */
#cmakedefine HAVE_SYS_RESOURCE_H 1
/* Define to 1 if you have the <sys/stat.h> header file. */
#cmakedefine HAVE_SYS_STAT_H 1
/* Define to 1 if you have the <sys/time.h> header file. */
#cmakedefine HAVE_SYS_TIME_H 1
/* Define to 1 if you have the <sys/types.h> header file. */
#cmakedefine HAVE_SYS_TYPES_H 1
/* Define to 1 if you have the <time.h> header file. */
#cmakedefine HAVE_TIME_H 1
/* Define to 1 if the system has the type `uchar'. */
#cmakedefine HAVE_UCHAR 1
/* Define to 1 if the system has the type `uint'. */
#cmakedefine HAVE_UINT 1
/* Define to 1 if the system has the type `uint64'. */
#cmakedefine HAVE_UINT64 1
/* Define to 1 if the system has the type `uint64_t'. */
#cmakedefine HAVE_UINT64_T 1
/* Define to 1 if you have the <unistd.h> header file. */
#cmakedefine HAVE_UNISTD_H 1
#cmakedefine YY_NO_UNISTD_H 1
/* Define to 1 if the system has the type `ushort'. */
#cmakedefine HAVE_USHORT 1
/* if true, hdf5 has parallelism enabled */
#cmakedefine HDF5_PARALLEL 1
/* if true, HDF5 is at least version 1.10.3 and allows parallel I/O
   with zip */
#cmakedefine HDF5_SUPPORTS_PAR_FILTERS 1
/* if true, HDF5 is at least version 1.10.5 and supports UTF8 paths */
#cmakedefine HDF5_UTF8_PATHS 1
/* if true, backtrace support will be used. */
#cmakedefine HAVE_EXECINFO_H 1
/* if true, include JNA bug fix */
#cmakedefine JNA 1
/* do large file tests */
#cmakedefine LARGE_FILE_TESTS 1
/* If true, turn on logging. */
#cmakedefine LOGGING 1
/* If true, define nc_set_log_level. */
#cmakedefine ENABLE_SET_LOG_LEVEL 1
/* max size of the default per-var chunk cache. */
#cmakedefine MAX_DEFAULT_CACHE_SIZE ${MAX_DEFAULT_CACHE_SIZE}
/* min blocksize for posixio. */
#cmakedefine NCIO_MINBLOCKSIZE ${NCIO_MINBLOCKSIZE}
/* Add extra properties to _NCProperties attribute */
#cmakedefine NCPROPERTIES_EXTRA ${NCPROPERTIES_EXTRA}
/* Dispatch table version */
#cmakedefine NC_DISPATCH_VERSION ${NC_DISPATCH_VERSION}
/* no IEEE float on this platform */
#cmakedefine NO_IEEE_FLOAT 1
#cmakedefine BUILD_V2 1
#cmakedefine ENABLE_DOXYGEN 1
#cmakedefine ENABLE_INTERNAL_DOCS 1
#cmakedefine VALGRIND_TESTS 1
#cmakedefine ENABLE_CDMREMOTE 1
#cmakedefine USE_HDF5 1
#cmakedefine ENABLE_FILEINFO 1
#cmakedefine TEST_PARALLEL ${TEST_PARALLEL}
#cmakedefine BUILD_RPC 1
#cmakedefine USE_X_GETOPT 1
#cmakedefine ENABLE_EXTREME_NUMBERS 1
/* do not build the netCDF version 2 API */
#cmakedefine NO_NETCDF_2 1
/* Name of package */
#define PACKAGE "netcdf"
/* Define to the address where bug reports for this package should be sent. */
#define PACKAGE_BUGREPORT "support-netcdf@unidata.ucar.edu"
/* Define to the full name of this package. */
#define PACKAGE_NAME "netCDF"
/* Define to the full name and version of this package. */
#define PACKAGE_STRING "netCDF ${netCDF_VERSION}"
/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "netcdf"
/* Define to the home page for this package. */
#define PACKAGE_URL ""
/* Define to the version of this package. */
#cmakedefine PACKAGE_VERSION "${netCDF_VERSION}"
/* Do we have access to the Windows Registry */
#cmakedefine REGEDIT 1
/* define the possible sources for remote test servers */
#cmakedefine REMOTETESTSERVERS "${REMOTETESTSERVERS}"
/* The size of `ulonglong` as computed by sizeof. */
#cmakedefine SIZEOF_ULONGLONG @SIZEOF_ULONGLONG@
/* The size of `longlong` as computed by sizeof. */
#cmakedefine SIZEOF_LONGLONG @SIZEOF_LONGLONG@
/* The size of `char` as computed by sizeof. */
#cmakedefine SIZEOF_CHAR @SIZEOF_CHAR@
/* The size of `uchar` as computed by sizeof. */
#cmakedefine SIZEOF_UCHAR @SIZEOF_UCHAR@
/* The size of `__int64` found on Windows systems. */
#cmakedefine SIZEOF___INT64 ${SIZEOF___INT64}
/* The size of `void*` as computed by sizeof. */
#cmakedefine SIZEOF_VOIDSTAR ${SIZEOF_VOIDSTAR}
/* The size of `off64_t` as computed by sizeof. */
#cmakedefine SIZEOF_OFF64_T ${SIZEOF_OFF64_T}
/* The size of `double', as computed by sizeof. */
#cmakedefine SIZEOF_DOUBLE ${SIZEOF_DOUBLE}
/* The size of `float', as computed by sizeof. */
#cmakedefine SIZEOF_FLOAT ${SIZEOF_FLOAT}
/* The size of `int', as computed by sizeof. */
#cmakedefine SIZEOF_INT ${SIZEOF_INT}
/* The size of `long', as computed by sizeof. */
#cmakedefine SIZEOF_LONG ${SIZEOF_LONG}
/* The size of `long long', as computed by sizeof. */
#cmakedefine SIZEOF_LONG_LONG ${SIZEOF_LONG_LONG}
/* The size of `off_t', as computed by sizeof. */
#cmakedefine SIZEOF_OFF_T ${SIZEOF_OFF_T}
/* The size of `short', as computed by sizeof. */
#cmakedefine SIZEOF_SHORT ${SIZEOF_SHORT}
/* The size of `size_t', as computed by sizeof. */
#cmakedefine SIZEOF_SIZE_T ${SIZEOF_SIZE_T}
/* The size of `uint', as computed by sizeof. */
#cmakedefine SIZEOF_UINT ${SIZEOF_UINT}
/* The size of `unsigned int', as computed by sizeof. */
#cmakedefine SIZEOF_UNSIGNED_INT ${SIZEOF_UNSIGNED_INT}
/* The size of `unsigned long long', as computed by sizeof. */
#cmakedefine SIZEOF_UNSIGNED_LONG_LONG ${SIZEOF_UNSIGNED_LONG_LONG}
/* The size of `unsigned short int', as computed by sizeof. */
#cmakedefine SIZEOF_UNSIGNED_SHORT_INT ${SIZEOF_UNSIGNED_SHORT_INT}
/* The size of `ushort', as computed by sizeof. */
#cmakedefine SIZEOF_USHORT ${SIZEOF_USHORT}
/* The size of `void*', as computed by sizeof. */
#cmakedefine SIZEOF_VOIDP ${SIZEOF_VOIDP}
/* Place to put very large netCDF test files. */
#cmakedefine TEMP_LARGE "${TEMP_LARGE}"
/* if true, build DAP Client */
#cmakedefine USE_DAP 1
/* if true, include NC_DISKLESS code */
#cmakedefine USE_DISKLESS 1
/* set this to use extreme numbers in tests */
#cmakedefine USE_EXTREME_NUMBERS 1
/* if true, use ffio instead of posixio */
#cmakedefine USE_FFIO 1
/* if true, include experimental fsync code */
#cmakedefine USE_FSYNC 1
/* if true, use HDF4 too */
#cmakedefine USE_HDF4 1
/* If true, use wget to fetch some sample HDF4 data, and then test against
   it. */
//#cmakedefine USE_HDF4_FILE_TESTS 1
/* if true, use mmap for in-memory files */
#cmakedefine USE_MMAP 1
/* if true, build netCDF-4 */
#cmakedefine USE_NETCDF4 1
/* build the netCDF version 2 API */
#cmakedefine USE_NETCDF_2 1
/* if true, pnetcdf or parallel netcdf-4 is in use */
#cmakedefine USE_PARALLEL 1
/* if true, parallel netcdf-4 is in use */
#cmakedefine USE_PARALLEL4 1
/* if true, parallel netCDF is used */
#cmakedefine USE_PNETCDF 1
/* if true, use stdio instead of posixio */
#cmakedefine USE_STDIO 1
/* if true, multi-filters enabled */
#cmakedefine ENABLE_MULTIFILTERS 1
/* if true, enable nczarr blosc support */
#cmakedefine ENABLE_BLOSC 1
/* Version number of package */
#cmakedefine VERSION "${netCDF_VERSION}"
/* Capture Windows version and build */
#cmakedefine WINVERMAJOR ${WINVERMAJOR}
#cmakedefine WINVERBUILD ${WINVERBUILD}
/* Define WORDS_BIGENDIAN to 1 if your processor stores words with the most
significant byte first (like Motorola and SPARC, unlike Intel). */
#if defined AC_APPLE_UNIVERSAL_BUILD
# if defined __BIG_ENDIAN__
# define WORDS_BIGENDIAN 1
# endif
#else
# ifndef WORDS_BIGENDIAN
#cmakedefine WORDS_BIGENDIAN
# endif
#endif
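/* Usage sketch (illustrative, not part of the original template): code that
   includes config.h can branch on WORDS_BIGENDIAN at compile time, e.g. to
   byte-swap an unsigned 32-bit value v read from a big-endian file format
   only when the host is little-endian:

   #ifndef WORDS_BIGENDIAN
       v = (v >> 24) | ((v >> 8) & 0xff00u) | ((v << 8) & 0xff0000u) | (v << 24);
   #endif
*/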
/* Enable large inode numbers on Mac OS X 10.5. */
#ifndef _DARWIN_USE_64_BIT_INODE
# define _DARWIN_USE_64_BIT_INODE 1
#endif
/* Define for large files, on AIX-style hosts. */
#cmakedefine _LARGE_FILES ${_LARGE_FILES}
/* Define to `long int' if <sys/types.h> does not define. */
#cmakedefine off_t long int
/* Define to `unsigned int' if <sys/types.h> does not define. */
#cmakedefine size_t unsigned int
/* Define to `int' if <sys/types.h> does not define. */
#cmakedefine ssize_t int
/* Define to `signed long' if <sys/types.h> does not define. */
#cmakedefine ptrdiff_t signed long
/* Define to `unsigned long' if <sys/types.h> does not define. */
#cmakedefine uintptr_t unsigned long
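/* Illustrative note (assumption about how CMake expands the lines above): on
   a platform whose <sys/types.h> lacks one of these types, the corresponding
   line is emitted as a plain macro, e.g.

     #define ssize_t int

   so the rest of the library can use the POSIX type names unconditionally. */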
#cmakedefine WORDS_BIGENDIAN
#include "ncconfigure.h"
#endif