Merge branch 'main' into updatedocs.dmh

Ward Fisher 2022-05-04 17:01:06 -06:00 committed by GitHub
commit 821f69f669
84 changed files with 1621 additions and 956 deletions


@ -7,7 +7,7 @@
name: Run macOS-based netCDF Tests
on: [pull_request,push]
on: [pull_request]
jobs:


@ -4,7 +4,7 @@
name: Run Ubuntu/Linux netCDF Tests
on: [ pull_request ]
on: [pull_request]
jobs:
@ -82,7 +82,7 @@ jobs:
key: hdf5-parallel-${{ runner.os }}-${{ matrix.hdf5 }}
- name: Build libhdf5-${{ matrix.hdf5 }}
- name: Build libhdf5-${{ matrix.hdf5 }}-pnetcdf-1.12.3
if: steps.cache-hdf5.outputs.cache-hit != 'true'
run: |
set -x
@ -100,6 +100,13 @@ jobs:
make -j
make install -j
popd
wget https://parallel-netcdf.github.io/Release/pnetcdf-1.12.3.tar.gz
tar -zxf pnetcdf-1.12.3.tar.gz
pushd pnetcdf-1.12.3
CC=mpicc ./configure --disable-static --enable-shared --prefix=${HOME}/environments/${{ matrix.hdf5 }}
make -j
make install -j
popd
#####
# One-Off Autotools-based tests.
@ -233,7 +240,7 @@ jobs:
- name: Configure
shell: bash -l {0}
run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} CC=mpicc ./configure --enable-hdf4 --enable-hdf5 --enable-dap --disable-dap-remote-tests
run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} CC=mpicc ./configure --enable-hdf4 --enable-hdf5 --enable-dap --disable-dap-remote-tests --enable-parallel-tests --enable-pnetcdf
if: ${{ success() }}
- name: Look at config.log if error


@ -623,26 +623,6 @@ ENDIF(ENABLE_STRICT_NULL_BYTE_HEADER_PADDING)
# 3. is nczarr enabled?
# We need separate flags for cases 1 and 2
# We need to determine if libsz is available both for HDF5 and NCZarr
# If user has specified the `SZIP_LIBRARY`, use it; otherwise try to find...
IF(NOT SZIP_LIBRARY)
FIND_LIBRARY(SZIP PATH NAMES szip sz sz2)
IF(SZIP)
SET(SZIP_LIBRARY ${SZIP})
ELSE()
UNSET(SZIP_LIBRARY)
UNSET(SZIP)
ENDIF()
ENDIF()
IF(SZIP_LIBRARY)
SET(SZIP_FOUND yes)
SET(HAVE_SZ yes)
ELSE()
SET(SZIP_FOUND no)
SET(HAVE_SZ no)
ENDIF()
##
# Option to Enable HDF5
#
@ -905,15 +885,6 @@ IF(USE_HDF5)
int x = 1;}" USE_HDF5_SZIP)
IF(USE_HDF5_SZIP)
SET(HAVE_H5Z_SZIP yes)
# If user has specified the `SZIP_LIBRARY`, use it; otherwise try to find...
IF(SZIP_FOUND)
SET(CMAKE_REQUIRED_LIBRARIES ${SZIP_LIBRARY} ${CMAKE_REQUIRED_LIBRARIES})
MESSAGE(STATUS "HDF5 has szip.")
ELSE()
MESSAGE(FATAL_ERROR "HDF5 Requires SZIP, but cannot find libszip or libsz.")
ENDIF()
ELSE()
SET(HAVE_H5Z_SZIP no)
ENDIF()
####
@ -1114,25 +1085,53 @@ string(TOLOWER "${filter}" downfilter)
IF(${filter}_FOUND)
INCLUDE_DIRECTORIES(${filter}_INCLUDE_DIRS})
SET(ENABLE_${upfilter} TRUE)
SET(STD_FILTERS "${STD_FILTERS},${downfilter}")
SET(HAVE_${upfilter} ON)
SET(STD_FILTERS "${STD_FILTERS} ${downfilter}")
MESSAGE(">>> Standard Filter: ${downfilter}")
ELSE()
SET(ENABLE_${upfilter} FALSE)
SET(HAVE_${upfilter} OFF)
ENDIF()
endmacro(set_std_filter)
# Locate some compressors
FIND_PACKAGE(Szip)
FIND_PACKAGE(Bz2)
FIND_PACKAGE(Blosc)
FIND_PACKAGE(Zstd)
# Accumulate standard filters
set(STD_FILTERS "deflate") # Always have deflate */
set_std_filter(SZIP)
set(STD_FILTERS "deflate") # Always have deflate*/
set_std_filter(Szip)
SET(HAVE_SZ ${Szip_FOUND})
set_std_filter(Blosc)
set_std_filter(Zstd)
set_std_filter(Bz2)
IF(NOT Bz2_FOUND)
set(STD_FILTERS "${STD_FILTERS},bzip2") # Always have bzip2 */
IF(Bz2_FOUND)
set_std_filter(Bz2)
ELSE()
# The reason we use a local version is to support a more complex test case
MESSAGE(WARNING "libbz2 not found; using built-in version")
SET(HAVE_LOCAL_BZ2 ON)
SET(HAVE_BZ2 ON)
set(STD_FILTERS "${STD_FILTERS} bz2")
ENDIF()
# If user wants, then install selected plugins
SET(PLUGIN_INSTALL_DIR "" CACHE STRING "Whether and where we should install plugins")
SET(ENABLE_PLUGIN_INSTALL OFF)
if(DEFINED PLUGIN_INSTALL_DIR OR DEFINED CACHE{PLUGIN_INSTALL_DIR})
IF(PLUGIN_INSTALL_DIR STREQUAL "")
MESSAGE(WARNING "No plugin directory value specified; option ignored.")
UNSET(PLUGIN_INSTALL_DIR)
UNSET(PLUGIN_INSTALL_DIR CACHE)
SET(PLUGIN_INSTALL_DIR_SETTING "N.A.")
ELSE()
SET(PLUGIN_INSTALL_DIR_SETTING "${PLUGIN_INSTALL_DIR}")
SET(ENABLE_PLUGIN_INSTALL ON)
ENDIF()
ELSE()
SET(PLUGIN_INSTALL_DIR_SETTING "N.A.")
ENDIF()
# See if we have libzip
@ -2442,6 +2441,7 @@ is_enabled(ENABLE_V2_API HAS_NC2)
is_enabled(ENABLE_NETCDF_4 HAS_NC4)
is_enabled(ENABLE_HDF4 HAS_HDF4)
is_enabled(USE_HDF5 HAS_HDF5)
is_enabled(OFF HAS_BENCHMARKS)
is_enabled(STATUS_PNETCDF HAS_PNETCDF)
is_enabled(STATUS_PARALLEL HAS_PARALLEL)
is_enabled(ENABLE_PARALLEL4 HAS_PARALLEL4)
@ -2465,6 +2465,7 @@ is_enabled(ENABLE_LOGGING HAS_LOGGING)
is_enabled(ENABLE_FILTER_TESTING DO_FILTER_TESTS)
is_enabled(HAVE_SZ HAS_SZIP)
is_enabled(HAVE_SZ HAS_SZLIB_WRITE)
is_enabled(HAVE_ZSTD HAS_ZSTD)
# Generate file from template.
CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/libnetcdf.settings.in"
@ -2520,6 +2521,10 @@ IF(ENABLE_NCZARR)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_test4/findplugin.in ${CMAKE_CURRENT_BINARY_DIR}/nczarr_test/findplugin.sh @ONLY NEWLINE_STYLE LF)
ENDIF()
IF(ENABLE_PLUGINS)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_test4/findplugin.in ${CMAKE_CURRENT_BINARY_DIR}/plugins/findplugin.sh @ONLY NEWLINE_STYLE LF)
ENDIF()
IF(ENABLE_EXAMPLES)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/nc_test4/findplugin.in ${CMAKE_CURRENT_BINARY_DIR}/examples/C/findplugin.sh @ONLY NEWLINE_STYLE LF)
ENDIF()


@ -8,6 +8,7 @@ This file contains a high-level description of this package's evolution. Release
## 4.8.2 - TBD
* [Enhancement] Update the documentation to match the current filter capabilities See [Github #2249](https://github.com/Unidata/netcdf-c/pull/2249).
* [Enhancement] Support installation of pre-built standard filters into user-specified location. See [Github #2318](https://github.com/Unidata/netcdf-c/pull/2318).
* [Enhancement] Improve filter support. More specifically: (1) add nc_inq_filter_avail to check whether a filter is available, (2) add the notion of standard filters, and (3) clean up szip support to fix its interaction with NCZarr. See [Github #2245](https://github.com/Unidata/netcdf-c/pull/2245). A brief usage sketch follows this list.
* [Enhancement] Switch to tinyxml2 as the default xml parser implementation. See [Github #2170](https://github.com/Unidata/netcdf-c/pull/2170).
* [Bug Fix] Require that the type of the variable in nc_def_var_filter is not variable length. See [Github #2231](https://github.com/Unidata/netcdf-c/pull/2231).
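As a rough illustration of the nc_inq_filter_avail check mentioned in the filter-support entry above (a sketch, not code from this change set; the bzip2 filter id value, the parameter choice, and the helper name are assumptions):

    #include "netcdf.h"
    #include "netcdf_filter.h"

    #define EXAMPLE_BZIP2_ID 307u   /* assumed HDF5 filter id for bzip2 */

    /* Sketch: apply a filter to a variable only when the library reports
     * that an implementation of that filter is actually available. */
    static int
    maybe_apply_bzip2(int ncid, int varid)
    {
        unsigned level = 9; /* assumed compression level parameter */
        if (nc_inq_filter_avail(ncid, EXAMPLE_BZIP2_ID) == NC_NOERR)
            return nc_def_var_filter(ncid, varid, EXAMPLE_BZIP2_ID, 1, &level);
        return NC_NOERR; /* silently skip when the filter is unavailable */
    }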
@ -68,7 +69,7 @@ This file contains a high-level description of this package's evolution. Release
* [Enhancement] Add some optimizations to NCZarr, do some cleanup of code cruft, add some NCZarr test cases, and add a performance test to NCZarr. See [Github #1908](https://github.com/Unidata/netcdf-c/pull/1908) for more information.
* [Bug Fix] Implement a better chunk cache system for NCZarr. The cache now uses extendible hashing plus a linked list to provide a combination of expandability, fast access, and LRU behavior. See [Github #1887](https://github.com/Unidata/netcdf-c/pull/1887) for more information.
* [Enhancement] Provide .rc fields for S3 authentication: HTTP.S3.ACCESSID and HTTP.S3.SECRETKEY.
* [Enhancement] Give the client control over what parts of a DAP2 URL are URL encoded (i.e. %xx). This is to support the different decoding rules that servers apply to incoming URLS. See [Github #1884](https://github.com/Unidata/netcdf-c/pull/1844) for more information.
* [Enhancement] Give the client control over what parts of a DAP2 URL are URL encoded (i.e. %xx). This is to support the different decoding rules that servers apply to incoming URLS. See [Github #1884](https://github.com/Unidata/netcdf-c/pull/1884) for more information.
* [Bug Fix] Fix incorrect time offsets from `ncdump -t`, in some cases when the time `units` attribute contains both a **non-zero** time-of-day, and a time zone suffix containing the letter "T", such as "UTC". See [Github #1866](https://github.com/Unidata/netcdf-c/pull/1866) for more information.
* [Bug Fix] Cleanup the NCZarr S3 build options. See [Github #1869](https://github.com/Unidata/netcdf-c/pull/1869) for more information.
* [Bug Fix] Support aligned access for selected ARM processors. See [Github #1871](https://github.com/Unidata/netcdf-c/pull/1871) for more information.


@ -0,0 +1,64 @@
# Searches for an installation of the bzip2 library. On success, it sets the following variables:
#
# Bzip2_FOUND Set to true to indicate the bzip2 library was found
# Bzip2_INCLUDE_DIRS The directory containing the header file bzip2/bzip2.h
# Bzip2_LIBRARIES The libraries needed to use the bzip2 library
#
# To specify an additional directory to search, set Bzip2_ROOT.
#
# Author: Siddhartha Chaudhuri, 2009
#
# Look for the header, first in the user-specified location and then in the system locations
SET(Bzip2_INCLUDE_DOC "The directory containing the header file bzip2.h")
FIND_PATH(Bzip2_INCLUDE_DIRS NAMES bzip2.h bzip2/bzip2.h PATHS ${Bzip2_ROOT} ${Bzip2_ROOT}/include DOC ${Bzip2_INCLUDE_DOC} NO_DEFAULT_PATH)
IF(NOT Bzip2_INCLUDE_DIRS) # now look in system locations
FIND_PATH(Bzip2_INCLUDE_DIRS NAMES bzlib.h DOC ${Bzip2_INCLUDE_DOC})
ENDIF(NOT Bzip2_INCLUDE_DIRS)
SET(Bzip2_FOUND FALSE)
IF(Bzip2_INCLUDE_DIRS)
SET(Bzip2_LIBRARY_DIRS ${Bzip2_INCLUDE_DIRS})
IF("${Bzip2_LIBRARY_DIRS}" MATCHES "/include$")
# Strip off the trailing "/include" in the path.
GET_FILENAME_COMPONENT(Bzip2_LIBRARY_DIRS ${Bzip2_LIBRARY_DIRS} PATH)
ENDIF("${Bzip2_LIBRARY_DIRS}" MATCHES "/include$")
IF(EXISTS "${Bzip2_LIBRARY_DIRS}/lib")
SET(Bzip2_LIBRARY_DIRS ${Bzip2_LIBRARY_DIRS}/lib)
ENDIF(EXISTS "${Bzip2_LIBRARY_DIRS}/lib")
# Find Bzip2 libraries
FIND_LIBRARY(Bzip2_DEBUG_LIBRARY NAMES bzip2d bzip2_d libbzip2d libbzip2_d libbzip2
PATH_SUFFIXES Debug ${CMAKE_LIBRARY_ARCHITECTURE} ${CMAKE_LIBRARY_ARCHITECTURE}/Debug
PATHS ${Bzip2_LIBRARY_DIRS} NO_DEFAULT_PATH)
FIND_LIBRARY(Bzip2_RELEASE_LIBRARY NAMES bzip2 libbzip2
PATH_SUFFIXES Release ${CMAKE_LIBRARY_ARCHITECTURE} ${CMAKE_LIBRARY_ARCHITECTURE}/Release
PATHS ${Bzip2_LIBRARY_DIRS} NO_DEFAULT_PATH)
SET(Bzip2_LIBRARIES )
IF(Bzip2_DEBUG_LIBRARY AND Bzip2_RELEASE_LIBRARY)
SET(Bzip2_LIBRARIES debug ${Bzip2_DEBUG_LIBRARY} optimized ${Bzip2_RELEASE_LIBRARY})
ELSEIF(Bzip2_DEBUG_LIBRARY)
SET(Bzip2_LIBRARIES ${Bzip2_DEBUG_LIBRARY})
ELSEIF(Bzip2_RELEASE_LIBRARY)
SET(Bzip2_LIBRARIES ${Bzip2_RELEASE_LIBRARY})
ENDIF(Bzip2_DEBUG_LIBRARY AND Bzip2_RELEASE_LIBRARY)
IF(Bzip2_LIBRARIES)
SET(Bzip2_FOUND TRUE)
ENDIF(Bzip2_LIBRARIES)
ENDIF(Bzip2_INCLUDE_DIRS)
IF(Bzip2_FOUND)
# IF(NOT Bzip2_FIND_QUIETLY)
MESSAGE(STATUS "Found Bzip2: headers at ${Bzip2_INCLUDE_DIRS}, libraries at ${Bzip2_LIBRARY_DIRS}")
MESSAGE(STATUS " library is ${Bzip2_LIBRARIES}")
# ENDIF(NOT Bzip2_FIND_QUIETLY)
ELSE(Bzip2_FOUND)
IF(Bzip2_FIND_REQUIRED)
MESSAGE(FATAL_ERROR "Bzip2 library not found")
ENDIF(Bzip2_FIND_REQUIRED)
ENDIF(Bzip2_FOUND)


@ -1,179 +0,0 @@
# - Find SZIP library
# - Derived from the FindTiff.cmake that is included with cmake
# Find the native SZIP includes and library
# This module defines
# SZIP_INCLUDE_DIRS, where to find tiff.h, etc.
# SZIP_LIBRARIES, libraries to link against to use SZIP.
# SZIP_FOUND, If false, do not try to use SZIP.
# also defined, but not for general use are
# SZIP_LIBRARY, where to find the SZIP library.
# SZIP_LIBRARY_DEBUG - Debug version of SZIP library
# SZIP_LIBRARY_RELEASE - Release Version of SZIP library
# MESSAGE (STATUS "Finding SZIP library and headers..." )
############################################
#
# Check the existence of the libraries.
#
############################################
# This macro was taken directly from the FindQt4.cmake file that is included
# with the CMake distribution. This is NOT my work. All work was done by the
# original authors of the FindQt4.cmake file. Only minor modifications were
# made to remove references to Qt and make this file more generally applicable
#########################################################################
MACRO (SZIP_ADJUST_LIB_VARS basename)
IF (${basename}_INCLUDE_DIR)
# if only the release version was found, set the debug variable also to the release version
IF (${basename}_LIBRARY_RELEASE AND NOT ${basename}_LIBRARY_DEBUG)
SET (${basename}_LIBRARY_DEBUG ${${basename}_LIBRARY_RELEASE})
SET (${basename}_LIBRARY ${${basename}_LIBRARY_RELEASE})
SET (${basename}_LIBRARIES ${${basename}_LIBRARY_RELEASE})
ENDIF (${basename}_LIBRARY_RELEASE AND NOT ${basename}_LIBRARY_DEBUG)
# if only the debug version was found, set the release variable also to the debug version
IF (${basename}_LIBRARY_DEBUG AND NOT ${basename}_LIBRARY_RELEASE)
SET (${basename}_LIBRARY_RELEASE ${${basename}_LIBRARY_DEBUG})
SET (${basename}_LIBRARY ${${basename}_LIBRARY_DEBUG})
SET (${basename}_LIBRARIES ${${basename}_LIBRARY_DEBUG})
ENDIF (${basename}_LIBRARY_DEBUG AND NOT ${basename}_LIBRARY_RELEASE)
IF (${basename}_LIBRARY_DEBUG AND ${basename}_LIBRARY_RELEASE)
# if the generator supports configuration types then set
# optimized and debug libraries, or if the CMAKE_BUILD_TYPE has a value
IF (CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE)
SET (${basename}_LIBRARY optimized ${${basename}_LIBRARY_RELEASE} debug ${${basename}_LIBRARY_DEBUG})
ELSE(CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE)
# if there are no configuration types and CMAKE_BUILD_TYPE has no value
# then just use the release libraries
SET (${basename}_LIBRARY ${${basename}_LIBRARY_RELEASE} )
ENDIF (CMAKE_CONFIGURATION_TYPES OR CMAKE_BUILD_TYPE)
SET (${basename}_LIBRARIES optimized ${${basename}_LIBRARY_RELEASE} debug ${${basename}_LIBRARY_DEBUG})
ENDIF (${basename}_LIBRARY_DEBUG AND ${basename}_LIBRARY_RELEASE)
SET (${basename}_LIBRARY ${${basename}_LIBRARY} CACHE FILEPATH "The ${basename} library")
IF (${basename}_LIBRARY)
SET (${basename}_FOUND 1)
ENDIF (${basename}_LIBRARY)
ENDIF (${basename}_INCLUDE_DIR )
# Make variables changeble to the advanced user
MARK_AS_ADVANCED (${basename}_LIBRARY ${basename}_LIBRARY_RELEASE ${basename}_LIBRARY_DEBUG ${basename}_INCLUDE_DIR )
ENDMACRO (SZIP_ADJUST_LIB_VARS)
# Look for the header file.
SET (SZIP_INCLUDE_SEARCH_DIRS
$ENV{SZIP_INSTALL}/include
$ENV{SZIP_INSTALL}/include/szip
/usr/include
/usr/include/szip
)
SET (SZIP_LIB_SEARCH_DIRS
$ENV{SZIP_INSTALL}/lib
/usr/lib
)
SET (SZIP_BIN_SEARCH_DIRS
$ENV{SZIP_INSTALL}/bin
/usr/bin
)
FIND_PATH (SZIP_INCLUDE_DIR
NAMES szlib.h
PATHS ${SZIP_INCLUDE_SEARCH_DIRS}
NO_DEFAULT_PATH
)
IF (WIN32 AND NOT MINGW)
SET (SZIP_SEARCH_DEBUG_NAMES "sz_d;libsz_d")
SET (SZIP_SEARCH_RELEASE_NAMES "sz;libsz;szip")
ELSE (WIN32 AND NOT MINGW)
SET (SZIP_SEARCH_DEBUG_NAMES "sz_d")
SET (SZIP_SEARCH_RELEASE_NAMES "sz")
ENDIF (WIN32 AND NOT MINGW)
# Look for the library.
FIND_LIBRARY (SZIP_LIBRARY_DEBUG
NAMES ${SZIP_SEARCH_DEBUG_NAMES}
PATHS ${SZIP_LIB_SEARCH_DIRS}
NO_DEFAULT_PATH
)
FIND_LIBRARY (SZIP_LIBRARY_RELEASE
NAMES ${SZIP_SEARCH_RELEASE_NAMES}
PATHS ${SZIP_LIB_SEARCH_DIRS}
NO_DEFAULT_PATH
)
SZIP_ADJUST_LIB_VARS (SZIP)
IF (SZIP_INCLUDE_DIR AND SZIP_LIBRARY)
SET (SZIP_FOUND 1)
SET (SZIP_LIBRARIES ${SZIP_LIBRARY})
SET (SZIP_INCLUDE_DIRS ${SZIP_INCLUDE_DIR})
IF (SZIP_LIBRARY_DEBUG)
GET_FILENAME_COMPONENT (SZIP_LIBRARY_PATH ${SZIP_LIBRARY_DEBUG} PATH)
SET (SZIP_LIB_DIR ${SZIP_LIBRARY_PATH})
ELSEIF (SZIP_LIBRARY_RELEASE)
GET_FILENAME_COMPONENT (SZIP_LIBRARY_PATH ${SZIP_LIBRARY_RELEASE} PATH)
SET (SZIP_LIB_DIR ${SZIP_LIBRARY_PATH})
ENDIF (SZIP_LIBRARY_DEBUG)
ELSE (SZIP_INCLUDE_DIR AND SZIP_LIBRARY)
SET (SZIP_FOUND 0)
SET (SZIP_LIBRARIES)
SET (SZIP_INCLUDE_DIRS)
ENDIF (SZIP_INCLUDE_DIR AND SZIP_LIBRARY)
# Report the results.
IF (NOT SZIP_FOUND)
SET (SZIP_DIR_MESSAGE
"SZip was not found. Make sure SZIP_LIBRARY and SZIP_INCLUDE_DIR are set or set the SZIP_INSTALL environment variable."
)
IF (NOT SZIP_FIND_QUIETLY)
MESSAGE (STATUS "${SZIP_DIR_MESSAGE}")
ELSE (NOT SZIP_FIND_QUIETLY)
IF (SZIP_FIND_REQUIRED)
MESSAGE (FATAL_ERROR "SZip was NOT found and is Required by this project")
ENDIF (SZIP_FIND_REQUIRED)
ENDIF (NOT SZIP_FIND_QUIETLY)
ENDIF (NOT SZIP_FOUND)
IF (SZIP_FOUND)
INCLUDE (CheckSymbolExists)
#############################################
# Find out if SZIP was build using dll's
#############################################
# Save required variable
SET (CMAKE_REQUIRED_INCLUDES_SAVE ${CMAKE_REQUIRED_INCLUDES})
SET (CMAKE_REQUIRED_FLAGS_SAVE ${CMAKE_REQUIRED_FLAGS})
# Add SZIP_INCLUDE_DIR to CMAKE_REQUIRED_INCLUDES
SET (CMAKE_REQUIRED_INCLUDES "${CMAKE_REQUIRED_INCLUDES};${SZIP_INCLUDE_DIRS}")
CHECK_SYMBOL_EXISTS (SZIP_BUILT_AS_DYNAMIC_LIB "SZconfig.h" HAVE_SZIP_DLL)
IF (HAVE_SZIP_DLL STREQUAL "TRUE")
SET (HAVE_SZIP_DLL "1")
ENDIF (HAVE_SZIP_DLL STREQUAL "TRUE")
# Restore CMAKE_REQUIRED_INCLUDES and CMAKE_REQUIRED_FLAGS variables
SET (CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES_SAVE})
SET (CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS_SAVE})
#
#############################################
ENDIF (SZIP_FOUND)
IF (FIND_SZIP_DEBUG)
MESSAGE (STATUS "SZIP_INCLUDE_DIR: ${SZIP_INCLUDE_DIR}")
MESSAGE (STATUS "SZIP_INCLUDE_DIRS: ${SZIP_INCLUDE_DIRS}")
MESSAGE (STATUS "SZIP_LIBRARY_DEBUG: ${SZIP_LIBRARY_DEBUG}")
MESSAGE (STATUS "SZIP_LIBRARY_RELEASE: ${SZIP_LIBRARY_RELEASE}")
MESSAGE (STATUS "HAVE_SZIP_DLL: ${HAVE_SZIP_DLL}")
MESSAGE (STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
ENDIF (FIND_SZIP_DEBUG)


@ -0,0 +1,64 @@
# Searches for an installation of the szip library. On success, it sets the following variables:
#
# Szip_FOUND Set to true to indicate the szip library was found
# Szip_INCLUDE_DIRS The directory containing the header file szip/szip.h
# Szip_LIBRARIES The libraries needed to use the szip library
#
# To specify an additional directory to search, set Szip_ROOT.
#
# Author: Siddhartha Chaudhuri, 2009
#
# Look for the header, first in the user-specified location and then in the system locations
SET(Szip_INCLUDE_DOC "The directory containing the header file szip.h")
FIND_PATH(Szip_INCLUDE_DIRS NAMES szlib.h szip.h szip/szip.h PATHS ${Szip_ROOT} ${Szip_ROOT}/include DOC ${Szip_INCLUDE_DOC} NO_DEFAULT_PATH)
IF(NOT Szip_INCLUDE_DIRS) # now look in system locations
FIND_PATH(Szip_INCLUDE_DIRS NAMES szlib.h szip.h szip/szip.h DOC ${Szip_INCLUDE_DOC})
ENDIF(NOT Szip_INCLUDE_DIRS)
SET(Szip_FOUND FALSE)
IF(Szip_INCLUDE_DIRS)
SET(Szip_LIBRARY_DIRS ${Szip_INCLUDE_DIRS})
IF("${Szip_LIBRARY_DIRS}" MATCHES "/include$")
# Strip off the trailing "/include" in the path.
GET_FILENAME_COMPONENT(Szip_LIBRARY_DIRS ${Szip_LIBRARY_DIRS} PATH)
ENDIF("${Szip_LIBRARY_DIRS}" MATCHES "/include$")
IF(EXISTS "${Szip_LIBRARY_DIRS}/lib")
SET(Szip_LIBRARY_DIRS ${Szip_LIBRARY_DIRS}/lib)
ENDIF(EXISTS "${Szip_LIBRARY_DIRS}/lib")
# Find Szip libraries
FIND_LIBRARY(Szip_DEBUG_LIBRARY NAMES szipd szip_d libszipd libszip_d szip libszip sz2 libsz2
PATH_SUFFIXES Debug ${CMAKE_LIBRARY_ARCHITECTURE} ${CMAKE_LIBRARY_ARCHITECTURE}/Debug
PATHS ${Szip_LIBRARY_DIRS} NO_DEFAULT_PATH)
FIND_LIBRARY(Szip_RELEASE_LIBRARY NAMES szip libszip sz libsz sz2 libsz2
PATH_SUFFIXES Release ${CMAKE_LIBRARY_ARCHITECTURE} ${CMAKE_LIBRARY_ARCHITECTURE}/Release
PATHS ${Szip_LIBRARY_DIRS} NO_DEFAULT_PATH)
SET(Szip_LIBRARIES )
IF(Szip_DEBUG_LIBRARY AND Szip_RELEASE_LIBRARY)
SET(Szip_LIBRARIES debug ${Szip_DEBUG_LIBRARY} optimized ${Szip_RELEASE_LIBRARY})
ELSEIF(Szip_DEBUG_LIBRARY)
SET(Szip_LIBRARIES ${Szip_DEBUG_LIBRARY})
ELSEIF(Szip_RELEASE_LIBRARY)
SET(Szip_LIBRARIES ${Szip_RELEASE_LIBRARY})
ENDIF(Szip_DEBUG_LIBRARY AND Szip_RELEASE_LIBRARY)
IF(Szip_LIBRARIES)
SET(Szip_FOUND TRUE)
ENDIF(Szip_LIBRARIES)
ENDIF(Szip_INCLUDE_DIRS)
IF(Szip_FOUND)
# IF(NOT Szip_FIND_QUIETLY)
MESSAGE(STATUS "Found Szip: headers at ${Szip_INCLUDE_DIRS}, libraries at ${Szip_LIBRARY_DIRS}")
MESSAGE(STATUS " library is ${Szip_LIBRARIES}")
# ENDIF(NOT Szip_FIND_QUIETLY)
ELSE(Szip_FOUND)
IF(Szip_FIND_REQUIRED)
MESSAGE(FATAL_ERROR "Szip library not found")
ENDIF(Szip_FIND_REQUIRED)
ENDIF(Szip_FOUND)


@ -173,7 +173,7 @@ are set when opening a binary file on Windows. */
#cmakedefine HAVE_ATEXIT 1
/* Define to 1 if bzip2 library available. */
#cmakedefine HAVE_BZIP2 1
#cmakedefine HAVE_BZ2 1
/* Define to 1 if zstd library available. */
#cmakedefine HAVE_ZSTD 1


@ -61,6 +61,7 @@ AC_CONFIG_HEADERS([config.h])
##
SAVE_CFLAGS="${CFLAGS}"
AC_LANG_PUSH([C])
AC_LANG_COMPILER_REQUIRE
CFLAGS="${CFLAGS} -fno-strict-aliasing"
AC_COMPILE_IFELSE([AC_LANG_PROGRAM([
@ -667,24 +668,21 @@ fi
AC_MSG_CHECKING([whether libzstd library is available])
AC_MSG_RESULT([${have_zstd}])
# See if we have libbzip2 or libbz2
AC_CHECK_LIB([bz2],[BZ2_bzCompress],[have_bzip2=yes],[have_bzip2=no])
if test "x$have_bzip2" = "xyes" ; then
# See if we have libbz2
AC_CHECK_LIB([bz2],[BZ2_bzCompress],[have_bz2=yes],[have_bz2=no])
if test "x$have_bz2" = "xyes" ; then
AC_SEARCH_LIBS([BZ2_bzCompress],[bz2 bz2.dll cygbz2.dll], [], [])
AC_DEFINE([HAVE_BZIP2], [1], [if true, bz2 library is installed])
AC_DEFINE([HAVE_BZ2], [1], [if true, bz2 library is installed])
fi
AC_MSG_CHECKING([whether libbz2 library is available])
AC_MSG_RESULT([${have_bzip2}])
AC_MSG_RESULT([${have_bz2}])
if test "x$have_bzip2" = "xno" ; then
AC_CHECK_LIB([bzip2],[BZ2_bzCompress],[have_bzip2=yes],[have_bzip2=no])
if test "x$have_bzip2" = "xyes" ; then
AC_SEARCH_LIBS([BZ2_bzCompress],[bzip2 bzip2.dll cygbzip2.dll], [], [])
AC_DEFINE([HAVE_BZIP2], [1], [if true, bzip2 library is installed])
fi
AC_MSG_CHECKING([whether libbzip2 library is available])
AC_MSG_RESULT([${have_bzip2}])
if test "x$have_bz2" = "xno" ; then
have_local_bz2=yes
else
have_local_bz2=no
fi
AM_CONDITIONAL(HAVE_LOCAL_BZ2, [test "x$have_local_bz2" = xyes])
# Note that szip management is tricky.
# This is because we have three things to consider:
@ -1753,7 +1751,7 @@ AM_CONDITIONAL(HAS_MULTIFILTERS, [test "x$has_multifilters" = xyes])
AM_CONDITIONAL(HAVE_SZ, [test "x$have_sz" = xyes])
AM_CONDITIONAL(HAVE_H5Z_SZIP, [test "x$enable_hdf5_szip" = xyes])
AM_CONDITIONAL(HAVE_BLOSC, [test "x$have_blosc" = xyes])
AM_CONDITIONAL(HAVE_BZIP2, [test "x$have_bzip2" = xyes])
AM_CONDITIONAL(HAVE_BZ2, [test "x$have_bz2" = xyes])
AM_CONDITIONAL(HAVE_ZSTD, [test "x$have_zstd" = xyes])
# If the machine doesn't have a long long, and we want netCDF-4, then
@ -1839,6 +1837,7 @@ AC_SUBST(HAS_NC2,[$nc_build_v2])
AC_SUBST(HAS_NC4,[$enable_netcdf_4])
AC_SUBST(HAS_CDF5,[$enable_cdf5])
AC_SUBST(HAS_HDF4,[$enable_hdf4])
AC_SUBST(HAS_BENCHMARKS,[$enable_benchmarks])
AC_SUBST(HAS_HDF5,[$enable_hdf5])
AC_SUBST(HAS_PNETCDF,[$enable_pnetcdf])
AC_SUBST(HAS_LOGGING, [$enable_logging])
@ -1860,28 +1859,48 @@ AC_SUBST(HAS_LOGGING,[$enable_logging])
AC_SUBST(DO_FILTER_TESTS,[$enable_filter_testing])
AC_SUBST(HAS_SZLIB,[$have_sz])
AC_SUBST(HAS_SZLIB_WRITE, [$have_sz])
AC_SUBST(HAS_ZSTD,[$have_zstd])
# Always available
std_filters="deflate,bzip2"
std_filters="deflate bz2"
if test "x$enable_szlib" = xyes ; then
std_filters="${std_filters},szip"
std_filters="${std_filters} szip"
fi
# We need to include szip iff HDF5 && HDF5_HAS_SZIP || !HDF5 && NCZARR && libsz
if test "x$enable_hdf5" = xyes && test "x$enable_hdf5_szip" = xyes ; then
std_filters="${std_filters},szip"
std_filters="${std_filters} szip"
fi
if test "x$enable_hdf5" = xno && test "x$have_sz" = xyes ; then
std_filters="${std_filters},szip"
std_filters="${std_filters} szip"
fi
if test "x$have_blosc" = xyes ; then
std_filters="${std_filters},blosc"
std_filters="${std_filters} blosc"
fi
if test "x$have_zstd" = xyes ; then
std_filters="${std_filters},zst"
std_filters="${std_filters} zstd"
fi
AC_SUBST(STD_FILTERS,[$std_filters])
# If user wants, then install selected standard filters
AC_MSG_CHECKING([whether and where we should install plugins])
AC_ARG_WITH([plugin-dir], [AS_HELP_STRING([--with-plugin-dir=<absolute directory>],
[Install selected standard filters in specified directory])])
AC_MSG_RESULT([$with_plugin_dir])
if test "x$with_plugin_dir" = x ; then
AC_MSG_WARN([No plugin directory value specified; option ignored.])
with_plugin_dir=
with_plugin_dir_setting="N.A."
enable_plugin_dir=no
else
with_plugin_dir_setting="$with_plugin_dir"
enable_plugin_dir=yes
fi
AM_CONDITIONAL([ENABLE_PLUGIN_DIR], [test "x$enable_plugin_dir" = xyes])
AC_SUBST([PLUGIN_INSTALL_DIR], [$with_plugin_dir])
# Better value for libnetcdf.settings
AC_SUBST([PLUGIN_INSTALL_DIR_SETTING], [$with_plugin_dir_setting])
# Access netcdf specific version of config.h
AH_BOTTOM([#include "ncconfigure.h"])
@ -1927,6 +1946,7 @@ AC_SUBST([NC_VERSION]) NC_VERSION=$VERSION
AX_SET_META([NC_HAS_NC2],[$nc_build_v2],[yes])
AX_SET_META([NC_HAS_NC4],[$enable_netcdf_4],[yes])
AX_SET_META([NC_HAS_HDF4],[$enable_hdf4],[yes])
AX_SET_META([NC_HAS_BENCHMARKS],[$enable_benchmarks],[yes])
AX_SET_META([NC_HAS_HDF5],[$enable_hdf5],[yes])
AX_SET_META([NC_HAS_DAP2],[$enable_dap],[yes])
AX_SET_META([NC_HAS_DAP4],[$enable_dap4],[yes])
@ -1943,7 +1963,9 @@ AX_SET_META([NC_HAS_BYTERANGE],[$enable_byterange],[yes])
AX_SET_META([NC_HAS_NCZARR],[$enable_nczarr],[yes])
AX_SET_META([NC_HAS_MULTIFILTERS],[$has_multifilters],[yes])
AX_SET_META([NC_HAS_LOGGING],[$enable_logging],[yes])
AX_SET_META([NC_HAS_QUANTIZE],[yes],[yes])
AX_SET_META([NC_HAS_SZIP],[$enable_hdf5_szip],[yes])
AX_SET_META([NC_HAS_ZSTD],[$have_zstd],[yes])
# This is the version of the dispatch table. If the dispatch table is
# changed, this should be incremented, so that user-defined format
@ -1968,11 +1990,13 @@ AC_MSG_NOTICE([generating header files and makefiles])
AC_CONFIG_FILES(test_common.sh:test_common.in)
AC_CONFIG_FILES(nc_test4/findplugin.sh:nc_test4/findplugin.in)
AC_CONFIG_FILES(nczarr_test/findplugin.sh:nc_test4/findplugin.in)
AC_CONFIG_FILES(plugins/findplugin.sh:nc_test4/findplugin.in)
AC_CONFIG_FILES(examples/C/findplugin.sh:nc_test4/findplugin.in)
AC_CONFIG_FILES(ncdap_test/findtestserver.c:ncdap_test/findtestserver.c.in)
AC_CONFIG_FILES(dap4_test/findtestserver4.c:ncdap_test/findtestserver.c.in)
AC_CONFIG_FILES(dap4_test/pingurl4.c:ncdap_test/pingurl.c)
AC_CONFIG_FILES([h5_test/run_par_tests.sh], [chmod ugo+x h5_test/run_par_tests.sh])
AC_CONFIG_FILES([nc_test/run_pnetcdf_tests.sh], [chmod ugo+x nc_test/run_pnetcdf_tests.sh])
AC_CONFIG_FILES([nc_test4/run_par_test.sh], [chmod ugo+x nc_test4/run_par_test.sh])
AC_CONFIG_FILES([nc_perf/run_par_bm_test.sh], [chmod ugo+x nc_perf/run_par_bm_test.sh])
AC_CONFIG_FILES([nc_perf/run_gfs_test.sh], [chmod ugo+x nc_perf/run_gfs_test.sh])


@ -1064,6 +1064,7 @@ As part of the overall build process, a number of filters are built as shared li
They may be in that directory or the "plugins/.libs" subdirectory.
It may be possible for users to utilize some of those libraries to provide filter support for general use.
If the user is using NCZarr filters, then the plugins directory has at least the following shared libraries
* libh5shuffle.so &mdash; shuffle filter
* libh5fletcher32.so &mdash; fletcher32 checksum


@ -60,11 +60,11 @@ When installed, the netCDF libraries are placed in the specified locations, alon
1. When building the netCDF-C libraries with netCDF4 support, using the `Debug` libraries may cause extraneous warnings. These warnings are related to cross-dll memory management, and appear to be harmless. You can safely ignore them by using the `Release` libraries. [NCF-220]
[r1]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/windows_installers/netCDFmajor.minor.point-NC3-32.exe
[r2]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/windows_installers/netCDFmajor.minor.point-NC3-DAP-32.exe
[r3]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/windows_installers/netCDFmajor.minor.point-NC4-32.exe
[r4]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/windows_installers/netCDFmajor.minor.point-NC4-DAP-32.exe
[r5]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/windows_installers/netCDFmajor.minor.point-NC3-64.exe
[r6]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/windows_installers/netCDFmajor.minor.point-NC3-DAP-64.exe
[r7]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/windows_installers/netCDFmajor.minor.point-NC4-64.exe
[r8]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/windows_installers/netCDFmajor.minor.point-NC4-DAP-64.exe
[r1]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/netCDFmajor.minor.point-NC3-32.exe
[r2]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/netCDFmajor.minor.point-NC3-DAP-32.exe
[r3]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/netCDFmajor.minor.point-NC4-32.exe
[r4]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/netCDFmajor.minor.point-NC4-DAP-32.exe
[r5]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/netCDFmajor.minor.point-NC3-64.exe
[r6]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/netCDFmajor.minor.point-NC3-DAP-64.exe
[r7]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/netCDFmajor.minor.point-NC4-64.exe
[r8]: https://downloads.unidata.ucar.edu/netcdf-c/major.minor.point/netCDFmajor.minor.point-NC4-DAP-64.exe


@ -7,14 +7,15 @@
# Uncomment to use a more verbose test driver
#SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/test-driver-verbose
#sh_LOG_DRIVER = $(SHELL) $(top_srcdir)/test-driver-verbose
#LOG_DRIVER = $(SHELL) $(top_srcdir)/test-driver-verbose
#TESTS_ENVIRONMENT = export SETX=1;
# Put together AM_CPPFLAGS and AM_LDFLAGS.
include $(top_srcdir)/lib_flags.am
AM_CPPFLAGS += -I$(top_srcdir)/include
AM_CPPFLAGS += -I$(top_builddir)/liblib
AM_LDFLAGS += ${top_builddir}/liblib/libnetcdf.la -lm
# Link to our assembled library.
LDADD = ${top_builddir}/liblib/libnetcdf.la
# These are the netCDF-3 examples.
check_PROGRAMS = simple_xy_wr simple_xy_rd sfc_pres_temp_wr \
@ -34,11 +35,13 @@ check_PROGRAMS += simple_nc4_wr simple_nc4_rd simple_xy_nc4_wr \
simple_xy_nc4_rd
TESTS += run_examples4.sh
if BUILD_UTILITIES
if ENABLE_FILTER_TESTING
# filter_example.c should be same as nc_test4/test_filter.c
check_PROGRAMS += filter_example
TESTS += run_filter.sh
endif
endif
endif #USE_HDF5


@ -10,26 +10,25 @@ if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../../test_common.sh
echo "*** Running filter example for netCDF-4."
set -e
if test -f ${builddir}/findplugin.sh ; then
echo "*** running test_filter example..."
. ${builddir}/findplugin.sh
# Locate the plugin path and the library names; argument order is critical
# Find bzip2 and capture
findplugin h5bzip2
BZIP2PATH="${HDF5_PLUGIN_PATH}/${HDF5_PLUGIN_LIB}"
BZIP2PATH="${HDF5_PLUGIN_DIR}/${HDF5_PLUGIN_LIB}"
# Verify
if ! test -f ${BZIP2PATH} ; then echo "Unable to locate ${BZIP2PATH}"; exit 1; fi
export HDF5_PLUGIN_PATH
echo "*** running filter_example..."
rm -f ./bzip2.nc
export HDF5_PLUGIN_PATH="${HDF5_PLUGIN_DIR}"
echo "*** running filter_example..."
${execdir}/filter_example
#rm -f ./bzip2.nc
fi # Filter enabled
rm -f ./bzip2.nc
echo "*** Filter example successful!"
exit 0


@ -300,7 +300,8 @@ typedef struct NC_FILE_INFO
nc_bool_t parallel; /**< True if file is open for parallel access */
nc_bool_t redef; /**< True if redefining an existing file */
nc_bool_t no_attr_create_order; /**< True if the creation order tracking of attributes is disabled (netcdf-4 only) */
int fill_mode; /**< Fill mode for vars */
nc_bool_t no_dimscale_attach; /**< True if attaching dimscales to variables is disabled (netcdf-4 only) */
int fill_mode; /**< Fill mode for vars - Unused internally currently */
nc_bool_t no_write; /**< true if nc_open has mode NC_NOWRITE. */
NC_GRP_INFO_T *root_grp; /**< Pointer to root group. */
short next_nc_grpid; /**< Next available group ID. */


@ -80,7 +80,13 @@
#define ATOMICTYPEMAX3 NC_DOUBLE
#define ATOMICTYPEMAX5 NC_UINT64
#ifdef USE_PARALLEL
#if !defined HDF5_PARALLEL && !defined USE_PNETCDF
typedef int MPI_Comm;
typedef int MPI_Info;
#define MPI_COMM_WORLD 0
#define MPI_INFO_NULL 0
#endif
/* Define a struct to hold the MPI info so it can be passed down the
* call stack. This is used internally by the netCDF library. It
* should not be used by netcdf users. */
@ -88,7 +94,6 @@ typedef struct NC_MPI_INFO {
MPI_Comm comm;
MPI_Info info;
} NC_MPI_INFO;
#endif
/* Define known dispatch tables and initializers */


@ -4,7 +4,7 @@
#ifndef NCXML_H
#define NCXML_H
#ifdef _WIN32
#ifdef DLL_NETCDF
#ifdef DLL_EXPORT /* define when building the library */
#define DECLSPEC __declspec(dllexport)
#else


@ -164,6 +164,7 @@ Use this in mode flags for both nc_create() and nc_open(). */
/* Upper 16 bits */
#define NC_NOATTCREORD 0x20000 /**< Disable the netcdf-4 (hdf5) attribute creation order tracking */
#define NC_NODIMSCALE_ATTACH 0x40000 /**< Disable the netcdf-4 (hdf5) attaching of dimscales to variables (#2128) */
#define NC_MAX_MAGIC_NUMBER_LEN 8 /**< Max len of user-defined format magic number. */
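Since the header hunk above only defines the flag, here is a minimal hedged sketch of how NC_NODIMSCALE_ATTACH might be passed to nc_create() together with the usual netCDF-4 mode bits; the file name, helper name, and error handling are assumptions, not part of this change:

    #include "netcdf.h"

    /* Sketch: create a netCDF-4 file with dimscale attachment disabled. */
    static int
    create_without_dimscale_attach(void)
    {
        int ncid, stat;
        stat = nc_create("example.nc", NC_CLOBBER | NC_NETCDF4 | NC_NODIMSCALE_ATTACH, &ncid);
        if (stat != NC_NOERR) return stat;
        return nc_close(ncid);
    }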


@ -34,17 +34,46 @@
/* Defined flags for filter invocation (not stored); powers of two */
#define NCZ_FILTER_DECODE 0x00000001
/* External Discovery Function */
/* External Discovery Functions */
/*
Obtain a pointer to an instance of NCZ_codec_class_t.
NCZ_get_codec_info(void) -- returns pointer to instance of NCZ_codec_class_t.
Instance can be recast based on version+sort to the plugin-type-specific info.
So the void* return value is typically actually of type NCZ_codec_class_t*.
Signature: typedef const void* (*NCZ_get_codec_info_proto)(void);
The current object returned by NCZ_get_codec_info is a
pointer to an instance of NCZ_codec_t.
The key to this struct is the set of function pointers that handle
initialization/finalization and conversion between codec JSON and HDF5
parameters. The function pointers defined in NCZ_codec_t
manipulate HDF5 parameters and NumCodec JSON.
Obtain a pointer to an instance of NCZ_codec_class_t.
NCZ_get_codec_info(void) -- returns pointer to instance of NCZ_codec_class_t.
Instance can be recast based on version+sort to the plugin-type-specific info.
So the void* return value is typically actually of type NCZ_codec_class_t*.
*/
typedef const void* (*NCZ_get_codec_info_proto)(void);
/*
Obtain a pointer to a NULL terminated vector of NCZ_codec_class_t*.
NCZ_codec_info_defaults(void) -- returns pointer to a vector of pointers to instances of NCZ_codec_class_t. The vector is NULL terminated.
So the void* return value is typically actually of type NCZ_codec_class_t**.
Signature: typedef const void* (*NCZ_codec_info_defaults_proto)(void);
This entry point is used to return the codec information for
multiple filters that otherwise do not have codec information defined.
*/
typedef const void* (*NCZ_codec_info_defaults_proto)(void);
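As a hedged sketch of the contract just described (not code from this change set; the codec instance names are hypothetical), a plugin library might export NCZ_codec_info_defaults like this:

    /* Hypothetical codec instances defined elsewhere in the plugin. */
    extern const NCZ_codec_t NCZ_example_bzip2_codec;
    extern const NCZ_codec_t NCZ_example_zstd_codec;

    /* NULL-terminated vector of codec descriptions, as required above. */
    static const NCZ_codec_t* codec_defaults[] = {
        &NCZ_example_bzip2_codec,
        &NCZ_example_zstd_codec,
        NULL
    };

    /* Returned as void* so callers can recast it to NCZ_codec_t**. */
    const void*
    NCZ_codec_info_defaults(void)
    {
        return (const void*)codec_defaults;
    }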
/* The current object returned by NCZ_get_plugin_info is a
pointer to an instance of NCZ_codec_t.
@ -86,7 +115,7 @@ int (*NCZ_hdf5_to_codec)(size_t nparams, const unsigned* params, char** codecp);
* Convert a set of visible parameters to a set of working parameters using extra environmental information.
Also allows for changes to the visible parameters. Invoked before filter is actually used.
int (*NCZ_build_parameters)(int ncid, int varid, size_t* vnparamsp, unsigned** vparamsp, size_t* wnparamsp, unsigned** wparamsp);
int (*NCZ_modify_parameters)(int ncid, int varid, size_t* vnparamsp, unsigned** vparamsp, size_t* wnparamsp, unsigned** wparamsp);
@param ncid -- (in) ncid of the variable's group
@param varid -- (in) varid of the variable
@ -96,8 +125,15 @@ int (*NCZ_build_parameters)(int ncid, int varid, size_t* vnparamsp, unsigned** v
@param wparamsp -- (out) vector of working parameters
@return -- a netcdf-c error code.
* Convert a set of working parameters to a set of visible parameters using extra environmental information, if needed.
Invoked before filter metadata is written.
* Convert an HDF5 vector of visible parameters to a JSON representation.
int (*NCZ_hdf5_to_codec)(size_t nparams, const unsigned* params, char** codecp);
@param nparams -- (in) the length of the HDF5 unsigned vector
@param params -- (in) pointer to the HDF5 unsigned vector.
@param codecp -- (out) store the string representation of the codec; caller must free.
@return -- a netcdf-c error code.
*/
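To make the NCZ_hdf5_to_codec conversion described above concrete, here is a hedged sketch for a hypothetical single-parameter filter; the codec name "bz2", the meaning of the parameter, and the error-code choices are illustrative assumptions rather than part of this change:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include "netcdf.h"

    /* Sketch: convert one HDF5 parameter (a compression level) into a
     * JSON codec string; per the contract above, the caller frees *codecp. */
    static int
    example_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp)
    {
        char json[128];
        if (nparams != 1 || params == NULL) return NC_EFILTER; /* assumed error choice */
        snprintf(json, sizeof(json), "{\"id\": \"bz2\", \"level\": %u}", params[0]);
        if (codecp != NULL && (*codecp = strdup(json)) == NULL) return NC_ENOMEM;
        return NC_NOERR;
    }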
/*


@ -64,5 +64,7 @@
#define NC_HAS_MULTIFILTERS @NC_HAS_MULTIFILTERS@ /*!< Nczarr support. */
#define NC_HAS_LOGGING @NC_HAS_LOGGING@ /*!< Logging support. */
#define NC_HAS_QUANTIZE @NC_HAS_QUANTIZE@ /*!< Quantization support. */
#define NC_HAS_ZSTD @NC_HAS_ZSTD@ /*!< Zstd support. */
#define NC_HAS_BENCHMARKS @NC_HAS_BENCHMARKS@ /*!< Benchmarks. */
#endif


@ -6,12 +6,7 @@
# See netcdf-c/COPYRIGHT file for more info.
SET(libdispatch_SOURCES dcopy.c dfile.c ddim.c datt.c dattinq.c dattput.c dattget.c derror.c dvar.c dvarget.c dvarput.c dvarinq.c ddispatch.c nclog.c dstring.c dutf8.c dinternal.c doffsets.c ncuri.c nclist.c ncbytes.c nchashmap.c nctime.c nc.c nclistmgr.c utf8proc.h utf8proc.c dpathmgr.c dutil.c drc.c dauth.c dreadonly.c dnotnc4.c dnotnc3.c dinfermodel.c
daux.c dinstance.c
dcrc32.c dcrc32.h dcrc64.c ncexhash.c ncxcache.c ncjson.c ds3util.c)
# if parallel I/O is enabled (PnetCDF or/and HDF5)
IF(ENABLE_PARALLEL)
SET(libdispatch_SOURCES ${libdispatch_SOURCES} dparallel.c)
ENDIF(ENABLE_PARALLEL)
dcrc32.c dcrc32.h dcrc64.c ncexhash.c ncxcache.c ncjson.c ds3util.c dparallel.c)
# Netcdf-4 only functions. Must be defined even if not used
SET(libdispatch_SOURCES ${libdispatch_SOURCES} dgroup.c dvlen.c dcompound.c dtype.c denum.c dopaque.c dfilter.c)


@ -15,17 +15,13 @@ noinst_LTLIBRARIES = libdispatch.la
libdispatch_la_CPPFLAGS = ${AM_CPPFLAGS}
# The source files.
libdispatch_la_SOURCES = dcopy.c dfile.c ddim.c datt.c \
dattinq.c dattput.c dattget.c derror.c dvar.c dvarget.c dvarput.c \
dvarinq.c dinternal.c ddispatch.c dutf8.c nclog.c dstring.c ncuri.c \
nclist.c ncbytes.c nchashmap.c nctime.c nc.c nclistmgr.c dauth.c \
doffsets.c dpathmgr.c dutil.c dreadonly.c dnotnc4.c dnotnc3.c \
dinfermodel.c daux.c dinstance.c \
dcrc32.c dcrc32.h dcrc64.c ncexhash.c ncxcache.c ncjson.c ds3util.c
if ENABLE_PARALLEL
libdispatch_la_SOURCES += dparallel.c
endif
libdispatch_la_SOURCES = dcopy.c dfile.c ddim.c datt.c dattinq.c \
dattput.c dattget.c derror.c dvar.c dvarget.c dvarput.c dvarinq.c \
dinternal.c ddispatch.c dutf8.c nclog.c dstring.c ncuri.c nclist.c \
ncbytes.c nchashmap.c nctime.c nc.c nclistmgr.c dauth.c doffsets.c \
dpathmgr.c dutil.c dreadonly.c dnotnc4.c dnotnc3.c dinfermodel.c \
daux.c dinstance.c dcrc32.c dcrc32.h dcrc64.c ncexhash.c ncxcache.c \
ncjson.c ds3util.c dparallel.c
# Add the utf8 codebase
libdispatch_la_SOURCES += utf8proc.c utf8proc.h


@ -1160,7 +1160,7 @@ check_file_type(const char *path, int omode, int use_parallel,
if((status = openmagic(&magicinfo))) goto done;
/* Verify we have a large enough file */
if(magicinfo.filelen < (long long)MAGIC_NUMBER_LEN)
if(magicinfo.filelen < (unsigned long long)MAGIC_NUMBER_LEN)
{status = NC_ENOTNC; goto done;}
if((status = readmagic(&magicinfo,0L,magic)) != NC_NOERR) {
status = NC_ENOTNC;
@ -1254,6 +1254,7 @@ openmagic(struct MagicFile* file)
else
#endif
status = NC_EPARINIT;
file->fh = MPI_FILE_NULL;
goto done;
}
/* Get its length */
@ -1390,7 +1391,8 @@ closemagic(struct MagicFile* file)
#ifdef USE_PARALLEL
if (file->use_parallel) {
int retval;
if((retval = MPI_File_close(&file->fh)) != MPI_SUCCESS)
if(file->fh != MPI_FILE_NULL
&& (retval = MPI_File_close(&file->fh)) != MPI_SUCCESS)
{status = NC_EPARINIT; return status;}
} else
#endif


@ -211,9 +211,10 @@ NC_mktmp(const char* base)
int fd = -1;
char* tmp = NULL;
size_t len;
#ifndef HAVE_MKSTEMP
int tries;
#define MAXTRIES 4
#ifdef HAVE_MKSTEMP
#else
mode_t mask;
#endif


@ -381,10 +381,12 @@ NC_hashmapdeactivate(NC_hashmap* map, uintptr_t data)
/* This will currently fail if `n > 180503 * 180503` */
static int isPrime(size_t n)
{
size_t i;
if (n <= 1) return 0;
if (n <= 3) return 1;
for (size_t i=1; i < NC_nprimes - 1; i++) {
for (i=1; i < NC_nprimes - 1; i++) {
size_t prime = NC_primes[i];
if (n % prime == 0) {
return 0;


@ -208,6 +208,11 @@ nc4_create_file(const char *path, int cmode, size_t initialsz,
BAIL(NC_EHDFERR);
#endif
if (cmode & NC_NODIMSCALE_ATTACH) {
/* See https://github.com/Unidata/netcdf-c/issues/2128 */
nc4_info->no_dimscale_attach = NC_TRUE;
}
if(nc4_info->mem.inmemory) {
retval = NC4_create_image_file(nc4_info,initialsz);
if(retval)
@ -337,7 +342,7 @@ nc4_H5Fcreate(const char *filename0, unsigned flags, hid_t fcpl_id, hid_t fapl_i
#ifdef HDF5_UTF8_PATHS
NCpath2utf8(filename0,&filename);
#else
#else
filename = strdup(filename0);
#endif
/* Canonicalize it since we are not opening the file ourselves */


@ -161,20 +161,24 @@ HDF5_inq_dim(int ncid, int dimid, char *name, size_t *lenp)
{
if (dim->unlimited)
{
*lenp = 0;
#ifndef USE_PARALLEL
/* Shortcut for non-parallel operation: if the dim->len is
* non-zero, it will be set to the correct size. */
if (dim->len)
*lenp = dim->len;
#endif
/* Since this is an unlimited dimension, go to the file
and see how many records there are. Take the max number
of records from all the vars that share this
dimension. */
*lenp = 0;
if (dim->len == 0) {
if (*lenp == 0)
{
if ((ret = nc4_find_dim_len(dim_grp, dimid, &lenp)))
return ret;
if (h5->no_write == NC_TRUE) {
dim->len = *lenp;
}
}
else {
*lenp = dim->len;
}
}
else


@ -117,6 +117,8 @@ find_var_dim_max_length(NC_GRP_INFO_T *grp, int varid, int dimid,
*maxlen = 0;
LOG((3, "find_var_dim_max_length varid %d dimid %d", varid, dimid));
/* Find this var. */
var = (NC_VAR_INFO_T*)ncindexith(grp->vars,varid);
if (!var) return NC_ENOTVAR;
@ -157,11 +159,27 @@ find_var_dim_max_length(NC_GRP_INFO_T *grp, int varid, int dimid,
BAIL(NC_EHDFERR);
LOG((5, "find_var_dim_max_length: varid %d len %d max: %d",
varid, (int)h5dimlen[0], (int)h5dimlenmax[0]));
for (d=0; d<dataset_ndims; d++) {
if (var->dimids[d] == dimid) {
for (d=0; d<dataset_ndims; d++)
if (var->dimids[d] == dimid)
*maxlen = *maxlen > h5dimlen[d] ? *maxlen : h5dimlen[d];
}
}
#ifdef USE_PARALLEL
/* If we are doing parallel I/O in collective mode (with
* either pnetcdf or HDF5), then communicate with all
* other tasks in the collective and find out which has
* the max value for the dimension size. */
assert(grp->nc4_info);
LOG((3, "before Allreduce *maxlen %ld grp->nc4_info->parallel %d var->parallel_access %d",
*maxlen, grp->nc4_info->parallel, var->parallel_access));
if (grp->nc4_info->parallel && var->parallel_access == NC_COLLECTIVE)
{
if ((MPI_SUCCESS != MPI_Allreduce(MPI_IN_PLACE, maxlen, 1,
MPI_UNSIGNED_LONG_LONG, MPI_MAX,
grp->nc4_info->comm)))
BAIL(NC_EMPI);
LOG((3, "after Allreduce *maxlen %ld", *maxlen));
}
#endif /* USE_PARALLEL */
}
}


@ -1957,9 +1957,11 @@ nc4_rec_write_metadata(NC_GRP_INFO_T *grp, nc_bool_t bad_coord_order)
}
} /* end while */
/* Attach dimscales to vars in this group. */
if ((retval = attach_dimscales(grp)))
return retval;
/* Attach dimscales to vars in this group. Unless directed not to. */
if (!grp->nc4_info->no_dimscale_attach) {
if ((retval = attach_dimscales(grp)))
return retval;
}
/* If there are any child groups, write their metadata. */
for (i = 0; i < ncindexsize(grp->children); i++)


@ -52,9 +52,11 @@
#include "netcdf_filter_build.h"
#include "netcdf_aux.h"
#undef DEBUG
#undef DEBUGF
#undef DEBUGL
#if 0
#define DEBUG
#define DEBUGF
#define DEBUGL
#endif
/* If set, then triage potential shared libraries based on extension */
#define NAMEOPT
@ -177,7 +179,7 @@ NCJtrace(const NCjson* j)
const char*
printplugin(const NCZ_Plugin* plugin)
{
char* plbuf = malloc(4096);
static char plbuf[4096];
char plbuf2[2000];
char plbuf1[2000];
@ -194,8 +196,7 @@ printplugin(const NCZ_Plugin* plugin)
static char*
printparams(size_t nparams, const unsigned* params)
{
char* ppbuf = malloc(4096);
static char ppbuf[4096];
if(nparams == 0)
snprintf(ppbuf,4096,"{0,%p}",params);
else
@ -1378,20 +1379,27 @@ NCZ_load_plugin(const char* path, struct NCZ_Plugin** plugp)
/* See what we have */
{
H5PL_get_plugin_type_proto gpt = (H5PL_get_plugin_type_proto)ncpgetsymbol(lib,"H5PLget_plugin_type");
H5PL_get_plugin_info_proto gpi = (H5PL_get_plugin_info_proto)ncpgetsymbol(lib,"H5PLget_plugin_info");
NCZ_get_codec_info_proto npi = (NCZ_get_codec_info_proto)ncpgetsymbol(lib,"NCZ_get_codec_info");
NCZ_codec_info_defaults_proto cpd = (NCZ_codec_info_defaults_proto)ncpgetsymbol(lib,"NCZ_codec_info_defaults");
const H5PL_get_plugin_type_proto gpt = (H5PL_get_plugin_type_proto)ncpgetsymbol(lib,"H5PLget_plugin_type");
const H5PL_get_plugin_info_proto gpi = (H5PL_get_plugin_info_proto)ncpgetsymbol(lib,"H5PLget_plugin_info");
const NCZ_get_codec_info_proto npi = (NCZ_get_codec_info_proto)ncpgetsymbol(lib,"NCZ_get_codec_info");
const NCZ_codec_info_defaults_proto cpd = (NCZ_codec_info_defaults_proto)ncpgetsymbol(lib,"NCZ_codec_info_defaults");
if(gpt == NULL && gpi == NULL && npi == NULL && cpd == NULL)
{stat = NC_ENOFILTER; goto done;}
if(cpd != NULL) {
/* Deal with defaults first */
NCZ_codec_t** cp = NULL;
const NCZ_codec_t** cp = NULL;
nclistpush(default_libs,lib);
for(cp=cpd();*cp;cp++) {
cp = (const NCZ_codec_t**)cpd();
#ifdef DEBUGL
fprintf(stderr,"@@@ %s: default codec library found: %p\n",path,cp);
#endif
for(;*cp;cp++) {
struct CodecAPI* c0;
#ifdef DEBUGL
fprintf(stderr,"@@@ %s: %s = %u\n",path,(*cp)->codecid,(*cp)->hdf5id);
#endif
c0 = (struct CodecAPI*)calloc(1,sizeof(struct CodecAPI));
if(c0 == NULL) {stat = NC_ENOMEM; goto done1;}
c0->codec = *cp;


@ -25,16 +25,6 @@
#define plugin_dir_win "%s/hdf5/lib/plugin"
#define win32_root_env "ALLUSERSPROFILE"
/*
Return a NULL terminated vector of pointers to instances of ''NCZ_codec_t''.
The value returned is actually of type ''NCZ_codec_t**'',
but is of type ''void*'' to allow for extensions.
The list of returned items are used to try to provide defaults
for any HDF5 filters that have no corresponding Codec.
This is for internal use only.
*/
typedef void* (*NCZ_codec_info_defaults_proto)(void);
/* Opaque */
struct NCZ_Filter;


@ -9,6 +9,7 @@ Configured On: @CONFIG_DATE@
Host System: @host_cpu@-@host_vendor@-@host_os@
Build Directory: @abs_top_builddir@
Install Prefix: @prefix@
Plugin Install Prefix: @PLUGIN_INSTALL_DIR_SETTING@
# Compiling Options
-----------------
@ -48,3 +49,5 @@ Quantization: @HAS_QUANTIZE@
Logging: @HAS_LOGGING@
SZIP Write Support: @HAS_SZLIB_WRITE@
Standard Filters: @STD_FILTERS@
ZSTD Support: @HAS_ZSTD@
Benchmarks: @HAS_BENCHMARKS@


@ -49,7 +49,9 @@ extern int ffio_open(const char*,int,off_t,size_t,size_t*,void*,ncio**,void** co
extern int memio_open(const char*,int,off_t,size_t,size_t*,void*,ncio**,void** const);
/* Forward */
#ifdef ENABLE_BYTERANGE
static int urlmodetest(const char* path);
#endif
int
ncio_create(const char *path, int ioflags, size_t initialsz,
@ -179,6 +181,7 @@ NC_HTTP => byterange
NC_S3SDK => s3
0 => Not URL
*/
#ifdef ENABLE_BYTERANGE
static int
urlmodetest(const char* path)
{
@ -195,3 +198,4 @@ urlmodetest(const char* path)
ncurifree(uri);
return kind;
}
#endif


@ -91,7 +91,9 @@ IF(BUILD_UTILITIES)
add_sh_test(nc_test run_diskless5)
add_sh_test(nc_test run_inmemory)
IF(LARGE_FILE_TESTS)
add_sh_test(nc_test run_diskless2)
IF(NOT USE_PARALLEL)
add_sh_test(nc_test run_diskless2)
ENDIF()
ENDIF()
IF(ENABLE_BYTERANGE)


@ -22,25 +22,25 @@ AM_CPPFLAGS += -I$(top_builddir)/liblib -I$(top_builddir)/include -I$(top_srcdir
TEST_EXTENSIONS = .sh
check_PROGRAMS =
# These are the tests which are always run.
TESTPROGRAMS = t_nc tst_small nc_test tst_misc tst_norm tst_names \
tst_nofill tst_nofill2 tst_nofill3 tst_atts3 tst_meta tst_inq_type \
tst_utf8_validate tst_utf8_phrases tst_global_fillval \
TESTPROGRAMS = tst_names tst_nofill2 tst_nofill3 tst_meta \
tst_inq_type tst_utf8_validate tst_utf8_phrases tst_global_fillval \
tst_max_var_dims tst_formats tst_def_var_fill tst_err_enddef \
tst_default_format
# These are always built, but for parallel builds are run from a test
# script, because they are parallel-enabled tests.
check_PROGRAMS = t_nc tst_atts3 tst_nofill nc_test tst_small
# These tests are only run if pnetcdf is enabled.
if USE_PNETCDF
check_PROGRAMS += tst_parallel2 tst_pnetcdf tst_addvar
TESTPROGRAMS += tst_formatx_pnetcdf tst_default_format_pnetcdf
check_PROGRAMS += tst_parallel2 tst_pnetcdf tst_addvar \
tst_formatx_pnetcdf tst_default_format_pnetcdf
endif
if TEST_PARALLEL4
if USE_PNETCDF
if ENABLE_CDF5
TESTPROGRAMS += tst_cdf5format
endif
check_PROGRAMS += tst_cdf5format
endif
endif
@ -86,29 +86,33 @@ endif
if BUILD_UTILITIES
TESTS += run_diskless.sh run_diskless5.sh run_inmemory.sh
if LARGE_FILE_TESTS
if ! ENABLE_PARALLEL
TESTS += run_diskless2.sh
endif
endif
if BUILD_MMAP
TESTS += run_mmap.sh
run_mmap.log: run_diskless.log
endif
endif
# If pnetcdf is enabled, these tests are run by a test
# script. Otherwise, they are run by automake in the usual way.
if USE_PNETCDF
TESTS += run_pnetcdf_test.sh
TESTS += run_pnetcdf_tests.sh
else
TESTS += t_nc tst_atts3 tst_nofill nc_test tst_small
endif
# The .c files that are generated with m4 are already distributed, but
# we also include the original m4 files, plus test scripts data.
EXTRA_DIST = test_get.m4 test_put.m4 run_diskless.sh run_diskless2.sh \
run_diskless5.sh run_mmap.sh run_pnetcdf_test.sh test_read.m4 \
test_write.m4 ref_tst_diskless2.cdl tst_diskless5.cdl \
ref_tst_diskless3_create.cdl ref_tst_diskless3_open.cdl \
run_inmemory.sh run_mmap.sh \
f03tst_open_mem.nc \
test_byterange.sh ref_tst_http_nc3.cdl \
ref_tst_http_nc4a.cdl ref_tst_http_nc4b.cdl ref_tst_http_nc4c.cdl \
CMakeLists.txt
EXTRA_DIST = test_get.m4 test_put.m4 run_diskless.sh run_diskless2.sh \
run_diskless5.sh run_mmap.sh run_pnetcdf_test.sh test_read.m4 \
test_write.m4 ref_tst_diskless2.cdl tst_diskless5.cdl \
ref_tst_diskless3_create.cdl ref_tst_diskless3_open.cdl \
run_inmemory.sh run_mmap.sh f03tst_open_mem.nc test_byterange.sh \
ref_tst_http_nc3.cdl ref_tst_http_nc4a.cdl ref_tst_http_nc4b.cdl \
ref_tst_http_nc4c.cdl CMakeLists.txt run_pnetcdf_tests.sh.in
# These files are created by the tests.
CLEANFILES = nc_test_*.nc tst_*.nc t_nc.nc large_files.nc \


@ -5,9 +5,6 @@ if test "x$srcdir" = x ; then srcdir=`pwd`; fi
set -e
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
# Get the target OS and CPU
CPU=`uname -p`
OS=`uname`


@ -1,14 +0,0 @@
#!/bin/sh
# This script runs some PnetCDF I/O tests
set -e
echo
echo "Testing file created with PnetCDF is modifiable with netCDF..."
./tst_pnetcdf
echo "Testing file created with PnetCDF works when adding variables..."
./tst_addvar tst_pnetcdf.nc
# We assume a min of at least 2 processors is available
mpiexec -n 2 ./tst_parallel2

nc_test/run_pnetcdf_tests.sh.in Executable file

@ -0,0 +1,26 @@
#!/bin/sh
# This script runs some PnetCDF I/O tests
set -e
echo
echo "Testing file created with PnetCDF is modifiable with netCDF..."
./tst_pnetcdf
echo "Testing file created with PnetCDF works when adding variables..."
./tst_addvar tst_pnetcdf.nc
# We assume a min of at least 2 processors is available
@MPIEXEC@ -n 2 ./tst_parallel2
# These tests work in either serial or parallel builds.
@MPIEXEC@ -n 1 ./t_nc
@MPIEXEC@ -n 1 ./tst_atts3
@MPIEXEC@ -n 1 ./tst_nofill
@MPIEXEC@ -n 1 ./nc_test
@MPIEXEC@ -n 1 ./tst_default_format_pnetcdf
@MPIEXEC@ -n 1 ./tst_small
@MPIEXEC@ -n 1 ./tst_formatx_pnetcdf
@MPIEXEC@ -n 1 ./tst_cdf5format


@ -132,7 +132,6 @@ fail(int ret)
void
exists(const char* file)
{
int stat = 0;
FILE* f = NCfopen(file, "r");
if(f == NULL) fail(NC_EPERM);
fclose(f);


@ -486,8 +486,9 @@ main(int argc, char **argv)
if (nc_close(ncid2)) ERR;
SUMMARIZE_ERR;
}
FINAL_RESULTS;
#ifdef USE_PNETCDF
MPI_Finalize();
#endif
FINAL_RESULTS;
}


@ -44,7 +44,8 @@ int main(int argc, char* argv[])
size_t start[2], count[2];
MPI_Comm comm=MPI_COMM_SELF;
MPI_Info info=MPI_INFO_NULL;
char file_name[NC_MAX_NAME + 1];
printf("\n*** Testing bug fix with changing PnetCDF variable offsets...");
MPI_Init(&argc,&argv);
@ -63,7 +64,9 @@ int main(int argc, char* argv[])
#endif
cmode = NC_CLOBBER;
st = nc_create_par(FILENAME, cmode, comm, info, &ncid);
sprintf(file_name, "%s/%s", TEMP_LARGE, FILENAME);
st = nc_create_par(file_name, cmode, comm, info, &ncid);
#ifdef USE_PNETCDF
CHK_ERR(st)
#else
@ -109,7 +112,7 @@ int main(int argc, char* argv[])
if (info != MPI_INFO_NULL) MPI_Info_free(&info);
/* re-open the file with netCDF (parallel) and enter define mode */
st = nc_open_par(FILENAME, NC_WRITE, comm, info, &ncid); CHK_ERR(st)
st = nc_open_par(file_name, NC_WRITE, comm, info, &ncid); CHK_ERR(st)
st = nc_redef(ncid); CHK_ERR(st)


@ -93,7 +93,8 @@ endif # BUILD_UTILITIES
if TEST_PARALLEL4
check_PROGRAMS += tst_mpi_parallel tst_parallel tst_parallel3 \
tst_parallel4 tst_parallel5 tst_nc4perf tst_mode tst_simplerw_coll_r \
tst_mode tst_parallel_zlib tst_parallel_compress tst_quantize_par
tst_mode tst_parallel_zlib tst_parallel_compress tst_quantize_par \
tst_parallel6
TESTS += run_par_test.sh
endif # TEST_PARALLEL4
@ -108,9 +109,15 @@ ref_filteredvv.cdl ref_multi.cdl \
ref_ncgenF.cdl ref_nccopyF.cdl \
ref_filter_repeat.txt ref_fillonly.cdl test_fillonly.sh \
ref_filter_order_create.txt ref_filter_order_read.txt \
ref_any.cdl tst_specific_filters.sh \
ref_any.cdl tst_specific_filters.sh \
tst_virtual_datasets.c
# The tst_filterinstall test can only be run after an install
# occurred with --with-plugin-dir enabled. So there is no point
#in running it via make check. It is kept here so it can be
# manually invoked if desired
EXTRA_DIST += tst_filterinstall.sh
CLEANFILES = tst_mpi_parallel.bin cdm_sea_soundings.nc bm_chunking.nc \
tst_floats_1D.cdl floats_1D_3.nc floats_1D.cdl tst_*.nc tmp_*.txt \
tst_floats2_*.cdl tst_ints2_*.cdl tst_shorts2_*.cdl tst_elena_*.cdl \


@ -9,12 +9,10 @@
# $2 is 1 if we are running under cmake
# $3 is 1 if we are running using Visual Studio, blank otherwise
# $4 is the build type; only used if $3 is 1
# Optional Input:
# HDF5_PLUGIN_PATH environment variable
# Outputs:
# return code is 0 is success, 1 if failed
# Variable HDF5_PLUGIN_LIB is set to the library file name
# Variable HDF5_PLUGIN_PATH is setthe absolute path to the
# Variable HDF5_PLUGIN_DIR is set to the absolute path to the
# directory containing the plugin library file
# Local variables are prefixed with FP_
#
@ -27,7 +25,7 @@ findplugin() {
FP_NAME="$1"
FP_PLUGIN_LIB=
FP_PLUGIN_PATH=
FP_PLUGIN_DIR=
# Figure out the plugin file name
# Test for visual studio before cygwin since both might be true
@ -43,14 +41,6 @@ else # Presumably some form on *nix"
FP_PLUGIN_LIB="lib${FP_NAME}.so"
fi
# If HDF5_PLUGIN_PATH is defined, then it overrides everything else.
if test "x$HDF5_PLUGIN_PATH" != x ; then
HDF5_PLUGIN_LIB="$FP_PLUGIN_LIB"
# HDF5_PLUGIN_PATH already set
HDF5_PLUGIN_PATH=`${NCPATHCVT} $HDF5_PLUGIN_PATH`
return 0;
fi
# Figure out the path to where the lib is stored
# This can probably be simplified
@ -59,40 +49,38 @@ cd ${TOPBUILDDIR}/plugins
FP_PLUGINS=`pwd`
cd ${CURWD}
# Case 1: Cmake with Visual Studio
# Do not know where to look for a dylib
# Case 1: Cmake with Visual Studio
if test "x$FP_ISCMAKE" != x -a "x${FP_ISMSVC}" != x ; then
# Case 1a: ignore the build type directory
if test -f "${FP_PLUGINS}/${FP_PLUGIN_LIB}" ; then
FP_PLUGIN_PATH="${FP_PLUGINS}"
FP_PLUGIN_DIR="${FP_PLUGINS}"
fi
else # Case 2: automake
# Case 2a: look in .libs
if test -f "${FP_PLUGINS}/.libs/${FP_PLUGIN_LIB}" ; then
FP_PLUGIN_PATH="${FP_PLUGINS}/.libs"
FP_PLUGIN_DIR="${FP_PLUGINS}/.libs"
else # Case 2: look in FP_PLUGINS directly
if test -f "${FP_PLUGINS}/${FP_PLUGIN_LIB}" ; then
FP_PLUGIN_PATH="${FP_PLUGINS}"
FP_PLUGIN_DIR="${FP_PLUGINS}"
fi
fi
fi
# Verify
if test "x$FP_PLUGIN_PATH" = x ; then
echo "***Fail: Could not locate a usable HDF5_PLUGIN_PATH"
if test "x$FP_PLUGIN_DIR" = x ; then
echo "***Fail: Could not locate a usable HDF5_PLUGIN_DIR"
return 1
fi
if ! test -f "$FP_PLUGIN_PATH/$FP_PLUGIN_LIB" ; then
if ! test -f "$FP_PLUGIN_DIR/$FP_PLUGIN_LIB" ; then
echo "***Fail: Could not locate a usable HDF5_PLUGIN_LIB"
return 1
fi
FP_PLUGIN_PATH=`${NCPATHCVT} $FP_PLUGIN_PATH`
FP_PLUGIN_DIR=`${NCPATHCVT} $FP_PLUGIN_DIR`
# Set the final output variables
HDF5_PLUGIN_LIB="$FP_PLUGIN_LIB"
HDF5_PLUGIN_PATH="$FP_PLUGIN_PATH"
HDF5_PLUGIN_DIR="$FP_PLUGIN_DIR"
return 0
}


@ -52,7 +52,6 @@ echo "Parallel I/O test for Collective I/O, contributed by HDF Group."
if test "@HAS_PAR_FILTERS@" = "yes"; then
echo
echo "Parallel I/O test with zlib."
@MPIEXEC@ -n 1 ./tst_parallel_zlib
@MPIEXEC@ -n 4 ./tst_parallel_zlib
echo
@ -65,3 +64,7 @@ echo
echo "Parallel I/O test for quantize feature."
@MPIEXEC@ -n 4 ./tst_quantize_par
echo
echo "Parallel I/O test contributed by wkliao from pnetcdf."
@MPIEXEC@ -n 4 ./tst_parallel6


@ -22,7 +22,11 @@ main() {
printf("\n*** Testing NetCDF-4 with truncated (broken) sample file.\n");
{
printf("*** testing via file on file-system ...\n");
FILE *fp = fopen(FILE_NAME, "w");
#if defined _WIN32 || defined __MINGW32__
FILE *fp = fopen(FILE_NAME, "wb");
#else
FILE *fp = fopen(FILE_NAME, "w");
#endif
if(!fp) ERR;
if(fwrite(TRUNCATED_FILE_CONTENT, sizeof(char), sizeof(TRUNCATED_FILE_CONTENT), fp) != sizeof(TRUNCATED_FILE_CONTENT)) ERR;
fclose(fp);

View File

@ -43,36 +43,37 @@ sed -e 's/[ ]*\([^ ].*\)/\1/' <$1 >$2
# Hide/unhide the noop filter
hidenoop() {
rm -fr ${HDF5_PLUGIN_PATH}/save
mkdir ${HDF5_PLUGIN_PATH}/save
mv ${NOOPPATH} ${HDF5_PLUGIN_PATH}/save
rm -fr ${HDF5_PLUGIN_DIR}/save
mkdir ${HDF5_PLUGIN_DIR}/save
mv ${NOOPDIR} ${HDF5_PLUGIN_DIR}/save
}
unhidenoop() {
mv ${HDF5_PLUGIN_PATH}/save/${NOOPLIB} ${HDF5_PLUGIN_PATH}
rm -fr ${HDF5_PLUGIN_PATH}/save
mv ${HDF5_PLUGIN_DIR}/save/${NOOPLIB} ${HDF5_PLUGIN_DIR}
rm -fr ${HDF5_PLUGIN_DIR}/save
}
# Locate the plugin path and the library names; argument order is critical
# Locate the plugin dir and the library names; argument order is critical
# Find bzip2 and capture
findplugin h5bzip2
BZIP2LIB="${HDF5_PLUGIN_LIB}"
BZIP2PATH="${HDF5_PLUGIN_PATH}/${BZIP2LIB}"
BZIP2DIR="${HDF5_PLUGIN_DIR}/${BZIP2LIB}"
# Find misc and capture
findplugin h5misc
MISCPATH="${HDF5_PLUGIN_PATH}/${HDF5_PLUGIN_LIB}"
MISCDIR="${HDF5_PLUGIN_DIR}/${HDF5_PLUGIN_LIB}"
# Find noop and capture
findplugin h5noop
NOOPLIB="${HDF5_PLUGIN_LIB}"
NOOPPATH="${HDF5_PLUGIN_PATH}/${HDF5_PLUGIN_LIB}"
NOOPDIR="${HDF5_PLUGIN_DIR}/${HDF5_PLUGIN_LIB}"
echo "final HDF5_PLUGIN_PATH=${HDF5_PLUGIN_PATH}"
export HDF5_PLUGIN_PATH
echo "final HDF5_PLUGIN_DIR=${HDF5_PLUGIN_DIR}"
export HDF5_PLUGIN_DIR
export HDF5_PLUGIN_PATH="$HDF5_PLUGIN_DIR"
# Verify
if ! test -f ${BZIP2PATH} ; then echo "Unable to locate ${BZIP2PATH}"; exit 1; fi
if ! test -f ${MISCPATH} ; then echo "Unable to locate ${MISCPATH}"; exit 1; fi
if ! test -f ${NOOPPATH} ; then echo "Unable to locate ${NOOPPATH}"; exit 1; fi
if ! test -f ${BZIP2DIR} ; then echo "Unable to locate ${BZIP2DIR}"; exit 1; fi
if ! test -f ${MISCDIR} ; then echo "Unable to locate ${MISCDIR}"; exit 1; fi
if ! test -f ${NOOPDIR} ; then echo "Unable to locate ${NOOPDIR}"; exit 1; fi
# See if we have szip
if avail szip; then HAVE_SZIP=1; else HAVE_SZIP=0; fi

nc_test4/tst_filterinstall.sh Executable file (111 lines)
View File

@ -0,0 +1,111 @@
#!/bin/bash
# Test the filter install
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
if test "x$TESTNCZARR" = x1 ; then
. ./test_nczarr.sh
fi
set -e
# Use this plugin path
export HDF5_PLUGIN_PATH="${FEATURE_PLUGIN_INSTALL_DIR}"
# Function to remove selected -s attributes from file;
# These attributes might be platform dependent
sclean() {
cat $1 \
| sed -e '/:_IsNetcdf4/d' \
| sed -e '/:_Endianness/d' \
| sed -e '/_NCProperties/d' \
| sed -e '/_SuperblockVersion/d' \
| sed -e '/_Format/d' \
| sed -e '/global attributes:/d' \
| cat > $2
}
# Function to extract _Filter attribute from a file
# These attributes might be platform dependent
getfilterattr() {
V="$1"
sed -e "/${V}.*:_Filter/p" -e d <$2 >$3
}
# Function to extract _Codecs attribute from a file
# These attributes might be platform dependent
getcodecsattr() {
V="$1"
sed -e "/${V}.*:_Codecs/p" -e d <$2 >$3
}
trimleft() {
sed -e 's/[ ]*\([^ ].*\)/\1/' <$1 >$2
}
setfilter() {
FF="$1"
FSRC="$2"
FDST="$3"
FIH5="$4"
FICX="$5"
FFH5="$6"
FFCX="$7"
if test "x$FFH5" = x ; then FFH5="$FIH5" ; fi
if test "x$FFCX" = x ; then FFCX="$FICX" ; fi
rm -f $FDST
cat ${srcdir}/$FSRC \
| sed -e "s/ref_any/${FF}/" \
| sed -e "s/IH5/${FIH5}/" -e "s/FH5/${FFH5}/" \
| sed -e "s/ICX/${FICX}/" -e "s/FCX/${FFCX}/" \
| sed -e 's/"/\\"/g' -e 's/@/"/g' \
| cat > $FDST
}
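For orientation, one concrete call (the values are the ones testbzip2 passes below; the placeholder names inside ref_any.cdl are inferred from the sed patterns, since that file is not part of this hunk):
#   setfilter bzip2 ref_any.cdl tmp_filt_bzip2.cdl '307,9' '[{\"id\": \"bz2\",\"level\": \"9\"}]'
# rewrites ref_any -> bzip2, IH5/FH5 -> the HDF5 parameter string 307,9,
# ICX/FCX -> the codec JSON, escapes pre-existing quotes, and turns @ markers
# into literal double quotes.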
# Execute the specified tests
runfilter() {
zext=$1
zfilt="$2"
zparams="$3"
zcodec="$4"
echo "*** Testing processing of filter $zfilt for map $zext"
if test "x$TESTNCZARR" = x1 ; then
fileargs "tmp_filt_${zfilt}"
deletemap $zext $file
else
file="tmp_filt_${zfilt}.nc"
rm -f $file
fi
setfilter $zfilt ref_any.cdl "tmp_filt_${zfilt}.cdl" "$zparams" "$zcodec"
if test "x$TESTNCZARR" = x1 ; then
${NCGEN} -4 -lb -o $fileurl "tmp_filt_${zfilt}.cdl"
${NCDUMP} -n $zfilt -sF $fileurl > "tmp_filt_${zfilt}.tmp"
else
${NCGEN} -4 -lb -o $file "tmp_filt_${zfilt}.cdl"
${NCDUMP} -n $zfilt -sF $file > "tmp_filt_${zfilt}.tmp"
fi
sclean "tmp_filt_${zfilt}.tmp" "tmp_filt_${zfilt}.dump"
}
testbzip2() {
zext=$1
if ! avail bzip2; then return 0; fi
runfilter $zext bzip2 '307,9' '[{\"id\": \"bz2\",\"level\": \"9\"}]'
diff -b -w "tmp_filt_bzip2.cdl" "tmp_filt_bzip2.dump"
}
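The '307,9'/bz2 pair above is the same HDF5-to-codec mapping implemented by the plugin sources later in this change; a standalone sketch of the round trip follows (file names are illustrative, and the bzip2 plugin must already be reachable through HDF5_PLUGIN_PATH):
# Declare the bzip2 filter (HDF5 id 307, level 9) on a variable via the
# _Filter special attribute, then ask ncdump for the matching _Codecs JSON.
cat > tmp_bz2.cdl <<'EOF'
netcdf tmp_bz2 {
dimensions:
  x = 64 ;
variables:
  int v(x) ;
    v:_Filter = "307,9" ;
}
EOF
ncgen -4 -lb -o tmp_bz2.nc tmp_bz2.cdl
ncdump -hsF tmp_bz2.nc   # look for v:_Filter and a matching v:_Codecs entry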
testset() {
# Which test cases to exercise
testbzip2 $1
}
if test "x$TESTNCZARR" = x1 ; then
testset file
if test "x$FEATURE_NCZARR_ZIP" = xyes ; then testset zip ; fi
if test "x$FEATURE_S3TESTS" = xyes ; then testset s3 ; fi
else
testset nc
fi

nc_test4/tst_parallel6.c Normal file (74 lines)
View File

@ -0,0 +1,74 @@
/* Copyright 2022, UCAR/Unidata See COPYRIGHT file for copying and
* redistribution conditions.
*
* This parallel I/O test checks the behavior of nc_inq_dimlen() after
* parallel I/O writes.
*
* This program taken from a PNetCDF issue:
* https://github.com/Parallel-NetCDF/PnetCDF/issues/72, thanks
* wkliao!
*
* wkliao, Ed Hartnett, 4/11/22
*/
#include <nc_tests.h>
#include "err_macros.h"
#include <stdio.h>
#include <stdlib.h>
#include <mpi.h>
#include <netcdf.h>
#include <netcdf_par.h>
#define FILENAME "tst_parallel6.nc"
int main(int argc, char** argv)
{
int err = NC_NOERR, rank, nprocs;
int ncid, varid, dimid;
size_t start[1], count[1], nrecs;
MPI_Init(&argc, &argv);
MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
MPI_Comm_rank(MPI_COMM_WORLD, &rank);
if (!rank)
printf("\n*** Testing parallel I/O.\n");
if (!rank)
printf("*** testing record lenth with multiple processes writing records...");
/* nc_set_log_level(4); */
if (nc_create_par(FILENAME, NC_CLOBBER | NC_NETCDF4, MPI_COMM_WORLD,
MPI_INFO_NULL, &ncid)) ERR;
if (nc_def_dim(ncid, "time", NC_UNLIMITED, &dimid)) ERR;
if (nc_def_var(ncid, "var", NC_INT, 1, &dimid, &varid)) ERR;
if (nc_var_par_access(ncid, varid, NC_COLLECTIVE)) ERR;
if (nc_enddef(ncid)) ERR;
start[0] = rank;
count[0] = 1;
if (nc_put_vara_int(ncid, varid, start, count, &rank)) ERR;
if (nc_inq_dimlen(ncid, dimid, &nrecs)) ERR;
if (nc_close(ncid)) ERR;
/* nc_set_log_level(-1); */
if (nrecs != nprocs)
{
printf("Rank %d error at line %d of file %s:\n",rank,__LINE__,__FILE__);
printf("\tafter writing start=%zd count=%zd\n", start[0], count[0]);
printf("\texpecting number of records = %d but got %ld\n",
nprocs, nrecs);
ERR;
}
if (!rank)
SUMMARIZE_ERR;
MPI_Finalize();
if (!rank)
FINAL_RESULTS;
return 0;
}

View File

@ -60,6 +60,13 @@ main(int argc, char **argv)
/*printf("mpi_name: %s size: %d rank: %d\n", mpi_name,
mpi_size, mpi_rank);*/
/* This program must have exactly 4 processors. */
if (mpi_size != 4) {
printf("Error: mpi_size must be 4 with this test. mpi_size: %d used.\n",
mpi_size);
ERR;
}
#ifdef USE_MPE
MPE_Init_log();
s_init = MPE_Log_get_event_number();

View File

@ -550,7 +550,7 @@ main(int argc, char **argv)
/* union FU fout; */
union FU xpect[DIM_LEN_5];
union DU dfin;
union DU dfout;
/* union DU dfout; */
union DU double_xpect[DIM_LEN_5];
xpect[0].u = 0x3f8e3000;
xpect[1].u = 0x3f800fff;

View File

@ -28,9 +28,6 @@ echo "findplugin.sh loaded"
# Assume all test filters are in same plugin dir
findplugin h5bzip2
echo "final HDF5_PLUGIN_PATH=${HDF5_PLUGIN_PATH}"
export HDF5_PLUGIN_PATH
# Function to remove selected -s attributes from file;
# These attributes might be platform dependent
sclean() {
@ -167,14 +164,14 @@ testzstd() {
testset() {
# Which test cases to exercise
if test "x$TESTNCZARR" = x1 ; then
# testfletcher32 $1
testfletcher32 $1
testshuffle $1
fi
# testdeflate $1
# testszip $1
# testbzip2 $1
# testblosc $1
# testzstd $1
testdeflate $1
testszip $1
testbzip2 $1
testblosc $1
testzstd $1
}
if test "x$TESTNCZARR" = x1 ; then

View File

@ -10,7 +10,7 @@ include $(top_srcdir)/lib_flags.am
#SH_LOG_DRIVER = $(SHELL) $(top_srcdir)/test-driver-verbose
#LOG_DRIVER = $(SHELL) $(top_srcdir)/test-driver-verbose
#TEST_LOG_DRIVER = $(SHELL) $(top_srcdir)/test-driver-verbose
#TESTS_ENVIRONMENT += export SETX=1;
#TESTS_ENVIRONMENT = export SETX=1;
# Note which tests depend on other tests. Necessary for make -j check.
TEST_EXTENSIONS = .sh

View File

@ -40,10 +40,7 @@ kwcase.nc"
# only to detect which are considered XFAIL tests.
XFAILTESTS=
# For now, remove some tests from windows platform.
#if test "x$platform" != xmingw ; then
XFAILTESTS="$XFAILTESTS EOSDB OverideExample SimpleDrdsExample test.67 test.gr5 123bears.nc 123.nc bears.nc ber-2002-10-01 data.nc in1.nc in_2.nc in_no_three_double_dmn.nc test.nc text.nc test.22 test.23 test.gr1 in.nc ber-2002-10-01.nc kwcase.nc"
#fi
XFAILTESTS="$XFAILTESTS EOSDB OverideExample SimpleDrdsExample test.67 test.gr5 123bears.nc 123.nc bears.nc ber-2002-10-01 data.nc in1.nc in_2.nc in_no_three_double_dmn.nc test.nc text.nc test.22 test.23 test.gr1 in.nc ber-2002-10-01.nc kwcase.nc"
FILETESTS="${SYNTHETICDATA} ${SOURCEFILES}"
#DDSTESTS intersect FILETESTS should be empty

View File

@ -1,9 +1,8 @@
#!/bin/sh
if test "x$SETX" = x1 ; then set -x ; fi
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
set -e
. ${srcdir}/tst_utils.sh

View File

@ -47,7 +47,7 @@ kwcase.nc"
XFAILTESTS=""
# For now, remove some tests from windows platform.
if [ `uname | cut -d "_" -f 1` = "MINGW32" ]; then
if test "x$FP_ISMINGW" == xyes || test "x$FP_ISMSVC" == xyes ; then
XFAILTESTS="$XFAILTESTS EOSDB OverideExample SimpleDrdsExample test.67 test.gr5 123bears.nc 123.nc bears.nc ber-2002-10-01 data.nc in1.nc in_2.nc in_no_three_double_dmn.nc test.nc text.nc test.22 test.23 test.gr1 in.nc ber-2002-10-01.nc kwcase.nc"
fi

View File

@ -19,7 +19,7 @@ srcdir=`pwd`
# Do a hack to remove e.g. c: for CYGWIN
builddir=`pwd`/..
# Hack for CYGWIN
if [ `uname | cut -d "_" -f 1` = "MINGW32" ]; then
if test "x$FP_ISMINGW" = yes ; then
srcdir=`pwd | sed 's/\/c\//c:\//g'`
builddir="$srcdir"/..
fi

View File

@ -17,10 +17,6 @@ fi
PARAMS="[log]"
#PARAMS="${PARAMS}[show=fetch]"
# Determine If we're on OSX or Linux
myplatform=`uname -a | cut -d" " -f 1`
#OCLOGFILE=/dev/null
OCLOGFILE="" ; export OCLOGFILE
@ -147,8 +143,8 @@ REMOTETESTSCB="dods"
# Known to fail
XFAILTESTS3=""
# For now, remove some tests from windows platform.
if [ `uname | cut -d "_" -f 1` = "MINGW32" ]; then
# For now, remove some tests from mingw platform.
if test "x$FP_ISMINGW" = xyes ; then
XFAILTESTS3="$XFAILTESTS3 test.67"
fi
@ -239,7 +235,7 @@ for t in ${TESTSET} ; do
#index=`expr index "${t}" ";"`
#echo index: $index
if [ "$myplatform" = "Darwin" ]; then
if test "x$FP_ISOSX" = xyes ; then
index=`echo "${t}" | sed -n "s/;.*//p" | wc -c`
if (( $index == 0 )) ; then
constrained=0

View File

@ -1,7 +1,5 @@
#!/bin/bash
if test "x$SETX" = x1 ; then set -x ; fi
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh

View File

@ -19,7 +19,7 @@ srcdir=`pwd`
# compute the build directory
builddir=`pwd`/..
# Hack for CYGWIN
if [ `uname | cut -d "_" -f 1` = "MINGW32" ]; then
if test "x$FP_ISMINGW" = xyes ; then
srcdir=`pwd | sed 's/\/c\//c:\//g'`
builddir="$srcdir"/..
fi

View File

@ -155,9 +155,9 @@ for x in ${REMOTETESTS} ; do
if test "x$quiet" = "x0" ; then echo "*** Testing: ${name} ; url=$url" ; fi
# determine if this is an xfailtest
isxfail=0
if test "x${XFAILTESTS}" != x ; then
if IGNORE=`echo -n " ${XFAILTESTS} " | fgrep " ${name} "`; then isxfail=1; fi
fi
for xf in $XFAILTESTS ; do
if test "x$xf" = "x${name}" ; then isxfail=1; fi
done
ok=1
if ${NCDUMP} ${DUMPFLAGS} "${url}" | sed 's/\\r//g' > ${name}.dmp ; then ok=$ok; else ok=0; fi
# compare with expected

View File

@ -23,17 +23,6 @@ passcount=0
xfailcount=0
failcount=0
# Try to figure out our platform
myplatform=`uname -a | cut -d" " -f 1`
case "$myplatform" in
Darwin*) platform=osx ;;
MINGW*) platform=mingw ;;
CYGWIN*) platform=cygwin ;;
linux*) platform=linux ;;
Linux*) platform=linux ;;
*) platform=unknown ;;
esac
# How to access local files
FILEURL="file://${testdata3}"

View File

@ -1,11 +1,10 @@
#!/bin/sh
if test "x$SETX" != x ; then set -x; fi
set -e
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
set -e
##
# If the bug referenced in https://github.com/Unidata/netcdf-c/issues/1300
# reoccurs, then the following command would fail.

View File

@ -15,6 +15,10 @@
#include <unistd.h>
#endif
#ifdef HAVE_GETOPT_H
#include <getopt.h>
#endif
#if defined(_WIN32) && !defined(__MINGW32__)
#include "XGetopt.h"
#else

View File

@ -15,6 +15,10 @@
#include <unistd.h>
#endif
#ifdef HAVE_GETOPT_H
#include <getopt.h>
#endif
#if defined(_WIN32) && !defined(__MINGW32__)
#include "XGetopt.h"
#else

View File

@ -73,6 +73,10 @@ THIS SOFTWARE.
#include <unistd.h> /* read() getopt() */
#endif
#ifdef HAVE_GETOPT_H
#include <getopt.h>
#endif
#if defined(_WIN32) && !defined(__MINGW32__)
#include <io.h>
#include "XGetopt.h"

View File

@ -1,12 +1,17 @@
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
#ifdef HAVE_GETOPT_H
#include <getopt.h>
#endif
#if defined(_WIN32) && ! defined(__MINGW32__)
#include "XGetopt.h"
#else
#include <getopt.h>
#endif
#include <netcdf.h>

View File

@ -31,6 +31,8 @@ diff -wB ${srcdir}/$1.cdl ${execdir}/copy_$1.cdl
}
typescope() {
echo ">>>"
ls -l ${execdir}/printfqn* ${execdir}/$1.nc ${execdir}/$1_copy.nc
REFT=`${execdir}/printfqn -f ${execdir}/$1.nc -v test_variable -t`
COPYT=`${execdir}/printfqn -f ${execdir}/$1_copy.nc -v test_variable -t`
if test "x$REFT" != "x$COPYT" ; then

View File

@ -5,6 +5,9 @@ if test "x$srcdir" = x ; then srcdir=`pwd`; fi
set -e
# Disable automatic path conversions in MINGW shell:
export MSYS2_ARG_CONV_EXCL='*'
# We need to find the drive letter, if any
DL=`${NCPATHCVT} -c -e / | sed -e 's|/cygdrive/\([a-zA-Z]\)/.*|\1|'`
if test "x$DL" != x ; then
@ -13,6 +16,7 @@ if test "x$DL" != x ; then
DL="-d $DLL"
fi
testcase1() {
T="$1"
P="$2"

View File

@ -8,7 +8,9 @@
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
set -x
set -e
echo ""
# get some config.h parameters

View File

@ -4,6 +4,7 @@ if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
# This shell script tests the output from several previous tests.
set -x
set -e
echo ""

View File

@ -30,14 +30,17 @@ nctest_classic.cdl ref_nctest_classic.cdl
# reference file.
TESTPROGRAMS = nctest tst_rename
check_PROGRAMS = $(TESTPROGRAMS)
# This test requires that the utilities also be built.
if BUILD_UTILITIES
TESTS = $(TESTPROGRAMS) compare_test_files.sh
# compare_test_files depends on nctest executing first.
compare_test_files.log: nctest.log
endif
# Note which tests depend on other tests. Necessary for make -j check.
TEST_EXTENSIONS = .sh
# compare_test_files depends on nctest executing first.
compare_test_files.log: nctest.log
# These are the source files for the nctest program.
nctest_SOURCES = add.c add.h atttests.c cdftests.c dimtests.c driver.c \
emalloc.c emalloc.h error.c error.h misctest.c rec.c slabs.c testcdf.h \

View File

@ -37,13 +37,13 @@ tst_fillonlyz_SOURCES = tst_fillonlyz.c ${tstcommonsrc}
check_PROGRAMS += tst_zchunks tst_zchunks2 tst_zchunks3 tst_fillonlyz test_quantize
TESTS += run_ut_map.sh
TESTS += run_ut_mapapi.sh
TESTS += run_ut_misc.sh
TESTS += run_ut_chunk.sh
if BUILD_UTILITIES
TESTS += run_ut_map.sh
TESTS += run_ut_mapapi.sh
TESTS += run_ut_misc.sh
TESTS += run_ncgen4.sh
if USE_HDF5
@ -154,26 +154,35 @@ EXTRA_DIST += ref_power_901_constants.zip ref_power_901_constants.cdl ref_quotes
CLEANFILES = ut_*.txt ut*.cdl tmp*.nc tmp*.cdl tmp*.txt tmp*.dmp tmp*.zip tmp*.nc tmp*.dump tmp*.tmp tmp_ngc.c ref_zarr_test_data.cdl
BUILT_SOURCES = test_quantize.c test_filter_avail.c run_specific_filters.sh
test_quantize.c: ../nc_test4/tst_quantize.c
rm -f test_quantize.c
echo "#define TESTNCZARR" > test_quantize.c
cat ../nc_test4/tst_quantize.c >> test_quantize.c
BUILT_SOURCES = test_quantize.c test_filter_avail.c run_specific_filters.sh run_filterinstall.sh
test_quantize.c: $(top_srcdir)/nc_test4/tst_quantize.c
rm -f $@
echo "#define TESTNCZARR" > $@
cat $(top_srcdir)/nc_test4/tst_quantize.c >> $@
test_filter_avail.c: ../nc_test4/tst_filter_avail.c
rm -f test_filter_avail.c
echo "#define TESTNCZARR" > test_filter_avail.c
cat ../nc_test4/tst_filter_avail.c >> test_filter_avail.c
test_filter_avail.c: $(top_srcdir)/nc_test4/tst_filter_avail.c
rm -f $@
echo "#define TESTNCZARR" > $@
cat $(top_srcdir)/nc_test4/tst_filter_avail.c >> $@
run_specific_filters.sh: ../nc_test4/tst_specific_filters.sh
rm -f run_specific_filters.sh run_specific_filters.tmp
run_specific_filters.sh: $(top_srcdir)/nc_test4/tst_specific_filters.sh
rm -f $@ run_specific_filters.tmp
echo "#!/bin/bash" > run_specific_filters.tmp
echo "TESTNCZARR=1" >> run_specific_filters.tmp
cat ../nc_test4/tst_specific_filters.sh >> run_specific_filters.tmp
tr -d '\r' < run_specific_filters.tmp > run_specific_filters.sh
chmod a+x run_specific_filters.sh
cat $(top_srcdir)/nc_test4/tst_specific_filters.sh >> run_specific_filters.tmp
tr -d '\r' < run_specific_filters.tmp > $@
chmod a+x $@
rm -f run_specific_filters.tmp
run_filterinstall.sh: $(top_srcdir)/nc_test4/tst_filterinstall.sh
rm -f $@ run_filterinstall.tmp
echo "#!/bin/bash" > run_filterinstall.tmp
echo "TESTNCZARR=1" >> run_filterinstall.tmp
cat $(top_srcdir)/nc_test4/tst_filterinstall.sh >> run_filterinstall.tmp
tr -d '\r' < run_filterinstall.tmp > $@
chmod a+x $@
rm -f run_filterinstall.tmp
# Remove directories
clean-local:
rm -fr tmp*.file results.file results.s3 results.zip

View File

@ -51,21 +51,21 @@ sed -e 's/[ ]*\([^ ].*\)/\1/' <$1 >$2
# Find misc and capture
findplugin h5misc
MISCPATH="${HDF5_PLUGIN_PATH}/${HDF5_PLUGIN_LIB}"
MISCDIR="${HDF5_PLUGIN_DIR}/${HDF5_PLUGIN_LIB}"
# Find noop and capture
findplugin h5noop
NOOPLIB="${HDF5_PLUGIN_LIB}"
NOOPPATH="${HDF5_PLUGIN_PATH}/${NOOPLIB}"
NOOPDIR="${HDF5_PLUGIN_DIR}/${NOOPLIB}"
# Find bzip2 and capture
findplugin h5bzip2
BZIP2LIB="${HDF5_PLUGIN_LIB}"
BZIP2PATH="${HDF5_PLUGIN_PATH}/${BZIP2LIB}"
BZIP2DIR="${HDF5_PLUGIN_DIR}/${BZIP2LIB}"
# Verify
if ! test -f ${BZIP2PATH} ; then echo "Unable to locate ${BZIP2PATH}"; exit 1; fi
if ! test -f ${MISCPATH} ; then echo "Unable to locate ${MISCPATH}"; exit 1; fi
if ! test -f ${BZIP2DIR} ; then echo "Unable to locate ${BZIP2DIR}"; exit 1; fi
if ! test -f ${MISCDIR} ; then echo "Unable to locate ${MISCDIR}"; exit 1; fi
# Execute the specified tests
@ -144,11 +144,11 @@ ${NCDUMP} -hs $fileurl > ./tmp_known_$zext.txt
# Remove irrelevant -s output
sclean ./tmp_known_$zext.txt tmp_known_$zext.dump
# Now hide the filter code
mv ${NOOPPATH} ./${NOOPLIB}.save
mv ${NOOPDIR} ./${NOOPLIB}.save
# dump and clean noop.nc header when filter is not avail
${NCDUMP} -hs $fileurl > ./tmp_unk_$zext.txt
# Restore the filter code
mv ./${NOOPLIB}.save ${NOOPPATH}
mv ./${NOOPLIB}.save ${NOOPDIR}
# Verify that the filter is no longer defined
UNK=`sed -e '/var:_Filter/p' -e d ./tmp_unk_$zext.txt`
test "x$UNK" = x

View File

@ -133,12 +133,12 @@ if test "x$FP_USEPLUGINS" = xyes; then
echo "findplugin.sh loaded"
# Locate the plugin path and the library names; argument order is critical
# Find bzip2 and capture
# Find misc in order to determine HDF5_PLUGIN_PATH.
# Assume all test filters are in same plugin dir
findplugin h5misc
echo "final HDF5_PLUGIN_PATH=${HDF5_PLUGIN_PATH}"
export HDF5_PLUGIN_PATH
echo "final HDF5_PLUGIN_DIR=${HDF5_PLUGIN_DIR}"
export HDF5_PLUGIN_PATH="${HDF5_PLUGIN_DIR}"
fi # USEPLUGINS
resetrc() {

View File

@ -10,12 +10,6 @@ FILE(READ H5Znoop.c NOOP_SOURCE)
FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/H5Znoop1.c "#define NOOP_INSTANCE 1\n")
FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/H5Znoop1.c "${NOOP_SOURCE}")
SET(h5bzip2_SOURCES H5Zbzip2.c)
IF(NOT HAVE_BZIP2)
SET(h5bzip2_SOURCES ${h5bzip2_SOURCES} blocksort.c huffman.c crctable.c randtable.c compress.c decompress.c bzlib.c)
ENDIF()
SET(h5misc_SOURCES H5Zmisc.c H5Zutil.c h5misc.h)
SET(h5noop_SOURCES H5Znoop.c H5Zutil.c h5noop.h)
@ -29,11 +23,10 @@ SET(h5fletcher32_SOURCES H5Zfletcher32.c H5checksum.c)
SET(h5deflate_SOURCES H5Zdeflate.c)
SET(nczmisc_SOURCES NCZmisc.c)
SET(nczdefaults_SOURCES NCZdefaults.c)
IF(ENABLE_FILTER_TESTING)
IF(BUILD_UTILITIES)
SET(nczhdf5filters_SOURCES NCZhdf5filters.c)
SET(nczstdfilters_SOURCES NCZstdfilters.c)
IF(ENABLE_PLUGINS)
# LDFLAGS = -module -avoid-version -shared -export-dynamic -no-undefined
SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}")
@ -64,7 +57,8 @@ buildplugin(h5fletcher32 "h5fletcher32")
buildplugin(h5deflate "h5deflate")
buildplugin(nczmisc "nczmisc")
buildplugin(nczdefaults "nczdefaults" netcdf)
buildplugin(nczhdf5filters "nczhdf5filters" netcdf)
buildplugin(nczstdfilters "nczstdfilters" netcdf)
IF(ENABLE_BLOSC)
SET(h5blosc_SOURCES H5Zblosc.c)
@ -80,17 +74,54 @@ ENDIF()
IF(HAVE_SZ)
SET(h5szip_SOURCES H5Zszip.c H5Zszip.h)
buildplugin(h5szip "h5szip" ${Szip_LIBRARIES})
SET(nczszip_SOURCES NCZszip.c)
buildplugin(nczszip "nczszip" netcdf)
ENDIF()
IF(HAVE_LOCAL_BZ2)
SET(h5bzip2_SOURCES H5Zbzip2.c blocksort.c huffman.c crctable.c randtable.c compress.c decompress.c bzlib.c bzlib.h bzlib_private.h)
buildplugin(h5bzip2 "h5bzip2")
ELSE()
SET(h5bzip2_SOURCES H5Zbzip2.c)
buildplugin(h5bzip2 "h5bzip2" ${Bzip2_LIBRARIES})
# Note we use name h5bzip2 instead of bzip2 to avoid logical
# target name clash with examples/C/hdf5plugins
SET_TARGET_PROPERTIES(h5bzip2 PROPERTIES OUTPUT_NAME "bzip2")
ENDIF()
ENDIF(BUILD_UTILITIES)
ENDIF(ENABLE_FILTER_TESTING)
# Installation
IF(ENABLE_PLUGIN_INSTALL)
MACRO(installplugin PLUG)
IF(MSVC)
SET(BUILD_PLUGIN_LIB "${PLUG}.dll")
ELSE()
SET(BUILD_PLUGIN_LIB "lib${PLUG}.so")
ENDIF()
MESSAGE(STATUS "Installing: ${BUILD_PLUGIN_LIB} into ${PLUGIN_INSTALL_DIR}")
install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${BUILD_PLUGIN_LIB} DESTINATION ${PLUGIN_INSTALL_DIR})
ENDMACRO()
install(DIRECTORY DESTINATION ${PLUGIN_INSTALL_DIR})
IF(Bzip2_FOUND)
installplugin(h5bzip2)
ENDIF()
IF(Zstd_FOUND)
installplugin(h5zstd)
ENDIF()
IF(Blosc_FOUND)
installplugin(h5blosc)
ENDIF()
IF(ENABLE_NCZARR)
installplugin(h5fletcher32)
installplugin(h5shuffle)
installplugin(h5deflate)
installplugin(nczhdf5filters)
installplugin(nczstdfilters)
IF(Szip_FOUND)
installplugin(h5szip)
ENDIF()
ENDIF()
ENDIF(ENABLE_PLUGIN_INSTALL)
ENDIF(ENABLE_PLUGINS)
# Copy some test files from current source dir to out-of-tree build dir.
FILE(COPY ${COPY_FILES} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/)

View File

@ -125,8 +125,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#define PUSH_ERR(f,m,s,...) fprintf(stderr,"%s\n",s)
#endif /*USE_HDF5*/
static int h5z_blosc_initialized = 0;
static size_t blosc_filter(unsigned flags, size_t cd_nelmts,
const unsigned cd_values[], size_t nbytes,
size_t* buf_size, void** buf);
@ -162,7 +160,7 @@ herr_t blosc_set_local(hid_t dcpl, hid_t type, hid_t space)
hid_t super_type;
H5T_class_t classt;
r = GET_FILTER(dcpl, FILTER_BLOSC, &flags, &nelements, values, 0, NULL);
r = GET_FILTER(dcpl, H5Z_FILTER_BLOSC, &flags, &nelements, values, 0, NULL);
if (r < 0) return -1;
if (nelements < 4) nelements = 4; /* First 4 slots reserved. */
@ -209,7 +207,7 @@ herr_t blosc_set_local(hid_t dcpl, hid_t type, hid_t space)
fprintf(stderr, "Blosc: Computed buffer size %d\n", bufsize);
#endif
r = H5Pmodify_filter(dcpl, FILTER_BLOSC, flags, nelements, values);
r = H5Pmodify_filter(dcpl, H5Z_FILTER_BLOSC, flags, nelements, values);
if (r < 0) return -1;
return 1;
@ -365,7 +363,7 @@ size_t blosc_filter(unsigned flags, size_t cd_nelmts,
const H5Z_class2_t blosc_H5Filter[1] = {
{
H5Z_CLASS_T_VERS,
(H5Z_filter_t)(FILTER_BLOSC),
(H5Z_filter_t)(H5Z_FILTER_BLOSC),
1, /* encoder_present flag (set to true) */
1, /* decoder_present flag (set to true) */
"blosc",
@ -379,235 +377,3 @@ const H5Z_class2_t blosc_H5Filter[1] = {
H5PL_type_t H5PLget_plugin_type(void) { return H5PL_TYPE_FILTER; }
const void* H5PLget_plugin_info(void) { return blosc_H5Filter; }
/* Provide the codec support for the HDF5 blosc library */
/* NCZarr Filter Objects */
#define DEFAULT_LEVEL 9
#define DEFAULT_BLOCKSIZE 1
#define DEFAULT_TYPESIZE 1
#define DEFAULT_COMPCODE BLOSC_LZ4
/* Forward */
static void NCZ_blosc_codec_finalize(void);
static int NCZ_blosc_codec_to_hdf5(const char* codec, size_t* nparamsp, unsigned** paramsp);
static int NCZ_blosc_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp);
static int NCZ_blosc_modify_parameters(int ncid, int varid, size_t* vnparamsp, unsigned** vparamsp, size_t* wnparamsp, unsigned** wparamsp);
/* Structure for NCZ_PLUGIN_CODEC */
static NCZ_codec_t NCZ_blosc_codec = {/* NCZ_codec_t codec fields */
NCZ_CODEC_CLASS_VER, /* Struct version number */
NCZ_CODEC_HDF5, /* Struct sort */
"blosc", /* Standard name/id of the codec */
FILTER_BLOSC, /* HDF5 alias for blosc */
NULL, /*NCZ_blosc_codec_initialize*/
NCZ_blosc_codec_finalize,
NCZ_blosc_codec_to_hdf5,
NCZ_blosc_hdf5_to_codec,
NCZ_blosc_modify_parameters,
};
/* External Export API */
const void*
NCZ_get_codec_info(void)
{
if(!h5z_blosc_initialized) {
h5z_blosc_initialized = 1;
blosc_init();
}
return (void*)&NCZ_blosc_codec;
}
/* NCZarr Interface Functions */
/* Create the true parameter set:
Visible parameters:
param[0] -- reserved
param[1] -- reserved
param[2] -- reserved
param[3] -- variable chunksize in bytes | 0 (=>default)
param[4] -- compression level
param[5] -- BLOSC_SHUFFLE|BLOSC_BITSHUFFLE
param[6] -- compressor index
Working parameters:
param[0] -- filter revision
param[1] -- blosc version
param[2] -- variable type size in bytes
param[3] -- variable chunksize in bytes
param[4] -- compression level
param[5] -- BLOSC_SHUFFLE|BLOSC_BITSHUFFLE
param[6] -- compressor index
*/
static void
NCZ_blosc_codec_finalize(void)
{
if(h5z_blosc_initialized) {
blosc_destroy();
h5z_blosc_initialized = 0;
}
}
static int
NCZ_blosc_modify_parameters(int ncid, int varid, size_t* vnparamsp, unsigned** vparamsp, size_t* wnparamsp, unsigned** wparamsp)
{
int i,stat = NC_NOERR;
nc_type vtype;
int storage, ndims;
size_t* chunklens = NULL;
size_t typesize, chunksize;
char vname[NC_MAX_NAME+1];
unsigned* params = NULL;
size_t nparams;
size_t vnparams = *vnparamsp;
unsigned* vparams = *vparamsp;
if(vnparams < 7)
{stat = NC_EFILTER; goto done;}
nparams = 7;
if(vparams == NULL)
{stat = NC_EFILTER; goto done;}
if(wnparamsp == NULL || wparamsp == NULL)
{stat = NC_EFILTER; goto done;}
vnparams = *vnparamsp;
vparams = *vparamsp;
/* Get variable info */
if((stat = nc_inq_var(ncid,varid,vname,&vtype,&ndims,NULL,NULL))) goto done;
if(ndims == 0) {stat = NC_EFILTER; goto done;}
/* Get the typesize */
if((stat = nc_inq_type(ncid,vtype,NULL,&typesize))) goto done;
/* Compute chunksize */
if((chunklens = (size_t*)calloc(ndims,sizeof(size_t)))==NULL) goto done;
if((stat = nc_inq_var_chunking(ncid,varid,&storage,chunklens))) goto done;
if(storage != NC_CHUNKED) {stat = NC_EFILTER; goto done;}
chunksize = typesize;
for(i=0;i<ndims;i++) chunksize *= chunklens[i];
if((params = (unsigned*)malloc(vnparams*sizeof(unsigned)))==NULL)
{stat = NC_ENOMEM; goto done;}
memcpy(params,vparams,vnparams*sizeof(unsigned));
params[0] = FILTER_BLOSC_VERSION;
params[1] = BLOSC_VERSION_FORMAT;
params[2] = (unsigned)typesize;
params[3] = chunksize;
params[4] = params[4];
params[5] = params[5];
params[6] = params[6];
*wnparamsp = nparams;
nullfree(*wparamsp);
*wparamsp = params; params = NULL;
done:
nullfree(chunklens);
nullfree(params);
FUNC_LEAVE_NOAPI(stat)
}
static int
NCZ_blosc_codec_to_hdf5(const char* codec_json, size_t* nparamsp, unsigned** paramsp)
{
int stat = NC_NOERR;
NCjson* jcodec = NULL;
NCjson* jtmp = NULL;
unsigned* params = NULL;
struct NCJconst jc = {0,0,0,NULL};
int compcode;
/* parse the JSON */
if(NCJparse(codec_json,0,&jcodec)) {stat = NC_EFILTER; goto done;}
if(NCJsort(jcodec) != NCJ_DICT) {stat = NC_EPLUGIN; goto done;}
/* Verify the codec ID */
if(NCJdictget(jcodec,"id",&jtmp))
{stat = NC_EFILTER; goto done;}
if(jtmp == NULL || !NCJisatomic(jtmp)) {stat = NC_EINVAL; goto done;}
if(strcmp(NCJstring(jtmp),NCZ_blosc_codec.codecid)!=0) {stat = NC_EINVAL; goto done;}
if((params = (unsigned*)calloc(7,sizeof(unsigned)))==NULL) {stat = NC_ENOMEM; goto done;}
/* Get compression level*/
if(NCJdictget(jcodec,"clevel",&jtmp)) {stat = NC_EFILTER; goto done;}
if(jtmp) {
if(NCJcvt(jtmp,NCJ_INT,&jc)) {stat = NC_EFILTER; goto done;}
} else
jc.ival = DEFAULT_LEVEL;
if(jc.ival < 0 || jc.ival > NC_MAX_UINT) {stat = NC_EFILTER; goto done;}
params[4] = (unsigned)jc.ival;
/* Get blocksize */
if(NCJdictget(jcodec,"blocksize",&jtmp)) {stat = NC_EFILTER; goto done;}
if(jtmp) {
if(NCJcvt(jtmp,NCJ_INT,&jc)) {stat = NC_EFILTER; goto done;}
} else
jc.ival = DEFAULT_BLOCKSIZE;
if(jc.ival < 0 || jc.ival > NC_MAX_UINT) {stat = NC_EFILTER; goto done;}
params[3] = (unsigned)jc.ival;
/* Get shuffle */
if(NCJdictget(jcodec,"shuffle",&jtmp)) {stat = NC_EFILTER; goto done;}
if(jtmp) {
if(NCJcvt(jtmp,NCJ_INT,&jc)) {stat = NC_EFILTER; goto done;}
} else
jc.ival = BLOSC_NOSHUFFLE;
params[5] = (unsigned)jc.ival;
/* Get compname */
if(NCJdictget(jcodec,"cname",&jtmp)) {stat = NC_EFILTER; goto done;}
if(jtmp) {
if(NCJcvt(jtmp,NCJ_STRING,&jc)) {stat = NC_EFILTER; goto done;}
if(jc.sval == NULL || strlen(jc.sval) == 0) {stat = NC_EFILTER; goto done;}
if((compcode = blosc_compname_to_compcode(jc.sval)) < 0) {stat = NC_EFILTER; goto done;}
} else
compcode = DEFAULT_COMPCODE;
params[6] = (unsigned)compcode;
if(nparamsp) *nparamsp = 7;
if(paramsp) {*paramsp = params; params = NULL;}
done:
if(jc.sval) {
free(jc.sval);
}
if(params) {
free(params);
}
NCJreclaim(jcodec);
return stat;
}
static int
NCZ_blosc_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp)
{
int stat = NC_NOERR;
char json[1024];
const char* compname = NULL;
if(nparams == 0 || params == NULL)
{stat = NC_EINVAL; goto done;}
/* Get the sub-compressor name */
if(blosc_compcode_to_compname((int)params[6],&compname) < 0) {stat = NC_EFILTER; goto done;}
snprintf(json,sizeof(json),
"{\"id\": \"blosc\",\"clevel\": %u,\"blocksize\": %u,\"cname\": \"%s\",\"shuffle\": %d}",
params[4],params[3],compname,params[5]);
if(codecp) {
if((*codecp = strdup(json))==NULL) {stat = NC_ENOMEM; goto done;}
}
done:
return stat;
}

View File

@ -29,13 +29,6 @@ extern "C" {
#include "netcdf_filter_build.h"
#include "blosc.h"
/* Filter revision number, starting at 1 */
/* #define FILTER_BLOSC_VERSION 1 */
#define FILTER_BLOSC_VERSION 2 /* multiple compressors since Blosc 1.3 */
/* Filter ID registered with the HDF Group */
#define FILTER_BLOSC 32001
#ifdef _MSC_VER
#ifdef DLL_EXPORT /* define when building the library */
#define DECLSPEC __declspec(dllexport)
@ -46,6 +39,15 @@ extern "C" {
#define DECLSPEC extern
#endif
/* Filter revision number, starting at 1 */
/* #define FILTER_BLOSC_VERSION 1 */
#define FILTER_BLOSC_VERSION 2 /* multiple compressors since Blosc 1.3 */
#define DEFAULT_LEVEL 9
#define DEFAULT_BLOCKSIZE 1
#define DEFAULT_TYPESIZE 1
#define DEFAULT_COMPCODE BLOSC_LZ4
/* HDF5 Plugin API */
DECLSPEC H5PL_type_t H5PLget_plugin_type(void);
DECLSPEC const void* H5PLget_plugin_info(void);

View File

@ -214,92 +214,3 @@ H5Z_filter_bzip2(unsigned int flags, size_t cd_nelmts,
return 0;
}
/**************************************************/
/* NCZarr Filter Objects */
/* Provide the codec support for the HDF5 bzip library */
static int NCZ_bzip2_codec_to_hdf5(const char* codec, size_t* nparamsp, unsigned** paramsp);
static int NCZ_bzip2_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp);
static NCZ_codec_t NCZ_bzip2_codec = {/* NCZ_codec_t codec fields */
NCZ_CODEC_CLASS_VER, /* Struct version number */
NCZ_CODEC_HDF5, /* Struct sort */
"bz2", /* Standard name/id of the codec */
H5Z_FILTER_BZIP2, /* HDF5 alias for bzip2 */
NULL, /*NCZ_bzip2_codec_initialize*/
NULL, /*NCZ_bzip2_codec_finalize*/
NCZ_bzip2_codec_to_hdf5,
NCZ_bzip2_hdf5_to_codec,
NULL, /*NCZ_bzip2_modify_parameters*/
};
/* External Export API */
DLLEXPORT
const void*
NCZ_get_codec_info(void)
{
return (void*)&NCZ_bzip2_codec;
}
static int
NCZ_bzip2_codec_to_hdf5(const char* codec_json, size_t* nparamsp, unsigned** paramsp)
{
int stat = NC_NOERR;
NCjson* jcodec = NULL;
NCjson* jtmp = NULL;
unsigned* params = NULL;
struct NCJconst jc;
if(nparamsp == NULL || paramsp == NULL)
{stat = NC_EINTERNAL; goto done;}
if((params = (unsigned*)calloc(1,sizeof(unsigned)))== NULL)
{stat = NC_ENOMEM; goto done;}
/* parse the JSON */
if(NCJparse(codec_json,0,&jcodec))
{stat = NC_EFILTER; goto done;}
if(NCJsort(jcodec) != NCJ_DICT) {stat = NC_EPLUGIN; goto done;}
/* Verify the codec ID */
if(NCJdictget(jcodec,"id",&jtmp))
{stat = NC_EFILTER; goto done;}
if(jtmp == NULL || !NCJisatomic(jtmp)) {stat = NC_EFILTER; goto done;}
if(strcmp(NCJstring(jtmp),NCZ_bzip2_codec.codecid)!=0) {stat = NC_EINVAL; goto done;}
/* Get Level */
if(NCJdictget(jcodec,"level",&jtmp))
{stat = NC_EFILTER; goto done;}
if(NCJcvt(jtmp,NCJ_INT,&jc))
{stat = NC_EFILTER; goto done;}
if(jc.ival < 0 || jc.ival > NC_MAX_UINT) {stat = NC_EINVAL; goto done;}
params[0] = (unsigned)jc.ival;
*nparamsp = 1;
*paramsp = params; params = NULL;
done:
if(params) free(params);
NCJreclaim(jcodec);
return stat;
}
static int
NCZ_bzip2_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp)
{
int stat = NC_NOERR;
unsigned level = 0;
char json[1024];
if(nparams == 0 || params == NULL)
{stat = NC_EFILTER; goto done;}
level = params[0];
snprintf(json,sizeof(json),"{\"id\": \"%s\", \"level\": \"%u\"}",NCZ_bzip2_codec.codecid,level);
if(codecp) {
if((*codecp = strdup(json))==NULL) {stat = NC_ENOMEM; goto done;}
}
done:
return stat;
}

View File

@ -166,92 +166,3 @@ cleanupAndFail:
return 0;
}
/**************************************************/
/* NCZarr Filter Objects */
/* Provide the codec support for the HDF5 zstandard library */
static int NCZ_zstd_codec_to_hdf5(const char* codec, size_t* nparamsp, unsigned** paramsp);
static int NCZ_zstd_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp);
static NCZ_codec_t NCZ_zstd_codec = {/* NCZ_codec_t codec fields */
NCZ_CODEC_CLASS_VER, /* Struct version number */
NCZ_CODEC_HDF5, /* Struct sort */
"zstd", /* Standard name/id of the codec */
H5Z_FILTER_ZSTD, /* HDF5 alias for zstd */
NULL, /*NCZ_zstd_codec_initialize*/
NULL, /*NCZ_zstd_codec_finalize*/
NCZ_zstd_codec_to_hdf5,
NCZ_zstd_hdf5_to_codec,
NULL, /*NCZ_zstd_modify_parameters*/
};
/* External Export API */
DLLEXPORT
const void*
NCZ_get_codec_info(void)
{
return (void*)&NCZ_zstd_codec;
}
static int
NCZ_zstd_codec_to_hdf5(const char* codec_json, size_t* nparamsp, unsigned** paramsp)
{
int stat = NC_NOERR;
NCjson* jcodec = NULL;
NCjson* jtmp = NULL;
unsigned* params = NULL;
struct NCJconst jc;
if(nparamsp == NULL || paramsp == NULL)
{stat = NC_EINTERNAL; goto done;}
if((params = (unsigned*)calloc(1,sizeof(unsigned)))== NULL)
{stat = NC_ENOMEM; goto done;}
/* parse the JSON */
if(NCJparse(codec_json,0,&jcodec))
{stat = NC_EFILTER; goto done;}
if(NCJsort(jcodec) != NCJ_DICT) {stat = NC_EPLUGIN; goto done;}
/* Verify the codec ID */
if(NCJdictget(jcodec,"id",&jtmp))
{stat = NC_EFILTER; goto done;}
if(jtmp == NULL || !NCJisatomic(jtmp)) {stat = NC_EFILTER; goto done;}
if(strcmp(NCJstring(jtmp),NCZ_zstd_codec.codecid)!=0) {stat = NC_EINVAL; goto done;}
/* Get Level */
if(NCJdictget(jcodec,"level",&jtmp))
{stat = NC_EFILTER; goto done;}
if(NCJcvt(jtmp,NCJ_INT,&jc))
{stat = NC_EFILTER; goto done;}
if(jc.ival < 0 || jc.ival > NC_MAX_UINT) {stat = NC_EINVAL; goto done;}
params[0] = (unsigned)jc.ival;
*nparamsp = 1;
*paramsp = params; params = NULL;
done:
if(params) free(params);
NCJreclaim(jcodec);
return stat;
}
static int
NCZ_zstd_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp)
{
int stat = NC_NOERR;
unsigned level = 0;
char json[1024];
if(nparams == 0 || params == NULL)
{stat = NC_EFILTER; goto done;}
level = params[0];
snprintf(json,sizeof(json),"{\"id\": \"%s\", \"level\": \"%u\"}",NCZ_zstd_codec.codecid,level);
if(codecp) {
if((*codecp = strdup(json))==NULL) {stat = NC_ENOMEM; goto done;}
}
done:
return stat;
}

View File

@ -14,34 +14,22 @@ if ISMINGW
LDADD = ${top_builddir}/liblib/libnetcdf.la
endif
# BZIP2 version 1.0.8 (https://sourceware.org/bzip2/)
BZIP2SRC = blocksort.c huffman.c crctable.c randtable.c compress.c decompress.c bzlib.c bzlib.h bzlib_private.h
EXTRA_DIST = CMakeLists.txt
# The HDF5 filter wrappers
EXTRA_DIST += \
EXTRA_DIST += stdinstall.sh \
H5Ztemplate.c H5Zmisc.c H5Zutil.c H5Znoop.c h5noop.h NCZmisc.c \
H5Zshuffle.c H5Zdeflate.c H5Zszip.c H5Zszip.h \
H5Zbzip2.c h5bzip2.h H5Zblosc.c H5Zblosc.h H5Zzstd.c H5Zzstd.h
# The Codec filter wrappers
EXTRA_DIST += NCZdefaults.c NCZszip.c
EXTRA_DIST += NCZhdf5filters.c NCZstdfilters.c
# The Filter implementations
EXTRA_DIST += H5checksum.c
EXTRA_DIST += ${BZIP2SRC} BZIP2_LICENSE
if ENABLE_FILTER_TESTING
lib_LTLIBRARIES += libh5bzip2.la
libh5bzip2_la_SOURCES = H5Zbzip2.c h5bzip2.h
if ! HAVE_BZIP2
libh5bzip2_la_SOURCES += ${BZIP2SRC}
endif
noinst_LTLIBRARIES += libh5misc.la libh5noop.la libh5noop1.la libnczmisc.la
noinst_LTLIBRARIES += libnczdefaults.la
if ENABLE_NCZARR_FILTERS
noinst_LTLIBRARIES += libh5fletcher32.la libh5shuffle.la libh5deflate.la
@ -51,27 +39,39 @@ libh5deflate_la_SOURCES = H5Zdeflate.c
# Need our version of szip if libsz available and we are not using HDF5
if HAVE_SZ
noinst_LTLIBRARIES += libh5szip.la libnczszip.la
noinst_LTLIBRARIES += libh5szip.la
libh5szip_la_SOURCES = H5Zszip.c H5Zszip.h
libnczszip_la_SOURCES = NCZszip.c
endif
libnczdefaults_la_SOURCES = NCZdefaults.c
endif # ENABLE_NCZARR_FILTERS
if ENABLE_PLUGINS
libnczstdfilters_la_SOURCES = NCZstdfilters.c
libnczhdf5filters_la_SOURCES = NCZhdf5filters.c
if HAVE_BLOSC
noinst_LTLIBRARIES += libh5blosc.la
libh5blosc_la_SOURCES = H5Zblosc.c H5Zblosc.h
endif
if HAVE_ZSTD
noinst_LTLIBRARIES += libh5zstd.la
libh5zstd_la_SOURCES = H5Zzstd.c H5Zzstd.h
endif
libh5misc_la_SOURCES = H5Zmisc.c H5Zutil.c h5misc.h
noinst_LTLIBRARIES += libnczhdf5filters.la
noinst_LTLIBRARIES += libnczstdfilters.la
if HAVE_BLOSC
noinst_LTLIBRARIES += libh5blosc.la
endif
if HAVE_ZSTD
noinst_LTLIBRARIES += libh5zstd.la
endif
endif #ENABLE_PLUGINS
libh5misc_la_SOURCES = H5Zmisc.c H5Zutil.c h5misc.h
libnczmisc_la_SOURCES = NCZmisc.c
# The noop filter is to allow testing of multifilters and filter order
@ -79,6 +79,14 @@ libnczmisc_la_SOURCES = NCZmisc.c
libh5noop_la_SOURCES = H5Znoop.c H5Zutil.c h5noop.h
libh5noop1_la_SOURCES = H5Znoop1.c H5Zutil.c h5noop.h
# Bzip2 is used to test more complex filters
libh5bzip2_la_SOURCES = H5Zbzip2.c h5bzip2.h
BZIP2SRC = blocksort.c huffman.c crctable.c randtable.c compress.c decompress.c bzlib.c bzlib.h bzlib_private.h
EXTRA_DIST += ${BZIP2SRC} BZIP2_LICENSE
if HAVE_LOCAL_BZ2
libh5bzip2_la_SOURCES += ${BZIP2SRC}
endif
endif #ENABLE_FILTER_TESTING
BUILT_SOURCES = H5Znoop1.c
@ -87,6 +95,9 @@ H5Znoop1.c: Makefile H5Znoop.c
echo '#define NOOP_INSTANCE 1' > $@
cat ${srcdir}/H5Znoop.c >> $@
noinst_LTLIBRARIES += libh5bzip2.la
# Record where bzip2 came from; may be out of date
BZIP2VER = 1.0.8
BZIP2DIR = bzip2-${BZIP2VER}
BZIP2URL = https://sourceware.org/pub/bzip2/${BZIP2DIR}.tar.gz
@ -96,3 +107,7 @@ bzip2::
tar -zxf ${BZIP2DIR}.tar.gz
cd ${BZIP2DIR}; cp ${BZIP2SRC} ..; cp LICENSE ../BZIP2_LICENSE ; cd ..
rm -fr ./${BZIP2DIR}
# Custom install
install-exec-hook:
sh ./stdinstall.sh

View File

@ -21,6 +21,11 @@ Author: Dennis Heimbigner
#include "netcdf_filter_build.h"
#include "netcdf_json.h"
#ifdef HAVE_SZ
#include <szlib.h>
#include "H5Zszip.h"
#endif
#define H5Z_FILTER_DEFLATE 1 /*deflation like gzip */
#define H5Z_FILTER_SHUFFLE 2 /*shuffle the data */
#define H5Z_FILTER_FLETCHER32 3 /*fletcher32 checksum of EDC */
@ -40,6 +45,12 @@ static int NCZ_fletcher32_modify_parameters(int ncid, int varid, size_t* vnparam
static int NCZ_deflate_codec_to_hdf5(const char* codec, size_t* nparamsp, unsigned** paramsp);
static int NCZ_deflate_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp);
#ifdef HAVE_SZ
static int NCZ_szip_codec_to_hdf5(const char* codec, size_t* nparamsp, unsigned** paramsp);
static int NCZ_szip_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp);
static int NCZ_szip_modify_parameters(int ncid, int varid, size_t* vnparamsp, unsigned** vparamsp, size_t* wnparamsp, unsigned** wparamsp);
#endif
/**************************************************/
static NCZ_codec_t NCZ_shuffle_codec = {
@ -302,10 +313,224 @@ done:
/**************************************************/
#ifdef HAVE_SZ
static NCZ_codec_t NCZ_szip_codec = {
NCZ_CODEC_CLASS_VER, /* Struct version number */
NCZ_CODEC_HDF5, /* Struct sort */
"szip", /* Standard name/id of the codec */
H5Z_FILTER_SZIP, /* HDF5 alias for szip */
NULL, /*NCZ_szip_codec_initialize*/
NULL, /*NCZ_szip_codec_finalize*/
NCZ_szip_codec_to_hdf5,
NCZ_szip_hdf5_to_codec,
NCZ_szip_modify_parameters,
};
static int
NCZ_szip_codec_to_hdf5(const char* codec_json, size_t* nparamsp, unsigned** paramsp)
{
int stat = NC_NOERR;
unsigned* params = NULL;
size_t nparams = 2; /* No. of visible parameters */
NCjson* json = NULL;
NCjson* jtmp = NULL;
struct NCJconst jc = {0,0,0,NULL};
if(nparamsp == NULL || paramsp == NULL)
{stat = NC_EINTERNAL; goto done;}
if((params = (unsigned*)calloc(nparams,sizeof(unsigned)))== NULL)
{stat = NC_ENOMEM; goto done;}
if(NCJparse(codec_json,0,&json))
{stat = NC_EFILTER; goto done;}
if(NCJdictget(json,"mask",&jtmp) || jtmp == NULL)
{stat = NC_EFILTER; goto done;}
if(NCJcvt(jtmp,NCJ_INT,&jc))
{stat = NC_EFILTER; goto done;}
params[H5Z_SZIP_PARM_MASK] = (unsigned)jc.ival;
jtmp = NULL;
if(NCJdictget(json,"pixels-per-block",&jtmp) || jtmp == NULL)
{stat = NC_EFILTER; goto done;}
if(NCJcvt(jtmp,NCJ_INT,&jc))
{stat = NC_EFILTER; goto done;}
params[H5Z_SZIP_PARM_PPB] = (unsigned)jc.ival;
*nparamsp = nparams;
*paramsp = params; params = NULL;
done:
NCJreclaim(json);
nullfree(params);
return stat;
}
static int
NCZ_szip_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp)
{
int stat = NC_NOERR;
char json[2048];
snprintf(json,sizeof(json),"{\"id\": \"%s\", \"mask\": %u, \"pixels-per-block\": %u}",
NCZ_szip_codec.codecid,
params[H5Z_SZIP_PARM_MASK],
params[H5Z_SZIP_PARM_PPB]);
if(codecp) {
if((*codecp = strdup(json))==NULL) {stat = NC_ENOMEM; goto done;}
}
done:
return stat;
}
static int
NCZ_szip_modify_parameters(int ncid, int varid, size_t* vnparamsp, unsigned** vparamsp, size_t* wnparamsp, unsigned** wparamsp)
{
int i,ret_value = NC_NOERR;
nc_type vtype;
size_t typesize, scanline, dtype_precision, npoints;
int ndims, storage, dtype_order;
int dimids[NC_MAX_VAR_DIMS];
char vname[NC_MAX_NAME+1];
size_t chunklens[NC_MAX_VAR_DIMS];
unsigned* params = NULL;
unsigned* vparams = NULL;
size_t wnparams = 4;
if(wnparamsp == NULL || wparamsp == NULL)
{ret_value = NC_EFILTER; goto done;}
if(vnparamsp == NULL || vparamsp == NULL)
{ret_value = NC_EFILTER; goto done;}
if(*vnparamsp > 0 && *vparamsp == NULL)
{ret_value = NC_EFILTER; goto done;}
vparams = *vparamsp;
/* Get variable info */
if((ret_value = nc_inq_var(ncid,varid,vname,&vtype,&ndims,dimids,NULL))) goto done;
/* Get the typesize */
if((ret_value = nc_inq_type(ncid,vtype,NULL,&typesize))) goto done;
/* Get datatype's precision, in case it is less than full bits */
dtype_precision = typesize*8;
if(dtype_precision > 24) {
if(dtype_precision <= 32)
dtype_precision = 32;
else if(dtype_precision <= 64)
dtype_precision = 64;
} /* end if */
if(ndims == 0) {ret_value = NC_EFILTER; goto done;}
/* Set "local" parameter for this dataset's "pixels-per-scanline" */
if((ret_value = nc_inq_dimlen(ncid,dimids[ndims-1],&scanline))) goto done;
/* Get number of elements for the dataspace; use
total number of elements in the chunk to define the new 'scanline' size */
/* Compute chunksize */
if((ret_value = nc_inq_var_chunking(ncid,varid,&storage,chunklens))) goto done;
if(storage != NC_CHUNKED) {ret_value = NC_EFILTER; goto done;}
npoints = 1;
for(i=0;i<ndims;i++) npoints *= chunklens[i];
/* Get datatype's endianness order */
if((ret_value = nc_inq_var_endian(ncid,varid,&dtype_order))) goto done;
if((params = (unsigned*)malloc(wnparams*sizeof(unsigned)))==NULL)
{ret_value = NC_ENOMEM; goto done;}
params[H5Z_SZIP_PARM_MASK] = vparams[H5Z_SZIP_PARM_MASK];
params[H5Z_SZIP_PARM_PPB] = vparams[H5Z_SZIP_PARM_PPB];
/* Set "local" parameter for this dataset's "bits-per-pixel" */
params[H5Z_SZIP_PARM_BPP] = dtype_precision;
/* Adjust scanline if it is smaller than number of pixels per block or
if it is bigger than maximum pixels per scanline, or there are more than
SZ_MAX_BLOCKS_PER_SCANLINE blocks per scanline */
if(scanline < vparams[H5Z_SZIP_PARM_PPB]) {
if(npoints < vparams[H5Z_SZIP_PARM_PPB])
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "pixels per block greater than total number of elements in the chunk")
scanline = MIN((vparams[H5Z_SZIP_PARM_PPB] * SZ_MAX_BLOCKS_PER_SCANLINE), npoints);
} else {
if(scanline <= SZ_MAX_PIXELS_PER_SCANLINE)
scanline = MIN((vparams[H5Z_SZIP_PARM_PPB] * SZ_MAX_BLOCKS_PER_SCANLINE), scanline);
else
scanline = vparams[H5Z_SZIP_PARM_PPB] * SZ_MAX_BLOCKS_PER_SCANLINE;
} /* end else */
/* Assign the final value to the scanline */
params[H5Z_SZIP_PARM_PPS] = (unsigned)scanline;
/* Set the correct mask flags */
/* From H5Pdcpl.c#H5Pset_szip */
params[H5Z_SZIP_PARM_MASK] &= (unsigned)(~H5_SZIP_CHIP_OPTION_MASK);
params[H5Z_SZIP_PARM_MASK] |= H5_SZIP_ALLOW_K13_OPTION_MASK;
params[H5Z_SZIP_PARM_MASK] |= H5_SZIP_RAW_OPTION_MASK;
params[H5Z_SZIP_PARM_MASK] &= (unsigned)(~(H5_SZIP_LSB_OPTION_MASK | H5_SZIP_MSB_OPTION_MASK));
/* From H5Zszip.c#H5Z__set_local_szip */
params[H5Z_SZIP_PARM_MASK] &= (unsigned)(~(H5_SZIP_LSB_OPTION_MASK | H5_SZIP_MSB_OPTION_MASK));
switch(dtype_order) {
case NC_ENDIAN_LITTLE: /* Little-endian byte order */
params[H5Z_SZIP_PARM_MASK] |= H5_SZIP_LSB_OPTION_MASK;
break;
case NC_ENDIAN_BIG: /* Big-endian byte order */
params[H5Z_SZIP_PARM_MASK] |= H5_SZIP_MSB_OPTION_MASK;
break;
default:
HGOTO_ERROR(H5E_PLINE, H5E_BADTYPE, FAIL, "bad datatype endianness order")
} /* end switch */
*wnparamsp = wnparams;
nullfree(*wparamsp);
*wparamsp = params; params = NULL;
done:
nullfree(params);
FUNC_LEAVE_NOAPI(ret_value)
}
#if 0
static int
NCZ_szip_visible_parameters(int ncid, int varid, size_t nparamsin, const unsigned int* paramsin, size_t* nparamsp, unsigned** paramsp)
{
int stat = NC_NOERR;
unsigned* params = NULL;
if(nparamsp == NULL || paramsp == NULL)
{stat = NC_EFILTER; goto done;}
if((params = (unsigned*)malloc(2*sizeof(unsigned)))==NULL)
{stat = NC_ENOMEM; goto done;}
params[H5Z_SZIP_PARM_MASK] = paramsin[H5Z_SZIP_PARM_MASK];
params[H5Z_SZIP_PARM_PPB] = paramsin[H5Z_SZIP_PARM_PPB];
nullfree(*paramsp);
*paramsp = params; params = NULL;
done:
nullfree(params);
return stat;
}
#endif
#endif /*HAVE_SZ*/
/**************************************************/
NCZ_codec_t* NCZ_default_codecs[] = {
&NCZ_shuffle_codec,
&NCZ_fletcher32_codec,
&NCZ_zlib_codec,
#ifdef HAVE_SZ
&NCZ_szip_codec,
#endif
NULL
};

plugins/NCZstdfilters.c Normal file (460 lines)
View File

@ -0,0 +1,460 @@
/* Copyright 2003-2018, University Corporation for Atmospheric
* Research. See the COPYRIGHT file for copying and redistribution
* conditions.
*/
/*
Author: Dennis Heimbigner
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include "netcdf.h"
#include "netcdf_filter.h"
#include "netcdf_filter_build.h"
#include "netcdf_json.h"
/* Provide Codec information for the standard filters */
#ifndef H5Z_FILTER_BZIP2
#define H5Z_FILTER_BZIP2 1
#define H5Z_FILTER_ZSTD 2
#define H5Z_FILTER_BLOSC 3
#endif
#ifdef HAVE_BLOSC
#include "H5Zblosc.h"
#endif
/**************************************************/
/* NCZarr Filter Objects */
/* Forward */
#ifdef HAVE_BZ2
static int NCZ_bzip2_codec_to_hdf5(const char* codec, size_t* nparamsp, unsigned** paramsp);
static int NCZ_bzip2_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp);
#endif
#ifdef HAVE_ZSTD
static int NCZ_zstd_codec_to_hdf5(const char* codec, size_t* nparamsp, unsigned** paramsp);
static int NCZ_zstd_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp);
#endif
#ifdef HAVE_BLOSC
static int NCZ_blosc_codec_to_hdf5(const char* codec, size_t* nparamsp, unsigned** paramsp);
static int NCZ_blosc_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp);
static int NCZ_blosc_modify_parameters(int ncid, int varid, size_t* vnparamsp, unsigned** vparamsp, size_t* wnparamsp, unsigned** wparamsp);
static void NCZ_blosc_codec_finalize(void);
#endif
/**************************************************/
/* Provide the codec support for bzip2 filter */
#ifdef HAVE_BZ2
static NCZ_codec_t NCZ_bzip2_codec = {/* NCZ_codec_t codec fields */
NCZ_CODEC_CLASS_VER, /* Struct version number */
NCZ_CODEC_HDF5, /* Struct sort */
"bz2", /* Standard name/id of the codec */
H5Z_FILTER_BZIP2, /* HDF5 alias for bzip2 */
NULL, /*NCZ_bzip2_codec_initialize*/
NULL, /*NCZ_bzip2_codec_finalize*/
NCZ_bzip2_codec_to_hdf5,
NCZ_bzip2_hdf5_to_codec,
NULL, /*NCZ_bzip2_modify_parameters*/
};
/* External Export API */
DLLEXPORT
const void*
NCZ_get_codec_info(void)
{
return (void*)&NCZ_bzip2_codec;
}
static int
NCZ_bzip2_codec_to_hdf5(const char* codec_json, size_t* nparamsp, unsigned** paramsp)
{
int stat = NC_NOERR;
NCjson* jcodec = NULL;
NCjson* jtmp = NULL;
unsigned* params = NULL;
struct NCJconst jc;
if(nparamsp == NULL || paramsp == NULL)
{stat = NC_EINTERNAL; goto done;}
if((params = (unsigned*)calloc(1,sizeof(unsigned)))== NULL)
{stat = NC_ENOMEM; goto done;}
/* parse the JSON */
if(NCJparse(codec_json,0,&jcodec))
{stat = NC_EFILTER; goto done;}
if(NCJsort(jcodec) != NCJ_DICT) {stat = NC_EPLUGIN; goto done;}
/* Verify the codec ID */
if(NCJdictget(jcodec,"id",&jtmp))
{stat = NC_EFILTER; goto done;}
if(jtmp == NULL || !NCJisatomic(jtmp)) {stat = NC_EFILTER; goto done;}
if(strcmp(NCJstring(jtmp),NCZ_bzip2_codec.codecid)!=0) {stat = NC_EINVAL; goto done;}
/* Get Level */
if(NCJdictget(jcodec,"level",&jtmp))
{stat = NC_EFILTER; goto done;}
if(NCJcvt(jtmp,NCJ_INT,&jc))
{stat = NC_EFILTER; goto done;}
if(jc.ival < 0 || jc.ival > NC_MAX_UINT) {stat = NC_EINVAL; goto done;}
params[0] = (unsigned)jc.ival;
*nparamsp = 1;
*paramsp = params; params = NULL;
done:
if(params) free(params);
NCJreclaim(jcodec);
return stat;
}
static int
NCZ_bzip2_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp)
{
int stat = NC_NOERR;
unsigned level = 0;
char json[1024];
if(nparams == 0 || params == NULL)
{stat = NC_EFILTER; goto done;}
level = params[0];
snprintf(json,sizeof(json),"{\"id\": \"%s\", \"level\": \"%u\"}",NCZ_bzip2_codec.codecid,level);
if(codecp) {
if((*codecp = strdup(json))==NULL) {stat = NC_ENOMEM; goto done;}
}
done:
return stat;
}
#endif
/**************************************************/
/* Provide the codec support for zstandard filter */
#ifdef HAVE_ZSTD
static NCZ_codec_t NCZ_zstd_codec = {/* NCZ_codec_t codec fields */
NCZ_CODEC_CLASS_VER, /* Struct version number */
NCZ_CODEC_HDF5, /* Struct sort */
"zstd", /* Standard name/id of the codec */
H5Z_FILTER_ZSTD, /* HDF5 alias for zstd */
NULL, /*NCZ_zstd_codec_initialize*/
NULL, /*NCZ_zstd_codec_finalize*/
NCZ_zstd_codec_to_hdf5,
NCZ_zstd_hdf5_to_codec,
NULL, /*NCZ_zstd_modify_parameters*/
};
static int
NCZ_zstd_codec_to_hdf5(const char* codec_json, size_t* nparamsp, unsigned** paramsp)
{
int stat = NC_NOERR;
NCjson* jcodec = NULL;
NCjson* jtmp = NULL;
unsigned* params = NULL;
struct NCJconst jc;
if(nparamsp == NULL || paramsp == NULL)
{stat = NC_EINTERNAL; goto done;}
if((params = (unsigned*)calloc(1,sizeof(unsigned)))== NULL)
{stat = NC_ENOMEM; goto done;}
/* parse the JSON */
if(NCJparse(codec_json,0,&jcodec))
{stat = NC_EFILTER; goto done;}
if(NCJsort(jcodec) != NCJ_DICT) {stat = NC_EPLUGIN; goto done;}
/* Verify the codec ID */
if(NCJdictget(jcodec,"id",&jtmp))
{stat = NC_EFILTER; goto done;}
if(jtmp == NULL || !NCJisatomic(jtmp)) {stat = NC_EFILTER; goto done;}
if(strcmp(NCJstring(jtmp),NCZ_zstd_codec.codecid)!=0) {stat = NC_EINVAL; goto done;}
/* Get Level */
if(NCJdictget(jcodec,"level",&jtmp))
{stat = NC_EFILTER; goto done;}
if(NCJcvt(jtmp,NCJ_INT,&jc))
{stat = NC_EFILTER; goto done;}
if(jc.ival < 0 || jc.ival > NC_MAX_UINT) {stat = NC_EINVAL; goto done;}
params[0] = (unsigned)jc.ival;
*nparamsp = 1;
*paramsp = params; params = NULL;
done:
if(params) free(params);
NCJreclaim(jcodec);
return stat;
}
static int
NCZ_zstd_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp)
{
int stat = NC_NOERR;
unsigned level = 0;
char json[1024];
if(nparams == 0 || params == NULL)
{stat = NC_EFILTER; goto done;}
level = params[0];
snprintf(json,sizeof(json),"{\"id\": \"%s\", \"level\": \"%u\"}",NCZ_zstd_codec.codecid,level);
if(codecp) {
if((*codecp = strdup(json))==NULL) {stat = NC_ENOMEM; goto done;}
}
done:
return stat;
}
#endif
/**************************************************/
/* Provide the codec support for blosc filter */
#ifdef HAVE_BLOSC
/* Structure for NCZ_PLUGIN_CODEC */
static NCZ_codec_t NCZ_blosc_codec = {/* NCZ_codec_t codec fields */
NCZ_CODEC_CLASS_VER, /* Struct version number */
NCZ_CODEC_HDF5, /* Struct sort */
"blosc", /* Standard name/id of the codec */
H5Z_FILTER_BLOSC, /* HDF5 alias for blosc */
NULL, /*NCZ_blosc_codec_initialize*/
NCZ_blosc_codec_finalize,
NCZ_blosc_codec_to_hdf5,
NCZ_blosc_hdf5_to_codec,
NCZ_blosc_modify_parameters,
};
/* NCZarr Interface Functions */
/* Create the true parameter set:
Visible parameters:
param[0] -- reserved
param[1] -- reserved
param[2] -- reserved
param[3] -- variable chunksize in bytes | 0 (=>default)
param[4] -- compression level
param[5] -- BLOSC_SHUFFLE|BLOSC_BITSHUFFLE
param[6] -- compressor index
Working parameters:
param[0] -- filter revision
param[1] -- blosc version
param[2] -- variable type size in bytes
param[3] -- variable chunksize in bytes
param[4] -- compression level
param[5] -- BLOSC_SHUFFLE|BLOSC_BITSHUFFLE
param[6] -- compressor index
*/
void blosc_destroy(void);
static int ncz_blosc_initialized = 0;
static void
NCZ_blosc_codec_finalize(void)
{
if(ncz_blosc_initialized) {
blosc_destroy();
ncz_blosc_initialized = 0;
}
}
static int
NCZ_blosc_modify_parameters(int ncid, int varid, size_t* vnparamsp, unsigned** vparamsp, size_t* wnparamsp, unsigned** wparamsp)
{
int i,stat = NC_NOERR;
nc_type vtype;
int storage, ndims;
size_t* chunklens = NULL;
size_t typesize, chunksize;
char vname[NC_MAX_NAME+1];
unsigned* params = NULL;
size_t nparams;
size_t vnparams = *vnparamsp;
unsigned* vparams = *vparamsp;
if(vnparams < 7)
{stat = NC_EFILTER; goto done;}
nparams = 7;
if(vparams == NULL)
{stat = NC_EFILTER; goto done;}
if(wnparamsp == NULL || wparamsp == NULL)
{stat = NC_EFILTER; goto done;}
vnparams = *vnparamsp;
vparams = *vparamsp;
/* Get variable info */
if((stat = nc_inq_var(ncid,varid,vname,&vtype,&ndims,NULL,NULL))) goto done;
if(ndims == 0) {stat = NC_EFILTER; goto done;}
/* Get the typesize */
if((stat = nc_inq_type(ncid,vtype,NULL,&typesize))) goto done;
/* Compute chunksize */
if((chunklens = (size_t*)calloc(ndims,sizeof(size_t)))==NULL) goto done;
if((stat = nc_inq_var_chunking(ncid,varid,&storage,chunklens))) goto done;
if(storage != NC_CHUNKED) {stat = NC_EFILTER; goto done;}
chunksize = typesize;
for(i=0;i<ndims;i++) chunksize *= chunklens[i];
if((params = (unsigned*)malloc(vnparams*sizeof(unsigned)))==NULL)
{stat = NC_ENOMEM; goto done;}
memcpy(params,vparams,vnparams*sizeof(unsigned));
params[0] = FILTER_BLOSC_VERSION;
params[1] = BLOSC_VERSION_FORMAT;
params[2] = (unsigned)typesize;
params[3] = chunksize;
params[4] = params[4];
params[5] = params[5];
params[6] = params[6];
*wnparamsp = nparams;
nullfree(*wparamsp);
*wparamsp = params; params = NULL;
done:
nullfree(chunklens);
nullfree(params);
FUNC_LEAVE_NOAPI(stat)
}
static int
NCZ_blosc_codec_to_hdf5(const char* codec_json, size_t* nparamsp, unsigned** paramsp)
{
int stat = NC_NOERR;
NCjson* jcodec = NULL;
NCjson* jtmp = NULL;
unsigned* params = NULL;
struct NCJconst jc = {0,0,0,NULL};
int compcode;
/* parse the JSON */
if(NCJparse(codec_json,0,&jcodec)) {stat = NC_EFILTER; goto done;}
if(NCJsort(jcodec) != NCJ_DICT) {stat = NC_EPLUGIN; goto done;}
/* Verify the codec ID */
if(NCJdictget(jcodec,"id",&jtmp))
{stat = NC_EFILTER; goto done;}
if(jtmp == NULL || !NCJisatomic(jtmp)) {stat = NC_EINVAL; goto done;}
if(strcmp(NCJstring(jtmp),NCZ_blosc_codec.codecid)!=0) {stat = NC_EINVAL; goto done;}
if((params = (unsigned*)calloc(7,sizeof(unsigned)))==NULL) {stat = NC_ENOMEM; goto done;}
/* Get compression level*/
if(NCJdictget(jcodec,"clevel",&jtmp)) {stat = NC_EFILTER; goto done;}
if(jtmp) {
if(NCJcvt(jtmp,NCJ_INT,&jc)) {stat = NC_EFILTER; goto done;}
} else
jc.ival = DEFAULT_LEVEL;
if(jc.ival < 0 || jc.ival > NC_MAX_UINT) {stat = NC_EFILTER; goto done;}
params[4] = (unsigned)jc.ival;
/* Get blocksize */
if(NCJdictget(jcodec,"blocksize",&jtmp)) {stat = NC_EFILTER; goto done;}
if(jtmp) {
if(NCJcvt(jtmp,NCJ_INT,&jc)) {stat = NC_EFILTER; goto done;}
} else
jc.ival = DEFAULT_BLOCKSIZE;
if(jc.ival < 0 || jc.ival > NC_MAX_UINT) {stat = NC_EFILTER; goto done;}
params[3] = (unsigned)jc.ival;
/* Get shuffle */
if(NCJdictget(jcodec,"shuffle",&jtmp)) {stat = NC_EFILTER; goto done;}
if(jtmp) {
if(NCJcvt(jtmp,NCJ_INT,&jc)) {stat = NC_EFILTER; goto done;}
} else
jc.ival = BLOSC_NOSHUFFLE;
params[5] = (unsigned)jc.ival;
/* Get compname */
if(NCJdictget(jcodec,"cname",&jtmp)) {stat = NC_EFILTER; goto done;}
if(jtmp) {
if(NCJcvt(jtmp,NCJ_STRING,&jc)) {stat = NC_EFILTER; goto done;}
if(jc.sval == NULL || strlen(jc.sval) == 0) {stat = NC_EFILTER; goto done;}
if((compcode = blosc_compname_to_compcode(jc.sval)) < 0) {stat = NC_EFILTER; goto done;}
} else
compcode = DEFAULT_COMPCODE;
params[6] = (unsigned)compcode;
if(nparamsp) *nparamsp = 7;
if(paramsp) {*paramsp = params; params = NULL;}
done:
if(jc.sval) {
free(jc.sval);
}
if(params) {
free(params);
}
NCJreclaim(jcodec);
return stat;
}
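/* Illustrative sketch, not part of the upstream source: a codec JSON of the
 * form parsed above; the particular values are arbitrary. The parser maps
 * "clevel" to params[4], "blocksize" to params[3], "shuffle" to params[5],
 * and "cname" (via blosc_compname_to_compcode) to params[6]. */
static const char* example_blosc_codec_json =
    "{\"id\": \"blosc\", \"clevel\": 5, \"blocksize\": 0, \"cname\": \"lz4\", \"shuffle\": 1}";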
static int
NCZ_blosc_hdf5_to_codec(size_t nparams, const unsigned* params, char** codecp)
{
int stat = NC_NOERR;
char json[1024];
const char* compname = NULL;
if(nparams < 7 || params == NULL) /* params[3..6] are read below */
{stat = NC_EINVAL; goto done;}
/* Get the sub-compressor name */
if(blosc_compcode_to_compname((int)params[6],&compname) < 0) {stat = NC_EFILTER; goto done;}
snprintf(json,sizeof(json),
"{\"id\": \"blosc\",\"clevel\": %u,\"blocksize\": %u,\"cname\": \"%s\",\"shuffle\": %d}",
params[4],params[3],compname,params[5]);
if(codecp) {
if((*codecp = strdup(json))==NULL) {stat = NC_ENOMEM; goto done;}
}
done:
return stat;
}
#endif
/**************************************************/
NCZ_codec_t* NCZ_stdfilters_codecs[] = {
#ifdef HAVE_BZ2
&NCZ_bzip2_codec,
#endif
#ifdef HAVE_ZSTD
&NCZ_zstd_codec,
#endif
#ifdef HAVE_BLOSC
&NCZ_blosc_codec,
#endif
NULL
};
/* External Export API */
DLLEXPORT
const void*
NCZ_codec_info_defaults(void)
{
return (void*)&NCZ_stdfilters_codecs;
}
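/* Illustrative sketch, not part of the upstream source: how a caller might
 * walk the NULL-terminated list returned above. NCZ_codec_t and its codecid
 * member are used exactly as elsewhere in this file; printing assumes
 * <stdio.h> is available and is only for illustration. */
static void
example_list_default_codecs(void)
{
    NCZ_codec_t** codecs = (NCZ_codec_t**)NCZ_codec_info_defaults();
    for(size_t i = 0; codecs[i] != NULL; i++)
        printf("NCZarr default codec: %s\n", codecs[i]->codecid);
}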

44
plugins/stdinstall.in Executable file
View File

@@ -0,0 +1,44 @@
#!/bin/bash
if test "x$srcdir" = x ; then srcdir=`pwd`; fi
. ../test_common.sh
set -x
INSTALLDIR=@PLUGIN_INSTALL_DIR@
# Load the findplugins function
. ${builddir}/findplugin.sh
echo "findplugin.sh loaded"
installplugin() {
PLUG="$1"
# Locate the plugin path and the library name; argument order is critical
findplugin ${PLUG}
if ! test -f "$HDF5_PLUGIN_DIR/$HDF5_PLUGIN_LIB" ; then
echo "Not exists: ${HDF5_PLUGIN_DIR}/$HDF5_PLUGIN_LIB ; ignored"
return
fi
if ! test -d "${INSTALLDIR}" ; then
echo "Not exists: ${INSTALLDIR} ; creating"
mkdir -p "${INSTALLDIR}"
fi
echo "Installing: $HDF5_PLUGIN_DIR/$HDF5_PLUGIN_LIB into $INSTALLDIR"
cp -f "$HDF5_PLUGIN_DIR/$HDF5_PLUGIN_LIB" $INSTALLDIR
}
if test "x$USEPLUGINS" != x ; then
if test "x$INSTALLDIR" != x ; then
installplugin h5bzip2
installplugin h5zstd
installplugin h5blosc
if test "x$FEATURE_NCZARR" ; then
installplugin h5fletcher32
installplugin h5shuffle
installplugin h5deflate
installplugin h5szip
installplugin nczdefaults
installplugin nczszip
fi
fi
fi

View File

@@ -15,17 +15,24 @@ FP_ISMSVC=@ISMSVC@
FP_ISCYGWIN=@ISCYGWIN@
FP_ISMINGW=@ISMINGW@
FP_ISMSYS=@ISMSYS@
FP_ISOSX=@ISOSX@
FP_ISREGEDIT=@ISREGEDIT@
FP_USEPLUGINS=@USEPLUGINS@
FP_ISREGEDIT=@ISREGEDIT@
# Feature flags
FEATURE_HDF5=@HAS_HDF5@
FEATURE_PARALLEL=@HAS_PARALLEL@
# Define selected features of the build
FEATURE_HDF5=@HAS_HDF5@
FEATURE_NCZARR=@HAS_NCZARR@
FEATURE_S3TESTS=@DO_NCZARR_S3_TESTS@
FEATURE_NCZARR_ZIP=@DO_NCZARR_ZIP_TESTS@
FEATURE_FILTERTESTS=@DO_FILTER_TESTS@
FEATURE_PLUGIN_INSTALL_DIR=@PLUGIN_INSTALL_DIR@
set -e
@@ -84,14 +91,11 @@ if test "x$SETX" = x1 ; then set -x ; fi
# On MINGW, bash and other POSIX utilities use a mounted root directory,
# but executables compiled for Windows do not recognise the mount point.
# Here we ensure that Windows paths are used in tests of Windows executables.
if test "x${FP_ISMSYS}" = xyes || test "x${FP_ISCYGWIN}" = xyes ; then
if test "x${MSYS2_PREFIX}" = x ; then
MSYS2_PREFIX=`cygpath -w '/'`
export MSYS2_PREFIX
fi
fi
if test "x${FP_ISMINGW}" = xyes ; then
alias pwd='pwd -W'
system=`uname`
if test "x${system##MINGW*}" = x; then
alias pwd='pwd -W'
fi
# We assume that TOPSRCDIR and TOPBUILDDIR are defined

View File

@@ -108,10 +108,12 @@ static Test PATHTESTS[] = {
char* macros[128];
/*Forward */
static const char* kind2string(int kind);
static char* expand(const char* s);
static void setmacros(void);
static void reclaimmacros(void);
#ifdef DEBUG
static const char* kind2string(int kind);
#endif
int
main(int argc, char** argv)
@@ -130,7 +132,9 @@ main(int argc, char** argv)
/* Test localkind X path-kind */
for(test=PATHTESTS;test->test;test++) {
#ifdef DEBUG
int inputkind = NCgetinputpathkind(test->test);
#endif
/* Iterate over the test paths */
for(k=0;k<NKINDS;k++) {
int kind = kinds[k];
@@ -188,6 +192,7 @@ main(int argc, char** argv)
return (failcount > 0 ? 1 : 0);
}
#ifdef DEBUG
static const char*
kind2string(int kind)
{
@@ -206,6 +211,7 @@ kind2string(int kind)
}
return "Unknown";
}
#endif
static char*
expand(const char* s)