Mirror of https://github.com/Unidata/netcdf-c.git, synced 2025-03-31 17:50:26 +08:00

Commit 9dfafe6c63: Bring up-to-date with main

.github/workflows/run_tests_osx.yml (vendored, 2 changes)
@@ -7,7 +7,7 @@
 name: Run macOS-based netCDF Tests
 
-on: [pull_request, workflow_dispatch]
+on: [pull_request,workflow_dispatch]
 
 jobs:

.github/workflows/run_tests_ubuntu.yml (vendored, 42 changes)
@@ -1,5 +1,5 @@
 ###
-# Build hdf4, hdf5 dependencies and cache them in a combined directory.
+# Build hdf5 dependencies and cache them in a combined directory.
 ###
 
 name: Run Ubuntu/Linux netCDF Tests
@@ -25,7 +25,7 @@ jobs:
         run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev doxygen
 
       ###
-      # Installing libhdf4 and libhdf5
+      # Installing libhdf5
       ###
       - name: Cache libhdf5-${{ matrix.hdf5 }}
         id: cache-hdf5
@@ -39,13 +39,15 @@ jobs:
         if: steps.cache-hdf5.outputs.cache-hit != 'true'
         run: |
           set -x
 
           wget https://support.hdfgroup.org/ftp/HDF/releases/HDF4.2.15/src/hdf-4.2.15.tar.bz2
           tar -jxf hdf-4.2.15.tar.bz2
           pushd hdf-4.2.15
-          ./configure --prefix=${HOME}/environments/${{ matrix.hdf5 }} --disable-static --enable-shared --disable-fortran --disable-netcdf --with-szlib
+          ./configure --prefix=${HOME}/environments/${{ matrix.hdf5 }} --disable-static --enable-shared --disable-fortran --disable-netcdf --with-szlib --enable-hdf4-xdr
           make -j
           make install -j
           popd
 
           wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-$(echo ${{ matrix.hdf5 }} | cut -d. -f 1,2)/hdf5-${{ matrix.hdf5 }}/src/hdf5-${{ matrix.hdf5 }}.tar.bz2
           tar -jxf hdf5-${{ matrix.hdf5 }}.tar.bz2
           pushd hdf5-${{ matrix.hdf5 }}
@@ -72,7 +74,7 @@ jobs:
         run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev mpich libmpich-dev
 
       ###
-      # Installing libhdf4 and libhdf5
+      # Installing libhdf5
       ###
       - name: Cache libhdf5-parallel-${{ matrix.hdf5 }}
         id: cache-hdf5
@@ -86,13 +88,15 @@ jobs:
         if: steps.cache-hdf5.outputs.cache-hit != 'true'
         run: |
           set -x
 
           wget https://support.hdfgroup.org/ftp/HDF/releases/HDF4.2.15/src/hdf-4.2.15.tar.bz2
           tar -jxf hdf-4.2.15.tar.bz2
           pushd hdf-4.2.15
-          CC=mpicc ./configure --prefix=${HOME}/environments/${{ matrix.hdf5 }} --disable-static --enable-shared --disable-fortran --disable-netcdf --with-szlib --enable-parallel
+          CC=mpicc ./configure --prefix=${HOME}/environments/${{ matrix.hdf5 }} --disable-static --enable-shared --disable-fortran --disable-netcdf --with-szlib --enable-parallel --enable-hdf4-xdr
           make -j
           make install -j
           popd
 
           wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-$(echo ${{ matrix.hdf5 }} | cut -d. -f 1,2)/hdf5-${{ matrix.hdf5 }}/src/hdf5-${{ matrix.hdf5 }}.tar.bz2
           tar -jxf hdf5-${{ matrix.hdf5 }}.tar.bz2
           pushd hdf5-${{ matrix.hdf5 }}
@@ -164,7 +168,7 @@ jobs:
 
       - name: Configure
         shell: bash -l {0}
-        run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} ./configure --enable-hdf4 --enable-hdf5 --enable-dap --disable-dap-remote-tests --enable-doxygen --enable-external-server-tests
+        run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} ./configure --enable-hdf5 --enable-dap --disable-dap-remote-tests --enable-doxygen --enable-external-server-tests
         if: ${{ success() }}
 
       - name: Look at config.log if error
@@ -240,7 +244,7 @@ jobs:
 
       - name: Configure
         shell: bash -l {0}
-        run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} CC=mpicc ./configure --enable-hdf4 --enable-hdf5 --enable-dap --disable-dap-remote-tests --enable-parallel-tests --enable-pnetcdf
+        run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} CC=mpicc ./configure --enable-hdf5 --enable-dap --disable-dap-remote-tests --enable-parallel-tests --enable-pnetcdf
         if: ${{ success() }}
 
       - name: Look at config.log if error
@@ -322,7 +326,7 @@ jobs:
         run: |
           mkdir build
           cd build
-          LD_LIBRARY_PATH=${LD_LIBRARY_PATH} cmake .. -DENABLE_HDF4=TRUE -DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH} -DENABLE_DAP=TRUE -DENABLE_HDF5=TRUE -DENABLE_NCZARR=TRUE -D ENABLE_DAP_LONG_TESTS=TRUE
+          LD_LIBRARY_PATH=${LD_LIBRARY_PATH} cmake .. -DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH} -DENABLE_DAP=TRUE -DENABLE_HDF5=TRUE -DENABLE_NCZARR=TRUE -D ENABLE_DAP_LONG_TESTS=TRUE
 
       - name: Print Summary
         shell: bash -l {0}
@@ -402,7 +406,7 @@ jobs:
         run: |
           mkdir build
           cd build
-          LD_LIBRARY_PATH=${LD_LIBRARY_PATH} cmake .. -DCMAKE_C_COMPILER=mpicc -DENABLE_HDF4=TRUE -DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH} -DENABLE_DAP=TRUE -DENABLE_HDF5=TRUE -DENABLE_NCZARR=TRUE -D ENABLE_DAP_LONG_TESTS=TRUE -DENABLE_PNETCDF=TRUE
+          LD_LIBRARY_PATH=${LD_LIBRARY_PATH} cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH} -DENABLE_DAP=TRUE -DENABLE_HDF5=TRUE -DENABLE_NCZARR=TRUE -D ENABLE_DAP_LONG_TESTS=TRUE -DENABLE_PNETCDF=TRUE
 
       - name: Print Summary
         shell: bash -l {0}
@@ -458,11 +462,9 @@ jobs:
       - run: echo "LDFLAGS=-L${HOME}/environments/${{ matrix.hdf5 }}/lib" >> $GITHUB_ENV
       - run: echo "LD_LIBRARY_PATH=${HOME}/environments/${{ matrix.hdf5 }}/lib" >> $GITHUB_ENV
       - run: |
-          echo "ENABLE_HDF4=--disable-hdf4" >> $GITHUB_ENV
           echo "ENABLE_HDF5=--disable-hdf5" >> $GITHUB_ENV
         if: matrix.use_nc4 == 'nc3'
       - run: |
-          echo "ENABLE_HDF4=--enable-hdf4" >> $GITHUB_ENV
           echo "ENABLE_HDF5=--enable-hdf5" >> $GITHUB_ENV
         if: matrix.use_nc4 == 'nc4'
       - run: echo "ENABLE_DAP=--disable-dap" >> $GITHUB_ENV
@@ -499,7 +501,7 @@ jobs:
 
       - name: Configure
         shell: bash -l {0}
-        run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} ./configure ${ENABLE_HDF4} ${ENABLE_HDF5} ${ENABLE_DAP} ${ENABLE_NCZARR}
+        run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} ./configure ${ENABLE_HDF5} ${ENABLE_DAP} ${ENABLE_NCZARR}
         if: ${{ success() }}
 
       - name: Look at config.log if error
@@ -526,18 +528,6 @@ jobs:
         run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make check -j
         if: ${{ success() }}
 
-      # - name: Make Distcheck
-      #   shell: bash -l {0}
-      #   run: CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} DISTCHECK_CONFIGURE_FLAGS="${ENABLE_HDF4} ${ENABLE_HDF5} ${ENABLE_DAP} ${ENABLE_NCZARR}" make distcheck
-      #   if: ${{ success() }}
-
-      #- name: Start SSH Debug
-      #  uses: luchihoratiu/debug-via-ssh@main
-      #  with:
-      #    NGROK_AUTH_TOKEN: ${{ secrets.NGROK_AUTH_TOKEN }}
-      #    SSH_PASS: ${{ secrets.SSH_PASS }}
-      #  if: ${{ failure() }}
-
   nc-cmake:
 
     needs: [ nc-cmake-tests-oneoff-serial, nc-ac-tests-oneoff-serial, nc-cmake-tests-oneoff-parallel, nc-ac-tests-oneoff-parallel ]
@@ -564,11 +554,9 @@ jobs:
       - run: echo "CMAKE_PREFIX_PATH=${HOME}/environments/${{ matrix.hdf5 }}/" >> $GITHUB_ENV
       - run: echo "LD_LIBRARY_PATH=${HOME}/environments/${{ matrix.hdf5 }}/lib" >> $GITHUB_ENV
       - run: |
-          echo "ENABLE_HDF4=OFF" >> $GITHUB_ENV
           echo "ENABLE_HDF5=OFF" >> $GITHUB_ENV
         if: matrix.use_nc4 == 'nc3'
       - run: |
-          echo "ENABLE_HDF4=ON" >> $GITHUB_ENV
           echo "ENABLE_HDF5=ON" >> $GITHUB_ENV
         if: matrix.use_nc4 == 'nc4'
       - run: echo "ENABLE_DAP=OFF" >> $GITHUB_ENV
@@ -605,7 +593,7 @@ jobs:
         run: |
           mkdir build
           cd build
-          LD_LIBRARY_PATH=${LD_LIBRARY_PATH} cmake .. -DENABLE_HDF4=${ENABLE_HDF4} -DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH} -DENABLE_DAP=${ENABLE_DAP} -DENABLE_HDF5=${ENABLE_HDF5} -DENABLE_NCZARR=${ENABLE_NCZARR}
+          LD_LIBRARY_PATH=${LD_LIBRARY_PATH} cmake .. -DCMAKE_PREFIX_PATH=${CMAKE_PREFIX_PATH} -DENABLE_DAP=${ENABLE_DAP} -DENABLE_HDF5=${ENABLE_HDF5} -DENABLE_NCZARR=${ENABLE_NCZARR}
 
       - name: Print Summary
         shell: bash -l {0}

.github/workflows/run_tests_win_cygwin.yml (vendored, 4 changes)
@@ -1,6 +1,6 @@
 name: Run Cygwin-based tests
 
-on: [pull_request, workflow_dispatch]
+on: [pull_request,workflow_dispatch]
 
 env:
   SHELLOPTS: igncr
@@ -29,7 +29,7 @@ jobs:
           git automake libtool autoconf2.5 make libhdf5-devel
           libhdf4-devel zipinfo libxml2-devel perl zlib-devel
           libzstd-devel libbz2-devel libaec-devel libzip-devel
-          libdeflate-devel gcc-core
+          libdeflate-devel gcc-core libcurl-devel libiconv-devel
 
       - name: (Autotools) Run autoconf and friends
         run: |

.github/workflows/run_tests_win_mingw.yml (vendored, 2 changes)
@@ -9,7 +9,7 @@ name: Run MSYS2, MinGW64-based Tests
 env:
   CPPFLAGS: "-D_BSD_SOURCE"
 
-on: [pull_request, workflow_dispatch]
+on: [pull_request,workflow_dispatch]
 
 jobs:

@@ -519,8 +519,8 @@ OPTION(ENABLE_CDF5 "Enable CDF5 support" ON)
 # Netcdf-4 support (i.e. libsrc4) is required by more than just HDF5 (e.g. NCZarr)
 # So depending on what above formats are enabled, enable netcdf-4
 if(ENABLE_HDF5 OR ENABLE_HDF4 OR ENABLE_NCZARR)
-SET(ENABLE_NETCDF_4 ON CACHE BOOL "Enable netCDF-4 API" FORCE)
-SET(ENABLE_NETCDF4 ON CACHE BOOL "Enable netCDF4 Alias" FORCE)
+  SET(ENABLE_NETCDF_4 ON CACHE BOOL "Enable netCDF-4 API" FORCE)
+  SET(ENABLE_NETCDF4 ON CACHE BOOL "Enable netCDF4 Alias" FORCE)
 endif()
 
 IF(ENABLE_HDF4)
@@ -1062,11 +1062,14 @@ ENDIF()
 IF(ENABLE_DAP)
   SET(USE_DAP ON CACHE BOOL "")
   SET(ENABLE_DAP2 ON CACHE BOOL "")
 
   IF(ENABLE_HDF5)
+    MESSAGE(STATUS "Enabling DAP4")
     SET(ENABLE_DAP4 ON CACHE BOOL "")
-  ELSE(ENABLE_HDF5)
+  ELSE()
+    MESSAGE(STATUS "Disabling DAP4")
+    SET(ENABLE_DAP4 OFF CACHE BOOL "")
-  ENDIF(NOT ENABLE_HDF5)
+  ENDIF(ENABLE_HDF5)
 
 ELSE()
   SET(ENABLE_DAP2 OFF CACHE BOOL "")
@@ -1074,7 +1077,7 @@ ELSE()
 ENDIF()
 
 # Option to support byte-range reading of remote datasets
-OPTION(ENABLE_BYTERANGE "Enable byte-range access to remote datasets.." OFF)
+OPTION(ENABLE_BYTERANGE "Enable byte-range access to remote datasets.." ON)
 
 # Check for the math library so it can be explicitly linked.
 IF(NOT WIN32)
@@ -1311,7 +1314,7 @@ ENDIF()
 IF(NOT ENABLE_S3_SDK)
   IF(ENABLE_NCZARR_S3 OR ENABLE_NCZARR_S3_TESTS)
-    message(FATAL_ERROR "S3 support library not found; please specify option DENABLE_NCZARR_S3=NO")
+    message(FATAL_ERROR "S3 support library not found; please specify option -DENABLE_NCZARR_S3=NO")
     SET(ENABLE_NCZARR_S3 OFF CACHE BOOL "NCZARR S3 support" FORCE)
     SET(ENABLE_NCZARR_S3_TESTS OFF CACHE BOOL "S3 tests" FORCE)
   ENDIF()
@@ -2537,6 +2540,7 @@ is_enabled(ENABLE_ZERO_LENGTH_COORD_BOUND RELAX_COORD_BOUND)
 is_enabled(USE_CDF5 HAS_CDF5)
 is_enabled(ENABLE_ERANGE_FILL HAS_ERANGE_FILL)
 is_enabled(HDF5_HAS_PAR_FILTERS HAS_PAR_FILTERS)
 is_enabled(ENABLE_NCZARR_S3 HAS_NCZARR_S3)
+is_enabled(ENABLE_NCZARR HAS_NCZARR)
 is_enabled(ENABLE_NCZARR_S3_TESTS DO_NCZARR_S3_TESTS)
 is_enabled(ENABLE_MULTIFILTERS HAS_MULTIFILTERS)

@@ -7,7 +7,7 @@ This file contains a high-level description of this package's evolution. Release
 
 ## 4.9.1 - T.B.D.
 
-### 4.9.1 - Release Candidate 2 - TBD
+### 4.9.1 - Release Candidate 2 - November 21, 2022
 
 #### Known Issues
 
@@ -20,6 +20,10 @@ This file contains a high-level description of this package's evolution. Release
+* [Bug Fix] Fix a race condition when testing missing filters. See [Github #2557](https://github.com/Unidata/netcdf-c/pull/2557).
+* [Bug Fix] Make major changes to libdap4 and dap4_test to update the non-remote DAP4 tests. See [Github #2555](https://github.com/Unidata/netcdf-c/pull/2555).
+* [Bug Fix] Fix some race conditions due to use of a common file in multiple shell scripts . See [Github #2552](https://github.com/Unidata/netcdf-c/pull/2552).
 
 ### 4.9.1 - Release Candidate 1 - October 24, 2022
 
 * [Enhancement][Documentation] Add Plugins Quick Start Guide. See [GitHub #2524](https://github.com/Unidata/netcdf-c/pull/2524) for more information.
 * [Enhancement] Add new entries in `netcdf_meta.h`, `NC_HAS_BLOSC` and `NC_HAS_BZ2`. See [Github #2511](https://github.com/Unidata/netcdf-c/issues/2511) and [Github #2512](https://github.com/Unidata/netcdf-c/issues/2512) for more information.
 * [Enhancement] Add new options to `nc-config`: `--has-multifilters`, `--has-stdfilters`, `--has-quantize`, `--plugindir`. See [Github #2509](https://github.com/Unidata/netcdf-c/pull/2509) for more information.

@@ -97,7 +97,7 @@ AC_CONFIG_LINKS([nc_test4/ref_hdf5_compat3.nc:nc_test4/ref_hdf5_compat3.nc])
 AC_CONFIG_LINKS([hdf4_test/ref_chunked.hdf4:hdf4_test/ref_chunked.hdf4])
 AC_CONFIG_LINKS([hdf4_test/ref_contiguous.hdf4:hdf4_test/ref_contiguous.hdf4])
 AM_INIT_AUTOMAKE([foreign dist-zip subdir-objects])
-
+AM_MAINTAINER_MODE()
 # Check for the existence of this file before proceeding.
 AC_CONFIG_SRCDIR([include/netcdf.h])
@@ -1278,9 +1278,9 @@ fi
 # Does the user want to allow reading of remote data via range headers?
 AC_MSG_CHECKING([whether byte range support is enabled])
 AC_ARG_ENABLE([byterange],
-              [AS_HELP_STRING([--enable-byterange],
+              [AS_HELP_STRING([--disable-byterange],
                               [allow byte-range I/O])])
-test "x$enable_byterange" = xyes || enable_byterange=no
+test "x$enable_byterange" = xno || enable_byterange=yes
 AC_MSG_RESULT($enable_byterange)
 # Need curl for byte ranges
 if test "x$found_curl" = xno && test "x$enable_byterange" = xyes ; then
@@ -1925,6 +1925,7 @@ AC_SUBST(HAS_ERANGE_FILL,[$enable_erange_fill])
 AC_SUBST(HAS_BYTERANGE,[$enable_byterange])
 AC_SUBST(RELAX_COORD_BOUND,[yes])
 AC_SUBST([HAS_PAR_FILTERS], [$hdf5_supports_par_filters])
 AC_SUBST(HAS_NCZARR_S3,[$enable_nczarr_s3])
+AC_SUBST(HAS_NCZARR,[$enable_nczarr])
 AC_SUBST(DO_NCZARR_S3_TESTS,[$enable_nczarr_s3_tests])
 AC_SUBST(HAS_MULTIFILTERS,[$has_multifilters])
@@ -2060,12 +2061,12 @@ AX_SET_META([NC_HAS_SZIP],[$enable_hdf5_szip],[yes])
 AX_SET_META([NC_HAS_ZSTD],[$have_zstd],[yes])
 AX_SET_META([NC_HAS_BLOSC],[$have_blosc],[yes])
 AX_SET_META([NC_HAS_BZ2],[$have_bz2],[yes])
 
 # This is the version of the dispatch table. If the dispatch table is
 # changed, this should be incremented, so that user-defined format
 # applications like PIO can determine whether they have an appropriate
 # dispatch table to submit. If this is changed, make sure the value in
 # CMakeLists.txt also changes to match.
 AC_SUBST([NC_DISPATCH_VERSION], [5])
 AC_DEFINE_UNQUOTED([NC_DISPATCH_VERSION], [${NC_DISPATCH_VERSION}], [Dispatch table version.])

|
@ -504,6 +504,30 @@ The code in *hdf4var.c* does an *nc_get_vara()* on the HDF4 SD
|
||||
dataset. This is all that is needed for all the nc_get_* functions to
|
||||
work.
|
||||
|
||||
# Appendix A. Changing NC_DISPATCH_VERSION
|
||||
|
||||
When new entries are added to the *struct NC_Dispatch* type `located in include/netcdf_dispatch.h.in` it is necessary to do two things.
|
||||
|
||||
1. Bump the NC_DISPATCH_VERSION number
|
||||
2. Modify the existing dispatch tables to include the new entries.
|
||||
It if often the case that the new entries do not mean anything for
|
||||
a given dispatch table. In that case, the new entries may be set to
|
||||
some variant of *NC_RO_XXX* or *NC_NOTNC4_XXX* *NC_NOTNC3_XXX*.
|
||||
|
||||
Modifying the dispatch version requires two steps:
|
||||
1. Modify the version number in *netcdf-c/configure.ac*, and
|
||||
2. Modify the version number in *netcdf-c/CMakeLists.txt*.
|
||||
|
||||
The two should agree in value.
|
||||
|
||||
### NC_DISPATCH_VERSION Incompatibility
|
||||
|
||||
When dynamically adding a dispatch table
|
||||
-- in nc_def_user_format (see libdispatch/dfile.c) --
|
||||
the version of the new table is compared with that of the built-in
|
||||
NC_DISPATCH_VERSION; if they differ, then an error is returned from
|
||||
that function.
|
||||
|
||||
# Appendix B. Inferring the Dispatch Table
|
||||
|
||||
As mentioned above, the dispatch table is inferred using the following
|
||||
|
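[Editor's sketch] The version guard described in Appendix A can be pictured with a small standalone C sketch. Everything here is an illustrative stand-in: the real struct NC_Dispatch lives in include/netcdf_dispatch.h.in, the real error codes in netcdf.h, and check_dispatch_version is a hypothetical helper standing in for the comparison nc_def_user_format performs.

/* Sketch of the Appendix A version guard (illustrative, not library source). */
#define NC_NOERR 0
#define NC_EINVAL (-36)          /* invalid-argument error code, as in netcdf.h */
#define NC_DISPATCH_VERSION 5    /* must match configure.ac and CMakeLists.txt */

typedef struct NC_Dispatch {
    int dispatch_version;        /* filled in by the table's author at build time */
    /* ... create/open/read/write function pointers elided ... */
} NC_Dispatch;

static int
check_dispatch_version(const NC_Dispatch* table)
{
    if (table == NULL) return NC_EINVAL;
    /* A user-supplied table built against a different dispatch ABI is
       rejected rather than called through a mismatched pointer layout. */
    if (table->dispatch_version != NC_DISPATCH_VERSION) return NC_EINVAL;
    return NC_NOERR;
}

int main(void)
{
    NC_Dispatch good  = { NC_DISPATCH_VERSION };
    NC_Dispatch stale = { NC_DISPATCH_VERSION - 1 };
    return (check_dispatch_version(&good) == NC_NOERR
            && check_dispatch_version(&stale) == NC_EINVAL) ? 0 : 1;
}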

docs/static-pages/orgs.html (new file, 417 lines)
@@ -0,0 +1,417 @@
+<html><!-- InstanceBegin template="../../../Templates/MyUnidata.dwt" codeOutsideHTMLIsLocked="true" -->
+
+<head>
+<!-- InstanceBeginEditable name="Title" -->
+<TITLE>Organizations in which NetCDF is Used</TITLE>
+<!-- InstanceEndEditable -->
+<!-- InstanceBeginEditable name="META Information" -->
+<META NAME="UIINDEX" CONTENT="0">
+<META NAME="BOOKMARK" CONTENT="NetCDF Users">
+<META NAME="AUTHOR" CONTENT="russ">
+<META NAME="KEYWORDS" CONTENT="netCDF, users, organization, used, usage">
+<META NAME="DESCRIPTION" CONTENT="The following list of organizations was created by sorting the organizational affiliations of authors of questions or comments about netCDF sent to support@unidata.ucar.edu.">
+<!-- InstanceEndEditable -->
+</head>
+
+<body>
+<!-- InstanceBeginEditable name="Content Goes Here" -->
+<A NAME="netcdf" ID="netcdf"></A>
+<h1>Organizations in which NetCDF is Used</h1>
+The following list of organizations was created by sorting the organizational
+affiliations of authors of questions or comments about netCDF sent to <code>support@unidata.ucar.edu</code>.
+<ul>
+<li>Accu-Weather</li>
+<li>Advanced Visual Systems, Inc.</li>
+<li>Aerometrics, Inc.</li>
+<li>Aerospace and Mechanical Engineering, University of Notre Dame</li>
+<li>Alfred-Wegener-Institute for Polar and Marine Research</li>
+<li>American Cyanamid Company</li>
+<li>Analytical Innovations, Inc.</li>
+<li>Analytical Services & Materials, Inc.</li>
+<li>Applied Research Associates</li>
+<li>Applied Research Corp., Goddard Space Flight Center</li>
+<li>Armstrong Labs, Tyndall AFB</li>
+<li>Astro Space Center, Moscow</li>
+<li>Astronomical Institute, Czech Academy of Sciences</li>
+<li>Atmospheric Environment Service, CANADA</li>
+<li>Atmospheric Release Advisory Capability, Lawrence Livermore National Laboratory</li>
+<li>Atmospheric Research Laboratory, Scripps Institution of Oceanography</li>
+<li>Atmospheric Sciences, Yonsei University</li>
+<li>Atmospheric, Oceanic and Planetary Physics, Clarendon Laboratory</li>
+<li>Auburn</li>
+<li>Aurora Simulation, Inc.</li>
+<li>Australian Geological Survey</li>
+<li>BB&N</li>
+<li>BMRC</li>
+<li>Battelle / Pacific Northwest Laboratories</li>
+<li>Battelle Marine Sciences Laboratory</li>
+<li>Bay Area Air Quality Management District</li>
+<li>Bio-Rad Semiconductor Division, CD Systems</li>
+<li>Biophysics Lab., University of Nijmegen, The Netherlands</li>
+<li>Branch of Atlantic Marine Geology, US Geological Survey</li>
+<li>Bristol-Myers Squibb</li>
+<li>Brookhaven National Labs</li>
+<li>CARS, University of Chicago</li>
+<li>CEA/CESTA, France</li>
+<li>CIBNOR</li>
+<li>CICESE/Depto. Oceanografia Fisica, Mexico</li>
+<li>CIRES, University of Colorado</li>
+<li>CIRES/Center for the Study of Earth from Space</li>
+<li>CMU</li>
+<li>CSIRO</li>
+<li>CSIRO Division of Atmospheric Research</li>
+<li>CSIRO Division of Oceanography</li>
+<li>CSIRO Mathematical and Information Sciences</li>
+<li>California Space Institute, Scripps Institution of Oceanography</li>
+<li>California State University at Chico</li>
+<li>Canadian Climate Centre</li>
+<li>Celestin Company</li>
+<li>Center for Analysis and Prediction of Storms, University of Oklahoma</li>
+<li>Center for Digital Systems Engineering, Research Triangle Institute</li>
+<li>Center for Global Atmospheric Modelling, University of Reading</li>
+<li>Center for Nondestructive Evaluation, Iowa State University</li>
+<li>Centre d'Oceanologie de Marseille</li>
+<li>Centro Nacional de Datos Oceanograficos de Chile</li>
+<li>Centro de Neurociencias de Cuba</li>
+<li>CERFACS (European Center for Research and Advanced Training in Scientific
+Computation), France</li>
+<li>Checkmate Engineering, Inc.</li>
+<li>ChemSoft, Inc.</li>
+<li>ChemWare, Inc.</li>
+<li>City University of Hong Kong</li>
+<li>Climate Diagnostics Center</li>
+<li>Climate Research Division, Scripps Institution of Oceanography</li>
+<li>Climate and Radiation Branch, NASA Goddard Space Flight Center</li>
+<li>College of Oceanography, Oregon State</li>
+<li>Columbia University</li>
+<li>Commonwealth Bureau of Meteorology, Australia</li>
+<li>Complutense University (MADRID- SPAIN)</li>
+<li>Cornell</li>
+<li>Cray Computer Corporation</li>
+<li>Cray Research Australia</li>
+<li>Cray Research, Inc.</li>
+<li>DCI Systems & User Support Group, RAL</li>
+<li>DLR Institute of Fluid Mechanics, Gottingen, Germany</li>
+<li>Dalhousie University, Halifax</li>
+<li>Danish Meteorological Institute</li>
+<li>Defence Research Establishment Pacific</li>
+<li>Delft University of Technology, Netherlands</li>
+<li>Department of Applied Mathematics, University of Washington</li>
+<li>Department of Atmospheric Science, Colorado State University</li>
+<li>Department of Atmospheric Sciences, UCLA</li>
+<li>Department of Atmospheric Sciences, University of Washington</li>
+<li>Department of Chemistry, Rutgers University</li>
+<li>Department of Chemistry, University of Western Ontario</li>
+<li>Department of Computer Science, Western Washington University</li>
+<li>Department of Earth Sciences, University of Wales College of Cardiff</li>
+<li>Department of Geology, Istanbul Technical University, Turkey</li>
+<li>Department of Geology, University of Illinois</li>
+<li>Department of Geophysics and Planetary Sciences, Tel-Aviv University</li>
+<li>Department of Hydrology and Water Resources, University of Arizona</li>
+<li>Department of Meteorology, Texas A&M University</li>
+<li>Department of Oceanography, Dalhousie University</li>
+<li>Department of Rangeland Ecology and Management, Texas A&M University</li>
+<li>Department of Structural Biology, Biomolecular Engineering Research Institute</li>
+<li>Dept of Atmospheric and Oceanic Sciences, McGill University</li>
+<li>Deutsche Forschungsanstalt für Luft- und Raumfahrt e.V..</li>
+<li>Dickens Data Systems, Inc.</li>
+<li>Digital Equipment Corporation</li>
+<li>Division of Ocean and Atmospheric Science, Hokkaido University</li>
+<li>Dow Chemical</li>
+<li>Earth Sciences Division, Raytheon ITSS at NASA Ames Research</li>
+<li>ENTERPRISE Products</li>
+<li>ETH Zurich</li>
+<li>Earth System Science Laboratory, University of Alabama in Huntsville</li>
+<li>Electricite de France</li>
+<li>Energy & System Engineering Group, Japan NUS Co., Ltd.</li>
+<li>Ensign Geophysics Ltd.</li>
+<li>Environment Waikato, New Zealand</li>
+<li>Federal Geographic Data Committee</li>
+<li>Fieldview Product Manager, Intelligent Light</li>
+<li>Finnigan-MAT</li>
+<li>Florida State University</li>
+<li>Forschungszentrum Juelich Gmbh (KFA)</li>
+<li>Fortner Research LLC</li>
+<li>Fraunhofer Institute for Atmospheric Environmental Research</li>
+<li>Fundacion Centro de Estudios Ambientales del Mediterraneo</li>
+<li>GNU</li>
+<li>General Atomics</li>
+<li>General Motors R&D Center</li>
+<li>General Science Corporation</li>
+<li>GeoForschungsZentrum Potsdam</li>
+<li>Geophysical Department, Utrecht University</li>
+<li>Geophysical Institute, University of Alaska, Fairbanks</li>
+<li>Geoterrex-Dighem Pty Limited, Australia</li>
+<li>German Aerospace Research Establishment (DLR)</li>
+<li>German Climate Compute Center</li>
+<li>German Remote Sensing Data Center (DFD),</li>
+<li>Glaciology Laboratory, Grenoble, France</li>
+<li>Global Climate Research Division, LLNL</li>
+<li>Goddard Space Flight Center</li>
+<li>Grupo CLIMA - IMFIA, Uruguay</li>
+<li>Harris ISD</li>
+<li>Harvard Seismology</li>
+<li>Hatfield Marine Science Center, Newport, Oregon</li>
+<li>Hewlett-Packard</li>
+<li>Hughes Aircraft Company</li>
+<li>IBM</li>
+<li>IDRIS/Support Visualisation & Video, France</li>
+<li>IFREMER (The French Institute of Research and Exploitation of the Sea)</li>
+<li>IKU Petroleum Research, Trondheim, Norway</li>
+<li>IRPEM-CNR</li>
+<li>Illinois State Water Survey</li>
+<li>Imperial College of Science, Technology, and Medicine, London</li>
+<li>Infometrix</li>
+<li>Institut d'Astronomie et de Geophysique, Belgium</li>
+<li>Institut für Flugmechanik</li>
+<li>Institut für Geophysik, Universität Göttingen</li>
+<li>Institut für Meteorologie und Klimaforschung</li>
+<li>Institut für Stratosphaerische Chemie</li>
+<li>Institute for Atmospheric Science, ETH, Zurich</li>
+<li>Institute for Stratospheric Chemistry(ICG-1), Institute for the Chemistry
+and Dynamics of the Geosphere</li>
+<li>Institute for Tropospheric Research</li>
+<li>Institute of Applied Computer Science (IAI), KfK Research Centre - Karlsruhe</li>
+<li>Instituto Andaluz de Ciencias de la Tierra, Granada, Spain</li>
+<li>Instituto Oceanografico da USP</li>
+<li>Instituto de Oceanografia, Universidade de Lisboa</li>
+<li>Iowa State</li>
+<li>JASCO Corporation, Hachioji Tokyo 192 JAPAN</li>
+<li>Jaime I University</li>
+<li>Joint Institute for the Study of the Atmosphere and Ocean (JISAO)</li>
+<li>KEO Consultants</li>
+<li>KODAK</li>
+<li>Kaiser Aluminum</li>
+<li>Koninklijk Nederlands Meteorologisch Instituut (KNMI)</li>
+<li>Koninklijke/Shell-Laboratorium Amsterdam</li>
+<li>LABTECH</li>
+<li>Laboratoire de Dynamique Moleculaire, Institut de Biologie Structurale</li>
+<li>Laboratoire de Météorologie Dynamique du CNRS, France</li>
+<li>Laboratory for Plasma Studies, Cornell University</li>
+<li>Laboratory of Molecular Biophysics</li>
+<li>Lamont-Doherty Earth Observatory of Columbia University</li>
+<li>Lawrence Berkeley Laboratory (LBL)</li>
+<li>Lawrence Livermore National Laboratory (LLNL)</li>
+<li>Litton TASC</li>
+<li>Lockheed Martin Technical Services</li>
+<li>Lockheed Martin/GES</li>
+<li>Los Alamos National Laboratory (LANL)</li>
+<li>Louisiana State University</li>
+<li>M.D. Anderson Cancer Center</li>
+<li>MAPS geosystems</li>
+<li>MIT Lincoln Laboratory</li>
+<li>MIT Plasma Fusion Center</li>
+<li>MUMM (CAMME)</li>
+<li>Marine Biological Laboratory, Woods Hole</li>
+<li>Massachusetts Institute of Technology</li>
+<li>Maurice-Lamontagne Institute, Department of Fisheries and Oceans Canada</li>
+<li>Memorial Sloan-Kettering Cancer Center (MSKCC)</li>
+<li>Mesonet, University of Oklahoma</li>
+<li>Meteorological Systems and Technology (METSYS) South Africa</li>
+<li>Michigan State University, Physics Department</li>
+<li>Michigan State University, Geography/Fisheries and Wildlife Department</li>
+<li>Microelectronics Center of North Carolina (MCNC)</li>
+<li>Minnesota Supercomputer Center</li>
+<li>Mote Marine Laboratory</li>
+<li>Multimedia Lab, University of Zurich, Switzerland</li>
+<li>NASA / GSFC</li>
+<li>NASA / Goddard Institute for Space Studies</li>
+<li>NASA / JPL</li>
+<li>NASA Ames Research Center</li>
+<li>NASA Dryden FRC</li>
+<li>NCAR / ACD</li>
+<li>NCAR / ATD</li>
+<li>NCAR / CGD</li>
+<li>NCAR / HAO</li>
+<li>NCAR / MMM</li>
+<li>NCAR / RAF</li>
+<li>NCAR / RAP</li>
+<li>NCAR / SCD</li>
+<li>NCSA-University of Illinois at Urbana-Champaign</li>
+<li>NIST</li>
+<li>NMFS</li>
+<li>NOAA / AOML / CIMAS, Hurricane Research Division</li>
+<li>NOAA / Arkansas-Red Basin River Forecast Center</li>
+<li>NOAA / CDC</li>
+<li>NOAA / CRD</li>
+<li>NOAA / ERL / FSL</li>
+<li>NOAA / ETL</li>
+<li>NOAA / FSL</li>
+<li>NOAA / Geophysical Fluid Dynamics Laboratory</li>
+<li>NOAA / NGDC</li>
+<li>NOAA / NGDC / Paleoclimatology Group</li>
+<li>NOAA / PMEL</li>
+<li>NOAA / PMEL / OCRD</li>
+<li>Nansen Environmental and Remote Sensing Centre (NERSC), Norway</li>
+<li>National Center for Atmospheric Research (NCAR)</li>
+<li>National Energy Research Supercomputer Center (NERSC)</li>
+<li>National Fisheries, University of Pusan</li>
+<li>National Institute of Health</li>
+<li>National Research Council of Canada</li>
+<li>National Severe Storms Laboratory</li>
+<li>National Weather Service</li>
+<li>National Weather Service, Camp Springs, MD</li>
+<li>National Weather Service, Juneau, Alaska</li>
+<li>Natural Resources Conservation Service, U.S. Department of Agriculture</li>
+<li>Naval Postgraduate School</li>
+<li>Naval Research Laboratory</li>
+<li>North Carolina State University</li>
+<li>North Carolina Supercomputing Center/MCNC Environmental Programs</li>
+<li>Northwest Research Associates, Inc.</li>
+<li>Nova University</li>
+<li>Numerical Algorithms Group (NAG)</li>
+<li>Oak Ridge National Laboratory</li>
+<li>Observation Center for Prediction of Earthquakes</li>
+<li>Ocean Science & Technology</li>
+<li>Oceanography, University College/Australian Defence Force Academy</li>
+<li>Office of Fusion Energy Sciences, DOE</li>
+<li>Oklahoma Climate Survey</li>
+<li>Oklahoma Mesonet</li>
+<li>Old Dominion University</li>
+<li>Oregon Graduate Institute</li>
+<li>Oregon State University</li>
+<li>Orkustofnun (National Energy Authority), Reykjavik, Iceland</li>
+<li>PE Nelson Systems, Inc.</li>
+<li>PNNL</li>
+<li>POSTECH</li>
+<li>PPPL</li>
+<li>Pacific Fisheries Environmental Group</li>
+<li>Pacific Tsunami Warning Center</li>
+<li>Parallel Computing Group, University of Geneva CUI</li>
+<li>Pennsylvania State University/Applied Research Laboratory</li>
+<li>Phillips Laboratory/GPIA, Hanscom AFB</li>
+<li>Physics Department, Lawrence Livermore National Laboratory</li>
+<li>Pittsburgh Supercomputing Center</li>
+<li>Plymouth State College, Plymouth NH</li>
+<li>Positron Imaging Laboratories, Montreal Neurological Institute</li>
+<li>Princeton</li>
+<li>Project Centre for Ecosystem Research at the University of Kiel</li>
+<li>Pure Atria</li>
+<li>Queensland Insitute of Natural Science</li>
+<li>RMIT Applied Physics</li>
+<li>RSI</li>
+<li>Raytheon Co.</li>
+<li>Research Centre Karlsruhe, Institute of Applied Computer Science</li>
+<li>Research Systems</li>
+<li>River Forecast Center, TULSA</li>
+<li>Rosenstiel School of Marine and Atmospheric Science (RSMAS), University
+of Miami</li>
+<li>Royal Observatory, Hong Kong</li>
+<li>Rutgers University</li>
+<li>SAIC</li>
+<li>SCIEX</li>
+<li>SSEC, University of Wisconsin</li>
+<li>SSESCO</li>
+<li>SUNY Albany</li>
+<li>SYSECA</li>
+<li>San Diego Supercomputer Center</li>
+<li>Sandia National Laboratories</li>
+<li>Scripps Institution of Oceanography</li>
+<li>Semichem Technical Support</li>
+<li>Shimadzu Corporation</li>
+<li>Siemens Power Corp</li>
+<li>Silicon Graphics Inc.</li>
+<li>SoftShell International, Ltd.</li>
+<li>Software Development Centre, Delft Hydraulics</li>
+<li>Software Engineering Research Group, Michigan State University</li>
+<li>Soil Conservation Service, U.S. Department of Agriculture</li>
+<li>Southeastern Regional Climate Center</li>
+<li>Southern Regional Climate Center, Louisiana State University</li>
+<li>Southwest Research Institute</li>
+<li>Space Research Institute, Moscow</li>
+<li>Stanford University</li>
+<li>StatSci</li>
+<li>Stratospheric Research group, Free University Berlin</li>
+<li>Supercomputer Computations Research Institute</li>
+<li>Synap Corporation</li>
+<li>Technical University of Madrid/Computer Science School</li>
+<li>Tera Research, Inc.</li>
+<li>Texas A&M Ranching Systems Group</li>
+<li>Texas A&M University</li>
+<li>Texas A&M University at Tallahassee</li>
+<li>Texas Instruments, Inc.</li>
+<li>The Auroral Observatory, University of Troms, Norway</li>
+<li>Theoretical Physics, Fermilab</li>
+<li>Thomson-CSF / SYSECA</li>
+<li>Tokyo Metropolitan University</li>
+<li>Tulsa District, U.S. Army Corps of Engineers</li>
+<li>U.S. Air Force</li>
+<li>U.S. Army Corps of Engineers</li>
+<li>U.S. Department of Agriculture / ARS</li>
+<li>U.S. Department of Energy</li>
+<li>U.S. Enviromental Protection Agency</li>
+<li>U.S. Geological Survey, Woods Hole</li>
+<li>U.S. Navy</li>
+<li>U.S. Patent Office</li>
+<li>UCAR / GPS/MET</li>
+<li>UMD</li>
+<li>UPRC</li>
+<li>University of Alaska</li>
+<li>University of Alberta</li>
+<li>University of Arizona</li>
+<li>University of Bergen, Norway</li>
+<li>University of Bern</li>
+<li>University of British Columbia</li>
+<li>University of Caen, France</li>
+<li>University of California / LLNL</li>
+<li>University of California, Davis</li>
+<li>University of California, Irvine</li>
+<li>University of California, Los Angeles</li>
+<li>University of California, San Diego</li>
+<li>University of California, Santa Barbara / Institute for Computational Earth
+System Science</li>
+<li>University of California, Santa Cruz</li>
+<li>University of Cambridge, UK</li>
+<li>University of Chicago</li>
+<li>University of Colorado</li>
+<li>University of Cyprus</li>
+<li>University of Delaware</li>
+<li>University of Denver</li>
+<li>University of Florida</li>
+<li>University of Hawaii</li>
+<li>University of Illinois</li>
+<li>University of Kansas</li>
+<li>University of Manitoba</li>
+<li>University of Maryland</li>
+<li>University of Massachussetts</li>
+<li>University of Miami / RSMAS</li>
+<li>University of Michigan</li>
+<li>University of Minnesota / Department of Geology and Geophysics</li>
+<li>University of Minnesota Supercomputer Institute</li>
+<li>University of Montana</li>
+<li>University of Nebraska, Lincoln</li>
+<li>University of New Hampshire</li>
+<li>University of North Dakota</li>
+<li>University of Oklahoma</li>
+<li>University of Rhode Island, Graduate School of Oceanography</li>
+<li>University of South Florida</li>
+<li>University of Sydney / School of Mathematics</li>
+<li>University of Texas, Austin</li>
+<li>University of Texas, Houston</li>
+<li>University of Tokyo / Earthquake Research Institute</li>
+<li>University of Toronto</li>
+<li>University of Utrecht (The Netherlands)</li>
+<li>University of Victoria / School of Earth and Ocean Sciences</li>
+<li>University of Virginia</li>
+<li>University of Washington</li>
+<li>University of Western Ontario</li>
+<li>University of Wisconsin</li>
+<li>University of Zurich, Switzerland</li>
+<li>University of the Witwatersrand / Climatology Research Group</li>
+<li>Universität Goettingen, Institut für Geophysik</li>
+<li>Utah Water Research Laboratory</li>
+<li>Vanderbilt</li>
+<li>Varian Chromatography Systems</li>
+<li>Victoria University of Wellington / Institute of Geophysics</li>
+<li>Virginia Tech Department of Computer Science</li>
+<li>Visualization and Imaging Team, Idaho National Engineering Lab</li>
+<li>Wadia Institute of Himalayan Geology</li>
+<li>Woods Hole Oceanographic Institution</li>
+<li>Wyle Laboratories</li>
+<li>Yale University</li>
+</ul>
+<!-- InstanceEndEditable -->
+</body>
+
+<!-- InstanceEnd -->

docs/static-pages/standards.html (new file, 72 lines)
@@ -0,0 +1,72 @@
+<html><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<!--<base -->
+<!--href="http://www.unidata.ucar.edu/software/netcdf/docs/standards.html">-->
+</head><body>
+<title>NetCDF Standards</title>
+<h1>Status of standards body endorsements of netCDF and related conventions</h1>
+
+<p>The netCDF format has been endorsed by several standards bodies:
+</p>
+<ul>
+
+<li>On 2009-02-05, the NASA Earth Science Data Systems (ESDS) Standards Process Group
+<a href="http://earthdata.nasa.gov/our-community/esdswg/standards-process-spg/rfc/esds-rfc-011-netcdf-classic">officially endorsed</a> the document <a href="http://earthdata.nasa.gov/sites/default/files/esdswg/spg/rfc/esds-rfc-011/ESDS-RFC-011v2.00.pdf">ESDS-RFC-011</a>, <strong>NetCDF Classic and 64-bit Offset File
+Formats</strong>, as an appropriate standard for NASA Earth Science
+data. </li>
+
+<li>On 2010-03-12, the Integrated Ocean Observing System
+(IOOS) Data Management and Communications (DMAC) Subsystem <a href="http://www.ioos.gov/library/dmac_implementation_2010.pdf">endorsed</a> <strong>netCDF with Climate and Forecast
+(CF) conventions</strong> as a preferred data format.</li>
+
+<li>On 2010-09-27, the Steering Committee of the Federal Geographic Data Committee (<a href="http://www.fgdc.gov/">FGDC</a>) <a href="http://www.fgdc.gov/standards/fgdc-endorsed-external-standards/index_html">officially endorsed</a>
+<strong>netCDF</strong> as a "Common Encoding Standard (CES)".
+</li>
+
+<li>On 2010-10-18, the ESDS-RFC-021 Technical Working Group issued a
+<a href="http://www.esdswg.org/spg/rfc/esds-rfc-021/CF_TWG_SWAL_v02.pdf">final report</a>
+concluding that the NASA ESDS Standards Process Group should
+recommend <a href="http://www.esdswg.org/spg/rfc/esds-rfc-021/ESDS-RFC-021-v0.01.pdf">ESDS-RFC-021</a>, <strong>CF Metadata
+Conventions</strong>, for endorsement as a NASA Recommended Standard. </li>
+
+<li>
+On 2011-04-19, the Open Geospatial Consortium (<a href="http://www.opengeospatial.org/">OGC</a>) approved the OGC
+<strong>Network Common Data Form (netCDF) Core Encoding Standard</strong>, and <strong>NetCDF
+Binary Encoding Extension Standard - netCDF Classic and 64-bit
+Offset Format</strong> as official OGC standards. These standards are
+available for free download at <a href="http://www.opengeospatial.org/standards/netcdf">http://www.opengeospatial.org/standards/netcdf</a>.
+</li>
+
+<li>
+On 2011-11-03, the ESDS-RFC-022 Technical Working Group issued a <a href="http://earthdata.nasa.gov/our-community/esdswg/standards-process-spg/rfc/esds-rfc-022-netcdf-4-hdf5">final report</a>
+recommending <a href="http://earthdata.nasa.gov/sites/default/files/field/document/ESDS-RFC-022v1.pdf">ESDS-RFC-022</a>, <strong>NetCDF-4/HDF-5 File Format</strong>, for
+endorsement as an <a
+href="http://earthdata.nasa.gov/our-community/esdswg/standards-process-spg/rfc"
+>EOSDIS Approved Standard</a>.
+</li>
+
+<li>On 2012-11-16, the OGC <a
+href="http://www.opengeospatial.org/node/1697" >adopted</a> the
+netCDF Enhanced Data Model Extension to the OGC Network Common Data
+Form Core Encoding Standard, making netCDF-4 an official OGC
+standard. This standard is available for free download at <a
+href="http://www.opengeospatial.org/standards/netcdf"
+>http://www.opengeospatial.org/standards/netcdf</a>.
+</li>
+
+<li>
+On 2013-02-14, the OGC <a
+href="http://www.opengeospatial.org/node/1783" >approved</a> the
+Climate and Forecast (CF) extension to the NetCDF Core data model
+standard, making the CF metadata conventions for netCDF an official
+OGC standard.
+</li>
+
+</ul>
+
+<hr>
+<address></address>
+<script language="JavaScript" type="text/JavaScript">
+document.write(date_modified);
+</script>
+
+</body></html>
@@ -1839,19 +1839,16 @@ NC_create(const char *path0, int cmode, size_t initialsz,
 
     TRACE(nc_create);
     if(path0 == NULL)
-        return NC_EINVAL;
+        {stat = NC_EINVAL; goto done;}
 
     /* Check mode flag for sanity. */
-    if ((stat = check_create_mode(cmode)))
-        return stat;
+    if ((stat = check_create_mode(cmode))) goto done;
 
     /* Initialize the library. The available dispatch tables
      * will depend on how netCDF was built
      * (with/without netCDF-4, DAP, CDMREMOTE). */
-    if(!NC_initialized)
-    {
-        if ((stat = nc_initialize()))
-            return stat;
+    if(!NC_initialized) {
+        if ((stat = nc_initialize())) goto done;
     }
 
     {
@@ -1863,10 +1860,7 @@ NC_create(const char *path0, int cmode, size_t initialsz,
 
     memset(&model,0,sizeof(model));
     newpath = NULL;
-    if((stat = NC_infermodel(path,&cmode,1,useparallel,NULL,&model,&newpath))) {
-        nullfree(newpath);
-        goto done;
-    }
+    if((stat = NC_infermodel(path,&cmode,1,useparallel,NULL,&model,&newpath))) goto done;
     if(newpath) {
         nullfree(path);
         path = newpath;
@@ -1918,7 +1912,7 @@ NC_create(const char *path0, int cmode, size_t initialsz,
         dispatcher = NC3_dispatch_table;
         break;
     default:
-        return NC_ENOTNC;
+        {stat = NC_ENOTNC; goto done;}
     }
 
     /* Create the NC* instance and insert its dispatcher and model */
@@ -1937,6 +1931,7 @@ NC_create(const char *path0, int cmode, size_t initialsz,
     }
 done:
     nullfree(path);
+    nullfree(newpath);
     return stat;
 }
 
@@ -1980,12 +1975,12 @@ NC_open(const char *path0, int omode, int basepe, size_t *chunksizehintp,
 
     TRACE(nc_open);
     if(!NC_initialized) {
         stat = nc_initialize();
-        if(stat) return stat;
+        if(stat) goto done;
     }
 
     /* Check inputs. */
     if (!path0)
-        return NC_EINVAL;
+        {stat = NC_EINVAL; goto done;}
 
     /* Capture the inmemory related flags */
     mmap = ((omode & NC_MMAP) == NC_MMAP);
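[Editor's sketch] The hunks above consistently replace early returns with jumps to the shared done: label. A minimal standalone C sketch of that single-exit idiom (names are illustrative, not the library source):

#include <stdlib.h>
#include <string.h>

static int
demo(const char* path0)
{
    int stat = 0;
    char* path = NULL;
    char* newpath = NULL;

    if (path0 == NULL) { stat = -1; goto done; }
    if ((path = strdup(path0)) == NULL) { stat = -1; goto done; }
    /* ... work that may allocate newpath and fail at any step ... */
done:
    free(path);    /* free(NULL) is a no-op, so unconditional frees are safe */
    free(newpath);
    return stat;
}

int main(void) { return demo("example.nc"); }

This is also why NC_create can gain a single unconditional nullfree(newpath) under done: instead of freeing newpath at each failure site.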
@@ -143,7 +143,15 @@ static const struct MACRODEF {
 {NULL,NULL,{NULL}}
 };
 
-/* Mode inferences: if mode contains key, then add the inference and infer again */
+/*
+Mode inferences: if mode contains key value, then add the inferred value;
+Warning: be careful how this list is constructed to avoid infinite inferences.
+In order to (mostly) avoid that consequence, any attempt to
+infer a value that is already present will be ignored.
+This effectively means that the inference graph
+must be a DAG and may not have cycles.
+You have been warned.
+*/
 static const struct MODEINFER {
     char* key;
     char* inference;
@@ -151,6 +159,7 @@ static const struct MODEINFER {
 {"zarr","nczarr"},
 {"xarray","zarr"},
 {"noxarray","nczarr"},
+{"noxarray","zarr"},
 {NULL,NULL}
 };
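[Editor's sketch] The table above feeds the inference loop shown later in this diff; the comment's termination argument rests on never re-adding a mode that is already present. A self-contained toy version of that rule, using plain C arrays instead of NClist (illustrative only):

#include <stdio.h>
#include <string.h>

static const struct { const char* key; const char* inference; } inferences[] = {
    {"zarr","nczarr"}, {"xarray","zarr"}, {"noxarray","nczarr"},
    {"noxarray","zarr"}, {NULL,NULL}
};

int main(void) {
    const char* modes[16] = {"xarray"};
    int n = 1, changed = 1;
    while (changed) {                 /* iterate to a fixpoint */
        changed = 0;
        for (int i = 0; i < n; i++) {
            for (int t = 0; inferences[t].key; t++) {
                if (strcmp(inferences[t].key, modes[i]) != 0) continue;
                int dup = 0;          /* ignore values already present */
                for (int j = 0; j < n; j++)
                    if (strcmp(modes[j], inferences[t].inference) == 0) dup = 1;
                if (!dup && n < 16) { modes[n++] = inferences[t].inference; changed = 1; }
            }
        }
    }
    for (int i = 0; i < n; i++) printf("%s\n", modes[i]); /* xarray zarr nczarr */
    return 0;
}

Without the dup check, a cyclic table entry such as {"nczarr","zarr"} would make the loop grow the mode list forever, which is exactly what the comment warns about.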
@@ -202,6 +211,7 @@ static int processmacros(NClist** fraglistp);
 static char* envvlist2string(NClist* pairs, const char*);
 static void set_default_mode(int* cmodep);
 static int parseonchar(const char* s, int ch, NClist* segments);
+static int mergelist(NClist** valuesp);
 
 static int openmagic(struct MagicFile* file);
 static int readmagic(struct MagicFile* file, long pos, char* magic);
@@ -217,8 +227,9 @@ static int parsepair(const char* pair, char** keyp, char** valuep);
 static NClist* parsemode(const char* modeval);
 static const char* getmodekey(const NClist* envv);
 static int replacemode(NClist* envv, const char* newval);
-static int inferone(const char* mode, NClist* newmodes);
+static void infernext(NClist* current, NClist* next);
 static int negateone(const char* mode, NClist* modes);
+static void cleanstringlist(NClist* strs, int caseinsensitive);
 
 /*
 If the path looks like a URL, then parse it, reformat it.
@@ -416,28 +427,6 @@ envvlist2string(NClist* envv, const char* delim)
     return result;
 }
 
-/* Convert a list into a comma'd string */
-static char*
-list2string(NClist* list)
-{
-    int i;
-    NCbytes* buf = NULL;
-    char* result = NULL;
-
-    if(list == NULL || nclistlength(list)==0) return strdup("");
-    buf = ncbytesnew();
-    for(i=0;i<nclistlength(list);i++) {
-        const char* m = nclistget(list,i);
-        if(m == NULL || strlen(m) == 0) continue;
-        if(i > 0) ncbytescat(buf,",");
-        ncbytescat(buf,m);
-    }
-    result = ncbytesextract(buf);
-    ncbytesfree(buf);
-    if(result == NULL) result = strdup("");
-    return result;
-}
-
 /* Given a mode= argument, fill in the impl */
 static int
 processmodearg(const char* arg, NCmodel* model)
@@ -504,9 +493,10 @@ processinferences(NClist* fraglenv)
 {
     int stat = NC_NOERR;
     const char* modeval = NULL;
-    NClist* modes = NULL;
     NClist* newmodes = nclistnew();
-    int i,inferred = 0;
+    NClist* currentmodes = NULL;
+    NClist* nextmodes = nclistnew();
+    int i;
     char* newmodeval = NULL;
 
     if(fraglenv == NULL || nclistlength(fraglenv) == 0) goto done;
@@ -515,22 +505,53 @@ processinferences(NClist* fraglenv)
     if((modeval = getmodekey(fraglenv))==NULL) goto done;
 
     /* Get the mode as list */
-    modes = parsemode(modeval);
+    currentmodes = parsemode(modeval);
 
-    /* Repeatedly walk the mode list until no more new positive inferences */
-    do {
-        for(i=0;i<nclistlength(modes);i++) {
-            const char* mode = nclistget(modes,i);
-            inferred = inferone(mode,newmodes);
-            nclistpush(newmodes,strdup(mode)); /* keep key */
-            if(!inferred) nclistpush(newmodes,strdup(mode));
-        }
-    } while(inferred);
+#ifdef DEBUG
+    printlist(currentmodes,"processinferences: initial mode list");
+#endif
+
+    /* Do what amounts to breadth first inferencing down the inference DAG. */
+    for(;;) {
+        NClist* tmp = NULL;
+        /* Compute the next set of inferred modes */
+#ifdef DEBUG
+        printlist(currentmodes,"processinferences: current mode list");
+#endif
+        infernext(currentmodes,nextmodes);
+#ifdef DEBUG
+        printlist(nextmodes,"processinferences: next mode list");
+#endif
+        /* move current modes into list of newmodes */
+        for(i=0;i<nclistlength(currentmodes);i++) {
+            nclistpush(newmodes,nclistget(currentmodes,i));
+        }
+        nclistsetlength(currentmodes,0); /* clear current mode list */
+        if(nclistlength(nextmodes) == 0) break; /* nothing more to do */
+#ifdef DEBUG
+        printlist(newmodes,"processinferences: new mode list");
+#endif
+        /* Swap current and next */
+        tmp = currentmodes;
+        currentmodes = nextmodes;
+        nextmodes = tmp;
+        tmp = NULL;
+    }
+    /* cleanup any unused elements in currenmodes */
+    nclistclearall(currentmodes);
+
+    /* Ensure no duplicates */
+    cleanstringlist(newmodes,1);
+
+#ifdef DEBUG
+    printlist(newmodes,"processinferences: final inferred mode list");
+#endif
 
     /* Remove negative inferences */
-    for(i=0;i<nclistlength(modes);i++) {
-        const char* mode = nclistget(modes,i);
-        inferred = negateone(mode,newmodes);
+    for(i=0;i<nclistlength(newmodes);i++) {
+        const char* mode = nclistget(newmodes,i);
+        negateone(mode,newmodes);
     }
 
     /* Store new mode value */
@@ -541,11 +562,13 @@ processinferences(NClist* fraglenv)
 
 done:
     nullfree(newmodeval);
-    nclistfreeall(modes);
     nclistfreeall(newmodes);
+    nclistfreeall(currentmodes);
+    nclistfreeall(nextmodes);
     return check(stat);
 }
 
@@ -568,23 +591,28 @@ negateone(const char* mode, NClist* modes)
     return changed;
 }
 
-static int
-inferone(const char* mode, NClist* newmodes)
+static void
+infernext(NClist* current, NClist* next)
 {
-    const struct MODEINFER* tests = modeinferences;
-    int changed = 0;
-    for(;tests->key;tests++) {
-        if(strcasecmp(tests->key,mode)==0) {
-            /* Append the inferred mode; dups removed later */
-            nclistpush(newmodes,strdup(tests->inference));
-            changed = 1;
+    int i;
+    for(i=0;i<nclistlength(current);i++) {
+        const struct MODEINFER* tests = NULL;
+        const char* cur = nclistget(current,i);
+        for(tests=modeinferences;tests->key;tests++) {
+            if(strcasecmp(tests->key,cur)==0) {
+                /* Append the inferred mode unless dup */
+                if(!nclistmatch(next,tests->inference,1))
+                    nclistpush(next,strdup(tests->inference));
+            }
         }
     }
-    return changed;
 }
 
 /*
 Given a list of strings, remove nulls and duplicates
 */
 static int
-mergekey(NClist** valuesp)
+mergelist(NClist** valuesp)
 {
     int i,j;
     int stat = NC_NOERR;
@@ -686,12 +714,12 @@ cleanfragments(NClist** fraglenvp)
 
     /* collect all unique keys */
     collectallkeys(fraglenv,allkeys);
-    /* Collect all values for same key across all fragments */
+    /* Collect all values for same key across all fragment pairs */
     for(i=0;i<nclistlength(allkeys);i++) {
         key = nclistget(allkeys,i);
         collectvaluesbykey(fraglenv,key,tmp);
         /* merge the key values, remove duplicate */
-        if((stat=mergekey(&tmp))) goto done;
+        if((stat=mergelist(&tmp))) goto done;
         /* Construct key,value pair and insert into newlist */
         key = strdup(key);
         nclistpush(newlist,key);
@@ -923,7 +951,7 @@ NC_infermodel(const char* path, int* omodep, int iscreate, int useparallel, void
     }
 
     } else {/* Not URL */
-        if(*newpathp) *newpathp = NULL;
+        if(newpathp) *newpathp = NULL;
     }
 
     /* Phase 8: mode inference from mode flags */
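[Editor's sketch] The one-character change above fixes an order-of-checks bug: the old code dereferenced newpathp before testing it. A minimal standalone repro of the difference (names are illustrative):

#include <stddef.h>

static void clear_out_param(char** newpathp)
{
    /* old form: if (*newpathp) ... dereferences NULL when newpathp == NULL */
    if (newpathp) *newpathp = NULL;   /* fixed: test the pointer itself first */
}

int main(void)
{
    char* p = (char*)"x";
    clear_out_param(&p);    /* p becomes NULL */
    clear_out_param(NULL);  /* safe only with the fixed check */
    return (p == NULL) ? 0 : 1;
}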
@@ -1101,6 +1129,71 @@ parsemode(const char* modeval)
     return modes;
 }
 
+/* Convert a list into a comma'd string */
+static char*
+list2string(NClist* list)
+{
+    int i;
+    NCbytes* buf = NULL;
+    char* result = NULL;
+
+    if(list == NULL || nclistlength(list)==0) return strdup("");
+    buf = ncbytesnew();
+    for(i=0;i<nclistlength(list);i++) {
+        const char* m = nclistget(list,i);
+        if(m == NULL || strlen(m) == 0) continue;
+        if(i > 0) ncbytescat(buf,",");
+        ncbytescat(buf,m);
+    }
+    result = ncbytesextract(buf);
+    ncbytesfree(buf);
+    if(result == NULL) result = strdup("");
+    return result;
+}
+
+#if 0
+/* Given a comma separated string, remove duplicates; mostly used to cleanup mode list */
+static char*
+cleancommalist(const char* commalist, int caseinsensitive)
+{
+    NClist* tmp = nclistnew();
+    char* newlist = NULL;
+    if(commalist == NULL || strlen(commalist)==0) return nulldup(commalist);
+    (void)parseonchar(commalist,',',tmp);/* split on commas */
+    cleanstringlist(tmp,caseinsensitive);
+    newlist = list2string(tmp);
+    nclistfreeall(tmp);
+    return newlist;
+}
+#endif
+
+/* Given a list of strings, remove nulls and duplicated */
+static void
+cleanstringlist(NClist* strs, int caseinsensitive)
+{
+    int i,j;
+    if(nclistlength(strs) == 0) return;
+    /* Remove nulls */
+    for(i=nclistlength(strs)-1;i>=0;i--) {
+        if(nclistget(strs,i)==NULL) nclistremove(strs,i);
+    }
+    /* Remove duplicates*/
+    for(i=0;i<nclistlength(strs);i++) {
+        const char* value = nclistget(strs,i);
+        /* look ahead for duplicates */
+        for(j=nclistlength(strs)-1;j>i;j--) {
+            int match;
+            const char* candidate = nclistget(strs,j);
+            if(caseinsensitive)
+                match = (strcasecmp(value,candidate) == 0);
+            else
+                match = (strcmp(value,candidate) == 0);
+            if(match) {char* dup = nclistremove(strs,j); nullfree(dup);}
+        }
+    }
+}
+
 /**************************************************/
 /**
  * @internal Given an existing file, figure out its format and return
@@ -1502,8 +1595,10 @@ printlist(NClist* list, const char* tag)
 {
     int i;
     fprintf(stderr,"%s:",tag);
-    for(i=0;i<nclistlength(list);i++)
+    for(i=0;i<nclistlength(list);i++) {
         fprintf(stderr," %s",(char*)nclistget(list,i));
+        fprintf(stderr,"[%p]",(char*)nclistget(list,i));
+    }
     fprintf(stderr,"\n");
     dbgflush();
 }

@@ -122,9 +122,7 @@ ncbytesappend(NCbytes* bb, char elem)
 int
 ncbytescat(NCbytes* bb, const char* s)
 {
-    if(s == NULL) {
-        return 1;
-    }
+    if(s == NULL) return 1;
     ncbytesappendn(bb,(void*)s,strlen(s)+1); /* include trailing null*/
     /* back up over the trailing null*/
     if(bb->length == 0) return ncbytesfail();

@@ -183,6 +183,7 @@ nclistremove(NClist* l, size_t i)
     return elem;
 }
 
+/* Match on == */
 int
 nclistcontains(NClist* l, void* elem)
 {
@@ -193,7 +194,7 @@ nclistcontains(NClist* l, void* elem)
     return 0;
 }
 
-/* Return 1/0 */
+/* Match on str(case)cmp */
 int
 nclistmatch(NClist* l, const char* elem, int casesensitive)
 {
@@ -230,7 +231,6 @@ nclistelemremove(NClist* l, void* elem)
     return found;
 }
 
-
 /* Extends nclist to include a unique operator
    which remove duplicate values; NULL values removed
    return value is always 1.
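[Editor's sketch] The two comments added above distinguish pointer matching from string matching. A hedged usage sketch, assuming the NClist API exactly as it appears in this diff (nclistnew, nclistpush, nclistcontains, nclistmatch, nclistfreeall) and linking against netcdf-c's libdispatch:

#include <stdio.h>
#include <string.h>
#include "nclist.h"   /* netcdf-c internal header; illustrative use only */

int main(void) {
    NClist* l = nclistnew();
    char* a = strdup("zarr");
    nclistpush(l, a);
    /* nclistcontains matches on == (pointer identity) */
    printf("%d\n", nclistcontains(l, a));             /* 1 */
    printf("%d\n", nclistcontains(l, (void*)"zarr")); /* 0: same text, different pointer */
    /* nclistmatch compares the strings via str(case)cmp; the third
       argument selects case handling (exact polarity per nclist.c) */
    printf("%d\n", nclistmatch(l, "zarr", 1));        /* 1 */
    nclistfreeall(l);  /* also frees the strdup'd element */
    return 0;
}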
@@ -1429,6 +1429,7 @@ define_vars(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* varnames)
     char* varpath = NULL;
     char* key = NULL;
     NCZ_FILE_INFO_T* zinfo = NULL;
+    NC_VAR_INFO_T* var = NULL;
     NCZ_VAR_INFO_T* zvar = NULL;
     NCZMAP* map = NULL;
     NCjson* jvar = NULL;
@@ -1460,7 +1461,6 @@ define_vars(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* varnames)
 
     /* Load each var in turn */
     for(i = 0; i < nclistlength(varnames); i++) {
-        NC_VAR_INFO_T* var;
         const char* varname = nclistget(varnames,i);
         if((stat = nc4_var_list_add2(grp, varname, &var)))
             goto done;
@@ -1477,10 +1477,6 @@ define_vars(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* varnames)
         /* Indicate we do not have quantizer yet */
         var->quantize_mode = -1;
 
-        /* Set filter list */
-        assert(var->filters == NULL);
-        var->filters = (void*)nclistnew();
-
         /* Construct var path */
         if((stat = NCZ_varkey(var,&varpath)))
             goto done;
@@ -1697,9 +1693,9 @@ define_vars(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* varnames)
            object MUST contain a "id" key identifying the codec to be used. */
         /* Do filters key before compressor key so final filter chain is in correct order */
         {
-#ifdef ENABLE_NCZARR_FILTERS
             if(var->filters == NULL) var->filters = (void*)nclistnew();
             if(zvar->incompletefilters == NULL) zvar->incompletefilters = (void*)nclistnew();
+#ifdef ENABLE_NCZARR_FILTERS
             { int k;
             chainindex = 0; /* track location of filter in the chain */
             if((stat = NCZ_filter_initialize())) goto done;
@@ -1722,8 +1718,8 @@ define_vars(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* varnames)
         /* From V2 Spec: A JSON object identifying the primary compression codec and providing
            configuration parameters, or ``null`` if no compressor is to be used. */
         {
-            if(var->filters == NULL) var->filters = (void*)nclistnew();
 #ifdef ENABLE_NCZARR_FILTERS
+            if(var->filters == NULL) var->filters = (void*)nclistnew();
             if((stat = NCZ_filter_initialize())) goto done;
             if((stat = NCJdictget(jvar,"compressor",&jfilter))) goto done;
             if(jfilter != NULL && NCJsort(jfilter) != NCJ_NULL) {
@@ -1752,6 +1748,7 @@ define_vars(NC_FILE_INFO_T* file, NC_GRP_INFO_T* grp, NClist* varnames)
         nullfree(shapes); shapes = NULL;
         if(formatv1) {NCJreclaim(jncvar); jncvar = NULL;}
         NCJreclaim(jvar); jvar = NULL;
+        var = NULL;
     }
 
 done:
@@ -391,9 +391,11 @@ NCZ_def_var(int ncid, const char *name, nc_type xtype, int ndims,
     var->meta_read = NC_TRUE;
     var->atts_read = NC_TRUE;
 
+#ifdef ENABLE_NCZARR_FILTERS
     /* Set the filter list */
     assert(var->filters == NULL);
     var->filters = (void*)nclistnew();
+#endif
 
     /* Point to the type, and increment its ref. count */
     var->type_info = type;
@@ -558,10 +560,12 @@ ncz_def_var_extra(int ncid, int varid, int *shuffle, int *unused1,
 
     /* Can't turn on parallel and deflate/fletcher32/szip/shuffle
      * before HDF5 1.10.3. */
+#ifdef ENABLE_NCZARR_FILTERS
 #ifndef HDF5_SUPPORTS_PAR_FILTERS
     if (h5->parallel == NC_TRUE)
         if (nclistlength(((NClist*)var->filters)) > 0 || fletcher32 || shuffle)
             {retval = NC_EINVAL; goto done;}
 #endif
+#endif
 
     /* If the HDF5 dataset has already been created, then it is too
@@ -628,8 +632,10 @@ ncz_def_var_extra(int ncid, int varid, int *shuffle, int *unused1,
      * no filters in use for this data. */
     if (storage != NC_CHUNKED)
     {
+#ifdef NCZARR_FILTERS
         if (nclistlength(((NClist*)var->filters)) > 0)
             {retval = NC_EINVAL; goto done;}
+#endif
         for (d = 0; d < var->ndims; d++)
             if (var->dim[d]->unlimited)
                 {retval = NC_EINVAL; goto done;}
@@ -27,22 +27,27 @@ XML Parser: @XMLPARSER@
 
 # Features
 --------
-Benchmarks: @HAS_BENCHMARKS@
 NetCDF-2 API: @HAS_NC2@
 HDF4 Support: @HAS_HDF4@
 HDF5 Support: @HAS_HDF5@
 NetCDF-4 API: @HAS_NC4@
+CDF5 Support: @HAS_CDF5@
 NC-4 Parallel Support: @HAS_PARALLEL4@
 PnetCDF Support: @HAS_PNETCDF@
+
 DAP2 Support: @HAS_DAP2@
 DAP4 Support: @HAS_DAP4@
 Byte-Range Support: @HAS_BYTERANGE@
+NCZarr Support: @HAS_NCZARR@
+NCZarr S3 Support: @HAS_NCZARR_S3@
+
 Diskless Support: @HAS_DISKLESS@
 MMap Support: @HAS_MMAP@
 JNA Support: @HAS_JNA@
-CDF5 Support: @HAS_CDF5@
 ERANGE Fill Support: @HAS_ERANGE_FILL@
 Relaxed Boundary Check: @RELAX_COORD_BOUND@
-Parallel Filters: @HAS_PAR_FILTERS@
 
-NCZarr Support: @HAS_NCZARR@
 Multi-Filter Support: @HAS_MULTIFILTERS@
 Quantization: @HAS_QUANTIZE@
@@ -50,4 +55,5 @@ Logging: @HAS_LOGGING@
 SZIP Write Support: @HAS_SZLIB_WRITE@
 Standard Filters: @STD_FILTERS@
 ZSTD Support: @HAS_ZSTD@
+Benchmarks: @HAS_BENCHMARKS@
 Parallel Filters: @HAS_PAR_FILTERS@