Merge branch 'master' into github156

This commit is contained in:
Dennis Heimbigner 2015-11-24 13:16:27 -07:00
commit 8fc2cd2df5
9 changed files with 184 additions and 119 deletions

View File

@@ -1,52 +1,17 @@
sudo: false
sudo: required
language: c
compiler:
- gcc
- clang
addons:
apt:
packages:
- libcurl4-openssl-dev
- m4
- wget
- autoconf
- libtool
- gfortran
- git
- doxygen
- graphviz
services:
- docker
env:
matrix:
- DOCKIMG=unidata/nctests:serial USECMAKE=TRUE USEAC=TRUE USE_CC=gcc
- DOCKIMG=unidata/nctests:serial USECMAKE=TRUE USEAC=TRUE USE_CC=clang
- DOCKIMG=unidata/nctests:serial32 USECMAKE=TRUE USEAC=TRUE USE_CC=gcc
- DOCKIMG=unidata/nctests:serial32 USECMAKE=TRUE USEAC=TRUE USE_CC=clang
before_install:
before_script:
###
# Install dependencies from a pre-built binary.
###
- cd $HOME
- wget http://www.unidata.ucar.edu/downloads/netcdf/ftp/travisdeps.tar.bz2
- tar -jxf travisdeps.tar.bz2
- export LD_LIBRARY_PATH=$HOME/usr/lib
- export PATH=$HOME/usr/bin:$PATH
- cd -
- mkdir build-all
- mkdir build-min
- cd build-min
- cmake .. -DENABLE_NETCDF_4=OFF -DENABLE_DAP=OFF -DCMAKE_PREFIX_PATH=$HOME/usr-min
- cd ..
- cd build-all
- cmake .. -DENABLE_MMAP=ON -DENABLE_DOXYGEN=ON -DENABLE_EXTRA_TESTS=ON -DENABLE_HDF4=ON -DCMAKE_PREFIX_PATH=$HOME/usr
- cd ..
- docker pull $DOCKIMG > /dev/null
script:
- cd build-min
- make -j 4
- make test
- cd ../build-all
- make -j 4
- make test
- docker run --rm -it -e USEDASH=OFF -e RUNF=OFF -e RUNCXX=OFF -e RUNP=OFF -e RUNNCO=OFF -e USECMAKE=$USECMAKE -e USEAC=$USEAC -v $(pwd):/netcdf-c $DOCKIMG

52
.travis.yml.old Normal file
View File

@@ -0,0 +1,52 @@
sudo: false
language: c
compiler:
- gcc
- clang
addons:
apt:
packages:
- libcurl4-openssl-dev
- m4
- wget
- autoconf
- libtool
- gfortran
- git
- doxygen
- graphviz
before_install:
before_script:
###
# Install dependencies from a pre-built binary.
###
- cd $HOME
- wget http://www.unidata.ucar.edu/downloads/netcdf/ftp/travisdeps.tar.bz2
- tar -jxf travisdeps.tar.bz2
- export LD_LIBRARY_PATH=$HOME/usr/lib
- export PATH=$HOME/usr/bin:$PATH
- cd -
- mkdir build-all
- mkdir build-min
- cd build-min
- cmake .. -DENABLE_NETCDF_4=OFF -DENABLE_DAP=OFF -DCMAKE_PREFIX_PATH=$HOME/usr-min
- cd ..
- cd build-all
- cmake .. -DENABLE_MMAP=ON -DENABLE_DOXYGEN=ON -DENABLE_EXTRA_TESTS=ON -DENABLE_HDF4=ON -DCMAKE_PREFIX_PATH=$HOME/usr
- cd ..
script:
- cd build-min
- make -j 4
- make test
- cd ../build-all
- make -j 4
- make test

View File

@@ -454,7 +454,6 @@ IF(ENABLE_NETCDF_4)
SET(USE_NETCDF4 ON CACHE BOOL "")
SET(ENABLE_NETCDF_4 ON CACHE BOOL "")
SET(ENABLE_NETCDF4 ON CACHE BOOL "")
ENDIF()
# Option for building RPC
@@ -506,6 +505,8 @@ IF(USE_HDF5 OR ENABLE_NETCDF_4)
CHECK_LIBRARY_EXISTS(hdf5 H5Pget_fapl_mpio "" HDF5_IS_PARALLEL_MPIO)
IF(HDF5_IS_PARALLEL_MPIPOSIX OR HDF5_IS_PARALLEL_MPIO)
SET(HDF5_IS_PARALLEL ON)
ELSE()
SET(HDF5_IS_PARALLEL OFF)
ENDIF()
IF(HDF5_IS_PARALLEL_MPIO)
@@ -1604,6 +1605,7 @@ is_enabled(ENABLE_NETCDF_4 HAS_HDF5)
is_enabled(USE_SZIP HAS_SZIP)
is_enabled(STATUS_PNETCDF HAS_PNETCDF)
is_enabled(STATUS_PARALLEL HAS_PARALLEL)
is_enabled(ENABLE_PARALLEL4 HAS_PARALLEL4)
is_enabled(USE_DAP HAS_DAP)
is_enabled(USE_DISKLESS HAS_DISKLESS)
is_enabled(USE_MMAP HAS_MMAP)

View File

@@ -931,7 +931,7 @@ if test "x$enable_netcdf_4" = xyes; then
# The user may have built HDF5 with the SZLIB library.
if test "x$ac_cv_func_H5Z_SZIP" = xyes; then
AC_SEARCH_LIBS([SZ_Compress], [szip sz], [], [])
AC_SEARCH_LIBS([SZ_Compress], [szip sz], [], [])
AC_DEFINE([USE_SZIP], [1], [if true, compile in szip compression in netCDF-4 variables])
fi
@@ -962,9 +962,9 @@ fi
# Should we suppress parallel io for netcdf-4?
if test "x$enable_netcdf_4" = xyes ; then
AC_MSG_CHECKING([whether parallel I/O is enabled for netcdf-4])
AC_ARG_ENABLE([parallel4], [AS_HELP_STRING([--disable-parallel4],
[disable parallel I/O for netcdf-4, even if it's enabled in libhdf5])])
AC_MSG_CHECKING([whether parallel I/O is enabled for netcdf-4])
AC_ARG_ENABLE([parallel4], [AS_HELP_STRING([--disable-parallel4],
[disable parallel I/O for netcdf-4, even if it's enabled in libhdf5] )])
test "x$enable_parallel4" = xno || enable_parallel4=yes
AC_MSG_RESULT($enable_parallel4)
else
@@ -1201,12 +1201,10 @@ if test "x$enable_jna" = xyes ; then
AC_DEFINE([JNA], [1], [if true, include JNA bug fix])
fi
AC_SUBST(NC_LIBS,[$NC_LIBS])
AC_SUBST(HAS_DAP,[$enable_dap])
AC_SUBST(HAS_NC2,[$nc_build_v2])
AC_SUBST(HAS_NC4,[$enable_netcdf_4])
AC_SUBST(HAS_SZIP,[$ac_cv_func_H5Z_SZIP])
AC_SUBST(HAS_HDF4,[$enable_hdf4])
AC_SUBST(HAS_PNETCDF,[$enable_pnetcdf])
AC_SUBST(HAS_HDF5,[$enable_netcdf_4])

View File

@@ -16,76 +16,27 @@ Freely Available Software {#freely}
ANDX and ANAX {#ANDX}
------------------------------------
The ARM Program has developed [ANDX (ARM NetCDF Data
eXtract)](http://engineering.arm.gov/~sbeus/andx-web/html/), a
command-line utility designed for routine examination and extraction of
data from netcdf files. Data can be displayed graphically (line-plot,
scatter-plot, overlay, color-intensity, etc.) or extracted as ASCII
data. Whether displayed graphically or extracted as ASCII, results can
be saved to disk or viewed on screen.
The ARM Program has developed [ANDX (ARM NetCDF Data eXtract)](http://engineering.arm.gov/~sbeus/andx-web/html/), a command-line utility designed for routine examination and extraction of data from netcdf files. Data can be displayed graphically (line-plot, scatter-plot, overlay, color-intensity, etc.) or extracted as ASCII data. Whether displayed graphically or extracted as ASCII, results can be saved to disk or viewed on screen.
[ANAX (ARM NetCDF ASCII
eXtract)](http://science.arm.gov/~cflynn/ARM_Tested_Tools/) is a
scaled-down version of ANDX -- it is designed to only extract ASCII
data. All features of ANDX pertaining to non-graphic data extraction are
included in ANAX.
[ANAX (ARM NetCDF ASCII eXtract)](http://science.arm.gov/~cflynn/ARM_Tested_Tools/) is a scaled-down version of ANDX -- it is designed to only extract ASCII data. All features of ANDX pertaining to non-graphic data extraction are included in ANAX.
ANTS {#ANTS}
---------------------------
The ARM Program has developed [ANTS (ARM NetCDF Tool
Suite)](http://science.arm.gov/~cflynn/ANTS/), a collection of netCDF
tools and utilities providing various means of creating and modifying
netcdf files. ANTS is based on nctools written by Chuck Denham. The
utilities within nctools were modified to compile with version 3.5 of
the netCDF library, the command syntax was modified for consistency with
other tools, and changes were made to accommodate ARM standard netCDF.
The ARM Program has developed [ANTS (ARM NetCDF Tool Suite)](http://science.arm.gov/~cflynn/ANTS/), a collection of netCDF tools and utilities providing various means of creating and modifying netcdf files. ANTS is based on nctools written by Chuck Denham. The utilities within nctools were modified to compile with version 3.5 of the netCDF library, the command syntax was modified for consistency with other tools, and changes were made to accommodate ARM standard netCDF.
The original functions from nctools were intended mainly for the
creation, definition, and copying of fundamental netCDF elements. ARM
added others which focus on manipulation of data within existing netCDF
files. Additional functions have special support for multi-dimensional
data such as "slicing" cross sections from multi-dimensional variable
data or joining lesser-dimensional fields to form multi-dimensional
structures. Functions have been added to support execution of arithmetic
and logical operations, bundling or splitting netCDF files, comparing
the structure or content of files, and so on.
The original functions from nctools were intended mainly for the creation, definition, and copying of fundamental netCDF elements. ARM added others which focus on manipulation of data within existing netCDF files. Additional functions have special support for multi-dimensional data such as "slicing" cross sections from multi-dimensional variable data or joining lesser-dimensional fields to form multi-dimensional structures. Functions have been added to support execution of arithmetic and logical operations, bundling or splitting netCDF files, comparing the structure or content of files, and so on.
Essentially every type of netCDF library function call is exercised in
ANTS. In this way then, this open-source collection of tools also
represents a library of coding examples for fundamental netCDF tasks.
See the [website](http://science.arm.gov/~cflynn/ANTS/) for more
information.
Essentially every type of netCDF library function call is exercised in ANTS. In this way then, this open-source collection of tools also represents a library of coding examples for fundamental netCDF tasks. See the [website](http://science.arm.gov/~cflynn/ANTS/) for more information.
ARGOS {#ARGOS}
-----------------------------
[ARGOS](http://www.lapeth.ethz.ch/argos/index.html) (interActive
thRee-dimensional Graphics ObServatory) is a new IDL-based interactive
3D visualization tool, developed by [David N.
Bresch](http://www.lapeth.ethz.ch/~david/index.html) and [Mark A.
Liniger](http://www.lapeth.ethz.ch/~mark/index.html) at the Institute
for Atmospheric Science at the Swiss Federal Institute of Technology,
ETH, Zürich.
[ARGOS](http://www.lapeth.ethz.ch/argos/index.html) (interActive thRee-dimensional Graphics ObServatory) is a new IDL-based interactive 3D visualization tool, developed by [David N. Bresch](http://www.lapeth.ethz.ch/~david/index.html) and [Mark A. Liniger](http://www.lapeth.ethz.ch/~mark/index.html) at the Institute for Atmospheric Science at the Swiss Federal Institute of Technology, ETH, Zürich.
A highly optimized graphical user interface allows quick and elegant
creation of even complex 3D graphics (volume rendering,
isosurfaces,...), including Z-buffered overlays (with hidden lines),
light and data shading, Xray images, 3D trajectories, animations and
virtual flights around your data, all documented in a full on-line
[html-help](http://www.lapeth.ethz.ch/argos/argos_general.html). The
netCDF data format is preferred, but any other format can be read by
providing an IDL (or FORTRAN or C or C++) interface. Some toolboxes (for
atmospheric model output, trajectory display, radar data) have already
been written, others might easily be added (in IDL, FORTRAN or C code).
All interactive activities are tracked in a script, allowing quick
reconstruction of anything done as well as running ARGOS in batch script
mode.
A highly optimized graphical user interface allows quick and elegant creation of even complex 3D graphics (volume rendering, isosurfaces,...), including Z-buffered overlays (with hidden lines), light and data shading, Xray images, 3D trajectories, animations and virtual flights around your data, all documented in a full on-line [html-help](http://www.lapeth.ethz.ch/argos/argos_general.html). The netCDF data format is preferred, but any other format can be read by providing an IDL (or FORTRAN or C or C++) interface. Some toolboxes (for atmospheric model output, trajectory display, radar data) have already been written, others might easily be added (in IDL, FORTRAN or C code). All interactive activities are tracked in a script, allowing quick reconstruction of anything done as well as running ARGOS in batch script mode.
Information about [copyright and licensing
conditions](http://www.lapeth.ethz.ch/argos/argos_copyright.html) are
available. For further information and installation, please E-mail to:
bresch@atmos.umnw.ethz.ch
Information about [copyright and licensing conditions](http://www.lapeth.ethz.ch/argos/argos_copyright.html) are available. For further information and installation, please E-mail to: bresch@atmos.umnw.ethz.ch
CDAT {#CDAT}
---------------------------
@@ -1829,6 +1780,32 @@ The SDS project is in beta phase and keeps evolving. You are welcome to
join discussions or report issues at the CodePlex site:
<http://sds.codeplex.com>.
sciNetCDF {#scinetcdf}
-------------------------------------------------------------
[sciNetCDF](https://atoms.scilab.org/toolboxes/scinetcdf)
In the context of the IASI-NG project, CNES is responsible for the development
of a Scilab/NetCDF4 interface, which CNES wanted to make available to the entire
scientific community.
The toolbox sciNetCDF is the result of this collaboration. It can read and write
NetCDF files of any version (version 4 of the format is used by default for
writing).
The toolbox provides high level functions to read/write NetCDF files natively in
Scilab in a friendly manner (data is converted automatically from Scilab to
NetCDF and inversely).
These functions are:
- nccreate
- ncwrite
- ncread
- ncwriteatt
- ncreadatt
- ncdisp
It provides also a low level interface to all the NetCDF C library functions
Apache Spatial Information System (SIS) {#SIS}
-------------------------------------------------------------

View File

@@ -25,13 +25,10 @@ Extra libraries: @LIBS@
# Features
--------
NetCDF-2 API: @HAS_NC2@
NetCDF-4 API: @HAS_NC4@
CDF-5 Support: yes
HDF4 Support: @HAS_HDF4@
HDF5 Support: @HAS_HDF5@
HDF5/SZIP Support: @HAS_SZIP@
PNetCDF Support: @HAS_PNETCDF@
NetCDF-4 API: @HAS_NC4@
NC-4 Parallel Support: @HAS_PARALLEL4@
PNetCDF Support: @HAS_PNETCDF@
DAP Support: @HAS_DAP@
Diskless Support: @HAS_DISKLESS@
MMap Support: @HAS_MMAP@

View File

@@ -8,7 +8,8 @@ SET(NC4_TESTS tst_dims tst_dims2 tst_dims3 tst_files tst_files4 tst_vars
tst_xplatform tst_xplatform2 tst_h_atts2 tst_endian_fill tst_atts
t_type cdm_sea_soundings tst_vl tst_atts1 tst_atts2
tst_vars2 tst_files5 tst_files6 tst_sync tst_h_strbug tst_h_refs
tst_h_scalar tst_rename tst_h5_endians tst_atts_string_rewrite)
tst_h_scalar tst_rename tst_h5_endians tst_atts_string_rewrite
tst_put_vars_two_unlim_dim)
# Note, renamegroup needs to be compiled before run_grp_rename
build_bin_test(renamegroup)
@@ -85,5 +86,3 @@ ENDIF()
FILE(GLOB CUR_EXTRA_DIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/*.c ${CMAKE_CURRENT_SOURCE_DIR}/*.h ${CMAKE_CURRENT_SOURCE_DIR}/*.sh)
SET(CUR_EXTRA_DIST ${CUR_EXTRA_DIST} CMakeLists.txt Makefile.am)
SET(CUR_EXTRA_DIST ${CUR_EXTRA_DIST} ref_chunks1.cdl ref_chunks2.cdl ref_tst_compounds.nc ref_tst_xplatform2_1.nc ref_tst_xplatform2_2.nc ref_tst_dims.nc ref_tst_interops4.nc ref_grp_rename.cdl ref_tst_nvars.nc contig.hdf4 chunked.hdf4)
ADD_EXTRA_DIST("${CUR_EXTRA_DIST}")

View File

@@ -0,0 +1,67 @@
/*
* Test contributed in support of netCDF issue
* https://github.com/Unidata/netcdf-c/issues/160
*/
#include "netcdf.h"
#include <stdio.h>
/*
 * Creates a NetCDF-4 file with two UNLIMITED dimensions, one 1-D double
 * variable on each, writes 5 values to each with nc_put_vars_double, and
 * closes the file.  Exit status: 0 on success, -1 (reported via printf with
 * the netCDF error code) on any failure.  NC_NETCDF4 is required because
 * the classic format permits only a single unlimited dimension.
 */
int main(int argc, char* argv[])
{
    int ret;                 /* netCDF status code of the most recent call */
    int ncid;                /* id of the created dataset */
    int dim1id, dim2id;      /* the two unlimited dimensions */
    int var1id, var2id;      /* one variable per unlimited dimension */
    size_t start = 0;
    size_t count = 5;        /* write all 5 elements of vals[] */
    double vals[] = { 1.0, 2.0, 3.0, 4.0, 5.0 };

    if ((ret = nc_create("tst_put_vars_two_unlim_dim.nc", NC_NETCDF4 | NC_CLOBBER, &ncid))) {
        printf("nc_create(...): error code = %d\n", ret);
        return -1;
    }

    if ((ret = nc_def_dim(ncid, "dim1", NC_UNLIMITED, &dim1id))) {
        printf("nc_def_dim(...\"dim1\"...): error code = %d\n", ret);
        nc_close(ncid);
        return -1;
    }

    /* BUG FIX: this branch previously reported "dim1" on a dim2 failure. */
    if ((ret = nc_def_dim(ncid, "dim2", NC_UNLIMITED, &dim2id))) {
        printf("nc_def_dim(...\"dim2\"...): error code = %d\n", ret);
        nc_close(ncid);
        return -1;
    }

    if ((ret = nc_def_var(ncid, "var1", NC_DOUBLE, 1, &dim1id, &var1id))) {
        printf("nc_def_var(...\"var1\"...): error code = %d\n", ret);
        nc_close(ncid);
        return -1;
    }

    if ((ret = nc_def_var(ncid, "var2", NC_DOUBLE, 1, &dim2id, &var2id))) {
        printf("nc_def_var(...\"var2\"...): error code = %d\n", ret);
        nc_close(ncid);
        return -1;
    }

    /* NULL stride means a stride of 1 (contiguous write).
     * BUG FIX: diagnostics below previously named nc_put_var_double,
     * but the function actually called is nc_put_vars_double. */
    if ((ret = nc_put_vars_double(ncid, var1id, &start, &count, NULL, &vals[0]))) {
        printf("nc_put_vars_double(...var1id...): error code = %d\n", ret);
        nc_close(ncid);
        return -1;
    }

    if ((ret = nc_put_vars_double(ncid, var2id, &start, &count, NULL, &vals[0]))) {
        printf("nc_put_vars_double(...var2id...): error code = %d\n", ret);
        nc_close(ncid);
        return -1;
    }

    if ((ret = nc_close(ncid))) {
        printf("nc_close(...): error code = %d\n", ret);
        return -1;
    }

    return 0;
}

8
run_travis.sh Executable file
View File

@@ -0,0 +1,8 @@
#!/bin/bash
###
# Travis script, so that we can more easily
# create a test matrix using travis-ci and docker.
###
# Runs the netcdf-c test suite inside the $DOCKIMG container, mounting the
# current working directory (assumed to be a netcdf-c checkout) at /netcdf-c.
# The -e flags disable the dash-shell, Fortran, C++, parallel, NCO, and
# autoconf legs of the containerized test run.
# NOTE(review): $DOCKIMG is expected to come from the Travis env matrix
# (DOCKIMG=unidata/nctests:... in .travis.yml); it is unset if this script
# is run outside that environment — confirm before standalone use.
docker run --rm -it -e USEDASH=OFF -e RUNF=OFF -e RUNCXX=OFF -e RUNP=OFF -e RUNNCO=OFF -e USEAC=OFF -v $(pwd):/netcdf-c $DOCKIMG