From 143a6dc38111d4a4c7d1c5a1587a01f4df2d6e59 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Mon, 18 May 2015 10:07:34 -0600 Subject: [PATCH 01/12] Bumped travis-ci configuration to use recently-released hdf5 1.8.15. --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index ade04a81d..a1e0e8cb5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,9 +11,9 @@ before_install: # We need to install hdf5. # hdf5-1.8.14.tar.bz2 - - wget http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.14/src/hdf5-1.8.14.tar.bz2 - - tar -jxf hdf5-1.8.14.tar.bz2 - - cd hdf5-1.8.14 && ./configure --enable-shared --disable-static --disable-fortran --enable-hl --disable-fortran --prefix=/usr && make -j 4 && sudo make install + - wget http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.15/src/hdf5-1.8.15.tar.bz2 + - tar -jxf hdf5-1.8.15.tar.bz2 + - cd hdf5-1.8.15 && ./configure --enable-shared --disable-static --disable-fortran --enable-hl --disable-fortran --prefix=/usr && make -j 4 && sudo make install - cd .. before_script: From c9d7c88f63aa41982e741f083cde60dfc148f4c8 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Mon, 18 May 2015 10:55:17 -0600 Subject: [PATCH 02/12] Added a stanza for 'NC_DOUBLE' into the recently-added endians test. --- nc_test4/tst_h_endians.c | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/nc_test4/tst_h_endians.c b/nc_test4/tst_h_endians.c index 2f493d9ac..02953a61b 100644 --- a/nc_test4/tst_h_endians.c +++ b/nc_test4/tst_h_endians.c @@ -24,6 +24,9 @@ #define BE_FLOAT_VARNAME "fl_be" #define LE_INT_VARNAME "int_le" #define BE_INT_VARNAME "int_be" +#define LE_DBL_VARNAME "dbl_le" +#define BE_DBL_VARNAME "dbl_be" + int main() { int ncid, dimid; @@ -31,6 +34,8 @@ int main() { int be_float_varid; int le_int_varid; int be_int_varid; + int le_dbl_varid; + int be_dbl_varid; int ed; int failures = 0; int retval = 0; @@ -63,6 +68,14 @@ int main() { retval = nc_def_var(ncid, BE_INT_VARNAME, NC_INT, 1, &dimid, &be_int_varid); retval = nc_def_var_endian(ncid, be_int_varid, NC_ENDIAN_BIG); + /* Little-Endian Double */ + retval = nc_def_var(ncid, LE_DBL_VARNAME, NC_DOUBLE, 1, &dimid, &le_dbl_varid); + retval = nc_def_var_endian(ncid, le_dbl_varid, NC_ENDIAN_LITTLE); + + /* Big-Endian Double */ + retval = nc_def_var(ncid, BE_DBL_VARNAME, NC_DOUBLE, 1, &dimid, &be_dbl_varid); + retval = nc_def_var_endian(ncid, be_dbl_varid, NC_ENDIAN_BIG); + retval = nc_close(ncid); } @@ -74,6 +87,13 @@ int main() { printf("** Checking test files.\n"); { ncid = 0; + le_float_varid = 0; + be_float_varid = 0; + le_int_varid = 0; + be_int_varid = 0; + le_dbl_varid = 0; + be_dbl_varid = 0; + printf("*** %s\n",FILE_NAME_NC); retval = nc_open(FILE_NAME_NC, NC_NETCDF4 | NC_NOWRITE, &ncid); @@ -81,6 +101,8 @@ int main() { retval = nc_inq_varid(ncid,BE_FLOAT_VARNAME,&be_float_varid); retval = nc_inq_varid(ncid,LE_INT_VARNAME,&le_int_varid); retval = nc_inq_varid(ncid,BE_INT_VARNAME,&be_int_varid); + retval = nc_inq_varid(ncid,LE_DBL_VARNAME,&le_dbl_varid); + retval = nc_inq_varid(ncid,BE_DBL_VARNAME,&be_dbl_varid); printf("\tLittle-Endian Float...\t"); retval = nc_inq_var_endian(ncid,le_float_varid,&ed); @@ -98,6 +120,14 @@ int main() { retval = nc_inq_var_endian(ncid,be_int_varid,&ed); if(ed == NC_ENDIAN_BIG) printf("passed\n"); else {printf("failed\n"); failures++;} + printf("\tLittle-Endian Double...\t"); + retval = nc_inq_var_endian(ncid,le_dbl_varid,&ed); + if(ed == NC_ENDIAN_LITTLE) printf("passed\n"); else {printf("failed\n"); 
failures++;} + + printf("\tBig-Endian Double...\t"); + retval = nc_inq_var_endian(ncid,be_dbl_varid,&ed); + if(ed == NC_ENDIAN_BIG) printf("passed\n"); else {printf("failed\n"); failures++;} + retval = nc_close(ncid); } From 0c30751b5890724de8b597be5463f21612398485 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Mon, 18 May 2015 12:21:40 -0600 Subject: [PATCH 03/12] Updated automake file so that 'make distcheck' would pass. --- nc_test/Makefile.am | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nc_test/Makefile.am b/nc_test/Makefile.am index b767c1bf0..612d1ca36 100644 --- a/nc_test/Makefile.am +++ b/nc_test/Makefile.am @@ -12,7 +12,7 @@ CLEANFILES = nc_test_classic.nc nc_test_64bit.nc nc_test_netcdf4.nc \ tst_*.nc t_nc.nc large_files.nc quick_large_files.nc \ tst_diskless.nc tst_diskless2.nc \ tst_diskless3.nc tst_diskless3_file.cdl tst_diskless3_memory.cdl \ -tst_diskless4.cdl tst_diskless4.nc tst_formatx.nc +tst_diskless4.cdl tst_diskless4.nc tst_formatx.nc unlim.nc # These are the tests which are always run. TESTPROGRAMS = t_nc tst_small nc_test tst_misc tst_norm tst_names \ From 0f64cec6bd06b1ff890e68fb37bc952c6aaefc0a Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Wed, 20 May 2015 10:26:13 -0600 Subject: [PATCH 04/12] Changed identifier for FAQ.md for doxygen-based documentation. --- docs/FAQ.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/FAQ.md b/docs/FAQ.md index 51c61387c..8b4276a46 100644 --- a/docs/FAQ.md +++ b/docs/FAQ.md @@ -1,4 +1,4 @@ -FAQ {#ncFAQ} +FAQ {#faq} ======================= [TOC] From 167835ea329b4fffd0f6ff23094a194c55c2fb2d Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Wed, 20 May 2015 11:11:19 -0600 Subject: [PATCH 05/12] Added a check to avoid passing 0 to malloc; in this case gcc will return a pointer but some other compilers will return NULL, which is then flagged as a malloc failure. Attempting to bypass this behavior. --- libsrc4/nc4hdf.c | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/libsrc4/nc4hdf.c b/libsrc4/nc4hdf.c index 56cc770f9..81de2b471 100644 --- a/libsrc4/nc4hdf.c +++ b/libsrc4/nc4hdf.c @@ -657,8 +657,9 @@ nc4_put_vara(NC *nc, int ncid, int varid, const size_t *startp, /* If we're reading, we need bufr to have enough memory to store * the data in the file. If we're writing, we need bufr to be * big enough to hold all the data in the file's type. */ - if (!(bufr = malloc(len * file_type_size))) - BAIL(NC_ENOMEM); + if(len > 0) + if (!(bufr = malloc(len * file_type_size))) + BAIL(NC_ENOMEM); } else #endif /* ifndef HDF5_CONVERT */ @@ -823,7 +824,7 @@ nc4_put_vara(NC *nc, int ncid, int varid, const size_t *startp, num_plists--; #endif #ifndef HDF5_CONVERT - if (need_to_convert) free(bufr); + if (need_to_convert && bufr) free(bufr); #endif /* If there was an error return it, otherwise return any potential @@ -1039,8 +1040,9 @@ nc4_get_vara(NC *nc, int ncid, int varid, const size_t *startp, /* If we're reading, we need bufr to have enough memory to store * the data in the file. If we're writing, we need bufr to be * big enough to hold all the data in the file's type. 
*/ - if (!(bufr = malloc(len * file_type_size))) - BAIL(NC_ENOMEM); + if(len > 0) + if (!(bufr = malloc(len * file_type_size))) + BAIL(NC_ENOMEM); } else #endif /* ifndef HDF5_CONVERT */ @@ -1187,7 +1189,7 @@ nc4_get_vara(NC *nc, int ncid, int varid, const size_t *startp, #endif } #ifndef HDF5_CONVERT - if (need_to_convert) + if (need_to_convert && bufr != NULL) free(bufr); #endif if (xtend_size) From 853eb066c5ec79de8e51e53d88eaefebb4cad0d5 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Wed, 20 May 2015 11:13:33 -0600 Subject: [PATCH 06/12] Updated release notes. --- RELEASE_NOTES.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 9041a3b5a..be9a099a3 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -7,6 +7,8 @@ This file contains a high-level description of this package's evolution. Release ## 4.3.x Released TBD +* Addressed an issue with IBM's `XL C` compiler on AIX and how it handled some calls to malloc. Also, as suggested by Wolfgang Hayek, developers using this compiler may need to pass `CPPFLAGS=-D_LINUX_SOURCE_COMPAT` to avoid some test failures. + * Addressed an issure in netcdf4 related to specifying an endianness explicitly. When specifying an endianness for `NC_FLOAT`, the value would appear to not be written to file, if checked with `ncdump -s`. The issue was more subtle; the value would be written but was not being read from file properly for non-`NC_INT`. See [GitHub Issue](https://github.com/Unidata/netcdf-c/issues/112) or [NCF-331](https://bugtracking.unidata.ucar.edu/browse/NCF-331) for more information. * Addressed an issue in netcdf4 on Windows w/DAP related to how byte values were copied with sscanf. Issue originally reported by Ellen Johnson at Mathworks, see [NCF-330](https://bugtracking.unidata.ucar.edu/browse/NCF-330) for more information. From 8610f8da90c7e7e46dc4ba0385ecaa0bc51e0fe0 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Fri, 22 May 2015 15:33:44 -0600 Subject: [PATCH 07/12] Added 'software.md', a markdown-formatted version of the software.html page. This way it can be more easily incorporated into the documentation generated/managed by Doxygen. Conversion was handled by pandoc. --- docs/software.md | 2720 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 2720 insertions(+) create mode 100644 docs/software.md diff --git a/docs/software.md b/docs/software.md new file mode 100644 index 000000000..5004b4afd --- /dev/null +++ b/docs/software.md @@ -0,0 +1,2720 @@ +Software for Manipulating or Displaying NetCDF Data (#software) +=================================================== + +This document provides references to software packages that may be used for manipulating or displaying [netCDF](/software/netcdf/) data. We include information about both freely-available and licensed (commercial) software that can be used with netCDF data. We rely on developers to help keep this list up-to-date. If you know of corrections or additions, please [send them to us (mailto:support@unidata.ucar.edu). Where practical, we would like to include WWW links to information about these packages in the HTML version of this document. + +Other useful guides to utilities that can handle netCDF data include ARM's list of [ARM-tested netCDF data tools](http://science.arm.gov/%7ecflynn/ARM_Tested_Tools/), which includes some downloadable binaries and the NOAA Geophysical Fluid Dynamics Laboratory [guide to netCDF utilities](http://nomads.gfdl.noaa.gov/sandbox/products/vis/data/netcdf/GFDL_VG_NetCDF_Utils.html). 
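
As background for the endianness changes earlier in this patch series (the `NC_DOUBLE` cases added to `tst_h_endians.c` and the NCF-331 note in the release notes), the sketch below shows the two per-variable calls those tests exercise, `nc_def_var_endian` and `nc_inq_var_endian`. It is illustrative only and is not part of any patch: the file name, dimension length, and variable name are invented, and error checking is omitted for brevity.

    /* Minimal sketch: define a little-endian double variable in a
     * netCDF-4 file, then read its endianness back.  Names are
     * hypothetical and error checks are omitted. */
    #include <netcdf.h>
    #include <stdio.h>

    int main(void)
    {
        int ncid, dimid, varid, ed;

        nc_create("example_endian.nc", NC_NETCDF4 | NC_CLOBBER, &ncid);
        nc_def_dim(ncid, "x", 10, &dimid);
        nc_def_var(ncid, "dbl_le", NC_DOUBLE, 1, &dimid, &varid);
        nc_def_var_endian(ncid, varid, NC_ENDIAN_LITTLE);  /* per-variable endianness */
        nc_close(ncid);

        nc_open("example_endian.nc", NC_NOWRITE, &ncid);
        nc_inq_varid(ncid, "dbl_le", &varid);
        nc_inq_var_endian(ncid, varid, &ed);               /* expect NC_ENDIAN_LITTLE */
        printf("endianness: %s\n", ed == NC_ENDIAN_LITTLE ? "little" : "other/native");
        nc_close(ncid);
        return 0;
    }
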
+ +------------------------------------------------------------------------ + +[Freely Available Software](#freely) +------------------------------------ + +- [ANDX (ARM NetCDF Data eXtract) and ANAX (ARM NetCDF ASCII + eXtract)](#ANDX) +- [ANTS (ARM NetCDF Tool Suite)](#ANTS) +- [ARGOS (interActive thRee-dimensional Graphics ObServatory)](#ARGOS) +- [CDAT (Climate Data Analysis Tool)](#CDAT) +- [CDFconvert (Convert netCDF to RPN and GEMPAK Grids)](#CDFconvert) +- [cdfsync (network synchronization of netCDF files)](#cdfsync) +- [CDO (Climate Data Operators)](#CDO) +- [CIDS Tools](#CIDS%20Tools) +- [CSIRO MATLAB/netCDF interface](#CSIRO-MATLAB) +- [EPIC](#EPIC) +- [Excel Use](#ExcelUse) +- [EzGet](#EzGet) +- [FAN (File Array Notation)](#FAN) +- [FERRET](#FERRET) +- [FIMEX (File Interpolation, Manipulation, and EXtraction)](#fimex) +- [FWTools (GIS Binary Kit for Windows and Linux)](#fwtools) +- [GDAL (Geospatial Data Abstraction Library)](#GDAL) +- [GDL (GNU Data Language)](#GDL) +- [Gfdnavi (Geophysical fluid data navigator)](#Gfdnavi) +- [GMT (Generic Mapping Tools)](#GMT) +- [Grace](#Grace) +- [GrADS (Grid Analysis and Display System)](#GrADS) +- [Gri](#Gri) +- [GXSM - Gnome X Scanning Microscopy project](#GXSM) +- [HDF (Hierarchical Data Format) interface](#HDF%20interface) +- [HDF-EOS to netCDF converter](#HDF-EOS) +- [HIPHOP (Handy IDL-Program for HDF-Output Plotting)](#HIPHOP) +- [HOPS (Hyperslab OPerator + Suite)](#Hyperslab%20OPerator%20Suite%20(HOPS)) +- [iCDF (imports chromatographic netCDF data into MATLAB)](#iCDF) +- [IDV (Integrated Data Viewer)](#IDV) +- [Ingrid](#Ingrid) +- [Intel Array Visualizer](#IntelArrayVisualizer) +- [IVE (Interactive Visualization Environment)](#IVE) +- [JSON format with the ncdump-json utility](#JSON) +- [Java interface](#Java%20interface) +- [Kst (2D plotting tool)](#KST) +- [Labview interface](#Labview-API) +- [MBDyn (MultiBody Dynamics)](#MBDyn) +- [Max\_diff\_nc](#Max_diff_nc) +- [MeteoExplorer](#MeteoExplorer) +- [MeteoInfo](#MeteoInfo) +- [MexEPS (MATLAB interface)](#MexEPS) +- [MEXNC and SNCTOOLS (a MATLAB interface)](#MEXNC) +- [Mirone (Windows MATLAB-based display)](#Mirone) +- [ncBrowse (netCDF File Browser)](#ncBrowse) +- [nccmp (netCDF compare)](#nccmp) +- [ncdx (netCDF for OpenDX)](#ncdx) +- [ncensemble (command line utility to do ensemble + statistics)](#ncensemble) +- [NCL (NCAR Command Language)](#NCL) +- [NCO (NetCDF Operators)](#NCO) +- [ncregrid](#ncregrid) +- [nctoolbox (a MATLAB common data model interface)](#nctoolbox) +- [ncview](#ncview) +- [ncvtk](#ncvtk) +- [netcdf tools](#netcdf_tools) +- [netcdf4excel (add-in for MS Excel)](#netcdf4excel) +- [NetCDF95 alternative Fortran API](#netcdf95) +- [Objective-C interface](#Objective-C) +- [Octave interface](#NCMEX) +- [Octave interface (Barth)](#Octave) +- [OPeNDAP (formerly DODS)](#OPeNDAP) +- [OpenDX (formerly IBM Data Explorer)](#OpenDX) +- [Panoply](#Panoply) +- [Parallel-NetCDF](#Parallel-NetCDF) +- [Paraview and vtkCSCSNetCDF](#Paraview) +- [Perl interfaces](#Perl) +- [PolyPaint+](#PolyPaint+) +- [Pomegranate](#pomegranate) +- [Pupynere (PUre PYthon NEtcdf REader)](#pupynere) +- [PyNGL and PyNIO](#PyNGL) +- [Python interfaces](#Python) +- [QGIS (Quantum GIS)](#QGIS) +- [R interface](#R) +- [Ruby interface](#Ruby) +- [Scientific DataSet (SDS) Library](#SDS) +- [Apache Spatial Information System (SIS)](#SIS) +- [Tcl/Tk interfaces](#Tcl/Tk) +- [Tcl-nap (N-dimensional array processor)](#Tcl-nap) +- [Visual Basic and VB.net](#VB) +- [VisAD](#VisAD) +- [WebWinds](#WebWinds) +- 
[xray (Python N-D labelled arrays)](#xray) +- [Zebra](#Zebra) +- [User-contributed software](#user) + +------------------------------------------------------------------------ + +[Commercial or Licensed Packages](#commercial) +---------------------------------------------- + +- [ASA ViewNcDap](#ViewNcDap) +- [Avizo](#Avizo) +- [AVS](#AVS) +- [Barrodale UFI](#BCS-UFI) +- [DioVISTA/Storm](#DioVISTA/Storm) +- [EnSight](#EnSight) +- [Environmental WorkBench](#Environmental%20WorkBench) +- [ESRI](#ESRI) +- [FME](#FME) +- [HDF Explorer](#HDF-Explorer) +- [IDL Interface](#IDL) +- [InterFormat](#InterFormat) +- [IRIS Explorer Module](#IRIS%20Explorer%20Module) +- [LeoNetCDF](#LeoNetCDF) +- [Mathematica](#Mathematica) +- [MATLAB](#MATLAB) +- [Noesys](#Noesys) +- [Origin](#Origin) +- [PPLUS](#PPLUS) +- [PV-Wave](#PV-Wave) +- [Slicer Dicer](#SlicerDicer) +- [vGeo](#vGeo) +- [VISAGE and Decimate](#VISAGE%20and%20Decimate) +- [Voyager](#Voyager) + +------------------------------------------------------------------------ + +Freely Available Software {#freely} +========================= + +ANDX and ANAX +------------------------------------ + +The ARM Program has developed [ANDX (ARM NetCDF Data +eXtract)](http://engineering.arm.gov/~sbeus/andx-web/html/), a +command-line utility designed for routine examination and extraction of +data from netcdf files. Data can be displayed graphically (line-plot, +scatter-plot, overlay, color-intensity, etc.) or extracted as ASCII +data. Whether displayed graphically or extracted as ASCII, results can +be saved to disk or viewed on screen. + +[ANAX (ARM NetCDF ASCII +eXtract)](http://science.arm.gov/~cflynn/ARM_Tested_Tools/) is a +scaled-down version of ANDX -- it is designed to only extract ASCII +data. All features of ANDX pertaining to non-graphic data extraction are +included in ANAX. + +ANTS +--------------------------- + +The ARM Program has developed [ANTS (ARM NetCDF Tool +Suite)](http://science.arm.gov/~cflynn/ANTS/), a collection of netCDF +tools and utilities providing various means of creating and modifying +netcdf files. ANTS is based on nctools written by Chuck Denham. The +utilities within nctools were modified to compile with version 3.5 of +the netCDF library, the command syntax was modified for consistency with +other tools, and changes were made to accommodate ARM standard netCDF. + +The original functions from nctools were intended mainly for the +creation, definition, and copying of fundamental netCDF elements. ARM +added others which focus on manipulation of data within existing netCDF +files. Additional functions have special support for multi-dimensional +data such as "slicing" cross sections from multi-dimensional variable +data or joining lesser-dimensional fields to form multi-dimensional +structures. Functions have been added to support execution of arithmetic +and logical operations, bundling or splitting netCDF files, comparing +the structure or content of files, and so on. + +Essentially every type of netCDF library function call is exercised in +ANTS. In this way then, this open-source collection of tools also +represents a library of coding examples for fundamental netCDF tasks. +See the [website](http://science.arm.gov/~cflynn/ANTS/) for more +information. + +ARGOS +----------------------------- + +[ARGOS](http://www.lapeth.ethz.ch/argos/index.html) (interActive +thRee-dimensional Graphics ObServatory) is a new IDL-based interactive +3D visualization tool, developed by [David N. 
+Bresch](http://www.lapeth.ethz.ch/~david/index.html) and [Mark A. +Liniger](http://www.lapeth.ethz.ch/~mark/index.html) at the Institute +for Atmospheric Science at the Swiss Federal Institute of Technology, +ETH, Zürich. + +A highly optimized graphical user interface allows quick and elegant +creation of even complex 3D graphics (volume rendering, +isosurfaces,...), including Z-buffered overlays (with hidden lines), +light and data shading, Xray images, 3D trajectories, animations and +virtual flights around your data, all documented in a full on-line +[html-help](http://www.lapeth.ethz.ch/argos/argos_general.html). The +netCDF data format is preferred, but any other format can be read by +providing an IDL (or FORTRAN or C or C++) interface. Some toolboxes (for +atmospheric model output, trajectory display, radar data) have already +been written, others might easily be added (in IDL, FORTRAN or C code). +All interactive activities are tracked in a script, allowing quick +reconstruction of anything done as well as running ARGOS in batch script +mode. + +Information about [copyright and licensing +conditions](http://www.lapeth.ethz.ch/argos/argos_copyright.html) are +available. For further information and installation, please E-mail to: +bresch@atmos.umnw.ethz.ch + +CDAT +--------------------------- + +The [Climate Data Analysis Tool (CDAT)](http://cdat.sf.net), developed +by the [Program for Climate Model Diagnosis and Intercomparison +(PCMDI)](http://www-pcmdi.llnl.gov/) at Lawrence Livermore National +Laboratory, provides the capabilities needed to analyze model data, +perform complex mathematical calculations, and graphically display the +results. It provides the necessary tools to diagnose, validate, and +intercompare large observational and global climate model data sets. +It includes the ability to ingest large climate datasets in netCDF, HDF, +DRS, and GrADS/GRIB format; the Visualization and Computation System +(VCS) module, visually displays and animates ingested or created data; +and the Library of AMIP Data Transmission Standards (LATS) module +outputs data in the machine-independent netCDF or GrADS/GRIB file +formats. + +In addition, the Command Line Interface (CLI) module allows CDAT to +receive argument and function input via the command line, and the +Graphical User Interface (GUI) allows CDAT to receive argument and +function input via a point-and-click environment. + +The software, which runs as a standalone process or within PCMDI's +Visualization and Computation System (VCS), provides climate scientists +with an easy and fast method to read different file formats, and to +analyze and graphically display climate data in an integrated fashion. +CDAT includes a set of pre-defined functions to allow the user to +manipulate the data and send the output to a file which can be viewed as +an image, or as a collection of images in an animation. The software has +a gradual learning curve, allowing the novice user to quickly obtain +useful results. + +CDFconvert +--------------------------------------- + +The [MRG CDFconvert +package](http://www.atmos.albany.edu/facstaff/rmctc/cdf_cvt/) provided +by the Mesoscale Research Group, McGill University/SUNY Albany, is +designed to address data conversion issues for gridded datasets stored +under the +[COARDS](http://ferret.wrc.noaa.gov/noaa_coop/coop_cdf_profile.html) +convention. 
CDFconvert converts regular Cylindrical Equidistant +(Lat/Long) and Gaussian (Spherical) netCDF grids into either the +Canadian [RPN Standard +File](http://www.cmc.ec.gc.ca/rpn/modcom/si/libraries/rmnlib/fstd/index.html) +or [GEMPAK](/software/gempak/index.html) file formats. MRG CDFconvert +has the flexibility to handle netCDF files generated by a number of +sources, including NCEP and ECMWF. User-definable conversion tables make +the extension of the package to different datasets possible. + +cdfsync +--------------------------------- + +Joe Sirott of NOAA's Pacific Marine Environmental Laboratory has +developed cdfsync, a program that allows users to rapidly synchronize a +set of netCDF files over a network. Fast synchronization times are +achieved by only transmitting the differences between files. It is built +on the Open Source [rsync](http://samba.anu.edu.au/rsync/) program, but +contains a number of optimizations including: + +- Special handling of netCDF files for faster synchronization + calculations +- Much faster updates of large numbers of small netCDF files +- In-place updates of large netCDF files + +The latest version should run on Linux variants and Solaris. + +More information is available at the [cdfsync +website](http://www.epic.noaa.gov/epic/software/cdfsync/). + +CDO (Climate Data Operators) +-------------------------------------------------- + +Uwe Schulzweida at the Max Planck Institute for Meteorology has +developed [CDO](http://code.zmaw.de/projects/cdo), a collection of +Operators to manipulate and analyze Climate Data files. Supported file +formats include netCDF and GRIB. There are more than 350 operators +available. The following table provides a brief overview of the main +categories. + +- File information (info, sinfo, diff, ...) +- File operations (copy, cat, merge, split\*, ...) +- Selection (selcode, selvar, sellevel, seltimestep, ...) +- Missing values (setctomiss, setmisstoc, setrtomiss) +- Arithmetic (add, sub, mul, div, ...) +- Mathematical functions (sqrt, exp, log, sin, cos, ...) +- Comparision (eq, ne, le, lt, ge, gt, ...) +- Conditions (ifthen, ifnotthen, ifthenc, ifnotthenc) +- Field statistics (fldsum, fldavg, fldstd, fldmin, fldmax, ...) +- Vertical statistics (vertsum, vertavg, vertstd, vertmin, ...) +- Time range statistics (timavg, yearavg, monavg, dayavg, ...) +- Field interpolation (remapbil, remapcon, remapdis, ...) +- Vertical interpolation (ml2pl, ml2hl) +- Time interpolation (inttime, intyear) + +As an example of use of CDO, converting from GRIB to netCDF can be as +simple as + + cdo -f nc copy file.grb file.nc + +or with relative time axis (for usage with GrADS) + cdo -r -f nc copy file.grb file.nc + +or using ECMWF reanalysis on a reduced grid + cdo -R -f nc copy file.grb file.nc + +More information is available on the [CDO +homepage](http://code.zmaw.de/projects/cdo). + +CIDS Tools +--------------------------------------- + +The Center for Clouds Chemistry and Climate +([C4](http://www-c4.ucsd.edu/)) Integrated Data Systems +([CIDS](http://www-c4.ucsd.edu/~cids/)) group has developed several +useful netCDF utilities: +- cdf2idl: Writes an IDL script to read a NetCDF file. +- cdf2c: Writes C code to read a NetCDF file. +- cdf2fortran: Writes FORTRAN source code to read a NetCDF file. +- cdf2asc: Dumps NetCDF data to an ASCII file. + +The source for these utilities can be downloaded from [CIDS NetCDF +Visualization Tools +site](http://www-c4.ucsd.edu/~cids/software/visual.html). 
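
Among the CIDS utilities above, cdf2c writes C source tailored to a particular netCDF file. For orientation, a hand-written reader for a simple one-dimensional variable might look like the sketch below; the file name "data.nc" and variable name "temperature" are placeholders, and error handling is reduced to the initial open.

    /* Hedged sketch of the kind of reader cdf2c generates; the file and
     * variable names are placeholders, and the variable is assumed to
     * have exactly one dimension. */
    #include <netcdf.h>
    #include <stdio.h>
    #include <stdlib.h>

    int main(void)
    {
        int ncid, varid, dimid, status;
        size_t len;
        double *vals;

        if ((status = nc_open("data.nc", NC_NOWRITE, &ncid)) != NC_NOERR) {
            fprintf(stderr, "%s\n", nc_strerror(status));
            return 1;
        }
        nc_inq_varid(ncid, "temperature", &varid);
        nc_inq_vardimid(ncid, varid, &dimid);   /* assumes a single dimension */
        nc_inq_dimlen(ncid, dimid, &len);

        vals = malloc(len * sizeof(double));
        if (!vals) { nc_close(ncid); return 1; }
        nc_get_var_double(ncid, varid, vals);   /* library converts to double on read */
        printf("first value: %g\n", vals[0]);

        free(vals);
        nc_close(ncid);
        return 0;
    }
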
+ +CSIRO MATLAB/netCDF interface +------------------------------------------------------------ + +The [CSIRO MATLAB/netCDF +interface](http://www.marine.csiro.au/sw/matlab-netcdf.html) is now +available from the [CSIRO Marine +Laboratories](http://www.marine.csiro.au). +The CSIRO MATLAB/netCDF interface is run from within MATLAB and has a +simple syntax. It has options for automatically handling missing values, +scale factors, and permutation of hyperslabs. It is, however, limited to +retrieving data from, and information about, existing netCDF files. + +The basis of the interface is a machine-dependent mex-file called +mexcdf53. Rather than call the mex-file directly users are advised to +employ both [Chuck Denham's netCDF toolbox](#NC4ML5) and the CSIRO +MATLAB/netCDF interface described here. For read-only access to existing +netCDF data, the CSIRO interface has a simpler syntax than the netCDF +Toolbox, but the latter may also be used to create and manipulate netCDF +variables and datasets. + +EPIC +--------------------------- + +NOAA's Pacific Marine Environmental Laboratory +([PMEL](http://www.pmel.noaa.gov/)) has developed the +[EPIC](http://www.pmel.noaa.gov/epic/) software package for +oceanographic data. EPIC provides graphical display and data field +manipulation for multi-dimensional netCDF files (up to 4 dimensions). +PMEL has been using this software on Unix and VMS several years. At +present, they have: + +- a data file I/O library ( + [epslib](http://www.pmel.noaa.gov/epic/eps-manual/epslib_toc.html), + which is layered on top of the netCDF library). +- epslib allows transparent access to multiple data file formats +- a [MATLAB MexEPS + interface](http://www.epic.noaa.gov/epic/software/mexeps.htm) for + using any supported EPIC file with MATLAB +- [suite of EPIC + programs](http://www.epic.noaa.gov/epic/software/ep_programs.htm) + for graphics and analysis of hydrographic profile data and time + series data. + +This software was developed on Sun/Unix and is also supported for +DEC/Ultrix and VAX/VMS as a system for data management, display and +analysis system for observational oceanographic time series and +hydrographic data. The EPIC software includes over 50 programs for +oceanographic display and analysis, as well as utilities for putting +in-situ or observational data on-line (with on-the-fly graphics and data +download) on the WWW. +The developers are interested in coordinating with others who may be +developing oceanographic software for use with netCDF files. The EPIC +software is available via anonymous FTP from ftp.noaapmel.gov in the +epic/ and /eps directories. To obtain the EPIC software, please see Web +pages at . For +information about EPIC, please see the Web pages at +. Contact epic@pmel.noaa.gov, +or Nancy Soreide, nns@noaapmel.gov, for more information. + +Excel Use +------------------------------------ + +Several packages are available for accessing netCDF data from Microsoft +Excel, including the [netcdf4excel](#netcdf4excel) add-in for Excel, and +a [Scientific Dataset (SDS) Library](#SDS) that supports a DataSetEditor +add-in for Excel to view and modify various forms of data, including +netCDF. 
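
Interfaces such as the CSIRO MATLAB toolbox described above handle missing values and scale factors automatically. When packed data are read directly through the netCDF C API, the conventional `scale_factor` and `add_offset` attributes must be applied by hand; the helper below is a hedged sketch of that step only (the surrounding open and read calls are assumed, and the attribute names are the usual packing convention, not anything specific to the toolbox).

    /* Sketch: unpack a value using the conventional scale_factor and
     * add_offset attributes, if present.  ncid/varid come from the usual
     * nc_open/nc_inq_varid calls; "packed" is a raw value from the file. */
    #include <netcdf.h>

    static double unpack_value(int ncid, int varid, double packed)
    {
        double scale = 1.0, offset = 0.0;

        /* If an attribute is absent the call fails (e.g. NC_ENOTATT)
         * and the neutral default is kept. */
        if (nc_get_att_double(ncid, varid, "scale_factor", &scale) != NC_NOERR)
            scale = 1.0;
        if (nc_get_att_double(ncid, varid, "add_offset", &offset) != NC_NOERR)
            offset = 0.0;

        return packed * scale + offset;
    }
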
+ +EzGet +----------------------------- + +A FORTRAN library called +[EzGet](http://www-pcmdi.llnl.gov/ktaylor/ezget/ezget.html) has been +developed at [PCMDI](http://www-pcmdi.llnl.gov/PCMDI.html) to facilitate +retrieval of modeled and observed climate data stored in popular formats +including [DRS](http://www-pcmdi.llnl.gov/drach/DRS.html), +[netCDF](/software/netcdf/), [GrADS](http://grads.iges.org/grads), and, +if a control file is supplied, +[GRIB](ftp://nic.fb4.noaa.gov/pub/nws/nmc/docs/gribed1/). You can +specify how the data should be structured and whether it should undergo +a grid transformation before you receive it, even when you know little +about the original structure of the stored data (e.g., its original +dimension order, grid, and domain). +The EzGet library comprises a set of subroutines that can be linked to +any FORTRAN program. EzGet reads files through the +[cdunif](http://www-pcmdi.llnl.gov/drach/cdunif.html) interface, but use +of EzGet does not require familiarity with cdunif. The main advantages +of using EzGet instead of the lower level cdunif library include: + +- Substantial error trapping capabilities and detailed error messages +- Versatile capability of conveniently selecting data from specified + regions (e.g., oceans, North America, all land areas north of 45 + degrees latitude, etc.) +- Ability to map data to a new grid at the time it is retrieved by + EzGet +- Automatic creation of \`\`weights'' for use in subsequent averaging + or masking of data +- Increased control in specifying the domain of the data to be + retrieved. + +For more information about EzGet, including instructions for downloading +the documentation or software, see the EzGet home page at +. For questions or +comments on EzGet, contact Karl Taylor (taylor13@llnl.gov). + +FAN +------------------------- + +[FAN (File Array Notation)](/software/netcdf/fan_utils.html) is Harvey +Davies' package for extracting and manipulating array data from netCDF +files. The package includes the three utilities nc2text, text2nc, and +ncrob for printing selected data from netCDF arrays, copying ASCII data +into netCDF arrays, and performing various operations (sum, mean, max, +min, product, ...) on netCDF arrays. A library (fanlib) is also included +that supports the use of FAN from C programs. The package is available +via anonymous FTP from +. Questions and +comments may be sent to Harvey Davies, harvey.davies@csiro.au. + +FERRET +------------------------------- + +[FERRET](http://ferret.wrc.noaa.gov/Ferret/) is an interactive computer +visualization and analysis environment designed to meet the needs of +oceanographers and meteorologists analyzing large and complex gridded +data sets. It is available by anonymous ftp from abyss.pmel.noaa.gov for +a number of computer systems: SUN (Solaris and SUNOS), DECstation +(Ultrix and OSF/1), SGI, VAX/VMS and Macintosh (limited support), and +IBM RS-6000 (soon to be released). +FERRET offers a Mathematica-like approach to analysis; new variables may +be defined interactively as mathematical expressions involving data set +variables. Calculations may be applied over arbitrarily shaped regions. +Fully documented graphics are produced with a single command. Graphics +styles included line plots, scatter plots, contour plots, color-filled +contour plots, vector plots, wire frame plots, etc. Detailed controls +over plot characteristics, page layout and overlays are provided. NetCDF +is supported both as an input and an output format. 
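
Several of the packages just described (FAN, EPIC, Ferret) operate on subsets, or hyperslabs, of netCDF arrays. In the underlying C API a hyperslab read is expressed as a start corner plus edge counts; the sketch below is a minimal illustration with a hypothetical variable name and shape, not code taken from any of those packages.

    /* Sketch: read a 10x20 hyperslab starting at index (5,0) from a 2-D
     * variable.  The variable name and sizes are illustrative only. */
    #include <netcdf.h>

    int read_subset(const char *path, double out[10][20])
    {
        int ncid, varid, status;
        size_t start[2] = {5, 0};    /* corner of the hyperslab */
        size_t count[2] = {10, 20};  /* edge lengths along each dimension */

        if ((status = nc_open(path, NC_NOWRITE, &ncid)) != NC_NOERR)
            return status;
        if ((status = nc_inq_varid(ncid, "sst", &varid)) == NC_NOERR)
            status = nc_get_vara_double(ncid, varid, start, count, &out[0][0]);
        nc_close(ncid);
        return status;
    }
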
+ +Many excellent software packages have been developed recently for +scientific visualization. The features that make FERRET distinctive +among these packages are Mathematica-like flexibility, geophysical +formatting (latitude/longitude/date), "intelligent" connection to its +data base, special memory management for very large calculations, and +symmetrical processing in 4 dimensions. Contact Steve Hankin, +hankin@noaapmel.gov, for more information. + +Fimex +----------------------------- + +Heiko Klein (Norwegian Meteorological Institute) has developed the +[fimex](https://wiki.met.no/fimex/start) (File Interpolation, +Manipulation, and EXtraction) C++ library for gridded geospatial data. +It converts between several data formats (currently netCDF, NcML, GRIB1 +or GRIB2, and felt). Fimex also enables you to change the projection and +interpolation of scalar and vector grids, to subset the gridded data, +and to extract only parts of the files. Fimex supports a growing list of +other [features](https://wiki.met.no/fimex/features), including support +for most NcML features and for netCDF-4 compression. + +For simple usage, Fimex also comes with the command line program fimex. + +Documentation and downloads are available from the [fimex web +site](http://wiki.met.no/fimex/). + +FWTools (GIS Binary Kit for Windows and Linux) +------------------------------------------------------------------------ + +[FWTools](http://fwtools.maptools.org/) is Frank Warmerdam's set of Open +Source GIS binaries for Windows (win32) and Linux (x86 32bit) systems. +The kits are intended to be easy for end users to install and get going +with, and include OpenEV, GDAL, MapServer, PROJ.4 and OGDI as well as +some supporting components. FWTools aims to track the latest development +versions of the packages included as opposed to official releases, "to +give folks a chance to use the *latest and greatest*". + +GDAL +--------------------------- + +Frank Warmerdam's [GDAL](http://www.remotesensing.org/gdal/index.html) +is a translator library for raster geospatial data formats that is +released under an X/MIT style Open Source license. As a library, it +presents a [single abstract data +model](http://www.remotesensing.org/gdal/gdal_datamodel.html) to the +calling application for all supported formats. The related +[OGR](http://www.remotesensing.org/gdal/ogr) library (which lives within +the GDAL source tree) provides a similar capability for simple features +vector data. + +GDAL is in active use in several projects, and includes roughly 40 +format drivers, including a translator for netCDF (read/write). Other +translators include GeoTIFF (read/write), Erdas Imagine (read/write), +ESRI .BIL (read), .aux labeled raw (read/write), DTED (read), SDTS DEM +(read), CEOS (read), JPEG (read/write), PNG (read/write), Geosoft GXF +(read) and Arc/Info Binary Grid (read). A full list is available in +[Supported +Formats](http://www.remotesensing.org/gdal/formats_list.html). + +GDAL has recently included support for the netCDF-4 enhanced data model +and netCDF-4 format, as well as improved support for recent additions to +the CF conventions. 
+ +As an example of the use of GDAL, converting an ArcInfo ASCII grid to +netCDF (GMT conventions) as easy as: + + gdal_translate arc_ascii.grd -of GMT gmt_grid.nc + +GDL (GNU Data Language) +--------------------------------------------- + +[GDL](http://gnudatalanguage.sourceforge.net/) is a free implementation +of most of the programming language supported by [IDL](#IDL) +(Interactive Data Language). GDL supports the netCDF-3 API. + +Gfdnavi (Geophysical fluid data navigator) +-------------------------------------------------------------------- + +[Gfdnavi](http://www.gfd-dennou.org/arch/davis/gfdnavi/index.en.htm) is +a web-based tool to archive, share, distribute, analyze, and visualize +geophysical fluid data and knowledge. The software is under development +by members of the GFD Dennou Club, including T. Horinouchi (RISH, Kyoto +U.), S. Nishizawa (RIMS, Kyoto U.), and colleagues. Gfdnavi uses a +metadata database for managing and analyzing data and visualizations. It +also permits publishing data for web access and will soon support access +to data on other Gfdnavi servers. Web service APIs are now under +development. A presentation [Introducing +Gfdnavi](http://www.gfd-dennou.org/arch/davis/gfdnavi/presen/2007-03-05_GfdnaviIntro.En/pub/) +describes the architecture and shows examples of use. + +Gfdnavi is dependent on two technologies: + +- [Ruby on Rails](http://www.rubyonrails.com/), a framework for web + applications, and +- [the Dennou Ruby Project](http://ruby.gfd-dennou.org/), a collection + of tools for geophysical data. These tools include + [GPhys](http://ruby.gfd-dennou.org/products/gphys/) software to + handle GRIB, GrADS, and netCDF data uniformly. + +As an example of this technology, Takuji Kubota has established [a +Gfdnavi server](http://www.gsmap.aero.osakafu-u.ac.jp/gfdnavi/) for the +Global Satellite Mapping of Precipitation +([GSMaP](http://www.radar.aero.osakafu-u.ac.jp/~gsmap/index_english.html)) +project. + +GMT +------------------------- + +[GMT](http://gmt.soest.hawaii.edu/) (Generic Mapping Tools) is an open +source collection of about 60 tools for manipulating geographic and +Cartesian data sets (including filtering, trend fitting, gridding, +projecting, etc.) and producing Encapsulated PostScript File (EPS) +illustrations ranging from simple x-y plots via contour maps to +artificially illuminated surfaces and 3-D perspective views. GMT +supports 30 map projections and transformations and comes with support +data such as coastlines, rivers, and political boundaries. GMT is +developed and maintained by Paul Wessel and Walter H. F. Smith with help +from a global set of volunteers, and is supported by the National +Science Foundation. It is released under the GNU General Public License. + +The package can access COARDS-compliant netCDF grids as well as ASCII, +native binary, or user-defined formats. The GMT package is available via +anonymous ftp from several servers; see +[gmt.soest.hawaii.edu](http://gmt.soest.hawaii.edu) for installation +information. + +Grace +----------------------------- + +[Grace](http://plasma-gate.weizmann.ac.il/Grace/) is a tool to make +two-dimensional plots of scientific data, including 1D netCDF variables. +It runs under the X Window System and OSF Motif (recent versions of +LessTif are, by and large, fine, too). Grace runs on practically any +version of Unix. As well, it has been successfully ported to VMS, OS/2 +and Win9\*/NT (some functionality may be missing, though). Grace is a +descendant of ACE/gr. 
+A few features of Grace are: + +- User defined scaling, tick marks, labels, symbols, line styles, + colors. +- Batch mode for unattended plotting. +- Read and write parameters used during a session. +- Regressions, splines, running averages, DFT/FFT, + cross/auto-correlation, ... +- Support for dynamic module loading. +- Hardcopy support for PostScript, PDF, GIF, and PNM formats. +- Device-independent Type1 font rastering. +- Ability to read or write netCDF data. + +GrADS +----------------------------- + +[GrADS](http://grads.iges.org/grads/grads.html) (Grid Analysis and +Display System) is an interactive desktop tool from +[COLA/IGES](http://grads.iges.org/cola.html) that is currently in use +worldwide for the analysis and display of earth science data. GrADS is +implemented on all commonly available UNIX workstations, Apple +Macintosh, and DOS or Linux based PCs, and is freely available via +anonymous ftp. GrADS provides an integrated environment for access, +manipulation, and display of earth science data in several forms, +including GRIB and netCDF. For more information, see the [GrADS User's +Guide](http://grads.iges.org/grads/gadoc/users.html). + +Gri +------------------------- + +Gri is an extensible plotting language for producing scientific graphs, +such as x-y plots, contour plots, and image plots. Dan Kelley of +Dalhousie University is the author of Gri, which can read data from +netCDF files as well as ASCII and native binary data. For more +information on Gri, see the URL . + +GXSM +--------------------------- + +The GXSM is the Gnome X Scanning Microscopy project, it is a bit more +than just a piece of software (the GXSM itself), there is full hardware +support for DSP cards including open source DSP software and a growing +set of SPM related electronics. For more information, see +. + +HDF interface +--------------------------------------------- + +The National Center for Supercomputing Applications (NCSA) has added the +netCDF interface to their [Hierarchical Data Format +(HDF)](http://hdf.ncsa.uiuc.edu/) software. HDF is an extensible data +format for self-describing files. A substantial set of applications and +utilities based on HDF is available; these support raster-image +manipulation and display and browsing through multidimensional +scientific data. An implementation is now available that provides the +netCDF interface to HDF. With this software, it is possible to use the +netCDF calling interface to place data into an HDF file. The netCDF +calling interface has not changed and netCDF files stored in XDR format +are readable, so existing programs and data will still be usable +(although programs will need to be relinked to the new library). There +is currently no support for the mixing of HDF and netCDF structures. For +example, a raster image can exist in the same file as a netCDF object, +but you have to use the Raster Image interface to read the image and the +netCDF interface to read the netCDF object. The other HDF interfaces are +currently being modified to allow multi-file access, closer integration +with the netCDF interface will probably be delayed until the end of that +project. +Eventually, it will be possible to integrate netCDF objects with the +rest of the HDF tool suite. Such an integration will then allow tools +written for netCDF and tools written for HDF to both interact +intelligently with the new data files. 
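
The value of such layered implementations is that the standard netCDF call sequence does not change with the underlying storage. In today's netCDF-4 library the same idea appears as the NC_NETCDF4 flag, which places the data in an HDF5-based file; the sketch below shows the unchanged create/define/write sequence, with illustrative names.

    /* Sketch: the standard netCDF create/define/write sequence.  With the
     * NC_NETCDF4 flag the data are stored in an HDF5-based file; without
     * it, in a classic netCDF file.  The calls themselves do not change. */
    #include <netcdf.h>

    int write_example(void)
    {
        int ncid, dimid, varid, status;
        float data[5] = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f};

        if ((status = nc_create("example.nc", NC_NETCDF4 | NC_CLOBBER, &ncid)) != NC_NOERR)
            return status;
        nc_def_dim(ncid, "x", 5, &dimid);
        nc_def_var(ncid, "var", NC_FLOAT, 1, &dimid, &varid);
        nc_enddef(ncid);                      /* leave define mode */
        nc_put_var_float(ncid, varid, data);  /* write the whole variable */
        return nc_close(ncid);
    }
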
+ +HDF-EOS to netCDF converter +----------------------------------------------------- + +The Goddard Earth Sciences Data and Information Services Center ([GES +DISC](http://disc.gsfc.nasa.gov)) has developed an on-the-fly HDF-EOS to +netCDF/CF converter for the following products, making them easier to +use in the [Unidata IDV](#IDV) and +[McIDAS-V](http://www.ssec.wisc.edu/mcidas/software/v/): + +- AIRS Level 2 (scene) profiles of moisture, air temperature and trace + gases +- AIRS Level 3 (global grid) profiles of moisture, air temperature and + trace gases +- OMI UV-B at the surface +- TOMS ozone and aerosols + +[Instructions](http://disc.gsfc.nasa.gov/services/NetCDFConversionforIDVandMcIDAS-V.shtml) +are available for searching and converting these data. More information +on AIRS products is available at +. + +HIPHOP +------------------------------- + +[HIPHOP](http://www.knmi.nl/onderzk/atmosam/English/Service/hiphop/hiphop.html), +developed by Dominik Brunner, is a widget based IDL application that +largely facilitates the visualization and analysis of 2D, 3D, and 4D +atmospheric science data, in particular atmospheric tracer distributions +and meteorological fields. +Graphical output of (atmospheric model) data can be quickly generated in +a large number of different ways, including horizontal maps at selected +model or pressure levels, vertical north-south, east-west, or slant +cross-sections (including zonal averages), time slices, animations, etc. +It also allows mathematical operations on the existing fields to +generate new fields for further analysis, and it can be run as a batch +application. + +The program handles data in netCDF, HDF and GRIB format. Interfaces to +other data formats (e.g. ASCII and binary data) can be added easily. + +Beginning with Version 4.0, it also supports the ability to overlay +meteorological fields on a number of different satellite images, and to +draw air parcel trajectories. + +Hyperslab OPerator Suite (HOPS) +--------------------------------------------------------------------------------- + +Hyperslab OPerator Suite +([HOPS](http://www.cgd.ucar.edu/gds/svn/hyperslab.html)), developed by +R. Saravanan at NCAR, is a bilingual, multi-platform software package +for processing data in netCDF files conforming to the NCAR-CCM format or +the NCAR Ocean Model format. HOPS is implemented in [IDL](#IDL), the +widely-used commercial interpreted language, and also in +[Yorick](ftp://ftp-icf.llnl.gov/pub/Yorick/), a public-domain +interpreted language that is freely available from the Lawrence +Livermore National Laboratory. The IDL version of HOPS should run on any +platform supported by IDL. The Yorick version too runs on most common +UNIX platforms, such as Sun, SGI, Cray, and LINUX computers. +HOPS is not a monolithic program, but a suite of operators that act on +data units called "hyperslabs". The design of HOPS is object-oriented, +rather than procedure-oriented; the operators treat the numeric data and +the associated meta-data (like coordinate information) as a single +object. + +Note that HOPS is not a general purpose netCDF utility and works only +for the NCAR CSM netCDF formats. For more information, check the [HOPS +home page](http://www.cgd.ucar.edu/gds/svn/hyperslab.html). + +iCDF (imports chromatographic netCDF data into MATLAB) +----------------------------------------------------------------------------- + +Klavs M. 
Sørensen, Thomas Skov and Rasmus Bro (Faculty of Life Sciences, +University of Copenhagen) have developed +[iCDF](http://www.models.life.ku.dk/source/iCDF/index.asp), a free and +documented toolbox for importing chromatographic data in the +netCDF-based format that most manufacturers of chromatographic software +support. + +The iCDF software is currently for XC-MS data (X: GC, LC, HPLC), but +soon it will be able to import data using other detectors as well. It +can be used to open netCDF files from many different instruments (e.g. +Agilent, Bruker) and many chromatographic software packages (e.g. +ChemStation). + +For more information, see the paper + +> Skov T and Bro R. (2008) Solving fundamental problems in +> chromatographic analysis Analytical and Bioanalytical Chemistry, 390 +> (1): 281-285. + +IDV (Integrated Data Viewer) +-------------------------------------------------- + +Unidata's [Integrated Data Viewer (IDV)](/software/idv/) is a Java +application (for Java 1.4 or later) that can be used to display a +variety of netCDF files, particularly well formatted, geolocated +datasets. Features include: + +- Access to local and remote netCDF files and a variety of [other data + formats](/software/idv/docs/userguide/data/DataSources.html) +- Slicing and probing of multidimensional data +- Support for netCDF conventions (CF, COARDS, NUWG, AWIPS) +- InstallAnywhere installers for easy download and installation +- Save display state to a bundle for easy recreation of views +- Support for non-gridded data through the [Common Data Model + (CDM)](/software/netcdf-java/CDM/) + +The IDV uses the [VisAD Java +library](http://www.ssec.wisc.edu/~billh/visad.html) for interactive and +collaborative visualization and analysis and the [netCDF Java +library](/software/netcdf-java/) for reading and manipulating netCDF +files. + +Ingrid +------------------------------- + +[Ingrid](http://ingrid.ldgo.columbia.edu/), by M. Benno Blumenthal +\, is designed to manipulate large datasets +and model input/output. It can read data from its data catalog, a netCDF +file, or a directly attached model, and output the data, either by +feeding it to a model, creating a netCDF file, or creating plots and +other representations of the data. + +Ingrid has a number of filters which allow simple data manipulations, +such as adding two datasets together, smoothing, averaging, and +regridding to a new coordinate. In addition to netCDF, it also reads +HDF, CDF, VOGL, and SGI GL. + +Ingrid is currently running as a WWW daemon that can be accessed through + to see some of its +capabilities on a climate data catalog maintained by the [Climate +Group](http://rainbow.ldeo.columbia.edu/) of the [Lamont-Doherty Earth +Observatory](http://www.ldeo.columbia.edu/) of Columbia University. To +quote the introduction: + +> The Data Catalog is both a catalog and a library of datasets, i.e. it +> both helps you figure out which data you want, and helps you work with +> the data. The interface allows you to make plots, tables, and files +> from any dataset, its subsets, or processed versions thereof. +> +> This data server is designed to make data accessible to people using +> WWW clients (viewers) and to serve as a data resource for WWW +> documents. Since most documents cannot use raw data, the server is +> able to deliver the data in a variety of ways: as data files (netCDF +> and HDF), as tables (html), and in a variety of plots (line, contour, +> color, vector) and plot formats (PostScript and gif). 
Processing of +> the data, particularly averaging, can be requested as well. +> +> The Data Viewer in particular demonstrates the power of the Ingrid +> daemon. + +Ingrid currently runs on Linux, for which binaries are available. CVS +access to the current source can be arranged. + + Intel Array Visualizer +-------------------------------------------------------------- + +The [Intel® Array +Visualizer](http://www.intel.com/cd/software/products/asmo-na/eng/compilers/226277.htm) +and Intel® Array Viewer are available as [free +downloads](http://www.intel.com/cd/software/products/asmo-na/eng/compilers/226277.htm) +for Windows platforms. They offer an application and a set of software +tools and components, which include C, Fortran, and .Net libraries, for +developing scientific visualization applications and for creating +interactive graphs of array data in various formats, including HDF and +netCDF. + +IVE +------------------------- + +[IVE (Interactive Visualization +Environment)](http://www.atmos.washington.edu/ive/) is a software +package designed to interactively display and analyze gridded data. IVE +assumes the data to be displayed are contained in one- two-, three- or +four-dimensional arrays. By default, the numbers within these arrays are +assumed to represent grid point values of some field variable (such as +pressure) on a rectangular evenly spaced grid. IVE is, nevertheless, +capable of displaying data on arbitrary curvilinear grids. + +If the data points are not evenly spaced on a rectangular grid, IVE must +be informed of the grid structure, either by specifying "attributes" in +the data input or by specifying the coordinate transform in a user +supplied subroutine. Stretched rectangular grids (which occur when the +stretching along a given coordinate is a function only of the value of +that coordinate) can be accommodated by specifying one-dimensional +arrays containing the grid-point locations along the stretched +coordinate as part of the IVE input data. Staggered meshes can also be +accommodated by setting "attributes" in the input data. The structure of +more complicated curvilinear grids must be communicated to IVE via user +supplied "transforms," which define the mapping between physical space +and the array indices. + +Since four-dimensional data cannot be directly displayed on a flat +computer screen, it is necessary to reduced the dimensionality of the +data before it is displayed. One of IVE's primary capabilities involves +dimension reduction or "data slicing." IVE allows the user to display +lower-dimensional subsets of the data by fixing a coordinate or by +averaging over the coordinate. + +IVE currently has the capability to display + +- scalar fields as + - 2D scalar plots + - 1D scalar plots + - vertical soundings + - a single point value +- vector fields as 2D vector plots + +IVE lets you overlay plots, loop plots, and control a wide variety of +display parameters. + +IVE also can perform algebraic computations on the gridded data and can +calculate derivatives. More complicated computations can be performed in +user supplied subroutines. + +IVE uses NetCDF for the data input format, and uses the [NCAR Graphics +Library](http://ngwww.ucar.edu/ng/) to produce graphical output. IVE is +[available](http://www.atmos.washington.edu/ive/getting.html) as source +via anonymous ftp; and as binary on request for licensees of NCAR +graphics. 
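
IVE and similar viewers learn about grid structure and labeling from attributes stored alongside the data. The specific attribute names IVE expects are documented on its site and are not reproduced here; the sketch below simply shows how any such per-variable attributes are attached through the C API, using generic CF-style names as placeholders.

    /* Sketch: attach descriptive attributes to an already-defined variable
     * (ncid/varid from nc_create/nc_def_var, file still in define mode).
     * The attribute names and values are generic examples, not the
     * IVE-specific ones. */
    #include <netcdf.h>
    #include <string.h>

    static int label_variable(int ncid, int varid)
    {
        const char *units = "degrees_Celsius";
        const char *lname = "sea surface temperature";
        float missing = -999.0f;
        int status;

        if ((status = nc_put_att_text(ncid, varid, "units",
                                      strlen(units), units)) != NC_NOERR)
            return status;
        if ((status = nc_put_att_text(ncid, varid, "long_name",
                                      strlen(lname), lname)) != NC_NOERR)
            return status;
        return nc_put_att_float(ncid, varid, "missing_value",
                                NC_FLOAT, 1, &missing);
    }
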
+ +JSON format with the ncdump-json utility +--------------------------------------------------------------- + +Josep Llodrà has developed a program to output the contents of a +netCDF-3 or netCDF-4 file in JSON (JavaScript Object Notation). It is +based on Unidata's NCDUMP utility, and it keeps the original ncdump +functionality, unless the "-j" option is used to specify JSON output. + +The program and source are available from + . + +Java interface +----------------------------------------------- + +The [NetCDF-Java 4.2 Library](/packages/netcdf-java/) is a Java +interface to netCDF files, as well as to many other types of scientific +data formats. It is freely available and the source code is released +under the (MIT-style) netCDF C library license. Previous versions use +the GNU Lesser General Public License (LGPL). + +The library implements a Common Data Model +([CDM](/software/netcdf-java/CDM/)), a generalization of the netCDF, +OpenDAP and HDF5 data models. The library is a prototype for the +netCDF-4 project, which provides a C language API for the "data access +layer" of the CDM, on top of the HDF5 file format. The NetCDF-Java +library is a 100% Java framework for *reading* netCDF and other file +formats into the CDM, as well as *writing* to the netCDF-3 file format. +The library also implements +[NcML](http://www.unidata.ucar.edu/software/netcdf/ncml/), which allows +you to add metadata to CDM datasets, as well as to create virtual +datasets through aggregation. + +Kst (2D plotting tool) +-------------------------------------------- + +[Kst](http://kst-plot.kde.org) is an open-source, cross-platform 2D +plotting tool focused on performance and ease of use. Packages for +Windows, various Linux distributions and Mac OS X are +[available](http://sourceforge.net/projects/kst/files/), as well as the +complete source code and CMake-based build files. A more detailed +presentation of Kst can be found on the web page at +, including numerous screenshots and all the +useful download links. + +Kst is characterized by the following features: + +- Outstanding performance: curves with millions of points are no + problem +- Plotting of live streams +- Out-of-the box support for a variety of formats (currently ASCII, + netCDF, dirfile, Qimage-supported types, fits images) +- User-friendly with a modern and consistent user interface +- A set of unique tools to boost efficiency, including a data import + wizard, capacity to edit multiple objects at once or the "Change + Data File" tool to compare multiple experiments easily +- An active community +- Easily expandable for new data formats or data analysis algorithms + thanks to a plugin-based architecture +- Available on Windows, Linux, and Mac OSX + +Labview interface +----------------------------------------------- + +A netCDF Labview interface, implemented in the Labview programming +language is available. The software includes A graphical user interface +for editing netCDF data and conversion to other data formats. The +package was developed and is maintained by L. F. Hwang of Sun Yat-sen +University in China. For more information and to download the source +code, see the [NetCDFLabview web +site](https://sourceforge.net/projects/netcdflabview/). + +MBDyn (MultiBody Dynamics) +-------------------------------------------------- + +[MBDyn](http://www.aero.polimi.it/~mbdyn/) is an open-source MultiBody +Dynamics analysis system developed at the Dipartimento di Ingegneria +Aerospaziale of the University "Politecnico di Milano", Italy. 
It uses +netCDF as its primary output format. + +MBDyn features the integrated multidisciplinary analysis of multibody, +multiphysics systems, including nonlinear mechanics of rigid and +flexible constrained bodies, smart materials, electric networks, active +control, hydraulic networks, essential fixed-wing and rotorcraft +aerodynamics. It allows users to simulate the behavior of heterogeneous +mechanical, aero-servo-elastic systems based on first principles +equations. It is being actively developed and used in the aerospace and +automotive fields for dynamics analysis and simulation of complex +systems. Dynamic linking of user-defined modules is heavily exploited to +let users extend the feature library. + +Max\_diff\_nc +------------------------------------------- + +This is a program which compares two NetCDF files. Variables with the +same ID in the two files are assumed to be of the same type and have the +same shape. For each such couple of variables, the program computes the +maximum of the absolute value of the difference, and the maximum of the +absolute value of the relative difference. The program also tells you at +what location (the subscript list of the array) the maximum difference +is reached. + +The web page for this program is: + + +This is a freely available tool. + +MeteoExplorer +--------------------------------------------- + +[MeteoExplorer](http://www.eastmodelsoft.com/index_en.htm), developed by +Lianqing Yu at China Meteorological Administration, is a cross-platform +software application for analyzing and rendering atmospheric science and +geoscience data. It supports popular data formats including WMO +GRIB1/GRIB2, NetCDF, and MICAPS, and provides basic GIS functionalities. +Developed with C++, Meteo Explorer targets multiple computing platforms +including Microsoft Windows, GNU Linux, and SGI IRIX operating systems. + +The primary features include: + +- Graphics layer management (navigation and animation) +- Objective analysis of physical elements in surface or upperair + soundings data +- Isoline analysis and shading of grid field +- Streamline analysis of wind field +- Computation of physics elements +- NetCDF data process and display +- GRIB1/GRIB2 data process and display +- MICAPS data process and display +- Satellite nephogram data display and animation, support AWX, GPF and + HDF format +- Interactive composition of synoptic chart (command undo/redo, + automatic save) +- Map zoom, pan, projection and clipping +- Full screen display and zoom to area +- Quick navigation via thumbnail view of graphics layers +- Save screen shot as image file (support formats: BMP, JPG, PNG) +- Vector graphics exported to clipboard or saved as EMF file (Windows + version only) +- Remote desktop connection support +- System configuration (dynamic menu) +- Fast switch of user interface language on the fly + +For more information, please visit [MeteoExplorer's home +page](http://www.eastmodelsoft.com/software/mexplorer.htm) or contact +the support staff via meteoexplorer@hotmail.com . + +MeteoInfo +------------------------------------- + +For better cross-platform support, +[MeteoInfo](http://www.meteothinker.com) has recently been re-developed +using Unidata's NetCDF Java library. MeteoInfo is GIS software for +visualization and analysis of spatial and meteorological data. The Java +edition can be run in Windows, Mac OS, Linux, and Unix systems. 
The +Groovy script engine was coupled in the software, so users can write +Groovy script to run the software automatically for analysis with +complex steps. + +Download: + +Java 6 is needed to run the software. + +MexEPS +------------------------------- + +[PMEL](http://www.pmel.noaa.gov/) has developed a MATLAB interface, +[MexEPS](http://www.epic.noaa.gov/epic/software/mexeps.htm), which +supports several netCDF file conventions, including [those adopted by +PMEL](ftp://ftp.unidata.ucar.edu/pub/netcdf/Conventions/PMEL-EPIC/). +Many styles of time axes are supported and time manipulation routines +ease the use of the time axis in MATLAB. The MexEPS package supports the +following data formats: +- reading, writing and editing netCDF files; +- reading and writing Classic EPIC files +- reading formatted ASCII files + +It includes: +- VARIABLE, AXIS, ATTRIBUTE manipulation routines +- TIME manipulation + - TIME enters MATLAB as YYMMDDhhmmss.fff + - Can be converted to netCDF udunits time convention (e.g. days + *since* 1990-01-01 00:00:00) +- [MATLAB help](ftp://ftp.pmel.noaa.gov/eps/mexeps/help-m/) and + [example scripts](ftp://ftp.pmel.noaa.gov/eps/mexeps/examples/) + using MexEPS +- **ASCII2MAT** mexFunction, which reads a formatted file into MATLAB + as a matrix + +The MexEPS package is freely available in PMEL's anonymous ftp directory + + +If you have any questions or comments, please contact the author, Willa +Zhu [(willa@pmel.noaa.gov)](mailto:willa@pmel.noaa.gov) or Nancy Soreide +(nns@pmel.noaa.gov). + +MEXNC and SNCTOOLS +------------------------------------------ + +John Evans of Rutgers University maintains MEXNC and developed SNCTOOLS. +[MEXNC](http://mexcdf.sourceforge.net/) is a mexfile interface to NetCDF +files for MATLAB that has roughly a one-to-one equivalence with the C +API for netCDF. +[SNCTOOLS](http://mexcdf.sourceforge.net/tutorial/index.html) is a set +of higher-level m-files that sit atop MEXNC, shielding the user from +such low level netCDF details as file IDs, variable IDs, and dimension +IDs. The general philosophy behind SNCTOOLS is providing the ability to +read and write data without trying to invent a new syntax. + +Mirone (Windows MATLAB-based display) +-------------------------------------------------------------- + +Joaquim Luis of Universidade do Algarve has developed +[Mirone](http://w3.ualg.pt/~jluis/mirone/), a Windows MATLAB-based +framework tool that allows the display and manipulation of a large +number of grid/images formats through its interface with the +[GDAL](http://remotesensing.org/gdal/) library. Its main purpose is to +provide users with an easy-to-use graphical interface to manipulate +[GMT](http://gmt.soest.hawaii.edu/) grids. In addition it offers a wide +range of tools dedicated to topics in the earth sciences, including +tools for multibeam mission planning, elastic deformation studies, +tsunami propagation modeling, earth magnetic field computations and +magnetic Parker inversions, Euler rotations and poles computations, +plate tectonic reconstructions, and seismicity and focal mechanism +plotting. The high quality mapping and cartographic capabilities for +which GMT is renowned is guaranteed through Mirone's ability to +automatically generate GMT cshell scripts and dos batch files. + +Although Mirone is written in MATLAB, a stand-alone version to run under +Windows is also provided. Regrettably this version is not as efficient +as the native MATLAB code but provides a solution for users that don't +have MATLAB. + +Also see\ + J. F. 
Luis. Mirone: A multi-purpose tool for exploring grid data. +Computers & Geosciences, 33, 31-41, 2007. + +ncBrowse +----------------------------------- + +Donald Denbo of NOAA's Pacific Marine Environmental Laboratory has +developed and made available +[ncBrowse](http://www.epic.noaa.gov/java/ncBrowse), a Java application +(JDK1.2) that provides flexible, interactive graphical displays of data +and attributes from a wide range of netCDF data file conventions. +Features include: + +- Designed to work with arbitrary netCDF files. +- Browses files using the EPIC and COARDS conventions. +- Provides a "tree" view of the netCDF file. +- Handles character variables. +- Handles dimensions without an associated variable. +- Uses sgt graphics to perform 1- and 2-dimensional cuts through data. +- Saves a single variable to a file as a "cdl" text file. +- InstallAnywhere scripts for UNIX, Win32, and MacOS. +- Currently uses Java 2 and Swing. + +ncBrowse will run on any UNIX or Windows machine with a Java 2 (JDK1.2) +virtual machine installed. Automated installation scripts are available +for Windows and UNIX. Additional information on ncBrowse and download +instructions are available at . + +Questions and suggestions should be directed to +[dwd@pmel.noaa.gov](mailto:dwd@pmel.noaa.gov). If you have problems +reading a netCDF file with ncBrowse, please send him a copy of the file +and he'll get ncBrowse to read it! + +nccmp +----------------------------- + +Remik Ziemlinski of the NOAA Geophysical Fluid Dynamics Laboratory has +developed [nccmp](http://nccmp.sourceforge.net/), a tool to compare two +netCDF files. It can use MPI, include/exclude specific variables or +metadata, and operates quickly. Highly recommended for regression testing +with large datasets. See the Web site +for more information. + +NCL +------------------------- + +The [NCAR Command Language (NCL)](http://www.ncl.ucar.edu/) is an +interpreted programming language for scientific data analysis and +visualization developed and maintained in NCAR's [Computational and +Information Systems Laboratory](http://www.cisl.ucar.edu/). + +NCL has many features common to modern programming languages, including +types, variables, operators, expressions, conditional statements, loops, +and functions and procedures. NCL also has features that are not found +in other programming languages, including those that handle the +manipulation of metadata, the configuration of visualizations, the +import of data from a variety of data formats, and an algebra that +supports array operations. + +NCL has robust file input and output capabilities. It allows different +datasets of different formats (netCDF, netCDF-4 classic, HDF4, HDF4-EOS, +GRIB-1, and GRIB-2) to be imported into one uniform and consistent data +manipulation environment, which internally is the netCDF data format. +NCL doesn't place any restrictions or conventions on the organization of +input netCDF files. + +NCL comes with many useful built-in functions and procedures for +processing and manipulating data. There are over 600 functions and +procedures that include routines for use specifically with climate and +model data, empirical orthogonal functions, Fourier coefficients, +wavelets, singular value decomposition, 1-, 2-, and 3-dimensional +interpolation, approximation, and regridding, and computer analysis of +scalar and vector global geophysical quantities.
+ +The visualizations are publication-quality and highly customizable, with +hundreds of options available for tweaking the looks of your graphics. +NCL can generate contours, XY plots, vectors, streamlines, and can +overlay these plots on many different map projections. There are also +specialized functions for generating histograms, wind roses, meteograms, +skew-T plots, weather maps. + +Included with the software are two command line tools: "ncl\_convert2nc" +for converting GRIB-1/2 or HDF files to netCDF files, and +"ncl\_filedump" which will dump the contents of a file format that NCL +recognizes (netCDF, GRIB-1/2, HDF, etc). + +NCL is available under an open source license or in binary form for +several popular UNIX platforms, including (but not limited to) Linux, +MacOSX, and Windows/Cygwin. + +Documentation and additional information on NCL are available from the +[NCL website](http://www.ncl.ucar.edu/), which contains hundreds of +[application examples](http://www.ncl.ucar.edu/Applications/) for one to +download. You can also contact Mary Haley, at for more +information. + +NCO +------------------------- + +[NCO](http://nco.sourceforge.net) (netCDF operators) is a package of +command line operators that work on generic netCDF or HDF4 files: +- ncap2 - arithmetic processor +- ncatted - attribute editor +- ncbo - binary operator +- ncdiff - differencer +- ncea - ensemble averager +- ncecat - ensemble concatenator +- ncflint - file interpolator +- ncks - kitchen sink (extract, cut, paste, print data) +- ncpdq - permute dimensions quickly +- ncra - running averager +- ncrcat - record concatenator +- ncrename - renamer +- ncwa - weighted averager + +All operators may now be [OPeNDAP](/packages/dods) clients. OPeNDAP +enables network transparent data access to any OPeNDAP server. Thus +OPeNDAP-enabled NCO can operate on remote files accessible through any +OPeNDAP server without transferring the files. Only the required data +(e.g., the variable or hyperslab specified) are transferred. + +The source code is freely available from the [NCO home +page](http://nco.sourceforge.net/), as is the NCO User's Guide. + +For more information, contact the author, Charlie Zender. + +ncregrid +----------------------------------- + +Patrick Jöckel of the Max Planck Institute for Chemistry has developed +**ncregrid**, a tool (written in FORTRAN-90) for data transfer of +gridded 2- and 3-dimensional (spatial) geophysical/geochemical scalar +fields between grids of different resolutions. The algorithm handles +data on rectangular latitude/longitude grids (not necessarily evenly +spaced) and vertical pressure hybrid grids of arbitrary resolution. The +input/output data format is netCDF. ncregrid is freely available without +any warranty under the GNU public license (GPL). ncregrid can be used as +a "stand-alone" program, and/or linked as an interface to a model, in +order to re-grid automatically the input from an arbitrary grid space +onto the required grid resolution. + +More information is available on the web-page: +. + +nctoolbox (a MATLAB common data model interface) +---------------------------------------------------------------------------- + +[nctoolbox](http://nctoolbox.github.io/nctoolbox/) is a MATLAB interface +that provides read-only access to [Common Data +Model](/software/netcdf-java/CDM/index.html) datasets. Under the hood, +nctoolbox uses Unidata's NetCDF-Java as the data access layer. 
This +allows nctoolbox to access to netCDF, OPeNDAP, HDF5, GRIB, GRIB2, HDF4, +and many (15+) other file formats and services using the same API. It +works with MATLAB 2008a and later. The nctoolbox software was developed +by Brian Schlining (MBARI), Rich Signell (USGS), Sachin Kumar Bhate +(freelance), and Alex Crosby (RPS/ASA). + +ncdx +--------------------------- + +Patrick Jöckel of the Max Planck Institute for Chemistry has developed +**ncdx**, a tool (written in FORTRAN-90) that scans a netCDF file and +makes it [OpenDX](#OpenDX) compliant. ncdx is freely available without +any warranty under the GNU public license (GPL). More information is +available on the web-page: +. + +ncensemble +--------------------------------------- + +Alan Iwi, of Rutherford Appleton Laboratory, offers this command line +ensemble statistics utility. More information is available on the +web-page: . + +ncview +------------------------------- + +[Ncview](http://meteora.ucsd.edu/~pierce/ncview_home_page.html) is a +visual browser for netCDF files. Typically you would use ncview to get a +quick and easy, push-button look at your netCDF files. You can view +simple movies of the data, view along various dimensions, take a look at +the actual data values, change color maps, invert the data, etc. It runs +on UNIX platforms under X11, R4 or higher. For more information, check +out the [README](http://meteora.ucsd.edu/~pierce/docs/ncview.README) +file; you can also see a representative [screen +image](http://meteora.ucsd.edu/~pierce/docs/ncview.gif) (GIF, 66K) of +ncview in action. +The source may be downloaded from . +For more information, please contact the author, David W. Pierce at +. + +NetCDF Toolbox for MATLAB-5 +---------------------------------------------------- + +The [NetCDF Toolbox for MATLAB-5](http://mexcdf.sourceforge.net/), +originally developed by Charles R. Denham, combined netCDF-3 with +[MATLAB](http://www.mathworks.com/products/matlab/) to form an interface +that used MATLAB operator-syntax for arithmetic, logical, and +subscripting operations on netCDF entities. The NetCDF Toolbox is in +bug-fix-only mode, and is maintained by John.G.Evans.NE@gmail.com, on +the [MEXNC, SNCTOOLS, and the NetCDF Toolbox](http://mexcdf.sf.net) web +page. + +ncvtk +----------------------------- + +[Ncvtk](http://ncvtk.sourceforge.net/) is a program for exploring +planetary data stored in a NetCDF file. The NetCDF file should loosely +follow the [CF metadata +conventions](http://www.cgd.ucar.edu/cms/eaton/cf-metadata/). + +Ncvtk was designed from the ground up with the aim of offering a high +degree of interactivity to scientists who have a need to explore +structured, three-dimensional, time-dependent climate data on the +sphere. A graphical user interface allows users to interact with their +data via color/transparency/contour/vector plots, apply vertical slices, +probe data, apply an external sun light, overlay hydrographic and +geopolitical data, rotate, zoom, etc. with minimal fuss. + +Ncvtk is written in python and is based on the [Visualization Toolkit +(VTK)](http://public.kitware.com/VTK/). Like python and VTK, Ncvtk is +highly portable and known to run on Windows and Linux (i386, ia64, +EMT64) platforms. More information about Ncvtk is available at +. 
+ +Ivan Shmakov's netcdf tools +---------------------------------------------------------- + +The NetCDF tools is a free software package consisting of a few tools +operating on NetCDF and, by utilizing the compatibility API, HDF4 files, +which are intended to be usable from Shell scripts. + +The currently packaged tools are: + +- a couple of simple shell wrappers over the respective NetCDF + functions (ncattget and ncattput); +- a more sophisticated ncget tool. + +The ncget tool implements functionality that is similar to hdp dumpsds +(for NetCDF, which lacks such a tool), or complements it in the case of +HDF4. It can be seen as a complement to the ncdump tool (included in +both the NetCDF and HDF4 distributions) as well. + +This tool allows a selected part of a NetCDF variable or an HDF4 +scientific data set (SDS) to be extracted in either an ASCII or binary +form, applying the transformation specified by the usual scale\_factor +and add\_offset attributes (each stored value is multiplied by scale\_factor and then add\_offset is added). It allows one to feed the data contained in +NetCDF variables (or HDF4 SDS) to the tools designed to operate on +either ASCII (text) or raw (binary) data. + +This version of the package is the first one to be announced to the +public. It has some known bugs and limitations, but it's proved to be +quite usable. A [project +page](http://freshmeat.net/projects/netcdf-tools) is available on freshmeat.net. The +[source](http://waterlily.siamics.net/~ivan/src/netcdf-tools-0.1-rc1.tar.gz) +is also available. + +netcdf4excel (add-in for MS Excel) +----------------------------------------------------------------- + +Alexander Bruhns has developed [a netCDF add-in written in Visual Basic +for MS Excel](http://code.google.com/p/netcdf4excel/). This add-in +simplifies the use of NetCDF data in Excel, providing a ready-to-use +solution for manipulating this type of data. + +For developers, the open-source code (GPL V3 license) can be downloaded +directly or checked out with Mercurial. + +The add-in is written in VBA 6.0 (so it won't work with Office 2010 64 +bits) and is designed for Excel 2007 running with the Microsoft Windows +operating system. It supports opening netCDF classic format data with +Excel for read or write access. + +More details are available on the [netcdf4excel web +site](http://code.google.com/p/netcdf4excel/). + +NetCDF95 alternative Fortran API +----------------------------------------------------------- + +Lionel Guez has developed and made freely available +[NetCDF95](http://web.lmd.jussieu.fr/~lglmd/NetCDF95), a new alternative +Fortran interface to the NetCDF library. Compared to the +Unidata-provided Fortran 90 netCDF interface, the NetCDF95 interface is +meant to be easier to use and more secure. + +Objective-C API +--------------------------------------------- + +Tom Moore has an Objective-C API, available here: +[www.paleoterra.com/software](http://www.paleoterra.com/software). The +netCDF Framework is an open source (Argonne Open Source License) MacOSX +application framework that provides an Objective-C interface to the NCAR +netCDF library version 3. The framework is available both as source code +and universal compiles (works on both PPC and Intel macs). The source +code has also been compiled by users for the GNUStep environment. +Version 2 of the framework will provide classes for accessing multiple +netCDF files, working with in-memory data slabs using standard notation, +and some support for multithreading.
+ +### Mark Tracy's Objective-C API + +Mark Tracy has written [NetcdfStep](http://www.mt-se.com/nc_1.html), an +Objective-C API for netCDF that uses Objective-C Foundation Classes. + +NetcdfStep is a framework for using the netCDF library in object-oriented +programming with Objective-C. It now supports the full functionality of +netCDF 3.6.2. + +A [complete Mac OS X +distribution](http://www.mt-se.com/pub/NetcdfStep-1.0.2.zip) including +a pre-built static library and [online +documentation](http://www.mt-se.com/netcdfstep_doc/) are available. +Applications linked to this framework have no external dependencies +(other than Mac OS X itself). A [source-code only +distribution](http://www.mt-se.com/pub/NetcdfStep-GNUstep-0.6.1.tar.gz) +synced up to version 0.6.1 is available for GNUstep for use on Linux and +other Unix platforms. + +Octave interface +---------------------------------------- + +The ARM Program has contributed NCMEX for Octave, a port of Chuck +Denham's MATLAB NCMEX to [Octave](http://www.octave.org). The calling +syntax is identical, so scripts using NCMEX in MATLAB should in theory +be portable to Octave. In order to build NCMEX, a compiled C NetCDF +library must already be installed. + +In addition to the base NetCDF library interface, this package includes +a simple toolbox to automate the reading and writing of NetCDF files +within Octave using NCMEX. These tools as well as the source for NCMEX +are available from + (NOTE: this .tar +file contains other Octave extension functions besides NCMEX.) + +Also see [Octcdf](http://ocgmod1.marine.usf.edu/octcdf/), a netCDF +toolbox for Octave. + +For installation instructions, see the README file inside the .tar file. + +Octave interface (Barth) +------------------------------------------------- + +Alexander Barth has contributed the following: + +Octcdf is a netCDF toolbox for [Octave](http://www.octave.org/) which +uses the same operator syntax as the [matlab netCDF +toolbox](http://mexcdf.sourceforge.net/netcdf_toolbox.html) of Charles +R. Denham. NetCDF dimensions, attributes and variables are Octave +objects and can be accessed, sliced and changed just as regular +variables. Unlike most netCDF toolboxes for matlab, it does not depend +on the NCMEX wrapper around the netCDF interface. This Octave toolbox is +written in C++, calling the netCDF library directly. The octcdf toolbox +can also be used to download data from an OpenDAP server. The octcdf +source code is available at +. +It was also included in the Octave Repository +[octave-forge](http://octave.sourceforge.net/). + +OPeNDAP (formerly DODS) +------------------------------------------------- + +The [OPeNDAP](http://opendap.org/) (formerly known as DODS) is an +Open-source Project for a Network Data Access Protocol that makes local +data and subsets of local data accessible to remote locations +independent of the local storage format. OPeNDAP also provides tools for +transforming existing applications into OPeNDAP clients, enabling them +to remotely access OPeNDAP served data. OPeNDAP is based on existing +data access tools; rather than developing a self-contained system, it +makes extensive use of existing data access APIs. + +OPeNDAP can be used to make netCDF data files available over the +Internet and it can also be used to adapt existing software which uses +the netCDF API (by re-linking) to read data served by an OPeNDAP data +server.
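+
+For example, a client built against a DAP-enabled netCDF library can open a
+remote dataset simply by passing a URL where a file name would normally go.
+The following minimal sketch uses the netcdf4-python module described later
+on this page; the server URL and the variable name are hypothetical:
+
+    from netCDF4 import Dataset
+
+    # Hypothetical OPeNDAP endpoint; assumes a DAP-enabled netCDF build.
+    url = "http://example.org/opendap/sst.nc"
+    ds = Dataset(url)                  # opened just like a local file
+    sst = ds.variables["sst"]          # "sst" is an assumed variable name
+    subset = sst[0, 10:20, 10:20]      # only this hyperslab crosses the network
+    print(subset.shape)
+    ds.close()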
In principle, any program written using netCDF can be adapted to +read data from an OPeNDAP server - in other words any program which uses +netCDF can become a client in the OPeNDAP client-server system. Included +in the source and binary distributions are two freely available programs +that have already been modified (re-linked). + +With a client program accessing data from a netCDF server, it is +possible to access a small subset of a large dataset over the Internet +without copying the entire dataset (as you would have to do with FTP or +AFS). The client can see changes to the netCDF dataset, e.g. when new +records are added (which would not be possible with FTP). Finally, the +client can also access cross-sections of variable data without paging +large amounts of data across the network (as you would have to do with +NFS, for example). + +OPeNDAP software is freely available in both source form or binary form +for selected platforms. + +OpenDX +------------------------------- + +[OpenDX](http://www.opendx.org/about.html) (formerly IBM Data Explorer, +also known as simply DX) is a general-purpose software package for data +visualization and analysis. It employs a data-flow driven client-server +execution model and provides a graphical program editor that allows the +user to create a visualization using a point and click interface. +DX runs on 7 major UNIX platforms as well as Windows 95/NT and is +designed to take full advantage of multi-processor systems from IBM, SGI +and Sun. + +DX is built upon an internal data model, which describes and provides +uniform access services for any data brought into, generated by, or +exported from the software. This data model supports a number of +different classes of scientific data, which can be described by their +shape (size and number of dimensions), rank (e.g., scalar, vector, +tensor), type (float, integer, byte, etc. or real, complex, quaternion), +where the data are located in space (positions), how the locations are +related to each other (connections), aggregates or groups (e.g., +hierarchies, series, composites, multizone grids, etc.). It also +supports those entities required for graphics and imaging operations +within the context of Data Explorer. Regular and irregular, deformed or +curvilinear, structured and unstructured data as well as "missing" or +invalid data are supported. + +The details of the data model are hidden at the user level. As a result +DX operations or modules are polymorphic and appear typeless. The DX +Import module, which reads data for use within Data Explorer directly +utilizes data in netCDF as well as other formats (e.g., HDF, CDF). One +or more variables may be selected as well as step(s) of a time series. +Data in conventional netCDFs are directly imported. Since the DX data +model is more comprehensive than the netCDF data model, a methodology to +extend netCDF via attribute conventions (e.g., for unstructured meshes, +non-scalar data and hierarchies) for use with Data Explorer is +available. + +DX supports a number of realization techniques for generating renderable +geometry from data. These include color and opacity mapping (e.g., for +surface and volume rendering), contours and isosurfaces, histograms, +two-dimensional and three-dimensional plotting, surface deformation, +etc. for scalar data. For vector data, arrow plots, streamlines, +streaklines, etc. are provided. Realizations may be annotated with +ribbons, tubes, axes, glyphs, text and display of data locations, meshes +and boundaries. 
Data probing, picking, arbitrary surface and volume +sampling, and arbitrary cutting/mapping planes are supported. + +DX supports a number of non-graphical functions such as point-wise +mathematical expressions (e.g., arithmetic, transcendental, boolean, +type conversion, etc.), univariate statistics and image processing +(e.g., transformation, filter, warp, edge detection, convolution, +equalization, blending, morphological operations, etc.). Field/vector +operations such as divergence, gradient and curl, dot and cross +products, etc. are provided. Non-gridded or scattered data may be +interpolated to an arbitrary grid or triangulated, depending on the +analysis requirements. The length, area or volume of various geometries +may also be computed. Tools for data manipulation such as removal of +data points, subsetting by position, sub/supersampling, grid +construction, mapping, interpolation, regridding, transposition, etc. +are available. + +Tools for doing cartographic projections and registration as well as +earth, space and environmental sciences examples are available at +Cornell University via info.tc.cornell.edu. Also see the [ncdx](#ncdx) +tool for making netCDF files OpenDX compliant. + +Panoply +--------------------------------- + +[Panoply](http://www.giss.nasa.gov/tools/panoply/) is an application +that plots geo-gridded and other arrays from netCDF, HDF, GRIB, and +other datasets. Features include: + +- Slice and plot geo-gridded latitude-longitude, latitude-vertical, + longitude-vertical, or time-latitude arrays from larger + multidimensional variables. +- Two arrays may be combined in one plot by differencing, summing, or + averaging. +- Lon-lat data may be plotted as global maps (using any of over 75 map + projections) or as zonal average plots. +- Overlay continent outlines or masks on lon-lat plots. +- Use your favorite CPT, GGR, PAL, or ACT color table for scale + colorbar. +- Save plots to disk in GIF, JPEG, PNG or TIFF bitmap images or as PDF + or PostScript graphics files. +- Export lon-lat map plots in KMZ format. +- Export animations as AVI or MOV video or as a collection of + individual frame images. +- Explore remote THREDDS and OpenDAP catalogs and open datasets served + from them. + +Panoply requires that your computer have a Java SE 6 runtime +environment, or better, installed. + +Panoply is developed at the NASA Goddard Institute for Space Studies. +Questions and suggestions should be directed to [Dr. Robert B. +Schmunk](http://www.giss.nasa.gov/staff/rschmunk.html). + +Parallel-NetCDF +------------------------------------------------- + +A group of researchers at Northwestern University and Argonne National +Laboratory (Jianwei Li, Wei-keng Liao, Alok Choudhary, Robert Ross, +Rajeev Thakur, William Gropp, and Rob Latham) have designed and +implemented a new [parallel interface for writing and reading netCDF +data](http://www.mcs.anl.gov/parallel-netcdf/), tailored for use on high +performance platforms with parallel I/O. The implementation builds on +the MPI-IO interface, providing portability to most platforms in use and +allowing users to leverage the many optimizations built into MPI-IO +implementations. Testing so far has been on Linux platforms with ROMIO +and IBM SP machines using IBM's MPI. + +Documentation and code for Parallel-NetCDF is now available for testing.
+Although a few interfaces are not implemented yet, the current +implementation is complete enough to provide significant I/O performance +improvements on parallel platforms, as described in a [technical +report](ftp://info.mcs.anl.gov/pub/tech_reports/reports/P1048.pdf). +Users are invited to test Parallel-NetCDF in their applications. + +Paraview and vtkCSCSNetCDF +----------------------------------------------------- + + + +ParaView is an application designed with the need to visualize large +data sets in mind. The goals of the ParaView project include the +following: + +- Develop an open-source, multi-platform visualization application. +- Support distributed computation models to process large data sets. +- Create an open, flexible, and intuitive user interface. +- Develop an extensible architecture based on open standards. + +ParaView runs on distributed and shared memory parallel as well as +single processor systems and has been successfully tested on Windows, +Linux and various Unix workstations and clusters. Under the hood, +ParaView uses the Visualization Toolkit as the data processing and +rendering engine and has a user interface written using a unique blend +of Tcl/Tk and C++. + +A vtk/ParaView reader for netCDF files can be found here. + +Perl interfaces +-------------------------------------- + +There are two netCDF interfaces for Perl: +- [PDL::NetCDF](http://search.cpan.org/~dhunt/PDL-NetCDF-4.05/netcdf.pd), + Doug Hunt's perl interface which uses the PDL (perl data language) + extension. +- [NetCDFPerl](/software/netcdf-perl/), Steve Emmerson's extension + module, based on version 2 of the netCDF package. Uses perl lists + for representing netCDF variables. + +PolyPaint+ +--------------------------------------- + +[PolyPaint+](http://lasp.colorado.edu/polypaint/home.html) is an +interactive scientific visualization tool that displays complex +structures within three-dimensional data fields. It provides both color +shaded-surface display and simple volumetric rendering in either index +or true color. For shaded surface rendering, the PolyPaint+ routines +first compute the polygon set that describes a desired surface within +the 3D data volume. These polygons are then rendered as continuously +shaded surfaces. PolyPaint+ contains a wide variety of options that +control lighting, viewing, and shading. Objects rendered volumetrically +may be viewed along with shaded surfaces. Additional data sets can be +overlaid on shaded surfaces by color coding the data according to a +specified color ramp. 3D visualizations can be viewed in stereo for +added depth perspective. +Currently supported 3D visualizations are the following: + +- Shaded isosurface +- Transparent contour shells or isosurfaces at varying levels +- Volumetric or density plot +- Planes +- Contour ribbons +- Topographic surface from 2D geographic data sets + +3D data volumes may be sliced in the X, Y, or Z plane using an +interactive cutting plane. A cross section of the data volume can be +viewed in a 2D window as a 2D contour plot, a vector plot, a raster +image or a combination of these options superimposed. Map outlines can +be used as a background for 2D cross section plots of geographic data. +All data is projected according to the coordinates specified by the user +for the cross section window. + +The user interface provides direct manipulation tools for specifying the +eye position, center of view, light sources, and color ramps. 
Subsetting +of data can be done easily by selecting the data by index or geographic +coordinate. On-line contextual help provides easy access to more detail +about the software. Tutorials which range from very simple +visualizations to complex combinations of data sets provide the user +with a quick learning tool. + +Currently PolyPaint+ accepts only data which is in the NetCDF file +format. A file conversion utility which converts from raw binary data to +netCDF is part of the application. + +PolyPaint+ is a joint effort of the University of Colorado and NCAR +(National Center for Atmospheric Research) funded by the NASA AISRP +program. A beta version of PolyPaint+ is currently available free of +charge using FTP or for a nominal fee which would cover tape +distribution. A license agreement must be signed in order to use it. + +You may order by... + +- TELEPHONE : 303-492-7289 (Margi Klemp) : 303-497-8159 (Bill Boyd) +- U.S. MAIL : + + Margi Klemp + University of Colorado / LASP + 1234 Innovation Dr. + Boulder, CO 80303 + USA + +- E-MAIL : margi@aries.colorado.edu + +Pomegranate +----------------------------------------- + +The P9E Team at NASA JPL has developed +[Pomegranate](http://pomegranate.jpl.nasa.gov/), a Python application +that "webifies" science data files. Supported formats include netCDF, +HDF4, HDF5, GRIB and FITS. + +Pomegranate can be installed on web servers as either a WSGI or CGI +application to provide webification (w10n) services. To learn more about +w10n of science data files, please visit . A +brief [help](http://pomegranate.jpl.nasa.gov/test/help.txt) document +describes how to use the [demo +directory](http://pomegranate.jpl.nasa.gov/test) to browse or download +metadata or data in netCDF, JSON, or other formats by clicking on data +folder and document icons. + +Pomegranate can also be used as a standalone library or command line +application. This greatly simplifies the retrieval of metadata and data +from files in supported formats. + +Pomegranate is open source software and can be downloaded from +. + +PyNGL and PyNIO +--------------------------------------- + +NCAR's Computational and Information Systems Laboratory has developed +[PyNGL](http://www.pyngl.ucar.edu/), a Python package for scientific +visualization and data analysis, and +[PyNIO](http://www.pyngl.ucar.edu/Nio.shtml), a Python package +supporting access to a variety of data formats using an interface +modelled on netCDF. + +Python interfaces +------------------------------------------ + +Python is an interpreted, object-oriented language that is supported on +a wide range of hardware and operating systems. Python information and +sources can be obtained from . There are now +several netCDF interfaces for Python. + +Jeff Whitaker of the NOAA Earth System Research Lab has developed a +netCDF-4 module for Python: . +Most new features of netCDF-4 are implemented, such as multiple +unlimited dimensions, groups and zlib data compression. All the new +numeric data types (such as 64-bit and unsigned integer types) are +implemented. Compound and variable length (vlen) data types are +supported, but the enum and opaque data types are not. Mixtures of +compound and vlen data types (compound types containing vlens, and vlens +containing compound types) are not supported. + +[xray](#xray) is a higher-level interface that uses netcdf4-python +internally to implement a pandas-like package for N-D labelled arrays +for scientific data.
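+
+As a brief illustration of the netcdf4-python module described above, the
+following minimal sketch (the file, group, and variable names are made up)
+exercises a few of the netCDF-4 features it exposes, namely groups, an
+unlimited dimension, and zlib compression:
+
+    import numpy as np
+    from netCDF4 import Dataset
+
+    ds = Dataset("example_nc4.nc", "w", format="NETCDF4")
+    grp = ds.createGroup("forecasts")             # a netCDF-4 group
+    grp.createDimension("time", None)             # unlimited dimension
+    grp.createDimension("lat", 73)
+    temp = grp.createVariable("temp", "f4", ("time", "lat"),
+                              zlib=True, complevel=4)  # compressed variable
+    temp[0, :] = np.linspace(250.0, 300.0, 73)    # writing grows the time axis
+    ds.close()
+
+Reading the file back follows the same pattern, via the Dataset object's
+groups and variables dictionaries.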
+ +André Gosselin of the Institut Maurice-Lamontagne, Pêches & Océans +Canada, has implemented pycdf, a new Python interface to the netCDF +library. It is available from , +where you will find the install files, installation instructions, +extensive documentation in text and HTML format, and examples. pycdf +requires the Numeric Python package, and installs through the simple +"python setup.py install" command. + +Bill Noon (noon@snow.cit.cornell.edu) has implemented another netCDF +Python module that allows easy creation, access, and browsing of netCDF +data. The bindings also use the [udunits library](/software/udunits/) to +do unit conversions. More information and source for Noon's Python +netCDF module are available from +. + +The package from Konrad Hinsen has been integrated into his +[ScientificPython](https://sourcesup.cru.fr/projects/scientific-py/) +package. + +Dave Brown of NCAR's Computational and Information Systems Laboratory +has developed [PyNIO](http://www.pyngl.ucar.edu/Nio.shtml), a Python +package that allows read and/or write access to a variety of data +formats using an interface modelled on netCDF. Currently supported +formats include netCDF, HDF4, GRIB1 and GRIB2 (read only), and HDF-EOS 2 +Grid and Swath data (read only). + +Vicente Galiano of Miguel Hernandez University has developed a Python +interface to PnetCDF. This Python package, called "PyPnetCDF", allows +access to NetCDF files using MPI and the library pnetCDF developed by +http://www.mcs.anl.gov/parallel-netcdf/. The tools are very similar to +Konrad Hinsen's NetCDF package for Python but can read and write in +parallel. For more information, see: +. + +Pupynere (PUre PYthon NEtcdf REader) +------------------------------------------------------------- + +Roberto De Almeida has developed +[pupynere](http://pypi.python.org/pypi/pupynere/), a PUre PYthon NEtcdf +REader that allows read-access to netCDF files using the same syntax as +the Scientific.IO.NetCDF Python module. Even though it's written in +Python, the module is up to 40% faster than Scientific.IO.NetCDF and +pynetcdf. + +R interface +------------------------------- + +The R Project for Statistical Computing has developed +[R](http://www.R-project.org/), a language and environment for +statistical computing and graphics. It provides a wide variety of +statistical and graphical techniques, including linear and nonlinear +modelling, statistical tests, time series analysis, classification, and +clustering. + +David Pierce has contributed the [ncdf4 +package](http://cran.r-project.org/web/packages/ncdf4/index.html) for +reading netCDF data into R and for creating new netCDF dimensions, +variables, and files, or manipulating existing netCDF files from R. + +Pavel Michna has contributed another package, +[RNetCDF](http://cran.r-project.org/web/packages/RNetCDF/index.html), +that also provides access to netCDF data and to udunits calendar +functions from R. + +Robert Hijmans (with additional contributors) has created the [R raster +package](http://cran.r-project.org/web/packages/raster/index.html) for +geographic data analysis and modeling. The raster package can be used +for reading, writing, manipulating, analyzing and modeling gridded +spatial data. The package is especially useful for large datasets that +don't fit into memory, because data is processed in chunks. See +[Introduction to the 'raster' +package](http://cran.r-project.org/web/packages/raster/vignettes/Raster.pdf) +for more information.
+ +Quantum GIS (QGIS) +----------------------------------------- + +[Quantum GIS](http://www.qgis.org/) (QGIS) is an Open Source Geographic +Information System (GIS) licensed under the GNU General Public License. +QGIS is an official project of the Open Source Geospatial Foundation +(OSGeo). It runs on Linux, Unix, Mac OSX, and Windows and supports +numerous vector, raster, and database formats and functionalities. QGIS +supports a desktop, browser, server, and client for viewing, editing, +analysis, serving, and accessing data. Its server complies with the OGC +WMS 1.3 standard. In addition to PostGIS and SpatiaLite formats, it can +access data in vector formats supported by the OGR library as well as +most raster formats supported by the GDAL library, including netCDF. For +a more detailed list of features of the QGIS desktop, browser, server, +and client, see the [QGIS features +page](http://www.qgis.org/en/about-qgis/features.html). + +Ruby interface +------------------------------------- + +A group at the Research Institute for Sustainable Humanosphere (RISH) of +Kyoto University has developed a [netCDF interface for +Ruby](http://www.gfd-dennou.org/arch/ruby/products/ruby-netcdf/), an +interpreted, object-oriented scripting language. This interface is +intended to cover all the functionality of the C library for netCDF. +Also available are combination functions such as iterators (which offer +abstract ways to scan files and variables). Numeric arrays are handled +by the "NArray" multi-dimensional array class, which is becoming the de +facto standard multi-dimensional array for Ruby. See also the Ruby-based +[GPhys software and Gfdnavi tool](#Gfdnavi) for accessing GRIB, GrADS, +and netCDF data uniformly. + +More information about Ruby is available from the [Ruby web +site](http://www.ruby-lang.org/). + +Scientific DataSet (SDS) Library +------------------------------------------------------ + +The [Scientific DataSet Library and Tools +project](http://sds.codeplex.com), developed jointly by Microsoft +Research Cambridge and Moscow State University, is aimed at manipulation +and visualization of multidimensional data sets. + +Scientific DataSet (or SDS for short) is a .NET class library for +manipulating scientific data and their metadata. SDS provides a unified +API for convenient access to various types of data storage. Three types of +storage are supported by the first release: NetCDF files, CSV text +files and volatile in-memory datasets. SDS uses a native NetCDF library +built from version 4.0.1 for both 32- and 64-bit Windows platforms. New +storage types can be added to the SDS infrastructure as plugins. Support for +accessing TIFF image files from SDS as 2D arrays will be available soon +as a separate CodePlex project. + +Three applications are built on top of SDS: + +- sds command line utility. It allows users to examine data set + schemas, copy data sets, and modify their metadata. +- DataSetViewer application for visualization of data sets. + DataSetViewer is both a standalone application and a Windows + Presentation Foundation control that can be built into your + applications. DataSetViewer has support for interactive slicing of + multidimensional data along any dimension. +- DataSetEditor add-in for Microsoft Office Excel. DataSetEditor + provides the ability to view and modify the contents of any data set as + Excel worksheets. + +You can read the Getting Started document at + +for a more detailed introduction to the Scientific DataSet software.
A +Windows installation package for SDS binaries, along with DataSet Viewer +and DataSet Editor, is also available. You can also build core class +libraries and the sds utility under Mono. You may use, copy, and +reproduce this software for any non-commercial purpose. For further +details see the license at . + +The SDS project is in beta phase and keeps evolving. You are welcome to +join discussions or report issues at the CodePlex site: +. + +Apache Spatial Information System (SIS) +------------------------------------------------------------- + +[Apache Spatial Information System +(SIS)](https://builds.apache.org/job/sis-trunk/site/index.html) is a +Java library for developing geospatial applications. SIS enables +representation of coordinates for searching, data clustering, archiving, +or any other relevant spatial needs. The library is an implementation of +GeoAPI 3.0 interfaces and can be used for desktop or server +applications. + +SIS provides data structures for geographic data and associated metadata +along with methods to manipulate those data structures. The SIS metadata +module forms the base of the library and enables the creation of +metadata objects which comply with the ISO 19115 metadata model and +which can be read from or written to ISO 19139 compliant XML documents. +The SIS referencing module will enable the construction of geodetic data +structures for geospatial referencing based on the ISO 19111 model such +as axis, projection and coordinate reference system definitions, along +with the associated operations which enable the mathematical conversion +of coordinates between different systems of reference. The SIS storage +modules will provide a common approach to the reading and writing of +grid coverages applicable to simple imagery and multidimensional data +structures. + +SIS supports creating ISO 19115 metadata from metadata in a netCDF store +from a given file, URL, stream, or NetcdfFile object. SIS netCDF storage +is intended to be a bridge between NetCDF Climate and Forecast (CF) +conventions and ISO 19115 metadata. + +SIS is under development as an Apache project. Release 0.3 is currently +available for download. + +Tcl/Tk interfaces +------------------------------------------ + +Dan Schmitt has developed [cdftcl](http://cnrit.tamu.edu/rsg/cdftcl/), a +[Tcl/Tk](http://www.scriptics.com/) interface for netCDF. It allows the +use of "wildcards" (\*) or ranges (1-4) in the subscript notation, and +use of name references instead of variable IDs. Contact dan@computer.org +for more information. + +Tcl-nap +--------------------------------- + +[Tcl-nap](http://tcl-nap.sourceforge.net) (n-dimensional array +processor) is a loadable extension of Tcl which provides a powerful and +efficient facility for processing data in the form of n-dimensional +arrays. It has been designed to provide an array-processing facility +with much of the functionality of languages such as +[APL](http://www.acm.org/sigapl/), Fortran-90, [IDL](#IDL), +[J](http://www.jsoftware.com/), [matlab](http://www.mathworks.com), and +[octave](http://www.octave.org/). + +Support is provided for data based on n-dimensional grids, where the +dimensions correspond to continuous spatial coordinates. There are +interfaces to the HDF and netCDF file formats commonly used for such +data, especially in Earth sciences such as Oceanography and Meteorology. + +The internal data structure is called a NAO (n-dimensional array object) +and contains similar information to that of HDF SDSs and netCDF +variables.
+ +Tcl-nap was developed as part of the [CSIRO CAPS +project](http://www.dar.csiro.au/rs/avhrr_processing_software.htm), but +can be loaded and used without the (satellite oriented) CAPS extension. + +Visual Basic and VB.net interfaces +------------------------------------------------------- + +Carsten Wieczorrek has developed code in VB 6 to export chromatographic +data into the netcdf/ANDI format. The application writes netCDF files +that can be read by CHROMELEON, for example. For others interested in +programming with netcdf.dll from VB 6, see Wieczorrek's web page on +[netCDF and VB 6.0](http://www.mn-net.com/netcdf_vb6) and for VB.net, +see [netCDF and VB.net](http://www.mn-net.com/netcdf_vbnet). + +VisAD +----------------------------- + +[VisAD](http://www.ssec.wisc.edu/~billh/visad.html) is a Java class +library for interactive and collaborative visualization and analysis of +numerical data. It combines: +- The use of pure Java for platform independence and to support data + sharing and real-time collaboration among geographically distributed + users. Support for distributed computing is integrated at the lowest + levels of the system using Java RMI distributed objects. +- A general mathematical data model that can be adapted to virtually + any numerical data, that supports data sharing among different + users, different data sources and different scientific disciplines, + and that provides transparent access to data independent of storage + format and location (i.e., memory, disk or remote). The data model + has been adapted to netCDF, FITS, HDF-EOS, McIDAS, Vis5D, GIF and + JPEG file formats. +- A general display model that supports interactive 3-D, data fusion, + multiple data views, direct manipulation, collaboration, and virtual + reality. The display model has been adapted to Java3D and Java2D and + used in an ImmersaDesk virtual reality display. +- Data analysis and computation integrated with visualization to + support computational steering and other complex interaction modes. +- Support for two distinct communities: developers who create domain- + specific systems based on VisAD, and users of those domain-specific + systems. VisAD is designed to support a wide variety of user + interfaces, ranging from simple data browser applets to complex + applications that allow groups of scientists to collaboratively + develop data analysis algorithms. +- Developer extensibility in as many ways as possible. + +VisAD was written by programmers at the [SSEC Visualization +Project](http://www.ssec.wisc.edu/~billh/vis.html) at the University of +Wisconsin-Madison [Space Science and Engineering +Center](http://www.ssec.wisc.edu/), and the [Unidata Program +Center](/index.html). + +WebWinds +----------------------------------- + +[WebWinds](http://www.openchannelsoftware.com/projects/WebWinds/) is a +free Java-based science visualization and analysis package. In addition +to several new analysis tools, the current fourth version does automatic +scripting. This allows + +1. a user to rapidly and automatically create and store a session, + either for his own use, or for use by a collaborator on another + machine; +2. a data provider to automatically create a specialized analysis + environment which can be downloaded (as a small script file) along + with a dataset from a Website; and +3. realtime collaboration or sharing of sessions over (even + low-bandwidth) networks, including the Internet. + +This scripting requires no knowledge of the scripting language syntax. 
+Several sample script files are included with the distribution. + +In addition, this version contains a capability to geo-reference some +data and to read ASCII data in tabular format. Also new is the ability +to output data in numerical form (e.g. NetCDF) and a context sensitive, +integrated help system. + +As with earlier versions, data in several different formats, including +NetCDF, can be read in easily from your local machine or from the Web. +In addition, most data can be subset or subsampled on load, making it +possible to visualize very large multidimensional and/or multispectral +datasets. The package includes several step-by-step examples. +Installation of the software (including Java) on the PC or Mac is a +process requiring one file to be downloaded and opened. If you need help +getting started, a remote tutorial is available once you've downloaded +the package. + +WebWinds is \`point and click' rather than language driven and it runs +well on Unix, Windows (95/98/NT) and Mac platforms. It currently +requires JDK 1.1. To download a copy of this release, go to + + +xray (Python N-D labelled arrays) +-------------------------------------------------------- + +[xray](http://xray.readthedocs.org/en/stable/index.html) is an open +source project and Python package that aims to bring the labeled data +power of [pandas](http://pandas.pydata.org/) to the physical sciences, +by providing N-dimensional variants of the core pandas data structures, +Series and DataFrame: the xray DataArray and Dataset. + +xray adopts the [Common Data +Model](http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/CDM) +for self-describing scientific data in widespread use in the Earth +sciences (e.g., netCDF and OPeNDAP): xray.Dataset is an in-memory +representation of a netCDF file. + +xray is being developed by Stephan Hoyer, Alex Kleeman, and [other +contributors](https://github.com/xray/xray/graphs/contributors). + +Zebra +----------------------------- + +[Zebra](http://www.atd.ucar.edu/rdp/zebra.html) (formerly named Zeb) is +a system for data ingest, storage, integration and display, designed to +operate in both real time and postprocessing modes. Zebra was developed +by Jonathan Corbet and others in NCAR's [Research Data +Program](http://www.atd.ucar.edu/rdp/rdp_home.html). +Zebra's primary use is for the superpositioning of observational data +sets (such as those collected by satellite, radar, mesonet and aircraft) +and analysis products (such as model results, dual-Doppler synthesis or +algorithm output). Data may be overlaid on a variety of display types, +including constant altitude planes, vertical cross-sections, X-Y graphs, +Skew-T plots and time-height profiles. The fields for display, color +tables, contour intervals and various other display options are defined +using an icon based user-interface. This highly flexible system allows +scientific investigators to interactively superimpose and highlight +diverse data sets; thus aiding data interpretation. + +Data handling capabilities permit external analysis programs to be +easily linked with display and data storage processes. The data store +accepts incoming data, stores it on disk, and makes it available to +processes which need it. An application library is available for data +handling. The library functions allow data storage, retrieval and +queries using a single applications interface, regardless of the data's +source and organization. NetCDF data that conforms to Zebra conventions +is supported by this interface. 
+ +Zebra is currently available to the university research community +through the NCAR/ATD Research Data Program. Email requests to +rdp-support@atd.ucar.edu. More information is on the web page +http://www.atd.ucar.edu/rdp/zebra.html. + +------------------------------------------------------------------------ + +User-Contributed Software +================================================ + +Unidata makes available a separate +[catalog](/software/netcdf/Contrib.html) to a +[directory](ftp://ftp.unidata.ucar.edu/pub/netcdf/contrib/) of freely +available, user-contributed software and documentation related to the +netCDF library. This software may be retrieved by anonymous FTP. We +haven't necessarily used or tested this software; we make it available +"as is". + +The criteria for inclusion in the netcdf/contrib/ directory of +user-contributed software are: + +- General usefulness to a significant part of the netCDF community +- Small size +- Infrequent need for updates +- Free availability + +------------------------------------------------------------------------ + +Commercial or Licensed Packages {#commercial} +=============================== + +ASA ViewNcDap +----------------------------------------- + +Applied Science Associates, Inc. has made the ASA View NC/Dap +application freely available for +[download](http://www.asascience.com/downloads). ViewNcDap is a +stand-alone research-based tool (with included demonstration data) that +allows a user to visualize four dimensional NetCDF and OPeNDAP data. +ViewNcDap is a Windows application that includes temporal/time step +functionality for viewing animations of data that include temporal +information. The application may be used to visualize a variety of +time-varying geospatial scientific data in a simple map framework. It +handles CF conventions and includes some aliasing features that could +permit additional formats to be read. It should not be considered a GIS +system, but is used to quickly preview a variety of data on a simple +map. Data may also be filtered and saved to a local netCDF file. + +Avizo +----------------------------- + +[Avizo](http://www.avizo3d.com/) software is a powerful tool for 3D data +visualization and analysis. It offers a comprehensive feature set that +addresses visualization, processing, analysis, communication and +presentation. [Avizo Green +Edition](http://www.vsg3d.com/vsg_prod_avizo_green.php) includes an +advanced set of features dedicated to climate, oceanography, +environmental or earth-mapped data. It provides high-level support for +the netCDF format, a dedicated Earth visualization module, and a set of +advanced geographical projections applicable to a wide range of fast 2D +and 3D data representations. + +For more information, see [www.avizo3d.com](http://www.avizo3d.com/). + +AVS +------------------------- + +[AVS](ftp://testavs.ncsc.org/avs/Info/WHAT_IS_AVS) (Application +Visualization System) is a visualization application software and +development environment. An AVS module has been written that allows +multi-dimensional netCDF data sets to read into AVS as uniform or +rectilinear field files. The AVS user can point and click to specify the +name of the variable in the selected netCDF file, as well as selecting +the hyperslab. If 1D coordinate variables exist (a variable that has the +same name as a dimension) then the coordinate variable will be used to +specify the coordinates of resulting rectilinear field file. 
If no +coordinate variable exists, then the resulting field file will be +uniform. Once in AVS, there are hundreds of analysis and display modules +available for image processing, isosurface rendering, arbitrary slicing, +alpha blending, streamline and vorticity calculation, particle +advection, etc. AVS runs on many different platforms (Stardent, DEC, +Cray, Convex, E and S, SET, Sun, IBM, SGI, HP, FPS and WaveTracer), and +it has a flexible data model capable of handling multidimensional data +on non-Cartesian grids. +The module source code and documentation is available from the +[International AVS Center](http://iac.ncsc.org/), in the + +directory. + +See also the information on [DDI](#DDI) for another way to use netCDF +data with AVS. + +Barrodale UFI +--------------------------------------- + +[Barrodale Computing Services Ltd.](http://www.barrodale.com) (BCS) has +developed a product that addresses one of the main objections heard from +"technologists" (e.g., scientists, engineers, and other researchers) who +avoid using databases to manage their data: "my very large data files +are too cumbersome/difficult/slow/costly to load into a database". In +addition to netCDF, these files come in a variety of formats (HDF5, +GRIB, NITFS, FITS, etc.). + +This BCS product is called the [Universal File Interface +(UFI)](http://www.barrodale.com/bcs/universal-file-interface-ufi); it's +a database extension based on the IBM Informix Virtual Table Interface +(VTI). *(Please continue reading even if you don't have Informix running +on your system, because IBM has just made available, at no charge, the +[Innovator-C +Edition](http://www-01.ibm.com/software/data/informix/innovator-c-edition/) +of Informix.)* A demo that uses UFI to access wind speeds can be seen +[here](http://www.barrodale.com/bcs/universal-file-interface-animation). + +VTI is a technology that supports making external datasets appear as +tables to SQL queries and statements. UFI is a BCS database extension +for delivering the contents of external data files as though they were +rows in a database table. UFI makes a file look like a set of database +tables, so "UFI managed tables" are actually virtual database tables. +Consequently, users of UFI can perform SQL queries on their files +without having to first load them into a database. + + +------------- + +DioVISTA/Storm +----------------------------------------------- + +[DioVISTA/Storm](http://www.hitachi-power-solutions.com/products/product03/p03_61.html) +is a commercial software package that visualizes content of netCDF files +as a time series of grids, isosurfaces, and arrows on a 3D virtual +earth. Its user interface is similar to standard 3D earth visualizing +software. It displays OGC KML files, Shapefiles, and online map +resources through OGC Web Tile Map Services (WTMS). It supports CF +Conventions version 1.6 (lon-lat-alt-time axis and trajectory). Its +first version was released on Aug 5 2014. + +Environmental WorkBench +----------------------------------------------------------------- + +[SuperComputer Systems Engineering and Services +Company](http://www.ssesco.com/) (SSESCO) has developed the +[Environmental WorkBench](http://www.ssesco.com/files/ewb.html) (EWB), +an easy to use visualization and analysis application targeted at +environmental data. The EWB currently has numerous users in the fields +of meteorological research, air quality work, and groundwater +remediation. 
+EWB system features include: + +- Random access file structure using the netCDF-based public domain + MeRAF file system with support for gridded, discrete (non-grid-based + observation), and particle types +- Support for geo-referenced or Cartesian coordinate systems +- Object oriented Graphical User Interface (GUI) that is very easy to + use +- Tools for converting model and observational data sets and data + writers to netCDF +- Interactive rotation/translation of scenes in 3D space +- Time sequencing controls to step forward/backward, animate + sequentially, or go to a chosen time step; including multiple + asynchronous or non-uniform time steps +- Interactive slicers to select cross sections through 3D data sets +- Display operators available on the slices, including + - Contour lines with selectable contour levels + - Color shading by data value with variable transparency level + - Arrow and streamline representation for vector quantities + - Positional reference lines at user selected intervals + - Color coded shapes at each grid node +- Multiple 3D isosurfaces at selected parameters and values with + variable transparency +- Display of particle positions with coloring by type, height, and + source +- Display of discrete data using colored spheres and labels for scalar + data and arrows for vectors (with arrowheads or meteorological + style) +- Multiple user definable color maps to which isosurface and colored + field shading may be separately assigned +- On screen annotation for generation of report ready figures +- Image export in any of the common image formats (gif, tiff, + encapsulated postscript, etc.) +- Graceful handling of missing or bad data values by all the graphics + rendering routines +- Automatic data synchronization to allow automatic screen updating as + new data arrives in real-time from a model or set of sensors +- Two and three dimensional interpolation from scattered observations + to a grid, using the Natural Neighbor Method. This robust volume + based method yields results far superior to distance weighting + schemes. + +Systems currently supported include Win95, WinNT, OS/2, IBM RS/6000, +Silicon Graphics, HP and SUN workstations. + +SSESCO has implemented a meta-file layer on top of the netCDF library, +called MeRAF. It handles multiple netCDF files as well as automatic +max-min calculations, time-varying gridded, particle, and discrete data, +logical groupings for discrete data, and an overall simplified and +flexible interface for storing scientific data. MeRAF is being used by +the DOE at the Hanford-Meteorological Site for observational data and +will be used for their weather-modeling. + +ESRI +--------------------------- + +[ESRI ArcGIS](http://www.esri.com/software/arcgis/index.html) version +9.2 and later support [accessing netCDF time-based and multidimensional +data](http://webhelp.esri.com/arcgisdesktop/9.2/index.cfm?TopicName=An_overview_of_data_support_in_ArcGIS) +that follows CF or COARDS conventions for associating spatial locations +with data. A selected slice of netCDF data may be displayed in ArcGIS as +a raster layer, feature layer, or table. You can also drag a netCDF file +from Windows Explorer and drop it in an ESRI application such as ArcMap. + +FME +------------------------- + +[FME](http://www.safe.com/fme), developed by [Safe Software +Inc.](http://www.safe.com), is a tool for transforming data for exchange +between over [300 different formats and +models](http://www.safe.com/fme/format-search/), including netCDF. 
FME's +read and write support for netCDF allows users to move data into the +netCDF common standard, regardless of its source, and conversely enables +end-users to consume netCDF data for use in their preferred systems. For +more information visit . + +HDF Explorer +------------------------------------------- + +[HDF Explorer](http://www.space-research.org/) is a data visualization +program that reads the HDF, HDF5 and netCDF data file formats (including +netCDF classic format data). HDF Explorer runs in the Microsoft Windows +operating systems. + +HDF Explorer offers a simple yet powerful interface for the +visualization of HDF and netCDF data. The data is just a click of the +mouse away. Data is first viewed in a tree-like interface, and then +optionally loaded and visualized in a variety of ways. HDF Explorer +features include fast access to data, grid, scalar and vector views. It +also allows exporting your data either as an ASCII text file or a bitmap +image. + +IDL Interface +----------------------------------- + +[IDL](http://www.exelisvis.com/ProductsServices/IDL.aspx) (Interactive +Data Language) is a scientific computing environment, developed and +supported by [Excelis Visual Information +Solutions](http://www.exelisvis.com/), that combines mathematics, +advanced data visualization, scientific graphics, and a graphical user +interface toolkit to analyze and visualize scientific data. Designed for +use by scientists and scientific application developers, IDL's +array-oriented, fourth-generation programming language allows you to +prototype and develop complete applications. IDL now supports data in +netCDF format. +As an example, here is how to read data from a netCDF variable named GP +in a file named "data/aprin.nc" into an IDL variable named gp using the +IDL language: + + id = ncdf_open('data/april.nc') + ncdf_varget,id, ncdf_varid( id, 'GP'), gp + +Now you can visualize the data in the gp variable in a large variety of +ways and use it in other computations in IDL. You can FTP a demo version +of IDL, including the netCDF interface, by following the instructions in +pub/idl/README available via anonymous FTP from gateway.rsinc.com or +boulder.colorado.edu. +Other software packages that use or interoperate with IDL to access +netCDF data includes [ARGOS](#ARGOS), [CIDS Tools](#CIDS%20Tools), +[DDI](#DDI), [HIPHOP](#HIPHOP), [Hyperslab OPerator Suite +(HOPS)](Hyperslab%20OPerator%20Suite%20(HOPS)), and [Noesys](Noesys). + +InterFormat +----------------------------------------- + +[InterFormat](http://www.radio-logic.com/) is a medical image format +conversion program with both Motif and character interfaces. InterFormat +can automatically identify and convert most popular medical image +formats and write output files in many standard medical image formats, +or in formats such as netCDF that are suitable for input to leading +scientific visualization packages. InterFormat runs on UNIX +workstations; a version for OpenVMS is also available. A separate +external module for [IBM Data Explorer](#OpenDX) is available for use in +IBM Data Explorer's Visual Program Editor. +For more details about the formats handled, program features, and +pricing, see the Radio-Logic web site at +[\](http://www.radio-logic.com). 
+ +IRIS Explorer Module +----------------------------------------------------------- + +The Atmospheric and Oceanic Sciences Group at the National Center for +Supercomputing Applications (NCSA) and the Mesoscale Dynamics and +Precipitation Branch at NASA-Goddard Space Flight Center have developed +the NCSA PATHFINDER module set for [IRIS +Explorer](http://www.nag.co.uk:70/1h/Welcome_IEC). Two of the modules, +[ReadDFG](http://redrock.ncsa.uiuc.edu/PATHFINDER/pathrel2/explorer/ReadDFG/ReadDFG.html) +(to output Grids), and +[ReadDF](http://redrock.ncsa.uiuc.edu/PATHFINDER/pathrel2/explorer/ReadDF/ReadDF.html) +(to output Lattices) are capable of reading from NCSA HDF files, +MFHDF/3.3 files, and Unidata netCDF files. A user-friendly interface +provides control and information about the contents of the files. + +For ReadDF, the format translation is handled transparently. Up to five +unique lattices may be generated from the file (as these files can +contain multiple data fields) using a single module. A variety of +dimensionalities and data types are supported also. Multiple variables +may be combined in a single lattice to generate vector data. All three +Explorer coordinate systems are supported. + +With ReadDFG, user selected variables from the file are output in up to +five PATHFINDER grids. Each grid can consist of scalar data from one +variable or vector data from multiple variables. Coordinate information +from the file is also included in the grids. Any number of dimensions in +any of the Explorer coordinate types are supported. + +For more information on the NCSA PATHFINDER project and other available +modules, visit the WWW/Mosaic PATHFINDER Home Page at + The +ReadDF module may be downloaded either via the WWW server or anonymous +ftp at redrock.ncsa.uiuc.edu in the /pub/PATHFINDER directory. For more +information please send email to: pathfinder@redrock.ncsa.uiuc.edu + +See also the information on [DDI](#DDI) for another way to use netCDF +data with IRIS Explorer. + +LeoNetCDF +------------------------------------- + +[LeoNetCDF](http://www.leokrut.com/leonetcdf.html) is a Windows +application (Windows96/NT and higher) for editing netCDF files. It can +display content of netCDF files in tree style control and permits +editing its parameters in a standard Windows interface environment. + +Mathematica +----------------------------------------- + +[Mathematica](http://www.wolfram.com/products/mathematica/index.html) is +a technical computing environment that provides advanced numerical and +symbolic computation and visualization. As of version 6, Mathematica +adds classic [netCDF +data](http://reference.wolfram.com/mathematica/ref/format/NetCDF.html) +to the many forms of data it can import, export, and visualize. + +MATLAB +------------------------------- + +[MATLAB](http://www.mathworks.com/products/matlab/) is an integrated +technical computing environment that combines numeric computation, +advanced graphics and visualization, and a high-level programming +language. Versions 7.7 and later of MATLAB have built-in support for +reading and writing netCDF data. MATLAB version 2012a includes the +netCDF 4.1.2 library with OPeNDAP client support turned on, so remote +access to netCDF and other data formats supported by OPeNDAP servers is +available. 
+For earlier versions, several freely-available software packages that +implement a MATLAB/netCDF interface are available: +[nctoolbox](#nctoolbox), [NetCDF Toolbox for MATLAB-5](#NC4ML5), +[MexEPS](#MexEPS), the [CSIRO MATLAB/netCDF interface](#CSIRO-MATLAB), +[NetCDF +reader](http://www.mathworks.com/matlabcentral/fileexchange/loadFile.do?objectId=15177&objectType=file), +and [fanmat](/software/netcdf/Contrib.html). + +Noesys +------------------------------- + +[Noesys](http://www.rsinc.com/NOeSYS/index.cfm) is software for desktop +science data access and visualization. Available for both Windows and +Power Macintosh platforms, Noesys allows users to access, process, +organize and visualize large amounts of technical data. +Noesys can be used to: + +- Access and organize complex technical data +- Export data objects to text and binary +- View and edit large multidimensional data sets (up to 7D) in a + spreadsheet-like environment +- Manipulate and process data using + [IDL®](http://www.exelisvis.com/ProductsServices/IDL.aspx), the + Interactive Data Language, from Research Systems, Inc. +- Interactively visualize column, matrix, and volumetric data sets +- Image global datasets as various map projections +- Create various projections from partial data or partial projections + from global data (Windows only) +- View and Edit HDF-EOS grid object data +- Subset datasets and data tables with a GUI dialog +- Change and save the number format of datasets and data table fields +- Drag and Drop HDF objects between files to organize or subset files +- Attach text annotations directly to the data file +- Add new data objects to files and create hierarchical groups +- Edit or create new color palettes +- Generate publication-quality graphics for data presentation + +Noesys has an interface to IDL®, allowing data to move back and forth +between Noesys and IDL with the click of a mouse. Noesys includes the +visual data analysis tools, Transform, T3D and Plot, for menu driven +plotting, rendering, and image analysis. Noesys can import HDF, HDF-EOS, +netCDF, ASCII, Binary, DTED, GeoTIFF, SDTS, TIFF, PICT, and BMP files, +create annotations, macros, images, projections and color palettes +specific to the data and save it the result as an HDF file. Noesys also +includes an HDF-EOS Grid Editor. Noesys runs on Windows 95/98 & NT and +Power Macintosh OS. More details and information about ordering Noesys +are available from +[\](http://www.rsinc.com/NOeSYS/index.cfm). + +Origin +------------------------------- + +Ryan Toomey reports: + +Our website is + +A general description of Origin: Origin includes a suite of features +that cater to the needs of scientists and engineers alike. Multi-sheet +workbooks, publication-quality graphics, and standardized analysis tools +provide a tightly integrated workspace for you to import data, create +and annotate graphs, explore and analyze data, and publish your work. To +ensure that Origin meets your data analysis requirements, intuitive +tools for advanced statistics, regression, nonlinear curve fitting, +signal processing, image processing and peak analysis are built-in. +Since any analysis operation can be set to automatically recalculate, +you can reuse your projects as templates for future work, thereby +simplifying your daily routine. + +A general description of OriginPro: OriginPro offers all of the features +of Origin plus extended analysis tools for statistics, 3D fitting, image +processing and signal processing. 
+ +A general description of OriginLab Corporation: "OriginLab Corporation +produces professional data analysis and graphing software for scientists +and engineers. Our products are designed to be easy-to-use, yet have the +power and versatility to provide for the most demanding user." + +PPLUS +----------------------------- + +[Plot-Plus (PPLUS)](http://dwd6.home.mindspring.com/) is a general +purpose scientific graphics package, which is used in several PMEL +applications. It will read most standard ascii or binary files, as well +as netCDF file format, which used by the TOGA-TAO Project and the EPIC +system for management display and analysis. PPLUS is an interactive, +command driven, scientific graphics package which includes features such +as Mercator projection, Polar Stereographic projection, color or gray +scale area-fill contour plotting, and support for many devices: +X-windows, PostScript, HP, Tektronix, and others. This powerful and +flexible package recognizes netCDF data format, and it can extract axis +lables and graph titles from the data files. The user can customize a +plots, or combine several plots into a composite. Plots are of +publication quality. The PPLUS graphics package is used for all the TAO +workstation displays, including the animations. The animations are +created by generating a PPLUS plot for each frame, transforming the +PPLUS metacode files into HDF format with the PPLUS m2hdf filter, and +then displaying the resulting bit maps as an animation with the +XDataSlice utility, which is freely available on Internet from the +National Center for Supercomputing Applications, at +anonymous@ftp.ncsa.uiuc.edu (141.142.20.50). There is also a new m2gif +utility which produces GIF files from PPLUS metacode files. +PPLUS is supported for most Unix systems and for VAX/VMS, and is in use +at many oceanographic institutes in the US (e.g., (PMEL, Harvard, WHOI, +Scripps, NCAR, NASA, University of Rhode Island, University of Oregon, +Texas A&M...) and also internationally (Japan, Germany, Australia, +Korea...). + +Plot Plus is now available at no charge. It does require licensing on a +per computer basis, but the license is at no cost. For more information +about licensing, see +[http://dwd6.home.mindspring.com/pplus\_license.html/](http://dwd6.home.mindspring.com/pplus_license.html); +source and documentation are available via anonymous FTP from + and +. + + Email: plot_plus@halcyon.com + Postal mail: c/o Donald Denbo + 2138 N 186th St + Shoreline, WA 98133 + Fax and Voice: (206) 366-0624 + +PV-Wave +--------------------------------- + +[PV-Wave](http://www.vni.com/products/wave/index.html) is a software +environment from [Visual Numerics](http://www.vni.com/) for solving +problems requiring the application of graphics, mathematics, numerics +and statistics to data and equations. +PV-WAVE uses a fourth generation language (4GL) that analyzes and +displays data as you enter commands. PV-WAVE includes integrated +graphics, numerics, data I/O, and data management. The latest version of +PV-Wave supports data access in numerous formats, including netCDF. + +See also the information on [DDI](#DDI) for another way to use netCDF +data with PV-Wave. + +Slicer Dicer +------------------------------------------ + +[Slicer Dicer](http://www.slicerdicer.com/) is a volumetric data +visualization tool, currently available for Windows and under +development for other platforms. 
The Slicer Dicer Web site includes a +complete list of features, an on-line user's guide, and examples of +Slicer Dicer output. Visualizations features include: +- Perspective view of data rendered on interactively selected + orthogonal slices, oblique slices, blocks (arbitrary rectilinear + sub-volumes), cutouts, isosurfaces, and projected volumes (projected + maximum, minimum, maximum absolute, or minimum absolute). +- Optional annotations: caption, axes ticks and labels (default + "pretty" ticks, or override to place ticks where you want them), + color legend, data-cube outline. +- Animation modes: slices, space, time (any parametric dimension), + transparency, oblique slice orientation, rotation. Built-in + animation viewer supports speed and image size controls, + single-step, forward, backward, loop, and back-and-forth modes. +- Select color scale from 25+ built in color tables, or import from + palette file. Any data level or range of levels can be painted with + an arbitrary color. +- Any data level or range of levels can be rendered as either opaque + or transparent. + +vGeo +--------------------------- + +[vGeo](http://www.vrco.com/products/vgeo/vgeo.html) (Virtual Global +Explorer and Observatory) is an end-user product from +[VRCO](http://www.vrco.com/) designed to import and visualize multiple +disparate data sets, including computer simulations, observed +measurements, images, model objects, and more. vGeo is available for +IRIX, Linux and Windows platforms and supports displays ranging from +desktop monitors to multi-walled projection systems. It accepts data in +a variety of formats, including netCDF, and allows the user to specify +how multiple files and variables are mapped into a data source. 3D +graphics are built from the underlying data in real-time, and the user +has interactive control of graphics, navigation, animation, and more. + +VISAGE and Decimate +--------------------------------------------------------- + +[VISAGE](http://www.crd.ge.com/esl/cgsp/projects/visage/) +(VISualization, Animation, and Graphics Environment) is a turnkey 3D +visualization system developed at General Electric Corporate Research +and Development, (Schroeder, WJ et al, "VISAGE: An Object-Oriented +Scientific Visualization System", Proceedings of Visualization \`92 +Conference). VISAGE is designed to interface with a wide variety of +data, and uses netCDF as the preferred format. + +VISAGE is used at GE Corporate R & D, GE Aircraft Engine, GE Canada, GE +Power Generation, as well as ETH Zurich, Switzerland, MQS In Chieti, +Italy, and Rensselaer Polytechnic Institute in Troy, New York. + +GE has another application called "Decimate" that does polygon +reduction/decimation (Schroeder,WJ et al, "Decimation of Triangle +Meshes", Proceedings of SIGGRAPH \`92). This application uses netCDF as +a preferred format. Decimate is currently licensed to Cyberware, Inc., +makers of 3D laser digitizing hardware. Decimate is currently bundled +with the scanners, and will soon be available as a commercial product. + +Voyager +--------------------------------- + +[Makai Voyager](http://voyager.makai.com/), developed by Makai Ocean +Engineering, Inc., is 3D/4D geospatial visualization software that +enables users to import, fuse, view, and analyze large earth, ocean, and +atmosphere scientific data as it is collected or simulated in a global +geo-referenced GIS platform. 
The key differentiator of Makai Voyager is +its level-of-detail (LOD) technology that enables users to stream big +data rapidly over a network or the web. + +Features in Makai Voyager Version 1.2 include: + +- Preprocessing LiDAR, GIS, & volumetric data from common formats into + streamable files +- Volume rendering for large 4D (3D + time) data, such as NetCDF +- Analysis tools and customizable graphs +- WMS and other streamable formats + +Individual or group licenses are available for Windows (32- and 64-bit), +Linux, and Mac OS X. A full-featured 30-day trial version of Makai +Voyager is [available for download](http://voyager.makai.com%20). From b5856bc2042a3e0d720e399cbe5195a852528173 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Fri, 22 May 2015 15:41:38 -0600 Subject: [PATCH 08/12] Wired the new software.md file into the doxygen toolchain. --- docs/Doxyfile.in | 1 + docs/Makefile.am | 3 ++- docs/software.md | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/Doxyfile.in b/docs/Doxyfile.in index 5b861615c..3a6f810f2 100644 --- a/docs/Doxyfile.in +++ b/docs/Doxyfile.in @@ -761,6 +761,7 @@ INPUT = \ @abs_top_srcdir@/docs/notes.md \ @abs_top_srcdir@/docs/all-error-codes.md \ @abs_top_srcdir@/docs/FAQ.md \ + @abs_top_srcdir@/docs/software.md \ @abs_top_srcdir@/docs/COPYRIGHT.dox \ @abs_top_srcdir@/docs/credits.md \ @abs_top_srcdir@/include/netcdf.h \ diff --git a/docs/Makefile.am b/docs/Makefile.am index ab2f7f003..c1b68a740 100644 --- a/docs/Makefile.am +++ b/docs/Makefile.am @@ -10,7 +10,8 @@ EXTRA_DIST = netcdf.m4 DoxygenLayout.xml Doxyfile.in footer.html \ architecture.dox internal.dox windows-binaries.md \ building-with-cmake.md CMakeLists.txt \ groups.dox install.md notes.md install-fortran.md \ - all-error-codes.md cmake_faq.md credits.md auth.md.in auth.md + all-error-codes.md cmake_faq.md credits.md auth.md.in auth.md \ + software.md # Turn off parallel builds in this directory. .NOTPARALLEL: diff --git a/docs/software.md b/docs/software.md index 5004b4afd..8f7c32280 100644 --- a/docs/software.md +++ b/docs/software.md @@ -1,4 +1,4 @@ -Software for Manipulating or Displaying NetCDF Data (#software) +Software for Manipulating or Displaying NetCDF Data {#software} =================================================== This document provides references to software packages that may be used for manipulating or displaying [netCDF](/software/netcdf/) data. We include information about both freely-available and licensed (commercial) software that can be used with netCDF data. We rely on developers to help keep this list up-to-date. If you know of corrections or additions, please [send them to us (mailto:support@unidata.ucar.edu). Where practical, we would like to include WWW links to information about these packages in the HTML version of this document. From d62f382894a32134a5d60a4a45219ec4ea9fec44 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Fri, 22 May 2015 16:03:00 -0600 Subject: [PATCH 09/12] Cleaning up software.md file, eliminating warnings and errors generated by doxygen. Not done yet. 
--- docs/software.md | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/docs/software.md b/docs/software.md index 8f7c32280..cb5561b69 100644 --- a/docs/software.md +++ b/docs/software.md @@ -18,7 +18,7 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [CDFconvert (Convert netCDF to RPN and GEMPAK Grids)](#CDFconvert) - [cdfsync (network synchronization of netCDF files)](#cdfsync) - [CDO (Climate Data Operators)](#CDO) -- [CIDS Tools](#CIDS%20Tools) +- [CIDS Tools](#CIDS_Tools) - [CSIRO MATLAB/netCDF interface](#CSIRO-MATLAB) - [EPIC](#EPIC) - [Excel Use](#ExcelUse) @@ -35,22 +35,22 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [GrADS (Grid Analysis and Display System)](#GrADS) - [Gri](#Gri) - [GXSM - Gnome X Scanning Microscopy project](#GXSM) -- [HDF (Hierarchical Data Format) interface](#HDF%20interface) +- [HDF (Hierarchical Data Format) interface](#HDF_interface) - [HDF-EOS to netCDF converter](#HDF-EOS) - [HIPHOP (Handy IDL-Program for HDF-Output Plotting)](#HIPHOP) - [HOPS (Hyperslab OPerator - Suite)](#Hyperslab%20OPerator%20Suite%20(HOPS)) + Suite)](#Hyperslab_OPerator_Suite_(HOPS)) - [iCDF (imports chromatographic netCDF data into MATLAB)](#iCDF) - [IDV (Integrated Data Viewer)](#IDV) - [Ingrid](#Ingrid) - [Intel Array Visualizer](#IntelArrayVisualizer) - [IVE (Interactive Visualization Environment)](#IVE) - [JSON format with the ncdump-json utility](#JSON) -- [Java interface](#Java%20interface) +- [Java interface](#Java_interface) - [Kst (2D plotting tool)](#KST) - [Labview interface](#Labview-API) - [MBDyn (MultiBody Dynamics)](#MBDyn) -- [Max\_diff\_nc](#Max_diff_nc) +- [Max_diff_nc](#Maxdiffnc) - [MeteoExplorer](#MeteoExplorer) - [MeteoInfo](#MeteoInfo) - [MexEPS (MATLAB interface)](#MexEPS) @@ -109,13 +109,13 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [Barrodale UFI](#BCS-UFI) - [DioVISTA/Storm](#DioVISTA/Storm) - [EnSight](#EnSight) -- [Environmental WorkBench](#Environmental%20WorkBench) +- [Environmental WorkBench](#Environmental_WorkBench) - [ESRI](#ESRI) - [FME](#FME) - [HDF Explorer](#HDF-Explorer) - [IDL Interface](#IDL) - [InterFormat](#InterFormat) -- [IRIS Explorer Module](#IRIS%20Explorer%20Module) +- [IRIS Explorer Module](#IRIS_Explorer_Module) - [LeoNetCDF](#LeoNetCDF) - [Mathematica](#Mathematica) - [MATLAB](#MATLAB) @@ -125,15 +125,15 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [PV-Wave](#PV-Wave) - [Slicer Dicer](#SlicerDicer) - [vGeo](#vGeo) -- [VISAGE and Decimate](#VISAGE%20and%20Decimate) +- [VISAGE and Decimate](#VISAGE_and_Decimate) - [Voyager](#Voyager) ------------------------------------------------------------------------- + Freely Available Software {#freely} ========================= -ANDX and ANAX +ANDX and ANAX {#ANDX} ------------------------------------ The ARM Program has developed [ANDX (ARM NetCDF Data @@ -150,7 +150,7 @@ scaled-down version of ANDX -- it is designed to only extract ASCII data. All features of ANDX pertaining to non-graphic data extraction are included in ANAX. -ANTS +ANTS {#ANTS} --------------------------- The ARM Program has developed [ANTS (ARM NetCDF Tool @@ -177,7 +177,7 @@ represents a library of coding examples for fundamental netCDF tasks. See the [website](http://science.arm.gov/~cflynn/ANTS/) for more information. 
-ARGOS +ARGOS {#ARGOS} ----------------------------- [ARGOS](http://www.lapeth.ethz.ch/argos/index.html) (interActive @@ -207,7 +207,7 @@ conditions](http://www.lapeth.ethz.ch/argos/argos_copyright.html) are available. For further information and installation, please E-mail to: bresch@atmos.umnw.ethz.ch -CDAT +CDAT {#CDAT} --------------------------- The [Climate Data Analysis Tool (CDAT)](http://cdat.sf.net), developed @@ -239,7 +239,7 @@ an image, or as a collection of images in an animation. The software has a gradual learning curve, allowing the novice user to quickly obtain useful results. -CDFconvert +CDFconvert {#CDFconvert} --------------------------------------- The [MRG CDFconvert @@ -257,7 +257,7 @@ has the flexibility to handle netCDF files generated by a number of sources, including NCEP and ECMWF. User-definable conversion tables make the extension of the package to different datasets possible. -cdfsync +cdfsync {#cdfsync} --------------------------------- Joe Sirott of NOAA's Pacific Marine Environmental Laboratory has @@ -277,7 +277,7 @@ The latest version should run on Linux variants and Solaris. More information is available at the [cdfsync website](http://www.epic.noaa.gov/epic/software/cdfsync/). -CDO (Climate Data Operators) +CDO (Climate Data Operators) {#CDO} -------------------------------------------------- Uwe Schulzweida at the Max Planck Institute for Meteorology has @@ -316,7 +316,7 @@ or using ECMWF reanalysis on a reduced grid More information is available on the [CDO homepage](http://code.zmaw.de/projects/cdo). -CIDS Tools +CIDS Tools {#CIDS_Tools} --------------------------------------- The Center for Clouds Chemistry and Climate @@ -2411,9 +2411,9 @@ of IDL, including the netCDF interface, by following the instructions in pub/idl/README available via anonymous FTP from gateway.rsinc.com or boulder.colorado.edu. Other software packages that use or interoperate with IDL to access -netCDF data includes [ARGOS](#ARGOS), [CIDS Tools](#CIDS%20Tools), +netCDF data includes [ARGOS](#ARGOS), [CIDS Tools](#CIDS_Tools), [DDI](#DDI), [HIPHOP](#HIPHOP), [Hyperslab OPerator Suite -(HOPS)](Hyperslab%20OPerator%20Suite%20(HOPS)), and [Noesys](Noesys). +(HOPS)](Hyperslab_OPerator_Suite_(HOPS)), and [Noesys](Noesys). InterFormat ----------------------------------------- @@ -2717,4 +2717,4 @@ Features in Makai Voyager Version 1.2 include: Individual or group licenses are available for Windows (32- and 64-bit), Linux, and Mac OS X. A full-featured 30-day trial version of Makai -Voyager is [available for download](http://voyager.makai.com%20). +Voyager is [available for download](http://voyager.makai.com). From be5e3cd6069cdf0e44c6ac9f1d33d59de73b3007 Mon Sep 17 00:00:00 2001 From: dmh Date: Sun, 24 May 2015 17:31:39 -0600 Subject: [PATCH 10/12] 1. Allow for the user specified rc file via the env variable DAPRCFILE. Note that the value of this environment variable should be the absolute path of the rc file, not the path to its containing directory. 2. fixup testauth.sh and add some new tests 3. 
synch oc --- cf | 4 +- docs/auth.md | 826 ++++++++++---------- libsrc/attr.c | 339 ++++++++ ncdap_test/testauth.old | 213 +++++ ncdap_test/testauth.sh | 310 +++++--- oc2/daptab.c | 1630 +++++++++++++++++++++++---------------- oc2/daptab.h | 88 +-- oc2/occurlfunctions.c | 2 +- oc2/ocdata.c | 5 +- oc2/ocinternal.c | 3 +- oc2/ocrc.c | 6 +- oc2/ocuri.c | 2 - 12 files changed, 2184 insertions(+), 1244 deletions(-) create mode 100755 ncdap_test/testauth.old diff --git a/cf b/cf index 3736feac2..f22b46ded 100644 --- a/cf +++ b/cf @@ -122,8 +122,8 @@ FLAGS="$FLAGS --disable-examples" #FLAGS="$FLAGS --enable-large-file-tests" #FLAGS="$FLAGS --disable-testsets" #FLAGS="$FLAGS --disable-dap-remote-tests" -#FLAGS="$FLAGS --enable-dap-auth-tests" -FLAGS="$FLAGS --enable-doxygen" +FLAGS="$FLAGS --enable-dap-auth-tests" +#FLAGS="$FLAGS --enable-doxygen" #FLAGS="$FLAGS --enable-logging" #FLAGS="$FLAGS --disable-diskless" #FLAGS="$FLAGS --enable-mmap" diff --git a/docs/auth.md b/docs/auth.md index 8861c73e8..c27047ead 100644 --- a/docs/auth.md +++ b/docs/auth.md @@ -1,479 +1,449 @@ -OC Authorization Support {#auth} -======================== +Authorization Support in the netDF-C Libraries {#auth} +================================================== -[TOC] +\brief It is possible to support a number of authorization schemes +in the netCDF-C library. -###### Author: Dennis Heimbigner - dmh at ucar dot edu +With one exception, authorization in the netCDF-C library is +delegated to the oc2 code, which in turn delegates it to the +libcurl library. The exception is that the location of the rc +file can be specified by setting the environment variable *NCRCFILE*. +Note that the value of this environment variable should be the +absolute path of the rc file, not the path to its containing directory. -###### Draft: 11/21/2014 - Last Revised: 12/23/2014 - OC Version 2.1 - -###### Table of Contents {.break} - -1. [Introduction](#Introduction) -2. [URL-Based Authentication](#URL-AUTH) -3. [RC File Authentication](#DODSRC) -4. [Redirection-Based Authentication](#REDIR) -5. [URL Constrained RC File Entries](#URLCONS) -6. [Client-Side Certificates](#CLIENTCERTS) -7. [Appendix A. All RC-File Keys](#allkeys) -8. [Appendix B. ESG Access in Detail](#ESGDETAIL) - -Introduction {.break} ------------- +Following is the authorization documentation. + + + + + + +
+

OC Authorization Support

+
Author: Dennis Heimbigner
+dmh at ucar dot edu
+
Draft: 11/21/2014
+Last Revised: 12/23/2014
+OC Version 2.1
+
+ +
Table of Contents
+
    +
  1. Introduction +
  2. URL-Based Authentication +
  3. RC File Authentication +
  4. Redirection-Based Authentication +
  5. URL Constrained RC File Entries +
  6. Client-Side Certificates +
  7. Appendix A. All RC-File Keys +
  8. Appendix B. ESG Access in Detail +
+ +

Introduction

OC can support user authorization using the authorization mechanisms
provided by the curl library. This includes basic password
authentication as well as certificate-based authorization.

+With some exceptions (e.g. see the section on redirection) +The libcurl authorization mechanisms can be accessed in two ways +

    +
  1. Inserting the username and password into the url, or +
  2. Accessing information from a so-called rc file named either +.daprc or .dodsrc +
-With some exceptions (e.g. see the section on [redirection](#REDIR)) The -libcurl authorization mechanisms can be accessed in two ways - -1. Inserting the username and password into the url, or -2. Accessing information from a so-called *rc* file named either - *.daprc* or *.dodsrc* - -URL-Based Authentication {.break} ------------------------- - -For simple password based authentication, it is possible to directly -insert the username and the password into a url in this form. - - http://username:password@host/... - +

URL-Based Authentication

+For simple password based authentication, it is possible to +directly insert the username and the password into a url in this form. +
+    http://username:password@host/...
+
This username and password will be used if the server asks for -authentication. Note that only simple password authentication is -supported in this format. Specifically note that [redirection](#REDIR) -based authorization will not work with this. - -RC File Authentication {.break} ----------------------- - -The oc library supports an *rc* file mechanism to allow the passing of a -number of parameters to liboc and libcurl. - -The file must be called one of the following names: ".daprc" or -".dodsrc" If both .daprc and .dodsrc exist, then the .daprc file will -take precedence. - -Searching for the rc file first looks in the current directory and then -in the home directory (as defined by the HOME environment variable). It -is also possible to specify a direct path using the *-R* option to -ocprint or using the *oc\_set\_rcfile* procedure (see oc.h). Note that -for these latter cases, the path must be to the file itself, not to the -containing directory. +authentication. Note that only simple password authentication +is supported in this format. +Specifically note that redirection based +authorization will not work with this. +
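As a minimal sketch, assuming a netCDF-C build with DAP support, such a
URL can be handed straight to nc_open(); the host, path, and credentials
below are placeholders, not a real endpoint.

    #include <stdio.h>
    #include <netcdf.h>

    /* Sketch only: open a DAP dataset whose URL embeds the username and
       password (hypothetical host, path, and credentials). */
    int main(void) {
        int ncid;
        const char *url =
            "http://myname:mypassword@host.example.edu/thredds/dodsC/some/dataset.nc";
        int stat = nc_open(url, NC_NOWRITE, &ncid);
        if (stat != NC_NOERR) {
            fprintf(stderr, "nc_open: %s\n", nc_strerror(stat));
            return 1;
        }
        /* ... read variables as with any local netCDF file ... */
        nc_close(ncid);
        return 0;
    }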

RC File Authentication

+The oc library supports an rc file mechanism to allow the passing +of a number of parameters to liboc and libcurl. +

The file must be called one of the following names: ".daprc" or
".dodsrc". If both .daprc and .dodsrc exist, then the .daprc file will
take precedence.

The search for the rc file looks first in the current directory and
then in the home directory (as defined by the HOME environment
variable). It is also possible to specify a direct path using the -R
option to ocprint or using the oc_set_rcfile procedure (see oc.h).
Note that for these latter cases, the path must be to the file itself,
not to the containing directory.

The rc file format is a series of lines of the general form: - - []= - -where the bracket-enclosed host:port is optional and will be discussed +

+[<host:port>]<key>=<value>
+
+where the bracket-enclosed host:port is optional and will be discussed subsequently. - +

The currently defined set of authorization-related keys are as follows. -The second column is the affected curl\_easy\_setopt option(s). - -Key - -curl\_easy\_setopt Option - -HTTP.COOKIEJAR - -CURLOPT\_COOKIEJAR, CURLOPT\_COOKIEFILE - -HTTP.PROXY\_SERVER - -CURLOPT\_PROXY, CURLOPT\_PROXYPORT, CURLOPT\_PROXYUSERPWD - -HTTP.SSL.CERTIFICATE - -CURLOPT\_SSLCERT - -HTTP.SSL.KEY - -CURLOPT\_SSLKEY - -HTTP.SSL.KEYPASSWORD - -CURLOPT\_KEYPASSWORD - -HTTP.SSL.CAINFO - -CURLOPT\_SSLCAINFO - -HTTP.SSL.CAPATH - -CURLOPT\_SSLCAPATH - -HTTP.SSL.VERIFYPEER - -CURLOPT\_SSL\_VERIFYPEER +The second column is the affected curl_easy_setopt option(s). + +
Key                              curl_easy_setopt Option
HTTP.COOKIEJAR                   CURLOPT_COOKIEJAR, CURLOPT_COOKIEFILE
HTTP.PROXY_SERVER                CURLOPT_PROXY, CURLOPT_PROXYPORT, CURLOPT_PROXYUSERPWD
HTTP.SSL.CERTIFICATE             CURLOPT_SSLCERT
HTTP.SSL.KEY                     CURLOPT_SSLKEY
HTTP.SSL.KEYPASSWORD             CURLOPT_KEYPASSWORD
HTTP.SSL.CAINFO                  CURLOPT_SSLCAINFO
HTTP.SSL.CAPATH                  CURLOPT_SSLCAPATH
HTTP.SSL.VERIFYPEER              CURLOPT_SSL_VERIFYPEER
HTTP.CREDENTIALS.USERPASSWORD    CURLOPT_USERPASSWORD
+ +

Password Authentication

+The key HTTP.CREDENTIALS.USERPASSWORD +can be used to set the simple password authentication. +This is an alternative to setting it in the url. +The value must be of the form "username:password". -CURLOPT\_USERPASSWORD +
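As an illustration, a matching rc file entry would be a single line of
the form below; the credentials are placeholders.

    HTTP.CREDENTIALS.USERPASSWORD=myname:mypassword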

Cookie Jar

+The HTTP.COOKIEJAR key +specifies the name of file from which +to read cookies (CURLOPT_COOKIEJAR) and also +the file into which to store cookies (CURLOPT_COOKIEFILE). +The same value is used for both CURLOPT values. +It defaults to in-memory storage. -### Password Authentication - -The key HTTP.CREDENTIALS.USERPASSWORD can be used to set the simple -password authentication. This is an alternative to setting it in the -url. The value must be of the form "username:password". - -### Cookie Jar - -The HTTP.COOKIEJAR key specifies the name of file from which to read -cookies (CURLOPT\_COOKIEJAR) and also the file into which to store -cookies (CURLOPT\_COOKIEFILE). The same value is used for both CURLOPT -values. It defaults to in-memory storage. - -### Certificate Authentication - -HTTP.SSL.CERTIFICATE specifies a file path for a file containing a PEM -cerficate. This is typically used for client-side authentication. - -HTTP.SSL.KEY is essentially the same as HTTP.SSL.CERTIFICATE and should -usually have the same value. - -HTTP.SSL.KEYPASSWORD specifies the password for accessing the -HTTP.SSL.KEY/HTTP.SSL.CERTIFICATE file. - -HTTP.SSL.CAPATH specifies the path to a directory containing trusted -certificates for validating server sertificates. - -HTTP.SSL.VALIDATE is a boolean (1/0) value that if true (1) specifies -that the client should verify the server's presented certificate. - -HTTP.PROXY\_SERVER specified the url for accessing the proxy: +

Certificate Authentication

HTTP.SSL.CERTIFICATE specifies a file path for a file containing a PEM
certificate. This is typically used for client-side authentication.

+HTTP.SSL.KEY is essentially the same as HTTP.SSL.CERTIFICATE +and should usually have the same value. +

+HTTP.SSL.KEYPASSWORD +specifies the password for accessing the HTTP.SSL.KEY/HTTP.SSL.CERTIFICATE +file. +

HTTP.SSL.CAPATH specifies the path to a directory containing trusted
certificates for validating server certificates.

+HTTP.SSL.VALIDATE +is a boolean (1/0) value that if true (1) +specifies that the client should verify the server's presented certificate. +

HTTP.PROXY_SERVER specifies the url for accessing the proxy
(e.g. http://[username:password@]host[:port]).
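Putting several of the keys above together, a hypothetical .daprc (or
.dodsrc) fragment might look like the following; every path, password,
and proxy address here is illustrative only and not a default used by
the library.

    HTTP.COOKIEJAR=/home/me/.dods_cookies
    HTTP.PROXY_SERVER=http://proxyuser:proxypass@proxy.example.com:8080
    HTTP.SSL.CERTIFICATE=/home/me/clientcert.pem
    HTTP.SSL.KEY=/home/me/clientcert.pem
    HTTP.SSL.KEYPASSWORD=mykeypassword
    HTTP.SSL.CAPATH=/home/me/certificates
    HTTP.SSL.VERIFYPEER=1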

Redirection-Based Authentication

Some sites provide authentication by using a third party site to do
the authentication. One example is URS, the EOSDIS User Registration
System.

The process is usually as follows. - -1. The client contacts the server of interest (SOI), the actual data - provider. -2. The SOI sends a redirect to the client to connect to the URS system. -3. The client authenticates with URS. -4. URS sends a redirect (with authorization information) to send the - client back to the SOI to actually obtain the data. - -In order for this to work with libcurl, the client will usually need to -provide a .netrc file so that the redirection will work correctly. The -format of this .netrc file will contain content that typically look like -this. - - machine uat.urs.earthdata.nasa.gov login xxxxxx password yyyyyy - -where the machine is the one to which the client is redirected for -authorization, and the login and password are those needed to -authenticate. - +

    +
  1. The client contacts the server of interest (SOI), the actual data provider. +
  2. The SOI sends a redirect to the client to connect to the URS system. +
  3. The client authenticates with URS. +
  4. URS sends a redirect (with authorization information) to send +the client back to the SOI to actually obtain the data. +
+

In order for this to work with libcurl, the client will usually need
to provide a .netrc file so that the redirection will work correctly.
The content of this .netrc file will typically look like this.

+machine uat.urs.earthdata.nasa.gov login xxxxxx password yyyyyy
+
+where the machine is the one to which the client is redirected +for authorization, and the login and password are those +needed to authenticate. +

The .netrc file can be specified in two ways. +

    +
  1. Specify the netrc file to liboc using the procedure in oc.h: +
    +oc_set_netrc(OClink* link, const char* file)
    +
    +(This is equivalent to the -N flag to ocprint). +

    +

  2. Put the following line in your .daprc/.dodsrc file. +
    +HTTP.NETRC=<path to netrc file>
    +
    +
+

+One final note. In using this, it is probable that you will +need to specify a cookie jar (HTTP.COOKIEJAR) so that the +redirect site can pass back authorization information. -1. Specify the netrc file to liboc using the procedure in oc.h: - - oc_set_netrc(OClink* link, const char* file) - - (This is equivalent to the -N flag to ocprint). - -2. Put the following line in your .daprc/.dodsrc file. - - HTTP.NETRC= - -One final note. In using this, it is probable that you will need to -specify a cookie jar (HTTP.COOKIEJAR) so that the redirect site can pass -back authorization information. - -URL Constrained RC File Entries {.break} -------------------------------- - -Each line of the rc file can begin with a host+port enclosed in square -brackets. The form is "host:port". If the port is not specified then the -form is just "host". The reason that more of the url is not used is that +
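Concretely, the two rc file entries involved would typically look like
this; the paths are examples only.

    HTTP.NETRC=/home/me/.netrc
    HTTP.COOKIEJAR=/home/me/.dods_cookies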

URL Constrained RC File Entries

+Each line of the rc file can begin with +a host+port enclosed in square brackets. +The form is "host:port". If the port is not specified +then the form is just "host". +The reason that more of the url is not used is that libcurl's authorization grain is not any finer than host level. - +

Examples. - - [remotetest.unidata.ucar.edu]HTTP.VERBOSE=1 - or - [fake.ucar.edu:9090]HTTP.VERBOSE=0 - -If the url request from, say, the *oc\_open* method has a host+port -matchine one of the prefixes in the rc file, then the corresponding -entry will be used, otherwise ignored. - +

+[remotetest.unidata.ucar.edu]HTTP.VERBOSE=1
+or
+[fake.ucar.edu:9090]HTTP.VERBOSE=0
+
If the url request from, say, the oc_open method has a host+port
matching one of the prefixes in the rc file, then the corresponding
entry will be used; otherwise it is ignored.

For example, the URL - - http://remotetest.unidata.ucar.edu/thredds/dodsC/testdata/testData.nc - +

+http://remotetest.unidata.ucar.edu/thredds/dodsC/testdata/testData.nc
+
will have HTTP.VERBOSE set to 1. - -Similarly, - - http://fake.ucar.edu:9090/dts/test.01 - +

+Similarly, +

+http://fake.ucar.edu:9090/dts/test.01
+
will have HTTP.VERBOSE set to 0. -Client-Side Certificates {.break} ------------------------- - -Some systems, notably ESG (Earth System Grid), requires the use of -client-side certificates, as well as being [re-direction based](#REDIR). +

Client-Side Certificates

+Some systems, notably ESG (Earth System Grid), requires +the use of client-side certificates, as well as being +re-direction based. This requires setting the following entries: +
    +
  • HTTP.COOKIEJAR — a file path for storing cookies across re-direction. +
  • HTTP.NETRC — the path to the netrc file. +
  • HTTP.SSL.CERTIFICATE — the file path for the client side certificate file. +
  • HTTP.SSL.KEY — this should have the same value as HTTP.SSL.CERTIFICATE. +
  • HTTP.SSL.CAPATH — the path to a "certificates" directory. +
  • HTTP.SSL.VALIDATE — force validation of the server certificate. +
+Note that the first two are to support re-direction based authentication. -- HTTP.COOKIEJAR — a file path for storing cookies across - re-direction. -- HTTP.NETRC — the path to the netrc file. -- HTTP.SSL.CERTIFICATE — the file path for the client side certificate - file. -- HTTP.SSL.KEY — this should have the same value as - HTTP.SSL.CERTIFICATE. -- HTTP.SSL.CAPATH — the path to a "certificates" directory. -- HTTP.SSL.VALIDATE — force validation of the server certificate. - -Note that the first two are to support re-direction based -authentication. - -##### Appendix A. All RC-File Keys {.break} - +
Appendix A. All RC-File Keys
For completeness, this is the list of all rc-file keys. + +
Key                              curl_easy_setopt Option
HTTP.DEFLATE                     CURLOPT_DEFLATE (with value "deflate,gzip")
HTTP.VERBOSE                     CURLOPT_VERBOSE
HTTP.TIMEOUT                     CURLOPT_TIMEOUT
HTTP.USERAGENT                   CURLOPT_USERAGENT
HTTP.COOKIEJAR                   CURLOPT_COOKIEJAR
HTTP.COOKIE_JAR                  CURLOPT_COOKIEJAR
HTTP.PROXY_SERVER                CURLOPT_PROXY, CURLOPT_PROXYPORT, CURLOPT_PROXYUSERPWD
HTTP.SSL.CERTIFICATE             CURLOPT_SSLCERT
HTTP.SSL.KEY                     CURLOPT_SSLKEY
HTTP.SSL.KEYPASSWORD             CURLOPT_KEYPASSWORD
HTTP.SSL.CAINFO                  CURLOPT_SSLCAINFO
HTTP.SSL.CAPATH                  CURLOPT_SSLCAPATH
HTTP.SSL.VERIFYPEER              CURLOPT_SSL_VERIFYPEER
HTTP.CREDENTIALS.USERPASSWORD    CURLOPT_USERPASSWORD
HTTP.NETRC                       CURLOPT_NETRC, CURLOPT_NETRC_FILE
+ -Key +
Appendix B. ESG Access in Detail
+It is possible to access Earth Systems Grid (ESG) datasets +from ESG servers through the OC API using the techniques +described in the section on Client-Side Certificates. +

In order to access ESG datasets, however, it is necessary to register
as a user with ESG and to set up your environment so that proper
authentication is established between an oc client program and the ESG
data server. Specifically, it is necessary to use what is called
"client-side keys" to enable this authentication. Normally, when a
client accesses a server in a secure fashion (using "https"), the
server provides an authentication certificate to the client. With
client-side keys, the client must also provide a certificate to the
server so that the server can know with whom it is communicating.

+The oc library uses the curl library and it is that +underlying library that must be properly configured. -curl\_easy\_setopt Option +

Terminology

The key elements for client-side keys require the construction of two
"stores" on the client side.
    +
  • Keystore - a repository to hold the client side key. +
  • Truststore - a repository to hold a chain of certificates + that can be used to validate the certificate + sent by the server to the client. +
+The server actually has a similar set of stores, but the client +need not be concerned with those. -HTTP.DEFLATE +

Initial Steps

-CUROPT\_DEFLATE\ -with value "deflate,gzip" +The first step is to obtain authorization from ESG. +Note that this information may evolve over time, and +may be out of date. +This discussion is in terms of BADC and NCSA. You will need +to substitute as necessary. +
    +
  1. Register at http://badc.nerc.ac.uk/register + to obtain access to badc and to obtain an openid, + which will looks something like: +
    https://ceda.ac.uk/openid/Firstname.Lastname
    +
  2. Ask BADC for access to whatever datasets are of interest. +

    +

  3. Obtain short term credentials at + http://grid.ncsa.illinois.edu/myproxy/MyProxyLogon/ + You will need to download and run the MyProxyLogon + program. + This will create a keyfile in, typically, the directory ".globus". + The keyfile will have a name similar to this: "x509up_u13615" + The other elements in ".globus" are certificates to use in + validating the certificate your client gets from the server. +

    +

  4. Obtain the program source ImportKey.java + from this location: http://www.agentbob.info/agentbob/79-AB.html + (read the whole page, it will help you understand the remaining steps). +
-HTTP.VERBOSE +

Building the KeyStore

+You will have to modify the keyfile in the previous step +and then create a keystore and install the key and a certificate. +The commands are these: +
+    openssl pkcs8 -topk8 -nocrypt -in x509up_u13615 -inform PEM -out key.der -outform DER
 
-CUROPT\_VERBOSE
+    openssl x509 -in x509up_u13615 -inform PEM -out cert.der -outform DER
 
-HTTP.TIMEOUT
-
-CUROPT\_TIMEOUT
-
-HTTP.USERAGENT
-
-CUROPT\_USERAGENT
-
-HTTP.COOKIEJAR
-
-CUROPT\_COOKIEJAR
-
-HTTP.COOKIE\_JAR
-
-CUROPT\_COOKIEJAR
-
-HTTP.PROXY\_SERVER
-
-CURLOPT\_PROXY,\
-CURLOPT\_PROXYPORT,\
-CURLOPT\_PROXYUSERPWD
-
-HTTP.SSL.CERTIFICATE
-
-CUROPT\_SSLCERT
-
-HTTP.SSL.KEY
-
-CUROPT\_SSLKEY
-
-HTTP.SSL.KEYPASSWORD
-
-CUROPT\_KEYPASSWORD
-
-HTTP.SSL.CAINFO
-
-CUROPT\_SSLCAINFO
-
-HTTP.SSL.CAPATH
-
-CUROPT\_SSLCAPATH
-
-HTTP.SSL.VERIFYPEER
-
-CUROPT\_SSL\_VERIFYPEER
-
-HTTP.CREDENTIALS.USERPASSWORD
-
-CUROPT\_USERPASSWORD
-
-HTTP.NETRC
-
-CURLOPT\_NETRC,CURLOPT\_NETRC\_FILE
-
-##### Appendix B. ESG Access in Detail {.break}
-
-It is possible to access Earth Systems Grid (ESG) datasets from ESG
-servers through the OC API using the techniques described in the section
-on [Client-Side Certificates](#CLIENTCERTS).
-
-In order to access ESG datasets, however, it is necessary to register as
-a user with ESG and to setup your environment so that proper
-authentication is established between an oc client program and the ESG
-data server. Specifically, it is necessary to use what is called
-"client-side keys" to enable this authentication. Normally, when a
-client accesses a server in a secure fashion (using "https"), the server
-provides an authentication certificate to the client. With client-side
-keys, the client must also provide a certificate to the server so that
-the server can know with whom it is communicating.
-
-The oc library uses the *curl* library and it is that underlying library
-that must be properly configured.
-
-### Terminology
-
-The key elements for client-side keys requires the constructions of two
-"stores" on the client side.
-
--   Keystore - a repository to hold the client side key.
--   Truststore - a repository to hold a chain of certificates that can
-    be used to validate the certificate sent by the server to the
-    client.
-
-The server actually has a similar set of stores, but the client need not
-be concerned with those.
-
-### Initial Steps
-
-The first step is to obtain authorization from ESG. Note that this
-information may evolve over time, and may be out of date. This
-discussion is in terms of BADC and NCSA. You will need to substitute as
-necessary.
-
-1.  Register at http://badc.nerc.ac.uk/register to obtain access to badc
-    and to obtain an openid, which will looks something like:
-
-        https://ceda.ac.uk/openid/Firstname.Lastname
-
-2.  Ask BADC for access to whatever datasets are of interest.
-3.  Obtain short term credentials at
-    http://grid.ncsa.illinois.edu/myproxy/MyProxyLogon/ You will need to
-    download and run the MyProxyLogon program. This will create a
-    keyfile in, typically, the directory ".globus". The keyfile will
-    have a name similar to this: "x509up\_u13615" The other elements in
-    ".globus" are certificates to use in validating the certificate your
-    client gets from the server.
-4.  Obtain the program source ImportKey.java from this location:
-    http://www.agentbob.info/agentbob/79-AB.html (read the whole page,
-    it will help you understand the remaining steps).
-
-### Building the KeyStore
-
-You will have to modify the keyfile in the previous step and then create
-a keystore and install the key and a certificate. The commands are
-these:
-
-        openssl pkcs8 -topk8 -nocrypt -in x509up_u13615 -inform PEM -out key.der -outform DER
-
-        openssl x509 -in x509up_u13615 -inform PEM -out cert.der -outform DER
-
-        java -classpath  -Dkeypassword="" -Dkeystore=./ key.der cert.der
-
-Note, the file names "key.der" and "cert.der" can be whatever you
-choose. It is probably best to leave the .der extension, though.
-
-### Building the TrustStore
+    java -classpath  -Dkeypassword="" -Dkeystore=./ key.der cert.der
+
+Note, the file names "key.der" and "cert.der" can be whatever you choose. +It is probably best to leave the .der extension, though. +

Building the TrustStore

Building the truststore is a bit tricky because as provided, the -certificates in ".globus" need some massaging. See the script below for -the details. The primary command is this, which is executed for every -certificate, c, in globus. It sticks the certificate into the file named -"truststore" +certificates in ".globus" need some massaging. See the script below +for the details. The primary command is this, which is executed for every +certificate, c, in globus. It sticks the certificate into the file +named "truststore" +
+  keytool -trustcacerts -storepass "password" -v -keystore "truststore"  -importcert -file "${c}"
+
- keytool -trustcacerts -storepass "password" -v -keystore "truststore" -importcert -file "${c}" +

Running the C Client

-### Running the C Client - -Refer to the section on [Client-Side Certificates](#CLIENTCERTS). The -keys specified there must be set in the rc file to support ESG access. - -- HTTP.COOKIEJAR=\~/.dods\_cookies -- HTTP.NETRC=\~/.netrc -- HTTP.SSL.CERTIFICATE=\~/esgkeystore -- HTTP.SSL.KEY=\~/esgkeystore -- HTTP.SSL.CAPATH=\~/.globus -- HTTP.SSL.VALIDATE=1 - -Of course, the file paths above are suggestions only; you can modify as -needed. The HTTP.SSL.CERTIFICATE and HTTP.SSL.KEY entries should have -same value, which is the file path for the certificate produced by -MyProxyLogon. The HTTP.SSL.CAPATH entry should be the path to the -"certificates" directory produced by MyProxyLogon. - -As noted, also uses re-direction based authentication. So, when it -receives an initial connection from a client, it redirects to a separate -authentication server. When that server has authenticated the client, it -redirects back to the original url to complete the request. - -### Script for creating Stores +Refer to the section on Client-Side Certificates. +The keys specified there must be set in the rc file to support +ESG access. +
    +
  • HTTP.COOKIEJAR=~/.dods_cookies +
  • HTTP.NETRC=~/.netrc +
  • HTTP.SSL.CERTIFICATE=~/esgkeystore +
  • HTTP.SSL.KEY=~/esgkeystore +
  • HTTP.SSL.CAPATH=~/.globus +
  • HTTP.SSL.VALIDATE=1 +
+Of course, the file paths above are suggestions only; +you can modify as needed. +The HTTP.SSL.CERTIFICATE and HTTP.SSL.KEY +entries should have same value, which is the file path for the +certificate produced by MyProxyLogon. The HTTP.SSL.CAPATH entry +should be the path to the "certificates" directory produced by +MyProxyLogon. +

As noted, ESG also uses re-direction based authentication. So, when it
receives an initial connection from a client, it redirects to a
separate authentication server. When that server has authenticated the
client, it redirects back to the original url to complete the request.

Script for creating Stores

The following script shows in detail how to actually construct the key -and trust stores. It is specific to the format of the globus file as it -was when ESG support was first added. It may have changed since then, in -which case, you will need to seek some help in fixing this script. It -would help if you communicated what you changed to the author so this -document can be updated. +and trust stores. It is specific to the format of the globus file +as it was when ESG support was first added. It may have changed +since then, in which case, you will need to seek some help +in fixing this script. It would help if you communicated +what you changed to the author so this document can be updated. +
+#!/bin/sh -x
+KEYSTORE="esgkeystore"
+TRUSTSTORE="esgtruststore"
+GLOBUS="globus"
+TRUSTROOT="certificates"
+CERT="x509up_u13615"
+TRUSTROOTPATH="$GLOBUS/$TRUSTROOT"
+CERTFILE="$GLOBUS/$CERT"
+PWD="password"
 
-    #!/bin/sh -x
-    KEYSTORE="esgkeystore"
-    TRUSTSTORE="esgtruststore"
-    GLOBUS="globus"
-    TRUSTROOT="certificates"
-    CERT="x509up_u13615"
-    TRUSTROOTPATH="$GLOBUS/$TRUSTROOT"
-    CERTFILE="$GLOBUS/$CERT"
-    PWD="password"
+D="-Dglobus=$GLOBUS"
+CCP="bcprov-jdk16-145.jar" 
+CP="./build:${CCP}" 
+JAR="myproxy.jar"
 
-    D="-Dglobus=$GLOBUS"
-    CCP="bcprov-jdk16-145.jar" 
-    CP="./build:${CCP}" 
-    JAR="myproxy.jar"
+# Initialize needed directories
+rm -fr build
+mkdir build
+rm -fr $GLOBUS
+mkdir $GLOBUS
+rm -f $KEYSTORE
+rm -f $TRUSTSTORE
 
-    # Initialize needed directories
-    rm -fr build
-    mkdir build
-    rm -fr $GLOBUS
-    mkdir $GLOBUS
-    rm -f $KEYSTORE
-    rm -f $TRUSTSTORE
+# Compile MyProxyCmd and ImportKey
+javac -d ./build -classpath "$CCP" *.java
+javac -d ./build ImportKey.java
 
-    # Compile MyProxyCmd and ImportKey
-    javac -d ./build -classpath "$CCP" *.java
-    javac -d ./build ImportKey.java
+# Execute MyProxyCmd
+java -cp "$CP myproxy.MyProxyCmd
 
-    # Execute MyProxyCmd
-    java -cp "$CP myproxy.MyProxyCmd
+# Build the keystore
+openssl pkcs8 -topk8 -nocrypt -in $CERTFILE -inform PEM -out key.der -outform DER
+openssl x509 -in $CERTFILE -inform PEM -out cert.der -outform DER
+java -Dkeypassword=$PWD -Dkeystore=./${KEYSTORE} -cp ./build ImportKey key.der cert.der
 
-    # Build the keystore
-    openssl pkcs8 -topk8 -nocrypt -in $CERTFILE -inform PEM -out key.der -outform DER
-    openssl x509 -in $CERTFILE -inform PEM -out cert.der -outform DER
-    java -Dkeypassword=$PWD -Dkeystore=./${KEYSTORE} -cp ./build ImportKey key.der cert.der
+# Clean up the certificates in the globus directory
+for c in ${TRUSTROOTPATH}/*.0 ; do
+    alias=`basename $c .0`
+    sed -e '0,/---/d' <$c >/tmp/${alias}
+    echo "-----BEGIN CERTIFICATE-----" >$c       
+    cat /tmp/${alias} >>$c
+done
+ 
+# Build the truststore
+for c in ${TRUSTROOTPATH}/*.0 ; do
+    alias=`basename $c .0`
+    echo "adding: $TRUSTROOTPATH/${c}"
+    echo "alias: $alias"
+    yes | keytool -trustcacerts -storepass "$PWD" -v -keystore ./$TRUSTSTORE -alias $alias -importcert -file "${c}"
+done
+exit
+
- # Clean up the certificates in the globus directory - for c in ${TRUSTROOTPATH}/*.0 ; do - alias=`basename $c .0` - sed -e '0,/---/d' <$c >/tmp/${alias} - echo "-----BEGIN CERTIFICATE-----" >$c - cat /tmp/${alias} >>$c - done - - # Build the truststore - for c in ${TRUSTROOTPATH}/*.0 ; do - alias=`basename $c .0` - echo "adding: $TRUSTROOTPATH/${c}" - echo "alias: $alias" - yes | keytool -trustcacerts -storepass "$PWD" -v -keystore ./$TRUSTSTORE -alias $alias -importcert -file "${c}" - done - exit + + diff --git a/libsrc/attr.c b/libsrc/attr.c index 3c1c66833..22a961d43 100644 --- a/libsrc/attr.c +++ b/libsrc/attr.c @@ -1,4 +1,6 @@ +#line 5 "attr.m4" /* Do not edit this file. It is produced from the corresponding .m4 source */ +#line 7 /* * Copyright 1996, University Corporation for Atmospheric Research * See netcdf/COPYRIGHT file for copying and redistribution conditions. @@ -611,367 +613,704 @@ NC3_del_att(int ncid, int varid, const char *uname) return NC_NOERR; } +#line 674 static int +#line 675 ncx_pad_putn_Iuchar(void **xpp, size_t nelems, const uchar *tp, nc_type type) +#line 675 { +#line 675 switch(type) { +#line 675 case NC_CHAR: +#line 675 return NC_ECHAR; +#line 675 case NC_BYTE: +#line 675 return ncx_pad_putn_schar_uchar(xpp, nelems, tp); +#line 675 case NC_SHORT: +#line 675 return ncx_pad_putn_short_uchar(xpp, nelems, tp); +#line 675 case NC_INT: +#line 675 return ncx_putn_int_uchar(xpp, nelems, tp); +#line 675 case NC_FLOAT: +#line 675 return ncx_putn_float_uchar(xpp, nelems, tp); +#line 675 case NC_DOUBLE: +#line 675 return ncx_putn_double_uchar(xpp, nelems, tp); +#line 675 default: +#line 675 assert("ncx_pad_putn_Iuchar invalid type" == 0); +#line 675 } +#line 675 return NC_EBADTYPE; +#line 675 } +#line 675 static int +#line 676 ncx_pad_getn_Iuchar(const void **xpp, size_t nelems, uchar *tp, nc_type type) +#line 676 { +#line 676 switch(type) { +#line 676 case NC_CHAR: +#line 676 return NC_ECHAR; +#line 676 case NC_BYTE: +#line 676 return ncx_pad_getn_schar_uchar(xpp, nelems, tp); +#line 676 case NC_SHORT: +#line 676 return ncx_pad_getn_short_uchar(xpp, nelems, tp); +#line 676 case NC_INT: +#line 676 return ncx_getn_int_uchar(xpp, nelems, tp); +#line 676 case NC_FLOAT: +#line 676 return ncx_getn_float_uchar(xpp, nelems, tp); +#line 676 case NC_DOUBLE: +#line 676 return ncx_getn_double_uchar(xpp, nelems, tp); +#line 676 default: +#line 676 assert("ncx_pad_getn_Iuchar invalid type" == 0); +#line 676 } +#line 676 return NC_EBADTYPE; +#line 676 } +#line 676 static int +#line 678 ncx_pad_putn_Ischar(void **xpp, size_t nelems, const schar *tp, nc_type type) +#line 678 { +#line 678 switch(type) { +#line 678 case NC_CHAR: +#line 678 return NC_ECHAR; +#line 678 case NC_BYTE: +#line 678 return ncx_pad_putn_schar_schar(xpp, nelems, tp); +#line 678 case NC_SHORT: +#line 678 return ncx_pad_putn_short_schar(xpp, nelems, tp); +#line 678 case NC_INT: +#line 678 return ncx_putn_int_schar(xpp, nelems, tp); +#line 678 case NC_FLOAT: +#line 678 return ncx_putn_float_schar(xpp, nelems, tp); +#line 678 case NC_DOUBLE: +#line 678 return ncx_putn_double_schar(xpp, nelems, tp); +#line 678 default: +#line 678 assert("ncx_pad_putn_Ischar invalid type" == 0); +#line 678 } +#line 678 return NC_EBADTYPE; +#line 678 } +#line 678 static int +#line 679 ncx_pad_getn_Ischar(const void **xpp, size_t nelems, schar *tp, nc_type type) +#line 679 { +#line 679 switch(type) { +#line 679 case NC_CHAR: +#line 679 return NC_ECHAR; +#line 679 case NC_BYTE: +#line 679 return ncx_pad_getn_schar_schar(xpp, nelems, tp); 
+#line 679 case NC_SHORT: +#line 679 return ncx_pad_getn_short_schar(xpp, nelems, tp); +#line 679 case NC_INT: +#line 679 return ncx_getn_int_schar(xpp, nelems, tp); +#line 679 case NC_FLOAT: +#line 679 return ncx_getn_float_schar(xpp, nelems, tp); +#line 679 case NC_DOUBLE: +#line 679 return ncx_getn_double_schar(xpp, nelems, tp); +#line 679 default: +#line 679 assert("ncx_pad_getn_Ischar invalid type" == 0); +#line 679 } +#line 679 return NC_EBADTYPE; +#line 679 } +#line 679 static int +#line 681 ncx_pad_putn_Ishort(void **xpp, size_t nelems, const short *tp, nc_type type) +#line 681 { +#line 681 switch(type) { +#line 681 case NC_CHAR: +#line 681 return NC_ECHAR; +#line 681 case NC_BYTE: +#line 681 return ncx_pad_putn_schar_short(xpp, nelems, tp); +#line 681 case NC_SHORT: +#line 681 return ncx_pad_putn_short_short(xpp, nelems, tp); +#line 681 case NC_INT: +#line 681 return ncx_putn_int_short(xpp, nelems, tp); +#line 681 case NC_FLOAT: +#line 681 return ncx_putn_float_short(xpp, nelems, tp); +#line 681 case NC_DOUBLE: +#line 681 return ncx_putn_double_short(xpp, nelems, tp); +#line 681 default: +#line 681 assert("ncx_pad_putn_Ishort invalid type" == 0); +#line 681 } +#line 681 return NC_EBADTYPE; +#line 681 } +#line 681 static int +#line 682 ncx_pad_getn_Ishort(const void **xpp, size_t nelems, short *tp, nc_type type) +#line 682 { +#line 682 switch(type) { +#line 682 case NC_CHAR: +#line 682 return NC_ECHAR; +#line 682 case NC_BYTE: +#line 682 return ncx_pad_getn_schar_short(xpp, nelems, tp); +#line 682 case NC_SHORT: +#line 682 return ncx_pad_getn_short_short(xpp, nelems, tp); +#line 682 case NC_INT: +#line 682 return ncx_getn_int_short(xpp, nelems, tp); +#line 682 case NC_FLOAT: +#line 682 return ncx_getn_float_short(xpp, nelems, tp); +#line 682 case NC_DOUBLE: +#line 682 return ncx_getn_double_short(xpp, nelems, tp); +#line 682 default: +#line 682 assert("ncx_pad_getn_Ishort invalid type" == 0); +#line 682 } +#line 682 return NC_EBADTYPE; +#line 682 } +#line 682 static int +#line 684 ncx_pad_putn_Iint(void **xpp, size_t nelems, const int *tp, nc_type type) +#line 684 { +#line 684 switch(type) { +#line 684 case NC_CHAR: +#line 684 return NC_ECHAR; +#line 684 case NC_BYTE: +#line 684 return ncx_pad_putn_schar_int(xpp, nelems, tp); +#line 684 case NC_SHORT: +#line 684 return ncx_pad_putn_short_int(xpp, nelems, tp); +#line 684 case NC_INT: +#line 684 return ncx_putn_int_int(xpp, nelems, tp); +#line 684 case NC_FLOAT: +#line 684 return ncx_putn_float_int(xpp, nelems, tp); +#line 684 case NC_DOUBLE: +#line 684 return ncx_putn_double_int(xpp, nelems, tp); +#line 684 default: +#line 684 assert("ncx_pad_putn_Iint invalid type" == 0); +#line 684 } +#line 684 return NC_EBADTYPE; +#line 684 } +#line 684 static int +#line 685 ncx_pad_getn_Iint(const void **xpp, size_t nelems, int *tp, nc_type type) +#line 685 { +#line 685 switch(type) { +#line 685 case NC_CHAR: +#line 685 return NC_ECHAR; +#line 685 case NC_BYTE: +#line 685 return ncx_pad_getn_schar_int(xpp, nelems, tp); +#line 685 case NC_SHORT: +#line 685 return ncx_pad_getn_short_int(xpp, nelems, tp); +#line 685 case NC_INT: +#line 685 return ncx_getn_int_int(xpp, nelems, tp); +#line 685 case NC_FLOAT: +#line 685 return ncx_getn_float_int(xpp, nelems, tp); +#line 685 case NC_DOUBLE: +#line 685 return ncx_getn_double_int(xpp, nelems, tp); +#line 685 default: +#line 685 assert("ncx_pad_getn_Iint invalid type" == 0); +#line 685 } +#line 685 return NC_EBADTYPE; +#line 685 } +#line 685 static int +#line 687 ncx_pad_putn_Ifloat(void **xpp, size_t 
nelems, const float *tp, nc_type type) +#line 687 { +#line 687 switch(type) { +#line 687 case NC_CHAR: +#line 687 return NC_ECHAR; +#line 687 case NC_BYTE: +#line 687 return ncx_pad_putn_schar_float(xpp, nelems, tp); +#line 687 case NC_SHORT: +#line 687 return ncx_pad_putn_short_float(xpp, nelems, tp); +#line 687 case NC_INT: +#line 687 return ncx_putn_int_float(xpp, nelems, tp); +#line 687 case NC_FLOAT: +#line 687 return ncx_putn_float_float(xpp, nelems, tp); +#line 687 case NC_DOUBLE: +#line 687 return ncx_putn_double_float(xpp, nelems, tp); +#line 687 default: +#line 687 assert("ncx_pad_putn_Ifloat invalid type" == 0); +#line 687 } +#line 687 return NC_EBADTYPE; +#line 687 } +#line 687 static int +#line 688 ncx_pad_getn_Ifloat(const void **xpp, size_t nelems, float *tp, nc_type type) +#line 688 { +#line 688 switch(type) { +#line 688 case NC_CHAR: +#line 688 return NC_ECHAR; +#line 688 case NC_BYTE: +#line 688 return ncx_pad_getn_schar_float(xpp, nelems, tp); +#line 688 case NC_SHORT: +#line 688 return ncx_pad_getn_short_float(xpp, nelems, tp); +#line 688 case NC_INT: +#line 688 return ncx_getn_int_float(xpp, nelems, tp); +#line 688 case NC_FLOAT: +#line 688 return ncx_getn_float_float(xpp, nelems, tp); +#line 688 case NC_DOUBLE: +#line 688 return ncx_getn_double_float(xpp, nelems, tp); +#line 688 default: +#line 688 assert("ncx_pad_getn_Ifloat invalid type" == 0); +#line 688 } +#line 688 return NC_EBADTYPE; +#line 688 } +#line 688 static int +#line 690 ncx_pad_putn_Idouble(void **xpp, size_t nelems, const double *tp, nc_type type) +#line 690 { +#line 690 switch(type) { +#line 690 case NC_CHAR: +#line 690 return NC_ECHAR; +#line 690 case NC_BYTE: +#line 690 return ncx_pad_putn_schar_double(xpp, nelems, tp); +#line 690 case NC_SHORT: +#line 690 return ncx_pad_putn_short_double(xpp, nelems, tp); +#line 690 case NC_INT: +#line 690 return ncx_putn_int_double(xpp, nelems, tp); +#line 690 case NC_FLOAT: +#line 690 return ncx_putn_float_double(xpp, nelems, tp); +#line 690 case NC_DOUBLE: +#line 690 return ncx_putn_double_double(xpp, nelems, tp); +#line 690 default: +#line 690 assert("ncx_pad_putn_Idouble invalid type" == 0); +#line 690 } +#line 690 return NC_EBADTYPE; +#line 690 } +#line 690 static int +#line 691 ncx_pad_getn_Idouble(const void **xpp, size_t nelems, double *tp, nc_type type) +#line 691 { +#line 691 switch(type) { +#line 691 case NC_CHAR: +#line 691 return NC_ECHAR; +#line 691 case NC_BYTE: +#line 691 return ncx_pad_getn_schar_double(xpp, nelems, tp); +#line 691 case NC_SHORT: +#line 691 return ncx_pad_getn_short_double(xpp, nelems, tp); +#line 691 case NC_INT: +#line 691 return ncx_getn_int_double(xpp, nelems, tp); +#line 691 case NC_FLOAT: +#line 691 return ncx_getn_float_double(xpp, nelems, tp); +#line 691 case NC_DOUBLE: +#line 691 return ncx_getn_double_double(xpp, nelems, tp); +#line 691 default: +#line 691 assert("ncx_pad_getn_Idouble invalid type" == 0); +#line 691 } +#line 691 return NC_EBADTYPE; +#line 691 } +#line 691 #ifdef IGNORE static int +#line 694 ncx_pad_putn_Ilong(void **xpp, size_t nelems, const long *tp, nc_type type) +#line 694 { +#line 694 switch(type) { +#line 694 case NC_CHAR: +#line 694 return NC_ECHAR; +#line 694 case NC_BYTE: +#line 694 return ncx_pad_putn_schar_long(xpp, nelems, tp); +#line 694 case NC_SHORT: +#line 694 return ncx_pad_putn_short_long(xpp, nelems, tp); +#line 694 case NC_INT: +#line 694 return ncx_putn_int_long(xpp, nelems, tp); +#line 694 case NC_FLOAT: +#line 694 return ncx_putn_float_long(xpp, nelems, tp); +#line 694 case 
NC_DOUBLE: +#line 694 return ncx_putn_double_long(xpp, nelems, tp); +#line 694 default: +#line 694 assert("ncx_pad_putn_Ilong invalid type" == 0); +#line 694 } +#line 694 return NC_EBADTYPE; +#line 694 } +#line 694 static int +#line 695 ncx_pad_getn_Ilong(const void **xpp, size_t nelems, long *tp, nc_type type) +#line 695 { +#line 695 switch(type) { +#line 695 case NC_CHAR: +#line 695 return NC_ECHAR; +#line 695 case NC_BYTE: +#line 695 return ncx_pad_getn_schar_long(xpp, nelems, tp); +#line 695 case NC_SHORT: +#line 695 return ncx_pad_getn_short_long(xpp, nelems, tp); +#line 695 case NC_INT: +#line 695 return ncx_getn_int_long(xpp, nelems, tp); +#line 695 case NC_FLOAT: +#line 695 return ncx_getn_float_long(xpp, nelems, tp); +#line 695 case NC_DOUBLE: +#line 695 return ncx_getn_double_long(xpp, nelems, tp); +#line 695 default: +#line 695 assert("ncx_pad_getn_Ilong invalid type" == 0); +#line 695 } +#line 695 return NC_EBADTYPE; +#line 695 } +#line 695 #endif static int +#line 698 ncx_pad_putn_Ilonglong(void **xpp, size_t nelems, const longlong *tp, nc_type type) +#line 698 { +#line 698 switch(type) { +#line 698 case NC_CHAR: +#line 698 return NC_ECHAR; +#line 698 case NC_BYTE: +#line 698 return ncx_pad_putn_schar_longlong(xpp, nelems, tp); +#line 698 case NC_SHORT: +#line 698 return ncx_pad_putn_short_longlong(xpp, nelems, tp); +#line 698 case NC_INT: +#line 698 return ncx_putn_int_longlong(xpp, nelems, tp); +#line 698 case NC_FLOAT: +#line 698 return ncx_putn_float_longlong(xpp, nelems, tp); +#line 698 case NC_DOUBLE: +#line 698 return ncx_putn_double_longlong(xpp, nelems, tp); +#line 698 default: +#line 698 assert("ncx_pad_putn_Ilonglong invalid type" == 0); +#line 698 } +#line 698 return NC_EBADTYPE; +#line 698 } +#line 698 static int +#line 699 ncx_pad_getn_Ilonglong(const void **xpp, size_t nelems, longlong *tp, nc_type type) +#line 699 { +#line 699 switch(type) { +#line 699 case NC_CHAR: +#line 699 return NC_ECHAR; +#line 699 case NC_BYTE: +#line 699 return ncx_pad_getn_schar_longlong(xpp, nelems, tp); +#line 699 case NC_SHORT: +#line 699 return ncx_pad_getn_short_longlong(xpp, nelems, tp); +#line 699 case NC_INT: +#line 699 return ncx_getn_int_longlong(xpp, nelems, tp); +#line 699 case NC_FLOAT: +#line 699 return ncx_getn_float_longlong(xpp, nelems, tp); +#line 699 case NC_DOUBLE: +#line 699 return ncx_getn_double_longlong(xpp, nelems, tp); +#line 699 default: +#line 699 assert("ncx_pad_getn_Ilonglong invalid type" == 0); +#line 699 } +#line 699 return NC_EBADTYPE; +#line 699 } +#line 699 diff --git a/ncdap_test/testauth.old b/ncdap_test/testauth.old new file mode 100755 index 000000000..c64d1e6da --- /dev/null +++ b/ncdap_test/testauth.old @@ -0,0 +1,213 @@ +#!/bin/sh + +#NOEMBED=1 +#NOLOCAL=1 +#NOHOME=1 +#NOENV=1 + +#DBG=1 +#SHOW=1 + +# Choose at most 1 +#GDB=1 +#VG=1 + +NFL=1 + +WD=`pwd` + +NETRCFILE=$WD/test_auth_netrc +# This is the control variable +NETRC=$NETRCFILE + +COOKIES="${WD}/test_auth_cookies" + +RC=.daprc + +NCLOGFILE=stderr +if test "x$DBG" = x1 ; then +SHOW=1 +fi + +# Major parameters + +BASICCOMBO="tiggeUser:tigge" +URLSERVER="remotetest.unidata.ucar.edu" +URLPATH="thredds/dodsC/restrict/testData.nc" + +# See if we need to override +if test "x$URS" != "x" ; then +#https://54.86.135.31/opendap/data/nc/fnoc1.nc.dds +URLSERVER="54.86.135.31" +URLPATH="opendap/data/nc/fnoc1.nc" +BASICCOMBO="$URS" +NOEMBED=1 +NETRC=$NETRCFILE +else +NETRC= +fi + +if test "x$DBG" = x1 ; then +URLPATH="${URLPATH}#log&show=fetch" +fi + +# Split the combo +BASICUSER=`echo $BASICCOMBO | 
cut -d: -f1` +BASICPWD=`echo $BASICCOMBO | cut -d: -f2` + +NCDUMP= +for o in ./.libs/ncdump.exe ./.libs/ncdump ./ncdump.exe ./ncdump ; do + if test -f $o ; then + NCDUMP=$o + break; + fi +done +if test "x$NCDUMP" = x ; then +echo "no ncdump" +exit 1 +fi + +if test "x$SHOW" = x ; then +OUTPUT="> /dev/null" +else +OUTPUT= +fi + +if test "x$TEMP" = x ; then + TEMP="/tmp" +fi +TEMP=`echo "$TEMP" | sed -e "s|/$||"` + +LOCALRC=./$RC +HOMERC=${HOME}/$RC +HOMERC=`echo "$HOMERC" | sed -e "s|//|/|g"` +ENVRC="$TEMP/$RC" + +cd `pwd` +builddir=`pwd` +# Hack for CYGWIN +cd $srcdir +srcdir=`pwd` +cd ${builddir} + +function createrc { +if test "x$1" != x ; then +RCP=$1 + +rm -f $RCP +echo "Creating rc file $RCP" +if test "x${DBG}" != x ; then +echo "HTTP.VERBOSE=1" >>$RCP +fi +echo "HTTP.COOKIEJAR=${COOKIES}" >>$RCP +if test "x${URS}" = x ; then +echo "HTTP.CREDENTIALS.USERPASSWORD=${BASICCOMBO}" >>$RCP +fi +if test "x${NETRC}" != x && test "x$NFL" = x ; then +echo "HTTP.NETRC=${NETRC}" >>$RCP +fi +fi +} + +function createnetrc { +if test "x$1" != x ; then +rm -f $1 +echo "Creating netrc file $1" +echo "machine uat.urs.earthdata.nasa.gov login $BASICUSER password $BASICPWD" >>$1 +#echo "machine 54.86.135.31 login $BASICUSER password $BASICPWD" >>$1 +fi +} + +# Forcibly remove all and do not restore with save +function reset { + for f in ./$RC $HOMERC $ENVRC $COOKIES $NETRC ; do + rm -f ${f} + done +} + +# Restore from .save files +function restore { + for f in ./$RC $HOMERC $ENVRC $COOKIES $NETRC ; do + rm -f ${f} + if test -f ${f}.save ; then + echo "restoring old ${f}" + cp ${f}.save ${f} + fi + done +} + +function save { + for f in ./$RC $HOMERC $ENVRC $COOKIES $NETRC ; do + if test -f $f ; then + if test -f ${f}.save ; then + ignore=1 + else + echo "saving $f" + cp ${f} ${f}.save + fi + fi + done +} + +export LD_LIBRARY_PATH="../liblib/.libs:/usr/local/lib:/usr/lib64:$LD_LIBRARY_PATH" + +if test "x$GDB" = x1 ; then +NCDUMP="gdb --args $NCDUMP" +fi +if test "x$VG" = x1 ; then +NCDUMP="valgrind --leak-check=full $NCDUMP" +fi + +# Initialize +save +reset + +if test "x$NOEMBED" != x1 ; then +echo "***Testing rc file with embedded user:pwd" +URL="https://${BASICCOMBO}@${URLSERVER}/$URLPATH" +# Invoke ncdump to extract a file from the URL +echo "command: ${NCDUMP} -h $URL ${OUTPUT}" +${NCDUMP} -h "$URL" ${OUTPUT} +fi + +URL="https://${URLSERVER}/$URLPATH" +if test "x$NOLOCAL" != x1 ; then +echo "***Testing rc file in local directory" +# Create the rc file and (optional) netrc file in ./ +reset +createnetrc $NETRC +createrc $LOCALRC + +# Invoke ncdump to extract a file the URL +echo "command: ${NCDUMP} -h $URL ${OUTPUT}" +${NCDUMP} -h "$URL" ${OUTPUT} +fi + +if test "x$NOHOME" != x1 ; then +echo "***Testing rc file in home directory" +# Create the rc file and (optional) netrc fil in ./ +reset +createnetrc $NETRC +createrc $HOMERC + +# Invoke ncdump to extract a file the URL +echo "command: ${NCDUMP} -h $URL ${OUTPUT}" +${NCDUMP} -h "$URL" ${OUTPUT} +fi + +if test "x$NOENV" != x1 ; then +echo "*** Testing rc file from env variable" +# Create the rc file and (optional) netrc file +reset +createnetrc $NETRC +export NCRCFILE=$ENVRC +createrc $NCRCFILE + +# Invoke ncdump to extract a file the URL +echo "command: ${NCDUMP} -h $URL ${OUTPUT}" +${NCDUMP} -h "$URL" ${OUTPUT} +fi + +set +x +#restore + diff --git a/ncdap_test/testauth.sh b/ncdap_test/testauth.sh index c64d1e6da..5f81a7048 100755 --- a/ncdap_test/testauth.sh +++ b/ncdap_test/testauth.sh @@ -1,12 +1,16 @@ #!/bin/sh -#NOEMBED=1 -#NOLOCAL=1 -#NOHOME=1 
-#NOENV=1 +RCEMBED=1 +RCLOCAL=1 +RCHOME=1 +RCENV=1 +RCPREC=1 + +# Not currently testable in netcdf +#RCSPEC=1 -#DBG=1 #SHOW=1 +#DBG=1 # Choose at most 1 #GDB=1 @@ -17,14 +21,14 @@ NFL=1 WD=`pwd` NETRCFILE=$WD/test_auth_netrc -# This is the control variable -NETRC=$NETRCFILE +# This is the control variable; set when needed +unset NETRC COOKIES="${WD}/test_auth_cookies" RC=.daprc -NCLOGFILE=stderr +OCLOGFILE=stderr if test "x$DBG" = x1 ; then SHOW=1 fi @@ -32,8 +36,11 @@ fi # Major parameters BASICCOMBO="tiggeUser:tigge" +BADCOMBO="tiggeUser:xxxxx" URLSERVER="remotetest.unidata.ucar.edu" +#http://remotetest.unidata.ucar.edu/thredds/dodsC/restrict/testData.nc.html URLPATH="thredds/dodsC/restrict/testData.nc" +PROTO=http # See if we need to override if test "x$URS" != "x" ; then @@ -41,10 +48,9 @@ if test "x$URS" != "x" ; then URLSERVER="54.86.135.31" URLPATH="opendap/data/nc/fnoc1.nc" BASICCOMBO="$URS" -NOEMBED=1 +RCEMBED=0 NETRC=$NETRCFILE -else -NETRC= +PROTO=https fi if test "x$DBG" = x1 ; then @@ -55,23 +61,30 @@ fi BASICUSER=`echo $BASICCOMBO | cut -d: -f1` BASICPWD=`echo $BASICCOMBO | cut -d: -f2` +xf() { case $- in *[x]*) set +x; XP=1;; *) XP=0;; esac } +xo() { case $XP in 1) set -x;; *) set +x;; esac } + +xf NCDUMP= -for o in ./.libs/ncdump.exe ./.libs/ncdump ./ncdump.exe ./ncdump ; do - if test -f $o ; then - NCDUMP=$o - break; - fi +for d in "$WD/../ncdump" "$WD" ; do + for o in $d/.libs/ncdump.exe $d/.libs/ncdump $d/ncdump.exe $d/ncdump ; do + if test -f $o ; then + NCDUMP=$o + break; + fi + done + if test "x$NCDUMP" != x; then break; fi done +xo + if test "x$NCDUMP" = x ; then echo "no ncdump" exit 1 +else +echo "NCDUMP=$NCDUMP" fi -if test "x$SHOW" = x ; then -OUTPUT="> /dev/null" -else -OUTPUT= -fi +OUTPUT="./.output" if test "x$TEMP" = x ; then TEMP="/tmp" @@ -81,7 +94,8 @@ TEMP=`echo "$TEMP" | sed -e "s|/$||"` LOCALRC=./$RC HOMERC=${HOME}/$RC HOMERC=`echo "$HOMERC" | sed -e "s|//|/|g"` -ENVRC="$TEMP/$RC" +SPECRC="$TEMP/temprc" +ENVRC="$WD/envrc" cd `pwd` builddir=`pwd` @@ -91,44 +105,91 @@ srcdir=`pwd` cd ${builddir} function createrc { -if test "x$1" != x ; then -RCP=$1 - -rm -f $RCP -echo "Creating rc file $RCP" -if test "x${DBG}" != x ; then -echo "HTTP.VERBOSE=1" >>$RCP -fi -echo "HTTP.COOKIEJAR=${COOKIES}" >>$RCP -if test "x${URS}" = x ; then -echo "HTTP.CREDENTIALS.USERPASSWORD=${BASICCOMBO}" >>$RCP -fi -if test "x${NETRC}" != x && test "x$NFL" = x ; then -echo "HTTP.NETRC=${NETRC}" >>$RCP -fi -fi + xf + RCP="$1" ; shift + unset NOPWD + unset BADPWD + while [[ $# > 0 ]] ; do + case "$1" in + nopwd) NOPWD=1 ;; + badpwd) BADPWD=1 ;; + *) ;; + esac + shift + done + xo + if test "x$RCP" != x ; then + rm -f $RCP + echo "Creating rc file $RCP" + else + echo "createrc: no rc specified" + exit 1 + fi + if test "x${DBG}" != x ; then + echo "HTTP.VERBOSE=1" >>$RCP + fi + echo "HTTP.COOKIEJAR=${COOKIES}" >>$RCP + if test "x${URS}" = x ; then + if test "x${NOPWD}" = x ; then + if test "x${BADPWD}" = x ; then + echo "HTTP.CREDENTIALS.USERPASSWORD=${BASICCOMBO}" >>$RCP + else + echo "HTTP.CREDENTIALS.USERPASSWORD=${BADCOMBO}" >>$RCP + fi + fi + fi + if test "x${NETRC}" != x && test "x$NFL" = x ; then + echo "HTTP.NETRC=${NETRC}" >>$RCP + fi } function createnetrc { -if test "x$1" != x ; then -rm -f $1 -echo "Creating netrc file $1" -echo "machine uat.urs.earthdata.nasa.gov login $BASICUSER password $BASICPWD" >>$1 -#echo "machine 54.86.135.31 login $BASICUSER password $BASICPWD" >>$1 -fi + xf + NCP="$1" ; shift + unset NOPWD + unset BADPWD + while [[ $# > 0 ]] ; do + case "$1" in + 
nopwd) NOPWD=1 ;; + badpwd) BADPWD=1 ;; + *) ;; + esac + shift + done + xo + if test "x$NCP" != x ; then + rm -f $NCP + echo "Creating netrc file $NCP" + else + echo "createnetrc: no rc specified" + exit 1 + fi + if test "x$URS" != x ; then + echo "machine uat.urs.earthdata.nasa.gov login $BASICUSER password $BASICPWD" >>$NCP + #echo "machine 54.86.135.31 login $BASICUSER password $BASICPWD" >>$1 + else + echo -n "${PROTO}://$URLSERVER/$URLPATH" >>$NCP + if test "x$NOPWD" = x ; then + if test "x$BADPWD" = x ; then + echo -n " login $BASICUSER password $BASICPWD" >>$NCP + else + echo -n " login $BASICUSER password xxxxxx" >>$NCP + fi + fi + echo "" >>$NCP + fi } -# Forcibly remove all and do not restore with save function reset { - for f in ./$RC $HOMERC $ENVRC $COOKIES $NETRC ; do - rm -f ${f} - done + for f in ./$RC $HOME/$RC $SPECRC $ENVRC $COOKIES $NETRC $OUTPUT ; do + rm -f ${f} + done + unset DAPRCFILE } -# Restore from .save files function restore { - for f in ./$RC $HOMERC $ENVRC $COOKIES $NETRC ; do - rm -f ${f} + reset + for f in ./$RC $HOME/$RC $SPECRC $ENVRC $COOKIES $NETRC ; do if test -f ${f}.save ; then echo "restoring old ${f}" cp ${f}.save ${f} @@ -137,7 +198,7 @@ function restore { } function save { - for f in ./$RC $HOMERC $ENVRC $COOKIES $NETRC ; do + for f in ./$RC $HOME/$RC $SPECRC $ENVRC $COOKIES $NETRC ; do if test -f $f ; then if test -f ${f}.save ; then ignore=1 @@ -149,65 +210,118 @@ function save { done } -export LD_LIBRARY_PATH="../liblib/.libs:/usr/local/lib:/usr/lib64:$LD_LIBRARY_PATH" +function show { + if test "x$SHOW" = x1 ; then cat $OUTPUT; fi + if test "x$OUTPUT" != "x"; then rm -f $OUTPUT; fi +} + +# Assemble the ncdump command +if test "x$DBG" = x1; then +NCDUMP="$NCDUMP -D1" +fi if test "x$GDB" = x1 ; then -NCDUMP="gdb --args $NCDUMP" + NCDUMP="gdb --args $NCDUMP" fi if test "x$VG" = x1 ; then NCDUMP="valgrind --leak-check=full $NCDUMP" fi # Initialize +xf save reset +xo -if test "x$NOEMBED" != x1 ; then -echo "***Testing rc file with embedded user:pwd" -URL="https://${BASICCOMBO}@${URLSERVER}/$URLPATH" -# Invoke ncdump to extract a file from the URL -echo "command: ${NCDUMP} -h $URL ${OUTPUT}" -${NCDUMP} -h "$URL" ${OUTPUT} +if test "x$RCEMBED" = x1 ; then + echo "***Testing rc file with embedded user:pwd" + URL="${PROTO}://${BASICCOMBO}@${URLSERVER}/$URLPATH" + unset NETRC + # Invoke ncdump to extract a file the URL + echo "command: ${NCDUMP} -h $URL > $OUTPUT" + ${NCDUMP} -h "$URL" > $OUTPUT + show fi -URL="https://${URLSERVER}/$URLPATH" -if test "x$NOLOCAL" != x1 ; then -echo "***Testing rc file in local directory" -# Create the rc file and (optional) netrc file in ./ +# Rest of tests assume these defaults +URL="${PROTO}://${URLSERVER}/$URLPATH" +NETRC=$NETRCFILE + +if test "x$RCLOCAL" = x1 ; then + echo "***Testing rc file in local directory" + # Create the rc file and (optional) netrc fil in ./ + xf; reset; xo + createnetrc $NETRC + createrc $LOCALRC + + # Invoke ncdump to extract a file using the URL + echo "command: ${NCDUMP} -h $URL > $OUTPUT" + ${NCDUMP} -h "$URL" > $OUTPUT + show +fi + +if test "x$RCHOME" = x1 ; then + echo "***Testing rc file in home directory" + # Create the rc file and (optional) netrc file in ./ + xf; reset; xo + createnetrc $NETRC + createrc $HOMERC + + # Invoke ncdump to extract a file the URL + echo "command: ${NCDUMP} -h $URL > $OUTPUT" + ${NCDUMP} -h "$URL" > $OUTPUT + show +fi + +if test "x$RCSPEC" == x1 ; then + echo "*** Testing rc file in specified directory" + # Create the rc file and (optional) netrc file + 
xf; reset; xo + createnetrc $NETRC + createrc $SPECRC + + # Invoke ncdump to extract a file the URL + echo "command: ${NCDUMP} -h $URL > $OUTPUT" + ${NCDUMP} -h "$URL" > $OUTPUT + show +fi + +if test "x$RCENV" = x1 ; then + echo "*** Testing rc file using env variable" + # Create the rc file and (optional) netrc file + xf; reset; xo + createnetrc $NETRC + echo "ENV: export DAPRCFILE=$ENVRC" + export DAPRCFILE=$ENVRC + createrc $DAPRCFILE + + # Invoke ncdump to extract a file the URL + echo "command: ${NCDUMP} -h $URL > $OUTPUT" + ${NCDUMP} -h "$URL" > $OUTPUT + show + export DAPRCFILE= +fi + +# Test that .daprc overrides netcrc for password +URL="${PROTO}://${URLSERVER}/$URLPATH" +NETRC=$NETRCFILE +if test "x$RCPREC" = x1 ; then + echo "***Testing rc vs netrc file precedence" + # Create the rc file and (optional) netrc file in ./ + xf; reset; xo + createnetrc $NETRC badpwd + createrc $LOCALRC + + # Invoke ncdump to extract a file using the URL + echo "command: ${NCDUMP} -h $URL > $OUTPUT" + ${NCDUMP} -h "$URL" > $OUTPUT + show +fi + +xf reset -createnetrc $NETRC -createrc $LOCALRC +restore +xo -# Invoke ncdump to extract a file the URL -echo "command: ${NCDUMP} -h $URL ${OUTPUT}" -${NCDUMP} -h "$URL" ${OUTPUT} -fi - -if test "x$NOHOME" != x1 ; then -echo "***Testing rc file in home directory" -# Create the rc file and (optional) netrc fil in ./ -reset -createnetrc $NETRC -createrc $HOMERC - -# Invoke ncdump to extract a file the URL -echo "command: ${NCDUMP} -h $URL ${OUTPUT}" -${NCDUMP} -h "$URL" ${OUTPUT} -fi - -if test "x$NOENV" != x1 ; then -echo "*** Testing rc file from env variable" -# Create the rc file and (optional) netrc file -reset -createnetrc $NETRC -export NCRCFILE=$ENVRC -createrc $NCRCFILE - -# Invoke ncdump to extract a file the URL -echo "command: ${NCDUMP} -h $URL ${OUTPUT}" -${NCDUMP} -h "$URL" ${OUTPUT} -fi - -set +x -#restore +exit diff --git a/oc2/daptab.c b/oc2/daptab.c index 1014c728d..2b114d2ed 100644 --- a/oc2/daptab.c +++ b/oc2/daptab.c @@ -1,19 +1,19 @@ -/* A Bison parser, made by GNU Bison 3.0. */ +/* A Bison parser, made by GNU Bison 2.5. */ /* Bison implementation for Yacc-like parsers in C - - Copyright (C) 1984, 1989-1990, 2000-2013 Free Software Foundation, Inc. - + + Copyright (C) 1984, 1989-1990, 2000-2011 Free Software Foundation, Inc. + This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. - + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - + You should have received a copy of the GNU General Public License along with this program. If not, see . */ @@ -26,7 +26,7 @@ special exception, which will cause the skeleton and the resulting Bison output files to be licensed under the GNU General Public License without this special exception. - + This special exception was added by the Free Software Foundation in version 2.2 of Bison. */ @@ -44,7 +44,7 @@ #define YYBISON 1 /* Bison version. */ -#define YYBISON_VERSION "3.0" +#define YYBISON_VERSION "2.5" /* Skeleton name. */ #define YYSKELETON_NAME "yacc.c" @@ -58,32 +58,37 @@ /* Pull parsers. */ #define YYPULL 1 +/* Using locations. */ +#define YYLSP_NEEDED 0 /* Substitute the variable and function names. 
*/ #define yyparse dapparse #define yylex daplex #define yyerror daperror +#define yylval daplval +#define yychar dapchar #define yydebug dapdebug #define yynerrs dapnerrs /* Copy the first part of user declarations. */ -#line 11 "dap.y" /* yacc.c:339 */ + +/* Line 268 of yacc.c */ +#line 11 "dap.y" #include "config.h" #include "dapparselex.h" #include "daptab.h" int dapdebug = 0; -#line 79 "dap.tab.c" /* yacc.c:339 */ -# ifndef YY_NULL -# if defined __cplusplus && 201103L <= __cplusplus -# define YY_NULL nullptr -# else -# define YY_NULL 0 -# endif -# endif +/* Line 268 of yacc.c */ +#line 87 "dap.tab.c" + +/* Enabling traces. */ +#ifndef YYDEBUG +# define YYDEBUG 1 +#endif /* Enabling verbose error messages. */ #ifdef YYERROR_VERBOSE @@ -93,67 +98,61 @@ int dapdebug = 0; # define YYERROR_VERBOSE 1 #endif -/* In a future release of Bison, this section will be replaced - by #include "dap.tab.h". */ -#ifndef YY_DAP_DAP_TAB_H_INCLUDED -# define YY_DAP_DAP_TAB_H_INCLUDED -/* Debug traces. */ -#ifndef YYDEBUG -# define YYDEBUG 1 -#endif -#if YYDEBUG -extern int dapdebug; +/* Enabling the token table. */ +#ifndef YYTOKEN_TABLE +# define YYTOKEN_TABLE 0 #endif -/* Token type. */ + +/* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE - enum yytokentype - { - SCAN_ALIAS = 258, - SCAN_ARRAY = 259, - SCAN_ATTR = 260, - SCAN_BYTE = 261, - SCAN_CODE = 262, - SCAN_DATASET = 263, - SCAN_DATA = 264, - SCAN_ERROR = 265, - SCAN_FLOAT32 = 266, - SCAN_FLOAT64 = 267, - SCAN_GRID = 268, - SCAN_INT16 = 269, - SCAN_INT32 = 270, - SCAN_MAPS = 271, - SCAN_MESSAGE = 272, - SCAN_SEQUENCE = 273, - SCAN_STRING = 274, - SCAN_STRUCTURE = 275, - SCAN_UINT16 = 276, - SCAN_UINT32 = 277, - SCAN_URL = 278, - SCAN_PTYPE = 279, - SCAN_PROG = 280, - WORD_WORD = 281, - WORD_STRING = 282 - }; + /* Put the tokens into the symbol table, so that GDB and other debuggers + know about them. */ + enum yytokentype { + SCAN_ALIAS = 258, + SCAN_ARRAY = 259, + SCAN_ATTR = 260, + SCAN_BYTE = 261, + SCAN_CODE = 262, + SCAN_DATASET = 263, + SCAN_DATA = 264, + SCAN_ERROR = 265, + SCAN_FLOAT32 = 266, + SCAN_FLOAT64 = 267, + SCAN_GRID = 268, + SCAN_INT16 = 269, + SCAN_INT32 = 270, + SCAN_MAPS = 271, + SCAN_MESSAGE = 272, + SCAN_SEQUENCE = 273, + SCAN_STRING = 274, + SCAN_STRUCTURE = 275, + SCAN_UINT16 = 276, + SCAN_UINT32 = 277, + SCAN_URL = 278, + SCAN_PTYPE = 279, + SCAN_PROG = 280, + WORD_WORD = 281, + WORD_STRING = 282 + }; #endif -/* Value type. */ + + #if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED typedef int YYSTYPE; # define YYSTYPE_IS_TRIVIAL 1 +# define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 #endif - -int dapparse (DAPparsestate* parsestate); - -#endif /* !YY_DAP_DAP_TAB_H_INCLUDED */ - /* Copy the second part of user declarations. */ -#line 157 "dap.tab.c" /* yacc.c:358 */ + +/* Line 343 of yacc.c */ +#line 156 "dap.tab.c" #ifdef short # undef short @@ -167,8 +166,11 @@ typedef unsigned char yytype_uint8; #ifdef YYTYPE_INT8 typedef YYTYPE_INT8 yytype_int8; -#else +#elif (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) typedef signed char yytype_int8; +#else +typedef short int yytype_int8; #endif #ifdef YYTYPE_UINT16 @@ -188,7 +190,8 @@ typedef short int yytype_int16; # define YYSIZE_T __SIZE_TYPE__ # elif defined size_t # define YYSIZE_T size_t -# elif ! defined YYSIZE_T +# elif ! 
defined YYSIZE_T && (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) # include /* INFRINGES ON USER NAME SPACE */ # define YYSIZE_T size_t # else @@ -202,48 +205,38 @@ typedef short int yytype_int16; # if defined YYENABLE_NLS && YYENABLE_NLS # if ENABLE_NLS # include /* INFRINGES ON USER NAME SPACE */ -# define YY_(Msgid) dgettext ("bison-runtime", Msgid) +# define YY_(msgid) dgettext ("bison-runtime", msgid) # endif # endif # ifndef YY_ -# define YY_(Msgid) Msgid -# endif -#endif - -#ifndef __attribute__ -/* This feature is available in gcc versions 2.5 and later. */ -# if (! defined __GNUC__ || __GNUC__ < 2 \ - || (__GNUC__ == 2 && __GNUC_MINOR__ < 5)) -# define __attribute__(Spec) /* empty */ +# define YY_(msgid) msgid # endif #endif /* Suppress unused-variable warnings by "using" E. */ #if ! defined lint || defined __GNUC__ -# define YYUSE(E) ((void) (E)) +# define YYUSE(e) ((void) (e)) #else -# define YYUSE(E) /* empty */ +# define YYUSE(e) /* empty */ #endif -#if defined __GNUC__ && 407 <= __GNUC__ * 100 + __GNUC_MINOR__ -/* Suppress an incorrect diagnostic about yylval being uninitialized. */ -# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN \ - _Pragma ("GCC diagnostic push") \ - _Pragma ("GCC diagnostic ignored \"-Wuninitialized\"")\ - _Pragma ("GCC diagnostic ignored \"-Wmaybe-uninitialized\"") -# define YY_IGNORE_MAYBE_UNINITIALIZED_END \ - _Pragma ("GCC diagnostic pop") +/* Identity function, used to suppress warnings about constant conditions. */ +#ifndef lint +# define YYID(n) (n) #else -# define YY_INITIAL_VALUE(Value) Value +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) +static int +YYID (int yyi) +#else +static int +YYID (yyi) + int yyi; #endif -#ifndef YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN -# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN -# define YY_IGNORE_MAYBE_UNINITIALIZED_END +{ + return yyi; +} #endif -#ifndef YY_INITIAL_VALUE -# define YY_INITIAL_VALUE(Value) /* Nothing. */ -#endif - #if ! defined yyoverflow || YYERROR_VERBOSE @@ -262,9 +255,9 @@ typedef short int yytype_int16; # define alloca _alloca # else # define YYSTACK_ALLOC alloca -# if ! defined _ALLOCA_H && ! defined EXIT_SUCCESS +# if ! defined _ALLOCA_H && ! defined EXIT_SUCCESS && (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) # include /* INFRINGES ON USER NAME SPACE */ - /* Use EXIT_SUCCESS as a witness for stdlib.h. */ # ifndef EXIT_SUCCESS # define EXIT_SUCCESS 0 # endif @@ -274,8 +267,8 @@ typedef short int yytype_int16; # endif # ifdef YYSTACK_ALLOC - /* Pacify GCC's 'empty if-body' warning. */ -# define YYSTACK_FREE(Ptr) do { /* empty */; } while (0) + /* Pacify GCC's `empty if-body' warning. */ +# define YYSTACK_FREE(Ptr) do { /* empty */; } while (YYID (0)) # ifndef YYSTACK_ALLOC_MAXIMUM /* The OS might guarantee only one guard page at the bottom of the stack, and a page size can be as small as 4096 bytes. So we cannot safely @@ -291,7 +284,7 @@ typedef short int yytype_int16; # endif # if (defined __cplusplus && ! defined EXIT_SUCCESS \ && ! ((defined YYMALLOC || defined malloc) \ - && (defined YYFREE || defined free))) + && (defined YYFREE || defined free))) # include /* INFRINGES ON USER NAME SPACE */ # ifndef EXIT_SUCCESS # define EXIT_SUCCESS 0 @@ -299,13 +292,15 @@ typedef short int yytype_int16; # endif # ifndef YYMALLOC # define YYMALLOC malloc -# if ! defined malloc && ! defined EXIT_SUCCESS +# if ! defined malloc && ! 
defined EXIT_SUCCESS && (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) void *malloc (YYSIZE_T); /* INFRINGES ON USER NAME SPACE */ # endif # endif # ifndef YYFREE # define YYFREE free -# if ! defined free && ! defined EXIT_SUCCESS +# if ! defined free && ! defined EXIT_SUCCESS && (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) void free (void *); /* INFRINGES ON USER NAME SPACE */ # endif # endif @@ -315,7 +310,7 @@ void free (void *); /* INFRINGES ON USER NAME SPACE */ #if (! defined yyoverflow \ && (! defined __cplusplus \ - || (defined YYSTYPE_IS_TRIVIAL && YYSTYPE_IS_TRIVIAL))) + || (defined YYSTYPE_IS_TRIVIAL && YYSTYPE_IS_TRIVIAL))) /* A type that is properly aligned for any stack member. */ union yyalloc @@ -340,35 +335,35 @@ union yyalloc elements in the stack, and YYPTR gives the new location of the stack. Advance YYPTR to a properly aligned location for the next stack. */ -# define YYSTACK_RELOCATE(Stack_alloc, Stack) \ - do \ - { \ - YYSIZE_T yynewbytes; \ - YYCOPY (&yyptr->Stack_alloc, Stack, yysize); \ - Stack = &yyptr->Stack_alloc; \ - yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ - yyptr += yynewbytes / sizeof (*yyptr); \ - } \ - while (0) +# define YYSTACK_RELOCATE(Stack_alloc, Stack) \ + do \ + { \ + YYSIZE_T yynewbytes; \ + YYCOPY (&yyptr->Stack_alloc, Stack, yysize); \ + Stack = &yyptr->Stack_alloc; \ + yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \ + yyptr += yynewbytes / sizeof (*yyptr); \ + } \ + while (YYID (0)) #endif #if defined YYCOPY_NEEDED && YYCOPY_NEEDED -/* Copy COUNT objects from SRC to DST. The source and destination do +/* Copy COUNT objects from FROM to TO. The source and destination do not overlap. */ # ifndef YYCOPY # if defined __GNUC__ && 1 < __GNUC__ -# define YYCOPY(Dst, Src, Count) \ - __builtin_memcpy (Dst, Src, (Count) * sizeof (*(Src))) +# define YYCOPY(To, From, Count) \ + __builtin_memcpy (To, From, (Count) * sizeof (*(From))) # else -# define YYCOPY(Dst, Src, Count) \ - do \ - { \ - YYSIZE_T yyi; \ - for (yyi = 0; yyi < (Count); yyi++) \ - (Dst)[yyi] = (Src)[yyi]; \ - } \ - while (0) +# define YYCOPY(To, From, Count) \ + do \ + { \ + YYSIZE_T yyi; \ + for (yyi = 0; yyi < (Count); yyi++) \ + (To)[yyi] = (From)[yyi]; \ + } \ + while (YYID (0)) # endif # endif #endif /* !YYCOPY_NEEDED */ @@ -384,19 +379,17 @@ union yyalloc #define YYNNTS 34 /* YYNRULES -- Number of rules. */ #define YYNRULES 106 -/* YYNSTATES -- Number of states. */ +/* YYNRULES -- Number of states. */ #define YYNSTATES 201 -/* YYTRANSLATE[YYX] -- Symbol number corresponding to YYX as returned - by yylex, with out-of-bounds checking. */ +/* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX. */ #define YYUNDEFTOK 2 #define YYMAXUTOK 282 -#define YYTRANSLATE(YYX) \ +#define YYTRANSLATE(YYX) \ ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK) -/* YYTRANSLATE[TOKEN-NUM] -- Symbol number corresponding to TOKEN-NUM - as returned by yylex, without out-of-bounds checking. */ +/* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX. */ static const yytype_uint8 yytranslate[] = { 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, @@ -431,7 +424,62 @@ static const yytype_uint8 yytranslate[] = }; #if YYDEBUG - /* YYRLINE[YYN] -- Source line where rule number YYN was defined. */ +/* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in + YYRHS. 
*/ +static const yytype_uint16 yyprhs[] = +{ + 0, 0, 3, 6, 10, 13, 16, 18, 20, 22, + 24, 30, 31, 34, 39, 47, 54, 66, 68, 70, + 72, 74, 76, 78, 80, 82, 84, 86, 87, 90, + 94, 99, 105, 107, 109, 111, 113, 117, 119, 120, + 123, 126, 131, 136, 141, 146, 151, 156, 161, 166, + 171, 176, 178, 180, 184, 186, 190, 192, 196, 198, + 202, 204, 208, 210, 214, 216, 220, 222, 226, 228, + 232, 234, 236, 238, 242, 250, 251, 256, 257, 262, + 263, 268, 269, 274, 276, 278, 280, 282, 284, 286, + 288, 290, 292, 294, 296, 298, 300, 302, 304, 306, + 308, 310, 312, 314, 316, 318, 320 +}; + +/* YYRHS -- A `-1'-separated list of the rules' RHS. */ +static const yytype_int8 yyrhs[] = +{ + 37, 0, -1, 38, 41, -1, 38, 41, 9, -1, + 39, 49, -1, 40, 64, -1, 1, -1, 8, -1, + 5, -1, 10, -1, 28, 42, 29, 47, 30, -1, + -1, 42, 43, -1, 44, 48, 45, 30, -1, 20, + 28, 42, 29, 48, 45, 30, -1, 18, 28, 42, + 29, 48, 30, -1, 13, 28, 4, 31, 43, 16, + 31, 42, 29, 48, 30, -1, 1, -1, 6, -1, + 14, -1, 21, -1, 15, -1, 22, -1, 11, -1, + 12, -1, 23, -1, 19, -1, -1, 45, 46, -1, + 32, 26, 33, -1, 32, 34, 26, 33, -1, 32, + 69, 34, 26, 33, -1, 1, -1, 48, -1, 1, + -1, 69, -1, 28, 50, 29, -1, 1, -1, -1, + 50, 51, -1, 63, 30, -1, 6, 69, 52, 30, + -1, 14, 69, 53, 30, -1, 21, 69, 54, 30, + -1, 15, 69, 55, 30, -1, 22, 69, 56, 30, + -1, 11, 69, 57, 30, -1, 12, 69, 58, 30, + -1, 19, 69, 59, 30, -1, 23, 69, 60, 30, + -1, 69, 28, 50, 29, -1, 1, -1, 26, -1, + 52, 35, 26, -1, 26, -1, 53, 35, 26, -1, + 26, -1, 54, 35, 26, -1, 26, -1, 55, 35, + 26, -1, 26, -1, 56, 35, 26, -1, 26, -1, + 57, 35, 26, -1, 26, -1, 58, 35, 26, -1, + 62, -1, 59, 35, 62, -1, 61, -1, 60, 35, + 61, -1, 62, -1, 69, -1, 27, -1, 3, 26, + 26, -1, 28, 65, 66, 67, 68, 29, 30, -1, + -1, 7, 34, 26, 30, -1, -1, 17, 34, 26, + 30, -1, -1, 24, 34, 26, 30, -1, -1, 25, + 34, 26, 30, -1, 26, -1, 3, -1, 4, -1, + 5, -1, 6, -1, 8, -1, 9, -1, 10, -1, + 11, -1, 12, -1, 13, -1, 14, -1, 15, -1, + 16, -1, 18, -1, 19, -1, 20, -1, 21, -1, + 22, -1, 23, -1, 7, -1, 17, -1, 25, -1, + 24, -1 +}; + +/* YYRLINE[YYN] -- source line where rule number YYN was defined. */ static const yytype_uint16 yyrline[] = { 0, 54, 54, 55, 56, 57, 58, 62, 66, 70, @@ -448,7 +496,7 @@ static const yytype_uint16 yyrline[] = }; #endif -#if YYDEBUG || YYERROR_VERBOSE || 1 +#if YYDEBUG || YYERROR_VERBOSE || YYTOKEN_TABLE /* YYTNAME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM. First, the terminals, then, starting at YYNTOKENS, nonterminals. */ static const char *const yytname[] = @@ -464,13 +512,13 @@ static const char *const yytname[] = "array_decl", "datasetname", "var_name", "attributebody", "attr_list", "attribute", "bytes", "int16", "uint16", "int32", "uint32", "float32", "float64", "strs", "urls", "url", "str_or_id", "alias", "errorbody", - "errorcode", "errormsg", "errorptype", "errorprog", "name", YY_NULL + "errorcode", "errormsg", "errorptype", "errorprog", "name", 0 }; #endif # ifdef YYPRINT -/* YYTOKNUM[NUM] -- (External) token number corresponding to the - (internal) symbol number NUM (which must be that of a token). */ +/* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to + token YYLEX-NUM. 
*/ static const yytype_uint16 yytoknum[] = { 0, 256, 257, 258, 259, 260, 261, 262, 263, 264, @@ -480,46 +528,41 @@ static const yytype_uint16 yytoknum[] = }; # endif -#define YYPACT_NINF -91 - -#define yypact_value_is_default(Yystate) \ - (!!((Yystate) == (-91))) - -#define YYTABLE_NINF -1 - -#define yytable_value_is_error(Yytable_value) \ - 0 - - /* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing - STATE-NUM. */ -static const yytype_int16 yypact[] = +/* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */ +static const yytype_uint8 yyr1[] = { - 6, -91, -91, -91, -91, 9, -22, 7, -16, -91, - -91, 10, -91, -91, -91, 20, -91, 37, -91, 191, - -6, 14, -91, -91, -91, -91, 17, -91, -91, 18, - -91, 19, -91, -91, -91, 271, -91, 320, -91, 27, - -91, -91, 320, -91, -91, -91, -91, 320, 320, -91, - 320, 320, -91, -91, -91, 320, -91, 320, 320, 320, - -91, -91, -91, -91, -91, 24, 43, 35, 39, 50, - 74, -91, -91, -91, -91, -91, -91, -91, -91, -91, - -91, -91, -91, -91, 55, -91, -91, -91, 60, 67, - 68, 70, 71, 73, 295, 77, 78, 295, -91, -91, - 65, 79, 66, 81, 76, 69, 127, -91, 4, -91, - -91, -20, -91, -13, -91, -12, -91, -10, -91, -9, - -91, 32, -91, -91, -91, 33, -91, 34, 42, -91, - -91, 218, -91, 80, 82, 75, 83, 346, 320, 320, - -91, -91, 159, -91, -91, 85, -91, 88, -91, 89, - -91, 90, -91, 91, -91, 295, -91, 92, -91, 93, - -91, 295, -91, -91, 95, 94, 96, 105, 97, -91, - 98, 103, 100, -91, -91, -91, -91, -91, -91, -91, - -91, -91, -91, 102, -91, 99, -91, 12, -91, 111, - 109, -91, -91, -91, -91, 118, 244, -91, 320, 106, - -91 + 0, 36, 37, 37, 37, 37, 37, 38, 39, 40, + 41, 42, 42, 43, 43, 43, 43, 43, 44, 44, + 44, 44, 44, 44, 44, 44, 44, 45, 45, 46, + 46, 46, 46, 47, 47, 48, 49, 49, 50, 50, + 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, + 51, 51, 52, 52, 53, 53, 54, 54, 55, 55, + 56, 56, 57, 57, 58, 58, 59, 59, 60, 60, + 61, 62, 62, 63, 64, 65, 65, 66, 66, 67, + 67, 68, 68, 69, 69, 69, 69, 69, 69, 69, + 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, + 69, 69, 69, 69, 69, 69, 69 }; - /* YYDEFACT[STATE-NUM] -- Default reduction number in state STATE-NUM. - Performed when YYTABLE does not specify something else to do. Zero - means the default is an error. */ +/* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN. */ +static const yytype_uint8 yyr2[] = +{ + 0, 2, 2, 3, 2, 2, 1, 1, 1, 1, + 5, 0, 2, 4, 7, 6, 11, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 0, 2, 3, + 4, 5, 1, 1, 1, 1, 3, 1, 0, 2, + 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 1, 1, 3, 1, 3, 1, 3, 1, 3, + 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, + 1, 1, 1, 3, 7, 0, 4, 0, 4, 0, + 4, 0, 4, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1 +}; + +/* YYDEFACT[STATE-NAME] -- Default reduction number in state STATE-NUM. + Performed when YYTABLE doesn't specify something else to do. Zero + means the default is an error. */ static const yytype_uint8 yydefact[] = { 0, 6, 8, 7, 9, 0, 0, 0, 0, 1, @@ -545,16 +588,7 @@ static const yytype_uint8 yydefact[] = 16 }; - /* YYPGOTO[NTERM-NUM]. */ -static const yytype_int8 yypgoto[] = -{ - -91, -91, -91, -91, -91, -91, -69, -15, -91, -17, - -91, -91, -37, -91, 54, -91, -91, -91, -91, -91, - -91, -91, -91, -91, -91, -7, -90, -91, -91, -91, - -91, -91, -91, -18 -}; - - /* YYDEFGOTO[NTERM-NUM]. */ +/* YYDEFGOTO[NTERM-NUM]. */ static const yytype_int16 yydefgoto[] = { -1, 5, 6, 7, 8, 11, 17, 36, 37, 108, @@ -563,9 +597,47 @@ static const yytype_int16 yydefgoto[] = 69, 103, 136, 86 }; - /* YYTABLE[YYPACT[STATE-NUM]] -- What to do in state STATE-NUM. If - positive, shift that token. 
If negative, reduce the rule whose - number is the opposite. If YYTABLE_NINF, syntax error. */ +/* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing + STATE-NUM. */ +#define YYPACT_NINF -91 +static const yytype_int16 yypact[] = +{ + 6, -91, -91, -91, -91, 9, -22, 7, -16, -91, + -91, 10, -91, -91, -91, 20, -91, 37, -91, 191, + -6, 14, -91, -91, -91, -91, 17, -91, -91, 18, + -91, 19, -91, -91, -91, 271, -91, 320, -91, 27, + -91, -91, 320, -91, -91, -91, -91, 320, 320, -91, + 320, 320, -91, -91, -91, 320, -91, 320, 320, 320, + -91, -91, -91, -91, -91, 24, 43, 35, 39, 50, + 74, -91, -91, -91, -91, -91, -91, -91, -91, -91, + -91, -91, -91, -91, 55, -91, -91, -91, 60, 67, + 68, 70, 71, 73, 295, 77, 78, 295, -91, -91, + 65, 79, 66, 81, 76, 69, 127, -91, 4, -91, + -91, -20, -91, -13, -91, -12, -91, -10, -91, -9, + -91, 32, -91, -91, -91, 33, -91, 34, 42, -91, + -91, 218, -91, 80, 82, 75, 83, 346, 320, 320, + -91, -91, 159, -91, -91, 85, -91, 88, -91, 89, + -91, 90, -91, 91, -91, 295, -91, 92, -91, 93, + -91, 295, -91, -91, 95, 94, 96, 105, 97, -91, + 98, 103, 100, -91, -91, -91, -91, -91, -91, -91, + -91, -91, -91, 102, -91, 99, -91, 12, -91, 111, + 109, -91, -91, -91, -91, 118, 244, -91, 320, 106, + -91 +}; + +/* YYPGOTO[NTERM-NUM]. */ +static const yytype_int8 yypgoto[] = +{ + -91, -91, -91, -91, -91, -91, -69, -15, -91, -17, + -91, -91, -37, -91, 54, -91, -91, -91, -91, -91, + -91, -91, -91, -91, -91, -7, -90, -91, -91, -91, + -91, -91, -91, -18 +}; + +/* YYTABLE[YYPACT[STATE-NUM]]. What to do in state STATE-NUM. If + positive, shift that token. If negative, reduce the rule which + number is the opposite. If YYTABLE_NINF, syntax error. */ +#define YYTABLE_NINF -1 static const yytype_uint8 yytable[] = { 87, 66, 105, 106, 122, 140, 10, 1, 12, 9, @@ -607,6 +679,12 @@ static const yytype_uint8 yytable[] = 27, 28, 0, 0, 29, 30, 31, 32, 33, 34 }; +#define yypact_value_is_default(yystate) \ + ((yystate) == (-91)) + +#define yytable_value_is_error(yytable_value) \ + YYID (0) + static const yytype_int16 yycheck[] = { 37, 19, 71, 72, 94, 1, 28, 1, 1, 0, @@ -648,8 +726,8 @@ static const yytype_int16 yycheck[] = 14, 15, -1, -1, 18, 19, 20, 21, 22, 23 }; - /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing - symbol of state STATE-NUM. */ +/* YYSTOS[STATE-NUM] -- The (internal number of the) accessing + symbol of state STATE-NUM. */ static const yytype_uint8 yystos[] = { 0, 1, 5, 8, 10, 37, 38, 39, 40, 0, @@ -675,73 +753,94 @@ static const yytype_uint8 yystos[] = 30 }; - /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */ -static const yytype_uint8 yyr1[] = -{ - 0, 36, 37, 37, 37, 37, 37, 38, 39, 40, - 41, 42, 42, 43, 43, 43, 43, 43, 44, 44, - 44, 44, 44, 44, 44, 44, 44, 45, 45, 46, - 46, 46, 46, 47, 47, 48, 49, 49, 50, 50, - 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, - 51, 51, 52, 52, 53, 53, 54, 54, 55, 55, - 56, 56, 57, 57, 58, 58, 59, 59, 60, 60, - 61, 62, 62, 63, 64, 65, 65, 66, 66, 67, - 67, 68, 68, 69, 69, 69, 69, 69, 69, 69, - 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, - 69, 69, 69, 69, 69, 69, 69 -}; +#define yyerrok (yyerrstatus = 0) +#define yyclearin (yychar = YYEMPTY) +#define YYEMPTY (-2) +#define YYEOF 0 - /* YYR2[YYN] -- Number of symbols on the right hand side of rule YYN. 
*/ -static const yytype_uint8 yyr2[] = -{ - 0, 2, 2, 3, 2, 2, 1, 1, 1, 1, - 5, 0, 2, 4, 7, 6, 11, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 0, 2, 3, - 4, 5, 1, 1, 1, 1, 3, 1, 0, 2, - 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 1, 1, 3, 1, 3, 1, 3, 1, 3, - 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, - 1, 1, 1, 3, 7, 0, 4, 0, 4, 0, - 4, 0, 4, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1 -}; +#define YYACCEPT goto yyacceptlab +#define YYABORT goto yyabortlab +#define YYERROR goto yyerrorlab -#define yyerrok (yyerrstatus = 0) -#define yyclearin (yychar = YYEMPTY) -#define YYEMPTY (-2) -#define YYEOF 0 - -#define YYACCEPT goto yyacceptlab -#define YYABORT goto yyabortlab -#define YYERROR goto yyerrorlab +/* Like YYERROR except do call yyerror. This remains here temporarily + to ease the transition to the new meaning of YYERROR, for GCC. + Once GCC version 2 has supplanted version 1, this can go. However, + YYFAIL appears to be in use. Nevertheless, it is formally deprecated + in Bison 2.4.2's NEWS entry, where a plan to phase it out is + discussed. */ +#define YYFAIL goto yyerrlab +#if defined YYFAIL + /* This is here to suppress warnings from the GCC cpp's + -Wunused-macros. Normally we don't worry about that warning, but + some users do, and we want to make it easy for users to remove + YYFAIL uses, which will produce warnings from Bison 2.5. */ +#endif #define YYRECOVERING() (!!yyerrstatus) -#define YYBACKUP(Token, Value) \ -do \ - if (yychar == YYEMPTY) \ - { \ - yychar = (Token); \ - yylval = (Value); \ - YYPOPSTACK (yylen); \ - yystate = *yyssp; \ - goto yybackup; \ - } \ - else \ - { \ +#define YYBACKUP(Token, Value) \ +do \ + if (yychar == YYEMPTY && yylen == 1) \ + { \ + yychar = (Token); \ + yylval = (Value); \ + YYPOPSTACK (1); \ + goto yybackup; \ + } \ + else \ + { \ yyerror (parsestate, YY_("syntax error: cannot back up")); \ - YYERROR; \ - } \ -while (0) - -/* Error token number */ -#define YYTERROR 1 -#define YYERRCODE 256 + YYERROR; \ + } \ +while (YYID (0)) +#define YYTERROR 1 +#define YYERRCODE 256 + + +/* YYLLOC_DEFAULT -- Set CURRENT to span from RHS[1] to RHS[N]. + If N is 0, then set CURRENT to the empty location which ends + the previous symbol: RHS[0] (always defined). */ + +#define YYRHSLOC(Rhs, K) ((Rhs)[K]) +#ifndef YYLLOC_DEFAULT +# define YYLLOC_DEFAULT(Current, Rhs, N) \ + do \ + if (YYID (N)) \ + { \ + (Current).first_line = YYRHSLOC (Rhs, 1).first_line; \ + (Current).first_column = YYRHSLOC (Rhs, 1).first_column; \ + (Current).last_line = YYRHSLOC (Rhs, N).last_line; \ + (Current).last_column = YYRHSLOC (Rhs, N).last_column; \ + } \ + else \ + { \ + (Current).first_line = (Current).last_line = \ + YYRHSLOC (Rhs, 0).last_line; \ + (Current).first_column = (Current).last_column = \ + YYRHSLOC (Rhs, 0).last_column; \ + } \ + while (YYID (0)) +#endif + + +/* This macro is provided for backward compatibility. */ + +#ifndef YY_LOCATION_PRINT +# define YY_LOCATION_PRINT(File, Loc) ((void) 0) +#endif + + +/* YYLEX -- calling `yylex' with the right arguments. */ + +#ifdef YYLEX_PARAM +# define YYLEX yylex (&yylval, YYLEX_PARAM) +#else +# define YYLEX yylex (&yylval, parsestate) +#endif /* Enable debugging if requested. */ #if YYDEBUG @@ -751,47 +850,56 @@ while (0) # define YYFPRINTF fprintf # endif -# define YYDPRINTF(Args) \ -do { \ - if (yydebug) \ - YYFPRINTF Args; \ -} while (0) +# define YYDPRINTF(Args) \ +do { \ + if (yydebug) \ + YYFPRINTF Args; \ +} while (YYID (0)) -/* This macro is provided for backward compatibility. 
*/ -#ifndef YY_LOCATION_PRINT -# define YY_LOCATION_PRINT(File, Loc) ((void) 0) -#endif +# define YY_SYMBOL_PRINT(Title, Type, Value, Location) \ +do { \ + if (yydebug) \ + { \ + YYFPRINTF (stderr, "%s ", Title); \ + yy_symbol_print (stderr, \ + Type, Value, parsestate); \ + YYFPRINTF (stderr, "\n"); \ + } \ +} while (YYID (0)) -# define YY_SYMBOL_PRINT(Title, Type, Value, Location) \ -do { \ - if (yydebug) \ - { \ - YYFPRINTF (stderr, "%s ", Title); \ - yy_symbol_print (stderr, \ - Type, Value, parsestate); \ - YYFPRINTF (stderr, "\n"); \ - } \ -} while (0) - - -/*----------------------------------------. -| Print this symbol's value on YYOUTPUT. | -`----------------------------------------*/ +/*--------------------------------. +| Print this symbol on YYOUTPUT. | +`--------------------------------*/ +/*ARGSUSED*/ +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) static void yy_symbol_value_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep, DAPparsestate* parsestate) +#else +static void +yy_symbol_value_print (yyoutput, yytype, yyvaluep, parsestate) + FILE *yyoutput; + int yytype; + YYSTYPE const * const yyvaluep; + DAPparsestate* parsestate; +#endif { - FILE *yyo = yyoutput; - YYUSE (yyo); - YYUSE (parsestate); if (!yyvaluep) return; + YYUSE (parsestate); # ifdef YYPRINT if (yytype < YYNTOKENS) YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep); +# else + YYUSE (yyoutput); # endif - YYUSE (yytype); + switch (yytype) + { + default: + break; + } } @@ -799,11 +907,23 @@ yy_symbol_value_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvalue | Print this symbol on YYOUTPUT. | `--------------------------------*/ +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) static void yy_symbol_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep, DAPparsestate* parsestate) +#else +static void +yy_symbol_print (yyoutput, yytype, yyvaluep, parsestate) + FILE *yyoutput; + int yytype; + YYSTYPE const * const yyvaluep; + DAPparsestate* parsestate; +#endif { - YYFPRINTF (yyoutput, "%s %s (", - yytype < YYNTOKENS ? "token" : "nterm", yytname[yytype]); + if (yytype < YYNTOKENS) + YYFPRINTF (yyoutput, "token %s (", yytname[yytype]); + else + YYFPRINTF (yyoutput, "nterm %s (", yytname[yytype]); yy_symbol_value_print (yyoutput, yytype, yyvaluep, parsestate); YYFPRINTF (yyoutput, ")"); @@ -814,8 +934,16 @@ yy_symbol_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep, DAP | TOP (included). | `------------------------------------------------------------------*/ +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) static void yy_stack_print (yytype_int16 *yybottom, yytype_int16 *yytop) +#else +static void +yy_stack_print (yybottom, yytop) + yytype_int16 *yybottom; + yytype_int16 *yytop; +#endif { YYFPRINTF (stderr, "Stack now"); for (; yybottom <= yytop; yybottom++) @@ -826,42 +954,50 @@ yy_stack_print (yytype_int16 *yybottom, yytype_int16 *yytop) YYFPRINTF (stderr, "\n"); } -# define YY_STACK_PRINT(Bottom, Top) \ -do { \ - if (yydebug) \ - yy_stack_print ((Bottom), (Top)); \ -} while (0) +# define YY_STACK_PRINT(Bottom, Top) \ +do { \ + if (yydebug) \ + yy_stack_print ((Bottom), (Top)); \ +} while (YYID (0)) /*------------------------------------------------. | Report that the YYRULE is going to be reduced. 
| `------------------------------------------------*/ +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) static void -yy_reduce_print (yytype_int16 *yyssp, YYSTYPE *yyvsp, int yyrule, DAPparsestate* parsestate) +yy_reduce_print (YYSTYPE *yyvsp, int yyrule, DAPparsestate* parsestate) +#else +static void +yy_reduce_print (yyvsp, yyrule, parsestate) + YYSTYPE *yyvsp; + int yyrule; + DAPparsestate* parsestate; +#endif { - unsigned long int yylno = yyrline[yyrule]; int yynrhs = yyr2[yyrule]; int yyi; + unsigned long int yylno = yyrline[yyrule]; YYFPRINTF (stderr, "Reducing stack by rule %d (line %lu):\n", - yyrule - 1, yylno); + yyrule - 1, yylno); /* The symbols being reduced. */ for (yyi = 0; yyi < yynrhs; yyi++) { YYFPRINTF (stderr, " $%d = ", yyi + 1); - yy_symbol_print (stderr, - yystos[yyssp[yyi + 1 - yynrhs]], - &(yyvsp[(yyi + 1) - (yynrhs)]) - , parsestate); + yy_symbol_print (stderr, yyrhs[yyprhs[yyrule] + yyi], + &(yyvsp[(yyi + 1) - (yynrhs)]) + , parsestate); YYFPRINTF (stderr, "\n"); } } -# define YY_REDUCE_PRINT(Rule) \ -do { \ - if (yydebug) \ - yy_reduce_print (yyssp, yyvsp, Rule, parsestate); \ -} while (0) +# define YY_REDUCE_PRINT(Rule) \ +do { \ + if (yydebug) \ + yy_reduce_print (yyvsp, Rule, parsestate); \ +} while (YYID (0)) /* Nonzero means print parse trace. It is left uninitialized so that multiple parsers can coexist. */ @@ -875,7 +1011,7 @@ int yydebug; /* YYINITDEPTH -- initial size of the parser's stacks. */ -#ifndef YYINITDEPTH +#ifndef YYINITDEPTH # define YYINITDEPTH 200 #endif @@ -898,8 +1034,15 @@ int yydebug; # define yystrlen strlen # else /* Return the length of YYSTR. */ +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) static YYSIZE_T yystrlen (const char *yystr) +#else +static YYSIZE_T +yystrlen (yystr) + const char *yystr; +#endif { YYSIZE_T yylen; for (yylen = 0; yystr[yylen]; yylen++) @@ -915,8 +1058,16 @@ yystrlen (const char *yystr) # else /* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in YYDEST. */ +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) static char * yystpcpy (char *yydest, const char *yysrc) +#else +static char * +yystpcpy (yydest, yysrc) + char *yydest; + const char *yysrc; +#endif { char *yyd = yydest; const char *yys = yysrc; @@ -946,27 +1097,27 @@ yytnamerr (char *yyres, const char *yystr) char const *yyp = yystr; for (;;) - switch (*++yyp) - { - case '\'': - case ',': - goto do_not_strip_quotes; + switch (*++yyp) + { + case '\'': + case ',': + goto do_not_strip_quotes; - case '\\': - if (*++yyp != '\\') - goto do_not_strip_quotes; - /* Fall through. */ - default: - if (yyres) - yyres[yyn] = *yyp; - yyn++; - break; + case '\\': + if (*++yyp != '\\') + goto do_not_strip_quotes; + /* Fall through. */ + default: + if (yyres) + yyres[yyn] = *yyp; + yyn++; + break; - case '"': - if (yyres) - yyres[yyn] = '\0'; - return yyn; - } + case '"': + if (yyres) + yyres[yyn] = '\0'; + return yyn; + } do_not_strip_quotes: ; } @@ -989,11 +1140,12 @@ static int yysyntax_error (YYSIZE_T *yymsg_alloc, char **yymsg, yytype_int16 *yyssp, int yytoken) { - YYSIZE_T yysize0 = yytnamerr (YY_NULL, yytname[yytoken]); + YYSIZE_T yysize0 = yytnamerr (0, yytname[yytoken]); YYSIZE_T yysize = yysize0; + YYSIZE_T yysize1; enum { YYERROR_VERBOSE_ARGS_MAXIMUM = 5 }; /* Internationalized format string. */ - const char *yyformat = YY_NULL; + const char *yyformat = 0; /* Arguments of yyformat. 
*/ char const *yyarg[YYERROR_VERBOSE_ARGS_MAXIMUM]; /* Number of reported tokens (one for the "unexpected", one per @@ -1001,6 +1153,10 @@ yysyntax_error (YYSIZE_T *yymsg_alloc, char **yymsg, int yycount = 0; /* There are many possibilities here to consider: + - Assume YYFAIL is not used. It's too flawed to consider. See + + for details. YYERROR is fine as it does not invoke this + function. - If this state is a consistent state with a default action, then the only way this function was invoked is if the default action is an error action. In that case, don't check for expected @@ -1049,13 +1205,11 @@ yysyntax_error (YYSIZE_T *yymsg_alloc, char **yymsg, break; } yyarg[yycount++] = yytname[yyx]; - { - YYSIZE_T yysize1 = yysize + yytnamerr (YY_NULL, yytname[yyx]); - if (! (yysize <= yysize1 - && yysize1 <= YYSTACK_ALLOC_MAXIMUM)) - return 2; - yysize = yysize1; - } + yysize1 = yysize + yytnamerr (0, yytname[yyx]); + if (! (yysize <= yysize1 + && yysize1 <= YYSTACK_ALLOC_MAXIMUM)) + return 2; + yysize = yysize1; } } } @@ -1075,12 +1229,10 @@ yysyntax_error (YYSIZE_T *yymsg_alloc, char **yymsg, # undef YYCASE_ } - { - YYSIZE_T yysize1 = yysize + yystrlen (yyformat); - if (! (yysize <= yysize1 && yysize1 <= YYSTACK_ALLOC_MAXIMUM)) - return 2; - yysize = yysize1; - } + yysize1 = yysize + yystrlen (yyformat); + if (! (yysize <= yysize1 && yysize1 <= YYSTACK_ALLOC_MAXIMUM)) + return 2; + yysize = yysize1; if (*yymsg_alloc < yysize) { @@ -1117,39 +1269,83 @@ yysyntax_error (YYSIZE_T *yymsg_alloc, char **yymsg, | Release the memory associated to this symbol. | `-----------------------------------------------*/ +/*ARGSUSED*/ +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) static void yydestruct (const char *yymsg, int yytype, YYSTYPE *yyvaluep, DAPparsestate* parsestate) +#else +static void +yydestruct (yymsg, yytype, yyvaluep, parsestate) + const char *yymsg; + int yytype; + YYSTYPE *yyvaluep; + DAPparsestate* parsestate; +#endif { YYUSE (yyvaluep); YYUSE (parsestate); + if (!yymsg) yymsg = "Deleting"; YY_SYMBOL_PRINT (yymsg, yytype, yyvaluep, yylocationp); - YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN - YYUSE (yytype); - YY_IGNORE_MAYBE_UNINITIALIZED_END + switch (yytype) + { + + default: + break; + } } +/* Prevent warnings from -Wmissing-prototypes. */ +#ifdef YYPARSE_PARAM +#if defined __STDC__ || defined __cplusplus +int yyparse (void *YYPARSE_PARAM); +#else +int yyparse (); +#endif +#else /* ! YYPARSE_PARAM */ +#if defined __STDC__ || defined __cplusplus +int yyparse (DAPparsestate* parsestate); +#else +int yyparse (); +#endif +#endif /* ! YYPARSE_PARAM */ /*----------. | yyparse. | `----------*/ +#ifdef YYPARSE_PARAM +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) +int +yyparse (void *YYPARSE_PARAM) +#else +int +yyparse (YYPARSE_PARAM) + void *YYPARSE_PARAM; +#endif +#else /* ! YYPARSE_PARAM */ +#if (defined __STDC__ || defined __C99__FUNC__ \ + || defined __cplusplus || defined _MSC_VER) int yyparse (DAPparsestate* parsestate) +#else +int +yyparse (parsestate) + DAPparsestate* parsestate; +#endif +#endif { /* The lookahead symbol. */ int yychar; - /* The semantic value of the lookahead symbol. */ -/* Default value used for initialization, for pacifying older GCCs - or non-GCC compilers. */ -YY_INITIAL_VALUE (static YYSTYPE yyval_default;) -YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); +YYSTYPE yylval; /* Number of syntax errors so far. 
*/ int yynerrs; @@ -1159,10 +1355,10 @@ YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); int yyerrstatus; /* The stacks and their tools: - 'yyss': related to states. - 'yyvs': related to semantic values. + `yyss': related to states. + `yyvs': related to semantic values. - Refer to the stacks through separate pointers, to allow yyoverflow + Refer to the stacks thru separate pointers, to allow yyoverflow to reallocate them elsewhere. */ /* The state stack. */ @@ -1180,7 +1376,7 @@ YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); int yyn; int yyresult; /* Lookahead token as an internal (translated) token number. */ - int yytoken = 0; + int yytoken; /* The variables used to return semantic value and location from the action routines. */ YYSTYPE yyval; @@ -1198,8 +1394,9 @@ YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); Keep to zero when no symbol should be popped. */ int yylen = 0; - yyssp = yyss = yyssa; - yyvsp = yyvs = yyvsa; + yytoken = 0; + yyss = yyssa; + yyvs = yyvsa; yystacksize = YYINITDEPTH; YYDPRINTF ((stderr, "Starting parse\n")); @@ -1208,6 +1405,14 @@ YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); yyerrstatus = 0; yynerrs = 0; yychar = YYEMPTY; /* Cause a token to be read. */ + + /* Initialize stack pointers. + Waste one element of value and location stack + so that they stay on the same level as the state stack. + The wasted elements are never initialized. */ + yyssp = yyss; + yyvsp = yyvs; + goto yysetstate; /*------------------------------------------------------------. @@ -1228,23 +1433,23 @@ YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); #ifdef yyoverflow { - /* Give user a chance to reallocate the stack. Use copies of - these so that the &'s don't force the real ones into - memory. */ - YYSTYPE *yyvs1 = yyvs; - yytype_int16 *yyss1 = yyss; + /* Give user a chance to reallocate the stack. Use copies of + these so that the &'s don't force the real ones into + memory. */ + YYSTYPE *yyvs1 = yyvs; + yytype_int16 *yyss1 = yyss; - /* Each stack pointer address is followed by the size of the - data in use in that stack, in bytes. This used to be a - conditional around just the two extra args, but that might - be undefined if yyoverflow is a macro. */ - yyoverflow (YY_("memory exhausted"), - &yyss1, yysize * sizeof (*yyssp), - &yyvs1, yysize * sizeof (*yyvsp), - &yystacksize); + /* Each stack pointer address is followed by the size of the + data in use in that stack, in bytes. This used to be a + conditional around just the two extra args, but that might + be undefined if yyoverflow is a macro. */ + yyoverflow (YY_("memory exhausted"), + &yyss1, yysize * sizeof (*yyssp), + &yyvs1, yysize * sizeof (*yyvsp), + &yystacksize); - yyss = yyss1; - yyvs = yyvs1; + yyss = yyss1; + yyvs = yyvs1; } #else /* no yyoverflow */ # ifndef YYSTACK_RELOCATE @@ -1252,22 +1457,22 @@ YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); # else /* Extend the stack our own way. */ if (YYMAXDEPTH <= yystacksize) - goto yyexhaustedlab; + goto yyexhaustedlab; yystacksize *= 2; if (YYMAXDEPTH < yystacksize) - yystacksize = YYMAXDEPTH; + yystacksize = YYMAXDEPTH; { - yytype_int16 *yyss1 = yyss; - union yyalloc *yyptr = - (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); - if (! yyptr) - goto yyexhaustedlab; - YYSTACK_RELOCATE (yyss_alloc, yyss); - YYSTACK_RELOCATE (yyvs_alloc, yyvs); + yytype_int16 *yyss1 = yyss; + union yyalloc *yyptr = + (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize)); + if (! 
yyptr) + goto yyexhaustedlab; + YYSTACK_RELOCATE (yyss_alloc, yyss); + YYSTACK_RELOCATE (yyvs_alloc, yyvs); # undef YYSTACK_RELOCATE - if (yyss1 != yyssa) - YYSTACK_FREE (yyss1); + if (yyss1 != yyssa) + YYSTACK_FREE (yyss1); } # endif #endif /* no yyoverflow */ @@ -1276,10 +1481,10 @@ YYSTYPE yylval YY_INITIAL_VALUE (= yyval_default); yyvsp = yyvs + yysize - 1; YYDPRINTF ((stderr, "Stack size increased to %lu\n", - (unsigned long int) yystacksize)); + (unsigned long int) yystacksize)); if (yyss + yystacksize - 1 <= yyssp) - YYABORT; + YYABORT; } YYDPRINTF ((stderr, "Entering state %d\n", yystate)); @@ -1308,7 +1513,7 @@ yybackup: if (yychar == YYEMPTY) { YYDPRINTF ((stderr, "Reading a token: ")); - yychar = yylex (&yylval, parsestate); + yychar = YYLEX; } if (yychar <= YYEOF) @@ -1348,9 +1553,7 @@ yybackup: yychar = YYEMPTY; yystate = yyn; - YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN *++yyvsp = yylval; - YY_IGNORE_MAYBE_UNINITIALIZED_END goto yynewstate; @@ -1373,7 +1576,7 @@ yyreduce: yylen = yyr2[yyn]; /* If YYLEN is nonzero, implement the default value of the action: - '$$ = $1'. + `$$ = $1'. Otherwise, the following line sets YYVAL to garbage. This behavior is undocumented and Bison @@ -1387,613 +1590,716 @@ yyreduce: switch (yyn) { case 6: -#line 58 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 58 "dap.y" {dap_unrecognizedresponse(parsestate); YYABORT;} -#line 1393 "dap.tab.c" /* yacc.c:1646 */ break; case 7: -#line 63 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 63 "dap.y" {dap_tagparse(parsestate,SCAN_DATASET);} -#line 1399 "dap.tab.c" /* yacc.c:1646 */ break; case 8: -#line 67 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 67 "dap.y" {dap_tagparse(parsestate,SCAN_ATTR);} -#line 1405 "dap.tab.c" /* yacc.c:1646 */ break; case 9: -#line 71 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 71 "dap.y" {dap_tagparse(parsestate,SCAN_ERROR);} -#line 1411 "dap.tab.c" /* yacc.c:1646 */ break; case 10: -#line 76 "dap.y" /* yacc.c:1646 */ - {dap_datasetbody(parsestate,(yyvsp[-1]),(yyvsp[-3]));} -#line 1417 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 76 "dap.y" + {dap_datasetbody(parsestate,(yyvsp[(4) - (5)]),(yyvsp[(2) - (5)]));} break; case 11: -#line 81 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 81 "dap.y" {(yyval)=dap_declarations(parsestate,null,null);} -#line 1423 "dap.tab.c" /* yacc.c:1646 */ break; case 12: -#line 82 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_declarations(parsestate,(yyvsp[-1]),(yyvsp[0]));} -#line 1429 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 82 "dap.y" + {(yyval)=dap_declarations(parsestate,(yyvsp[(1) - (2)]),(yyvsp[(2) - (2)]));} break; case 13: -#line 89 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_makebase(parsestate,(yyvsp[-2]),(yyvsp[-3]),(yyvsp[-1]));} -#line 1435 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 89 "dap.y" + {(yyval)=dap_makebase(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(1) - (4)]),(yyvsp[(3) - (4)]));} break; case 14: -#line 91 "dap.y" /* yacc.c:1646 */ - {if(((yyval)=dap_makestructure(parsestate,(yyvsp[-2]),(yyvsp[-1]),(yyvsp[-4])))==null) {YYABORT;}} -#line 1441 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 91 "dap.y" + {if(((yyval)=dap_makestructure(parsestate,(yyvsp[(5) - (7)]),(yyvsp[(6) - (7)]),(yyvsp[(3) - (7)])))==null) {YYABORT;}} break; case 15: -#line 93 "dap.y" /* yacc.c:1646 */ - {if(((yyval)=dap_makesequence(parsestate,(yyvsp[-1]),(yyvsp[-3])))==null) {YYABORT;}} -#line 1447 "dap.tab.c" 
/* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 93 "dap.y" + {if(((yyval)=dap_makesequence(parsestate,(yyvsp[(5) - (6)]),(yyvsp[(3) - (6)])))==null) {YYABORT;}} break; case 16: -#line 96 "dap.y" /* yacc.c:1646 */ - {if(((yyval)=dap_makegrid(parsestate,(yyvsp[-1]),(yyvsp[-6]),(yyvsp[-3])))==null) {YYABORT;}} -#line 1453 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 96 "dap.y" + {if(((yyval)=dap_makegrid(parsestate,(yyvsp[(10) - (11)]),(yyvsp[(5) - (11)]),(yyvsp[(8) - (11)])))==null) {YYABORT;}} break; case 17: -#line 98 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 98 "dap.y" {dapsemanticerror(parsestate,OC_EBADTYPE,"Unrecognized type"); YYABORT;} -#line 1459 "dap.tab.c" /* yacc.c:1646 */ break; case 18: -#line 103 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 103 "dap.y" {(yyval)=(Object)SCAN_BYTE;} -#line 1465 "dap.tab.c" /* yacc.c:1646 */ break; case 19: -#line 104 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 104 "dap.y" {(yyval)=(Object)SCAN_INT16;} -#line 1471 "dap.tab.c" /* yacc.c:1646 */ break; case 20: -#line 105 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 105 "dap.y" {(yyval)=(Object)SCAN_UINT16;} -#line 1477 "dap.tab.c" /* yacc.c:1646 */ break; case 21: -#line 106 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 106 "dap.y" {(yyval)=(Object)SCAN_INT32;} -#line 1483 "dap.tab.c" /* yacc.c:1646 */ break; case 22: -#line 107 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 107 "dap.y" {(yyval)=(Object)SCAN_UINT32;} -#line 1489 "dap.tab.c" /* yacc.c:1646 */ break; case 23: -#line 108 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 108 "dap.y" {(yyval)=(Object)SCAN_FLOAT32;} -#line 1495 "dap.tab.c" /* yacc.c:1646 */ break; case 24: -#line 109 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 109 "dap.y" {(yyval)=(Object)SCAN_FLOAT64;} -#line 1501 "dap.tab.c" /* yacc.c:1646 */ break; case 25: -#line 110 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 110 "dap.y" {(yyval)=(Object)SCAN_URL;} -#line 1507 "dap.tab.c" /* yacc.c:1646 */ break; case 26: -#line 111 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 111 "dap.y" {(yyval)=(Object)SCAN_STRING;} -#line 1513 "dap.tab.c" /* yacc.c:1646 */ break; case 27: -#line 115 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 115 "dap.y" {(yyval)=dap_arraydecls(parsestate,null,null);} -#line 1519 "dap.tab.c" /* yacc.c:1646 */ break; case 28: -#line 116 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_arraydecls(parsestate,(yyvsp[-1]),(yyvsp[0]));} -#line 1525 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 116 "dap.y" + {(yyval)=dap_arraydecls(parsestate,(yyvsp[(1) - (2)]),(yyvsp[(2) - (2)]));} break; case 29: -#line 120 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_arraydecl(parsestate,null,(yyvsp[-1]));} -#line 1531 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 120 "dap.y" + {(yyval)=dap_arraydecl(parsestate,null,(yyvsp[(2) - (3)]));} break; case 30: -#line 121 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_arraydecl(parsestate,null,(yyvsp[-1]));} -#line 1537 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 121 "dap.y" + {(yyval)=dap_arraydecl(parsestate,null,(yyvsp[(3) - (4)]));} break; case 31: -#line 122 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_arraydecl(parsestate,(yyvsp[-3]),(yyvsp[-1]));} -#line 1543 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 122 "dap.y" + {(yyval)=dap_arraydecl(parsestate,(yyvsp[(2) - 
(5)]),(yyvsp[(4) - (5)]));} break; case 32: -#line 124 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 124 "dap.y" {dapsemanticerror(parsestate,OC_EDIMSIZE,"Illegal dimension declaration"); YYABORT;} -#line 1549 "dap.tab.c" /* yacc.c:1646 */ break; case 33: -#line 128 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[0]);} -#line 1555 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 128 "dap.y" + {(yyval)=(yyvsp[(1) - (1)]);} break; case 34: -#line 130 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 130 "dap.y" {dapsemanticerror(parsestate,OC_EDDS,"Illegal dataset declaration"); YYABORT;} -#line 1561 "dap.tab.c" /* yacc.c:1646 */ break; case 35: -#line 133 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[0]);} -#line 1567 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 133 "dap.y" + {(yyval)=(yyvsp[(1) - (1)]);} break; case 36: -#line 136 "dap.y" /* yacc.c:1646 */ - {dap_attributebody(parsestate,(yyvsp[-1]));} -#line 1573 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 136 "dap.y" + {dap_attributebody(parsestate,(yyvsp[(2) - (3)]));} break; case 37: -#line 138 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 138 "dap.y" {dapsemanticerror(parsestate,OC_EDAS,"Illegal DAS body"); YYABORT;} -#line 1579 "dap.tab.c" /* yacc.c:1646 */ break; case 38: -#line 142 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 142 "dap.y" {(yyval)=dap_attrlist(parsestate,null,null);} -#line 1585 "dap.tab.c" /* yacc.c:1646 */ break; case 39: -#line 143 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrlist(parsestate,(yyvsp[-1]),(yyvsp[0]));} -#line 1591 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 143 "dap.y" + {(yyval)=dap_attrlist(parsestate,(yyvsp[(1) - (2)]),(yyvsp[(2) - (2)]));} break; case 40: -#line 147 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 147 "dap.y" {(yyval)=null;} -#line 1597 "dap.tab.c" /* yacc.c:1646 */ break; case 41: -#line 149 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_BYTE);} -#line 1603 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 149 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - (4)]),(Object)SCAN_BYTE);} break; case 42: -#line 151 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_INT16);} -#line 1609 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 151 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - (4)]),(Object)SCAN_INT16);} break; case 43: -#line 153 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_UINT16);} -#line 1615 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 153 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - (4)]),(Object)SCAN_UINT16);} break; case 44: -#line 155 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_INT32);} -#line 1621 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 155 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - (4)]),(Object)SCAN_INT32);} break; case 45: -#line 157 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_UINT32);} -#line 1627 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 157 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - 
(4)]),(Object)SCAN_UINT32);} break; case 46: -#line 159 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_FLOAT32);} -#line 1633 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 159 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - (4)]),(Object)SCAN_FLOAT32);} break; case 47: -#line 161 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_FLOAT64);} -#line 1639 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 161 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - (4)]),(Object)SCAN_FLOAT64);} break; case 48: -#line 163 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_STRING);} -#line 1645 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 163 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - (4)]),(Object)SCAN_STRING);} break; case 49: -#line 165 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attribute(parsestate,(yyvsp[-2]),(yyvsp[-1]),(Object)SCAN_URL);} -#line 1651 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 165 "dap.y" + {(yyval)=dap_attribute(parsestate,(yyvsp[(2) - (4)]),(yyvsp[(3) - (4)]),(Object)SCAN_URL);} break; case 50: -#line 166 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrset(parsestate,(yyvsp[-3]),(yyvsp[-1]));} -#line 1657 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 166 "dap.y" + {(yyval)=dap_attrset(parsestate,(yyvsp[(1) - (4)]),(yyvsp[(3) - (4)]));} break; case 51: -#line 168 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 168 "dap.y" {dapsemanticerror(parsestate,OC_EDAS,"Illegal attribute"); YYABORT;} -#line 1663 "dap.tab.c" /* yacc.c:1646 */ break; case 52: -#line 172 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_BYTE);} -#line 1669 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 172 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_BYTE);} break; case 53: -#line 174 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_BYTE);} -#line 1675 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 174 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_BYTE);} break; case 54: -#line 177 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_INT16);} -#line 1681 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 177 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_INT16);} break; case 55: -#line 179 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_INT16);} -#line 1687 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 179 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_INT16);} break; case 56: -#line 182 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_UINT16);} -#line 1693 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 182 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_UINT16);} break; case 57: -#line 184 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_UINT16);} -#line 1699 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ 
+#line 184 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_UINT16);} break; case 58: -#line 187 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_INT32);} -#line 1705 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 187 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_INT32);} break; case 59: -#line 189 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_INT32);} -#line 1711 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 189 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_INT32);} break; case 60: -#line 192 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_UINT32);} -#line 1717 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 192 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_UINT32);} break; case 61: -#line 193 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_UINT32);} -#line 1723 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 193 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_UINT32);} break; case 62: -#line 196 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_FLOAT32);} -#line 1729 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 196 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_FLOAT32);} break; case 63: -#line 197 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_FLOAT32);} -#line 1735 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 197 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_FLOAT32);} break; case 64: -#line 200 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_FLOAT64);} -#line 1741 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 200 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_FLOAT64);} break; case 65: -#line 201 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_FLOAT64);} -#line 1747 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 201 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_FLOAT64);} break; case 66: -#line 204 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_STRING);} -#line 1753 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 204 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_STRING);} break; case 67: -#line 205 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_STRING);} -#line 1759 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 205 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_STRING);} break; case 68: -#line 209 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[0]),(Object)SCAN_URL);} -#line 1765 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 209 "dap.y" + {(yyval)=dap_attrvalue(parsestate,null,(yyvsp[(1) - (1)]),(Object)SCAN_URL);} break; case 69: -#line 
210 "dap.y" /* yacc.c:1646 */ - {(yyval)=dap_attrvalue(parsestate,(yyvsp[-2]),(yyvsp[0]),(Object)SCAN_URL);} -#line 1771 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 210 "dap.y" + {(yyval)=dap_attrvalue(parsestate,(yyvsp[(1) - (3)]),(yyvsp[(3) - (3)]),(Object)SCAN_URL);} break; case 70: -#line 214 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[0]);} -#line 1777 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 214 "dap.y" + {(yyval)=(yyvsp[(1) - (1)]);} break; case 71: -#line 218 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[0]);} -#line 1783 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 218 "dap.y" + {(yyval)=(yyvsp[(1) - (1)]);} break; case 72: -#line 219 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[0]);} -#line 1789 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 219 "dap.y" + {(yyval)=(yyvsp[(1) - (1)]);} break; case 73: -#line 230 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[-1]); (yyval)=(yyvsp[0]); (yyval)=null;} -#line 1795 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 230 "dap.y" + {(yyval)=(yyvsp[(2) - (3)]); (yyval)=(yyvsp[(3) - (3)]); (yyval)=null;} break; case 74: -#line 235 "dap.y" /* yacc.c:1646 */ - {dap_errorbody(parsestate,(yyvsp[-5]),(yyvsp[-4]),(yyvsp[-3]),(yyvsp[-2]));} -#line 1801 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 235 "dap.y" + {dap_errorbody(parsestate,(yyvsp[(2) - (7)]),(yyvsp[(3) - (7)]),(yyvsp[(4) - (7)]),(yyvsp[(5) - (7)]));} break; case 75: -#line 238 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 238 "dap.y" {(yyval)=null;} -#line 1807 "dap.tab.c" /* yacc.c:1646 */ break; case 76: -#line 238 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[-1]);} -#line 1813 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 238 "dap.y" + {(yyval)=(yyvsp[(3) - (4)]);} break; case 77: -#line 239 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 239 "dap.y" {(yyval)=null;} -#line 1819 "dap.tab.c" /* yacc.c:1646 */ break; case 78: -#line 239 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[-1]);} -#line 1825 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 239 "dap.y" + {(yyval)=(yyvsp[(3) - (4)]);} break; case 79: -#line 240 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 240 "dap.y" {(yyval)=null;} -#line 1831 "dap.tab.c" /* yacc.c:1646 */ break; case 80: -#line 240 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[-1]);} -#line 1837 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 240 "dap.y" + {(yyval)=(yyvsp[(3) - (4)]);} break; case 81: -#line 241 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 241 "dap.y" {(yyval)=null;} -#line 1843 "dap.tab.c" /* yacc.c:1646 */ break; case 82: -#line 241 "dap.y" /* yacc.c:1646 */ - {(yyval)=(yyvsp[-1]);} -#line 1849 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 241 "dap.y" + {(yyval)=(yyvsp[(3) - (4)]);} break; case 83: -#line 247 "dap.y" /* yacc.c:1646 */ - {(yyval)=dapdecode(parsestate->lexstate,(yyvsp[0]));} -#line 1855 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 247 "dap.y" + {(yyval)=dapdecode(parsestate->lexstate,(yyvsp[(1) - (1)]));} break; case 84: -#line 248 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 248 "dap.y" {(yyval)=strdup("alias");} -#line 1861 "dap.tab.c" /* yacc.c:1646 */ break; case 85: -#line 249 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 249 "dap.y" {(yyval)=strdup("array");} -#line 1867 "dap.tab.c" /* yacc.c:1646 */ break; case 86: 
-#line 250 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 250 "dap.y" {(yyval)=strdup("attributes");} -#line 1873 "dap.tab.c" /* yacc.c:1646 */ break; case 87: -#line 251 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 251 "dap.y" {(yyval)=strdup("byte");} -#line 1879 "dap.tab.c" /* yacc.c:1646 */ break; case 88: -#line 252 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 252 "dap.y" {(yyval)=strdup("dataset");} -#line 1885 "dap.tab.c" /* yacc.c:1646 */ break; case 89: -#line 253 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 253 "dap.y" {(yyval)=strdup("data");} -#line 1891 "dap.tab.c" /* yacc.c:1646 */ break; case 90: -#line 254 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 254 "dap.y" {(yyval)=strdup("error");} -#line 1897 "dap.tab.c" /* yacc.c:1646 */ break; case 91: -#line 255 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 255 "dap.y" {(yyval)=strdup("float32");} -#line 1903 "dap.tab.c" /* yacc.c:1646 */ break; case 92: -#line 256 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 256 "dap.y" {(yyval)=strdup("float64");} -#line 1909 "dap.tab.c" /* yacc.c:1646 */ break; case 93: -#line 257 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 257 "dap.y" {(yyval)=strdup("grid");} -#line 1915 "dap.tab.c" /* yacc.c:1646 */ break; case 94: -#line 258 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 258 "dap.y" {(yyval)=strdup("int16");} -#line 1921 "dap.tab.c" /* yacc.c:1646 */ break; case 95: -#line 259 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 259 "dap.y" {(yyval)=strdup("int32");} -#line 1927 "dap.tab.c" /* yacc.c:1646 */ break; case 96: -#line 260 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 260 "dap.y" {(yyval)=strdup("maps");} -#line 1933 "dap.tab.c" /* yacc.c:1646 */ break; case 97: -#line 261 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 261 "dap.y" {(yyval)=strdup("sequence");} -#line 1939 "dap.tab.c" /* yacc.c:1646 */ break; case 98: -#line 262 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 262 "dap.y" {(yyval)=strdup("string");} -#line 1945 "dap.tab.c" /* yacc.c:1646 */ break; case 99: -#line 263 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 263 "dap.y" {(yyval)=strdup("structure");} -#line 1951 "dap.tab.c" /* yacc.c:1646 */ break; case 100: -#line 264 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 264 "dap.y" {(yyval)=strdup("uint16");} -#line 1957 "dap.tab.c" /* yacc.c:1646 */ break; case 101: -#line 265 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 265 "dap.y" {(yyval)=strdup("uint32");} -#line 1963 "dap.tab.c" /* yacc.c:1646 */ break; case 102: -#line 266 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 266 "dap.y" {(yyval)=strdup("url");} -#line 1969 "dap.tab.c" /* yacc.c:1646 */ break; case 103: -#line 267 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 267 "dap.y" {(yyval)=strdup("code");} -#line 1975 "dap.tab.c" /* yacc.c:1646 */ break; case 104: -#line 268 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 268 "dap.y" {(yyval)=strdup("message");} -#line 1981 "dap.tab.c" /* yacc.c:1646 */ break; case 105: -#line 269 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 269 "dap.y" {(yyval)=strdup("program");} -#line 1987 "dap.tab.c" /* yacc.c:1646 */ break; case 106: -#line 270 "dap.y" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 270 "dap.y" {(yyval)=strdup("program_type");} -#line 
1993 "dap.tab.c" /* yacc.c:1646 */ break; -#line 1997 "dap.tab.c" /* yacc.c:1646 */ + +/* Line 1806 of yacc.c */ +#line 2303 "dap.tab.c" default: break; } /* User semantic actions sometimes alter yychar, and that requires @@ -2015,7 +2321,7 @@ yyreduce: *++yyvsp = yyval; - /* Now 'shift' the result of the reduction. Determine what state + /* Now `shift' the result of the reduction. Determine what state that goes to, based on the state we popped back to and the rule number reduced by. */ @@ -2030,9 +2336,9 @@ yyreduce: goto yynewstate; -/*--------------------------------------. -| yyerrlab -- here on detecting error. | -`--------------------------------------*/ +/*------------------------------------. +| yyerrlab -- here on detecting error | +`------------------------------------*/ yyerrlab: /* Make sure we have latest lookahead translation. See comments at user semantic actions for why this is necessary. */ @@ -2083,20 +2389,20 @@ yyerrlab: if (yyerrstatus == 3) { /* If just tried and failed to reuse lookahead token after an - error, discard it. */ + error, discard it. */ if (yychar <= YYEOF) - { - /* Return failure if at end of input. */ - if (yychar == YYEOF) - YYABORT; - } + { + /* Return failure if at end of input. */ + if (yychar == YYEOF) + YYABORT; + } else - { - yydestruct ("Error: discarding", - yytoken, &yylval, parsestate); - yychar = YYEMPTY; - } + { + yydestruct ("Error: discarding", + yytoken, &yylval, parsestate); + yychar = YYEMPTY; + } } /* Else will try to reuse lookahead token after shifting the error @@ -2115,7 +2421,7 @@ yyerrorlab: if (/*CONSTCOND*/ 0) goto yyerrorlab; - /* Do not reclaim the symbols of the rule whose action triggered + /* Do not reclaim the symbols of the rule which action triggered this YYERROR. */ YYPOPSTACK (yylen); yylen = 0; @@ -2128,37 +2434,35 @@ yyerrorlab: | yyerrlab1 -- common code for both syntax error and YYERROR. | `-------------------------------------------------------------*/ yyerrlab1: - yyerrstatus = 3; /* Each real token shifted decrements this. */ + yyerrstatus = 3; /* Each real token shifted decrements this. */ for (;;) { yyn = yypact[yystate]; if (!yypact_value_is_default (yyn)) - { - yyn += YYTERROR; - if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) - { - yyn = yytable[yyn]; - if (0 < yyn) - break; - } - } + { + yyn += YYTERROR; + if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR) + { + yyn = yytable[yyn]; + if (0 < yyn) + break; + } + } /* Pop the current state because it cannot handle the error token. */ if (yyssp == yyss) - YYABORT; + YYABORT; yydestruct ("Error: popping", - yystos[yystate], yyvsp, parsestate); + yystos[yystate], yyvsp, parsestate); YYPOPSTACK (1); yystate = *yyssp; YY_STACK_PRINT (yyss, yyssp); } - YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN *++yyvsp = yylval; - YY_IGNORE_MAYBE_UNINITIALIZED_END /* Shift the error token. */ @@ -2182,7 +2486,7 @@ yyabortlab: yyresult = 1; goto yyreturn; -#if !defined yyoverflow || YYERROR_VERBOSE +#if !defined(yyoverflow) || YYERROR_VERBOSE /*-------------------------------------------------. | yyexhaustedlab -- memory exhaustion comes here. | `-------------------------------------------------*/ @@ -2201,14 +2505,14 @@ yyreturn: yydestruct ("Cleanup: discarding lookahead", yytoken, &yylval, parsestate); } - /* Do not reclaim the symbols of the rule whose action triggered + /* Do not reclaim the symbols of the rule which action triggered this YYABORT or YYACCEPT. 
*/ YYPOPSTACK (yylen); YY_STACK_PRINT (yyss, yyssp); while (yyssp != yyss) { yydestruct ("Cleanup: popping", - yystos[*yyssp], yyvsp, parsestate); + yystos[*yyssp], yyvsp, parsestate); YYPOPSTACK (1); } #ifndef yyoverflow @@ -2219,7 +2523,13 @@ yyreturn: if (yymsg != yymsgbuf) YYSTACK_FREE (yymsg); #endif - return yyresult; + /* Make sure YYID is used. */ + return YYID (yyresult); } -#line 273 "dap.y" /* yacc.c:1906 */ + + + +/* Line 2067 of yacc.c */ +#line 273 "dap.y" + diff --git a/oc2/daptab.h b/oc2/daptab.h index d015b33e4..e085df562 100644 --- a/oc2/daptab.h +++ b/oc2/daptab.h @@ -1,19 +1,19 @@ -/* A Bison parser, made by GNU Bison 3.0. */ +/* A Bison parser, made by GNU Bison 2.5. */ /* Bison interface for Yacc-like parsers in C - - Copyright (C) 1984, 1989-1990, 2000-2013 Free Software Foundation, Inc. - + + Copyright (C) 1984, 1989-1990, 2000-2011 Free Software Foundation, Inc. + This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. - + This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. - + You should have received a copy of the GNU General Public License along with this program. If not, see . */ @@ -26,62 +26,54 @@ special exception, which will cause the skeleton and the resulting Bison output files to be licensed under the GNU General Public License without this special exception. - + This special exception was added by the Free Software Foundation in version 2.2 of Bison. */ -#ifndef YY_DAP_DAP_TAB_H_INCLUDED -# define YY_DAP_DAP_TAB_H_INCLUDED -/* Debug traces. */ -#ifndef YYDEBUG -# define YYDEBUG 1 -#endif -#if YYDEBUG -extern int dapdebug; -#endif -/* Token type. */ +/* Tokens. */ #ifndef YYTOKENTYPE # define YYTOKENTYPE - enum yytokentype - { - SCAN_ALIAS = 258, - SCAN_ARRAY = 259, - SCAN_ATTR = 260, - SCAN_BYTE = 261, - SCAN_CODE = 262, - SCAN_DATASET = 263, - SCAN_DATA = 264, - SCAN_ERROR = 265, - SCAN_FLOAT32 = 266, - SCAN_FLOAT64 = 267, - SCAN_GRID = 268, - SCAN_INT16 = 269, - SCAN_INT32 = 270, - SCAN_MAPS = 271, - SCAN_MESSAGE = 272, - SCAN_SEQUENCE = 273, - SCAN_STRING = 274, - SCAN_STRUCTURE = 275, - SCAN_UINT16 = 276, - SCAN_UINT32 = 277, - SCAN_URL = 278, - SCAN_PTYPE = 279, - SCAN_PROG = 280, - WORD_WORD = 281, - WORD_STRING = 282 - }; + /* Put the tokens into the symbol table, so that GDB and other debuggers + know about them. */ + enum yytokentype { + SCAN_ALIAS = 258, + SCAN_ARRAY = 259, + SCAN_ATTR = 260, + SCAN_BYTE = 261, + SCAN_CODE = 262, + SCAN_DATASET = 263, + SCAN_DATA = 264, + SCAN_ERROR = 265, + SCAN_FLOAT32 = 266, + SCAN_FLOAT64 = 267, + SCAN_GRID = 268, + SCAN_INT16 = 269, + SCAN_INT32 = 270, + SCAN_MAPS = 271, + SCAN_MESSAGE = 272, + SCAN_SEQUENCE = 273, + SCAN_STRING = 274, + SCAN_STRUCTURE = 275, + SCAN_UINT16 = 276, + SCAN_UINT32 = 277, + SCAN_URL = 278, + SCAN_PTYPE = 279, + SCAN_PROG = 280, + WORD_WORD = 281, + WORD_STRING = 282 + }; #endif -/* Value type. */ + + #if ! defined YYSTYPE && ! 
defined YYSTYPE_IS_DECLARED typedef int YYSTYPE; # define YYSTYPE_IS_TRIVIAL 1 +# define yystype YYSTYPE /* obsolescent; will be withdrawn */ # define YYSTYPE_IS_DECLARED 1 #endif -int dapparse (DAPparsestate* parsestate); -#endif /* !YY_DAP_DAP_TAB_H_INCLUDED */ diff --git a/oc2/occurlfunctions.c b/oc2/occurlfunctions.c index 7979f136e..29fee704d 100644 --- a/oc2/occurlfunctions.c +++ b/oc2/occurlfunctions.c @@ -170,7 +170,7 @@ ocset_curlflag(OCstate* state, int flag) { struct OCSSL* ssl = &state->ssl; CHECK(state, CURLOPT_SSL_VERIFYPEER, (OPTARG)(ssl->verifypeer?1L:0L)); - CHECK(state, CURLOPT_SSL_VERIFYHOST, (OPTARG)(ssl->verifyhost?2L:0L)); + CHECK(state, CURLOPT_SSL_VERIFYHOST, (OPTARG)(ssl->verifyhost?1L:0L)); if(ssl->certificate) CHECK(state, CURLOPT_SSLCERT, ssl->certificate); if(ssl->key) diff --git a/oc2/ocdata.c b/oc2/ocdata.c index 63444200f..cec5880d9 100644 --- a/oc2/ocdata.c +++ b/oc2/ocdata.c @@ -238,7 +238,7 @@ ocdata_read(OCstate* state, OCdata* data, size_t start, size_t count, { int stat = OC_NOERR; XXDR* xdrs; - OCtype etype, octype; + OCtype etype; int isscalar; size_t elemsize, totalsize, countsize; OCnode* pattern; @@ -251,8 +251,7 @@ ocdata_read(OCstate* state, OCdata* data, size_t start, size_t count, assert(memsize > 0); pattern = data->pattern; - octype = pattern->octype; - assert(octype == OC_Atomic); + assert(pattern->octype == OC_Atomic); etype = pattern->etype; isscalar = (pattern->array.rank == 0 ? 1 : 0); diff --git a/oc2/ocinternal.c b/oc2/ocinternal.c index 0a2c0eb55..3e94995cb 100644 --- a/oc2/ocinternal.c +++ b/oc2/ocinternal.c @@ -611,7 +611,8 @@ ocset_curlproperties(OCstate* state) /* If no cookie file was defined, define a default */ char tmp[OCPATHMAX+1]; int stat; - snprintf(tmp,sizeof(tmp)-1,"%s/%s/",ocglobalstate.tempdir,OCDIR); + pid_t pid = getpid(); + snprintf(tmp,sizeof(tmp)-1,"%s/%s.%ld/",ocglobalstate.tempdir,OCDIR,(long)pid); #ifdef _MSC_VER stat = mkdir(tmp); #else diff --git a/oc2/ocrc.c b/oc2/ocrc.c index 384c31051..c7e18e5fb 100644 --- a/oc2/ocrc.c +++ b/oc2/ocrc.c @@ -13,6 +13,8 @@ #include "ocdebug.h" #include "oclog.h" +#define OCRCFILEENV "DAPRCFILE" + #define RTAG ']' #define LTAG '[' @@ -366,12 +368,14 @@ ocrc_load(void) /* locate the configuration files in the following order: 1. specified by set_rcfile - 2. set by OCRCFILE env variable + 2. set by DAPRCFILE env variable 3. '.' 4. $HOME */ if(ocglobalstate.rc.rcfile != NULL) { /* always use this */ path = strdup(ocglobalstate.rc.rcfile); + } else if(getenv(OCRCFILEENV) != NULL && strlen(getenv(OCRCFILEENV)) > 0) { + path = strdup(getenv(OCRCFILEENV)); } else { char** rcname; int found = 0; diff --git a/oc2/ocuri.c b/oc2/ocuri.c index 985ee2d64..3f18a5542 100644 --- a/oc2/ocuri.c +++ b/oc2/ocuri.c @@ -593,12 +593,10 @@ ocuridecodeparams(OCURI* ocuri) int nparams; char* params = NULL; char** plist; - size_t len; if(ocuri == NULL) return 0; if(ocuri->params == NULL) return 1; - len = strlen(ocuri->params); params = strdup(ocuri->params); if(params == NULL) return 0; /* no memory */ From 6cba2a720ca8edb5ae21202ba59dd0517b9f6691 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Tue, 26 May 2015 11:00:35 -0600 Subject: [PATCH 11/12] Converted html anchors into markdown-style anchors, for processing by Doxygen. 
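
As an illustrative sketch only (the exact commands used for this conversion
are not recorded in this patch): assuming the old headings carried HTML
anchors of roughly the form <a name="NAME"></a>, a rewrite along the lines
below would produce the Doxygen-style {#NAME} anchors that appear in the
diff that follows. Anchors containing characters Doxygen rejects were also
renamed in both the headings and the TOC links (e.g. #Tcl/Tk -> #TclTk,
#PolyPaint+ -> #PolyPaint, #DioVISTA/Storm -> #DioVISTAStorm).

    # hypothetical sketch, not necessarily the command used for this change;
    # assumes each heading ends with an anchor like <a name="NAME"></a>
    sed -i -e 's|<a name="\([A-Za-z0-9_+-]*\)"></a>|{#\1}|g' docs/software.md
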
--- docs/software.md | 235 ++++++++++++++++++++++++----------------------- 1 file changed, 118 insertions(+), 117 deletions(-) diff --git a/docs/software.md b/docs/software.md index cb5561b69..a7c425386 100644 --- a/docs/software.md +++ b/docs/software.md @@ -1,6 +1,8 @@ Software for Manipulating or Displaying NetCDF Data {#software} =================================================== +[TOC] + This document provides references to software packages that may be used for manipulating or displaying [netCDF](/software/netcdf/) data. We include information about both freely-available and licensed (commercial) software that can be used with netCDF data. We rely on developers to help keep this list up-to-date. If you know of corrections or additions, please [send them to us (mailto:support@unidata.ucar.edu). Where practical, we would like to include WWW links to information about these packages in the HTML version of this document. Other useful guides to utilities that can handle netCDF data include ARM's list of [ARM-tested netCDF data tools](http://science.arm.gov/%7ecflynn/ARM_Tested_Tools/), which includes some downloadable binaries and the NOAA Geophysical Fluid Dynamics Laboratory [guide to netCDF utilities](http://nomads.gfdl.noaa.gov/sandbox/products/vis/data/netcdf/GFDL_VG_NetCDF_Utils.html). @@ -12,6 +14,7 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [ANDX (ARM NetCDF Data eXtract) and ANAX (ARM NetCDF ASCII eXtract)](#ANDX) + - [ANTS (ARM NetCDF Tool Suite)](#ANTS) - [ARGOS (interActive thRee-dimensional Graphics ObServatory)](#ARGOS) - [CDAT (Climate Data Analysis Tool)](#CDAT) @@ -39,7 +42,7 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [HDF-EOS to netCDF converter](#HDF-EOS) - [HIPHOP (Handy IDL-Program for HDF-Output Plotting)](#HIPHOP) - [HOPS (Hyperslab OPerator - Suite)](#Hyperslab_OPerator_Suite_(HOPS)) + Suite)](#HOPS)) - [iCDF (imports chromatographic netCDF data into MATLAB)](#iCDF) - [IDV (Integrated Data Viewer)](#IDV) - [Ingrid](#Ingrid) @@ -66,6 +69,7 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [ncregrid](#ncregrid) - [nctoolbox (a MATLAB common data model interface)](#nctoolbox) - [ncview](#ncview) +- [NetCDF Toolbox for MATLAB-5](#matlab5) - [ncvtk](#ncvtk) - [netcdf tools](#netcdf_tools) - [netcdf4excel (add-in for MS Excel)](#netcdf4excel) @@ -79,7 +83,7 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [Parallel-NetCDF](#Parallel-NetCDF) - [Paraview and vtkCSCSNetCDF](#Paraview) - [Perl interfaces](#Perl) -- [PolyPaint+](#PolyPaint+) +- [PolyPaint+](#PolyPaint) - [Pomegranate](#pomegranate) - [Pupynere (PUre PYthon NEtcdf REader)](#pupynere) - [PyNGL and PyNIO](#PyNGL) @@ -89,7 +93,7 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [Ruby interface](#Ruby) - [Scientific DataSet (SDS) Library](#SDS) - [Apache Spatial Information System (SIS)](#SIS) -- [Tcl/Tk interfaces](#Tcl/Tk) +- [Tcl/Tk interfaces](#TclTk) - [Tcl-nap (N-dimensional array processor)](#Tcl-nap) - [Visual Basic and VB.net](#VB) - [VisAD](#VisAD) @@ -107,7 +111,7 @@ Other useful guides to utilities that can handle netCDF data include ARM's list - [Avizo](#Avizo) - [AVS](#AVS) - [Barrodale UFI](#BCS-UFI) -- [DioVISTA/Storm](#DioVISTA/Storm) +- [DioVISTA/Storm](#DioVISTAStorm) - [EnSight](#EnSight) - [Environmental WorkBench](#Environmental_WorkBench) - [ESRI](#ESRI) @@ -133,7 +137,7 @@ Other useful guides to 
utilities that can handle netCDF data include ARM's list Freely Available Software {#freely} ========================= -ANDX and ANAX {#ANDX} +ANDX and ANAX {#ANDX} ------------------------------------ The ARM Program has developed [ANDX (ARM NetCDF Data @@ -150,7 +154,7 @@ scaled-down version of ANDX -- it is designed to only extract ASCII data. All features of ANDX pertaining to non-graphic data extraction are included in ANAX. -ANTS {#ANTS} +ANTS {#ANTS} --------------------------- The ARM Program has developed [ANTS (ARM NetCDF Tool @@ -177,7 +181,7 @@ represents a library of coding examples for fundamental netCDF tasks. See the [website](http://science.arm.gov/~cflynn/ANTS/) for more information. -ARGOS {#ARGOS} +ARGOS {#ARGOS} ----------------------------- [ARGOS](http://www.lapeth.ethz.ch/argos/index.html) (interActive @@ -207,7 +211,7 @@ conditions](http://www.lapeth.ethz.ch/argos/argos_copyright.html) are available. For further information and installation, please E-mail to: bresch@atmos.umnw.ethz.ch -CDAT {#CDAT} +CDAT {#CDAT} --------------------------- The [Climate Data Analysis Tool (CDAT)](http://cdat.sf.net), developed @@ -239,7 +243,7 @@ an image, or as a collection of images in an animation. The software has a gradual learning curve, allowing the novice user to quickly obtain useful results. -CDFconvert {#CDFconvert} +CDFconvert {#CDFconvert} --------------------------------------- The [MRG CDFconvert @@ -257,7 +261,7 @@ has the flexibility to handle netCDF files generated by a number of sources, including NCEP and ECMWF. User-definable conversion tables make the extension of the package to different datasets possible. -cdfsync {#cdfsync} +cdfsync {#cdfsync} --------------------------------- Joe Sirott of NOAA's Pacific Marine Environmental Laboratory has @@ -277,7 +281,7 @@ The latest version should run on Linux variants and Solaris. More information is available at the [cdfsync website](http://www.epic.noaa.gov/epic/software/cdfsync/). -CDO (Climate Data Operators) {#CDO} +CDO (Climate Data Operators) {#CDO} -------------------------------------------------- Uwe Schulzweida at the Max Planck Institute for Meteorology has @@ -316,7 +320,7 @@ or using ECMWF reanalysis on a reduced grid More information is available on the [CDO homepage](http://code.zmaw.de/projects/cdo). -CIDS Tools {#CIDS_Tools} +CIDS Tools {#CIDS_Tools} --------------------------------------- The Center for Clouds Chemistry and Climate @@ -332,7 +336,7 @@ The source for these utilities can be downloaded from [CIDS NetCDF Visualization Tools site](http://www-c4.ucsd.edu/~cids/software/visual.html). -CSIRO MATLAB/netCDF interface +CSIRO MATLAB/netCDF interface {#CSIRO-MATLAB} ------------------------------------------------------------ The [CSIRO MATLAB/netCDF @@ -352,7 +356,7 @@ netCDF data, the CSIRO interface has a simpler syntax than the netCDF Toolbox, but the latter may also be used to create and manipulate netCDF variables and datasets. -EPIC +EPIC {#EPIC} --------------------------- NOAA's Pacific Marine Environmental Laboratory @@ -391,7 +395,7 @@ information about EPIC, please see the Web pages at . Contact epic@pmel.noaa.gov, or Nancy Soreide, nns@noaapmel.gov, for more information. -Excel Use +Excel Use ------------------------------------ Several packages are available for accessing netCDF data from Microsoft @@ -400,7 +404,7 @@ a [Scientific Dataset (SDS) Library](#SDS) that supports a DataSetEditor add-in for Excel to view and modify various forms of data, including netCDF. 
-EzGet +EzGet {#EzGet} ----------------------------- A FORTRAN library called @@ -437,7 +441,7 @@ the documentation or software, see the EzGet home page at . For questions or comments on EzGet, contact Karl Taylor (taylor13@llnl.gov). -FAN +FAN ------------------------- [FAN (File Array Notation)](/software/netcdf/fan_utils.html) is Harvey @@ -451,7 +455,7 @@ via anonymous FTP from . Questions and comments may be sent to Harvey Davies, harvey.davies@csiro.au. -FERRET +FERRET {#FERRET} ------------------------------- [FERRET](http://ferret.wrc.noaa.gov/Ferret/) is an interactive computer @@ -478,7 +482,7 @@ data base, special memory management for very large calculations, and symmetrical processing in 4 dimensions. Contact Steve Hankin, hankin@noaapmel.gov, for more information. -Fimex +Fimex {#fimex} ----------------------------- Heiko Klein (Norwegian Meteorological Institute) has developed the @@ -496,7 +500,7 @@ For simple usage, Fimex also comes with the command line program fimex. Documentation and downloads are available from the [fimex web site](http://wiki.met.no/fimex/). -FWTools (GIS Binary Kit for Windows and Linux) +FWTools (GIS Binary Kit for Windows and Linux) {#fwtools} ------------------------------------------------------------------------ [FWTools](http://fwtools.maptools.org/) is Frank Warmerdam's set of Open @@ -507,7 +511,7 @@ some supporting components. FWTools aims to track the latest development versions of the packages included as opposed to official releases, "to give folks a chance to use the *latest and greatest*". -GDAL +GDAL {#GDAL} --------------------------- Frank Warmerdam's [GDAL](http://www.remotesensing.org/gdal/index.html) @@ -538,14 +542,14 @@ netCDF (GMT conventions) as easy as: gdal_translate arc_ascii.grd -of GMT gmt_grid.nc -GDL (GNU Data Language) +GDL (GNU Data Language) {#GDL} --------------------------------------------- [GDL](http://gnudatalanguage.sourceforge.net/) is a free implementation of most of the programming language supported by [IDL](#IDL) (Interactive Data Language). GDL supports the netCDF-3 API. -Gfdnavi (Geophysical fluid data navigator) +Gfdnavi (Geophysical fluid data navigator) {#Gfdnavi} -------------------------------------------------------------------- [Gfdnavi](http://www.gfd-dennou.org/arch/davis/gfdnavi/index.en.htm) is @@ -575,7 +579,7 @@ Global Satellite Mapping of Precipitation ([GSMaP](http://www.radar.aero.osakafu-u.ac.jp/~gsmap/index_english.html)) project. -GMT +GMT {#GMT} ------------------------- [GMT](http://gmt.soest.hawaii.edu/) (Generic Mapping Tools) is an open @@ -596,7 +600,7 @@ anonymous ftp from several servers; see [gmt.soest.hawaii.edu](http://gmt.soest.hawaii.edu) for installation information. -Grace +Grace {#Grace} ----------------------------- [Grace](http://plasma-gate.weizmann.ac.il/Grace/) is a tool to make @@ -619,7 +623,7 @@ A few features of Grace are: - Device-independent Type1 font rastering. - Ability to read or write netCDF data. -GrADS +GrADS {#GrADS} ----------------------------- [GrADS](http://grads.iges.org/grads/grads.html) (Grid Analysis and @@ -633,7 +637,7 @@ manipulation, and display of earth science data in several forms, including GRIB and netCDF. For more information, see the [GrADS User's Guide](http://grads.iges.org/grads/gadoc/users.html). 
-Gri +Gri ------------------------- Gri is an extensible plotting language for producing scientific graphs, @@ -642,7 +646,7 @@ Dalhousie University is the author of Gri, which can read data from netCDF files as well as ASCII and native binary data. For more information on Gri, see the URL . -GXSM +GXSM {#GXSM} --------------------------- The GXSM is the Gnome X Scanning Microscopy project, it is a bit more @@ -651,7 +655,7 @@ support for DSP cards including open source DSP software and a growing set of SPM related electronics. For more information, see . -HDF interface +HDF interface {#HDF_interface} --------------------------------------------- The National Center for Supercomputing Applications (NCSA) has added the @@ -678,7 +682,7 @@ rest of the HDF tool suite. Such an integration will then allow tools written for netCDF and tools written for HDF to both interact intelligently with the new data files. -HDF-EOS to netCDF converter +HDF-EOS to netCDF converter {#HDF-EOS} ----------------------------------------------------- The Goddard Earth Sciences Data and Information Services Center ([GES @@ -699,7 +703,7 @@ are available for searching and converting these data. More information on AIRS products is available at . -HIPHOP +HIPHOP {#HIPHOP} ------------------------------- [HIPHOP](http://www.knmi.nl/onderzk/atmosam/English/Service/hiphop/hiphop.html), @@ -722,7 +726,7 @@ Beginning with Version 4.0, it also supports the ability to overlay meteorological fields on a number of different satellite images, and to draw air parcel trajectories. -Hyperslab OPerator Suite (HOPS) +Hyperslab OPerator Suite (HOPS) {#HOPS} --------------------------------------------------------------------------------- Hyperslab OPerator Suite @@ -746,7 +750,7 @@ Note that HOPS is not a general purpose netCDF utility and works only for the NCAR CSM netCDF formats. For more information, check the [HOPS home page](http://www.cgd.ucar.edu/gds/svn/hyperslab.html). -iCDF (imports chromatographic netCDF data into MATLAB) +iCDF (imports chromatographic netCDF data into MATLAB) {#iCDF} ----------------------------------------------------------------------------- Klavs M. Sørensen, Thomas Skov and Rasmus Bro (Faculty of Life Sciences, @@ -768,7 +772,7 @@ For more information, see the paper > chromatographic analysis Analytical and Bioanalytical Chemistry, 390 > (1): 281-285. -IDV (Integrated Data Viewer) +IDV (Integrated Data Viewer) {#IDV} -------------------------------------------------- Unidata's [Integrated Data Viewer (IDV)](/software/idv/) is a Java @@ -791,7 +795,7 @@ collaborative visualization and analysis and the [netCDF Java library](/software/netcdf-java/) for reading and manipulating netCDF files. -Ingrid +Ingrid {#Ingrid} ------------------------------- [Ingrid](http://ingrid.ldgo.columbia.edu/), by M. Benno Blumenthal @@ -832,7 +836,7 @@ quote the introduction: Ingrid currently runs on Linux, for which binaries are available. CVS access to the current source can be arranged. - Intel Array Visualizer + Intel Array Visualizer {#IntelArrayVisualizer} -------------------------------------------------------------- The [Intel® Array @@ -845,7 +849,7 @@ developing scientific visualization applications and for creating interactive graphs of array data in various formats, including HDF and netCDF. -IVE +IVE {#IVE} ------------------------- [IVE (Interactive Visualization @@ -899,7 +903,7 @@ Library](http://ngwww.ucar.edu/ng/) to produce graphical output. 
IVE is via anonymous ftp; and as binary on request for licensees of NCAR graphics. -JSON format with the ncdump-json utility +JSON format with the ncdump-json utility {#JSON} --------------------------------------------------------------- Josep Llodrà has developed a program to output the contents of a @@ -910,7 +914,7 @@ functionality, unless the "-j" option is used to specify JSON output. The program and source are available from . -Java interface +Java interface {#Java_interface} ----------------------------------------------- The [NetCDF-Java 4.2 Library](/packages/netcdf-java/) is a Java @@ -931,7 +935,7 @@ The library also implements you to add metadata to CDM datasets, as well as to create virtual datasets through aggregation. -Kst (2D plotting tool) +Kst (2D plotting tool) {#KST} -------------------------------------------- [Kst](http://kst-plot.kde.org) is an open-source, cross-platform 2D @@ -959,7 +963,7 @@ Kst is characterized by the following features: thanks to a plugin-based architecture - Available on Windows, Linux, and Mac OSX -Labview interface +Labview interface {#Labview-API} ----------------------------------------------- A netCDF Labview interface, implemented in the Labview programming @@ -970,7 +974,7 @@ University in China. For more information and to download the source code, see the [NetCDFLabview web site](https://sourceforge.net/projects/netcdflabview/). -MBDyn (MultiBody Dynamics) +MBDyn (MultiBody Dynamics) #{MBDyn} -------------------------------------------------- [MBDyn](http://www.aero.polimi.it/~mbdyn/) is an open-source MultiBody @@ -989,7 +993,7 @@ automotive fields for dynamics analysis and simulation of complex systems. Dynamic linking of user-defined modules is heavily exploited to let users extend the feature library. -Max\_diff\_nc +Max_diff_nc ${Maxdiffnc} ------------------------------------------- This is a program which compares two NetCDF files. Variables with the @@ -1005,7 +1009,7 @@ The web page for this program is: This is a freely available tool. -MeteoExplorer +MeteoExplorer {#MeteoExplorer} --------------------------------------------- [MeteoExplorer](http://www.eastmodelsoft.com/index_en.htm), developed by @@ -1045,7 +1049,7 @@ For more information, please visit [MeteoExplorer's home page](http://www.eastmodelsoft.com/software/mexplorer.htm) or contact the support staff via meteoexplorer@hotmail.com . -MeteoInfo +MeteoInfo {#MeteoInfo} ------------------------------------- For better cross-platform support, @@ -1061,7 +1065,7 @@ Download: Java 6 is needed to run the software. -MexEPS +MexEPS {#MexEPS} ------------------------------- [PMEL](http://www.pmel.noaa.gov/) has developed a MATLAB interface, @@ -1094,7 +1098,7 @@ If you have any questions or comments, please contact the author, Willa Zhu [(willa@pmel.noaa.gov)](mailto:willa@pmel.noaa.gov) or Nancy Soreide (nns@pmel.noaa.gov). -MEXNC and SNCTOOLS +MEXNC and SNCTOOLS {#MEXNC} ------------------------------------------ John Evans of Rutgers University maintains MEXNC and developed SNCTOOLS. @@ -1107,7 +1111,7 @@ such low level netCDF details as file IDs, variable IDs, and dimension IDs. The general philosophy behind SNCTOOLS is providing the ability to read and write data without trying to invent a new syntax. -Mirone (Windows MATLAB-based display) +Mirone (Windows MATLAB-based display) {#Mirone} -------------------------------------------------------------- Joaquim Luis of Universidade do Algarve has developed @@ -1135,7 +1139,7 @@ Also see\ J. F. Luis. 
Mirone: A multi-purpose tool for exploring grid data. Computers & Geosciences, 33, 31-41, 2007. -ncBrowse +ncBrowse {#ncBrowse} ----------------------------------- Donald Denbo of NOAA's Pacific Marine Environmental Laboratory has @@ -1165,7 +1169,7 @@ Questions and suggestions should be directed to reading a netCDF file with ncBrowse, please send him a copy of the file and he'll get ncBrowse to read it! -nccmp +nccmp {#nccmp} ----------------------------- Remik Ziemlinski of the NOAA Geophysical Fluid Dynamics Laboratory has @@ -1175,7 +1179,7 @@ metadata and operates quickly. Highly recommended for regression testing with large datasets. See the Web site for more information. -NCL +NCL {#NCL} ------------------------- The [NCAR Command Language (NCL)](http://www.ncl.ucar.edu/) is an @@ -1228,7 +1232,7 @@ Documentation and additional information on NCL are available from the download. You can also contact Mary Haley, at for more information. -NCO +NCO {#NCO} ------------------------- [NCO](http://nco.sourceforge.net) (netCDF operators) is a package of @@ -1258,7 +1262,7 @@ page](http://nco.sourceforge.net/), as is the NCO User's Guide. For more information, contact the author, Charlie Zender. -ncregrid +ncregrid {#ncregrid} ----------------------------------- Patrick Jöckel of the Max Planck Institute for Chemistry has developed @@ -1276,7 +1280,7 @@ onto the required grid resolution. More information is available on the web-page: . -nctoolbox (a MATLAB common data model interface) +nctoolbox (a MATLAB common data model interface) {#nctoolbox} ---------------------------------------------------------------------------- [nctoolbox](http://nctoolbox.github.io/nctoolbox/) is a MATLAB interface @@ -1289,7 +1293,7 @@ works with MATLAB 2008a and later. The nctoolbox software was developed by Brian Schlining (MBARI), Rich Signell (USGS), Sachin Kumar Bhate (freelance), and Alex Crosby (RPS/ASA). -ncdx +ncdx {#ncdx} --------------------------- Patrick Jöckel of the Max Planck Institute for Chemistry has developed @@ -1299,14 +1303,14 @@ any warranty under the GNU public license (GPL). More information is available on the web-page: . -ncensemble +ncensemble {#ncensemble} --------------------------------------- Alan Iwi, of Rutherford Appleton Laboratory, offers this command line ensemble statistics utility. More information is available on the web-page: . -ncview +ncview {#ncview} ------------------------------- [Ncview](http://meteora.ucsd.edu/~pierce/ncview_home_page.html) is a @@ -1323,7 +1327,7 @@ The source may be downloaded from . For more information, please contact the author, David W. Pierce at . -NetCDF Toolbox for MATLAB-5 +NetCDF Toolbox for MATLAB-5 {#matlab5} ---------------------------------------------------- The [NetCDF Toolbox for MATLAB-5](http://mexcdf.sourceforge.net/), @@ -1335,7 +1339,7 @@ bug-fix-only mode, and is maintained by John.G.Evans.NE@gmail.com, on the [MEXNC, SNCTOOLS, and the NetCDF Toolbox](http://mexcdf.sf.net) web page. -ncvtk +ncvtk {#ncvtk} ----------------------------- [Ncvtk](http://ncvtk.sourceforge.net/) is a program for exploring @@ -1357,7 +1361,7 @@ highly portable and known to run on Windows and Linux (i386, ia64, EMT64) platforms. More information about Ncvtk is available at . 
-Ivan Shmakov's netcdf tools +Ivan Shmakov's netcdf tools {#netcdf_tools} ---------------------------------------------------------- The NetCDF tools is a free software package consisting of a few tools @@ -1389,7 +1393,7 @@ page](http://freshmeat.net/projects/netcdf-tools) on freshmeat.net. The [source](http://waterlily.siamics.net/~ivan/src/netcdf-tools-0.1-rc1.tar.gz) is also available. -netcdf4excel (add-in for MS Excel) +netcdf4excel (add-in for MS Excel) {#netcdf4excel} ----------------------------------------------------------------- Alexander Bruhns has developed [a netCDF add-in written in Visual Basic @@ -1408,7 +1412,7 @@ Excel for read or write access. More details are available on the [netcdf4excel web site](http://code.google.com/p/netcdf4excel/). -NetCDF95 alternative Fortran API +NetCDF95 alternative Fortran API {#netcdf95} ----------------------------------------------------------- Lionel Guez has developed and made feely available @@ -1417,7 +1421,7 @@ Fortran interface to the NetCDF library. Compared to the Unidata-provided Fortran 90 netCDF interface, the NetCDF95 interface is meant to be easier to use and more secure. -Objective-C API +Objective-C API {#Objective-C} --------------------------------------------- Tom Moore has an Objective-C API, available here: @@ -1450,7 +1454,7 @@ distribution](http://www.mt-se.com/pub/NetcdfStep-GNUstep-0.6.1.tar.gz) synced up to version 0.6.1 is available for GNUstep for use on Linux and other Unix platforms. -Octave interface +Octave interface {#NCMEX} ---------------------------------------- The ARM Program has contributed NCMEX for Octave, a port of Chuck @@ -1471,7 +1475,7 @@ toolbox for Octave. For installation instructions, see the README file inside the .tar file. -Octave interface (Barth) +Octave interface (Barth) {#Octave} ------------------------------------------------- Alexander Barth has contributed the following: @@ -1490,7 +1494,7 @@ source code is available at It was also included in the Octave Repository [octave-forge](http://octave.sourceforge.net/). -OPeNDAP (formerly DODS) +OPeNDAP (formerly DODS) {#OPeNDAP} ------------------------------------------------- The [OPeNDAP](http://opendap.org/) (formerly known as DODS) is an @@ -1523,7 +1527,7 @@ NFS, for example). OPeNDAP software is freely available in both source form or binary form for selected platforms. -OpenDX +OpenDX {#OpenDX} ------------------------------- [OpenDX](http://www.opendx.org/about.html) (formerly IBM Data Explorer, @@ -1589,7 +1593,7 @@ earth, space and environmental sciences examples are available at Cornell University via info.tc.cornell.edu. Also see the [ncdx](#ncdx) tool for making netCDF files OpenDX compliant. -Panoply +Panoply {#Panoply} --------------------------------- [Panoply](http://www.giss.nasa.gov/tools/panoply/) is an application @@ -1621,7 +1625,7 @@ Panoply is developed at the NASA Goddard Institute for Space Studies. Questions and suggestions should be directed to [Dr. Robert B. Schmunk](http://www.giss.nasa.gov/staff/rschmunk.html). -Parallel-NetCDF +Parallel-NetCDF {#Parallel-NetCDF} ------------------------------------------------- A group of researchers at Northwestern University and Argonne National @@ -1642,7 +1646,7 @@ improvements on parallel platforms, as described in a [technical report](ftp://info.mcs.anl.gov/pub/tech_reports/reports/P1048.pdf). Users are invited to test Parallel-NetCDF in their applications. 
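As a rough illustration of how closely this parallel interface mirrors
the serial netCDF API, a minimal sketch using the PnetCDF `ncmpi_*` C
calls might look like the following; the file name `parallel_test.nc`,
the dimension and variable names, and the omission of error checking
are illustrative choices only, not anything prescribed by the library.

    /* Minimal PnetCDF sketch: every MPI rank collectively creates one file
     * and writes its own rank number into a shared one-dimensional variable.
     * Compile with mpicc and link against the PnetCDF library (-lpnetcdf). */
    #include <mpi.h>
    #include <pnetcdf.h>

    int main(int argc, char **argv)
    {
        int rank, nprocs, ncid, dimid, varid;
        MPI_Offset start[1], count[1];

        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        MPI_Comm_size(MPI_COMM_WORLD, &nprocs);

        /* All ranks create the same file collectively; the MPI communicator
         * replaces the implicit single-process context of the serial API. */
        ncmpi_create(MPI_COMM_WORLD, "parallel_test.nc", NC_CLOBBER,
                     MPI_INFO_NULL, &ncid);
        ncmpi_def_dim(ncid, "rank", (MPI_Offset)nprocs, &dimid);
        ncmpi_def_var(ncid, "rank_id", NC_INT, 1, &dimid, &varid);
        ncmpi_enddef(ncid);

        /* Each rank writes one element at the index given by its rank. */
        start[0] = rank;
        count[0] = 1;
        ncmpi_put_vara_int_all(ncid, varid, start, count, &rank);

        ncmpi_close(ncid);
        MPI_Finalize();
        return 0;
    }

Apart from the `ncmpi_` prefix, the communicator argument, and the
collective `_all` variants of the data calls, the calling pattern is
the same define-mode/data-mode sequence used by the serial library.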
-Paraview and vtkCSCSNetCDF +Paraview and vtkCSCSNetCDF {#Paraview} ----------------------------------------------------- @@ -1665,7 +1669,7 @@ of Tcl/Tk and C++. A vtk/ParaView reader for netCDF files can be found here. -Perl interfaces +Perl interfaces {#Perl} -------------------------------------- There are two netCDF interfaces for Perl: @@ -1676,7 +1680,7 @@ There are two netCDF interfaces for Perl: module, based on version 2 of the netCDF package. Uses perl lists for representing netCDF variables. -PolyPaint+ +PolyPaint+ {#PolyPaint} --------------------------------------- [PolyPaint+](http://lasp.colorado.edu/polypaint/home.html) is an @@ -1740,7 +1744,7 @@ You may order by... - E-MAIL : margi@aries.colorado.edu -Pomegranate +Pomegranate {#Pomegranate} ----------------------------------------- The P9E Team at NASA JPL has developed @@ -1764,7 +1768,7 @@ from files in supported formats. Pomegranate is open source software and can be downloaded from . -PyNGL and PyNIO +PyNGL and PyNIO {#PyNGL} --------------------------------------- NCAR's Computational and Information Systems Laboratory has developed @@ -1774,7 +1778,7 @@ visualization and data analysis and supporting access to a variety of data formats using an interface modelled on netCDF. -Python interfaces +Python interfaces {#Python} ------------------------------------------ Python is an interpreted, object-oriented language that is supported on @@ -1830,7 +1834,7 @@ Konrad Hinsen's NetCDF package to Python but can read and write in a parallel way. For more information, see: . -Pupynere (PUre PYthon NEtcdf REader) Roberto +Pupynere (PUre PYthon NEtcdf REader) Roberto De Almeida has developed [pupynere](http://pypi.python.org/pypi/pupynere/), a PUre PYthon NEtcdf REader that allows read-access to netCDF files using the same syntax as @@ -1838,7 +1842,7 @@ the Scientific.IO.NetCDF Python module. Even though it's written in Python, the module is up to 40% faster than Scientific.IO.NetCDF and pynetcdf. -R interface +R interface {#R} ------------------------------- The R Project for Statistical Computing has developed @@ -1868,7 +1872,7 @@ don't fit into memory, because data is processed in chunks. See package](http://cran.r-project.org/web/packages/raster/vignettes/Raster.pdf) for more information. -Quantum GIS (QGIS) +Quantum GIS (QGIS) {#QGIS} ----------------------------------------- [Quantum GIS](http://www.qgis.org/) (QGIS) is an Open Source Geographic @@ -1885,7 +1889,7 @@ a more detailed list of features of the QGIS desktop, browser, server, and client, see the [QGIS features page](http://www.qgis.org/en/about-qgis/features.html). -Ruby interface +Ruby interface {#Ruby} ------------------------------------- A group at the Research Institute for Sustainable Humanosphere (RISH) of @@ -1903,7 +1907,7 @@ and netCDF data uniformly. More information about Ruby is available from the [Ruby web site](http://www.ruby-lang.org/). -Scientific DataSet (SDS) Library +Scientific DataSet (SDS) Library {#SDS} ------------------------------------------------------ The [Scientific DataSet Library and Tools @@ -1947,7 +1951,7 @@ The SDS project is in beta phase and keeps evolving. You are welcome to join discussions or report issues at the CodePlex site: . -Apache Spatial Information System (SIS) +Apache Spatial Information System (SIS) {#SIS} ------------------------------------------------------------- [Apache Spatial Information System @@ -1980,7 +1984,7 @@ conventions and ISO 19115 metadata. SIS is under developement as an Apache project. 
Release 0.3 is currently available for download. -Tcl/Tk interfaces +Tcl/Tk interfaces {#TclTk} ------------------------------------------ Dan Schmitt has developed [cdftcl](http://cnrit.tamu.edu/rsg/cdftcl/), a @@ -1989,7 +1993,7 @@ use of "wildcards" (\*) or ranges (1-4) in the subscript notation, and use of name references instead of variable IDs. Contact dan@computer.org for more information. -Tcl-nap +Tcl-nap {#Tcl-nap} --------------------------------- [Tcl-nap](http://tcl-nap.sourceforge.net) (n-dimensional array @@ -2014,7 +2018,7 @@ Tcl-nap was developed as part of the [CSIRO CAPS project](http://www.dar.csiro.au/rs/avhrr_processing_software.htm), but can be loaded and used without the (satellite oriented) CAPS extension. -Visual Basic and VB.net interfaces +Visual Basic and VB.net interfaces {#VB} ------------------------------------------------------- Carsten Wieczorrek has developed code in VB 6 to export chromatographic @@ -2024,7 +2028,7 @@ programming with netcdf.dll from VB 6, see Wieczorrek's web page on [netCDF and VB 6.0](http://www.mn-net.com/netcdf_vb6) and for VB.net, see [netCDF and VB.net](http://www.mn-net.com/netcdf_vbnet). -VisAD +VisAD {#VisAD} ----------------------------- [VisAD](http://www.ssec.wisc.edu/~billh/visad.html) is a Java class @@ -2061,7 +2065,7 @@ Wisconsin-Madison [Space Science and Engineering Center](http://www.ssec.wisc.edu/), and the [Unidata Program Center](/index.html). -WebWinds +WebWinds {#WebWinds} ----------------------------------- [WebWinds](http://www.openchannelsoftware.com/projects/WebWinds/) is a @@ -2101,7 +2105,7 @@ well on Unix, Windows (95/98/NT) and Mac platforms. It currently requires JDK 1.1. To download a copy of this release, go to -xray (Python N-D labelled arrays) +xray (Python N-D labelled arrays) {#xray} -------------------------------------------------------- [xray](http://xray.readthedocs.org/en/stable/index.html) is an open @@ -2119,7 +2123,7 @@ representation of a netCDF file. xray is being developed by Stephan Hoyer, Alex Kleeman, and [other contributors](https://github.com/xray/xray/graphs/contributors). -Zebra +Zebra {#Zebra} ----------------------------- [Zebra](http://www.atd.ucar.edu/rdp/zebra.html) (formerly named Zeb) is @@ -2154,7 +2158,7 @@ http://www.atd.ucar.edu/rdp/zebra.html. ------------------------------------------------------------------------ -User-Contributed Software +User-Contributed Software {#user} ================================================ Unidata makes available a separate @@ -2178,7 +2182,7 @@ user-contributed software are: Commercial or Licensed Packages {#commercial} =============================== -ASA ViewNcDap +ASA ViewNcDap {#ViewNcDap} ----------------------------------------- Applied Science Associates, Inc. has made the ASA View NC/Dap @@ -2195,7 +2199,7 @@ permit additional formats to be read. It should not be considered a GIS system, but is used to quickly preview a variety of data on a simple map. Data may also be filtered and saved to a local netCDF file. -Avizo +Avizo {#Avizo} ----------------------------- [Avizo](http://www.avizo3d.com/) software is a powerful tool for 3D data @@ -2211,7 +2215,7 @@ and 3D data representations. For more information, see [www.avizo3d.com](http://www.avizo3d.com/). -AVS +AVS {#AVS} ------------------------- [AVS](ftp://testavs.ncsc.org/avs/Info/WHAT_IS_AVS) (Application @@ -2239,7 +2243,7 @@ directory. See also the information on [DDI](#DDI) for another way to use netCDF data with AVS. 
-Barrodale UFI +Barrodale UFI {#BCS-UFI} --------------------------------------- [Barrodale Computing Services Ltd.](http://www.barrodale.com) (BCS) has @@ -2268,10 +2272,7 @@ tables, so "UFI managed tables" are actually virtual database tables. Consequently, users of UFI can perform SQL queries on their files without having to first load them into a database. - -------------- - -DioVISTA/Storm +DioVISTA/Storm {#DioVISTAStorm} ----------------------------------------------- [DioVISTA/Storm](http://www.hitachi-power-solutions.com/products/product03/p03_61.html) @@ -2283,7 +2284,7 @@ resources through OGC Web Tile Map Services (WTMS). It supports CF Conventions version 1.6 (lon-lat-alt-time axis and trajectory). Its first version was released on Aug 5 2014. -Environmental WorkBench +Environmental WorkBench {#Environmental_WorkBench} ----------------------------------------------------------------- [SuperComputer Systems Engineering and Services @@ -2346,7 +2347,7 @@ flexible interface for storing scientific data. MeRAF is being used by the DOE at the Hanford-Meteorological Site for observational data and will be used for their weather-modeling. -ESRI +ESRI {#ESRI} --------------------------- [ESRI ArcGIS](http://www.esri.com/software/arcgis/index.html) version @@ -2357,7 +2358,7 @@ with data. A selected slice of netCDF data may be displayed in ArcGIS as a raster layer, feature layer, or table. You can also drag a netCDF file from Windows Explorer and drop it in an ESRI application such as ArcMap. -FME +FME {#FME} ------------------------- [FME](http://www.safe.com/fme), developed by [Safe Software @@ -2369,7 +2370,7 @@ netCDF common standard, regardless of its source, and conversely enables end-users to consume netCDF data for use in their preferred systems. For more information visit . -HDF Explorer +HDF Explorer {#HDF-Explorer} ------------------------------------------- [HDF Explorer](http://www.space-research.org/) is a data visualization @@ -2385,7 +2386,7 @@ features include fast access to data, grid, scalar and vector views. It also allows exporting your data either as an ASCII text file or a bitmap image. -IDL Interface +IDL Interface {#IDL} ----------------------------------- [IDL](http://www.exelisvis.com/ProductsServices/IDL.aspx) (Interactive @@ -2415,7 +2416,7 @@ netCDF data includes [ARGOS](#ARGOS), [CIDS Tools](#CIDS_Tools), [DDI](#DDI), [HIPHOP](#HIPHOP), [Hyperslab OPerator Suite (HOPS)](Hyperslab_OPerator_Suite_(HOPS)), and [Noesys](Noesys). -InterFormat +InterFormat {#InterFormat} ----------------------------------------- [InterFormat](http://www.radio-logic.com/) is a medical image format @@ -2431,7 +2432,7 @@ For more details about the formats handled, program features, and pricing, see the Radio-Logic web site at [\](http://www.radio-logic.com). -IRIS Explorer Module +IRIS Explorer Module {#IRIS_Explorer_Module} ----------------------------------------------------------- The Atmospheric and Oceanic Sciences Group at the National Center for @@ -2469,15 +2470,15 @@ information please send email to: pathfinder@redrock.ncsa.uiuc.edu See also the information on [DDI](#DDI) for another way to use netCDF data with IRIS Explorer. -LeoNetCDF +LeoNetCDF {#LeoNetCDF} ------------------------------------- [LeoNetCDF](http://www.leokrut.com/leonetcdf.html) is a Windows -application (Windows96/NT and higher) for editing netCDF files. It can +application (Windows95/NT and higher) for editing netCDF files. 
It can display content of netCDF files in tree style control and permits editing its parameters in a standard Windows interface environment. -Mathematica +Mathematica {#Mathematica} ----------------------------------------- [Mathematica](http://www.wolfram.com/products/mathematica/index.html) is @@ -2487,7 +2488,7 @@ adds classic [netCDF data](http://reference.wolfram.com/mathematica/ref/format/NetCDF.html) to the many forms of data it can import, export, and visualize. -MATLAB +MATLAB {#MATLAB} ------------------------------- [MATLAB](http://www.mathworks.com/products/matlab/) is an integrated @@ -2506,7 +2507,7 @@ implement a MATLAB/netCDF interface are available: reader](http://www.mathworks.com/matlabcentral/fileexchange/loadFile.do?objectId=15177&objectType=file), and [fanmat](/software/netcdf/Contrib.html). -Noesys +Noesys {#Neosys} ------------------------------- [Noesys](http://www.rsinc.com/NOeSYS/index.cfm) is software for desktop @@ -2547,7 +2548,7 @@ Power Macintosh OS. More details and information about ordering Noesys are available from [\](http://www.rsinc.com/NOeSYS/index.cfm). -Origin +Origin {#Origin} ------------------------------- Ryan Toomey reports: @@ -2575,7 +2576,7 @@ produces professional data analysis and graphing software for scientists and engineers. Our products are designed to be easy-to-use, yet have the power and versatility to provide for the most demanding user." -PPLUS +PPLUS {#PPLUS} ----------------------------- [Plot-Plus (PPLUS)](http://dwd6.home.mindspring.com/) is a general @@ -2608,7 +2609,7 @@ Korea...). Plot Plus is now available at no charge. It does require licensing on a per computer basis, but the license is at no cost. For more information about licensing, see -[http://dwd6.home.mindspring.com/pplus\_license.html/](http://dwd6.home.mindspring.com/pplus_license.html); +[http://dwd6.home.mindspring.com/pplus_license.html/](http://dwd6.home.mindspring.com/pplus_license.html); source and documentation are available via anonymous FTP from and . @@ -2619,7 +2620,7 @@ source and documentation are available via anonymous FTP from Shoreline, WA 98133 Fax and Voice: (206) 366-0624 -PV-Wave +PV-Wave {#PV-Wave} --------------------------------- [PV-Wave](http://www.vni.com/products/wave/index.html) is a software @@ -2634,7 +2635,7 @@ PV-Wave supports data access in numerous formats, including netCDF. See also the information on [DDI](#DDI) for another way to use netCDF data with PV-Wave. -Slicer Dicer +Slicer Dicer {#SlicerDicer} ------------------------------------------ [Slicer Dicer](http://www.slicerdicer.com/) is a volumetric data @@ -2659,7 +2660,7 @@ Slicer Dicer output. Visualizations features include: - Any data level or range of levels can be rendered as either opaque or transparent. -vGeo +vGeo {#vGeo} --------------------------- [vGeo](http://www.vrco.com/products/vgeo/vgeo.html) (Virtual Global @@ -2674,7 +2675,7 @@ how multiple files and variables are mapped into a data source. 3D graphics are built from the underlying data in real-time, and the user has interactive control of graphics, navigation, animation, and more. -VISAGE and Decimate +VISAGE and Decimate {#VISAGE_and_Decimate} --------------------------------------------------------- [VISAGE](http://www.crd.ge.com/esl/cgsp/projects/visage/) @@ -2696,7 +2697,7 @@ a preferred format. Decimate is currently licensed to Cyberware, Inc., makers of 3D laser digitizing hardware. Decimate is currently bundled with the scanners, and will soon be available as a commercial product. 
-Voyager +Voyager {#Voyager} --------------------------------- [Makai Voyager](http://voyager.makai.com/), developed by Makai Ocean From 0ee68b1b6d194f8bcd613c4f11acb97c10878a23 Mon Sep 17 00:00:00 2001 From: Ward Fisher Date: Tue, 26 May 2015 14:29:28 -0600 Subject: [PATCH 12/12] Cleaned up some dangling doxygen-related options which were being defined by cmake but not by automake. --- configure.ac | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/configure.ac b/configure.ac index 34d078f3f..ac34bb442 100644 --- a/configure.ac +++ b/configure.ac @@ -79,12 +79,35 @@ AC_ARG_WITH([minblocksize], AC_MSG_RESULT([$NCIO_MINBLOCKSIZE]) AC_DEFINE_UNQUOTED([NCIO_MINBLOCKSIZE], [$NCIO_MINBLOCKSIZE], [min blocksize for posixio.]) +### +# Doxygen and doxygen-related options. +### AC_ARG_ENABLE([doxygen], [AS_HELP_STRING([--enable-doxygen], [Enable generation of documentation.])]) test "x$enable_doxygen" = xyes || enable_doxygen=no AM_CONDITIONAL([BUILD_DOCS], [test "x$enable_doxygen" = xyes]) +AC_ARG_ENABLE([doxygen-tasks], + [AS_HELP_STRING([--enable-doxygen-tasks], + [Enable Doxygen-generated test, todo and bug list documentation. Developers only.])]) +test "x$enable_doxygen_tasks" = xyes || enable_doxygen_tasks=no +AM_CONDITIONAL([SHOW_DOXYGEN_TAG_LIST], [test "x$enable_doxygen_tasks" = xyes]) +AC_SUBST([SHOW_DOXYGEN_TAG_LIST], [$enable_doxygen_tasks]) + +AC_ARG_ENABLE([doxygen-server-side-search], + [AS_HELP_STRING([--enable-doxygen-server-side-search], + [Enable doxygen server-side search. This is of interest to developers only, or users who will serve the documentation via a web server instead of browsing locally.])]) +test "x$enable_doxygen_server_side_search" = xyes || enable_doxygen_server_side_search=no +AM_CONDITIONAL([SERVER_SIDE_SEARCH], [test "x$enable_doxygen_server_side_search" = xyes]) +AC_SUBST([SERVER_SIDE_SEARCH], [$enable_doxygen_server_side_search]) + +AC_ARG_ENABLE([doxygen-pdf-output], + [AS_HELP_STRING([--enable-doxygen-pdf-output], + [Build netCDF library documentation in PDF format. Experimental.])]) + AM_CONDITIONAL([NC_ENABLE_DOXYGEN_PDF_OUTPUT], [test "x$enable_doxygen_pdf_output" = xyes]) +AC_SUBST([NC_ENABLE_DOXYGEN_PDF_OUTPUT], [$enable_doxygen_pdf_output]) + AC_ARG_ENABLE([dot], [AS_HELP_STRING([--enable-dot], [Use dot (provided by graphviz) to generate charts and graphs in the doxygen-based documentation.])]) @@ -974,6 +997,9 @@ AM_CONDITIONAL(BUILD_CDMREMOTE, [test "x$enable_cdmremote" = xyes]) # Alias AM_CONDITIONAL(BUILD_RPC, [test "x$enable_rpc" = xyes]) AM_CONDITIONAL(BUILD_DISKLESS, [test x$enable_diskless = xyes]) AM_CONDITIONAL(BUILD_MMAP, [test x$enable_mmap = xyes]) +AM_CONDITIONAL(BUILD_DOCS, [test x$enable_doxygen = xyes]) +AM_CONDITIONAL(SHOW_DOXYGEN_TAG_LIST, [test x$enable_doxygen_tasks = xyes]) +AM_CONDITIONAL(SERVER_SIDE_SEARCH, [test x$enable_doxygen_server_side_search = xyes]) # If the machine doesn't have a long long, and we want netCDF-4, then # we've got problems!
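Taken together with the existing `--enable-doxygen` and `--enable-dot`
switches, the new options above can be combined in a single configure
run. The sketch below shows one possible documentation-oriented
developer build; the installation prefix is illustrative, and only the
`--enable-doxygen*` and `--enable-dot` switches correspond to options
actually defined in configure.ac.

    # Documentation-oriented developer build (prefix is illustrative).
    ./configure --prefix=/usr/local \
                --enable-doxygen \
                --enable-dot \
                --enable-doxygen-tasks \
                --enable-doxygen-server-side-search \
                --enable-doxygen-pdf-output
    # Build as usual; the BUILD_DOCS, SHOW_DOXYGEN_TAG_LIST,
    # SERVER_SIDE_SEARCH, and NC_ENABLE_DOXYGEN_PDF_OUTPUT conditionals
    # set above govern what the documentation build produces.
    make

Defining these conditionals on the automake side keeps configure-based
builds in step with the equivalent options already exposed by cmake,
as noted in the commit message.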