Conflicts:
	ncdump/nccopy.c
Russ Rew 2014-03-06 21:12:48 -07:00
commit 8e59dc3ce8
20 changed files with 5514 additions and 1594 deletions


@ -785,6 +785,15 @@ IF(ENABLE_DOXYGEN)
SET(BUILD_INTERNAL_DOCS NO CACHE STRING "")
ENDIF()
# Option to turn on the TODO list in the doxygen-generated documentation.
OPTION(ENABLE_DOXYGEN_TODO_LIST "Turn on todo list in documentation. This is of interest to developers only." OFF)
IF(ENABLE_DOXYGEN_TODO_LIST)
SET(SHOW_DOXYGEN_TODO_LIST YES CACHE STRING "")
ELSE()
SET(SHOW_DOXYGEN_TODO_LIST NO CACHE STRING "")
ENDIF()
# Specify whether or not 'dot' was found on the system path.
IF(NC_DOT)
SET(HAVE_DOT YES CACHE STRING "")


@ -1,5 +1,5 @@
Release Notes {#release_notes}
=============
===============================
\brief Release notes file for the netcdf-c package.


@ -6,7 +6,7 @@ alias q0=;alias qq=;alias qv=;alias q=;alias qh=;alias qqh=;alias qall=;alias q
#TOP="/home/dmh/mach/netcdf-c"
TOP="/cygdrive/f/git/netcdf-c"
F="http://thredds-test.ucar.edu/dts/test.07"
F="http://tiggeUser:tigge@thredds-test.ucar.edu/thredds/dodsC/restrict/testData.nc"
#CON='SPEED.SPEED'
#VAR=SPEED
@ -122,4 +122,6 @@ F="http://iridl.ldeo.columbia.edu/SOURCES/.Models/.NMME/.NASA-GMAO/.MONTHLY/.sst
#F="http://ticket:ticket1@utmea.enea.it:8080/thredds/dodsC/UNIDATA_passwd/head_out.nc"
F="http://nomads.ncep.noaa.gov:9090/dods/gens/gens20140123/gep_all_12z"
VAR=prmslmsl
F="http://data.nodc.noaa.gov/thredds/dodsC/testdata/pathfinderAgg/pathFinderV5.2_night.ncml"
CON="sst_dtime.sst_dtime"
fi


@ -29,7 +29,8 @@ nc_inq_libvers(void)
return nc_libvers;
}
/** \defgroup error Error Handling
/** \addtogroup error NetCDF Error Handling
NetCDF functions return non-zero status codes on error.
Each netCDF function returns an integer status value. If the returned
@ -55,7 +56,7 @@ status value.
/**
\ingroup error
Given an error number, return an error message.
This function returns a static reference to an error message string
corresponding to an integer netCDF error status or to a system error
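As an illustration of this convention, here is a minimal sketch (the file name is a placeholder) that checks a status code and passes it to nc_strerror; the returned string is static and must not be freed:
\code
#include <stdio.h>
#include <netcdf.h>

int main(void)
{
    int ncid;
    /* nc_open returns NC_NOERR (0) on success, a negative code on failure. */
    int status = nc_open("example.nc", NC_NOWRITE, &ncid);
    if (status != NC_NOERR) {
        /* nc_strerror returns a static message string; do not free it. */
        fprintf(stderr, "nc_open: %s\n", nc_strerror(status));
        return 1;
    }
    nc_close(ncid);
    return 0;
}
\endcode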


@ -1,4 +1,5 @@
\page netCDF-CMake Build Instructions for netCDF-C using CMake
Build Instructions for NetCDF-C using CMake {#netCDF-CMake}
===========================================
[TOC]
@ -102,4 +103,4 @@ or
# See Also {#cmake_see_also}
For further information regarding NetCDF and CMake, see \ref cmake_faq


@ -19,14 +19,22 @@ IF(BUILD_DOCS)
# doxyfile template.
SET(abs_top_srcdir ${CMAKE_SOURCE_DIR})
CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.in
${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
ADD_CUSTOM_TARGET(doc ALL
${CMAKE_CURRENT_BINARY_DIR}/Doxyfile @ONLY)
CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/Doxyfile.guide.in
${CMAKE_CURRENT_BINARY_DIR}/Doxyfile.guide @ONLY)
ADD_CUSTOM_TARGET(doc_all ALL
${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generating API Documentation with Doxygen" VERBATIM
)
FILE(COPY ${IMG_FILES} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/html/)
ADD_CUSTOM_TARGET(doc_guide ALL
${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile.guide
WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}
COMMENT "Generating NetCDF User Guide with Doxygen" VERBATIM
)
FILE(COPY ${IMG_FILES} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/html)
FILE(COPY ${IMG_FILES} DESTINATION ${CMAKE_CURRENT_BINARY_DIR}/html/html_guide)
ENDIF()
ENDIF()

man4/Doxyfile.guide.in (new file, 2333 lines)

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@ -1,65 +1,54 @@
NetCDF Error Codes {#nc-error-codes}
NetCDF Error Code Listing {#nc-error-codes}
==================
\brief nc-error-codes NetCDF-C Error Codes
\ingroup error
<a name="top"></a>
\todo Review list for completeness.
There are several different classes of error codes used in NetCDF-C.
\tableofcontents
* [NetCDF-3 Error Codes](#NetCDF-3-Error-Codes)
* [NetCDF-4 Error Codes](#NetCDF-4-Error-Codes)
* [DAP Error Codes](#DAP-Error-Codes)
### NetCDF-3 Error Codes <a name="NetCDF-3-Error-Codes"></a>
# NetCDF-3 Error Codes {#nc3-error-codes}
~~~~
#define NC_NOERR 0 /* No Error */
#define NC_NOERR 0 // No Error
#define NC_EBADID (-33) // Not a netcdf id
#define NC_ENFILE (-34) // Too many netcdfs open
#define NC_EEXIST (-35) // netcdf file exists && NC_NOCLOBBER
#define NC_EINVAL (-36) // Invalid Argument
#define NC_EPERM (-37) // Write to read only
#define NC_ENOTINDEFINE (-38) // Operation not allowed in data mode
#define NC_EINDEFINE (-39) // Operation not allowed in define mode
#define NC_EINVALCOORDS (-40) // Index exceeds dimension bound
#define NC_EMAXDIMS (-41) // NC_MAX_DIMS exceeded
#define NC_ENAMEINUSE (-42) // String match to name in use
#define NC_ENOTATT (-43) // Attribute not found
#define NC_EMAXATTS (-44) // NC_MAX_ATTRS exceeded
#define NC_EBADTYPE (-45) // Not a netcdf data type
#define NC_EBADDIM (-46) // Invalid dimension id or name
#define NC_EUNLIMPOS (-47) // NC_UNLIMITED in the wrong index
#define NC_EMAXVARS (-48) // NC_MAX_VARS exceeded
#define NC_ENOTVAR (-49) // Variable not found
#define NC_EGLOBAL (-50) // Action prohibited on NC_GLOBAL varid
#define NC_ENOTNC (-51) // Not a netcdf file
#define NC_ESTS (-52) // In Fortran, string too short
#define NC_EMAXNAME (-53) // NC_MAX_NAME exceeded
#define NC_EUNLIMIT (-54) // NC_UNLIMITED size already in use
#define NC_ENORECVARS (-55) // nc_rec op when there are no record vars
#define NC_ECHAR (-56) // Attempt to convert between text & numbers
#define NC_EEDGE (-57) // Edge+start exceeds dimension bound
#define NC_ESTRIDE (-58) // Illegal stride
#define NC_EBADNAME (-59) // Attribute or variable name contains illegal characters
#define NC_EBADID (-33) /* Not a netcdf id */
#define NC_ENFILE (-34) /* Too many netcdfs open */
#define NC_EEXIST (-35) /* netcdf file exists && NC_NOCLOBBER */
#define NC_EINVAL (-36) /* Invalid Argument */
#define NC_EPERM (-37) /* Write to read only */
#define NC_ENOTINDEFINE (-38) /* Operation not allowed in data mode */
#define NC_EINDEFINE (-39) /* Operation not allowed in define mode */
#define NC_EINVALCOORDS (-40) /* Index exceeds dimension bound */
#define NC_EMAXDIMS (-41) /* NC_MAX_DIMS exceeded */
#define NC_ENAMEINUSE (-42) /* String match to name in use */
#define NC_ENOTATT (-43) /* Attribute not found */
#define NC_EMAXATTS (-44) /* NC_MAX_ATTRS exceeded */
#define NC_EBADTYPE (-45) /* Not a netcdf data type */
#define NC_EBADDIM (-46) /* Invalid dimension id or name */
#define NC_EUNLIMPOS (-47) /* NC_UNLIMITED in the wrong index */
#define NC_EMAXVARS (-48) /* NC_MAX_VARS exceeded */
#define NC_ENOTVAR (-49) /* Variable not found */
#define NC_EGLOBAL (-50) /* Action prohibited on NC_GLOBAL varid */
#define NC_ENOTNC (-51) /* Not a netcdf file */
#define NC_ESTS (-52) /* In Fortran, string too short */
#define NC_EMAXNAME (-53) /* NC_MAX_NAME exceeded */
#define NC_EUNLIMIT (-54) /* NC_UNLIMITED size already in use */
#define NC_ENORECVARS (-55) /* nc_rec op when there are no record vars */
#define NC_ECHAR (-56) /* Attempt to convert between text & numbers */
#define NC_EEDGE (-57) /* Edge+start exceeds dimension bound */
#define NC_ESTRIDE (-58) /* Illegal stride */
#define NC_EBADNAME (-59) /* Attribute or variable name
contains illegal characters */
/* N.B. following must match value in ncx.h */
#define NC_ERANGE (-60) /* Math result not representable */
#define NC_ENOMEM (-61) /* Memory allocation (malloc) failure */
// N.B. following must match value in ncx.h
#define NC_EVARSIZE (-62) /* One or more variable sizes violate
format constraints */
#define NC_EDIMSIZE (-63) /* Invalid dimension size */
#define NC_ETRUNC (-64) /* File likely truncated or possibly corrupted */
#define NC_ERANGE (-60) // Math result not representable
#define NC_ENOMEM (-61) // Memory allocation (malloc) failure
#define NC_EVARSIZE (-62) // One or more variable sizes violate format constraints
#define NC_EDIMSIZE (-63) // Invalid dimension size
#define NC_ETRUNC (-64) // File likely truncated or possibly corrupted
~~~~
[top](#top)
### NetCDF-4 Error Codes <a name="NetCDF-4-Error-Codes"></a>
# NetCDF-4 Error Codes {#nc4-error-codes}
NetCDF-4 uses all error codes from NetCDF-3 (see section [NetCDF-3 Error
Codes](#NetCDF_002d3-Error-Codes)). The following additional error codes
@ -76,42 +65,39 @@ were added for new errors unique to netCDF-4.
#define NC_EVARMETA (-108)
#define NC_ENOCOMPOUND (-109)
#define NC_EATTEXISTS (-110)
#define NC_ENOTNC4 (-111) /* Attempting netcdf-4 operation on netcdf-3 file. */
#define NC_ESTRICTNC3 (-112) /* Attempting netcdf-4 operation on strict nc3 netcdf-4 file. */
#define NC_EBADGRPID (-113) /* Bad group id. Bad! */
#define NC_EBADTYPEID (-114) /* Bad type id. */
#define NC_EBADFIELDID (-115) /* Bad field id. */
#define NC_ENOTNC4 (-111) // Attempting netcdf-4 operation on netcdf-3 file.
#define NC_ESTRICTNC3 (-112) // Attempting netcdf-4 operation on strict nc3 netcdf-4 file.
#define NC_EBADGRPID (-113) // Bad group id. Bad!
#define NC_EBADTYPEID (-114) // Bad type id.
#define NC_EBADFIELDID (-115) // Bad field id.
#define NC_EUNKNAME (-116)
~~~~
[top](#top)
### DAP Error Codes <a name="DAP-Error-Codes"></a>
# DAP Error Codes {#dap-error-codes}
If the DAP client is enabled, then the following additional error codes
may occur.
~~~~
#define NC_EDAP (-66) /* Generic DAP error */
#define NC_ECURL (-67) /* Generic libcurl error */
#define NC_EIO (-68) /* Generic IO error */
#define NC_ENODATA (-69) /* Attempt to access variable with no data */
#define NC_EDAPSVC (-70) /* DAP Server side error */
#define NC_EDAS (-71) /* Malformed or inaccessible DAS */
#define NC_EDDS (-72) /* Malformed or inaccessible DDS */
#define NC_EDATADDS (-73) /* Malformed or inaccessible DATADDS */
#define NC_EDAPURL (-74) /* Malformed DAP URL */
#define NC_EDAPCONSTRAINT (-75) /* Malformed DAP Constraint*/
#define NC_EDAP (-66) // Generic DAP error
#define NC_ECURL (-67) // Generic libcurl error
#define NC_EIO (-68) // Generic IO error
#define NC_ENODATA (-69) // Attempt to access variable with no data
#define NC_EDAPSVC (-70) // DAP Server side error
#define NC_EDAS (-71) // Malformed or inaccessible DAS
#define NC_EDDS (-72) // Malformed or inaccessible DDS
#define NC_EDATADDS (-73) // Malformed or inaccessible DATADDS
#define NC_EDAPURL (-74) // Malformed DAP URL
#define NC_EDAPCONSTRAINT (-75) // Malformed DAP Constraint
~~~~
[top](#top)
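When the DAP client is built in, these codes come back from ordinary calls such as nc_open on a DAP URL. A minimal sketch, reusing the test URL cited elsewhere in these docs:
~~~~
#include <stdio.h>
#include <netcdf.h>

int main(void)
{
    int ncid;
    /* A DAP data source is opened with the same API as a local file. */
    int status = nc_open("http://test.opendap.org:8080/dods/dts/test.01",
                         NC_NOWRITE, &ncid);
    if (status != NC_NOERR) {
        /* DAP failures report codes such as NC_EDAPURL or NC_EDAPSVC. */
        fprintf(stderr, "DAP open failed (%d): %s\n", status, nc_strerror(status));
        return 1;
    }
    nc_close(ncid);
    return 0;
}
~~~~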


@ -3,7 +3,8 @@
<hr size="2"/>
<address style="text-align: center;">
Return to the <a href="http://www.unidata.ucar.edu/software/netcdf/">Main Unidata NetCDF page.</a>
<a href="http://www.unidata.ucar.edu/software/netcdf/">Return to the Main Unidata NetCDF page.</a><br>
<img src="http://www.unidata.ucar.edu/img/v3/logos/uniLogo.png">
<address style="text-align: right;"><small>
Generated on $datetime for $projectname. NetCDF is

File diff suppressed because it is too large.


@ -1,6 +1,4 @@
/*! \file
\brief Documentation for getting and building netCDF
/*!
This document is for getting and building the netCDF C library and
utilities, version 4.3.0. Other libraries that depend on the netCDF C
@ -78,7 +76,7 @@ full functionality. (See \ref architecture).
- \ref netCDF-CMake
- \ref winbin
\page build_default Building with NetCDF-4 and the Remote Data Client
\subsection build_default Building with NetCDF-4 and the Remote Data Client
The usual way of building netCDF requires the HDF5, zlib, and curl
libraries. (And, optionally, the szlib library). Versions required are
@ -166,7 +164,7 @@ include/, and bin/. The installation location specified with the
<CODE>--prefix</CODE> option must be different from the source directory where the
software is being built.
\page build_classic Building NetCDF with Classic Library Only
\subsection build_classic Building NetCDF with Classic Library Only
It is possible to build the netCDF C libraries and utilities so that
only the netCDF classic and 64-bit offset formats are supported, or
@ -209,7 +207,7 @@ make check install
If you get the message that netCDF installed correctly, then you are
done!
\page build_hdf4 Building with HDF4 Support
\subsection build_hdf4 Building with HDF4 Support
The netCDF-4 library can (since version 4.1) read HDF4 data files, if
they were created with the SD (Scientific Data) API.
@ -252,7 +250,7 @@ make check
make install
\endverbatim
\page build_parallel Building with Parallel I/O Support
\subsection build_parallel Building with Parallel I/O Support
For parallel I/O to work, HDF5 must be installed with
enable-parallel, and an MPI library (and related libraries) must be
@ -297,7 +295,7 @@ href=ftp://ftp.unidata.ucar.edu/pub/netcdf/contrib/pnetcdf.h>replacement
pnetcdf.h</a> should no longer be used.) Then configure netCDF with the
"--enable-pnetcdf" option.
\page linking Linking to NetCDF
\subsection linking Linking to NetCDF
For static build, to use netCDF-4 you must link to all the libraries,
netCDF, HDF5, zlib, szip (if used with HDF5 build), and curl (if the
@ -325,7 +323,7 @@ netCDF-4 libraries:
cc -o myapp myapp.c `nc-config --cflags --libs`
\endverbatim
\page configure_options ./configure options
\subsection configure_options ./configure options
Note: --disable prefix indicates that the option is normally enabled.
<table>
@ -370,4 +368,3 @@ Note: --disable prefix indicates that the option is normally enabled.
<tr><td>--enable-mmap<td>Use mmap to implement NC_DISKLESS<td>
</table>
*/

man4/install.md (new file, 490 lines)

@ -0,0 +1,490 @@
Getting and Building NetCDF-C {#getting_and_building_netcdf}
=============================
[TOC]
This document is for getting and building the netCDF C library and
utilities, version 4.3.0. Other libraries that depend on the netCDF C
library, such as the Fortran and C++ libraries, are available as
separate distributions that can be built and installed after the C
library is successfully installed. The netCDF-Java library is also a
separate distribution that is currently independent of the netCDF C
library.
Getting NetCDF {#getting}
=========================
Getting pre-built netCDF-C libraries. {#sec_get_pre_built}
-------------------------------------
The easiest way to get netCDF is through a package management program,
such as rpm, yum, adept, and others. NetCDF is available from many
different repositories, including the default Red Hat and Ubuntu
repositories.
When getting netCDF from a software repository, you will wish to get
the development version of the package ("netcdf-devel"). This includes
the netcdf.h header file.
If you are interested in building NetCDF-C on Windows, please see \ref winbin and \ref netCDF-CMake.
Getting the latest netCDF-C Source Code {#sec_get_source}
----------------------------------------
Starting with netCDF-C version 4.3.1, the netCDF-C source code is hosted at the
Unidata GitHub repository, available at http://github.com/Unidata/netcdf-c.
Two options are available for building from source:
- The latest release.
- The developer snapshot.
### The latest release {#sec_latest_release}
The latest release may be downloaded from github at the following location:
- http://github.com/Unidata/netcdf-c/releases
Source files are available in `.tar.gz` and `.zip` formats.
### The developer snapshot {#sec_dev_snapshot}
The developer snapshot may be cloned from github directly by using the `git` command.
> $ git clone http://github.com/Unidata/netcdf-c netcdf-c
**Note:**
> ***The developer snapshot release contains bug-fixes and new
features added since the last full release. It may also contain
portability bugs.***
Once you have downloaded and unpacked the distribution, see the
following section on \ref building.
Building NetCDF {#building}
===========================
The netCDF-C library and utilities require third-party libraries for
full functionality. (See \ref architecture).
- \ref build_default
- \ref build_classic
- \ref build_hdf4
- \ref build_parallel
- <a class="el" href="http://www.unidata.ucar.edu/netcdf/docs/netcdf-fortran-install.html" >Building netCDF-Fortran Libraries</a>
- \ref configure_options
CMake and Windows support {#sub}
--------------------------------
- \ref netCDF-CMake
- \ref winbin
Building with NetCDF-4 and the Remote Data Client {#build_default}
--------------------------------
The usual way of building netCDF requires the HDF5, zlib, and curl
libraries. (And, optionally, the szlib library). Versions required are
at least HDF5 1.8.8, zlib 1.2.5, and curl 7.18.0 or later.
(Optionally, if building with szlib, get szip 2.0 or later.)
HDF5 1.8.9 and zlib 1.2.7 packages are available from the <a
href="ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4">netCDF-4 ftp
site</a>. If you wish to use the remote data client code, then you
will also need libcurl, which can be obtained from the <a
href="http://curl.haxx.se/download.html">curl website</a>.
Make sure you run ``make check'' for the HDF5 and zlib
distributions. They are very well-behaved distributions, but sometimes
the build doesn't work (perhaps because of something subtly
misconfigured on the target machine). If one of these libraries is not
working, netCDF will have serious problems.
Note that for building netCDF, it is not necessary to build the HDF5
Fortran, C++, or Java API's. Only the HDF5 C library is used.
Optionally, you can also build netCDF-4 with the szip library
(a.k.a. szlib). NetCDF cannot create szipped data files, but can read
HDF5 data files that have used szip.
There are license restrictions on the use of szip, see the section on
licensing terms in the <a
href="http://www.hdfgroup.org/doc_resource/SZIP/">web page on szip
compression in HDF products</a>. These license restrictions seem to
apply to commercial users who are writing data. (Data readers are not
restricted.) But here at NetCDF World Headquarters, in Sunny Boulder,
Colorado, there are no lawyers, only programmers, so please read the
szip documents for the license agreement to see how it applies to your
situation.
If ``make check'' fails for either zlib or HDF5, the problem must be
resolved before the netCDF-4 installation can continue. For HDF5
problems, see the <a
href="http://www.hdfgroup.org/services/support.html">HDF5 help
services</a>.
Build zlib like this:
~~~
$ ./configure --prefix=/home/ed/local
$ make check install
~~~
Then you build HDF5, specifying the location of the zlib library:
~~~
$ ./configure --with-zlib=/home/ed/local --prefix=/home/ed/local
$ make check install
~~~
In all cases, the installation location specified with the <CODE>--prefix</CODE>
option must be different from the source directory where the software
is being built.
Note that for shared libraries, you may need to add the install
directory to the LD_LIBRARY_PATH environment variable. See
the <a href="http://www.unidata.ucar.edu/netcdf/docs/faq.html#Shared%20Libraries">netCDF
FAQ</a> for more details on using shared libraries.
If you are building HDF5 with szip, then include the <CODE>--with-szlib=</CODE>
option, with the directory holding the szip library.
After HDF5 is done, build netcdf, specifying the location of the
HDF5, zlib, and (if built into HDF5) the szip header files and
libraries in the CPPFLAGS and LDFLAGS environment variables. For example:
~~~
$ CPPFLAGS=-I/home/ed/local/include LDFLAGS=-L/home/ed/local/lib ./configure --prefix=/home/ed/local
$ make check install
~~~
The configure script will try to find necessary tools in your
path. When you run configure you may optionally use the <CODE>--prefix</CODE>
argument to change the default installation directory. The above
examples install the zlib, HDF5, and netCDF-4 libraries in
/home/ed/local/lib, the header file in /home/ed/local/include, and the
utilities in /home/ed/local/bin. If you don't provide a <CODE>--prefix</CODE>
option, installation will be in /usr/local/, in subdirectories lib/,
include/, and bin/. The installation location specified with the
<CODE>--prefix</CODE> option must be different from the source directory where the
software is being built.
Building NetCDF with Classic Library Only {#build_classic}
---------------------------------------
It is possible to build the netCDF C libraries and utilities so that
only the netCDF classic and 64-bit offset formats are supported, or
the remote data access client is not built. (See \ref netcdf_format
for more information about the netCDF format variants. See the <a
href="http://opendap.org/netCDF-DAP">netCDF-DAP site</a>
for more information about remote client access to data
on OPeNDAP servers.)
To build without support for the netCDF-4 formats or the additional
netCDF-4 functions, but with remote access, use:
~~~
$ ./configure --prefix=/home/ed/local --disable-netcdf-4
$ make check install
~~~
(Replace `/home/ed/local` with the name of the directory where
netCDF is to be installed. The installation location specified with
the <CODE>--prefix</CODE> option must be different from the source directory where
the software is being built.)
Starting with version 4.1.1 the netCDF C libraries and utilities have
supported remote data access, using the OPeNDAP protocols. To build
with full support for netCDF-4 APIs and format but without remote
client access, use:
~~~
$ ./configure --prefix=/home/ed/local --disable-dap
$ make check install
~~~
To build without netCDF-4 support or remote client access, use:
~~~
$ ./configure --prefix=/home/ed/local --disable-netcdf-4 --disable-dap
$ make check install
~~~
If you get the message that netCDF installed correctly, then you are
done!
Building with HDF4 Support {#build_hdf4}
---------------------
The netCDF-4 library can (since version 4.1) read HDF4 data files, if
they were created with the SD (Scientific Data) API.
For this to work, you must build the HDF4 library with the
configure option
~~~
--disable-netcdf
~~~
to prevent it from building an HDF4 version of the netCDF-2 library
that conflicts with the netCDF-2 functions that are built into the Unidata
netCDF library.
Then, when building netCDF-4, use the
~~~
--enable-hdf4
~~~
option to configure. The location for the HDF4 header files and
library must be set in the CPPFLAGS and LDFLAGS options.
For HDF4 access to work, the library must be built with netCDF-4
features.
Here's an example, assuming the HDF5 library has been built and
installed in H5DIR and you will build and install the HDF4 library in
H4DIR (which could be the same as H5DIR):
~~~
# Build and install HDF4
$ cd ${HDF4_SOURCE_DIRECTORY}
$ ./configure --enable-shared --disable-netcdf --disable-fortran --prefix=${H4DIR}
$ make
$ make install
$ # Build and install netCDF with HDF4 access enabled
$ cd ${NETCDF_SOURCE_DIRECTORY}
$ CPPFLAGS="-I${H5DIR}/include -I${H4DIR}/include" \
  LDFLAGS="-L${H5DIR}/lib -L${H4DIR}/lib" \
  ./configure --enable-hdf4 --enable-hdf4-file-tests
$ make check
$ make install
~~~
Building with Parallel I/O Support {#build_parallel}
--------------
For parallel I/O to work, HDF5 must be installed with
enable-parallel, and an MPI library (and related libraries) must be
made available to the HDF5 configure. This can be accomplished with
the mpicc wrapper script, in the case of MPICH2.
The following works to build HDF5 with parallel I/O on our netCDF
testing system:
~~~
CC=mpicc ./configure --enable-parallel
make check install
~~~
If the HDF5 used by netCDF has been built with parallel I/O, then
netCDF will also be built with support for parallel I/O. This allows
parallel I/O access to netCDF-4/HDF5 files. Note that shared libraries
are not supported for parallel HDF5, which makes linking more
difficult to get right. "LIBS=-ldl" is also sometimes needed to link
successfully with parallel HDF5 libraries.
(See \ref netcdf_formats for more information about the netCDF format
variants.)
The following works to build netCDF-4 with parallel I/O on our netCDF
testing system:
~~~
$ H5DIR=/where/parallel/HDF5/was/installed
$ CPPFLAGS="-I${H5DIR}/include"
$ CC=mpicc
$ LDFLAGS=-L${H5DIR}/lib
$ LIBS=-ldl
$ ./configure --disable-shared --enable-parallel-tests
$ make check install
~~~
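Once netCDF is built this way, a program gains parallel access through nc_create_par and nc_open_par, declared in netcdf_par.h. A minimal sketch, assuming an MPI environment; the output file name and dimension size are illustrative:
~~~
#include <mpi.h>
#include <netcdf.h>
#include <netcdf_par.h>   /* nc_create_par, NC_MPIIO */

int main(int argc, char **argv)
{
    int ncid, dimid, varid;
    MPI_Init(&argc, &argv);

    /* Collectively create a netCDF-4/HDF5 file opened for parallel I/O. */
    if (nc_create_par("parallel.nc", NC_NETCDF4 | NC_MPIIO,
                      MPI_COMM_WORLD, MPI_INFO_NULL, &ncid) != NC_NOERR)
        MPI_Abort(MPI_COMM_WORLD, 1);

    nc_def_dim(ncid, "x", 1024, &dimid);
    nc_def_var(ncid, "data", NC_FLOAT, 1, &dimid, &varid);
    nc_enddef(ncid);

    /* ... each rank would write its own hyperslab with nc_put_vara_float ... */

    nc_close(ncid);
    MPI_Finalize();
    return 0;
}
~~~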
If parallel I/O access to netCDF classic and 64-bit offset files is
also needed, the parallel-netcdf library should also be installed.
(Note: the previously recommended <a
href=ftp://ftp.unidata.ucar.edu/pub/netcdf/contrib/pnetcdf.h>replacement
pnetcdf.h</a> should no longer be used.) Then configure netCDF with the
"--enable-pnetcdf" option.
Linking to NetCDF {#linking}
-------------------
For static build, to use netCDF-4 you must link to all the libraries,
netCDF, HDF5, zlib, szip (if used with HDF5 build), and curl (if the
remote access client has not been disabled). This will mean -L options
to your build for the locations of the libraries, and -l (lower-case
L) for the names of the libraries.
For example, one user reports that she can build other applications
with netCDF-4 by setting the LIBS environment variable:
~~~
LIBS='-L/X/netcdf-4.0/lib -lnetcdf -L/X/hdf5-1.8.6/lib -lhdf5_hl -lhdf5 -lz -lm -L/X/szip-2.1/lib -lsz'
~~~
For shared builds, only -lnetcdf is needed. All other libraries will
be found automatically.
The ``nc-config --all'' command can be used to learn what options are
needed for the local netCDF installation.
For example, this works for linking an application named myapp.c with
netCDF-4 libraries:
~~~
cc -o myapp myapp.c `nc-config --cflags --libs`
~~~
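For completeness, a minimal myapp.c that the command above could build might look like the following sketch; it prints the library version and opens a placeholder file:
~~~
#include <stdio.h>
#include <netcdf.h>

int main(void)
{
    int ncid, status;

    printf("linked against netCDF %s\n", nc_inq_libvers());

    /* "foo.nc" is a placeholder; any existing netCDF file will do. */
    status = nc_open("foo.nc", NC_NOWRITE, &ncid);
    if (status != NC_NOERR) {
        fprintf(stderr, "%s\n", nc_strerror(status));
        return 1;
    }
    nc_close(ncid);
    return 0;
}
~~~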
configure options {#configure_options}
-----------------------------
These options are used for `autotools`-based builds. For `cmake` options, see \todo Finish this reference.
Note: --disable prefix indicates that the option is normally enabled.
<table>
<tr><th>Option<th>Description<th>Dependencies
<tr><td>--disable-doxygen<td>Disable generation of documentation.<td>doxygen
<tr><td>--disable-fsync<td>disable fsync support<td>kernel fsync support
<tr><td>--enable-valgrind-tests <td>build with valgrind-tests; static builds only<td>valgrind
<tr><td>--enable-netcdf-4<td>build with netcdf-4<td>HDF5 and zlib
<tr><td>--enable-netcdf4<td>synonym for enable-netcdf-4
<tr><td>--enable-hdf4<td>build netcdf-4 with HDF4 read capability<td>HDF4, HDF5 and zlib
<tr><td>--enable-hdf4-file-tests<td>test ability to read HDF4 files<td>selected HDF4 files from Unidata ftp site
<tr><td>--enable-pnetcdf<td>build netcdf-4 with parallel I/O for classic and
64-bit offset files using parallel-netcdf
<tr><td>--enable-extra-example-tests<td>Run extra example tests<td>--enable-netcdf-4,GNU sed
<tr><td>--enable-parallel-tests <td>run extra parallel IO tests<td>--enable-netcdf-4, parallel IO support
<tr><td>--enable-logging<td>enable logging capability<td>--enable-netcdf-4
<tr><td>--disable-dap<td>build without DAP client support.<td>libcurl
<tr><td>--disable-dap-remote-tests<td>disable dap remote tests<td>--enable-dap
<tr><td>--enable-dap-long-tests<td>enable dap long tests<td>
<tr><td>--enable-extra-tests<td>run some extra tests that may not pass because of known issues<td>
<tr><td>--enable-ffio<td>use ffio instead of posixio (ex. on the Cray)<td>
<tr><td>--disable-examples<td>don't build the netCDF examples during make check
(examples are treated as extra tests by netCDF)<td>
<tr><td>--disable-v2<td>turn off the netCDF version 2 API<td>
<tr><td>--disable-utilities<td>don't build netCDF utilities ncgen, ncdump, and nccopy<td>
<tr><td>--disable-testsets<td>don't build or run netCDF tests<td>
<tr><td>--enable-large-file-tests <td>Run tests which create very large data
files<td>~13 GB disk space required (recovered when
tests are complete). See option --with-temp-large to
specify a temporary directory<td>
<tr><td>--enable-benchmarks<td>Run benchmarks. This is an experimental feature.
The benchmarks are a
bunch of extra tests, which are timed. We use these
tests to check netCDF performance.
<td>sample data files from the Unidata ftp site
<tr><td>--disable-extreme-numbers
<td>don't use extreme numbers during testing, such as MAX_INT - 1<td>
<tr><td>--enable-dll<td>build a win32 DLL<td>mingw compiler
<tr><td>--disable-shared<td>build shared libraries<td>
<tr><td>--disable-static<td>build static libraries<td>
<tr><td>--disable-largefile<td>omit support for large files<td>
<tr><td>--enable-mmap<td>Use mmap to implement NC_DISKLESS<td>
</table>
Build Instructions for NetCDF-C using CMake {#netCDF-CMake}
===========================================
## Overview {#cmake_overview}
Starting with netCDF-C 4.3.0, we are happy to announce the inclusion of CMake support. CMake will allow for building netCDF on a wider range of platforms, including Microsoft Windows with Visual Studio. CMake support also provides robust unit and regression testing tools. We will also maintain the standard autotools-based build system in parallel.
In addition to providing new build options for netCDF-C, we will also provide pre-built binary downloads for the shared versions of netCDF for use with Visual Studio.
## Requirements {#cmake_requirements}
The following packages are required to build netCDF-C using CMake.
* netCDF-C Source Code
* CMake version 2.8.9 or greater.
* Optional Requirements:
* HDF5 Libraries for netCDF4/HDF5 support.
* libcurl for DAP support.
<center>
<img src="deptree.jpg" height="250px" />
</center>
## The CMake Build Process {#cmake_build}
There are four steps in the build process when using CMake:
1. Configuration: Before compiling, the software is configured based on the desired options.
2. Building: Once configuration is complete, the libraries are compiled.
3. Testing: Post-build, it is possible to run tests to ensure the functionality of the netCDF-C libraries.
4. Installation: If all tests pass, the libraries can be installed in the location specified during configuration.
For users who prefer pre-built binaries, installation packages are available at \ref winbin
### Configuration {#cmake_configuration}
The output of the configuration step is a project file based on the appropriate configurator specified. Common configurators include:
* Unix Makefiles
* Visual Studio
* CodeBlocks
* ... and others
### Common CMake Options {#cmake_common_options}
| **Option** | **Autotools** | **CMake** |
| :------- | :---- | :----- |
Specify Install Location | --prefix=PREFIX | -D"CMAKE\_INSTALL\_PREFIX=PREFIX"
Enable/Disable netCDF-4 | --enable-netcdf-4<br>--disable-netcdf-4 | -D"ENABLE\_NETCDF\_4=ON" <br> -D"ENABLE\_NETCDF\_4=OFF"
Enable/Disable DAP | --enable-dap <br> --disable-dap | -D"ENABLE\_DAP=ON" <br> -D"ENABLE\_DAP=OFF"
Enable/Disable Utilities | --enable-utilities <br> --disable-utilities | -D"BUILD\_UTILITIES=ON" <br> -D"BUILD\_UTILITIES=OFF"
Specify shared/Static Libraries | --enable-shared <br> --enable-static | -D"BUILD\_SHARED\_LIBS=ON" <br> -D"BUILD\_SHARED\_LIBS=OFF"
Enable/Disable Tests | --enable-testsets <br> --disable-testsets | -D"ENABLE\_TESTS=ON" <br> -D"ENABLE\_TESTS=OFF"
Specify a custom library location | Use *CFLAGS* and *LDFLAGS* | -D"CMAKE\_PREFIX\_PATH=/usr/custom_libs/"
A full list of *basic* options can be found by invoking `cmake [Source Directory] -L`. To see a list of both *basic* and *advanced* options, invoke `cmake [Source Directory] -LA`.
### Configuring your build from the command line. {#cmake_command_line}
The easiest configuration case would be one in which all of the dependent libraries are installed on the system path (in either Unix/Linux or Windows) and all the default options are desired. From the build directory (often, but not required to be, located within the source directory):
> $ cmake [Source Directory]
If you have libraries installed in a custom directory, you may need to specify the **CMAKE\_PREFIX_PATH** variable to tell cmake where the libraries are installed. For example:
> $ cmake [Source Directory] -DCMAKE\_PREFIX\_PATH=/usr/custom_libraries/
## Building {#cmake_building}
The compiler can be executed directly with 'make' or the appropriate command for the configurator which was used.
> $ make
Building can also be executed indirectly via cmake:
> $ cmake --build [Build Directory]
## Testing {#cmake_testing}
Testing can be executed several different ways:
> $ make test
or
> $ ctest
or
> $ cmake --build [Build Directory] --target test
### Installation {#cmake_installation}
Once netCDF has been built and tested, it may be installed using the following commands:
> $ make install
or
> $ cmake --build [Build Directory] --target install
## See Also {#cmake_see_also}
For further information regarding NetCDF and CMake, see \ref cmake_faq


@ -6,11 +6,13 @@
\tableofcontents
\section mainpage_netcdf NetCDF
\section mainpage_netcdf NetCDF Overview
NetCDF is a set of software libraries and self-describing,
machine-independent data formats that support the creation, access,
and sharing of array-oriented scientific data. The NetCDF homepage may be found at <a href="http://www.unidata.ucar.edu/netcdf">http://www.unidata.ucar.edu/netcdf</a>. The NetCDF source-code is hosted at <a href="http://github.com">GitHub</a>, and may be found directly at <a href="http://github.com/Unidata/netcdf-c">http://github.com/Unidata/netcdf-c</a>.
\subsection what_is_netcdf What is NetCDF?
NetCDF is a set of software libraries and self-describing, machine-independent data formats that support the creation, access, and sharing of array-oriented scientific data. NetCDF was developed and is maintained at <a href="http://www.unidata.ucar.edu">Unidata</a>. Unidata provides data and software tools for use in geoscience education and research. Unidata is part of the University Corporation for Atmospheric Research (<a href="http://www.ucar.edu">UCAR</a>) Community Programs (<a href="http://www.uop.ucar.edu">UCP</a>). Unidata is funded primarily by the National Science Foundation.
The NetCDF homepage may be found at <a href="http://www.unidata.ucar.edu/netcdf">http://www.unidata.ucar.edu/netcdf</a>. The NetCDF source-code is hosted at <a href="http://github.com">GitHub</a>, and may be found directly at <a href="http://github.com/Unidata/netcdf-c">http://github.com/Unidata/netcdf-c</a>.
\subsection this_release Learn more about the current NetCDF-C Release

man4/netcdf-50x50.png (new binary file, 2.0 KiB; binary content not shown)


@ -38,20 +38,6 @@ This page contains references to various other NetCDF background and tutorial pag
\subsection sub_sec_example_programs Example Programs:
- \ref examples1
\page what_is_netcdf What is NetCDF?
NetCDF is a set of data formats, programming interfaces, and software
libraries that help read and write scientific data files.
NetCDF was developed and is maintained at <a
href="http://www.unidata.ucar.edu">Unidata</a>. Unidata provides data
and software tools for use in geoscience education and research.
Unidata is part of the University Corporation for Atmospheric Research
(<a href="http://www.ucar.edu">UCAR</a>) Community Programs (<a
href="http://www.uop.ucar.edu">UCP</a>). Unidata is funded primarily
by the National Science Foundation.
\page netcdf_working_with_netcdf_files Working with NetCDF Files from the command line.
\brief Options for working with netcdf-formatted files from the command line or with an external program.
@ -363,13 +349,10 @@ information about the CF conventions, see http://cf-pcmdi.llnl.gov.
\page error_handling Error Handling
See Also:
- \ref nc-error-codes
- Function \ref nc_strerror
\ingroup error
Each netCDF function in the C, Fortran 77, and Fortran 90 APIs returns
0 on success, in the tradition of C.
When programming with netCDF in these languages, always check return
values of every netCDF API call. The return code can be looked up in


@ -1532,6 +1532,7 @@ usage(void)
progname, USAGE);
}
<<<<<<< HEAD
/** @page guide_nccopy nccopy tool - Copy a netCDF file, optionally changing format, compression, or chunking in the output.
\tableofcontents
@ -1841,6 +1842,8 @@ nccopy -w -c time/1000,lat/40,lon/40 slow.nc fast.nc
ncdump(1), ncgen(1), netcdf(3)
*/
=======
>>>>>>> d6d5077494c420e73bfd48513c88eb0025905e4b
int
main(int argc, char**argv)
{


@ -1897,277 +1897,6 @@ void adapt_url_for_cache(char **pathp) {
}
#endif
/** \page guide_ncdump ncdump tool - Convert netCDF file to text form (CDL)
\tableofcontents
\section ncdump_SYNOPSIS ncdump synopsis
\code
ncdump [-chistxw] [-v var1,...] [-b lang] [-f lang]
[-l len] [-n name] [-p n[,n]] [-g grp1,...] file
ncdump -k file
\endcode
\section ncdump_DESCRIPTION ncdump description
The \b ncdump utility generates a text representation of a specified
netCDF file on standard output, optionally excluding some or all of
the variable data in the output. The text representation is in a form
called CDL (network Common Data form Language) that can be viewed,
edited, or serve as input to \b ncgen, a companion program that can
generate a binary netCDF file from a CDL file. Hence \b ncgen and \b
ncdump can be used as inverses to transform the data representation
between binary and text representations. See \b ncgen documentation
for a description of CDL and netCDF representations.
\b ncdump may also be used to determine what kind of netCDF file
is used (which variant of the netCDF file format) with the -k
option.
If DAP support was enabled when \b ncdump was built, the file name may
specify a DAP URL. This allows \b ncdump to access data sources from
DAP servers, including data in other formats than netCDF. When used
with DAP URLs, \b ncdump shows the translation from the DAP data
model to the netCDF data model.
\b ncdump may also be used as a simple browser for netCDF data files,
to display the dimension names and lengths; variable names, types, and
shapes; attribute names and values; and optionally, the values of data
for all variables or selected variables in a netCDF file. For
netCDF-4 files, groups and user-defined types are also included in \b
ncdump output.
\b ncdump uses '_' to represent data values that are equal to the
'_FillValue' attribute for a variable, intended to represent
data that has not yet been written. If a variable has no
'_FillValue' attribute, the default fill value for the variable
type is used unless the variable is of byte type.
\b ncdump defines a default display format used for each type of
netCDF data, but this can be changed if a `C_format' attribute
is defined for a netCDF variable. In this case, \b ncdump will
use the `C_format' attribute to format each value. For
example, if floating-point data for the netCDF variable `Z' is
known to be accurate to only three significant digits, it would
be appropriate to use the variable attribute
\code
Z:C_format = "%.3g"
\endcode
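The `C_format' attribute is an ordinary text attribute, so a data writer can attach it with the usual attribute calls. A hypothetical sketch for a float variable Z; the ncid and varid are assumed to come from an open dataset in define mode:
\code
#include <string.h>
#include <netcdf.h>

/* Attach a C_format attribute so that ncdump prints Z with three
   significant digits.  ncid and z_varid are assumed valid. */
static int set_c_format(int ncid, int z_varid)
{
    const char *fmt = "%.3g";
    return nc_put_att_text(ncid, z_varid, "C_format", strlen(fmt), fmt);
}
\endcode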
\section ncdump_OPTIONS ncdump options
@par -c
Show the values of \e coordinate \e variables (1D variables with the
same names as dimensions) as well as the declarations of all
dimensions, variables, attribute values, groups, and user-defined
types. Data values of non-coordinate variables are not included in
the output. This is usually the most suitable option to use for a
brief look at the structure and contents of a netCDF file.
@par -h
Show only the header information in the output, that is, output only
the declarations for the netCDF dimensions, variables, attributes,
groups, and user-defined types of the input file, but no data values
for any variables. The output is identical to using the '-c' option
except that the values of coordinate variables are not included. (At
most one of '-c' or '-h' options may be present.)
@par -v \a var1,...
@par
The output will include data values for the specified variables, in
addition to the declarations of all dimensions, variables, and
attributes. One or more variables must be specified by name in the
comma-delimited list following this option. The list must be a single
argument to the command, hence cannot contain unescaped blanks or
other white space characters. The named variables must be valid netCDF
variables in the input-file. A variable within a group in a netCDF-4
file may be specified with an absolute path name, such as
`/GroupA/GroupA2/var'. Use of a relative path name such as `var' or
`grp/var' specifies all matching variable names in the file. The
default, without this option and in the absence of the '-c' or '-h'
options, is to include data values for \e all variables in the output.
@par -b [c|f]
A brief annotation in the form of a CDL comment (text beginning with
the characters '//') will be included in the data section of the
output for each 'row' of data, to help identify data values for
multidimensional variables. If lang begins with 'C' or 'c', then C
language conventions will be used (zero-based indices, last dimension
varying fastest). If lang begins with 'F' or 'f', then FORTRAN
language conventions will be used (one-based indices, first dimension
varying fastest). In either case, the data will be presented in the
same order; only the annotations will differ. This option may be
useful for browsing through large volumes of multidimensional data.
@par -f [c|f]
Full annotations in the form of trailing CDL comments (text beginning
with the characters '//') for every data value (except individual
characters in character arrays) will be included in the data
section. If lang begins with 'C' or 'c', then C language conventions
will be used. If lang begins with 'F' or 'f', then FORTRAN language
conventions will be used. In either case, the data will be presented
in the same order; only the annotations will differ. This option may
be useful for piping data into other filters, since each data value
appears on a separate line, fully identified. (At most one of '-b' or
'-f' options may be present.)
@par -l \e length
@par
Changes the default maximum line length (80) used in formatting lists
of non-character data values.
@par -n \e name
@par
CDL requires a name for a netCDF file, for use by 'ncgen -b' in
generating a default netCDF file name. By default, \b ncdump
constructs this name from the last component of the file name of
the input netCDF file by stripping off any extension it has. Use
the '-n' option to specify a different name. Although the output
file name used by 'ncgen -b' can be specified, it may be wise to
have \b ncdump change the default name to avoid inadvertently
overwriting a valuable netCDF file when using \b ncdump, editing the
resulting CDL file, and using 'ncgen -b' to generate a new netCDF
file from the edited CDL file.
@par -p \e float_digits[, \e double_digits ]
@par
Specifies default precision (number of significant digits) to use in
displaying floating-point or double precision data values for
attributes and variables. If specified, this value overrides the value
of the C_format attribute, if any, for a variable. Floating-point data
will be displayed with \e float_digits significant digits. If \e
double_digits is also specified, double-precision values will be
displayed with that many significant digits. In the absence of any
'-p' specifications, floating-point and double-precision data are
displayed with 7 and 15 significant digits respectively. CDL files can
be made smaller if less precision is required. If both floating-point
and double precisions are specified, the two values must appear
separated by a comma (no blanks) as a single argument to the command.
(To represent every last bit of precision in a CDL file for all
possible floating-point values would require '-p 9,17'.)
@par -k
Show \e kind of netCDF file, that is which format variant the file uses.
Other options are ignored if this option is specified. Output will be
one of 'classic', '64-bit offset', 'netCDF-4', or 'netCDF-4 classic
model'.
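The same format information is available programmatically through nc_inq_format; a minimal sketch (the file name is a placeholder):
\code
#include <stdio.h>
#include <netcdf.h>

int main(void)
{
    int ncid, format;
    if (nc_open("foo.nc", NC_NOWRITE, &ncid) != NC_NOERR)
        return 1;
    /* format is one of NC_FORMAT_CLASSIC, NC_FORMAT_64BIT,
       NC_FORMAT_NETCDF4, or NC_FORMAT_NETCDF4_CLASSIC. */
    nc_inq_format(ncid, &format);
    printf("format variant: %d\n", format);
    nc_close(ncid);
    return 0;
}
\endcode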
@par -s
Specifies that \e special virtual attributes should be output for the
file format variant and for variable properties such as
compression, chunking, and other properties specific to the format
implementation that are primarily related to performance rather
than the logical schema of the data. All the special virtual
attributes begin with '_' followed by an upper-case
letter. Currently they include the global attribute '_Format' and
the variable attributes '_ChunkSizes', '_DeflateLevel',
'_Endianness', '_Fletcher32', '_NoFill', '_Shuffle', and '_Storage'.
The \b ncgen utility recognizes these attributes and
supports them appropriately.
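These special attributes mirror per-variable storage settings chosen through the C API. For example, a writer could give a hypothetical 2D netCDF-4 variable chunked, shuffled, level-1 deflate storage (which 'ncdump -s' would then report as _ChunkSizes, _Shuffle, and _DeflateLevel) roughly as in the following sketch; the chunk sizes are illustrative:
\code
#include <stddef.h>
#include <netcdf.h>

/* Sketch: ncid and varid are assumed to identify an open netCDF-4
   dataset in define mode and a 2D variable. */
static int tune_storage(int ncid, int varid)
{
    size_t chunks[2] = {100, 100};
    int status = nc_def_var_chunking(ncid, varid, NC_CHUNKED, chunks);
    if (status != NC_NOERR)
        return status;
    /* shuffle on, deflate on, compression level 1 */
    return nc_def_var_deflate(ncid, varid, 1, 1, 1);
}
\endcode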
@par -t
Controls display of time data, if stored in a variable that uses a
udunits compliant time representation such as 'days since 1970-01-01'
or 'seconds since 2009-03-15 12:01:17'. If this option is specified,
time values are displayed as human-readable date-time strings rather
than numerical values, interpreted in terms of a 'calendar' variable
attribute, if specified. For numeric attributes of time variables,
the human-readable time value is displayed after the actual value, in
an associated CDL comment. Calendar attribute values interpreted with
this option include the CF Conventions values 'gregorian' or
'standard', 'proleptic_gregorian', 'noleap' or '365_day', 'all_leap'
or '366_day', '360_day', and 'julian'.
@par -i
Same as the '-t' option, except output time data as date-time strings
with ISO-8601 standard 'T' separator, instead of a blank.
@par -g \e grp1,...
@par
The output will include data values only for the specified groups.
One or more groups must be specified by name in the comma-delimited
list following this option. The list must be a single argument to the
command. The named groups must be valid netCDF groups in the
input-file. The default, without this option and in the absence of the
'-c' or '-h' options, is to include data values for all groups in the
output.
@par -w
For file names that request remote access using DAP URLs, access data
with client-side caching of entire variables.
@par -x
Output XML (NcML) instead of CDL. The NcML does not include data values.
The NcML output option currently only works for netCDF classic model data.
\section ncdump_EXAMPLES ncdump examples
Look at the structure of the data in the netCDF file foo.nc:
\code
ncdump -c foo.nc
\endcode
Produce an annotated CDL version of the structure and data in the
netCDF file foo.nc, using C-style indexing for the annotations:
\code
ncdump -b c foo.nc > foo.cdl
\endcode
Output data for only the variables uwind and vwind from the netCDF
file foo.nc, and show the floating-point data with only three
significant digits of precision:
\code
ncdump -v uwind,vwind -p 3 foo.nc
\endcode
Produce a fully-annotated (one data value per line) listing of the
data for the variable omega, using FORTRAN conventions for indices,
and changing the netCDF file name in the resulting CDL file to
omega:
\code
ncdump -v omega -f fortran -n omega foo.nc > Z.cdl
\endcode
Examine the translated DDS for the DAP source from the specified URL:
\code
ncdump -h http://test.opendap.org:8080/dods/dts/test.01
\endcode
Without dumping all the data, show the special virtual attributes that indicate
performance-related characteristics of a netCDF-4 file:
\code
ncdump -h -s nc4file.nc
\endcode
\section see_also SEE ALSO
ncgen(1), netcdf(3)
- \ref guide_ncgen
- \ref guide_nccopy
\section ncdump_string_note NOTE ON STRING OUTPUT
For classic, 64-bit offset or netCDF-4 classic model data, \b ncdump
generates line breaks after embedded newlines in displaying character
data. This is not done for netCDF-4 files, because netCDF-4 supports
arrays of real strings of varying length.
*/
int
main(int argc, char *argv[])
{


@ -1,6 +1,6 @@
THISDIR=../oc2
OCDIR=/home/dmh/svn/oc2.0
#OCDIR=f:/svn/oc2.0
#OCDIR=/home/dmh/svn/oc2.0
OCDIR=f:/svn/oc2.0
# Make consistent with Makefile.am
SRC=oc.c \


@ -726,12 +726,12 @@ ocmktmp(const char* base, char** tmpnamep, int* fdp)
}
#endif /* !HAVE_MKSTEMP */
if(fd < 0) {
free(tmpname);
if(tmpname != NULL) free(tmpname);
return OC_EOPEN;
}
if(tmpnamep) *tmpnamep = tmpname;
else free(tmpname);
else if(tmpname != NULL) {free(tmpname);}
if(fdp) *fdp = fd;
else if(fd) close(fd);
else if(fd >= 0) close(fd);
return OC_NOERR;
}