ftp --> resources

Jennifer Oxelson 2024-05-13 16:48:14 -06:00
parent 2960f56b81
commit ef5fcf962b
7 changed files with 16 additions and 15 deletions

View File

@@ -103,9 +103,10 @@ libraries. (And, optionally, the szlib library). Versions required are
 at least HDF5 1.8.9, zlib 1.2.5, and curl 7.18.0 or later.
 (Optionally, if building with szlib, get szip 2.0 or later.)
-HDF5 1.8.9 and zlib 1.2.7 packages are available from the <a
-href="ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4">netCDF-4 ftp
-site</a>. If you wish to use the remote data client code, then you
+These packages are available at:
+https://resources.unidata.ucar.edu/netcdf/netcdf-4/
+
+If you wish to use the remote data client code, then you
 will also need libcurl, which can be obtained from the <a
 href="http://curl.haxx.se/download.html">curl website</a>.
@@ -314,7 +315,7 @@ $ make check install
 If parallel I/O access to netCDF classic, 64-bit offset, CDF-5 files is
 also needed, the PnetCDF library should also be installed.
 (Note: the previously recommended <a
-href=ftp://ftp.unidata.ucar.edu/pub/netcdf/contrib/pnetcdf.h>replacement
+href="https://resources.unidata.ucar.edu/netcdf/contrib/pnetcdf.h">replacement
 pnetcdf.h</a> should no longer be used.) Then configure netCDF with the
 "--enable-pnetcdf" option.
@@ -361,7 +362,7 @@ Note: --disable prefix indicates that the option is normally enabled.
 <tr><td>--enable-netcdf-4<td>build with netcdf-4<td>HDF5 and zlib
 <tr><td>--enable-netcdf4<td>synonym for enable-netcdf-4
 <tr><td>--enable-hdf4<td>build netcdf-4 with HDF4 read capability<td>HDF4, HDF5 and zlib
-<tr><td>--enable-hdf4-file-tests<td>test ability to read HDF4 files<td>selected HDF4 files from Unidata ftp site
+<tr><td>--enable-hdf4-file-tests<td>test ability to read HDF4 files<td>selected HDF4 files from Unidata resources site
 <tr><td>--enable-pnetcdf<td>build netcdf-4 with parallel I/O for classic, 64-bit offset, and CDF-5 files using PnetCDF
 <tr><td>--enable-extra-example-tests<td>Run extra example tests<td>--enable-netcdf-4,GNU sed
 <tr><td>--enable-parallel-tests <td>run extra parallel IO tests<td>--enable-netcdf-4, parallel IO support
@@ -384,7 +385,7 @@ Note: --disable prefix indicates that the option is normally enabled.
 The benchmarks are a
 bunch of extra tests, which are timed. We use these
 tests to check netCDF performance.
-<td>sample data files from the Unidata ftp site
+<td>sample data files from the Unidata resources site
 <tr><td>--disable-extreme-numbers
 <td>don't use extreme numbers during testing, such as MAX_INT - 1<td>
 <tr><td>--enable-dll<td>build a win32 DLL<td>mingw compiler
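The download-backed options in this table now pull from the resources site. A hedged example of turning them on (flag spellings taken from the table and configure.ac; network access is required during make check):

    # Enables tests that fetch sample files from resources.unidata.ucar.edu.
    ./configure --enable-hdf4 --enable-hdf4-file-tests --enable-benchmarks
    make check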

View File

@@ -69,7 +69,7 @@ if(NETCDF_ENABLE_HDF4)
 message(STATUS "Found JPEG libraries: ${JPEG_LIB}")
 # Option to enable HDF4 file tests.
-option(NETCDF_ENABLE_HDF4_FILE_TESTS "Run HDF4 file tests. This fetches sample HDF4 files from the Unidata ftp site to test with (requires curl)." ON)
+option(NETCDF_ENABLE_HDF4_FILE_TESTS "Run HDF4 file tests. This fetches sample HDF4 files from the Unidata resources site to test with (requires curl)." ON)
 if(NETCDF_ENABLE_HDF4_FILE_TESTS)
 find_program(PROG_CURL NAMES curl)
 if(PROG_CURL)
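A sketch of how the corresponding CMake options might be switched on from the command line; the out-of-source build layout here is only an example:

    # Out-of-source build with the HDF4 file tests enabled.
    mkdir build && cd build
    cmake -DNETCDF_ENABLE_HDF4=ON -DNETCDF_ENABLE_HDF4_FILE_TESTS=ON ..
    cmake --build .
    ctest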

View File

@@ -354,9 +354,9 @@ AC_MSG_RESULT([$enable_dynamic_loading])
 # Does the user want to turn on extra HDF4 file tests?
-AC_MSG_CHECKING([whether to fetch some sample HDF4 files from Unidata ftp site to test HDF4 reading (requires wget)])
+AC_MSG_CHECKING([whether to fetch some sample HDF4 files from Unidata resources site to test HDF4 reading (requires wget)])
 AC_ARG_ENABLE([hdf4-file-tests], [AS_HELP_STRING([--enable-hdf4-file-tests],
-[get some HDF4 files from Unidata ftp site and test that they can be read])])
+[get some HDF4 files from Unidata resources site and test that they can be read])])
 test "x$enable_hdf4" = xyes -a "x$enable_hdf4_file_tests" = xyes || enable_hdf4_file_tests=no
 if test "x$enable_hdf4_file_tests" = xyes; then
 AC_DEFINE([USE_HDF4_FILE_TESTS], 1, [If true, use use wget to fetch some sample HDF4 data, and then test against it.])
@@ -1096,7 +1096,7 @@ fi
 AC_MSG_CHECKING([whether benchmarks should be run])
 AC_ARG_ENABLE([benchmarks],
 [AS_HELP_STRING([--enable-benchmarks],
-[Run benchmarks. This will cause sample data files from the Unidata ftp
+[Run benchmarks. This will cause sample data files from the Unidata resources
 site to be fetched. The benchmarks are a bunch of extra tests, which
 are timed. We use these tests to check netCDF performance.])])
 test "x$enable_benchmarks" = xyes || enable_benchmarks=no

View File

@@ -115,7 +115,7 @@ It is strongly recommended that applicable conventions be followed unless there
 `Conventions`
-> If present, 'Conventions' is a global attribute that is a character array for the name of the conventions followed by the dataset. Originally, these conventions were named by a string that was interpreted as a directory name relative to the directory /pub/netcdf/Conventions/ on the now defunct host ftp.unidata.ucar.edu. The web page https://www.unidata.ucar.edu/netcdf/conventions.html is now the preferred and authoritative location for registering a URI reference to a set of conventions maintained elsewhere. Authors of new conventions should submit a request to support-netcdf@unidata.ucar.edu for listing on the Unidata conventions web page.
+> If present, 'Conventions' is a global attribute that is a character array for the name of the conventions followed by the dataset. Originally, these conventions were named by a string that was interpreted as a directory name relative to the directory /pub/netcdf/Conventions/ on the now defunct ftp host. The web page https://www.unidata.ucar.edu/netcdf/conventions.html is now the preferred and authoritative location for registering a URI reference to a set of conventions maintained elsewhere. Authors of new conventions should submit a request to support-netcdf@unidata.ucar.edu for listing on the Unidata conventions web page.
 <p>

View File

@@ -28,7 +28,7 @@
 recently made some use of netCDF, based on
 <ol>
 <li>
-downloads from the Unidata site (ftp and http)
+downloads from the Unidata downloads site
 </li>
 <li>
 subscribers and posters to netCDF mailing lists

View File

@@ -14,7 +14,7 @@ if test "x$srcdir" = x ; then srcdir=`pwd`; fi
 # Get a file from the resources site; retry several times
 getfile() {
-DATAFILE="https://resources.unidata.ucar.edu/sample_data/hdf4/$1.gz"
+DATAFILE="https://resources.unidata.ucar.edu/netcdf/sample_data/hdf4/$1.gz"
 for try in 1 2 3 4 ; do # try 4 times
@@ -30,7 +30,7 @@ getfile() {
 set -e
 echo ""
-echo "Getting HDF4 sample files from Unidata FTP site..."
+echo "Getting HDF4 sample files from Unidata resources site..."
 file_list="AMSR_E_L2_Rain_V10_200905312326_A.hdf AMSR_E_L3_DailyLand_V06_20020619.hdf \
 MYD29.A2009152.0000.005.2009153124331.hdf MYD29.A2002185.0000.005.2007160150627.hdf \
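The hunks above show only the top of getfile(). As a rough sketch (not the project's actual implementation), a retry loop around the updated URL might look like this, assuming wget as the download tool mentioned in configure.ac:

    # Illustrative only: retry the download a few times before giving up.
    getfile() {
        DATAFILE="https://resources.unidata.ucar.edu/netcdf/sample_data/hdf4/$1.gz"
        for try in 1 2 3 4 ; do # try 4 times
            if wget -q "$DATAFILE" ; then
                gunzip "$1.gz"
                return 0
            fi
            sleep 5 # short pause before the next attempt
        done
        echo "Failed to fetch $1" >&2
        return 1
    }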

View File

@@ -15,7 +15,7 @@ echo "Getting KNMI test files $file_list"
 for f1 in $file_list
 do
 if ! test -f $f1; then
-wget https://resources.unidata.ucar.edu/sample_data/$f1.gz
+wget https://resources.unidata.ucar.edu/netcdf/sample_data/$f1.gz
 gunzip $f1.gz
 fi
 done
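Since the point of the commit is swapping ftp URLs for the resources host, a quick hedged way to double-check that nothing was missed:

    # Should print nothing if every ftp URL has been migrated.
    grep -rn "ftp://ftp.unidata.ucar.edu" . --exclude-dir=.git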