Attempt to fix the problem of ftp failing when getting the hdf4
files from our ftp site.

The solution used here is to repeat the request and to use the
wget -c flag so that we do not start the download over on every
retry.
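
In short: wrap wget in a bounded retry loop and pass -c (--continue) so
a retry resumes any partially downloaded file rather than starting from
byte zero. A minimal standalone sketch of the pattern (POSIX sh; the
URL, filename, and retry/sleep values are illustrative, not taken from
the commit):

    #!/bin/sh
    # Fetch one file, retrying up to 3 times; wget -c resumes a
    # partial download instead of restarting from the beginning.
    URL="ftp://ftp.unidata.ucar.edu/pub/netcdf/sample_data/hdf4/example.gz"
    rc=1
    for try in 1 2 3 ; do
        if wget -c --passive-ftp "$URL" ; then
            rc=0 ; break    # got the whole file
        fi
        echo "wget failed: try $try"
        sleep 5             # pause before retrying
    done
    exit $rc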
Dennis Heimbigner 2018-03-29 16:30:58 -06:00
parent 595b1abb2e
commit a747cc32ce

@@ -1,5 +1,7 @@
 #!/bin/sh
+#Q=-q
 # This shell gets some sample HDF4 files from the netCDF ftp site for
 # testing. Then it runs program tst_interops3 on the test file to
 # check that HDF4 reading works.
@@ -9,6 +11,20 @@
 if test "x$srcdir" = x ; then srcdir=`pwd`; fi
 . ../test_common.sh
+# Get a file from the ftp site; retry several times
+getfile() {
+    FTPFILE="ftp://ftp.unidata.ucar.edu/pub/netcdf/sample_data/hdf4/$1.gz"
+    for try in 1 2 3 ; do # try 3 times
+        # signal success/failure to the caller
+        if wget -c $Q --passive-ftp "$FTPFILE" ; then
+            return 0 # got it
+        fi
+        echo "wget failed: try $try"
+        sleep 5 # seconds
+    done
+    return 1 # did not get it
+}
 set -e
 echo ""
 echo "Getting HDF4 sample files from Unidata FTP site..."
@@ -21,8 +37,12 @@ echo "Getting HDF4 test files $file_list"
 for f1 in $file_list
 do
     if ! test -f $f1; then
-        wget "ftp://ftp.unidata.ucar.edu/pub/netcdf/sample_data/hdf4/$f1.gz"
-        gunzip $f1.gz
+        if getfile $f1 ; then
+            gunzip $f1.gz
+        else
+            echo "Could not ftp $f1.gz"
+            exit 1 # abort: the test cannot run without the file
+        fi
     fi
 done