mirror of
https://github.com/Unidata/netcdf-c.git
synced 2024-12-03 08:01:25 +08:00
3db4f013bf
Specific changes: 1. Add dap4 code: libdap4 and dap4_test. Note that until the d4ts server problem is solved, dap4 is turned off. 2. Modify various files to support dap4 flags: configure.ac, Makefile.am, CMakeLists.txt, etc. 3. Add nc_test/test_common.sh. This centralizes the handling of the locations of various things in the build tree: e.g. where is ncgen.exe located. See nc_test/test_common.sh for details. 4. Modify .sh files to use test_common.sh 5. Obsolete separate oc2 by moving it to be part of netcdf-c. This means replacing code with netcdf-c equivalents. 5. Add --with-testserver to configure.ac to allow override of the servers to be used for --enable-dap-remote-tests. 6. There were multiple versions of nctypealignment code. Try to centralize in libdispatch/doffset.c and include/ncoffsets.h 7. Add a unit test for the ncuri code because of its complexity. 8. Move the findserver code out of libdispatch and into a separate, self contained program in ncdap_test and dap4_test. 9. Move the dispatch header files (nc{3,4}dispatch.h) to .../include because they are now shared by modules. 10. Revamp the handling of TOPSRCDIR and TOPBUILDDIR for shell scripts. 11. Make use of MREMAP if available 12. Misc. minor changes e.g. - #include <config.h> -> #include "config.h" - Add some no-install headers to /include - extern -> EXTERNL and vice versa as needed - misc header cleanup - clean up checking for misc. unix vs microsoft functions 13. Change copyright decls in some files to point to LICENSE file. 14. Add notes to RELEASENOTES.md
81 lines
2.0 KiB
Bash
Executable File
81 lines
2.0 KiB
Bash
Executable File
#!/bin/bash
# NOTE(review): shebang changed from /bin/sh to /bin/bash -- this script
# uses bash-only features (arrays, C-style for loops), so it must not
# run under a plain POSIX sh.

# Default srcdir to the current directory when the test harness has not
# set it for us.
if test "x$srcdir" = x ; then srcdir=`pwd`; fi

# Pull in the shared test infrastructure (locations of tools in the
# build tree, etc.).
. ../test_common.sh

# This shell runs a bunch of benchmarks on some specific files
# available at Unidata. If you want to run this shell, you need these
# data files.

# This script gets and benchmarks against some 2D radar data.

# $Id: run_bm_radar_2D_endianness1.sh,v 1.1 2008/01/03 16:19:08 ed Exp $

# Abort on the first failing command.
set -e
# Radar 2D file. Make sure we have a local disk copy. Not much point
# in benchmarking read and write times over NFS!
TMP=/shecky/data
d1=20070803-2300
file_num=0
for t in 1 2 4
do
    file=${d1}_tile${t}-2d.nc3
    in_file[$file_num]=$file
    file_num=$((file_num + 1))
    # Fetch and unpack the tile if it is not already cached on local disk.
    if ! test -f "$TMP/$file"; then
        echo "getting file: $file"
        cp -f "/upc/share/testdata/nssl/mosaic2d_nc/tile${t}/$d1.netcdf.gz" "$TMP"
        gunzip -f "$TMP/$d1.netcdf.gz"
        cp "$d1.netcdf" "$TMP/$file"
    fi
done
# Number of staged input files (3: one per tile).
num_in_files=${#in_file[@]}
# Copy the 2D radar file into a netCDF-4 version, with various
# CHUNKING settings.
out1=radar_2d_endianness.csv
# Start with a fresh output file ($out1 is a plain file, so -f suffices).
rm -f "$out1"

# Turn on header (for the first run of bm_file).
h=-h

# Turn off compression and shuffle filters.
s=0
d=-1

# Set good chunksizes.
c0=501
c1=1001

# Index into in_file[]; cycles through the staged input files below.
file_num=0
# Run bm_file once for each endianness setting (0, 1, 2), cycling
# through the three staged input files.
for ((end = 0; end <= 2; end++))
do
    # Confuse the disk buffering by copying the file each time, so
    # always reading a new file.
    cp "$TMP/${in_file[$file_num]}" "$TMP/cp_${in_file[$file_num]}"

    # Build the command including chunk sizes for all 13 vars.
    # NOTE(review): the loop below adds chunk specs for vars 1..11, so
    # together with var 0 only 12 vars get explicit chunking -- confirm
    # against bm_file whether the "13 vars" comment or the bound is wrong.
    cmd="./bm_file -e $end $h -f 3 -d -o $TMP/$d1-2d.nc4 -c 0:${d}:${s}:${c0}:${c1}"
    for ((v = 1; v < 12; v++))
    do
        cmd="$cmd,${v}:${d}:${s}:${c0}:${c1}"
    done
    cmd="$cmd $TMP/cp_${in_file[$file_num]}"
    echo "cmd=$cmd"
    # $cmd is intentionally unquoted: word splitting builds the argv.
    if ! ($cmd >> "$out1"); then
        exit 1
    fi

    # Remove the copy. Next read will be a "new" file.
    rm "$TMP/cp_${in_file[$file_num]}"

    # Turn off header next time around.
    h=

    # Switch to the next input file of three.
    file_num=$((file_num + 1))
    test "$file_num" -eq "$num_in_files" && file_num=0
done

exit 0