mirror of https://github.com/Unidata/netcdf-c.git

Merge branch 'main' into noshape.dmh

commit 7852b2b0e3
.github/workflows/run_tests_cdash.yml (vendored, new file, 177 lines)
@@ -0,0 +1,177 @@
###
# Build hdf5 dependencies and cache them in a combined directory.
###

name: Run CDash Ubuntu/Linux netCDF Tests

on: workflow_dispatch

jobs:

  build-deps-cdash:

    runs-on: ubuntu-latest

    strategy:
      matrix:
        hdf5: [ 1.10.8, 1.12.2, 1.14.0 ]

    steps:
      - uses: actions/checkout@v3

      - name: Install System dependencies
        shell: bash -l {0}
        run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev doxygen openssl

      ###
      # Installing libhdf5
      ###
      - name: Cache libhdf5-${{ matrix.hdf5 }}
        id: cache-hdf5
        uses: actions/cache@v3
        with:
          path: ~/environments/${{ matrix.hdf5 }}
          key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }}

      - name: Build libhdf5-${{ matrix.hdf5 }}
        if: steps.cache-hdf5.outputs.cache-hit != 'true'
        run: |
          set -x

          wget https://support.hdfgroup.org/ftp/HDF/releases/HDF4.2.15/src/hdf-4.2.15.tar.bz2
          tar -jxf hdf-4.2.15.tar.bz2
          pushd hdf-4.2.15
          ./configure --prefix=${HOME}/environments/${{ matrix.hdf5 }} --disable-static --enable-shared --disable-fortran --disable-netcdf --with-szlib --enable-hdf4-xdr
          make -j
          make install -j
          popd

          wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-$(echo ${{ matrix.hdf5 }} | cut -d. -f 1,2)/hdf5-${{ matrix.hdf5 }}/src/hdf5-${{ matrix.hdf5 }}.tar.bz2
          tar -jxf hdf5-${{ matrix.hdf5 }}.tar.bz2
          pushd hdf5-${{ matrix.hdf5 }}
          ./configure --disable-static --enable-shared --prefix=${HOME}/environments/${{ matrix.hdf5 }} --enable-hl --with-szlib
          make -j
          make install -j
          popd

  build-deps-parallel:

    runs-on: ubuntu-latest

    strategy:
      matrix:
        hdf5: [ 1.14.0 ]

    steps:

      - uses: actions/checkout@v3

      - name: Install System dependencies
        shell: bash -l {0}
        run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev mpich libmpich-dev

      ###
      # Installing libhdf5
      ###
      - name: Cache libhdf5-parallel-${{ matrix.hdf5 }}
        id: cache-hdf5
        uses: actions/cache@v3
        with:
          path: ~/environments/${{ matrix.hdf5 }}
          key: hdf5-parallel-${{ runner.os }}-${{ matrix.hdf5 }}

      - name: Build libhdf5-${{ matrix.hdf5 }}-pnetcdf-1.12.3
        if: steps.cache-hdf5.outputs.cache-hit != 'true'
        run: |
          set -x

          wget https://support.hdfgroup.org/ftp/HDF/releases/HDF4.2.15/src/hdf-4.2.15.tar.bz2
          tar -jxf hdf-4.2.15.tar.bz2
          pushd hdf-4.2.15
          CC=mpicc ./configure --prefix=${HOME}/environments/${{ matrix.hdf5 }} --disable-static --enable-shared --disable-fortran --disable-netcdf --with-szlib --enable-parallel --enable-hdf4-xdr
          make -j
          make install -j
          popd

          wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-$(echo ${{ matrix.hdf5 }} | cut -d. -f 1,2)/hdf5-${{ matrix.hdf5 }}/src/hdf5-${{ matrix.hdf5 }}.tar.bz2
          tar -jxf hdf5-${{ matrix.hdf5 }}.tar.bz2
          pushd hdf5-${{ matrix.hdf5 }}
          CC=mpicc ./configure --disable-static --enable-shared --prefix=${HOME}/environments/${{ matrix.hdf5 }} --enable-hl --with-szlib --enable-parallel
          make -j
          make install -j
          popd

          wget https://parallel-netcdf.github.io/Release/pnetcdf-1.12.3.tar.gz
          tar -zxf pnetcdf-1.12.3.tar.gz
          pushd pnetcdf-1.12.3
          CC=mpicc ./configure --disable-static --enable-shared --prefix=${HOME}/environments/${{ matrix.hdf5 }}
          make -j
          make install -j
          popd

  ###
  # Run CTest Serial Script
  ###
  nc-ctest-serial:
    needs: build-deps-cdash
    runs-on: ubuntu-latest
    environment: CDashCI

    strategy:
      matrix:
        hdf5: [ 1.10.8, 1.12.2, 1.14.0 ]

    steps:
      - uses: actions/checkout@v3
        with:
          CDASH_TOKEN: ${{ secrets.CDASH_TOKEN }}
        env:
          CDASH_TOKEN: ${{ secrets.CDASH_TOKEN }}

      - name: Install System dependencies
        shell: bash -l {0}
        run: sudo apt update && sudo apt install -y libaec-dev zlib1g-dev automake autoconf libcurl4-openssl-dev libjpeg-dev wget curl bzip2 m4 flex bison cmake libzip-dev

      ###
      # Set Environmental Variables
      ###

      - run: echo "CMAKE_PREFIX_PATH=${HOME}/environments/${{ matrix.hdf5 }}/" >> $GITHUB_ENV
      - run: echo "LD_LIBRARY_PATH=${HOME}/environments/${{ matrix.hdf5 }}/lib" >> $GITHUB_ENV
      - run: echo "CTEST_OUTPUT_ON_FAILURE=1" >> $GITHUB_ENV

      ###
      # Fetch Cache
      ###

      - name: Fetch HDF Cache
        id: cache-hdf5
        uses: actions/cache@v3
        with:
          path: ~/environments/${{ matrix.hdf5 }}
          key: hdf5-${{ runner.os }}-${{ matrix.hdf5 }}

      - name: Check Cache
        shell: bash -l {0}
        run: ls ${HOME}/environments/${{ matrix.hdf5 }} && ls ${HOME}/environments/${{ matrix.hdf5}}/lib

      - name: Run ctest serial script
        shell: bash -l {0}
        env:
          CDASH_TOKEN: ${{ secrets.CDASH_TOKEN }}
        run: |
          mkdir build
          cd build
          LD_LIBRARY_PATH=${LD_LIBRARY_PATH} ctest -j 12 -V -S ../ctest_scripts/ctest_serial.ctest

      - name: Verbose Output if CTest Failure
        shell: bash -l {0}
        run: |
          cd build
          LD_LIBRARY_PATH=${LD_LIBRARY_PATH} ctest -j 12 --rerun-failed --output-on-failure -VV
        if: ${{ failure() }}
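The nc-ctest-serial job above amounts to pointing CMake and the dynamic loader at the cached HDF4/HDF5 prefix and then driving CTest with the dashboard script. A rough local equivalent might look like the following sketch; the prefix path and token value are placeholders, not anything defined by this commit:

# Sketch of reproducing the serial CDash job by hand (paths and token are placeholders).
export CMAKE_PREFIX_PATH=${HOME}/environments/1.14.0/
export LD_LIBRARY_PATH=${HOME}/environments/1.14.0/lib
export CDASH_TOKEN="<cdash-token>"     # optional; the script skips submission when this is empty
mkdir build && cd build
ctest -j 12 -V -S ../ctest_scripts/ctest_serial.ctest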
.github/workflows/run_tests_ubuntu.yml (vendored, 92 changed lines)
@@ -196,6 +196,19 @@ jobs:
           CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make check -j
         if: ${{ success() }}

+      - name: Create source distribution
+        shell: bash -l {0}
+        if: ${{ success() }}
+        run: make dist -j
+
+      - uses: actions/upload-artifact@v3
+        with:
+          name: netcdf-c-autotools-source-distribution
+          path: |
+            *.tar*
+            *.zip
+            *.tgz
+
   ##
   # Parallel
   ##

@@ -449,7 +462,28 @@ jobs:
         use_nczarr: [ nczarr_off, nczarr_on ]
     steps:

-      - uses: actions/checkout@v3
+      - uses: actions/download-artifact@v3
+        with:
+          name: netcdf-c-autotools-source-distribution
+
+      - name: Unpack source distribution
+        shell: bash -l {0}
+        run: |
+          if [ -f *.zip ];
+          then
+            unzip *.zip
+          else
+            tar xvzf $(ls *.tar* *.tgz *.zip | head -1)
+          fi
+          ls -d netcdf-c*
+          for name in netcdf-c*;
+          do
+            if [ -d ${name} ];
+            then
+              cd ${name}
+              break
+            fi
+          done
+
       - name: Install System dependencies
         shell: bash -l {0}

@@ -498,11 +532,28 @@ jobs:

       - name: Run autoconf
         shell: bash -l {0}
-        run: autoreconf -if
+        run: |
+          for name in netcdf-c*;
+          do
+            if [ -d ${name} ];
+            then
+              cd ${name}
+              break
+            fi
+          done
+          autoreconf -if

       - name: Configure
         shell: bash -l {0}
         run: |
+          for name in netcdf-c*;
+          do
+            if [ -d ${name} ];
+            then
+              cd ${name}
+              break
+            fi
+          done
           current_directory="$(pwd)"
           mkdir ../build
           cd ../build && CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} "${current_directory}/configure" ${ENABLE_HDF5} ${ENABLE_DAP} ${ENABLE_NCZARR}

@@ -510,29 +561,56 @@ jobs:

       - name: Look at config.log if error
         shell: bash -l {0}
-        run: cd ../build && cat config.log
+        run: |
+          if [ -d ../build ];
+          then
+            cd ../build
+          else
+            cd build
+          fi && cat config.log
         if: ${{ failure() }}

       - name: Print Summary
         shell: bash -l {0}
-        run: cd ../build && cat libnetcdf.settings
+        run: |
+          if [ -d ../build ];
+          then
+            cd ../build
+          else
+            cd build
+          fi && cat libnetcdf.settings

       - name: Build Library and Utilities
         shell: bash -l {0}
         run: |
-          cd ../build && CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make -j
+          if [ -d ../build ];
+          then
+            cd ../build
+          else
+            cd build
+          fi && CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make -j
         if: ${{ success() }}

       - name: Build Tests
         shell: bash -l {0}
         run: |
-          cd ../build && CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make check TESTS="" -j
+          if [ -d ../build ];
+          then
+            cd ../build
+          else
+            cd build
+          fi && CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make check TESTS="" -j
         if: ${{ success() }}

       - name: Run Tests
         shell: bash -l {0}
         run: |
-          cd ../build && CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make check -j
+          if [ -d ../build ];
+          then
+            cd ../build
+          else
+            cd build
+          fi && CFLAGS=${CFLAGS} LDFLAGS=${LDFLAGS} LD_LIBRARY_PATH=${LD_LIBRARY_PATH} make check -j
         if: ${{ success() }}

   nc-cmake:
.github/workflows/run_tests_win_mingw.yml (vendored, 2 changed lines)
@@ -4,7 +4,7 @@
 # for information related to github runners.
 ###

-name: Run MSYS2, MinGW64-based Tests
+name: Run MSYS2, MinGW64-based Tests (Not Visual Studio)

 env:
   CPPFLAGS: "-D_BSD_SOURCE"
@@ -267,7 +267,8 @@ IF(CMAKE_COMPILER_IS_GNUCC OR APPLE)

 # Coverage tests need to have optimization turned off.
 IF(ENABLE_COVERAGE_TESTS)
-  SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
+  SET(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -coverage -fprofile-arcs -ftest-coverage")
+  SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -coverage -fprofile-arcs -ftest-coverage")
   SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fprofile-arcs -ftest-coverage")
   MESSAGE(STATUS "Coverage Tests: On.")
 ENDIF()
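For reference, ENABLE_COVERAGE_TESTS is the CMake option gating this block, and -coverage is GCC's shorthand for the profile/test-coverage options that also links the gcov runtime. A hedged sketch of an out-of-tree coverage build, using that option together with the Profiling configuration selected by the new coverage CTest script later in this changeset (directory names are illustrative):

# Illustrative only: configure and build netcdf-c with coverage instrumentation enabled.
mkdir build && cd build
cmake .. -DENABLE_COVERAGE_TESTS=TRUE -DCMAKE_BUILD_TYPE=Profiling
make -j
ctest        # leaves .gcda/.gcno data that gcov can then report on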
@ -2250,7 +2251,7 @@ ENDIF(ENABLE_BASH_SCRIPT_TESTING)
|
|||||||
|
|
||||||
MACRO(add_sh_test prefix F)
|
MACRO(add_sh_test prefix F)
|
||||||
IF(HAVE_BASH)
|
IF(HAVE_BASH)
|
||||||
ADD_TEST(${prefix}_${F} bash "-c" "export srcdir=${CMAKE_CURRENT_SOURCE_DIR};export TOPSRCDIR=${CMAKE_SOURCE_DIR};bash ${CMAKE_CURRENT_BINARY_DIR}/${F}.sh ${ARGN}")
|
ADD_TEST(${prefix}_${F} bash "-c" "export srcdir=${CMAKE_CURRENT_SOURCE_DIR};export TOPSRCDIR=${CMAKE_SOURCE_DIR};${CMAKE_CURRENT_BINARY_DIR}/${F}.sh ${ARGN}")
|
||||||
ENDIF()
|
ENDIF()
|
||||||
ENDMACRO()
|
ENDMACRO()
|
||||||
|
|
||||||
|
@@ -10,7 +10,11 @@ SET(CTEST_SOURCE_DIRECTORY "..")
 SET(CTEST_BINARY_DIRECTORY ".")

 set(CDASH_TOKEN $ENV{CDASH_TOKEN})
-MESSAGE("Using cdash token: ${CDASH_TOKEN}")
+IF (CDASH_TOKEN)
+  MESSAGE("CDASH TOKEN FOUND")
+ELSE (CDASH_TOKEN)
+  MESSAGE("NO CDASH TOKEN FOUND")
+ENDIF (CDASH_TOKEN)

 SITE_NAME(local_site_name)
@@ -10,9 +10,13 @@ SET(CTEST_SOURCE_DIRECTORY "..")
 SET(CTEST_BINARY_DIRECTORY ".")

 set(CDASH_TOKEN $ENV{CDASH_TOKEN})
-MESSAGE("Using cdash token: ${CDASH_TOKEN}")
+IF (CDASH_TOKEN)
+  MESSAGE("CDASH TOKEN FOUND")
+ELSE (CDASH_TOKEN)
+  MESSAGE("NO CDASH TOKEN FOUND")
+ENDIF (CDASH_TOKEN)

 SITE_NAME(local_site_name)
 set(CTEST_SITE ${local_site_name})

@@ -29,7 +33,7 @@ find_program(CTEST_GIT_COMMAND NAMES git)
 find_program(CTEST_COVERAGE_COMMAND NAMES gcov)
 find_program(CTEST_MEMORYCHECK_COMMAND NAMES valgrind)

-set(CTEST_BUILD_OPTIONS "-DENABLE_COVERAGE_TESTS=TRUE -DENABLE_ERANGE_FILL=TRUE -DENABLE_LOGGING=TRUE -DENABLE_BYTERANGE=TRUE -DENABLE_LARGE_FILE_TESTS=FALSE")
+set(CTEST_BUILD_OPTIONS "-DENABLE_COVERAGE_TESTS=FALSE -DENABLE_ERANGE_FILL=TRUE -DENABLE_LOGGING=TRUE -DENABLE_BYTERANGE=TRUE -DENABLE_LARGE_FILE_TESTS=FALSE")

 set(CTEST_DROP_METHOD https)

@@ -42,7 +46,6 @@ ctest_start("Experimental")
 ctest_configure()
 ctest_build()
 ctest_test()
-ctest_coverage()
 if (NOT "${CDASH_TOKEN}" STREQUAL "")
   ctest_submit(HTTPHEADER "Authorization: Bearer ${CDASH_TOKEN}")
 endif()
ctest_scripts/ctest_serial_coverage.ctest (new file, 52 lines)
@@ -0,0 +1,52 @@
###
# Standard CTest Script for testing netCDF.
# Requires a CDash Token.
#
# Set the CDASH_TOKEN environmental variable.
#
###

SET(CTEST_SOURCE_DIRECTORY "..")
SET(CTEST_BINARY_DIRECTORY ".")

set(CDASH_TOKEN $ENV{CDASH_TOKEN})
IF (CDASH_TOKEN)
  MESSAGE("CDASH TOKEN FOUND")
ELSE (CDASH_TOKEN)
  MESSAGE("NO CDASH TOKEN FOUND")
ENDIF (CDASH_TOKEN)

SITE_NAME(local_site_name)
set(CTEST_SITE ${local_site_name})

set(CTEST_BUILD_CONFIGURATION "Profiling")
set(CTEST_CMAKE_GENERATOR "Unix Makefiles")
set(CTEST_BUILD_NAME "default")
set(CTEST_BUILD_CONFIGURATION "Profiling")
set(CTEST_DROP_METHOD "https")
set(CTEST_DROP_SITE_CDASH TRUE)
set(CTEST_PROJECT_NAME netcdf-c)

find_program(CMAKE_COMMAND cmake)
find_program(CTEST_GIT_COMMAND NAMES git)
find_program(CTEST_COVERAGE_COMMAND NAMES gcov)
find_program(CTEST_MEMORYCHECK_COMMAND NAMES valgrind)

set(CTEST_BUILD_OPTIONS "-DENABLE_COVERAGE_TESTS=TRUE -DENABLE_ERANGE_FILL=TRUE -DENABLE_LOGGING=TRUE -DENABLE_BYTERANGE=TRUE -DENABLE_LARGE_FILE_TESTS=FALSE")

set(CTEST_DROP_METHOD https)
set(CTEST_DROP_SITE "cdash.unidata.ucar.edu:443")
set(CTEST_DROP_LOCATION "/submit.php?project=netcdf-c")

set(CTEST_CONFIGURE_COMMAND "${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE:STRING=${CTEST_BUILD_CONFIGURATION} ${CTEST_BUILD_OPTIONS} ${CTEST_SOURCE_DIRECTORY}")

ctest_start("Experimental")
ctest_configure()
ctest_build()
ctest_test()
ctest_coverage()
if (NOT "${CDASH_TOKEN}" STREQUAL "")
  ctest_submit(HTTPHEADER "Authorization: Bearer ${CDASH_TOKEN}")
endif()
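As its header comments note, the new coverage script reads a CDash token from the CDASH_TOKEN environment variable; when no token is present it still configures, builds, tests, and gathers coverage, but the ctest_submit() call is skipped. A possible invocation from a scratch build directory, with the token value as a placeholder:

export CDASH_TOKEN="<cdash-token>"   # placeholder; leave unset to skip the CDash submission
mkdir build && cd build
ctest -V -S ../ctest_scripts/ctest_serial_coverage.ctest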
@@ -10,7 +10,11 @@ SET(CTEST_SOURCE_DIRECTORY "..")
 SET(CTEST_BINARY_DIRECTORY ".")

 set(CDASH_TOKEN $ENV{CDASH_TOKEN})
-MESSAGE("Using cdash token: ${CDASH_TOKEN}")
+IF (CDASH_TOKEN)
+  MESSAGE("CDASH TOKEN FOUND")
+ELSE (CDASH_TOKEN)
+  MESSAGE("NO CDASH TOKEN FOUND")
+ENDIF (CDASH_TOKEN)

 SITE_NAME(local_site_name)
@@ -78,15 +78,4 @@ ENDIF(ENABLE_TESTS)
 #FILE(COPY ./cdltestfiles DESTINATION ${CMAKE_CURRENT_SOURCE_DIR})
 #FILE(COPY ./rawtestfiles DESTINATION ${CMAKE_CURRENT_SOURCE_DIR})

-## Specify files to be distributed by 'make dist'
-FILE(GLOB CUR_EXTRA_DIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/*.c ${CMAKE_CURRENT_SOURCE_DIR}/*.h ${CMAKE_CURRENT_SOURCE_DIR}/*.sh
-  ${CMAKE_CURRENT_SOURCE_DIR}/cdltestfiles
-  ${CMAKE_CURRENT_SOURCE_DIR}/rawtestfiles
-  ${CMAKE_CURRENT_SOURCE_DIR}/baseline
-  ${CMAKE_CURRENT_SOURCE_DIR}/baselineraw
-  ${CMAKE_CURRENT_SOURCE_DIR}/baselineremote
-  #${CMAKE_CURRENT_SOURCE_DIR}/baselinerthredds
-  ${CMAKE_CURRENT_SOURCE_DIR}/baselinehyrax
-  )
-SET(CUR_EXTRA_DIST ${CUR_EXTRA_DIST} CMakeLists.txt Makefile.am)
-ADD_EXTRA_DIST("${CUR_EXTRA_DIST}")
@@ -168,6 +168,7 @@ done:
     }
     nullfree(tmp1);
    clearPath(&inparsed);
+    //fprintf(stderr,">>> ncpathcvt: inpath=%s result=%s\n",inpath,result);
     return result;
 }
@@ -82,9 +82,11 @@ ncloginit(void)
 }

 /*!
-Enable/Disable logging.
+Enable logging messages to a given level. Set to NCLOGOFF to disable
+all messages, NCLOGERR for errors only, NCLOGWARN for warnings and
+errors, and so on

-\param[in] tf If 1, then turn on logging, if 0, then turn off logging.
+\param[in] level Messages above this level are ignored

 \return The previous value of the logging flag.
 */
@@ -136,8 +138,11 @@ ncvlog(int level, const char* fmt, va_list ap)
     const char* prefix;

     if(!nclogginginitialized) ncloginit();
-    if(nclog_global.loglevel < level)
-        return;
+    if(nclog_global.loglevel < level || nclog_global.nclogstream == NULL) {
+        return;
+    }

     prefix = nctagname(level);
     fprintf(nclog_global.nclogstream,"%s: ",prefix);
     if(fmt != NULL) {
@@ -1,5 +1,7 @@
 #!/bin/sh
+
+set -e

 if test "x$srcdir" = x ; then srcdir=`pwd`; fi
 . ../test_common.sh
@@ -43,16 +45,19 @@ testcasezip() {
 }

 testcases3() {
+    set -x
     echo -e "\to Running S3 Testcase:\t$1\t$2"
     zext=s3
     base=$1
     mode=$2
     rm -f tmp_${base}_${zext}.cdl
     url="https://${UH}/${UB}/${base}.zarr#mode=${mode},s3"
+    echo "flags: $flags"
     # Dumping everything causes timeout so dump a single var
-    ${NCDUMP} -v "/group_with_dims/var2D" $flags $url > tmp_${base}_${zext}.cdl
+    ${NCDUMP} -v "group_with_dims/var2D" $flags $url > tmp_${base}_${zext}.cdl
     # Find the proper ref file
     diff -b ${ISOPATH}/ref_${base}_2d.cdl tmp_${base}_${zext}.cdl
+    set +x
 }

 testallcases() {
@@ -1,5 +1,6 @@
 #!/bin/sh
+#set -x
+#set -e
 if test "x$srcdir" = x ; then srcdir=`pwd`; fi
 . ../test_common.sh
@@ -29,7 +30,7 @@ cp ${srcdir}/ref_notzarr.tar.gz .
 gunzip ref_notzarr.tar.gz
 tar -xf ref_notzarr.tar
 if test "x$FEATURE_S3TESTS" = xyes ; then
-  ${execdir}/s3util -f notzarr.file/notzarr.txt -u "https://${URL}" -k "/${S3ISOPATH}/notzarr.s3/notzarr.txt" upload
+  ${execdir}/s3util -f notzarr.file/notzarr.txt -u "https://${URL}" -k "//${S3ISOPATH}/notzarr.s3/notzarr.txt" upload
 fi

 echo "Test empty file"
@@ -14,7 +14,7 @@ isolate "testdir_uts3sdk"
 # Create an isolation path for S3; build on the isolation directory
 S3ISODIR="$ISODIR"
 S3ISOPATH="/${S3TESTSUBTREE}"
-S3ISOPATH="${S3ISOPATH}/$S3ISODIR"
+S3ISOPATH="/${S3ISOPATH}/$S3ISODIR"

 test_cleanup() {
   ${CMD} ${execdir}/../nczarr_test/s3util -u "${URL}" -k "${S3ISOPATH}" clear