Binh-Minh Ribler 2018-07-28 08:05:16 -05:00
commit 7d9f5ed49a
4 changed files with 85 additions and 41 deletions


@@ -188,45 +188,47 @@ tar2zip()
# Returns 0 if successful; 1 otherwise
#
# need function to create another temporary directory, extract the
# $tmpdir/$HDF5_VERS.tar into it, add (create) build-unix.sh,
# CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz and ZLib.tar.gz,
# and then tar.gz it.
# $tmpdir/$HDF5_VERS.tar into it, create build-VS*.bat files,
# add CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz,
# ZLib.tar.gz, HDF5 examples, and then zip it.
tar2cmakezip()
{
if [ $# -ne 3 ]; then
echo "usage: tar2cmakezip <tarfilename> <tgzfilename>"
echo "usage: tar2cmakezip <tarfilename> <zipfilename>"
return 1
fi
cmziptmpdir=/tmp/cmziptmpdir$$
mkdir -p $cmziptmpdir
cmziptmpsubdir=$cmziptmpdir/CMake-$HDF5_VERS
mkdir -p $cmziptmpsubdir
version=$1
tarfile=$2
zipfile=$3
# step 1: untar tarball in cmziptmpsubdir
(cd $cmziptmpdir; tar xf -) < $tarfile
(cd $cmziptmpsubdir; tar xf -) < $tarfile
# sanity check
if [ ! -d $cmziptmpdir/$version ]; then
echo "untar did not create $cmziptmpdir/$version source dir"
if [ ! -d $cmziptmpsubdir/$version ]; then
echo "untar did not create $cmziptmpsubdir/$version source dir"
# cleanup
rm -rf $cmziptmpdir
return 1
fi
# step 2: add batch files for building with CMake on Windows
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2013-32.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2013-64.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2015-32.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2015-64.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2017-32.bat $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/build-VS2017-64.bat $cmziptmpdir
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2013 -C Release -V -O hdf5.log" > build-VS2013-32.bat; chmod 755 build-VS2013-32.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201364 -C Release -V -O hdf5.log" > build-VS2013-64.bat; chmod 755 build-VS2013-64.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2015 -C Release -V -O hdf5.log" > build-VS2015-32.bat; chmod 755 build-VS2015-32.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201564 -C Release -V -O hdf5.log" > build-VS2015-64.bat; chmod 755 build-VS2015-64.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS2017 -C Release -V -O hdf5.log" > build-VS2017-32.bat; chmod 755 build-VS2017-32.bat)
(cd $cmziptmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=VS201764 -C Release -V -O hdf5.log" > build-VS2017-64.bat; chmod 755 build-VS2017-64.bat)
# step 3: add SZIP.tar.gz, ZLib.tar.gz, the HDF5 examples tarball, and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpdir
cp $cmziptmpdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpdir
cp $cmziptmpdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpdir
cp $cmziptmpdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmziptmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.10.8-Source.tar.gz $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmziptmpsubdir
cp $cmziptmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmziptmpsubdir
# step 4: convert text files
# There may be a simpler way to do this.
@@ -234,12 +236,13 @@ tar2cmakezip()
# -k Keep the date stamp
# -q quiet mode
# grep output is redirected to /dev/null because -q and -s are not portable.
find $cmziptmpdir/$version | \
find $cmziptmpsubdir/$version | \
while read inf; do \
if file $inf | grep "$inf\: .*text" > /dev/null 2>&1 ; then \
unix2dos -q -k $inf; \
fi\
done
# step 5: make zipball
# -9 maximum compression
# -y Store symbolic links as such in the zip archive
@@ -284,9 +287,9 @@ tar2cmakezip()
# Returns 0 if successful; 1 otherwise
#
# need function to create another temporary directory, extract the
# $tmpdir/$HDF5_VERS.tar into it, add (create) build-unix.sh,
# CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz and ZLib.tar.gz,
# and then tar.gz it.
# $tmpdir/$HDF5_VERS.tar into it, create build-unix.sh,
# add CTestScript.cmake, HDF5config.cmake, SZIP.tar.gz,
# ZLib.tar.gz, HDF5 examples, and then tar.gz it.
tar2cmaketgz()
{
if [ $# -ne 3 ]; then
@@ -294,16 +297,17 @@ tar2cmaketgz()
return 1
fi
cmgztmpdir=/tmp/cmgztmpdir$$
mkdir -p $cmgztmpdir
cmgztmpsubdir=$cmgztmpdir/CMake-$HDF5_VERS
mkdir -p $cmgztmpsubdir
version=$1
tarfile=$2
tgzfile=$3
# step 1: untar tarball in cmgztmpdir
(cd $cmgztmpdir; tar xf -) < $tarfile
(cd $cmgztmpsubdir; tar xf -) < $tarfile
# sanity check
if [ ! -d $cmgztmpdir/$version ]; then
echo "untar did not create $cmgztmpdir/$version source dir"
if [ ! -d $cmgztmpsubdir/$version ]; then
echo "untar did not create $cmgztmpsubdir/$version source dir"
# cleanup
rm -rf $cmgztmpdir
return 1
@@ -311,14 +315,15 @@ tar2cmaketgz()
# step 2: add build-unix.sh script
(cd $cmgztmpdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh)
(cd $cmgztmpsubdir; echo "ctest -S HDF5config.cmake,BUILD_GENERATOR=Unix -C Release -V -O hdf5.log" > build-unix.sh; chmod 755 build-unix.sh)
# step 3: add SZIP.tar.gz, ZLib.tar.gz, the HDF5 examples tarball, and cmake files
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpdir
cp $cmgztmpdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpdir
cp $cmgztmpdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpdir
cp $cmgztmpdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpdir
cp /mnt/scr1/pre-release/hdf5/CMake/SZip.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/ZLib.tar.gz $cmgztmpsubdir
cp /mnt/scr1/pre-release/hdf5/CMake/HDF5Examples-1.10.8-Source.tar.gz $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/CTestScript.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5config.cmake $cmgztmpsubdir
cp $cmgztmpsubdir/$version/config/cmake/scripts/HDF5options.cmake $cmgztmpsubdir
tar czf $DEST/CMake-$HDF5_VERS.tar.gz -C $cmgztmpdir . || exit 1
# cleanup


@@ -49,13 +49,14 @@ libhdf5_cpp_la_LIBADD=$(LIBHDF5)
# Public headers
include_HEADERS=H5Cpp.h H5AbstractDs.h H5AtomType.h \
H5Attribute.h H5Classes.h H5CommonFG.h H5CompType.h \
H5DataSet.h H5DataSpace.h H5DataType.h H5OcreatProp.h \
H5DaccProp.h H5DcreatProp.h H5DxferProp.h H5EnumType.h \
H5Exception.h H5FaccProp.h H5FcreatProp.h H5File.h \
H5FloatType.h H5Group.h H5IdComponent.h H5Include.h \
H5IntType.h H5LcreatProp.h H5LaccProp.h H5Library.h \
H5Location.h H5Object.h H5PredType.h H5PropList.h \
H5StrType.h H5CppDoc.h H5ArrayType.h H5VarLenType.h
H5DataSet.h H5DataSpace.h H5DataType.h H5OcreatProp.h \
H5DaccProp.h H5DcreatProp.h \
H5DxferProp.h H5EnumType.h H5Exception.h H5FaccProp.h \
H5FcreatProp.h H5File.h H5FloatType.h H5Group.h \
H5IdComponent.h H5Include.h H5IntType.h H5LcreatProp.h \
H5LaccProp.h H5Library.h H5Location.h H5Object.h \
H5PredType.h H5PropList.h H5StrType.h H5CppDoc.h \
H5ArrayType.h H5VarLenType.h
# h5c++ and libhdf5.settings are generated during configure. Remove them
# only during distclean.


@@ -85,7 +85,7 @@ case "$cc_vendor-$cc_version" in
esac
case "$host_os-$host_cpu" in
# cygwin needs the "-std-c99" flag removed, so make
# cygwin needs the "-std=c99" flag removed, so make
# a specific case for Cygwin without the flag and a default
# case to add the flag everywhere else
cygwin-*)


@@ -197,6 +197,44 @@ Bug Fixes since HDF5-1.10.2 release
(DER - 2018/02/26, HDFFV-10356)
- A bug was discovered in the parallel library which caused partial
parallel reads of filtered datasets to return incorrect data. The
library used the incorrect dataspace for each chunk read, causing
the selection used in each chunk to be wrong.
The bug was not caught during testing because all of the current
tests which do parallel reads of filtered data read all of the data
using an H5S_ALL selection. Several tests were added which exercise
partial parallel reads.
(JTH - 2018/07/16, HDFFV-10467)
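
For illustration, here is a minimal sketch (not from the HDF5 test suite) of the access pattern the new tests exercise: each rank reads only its own hyperslab of a chunked, filter-compressed dataset through collective MPI-IO instead of selecting everything with H5S_ALL. The file name, dataset name, datatype, and dimensions are assumptions, and the first dimension is assumed to divide evenly across ranks.

    /* Sketch: partial parallel read of a filtered dataset.  Assumes
     * "example.h5" contains a 2-D, chunked, deflate-compressed integer
     * dataset named "data" (illustrative names, not from the test suite). */
    #include <stdlib.h>
    #include <mpi.h>
    #include "hdf5.h"

    int main(int argc, char *argv[])
    {
        int mpi_rank, mpi_size;
        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &mpi_rank);
        MPI_Comm_size(MPI_COMM_WORLD, &mpi_size);

        /* Open the file for parallel (MPI-IO) access */
        hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
        H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);
        hid_t file = H5Fopen("example.h5", H5F_ACC_RDONLY, fapl);

        hid_t dset   = H5Dopen2(file, "data", H5P_DEFAULT);
        hid_t fspace = H5Dget_space(dset);
        hsize_t dims[2];
        H5Sget_simple_extent_dims(fspace, dims, NULL);

        /* Partial selection: this rank's contiguous slab of rows,
         * rather than the H5S_ALL selection the old tests used.
         * Assumes dims[0] is divisible by the number of ranks. */
        hsize_t count[2] = { dims[0] / (hsize_t)mpi_size, dims[1] };
        hsize_t start[2] = { (hsize_t)mpi_rank * count[0], 0 };
        H5Sselect_hyperslab(fspace, H5S_SELECT_SET, start, NULL, count, NULL);
        hid_t mspace = H5Screate_simple(2, count, NULL);

        /* Collective transfer for the parallel read */
        hid_t dxpl = H5Pcreate(H5P_DATASET_XFER);
        H5Pset_dxpl_mpio(dxpl, H5FD_MPIO_COLLECTIVE);

        int *buf = malloc((size_t)(count[0] * count[1]) * sizeof(int));
        H5Dread(dset, H5T_NATIVE_INT, mspace, fspace, dxpl, buf);

        free(buf);
        H5Pclose(dxpl); H5Sclose(mspace); H5Sclose(fspace);
        H5Dclose(dset); H5Fclose(file); H5Pclose(fapl);
        MPI_Finalize();
        return 0;
    }
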
- A bug was discovered in the parallel library which caused parallel
writes of filtered datasets to trigger an assertion failure in the
file free space manager.
This occurred when the filter used caused chunks to repeatedly shrink
and grow over the course of several dataset writes. The previous chunk
information, such as the size of the chunk and the offset in the file,
was being cached and not updated after each write, causing the next write
to the chunk to retrieve the incorrect cached information and run into
issues when reallocating space in the file for the chunk.
(JTH - 2018/07/16, HDFFV-10509)
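
As a rough sketch of the failing pattern (with assumed file and dataset names), repeatedly rewriting deflate-compressed chunks with data that alternates between highly compressible and incompressible makes the on-disk chunk sizes shrink and grow between writes. The example is serial for brevity; the reported assertion failure occurred on the parallel (MPI-IO) write path.

    /* Sketch: compressed chunks that repeatedly shrink and grow.
     * File and dataset names are illustrative. */
    #include <stdlib.h>
    #include "hdf5.h"

    #define N 1024

    int main(void)
    {
        hsize_t dims[1]  = { N };
        hsize_t chunk[1] = { 128 };
        int     buf[N];

        hid_t file = H5Fcreate("shrink_grow.h5", H5F_ACC_TRUNC,
                               H5P_DEFAULT, H5P_DEFAULT);

        hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
        H5Pset_chunk(dcpl, 1, chunk);
        H5Pset_deflate(dcpl, 6);            /* gzip filter on the chunks */

        hid_t space = H5Screate_simple(1, dims, NULL);
        hid_t dset  = H5Dcreate2(file, "data", H5T_NATIVE_INT, space,
                                 H5P_DEFAULT, dcpl, H5P_DEFAULT);

        for (int pass = 0; pass < 10; pass++) {
            /* even passes: constant data, compresses well, chunks shrink;
             * odd passes: pseudo-random data, chunks grow back */
            for (int i = 0; i < N; i++)
                buf[i] = (pass % 2 == 0) ? 0 : rand();
            H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
        }

        H5Dclose(dset); H5Sclose(space); H5Pclose(dcpl); H5Fclose(file);
        return 0;
    }
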
- A bug was discovered in the parallel library which caused the
H5D__mpio_array_gatherv() function to allocate too much memory.
When the function is called with the 'allgather' parameter set
to a non-true value, the function will receive data from all MPI
ranks and gather it to the single rank specified by the 'root'
parameter. However, the bug in the function caused memory for
the received data to be allocated on all MPI ranks, not just the
single rank specified as the receiver. In some circumstances,
this would cause an application to fail due to the large amounts
of memory being allocated.
(JTH - 2018/07/16, HDFFV-10467)
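
The allocation pattern at issue, sketched with plain MPI calls rather than the internal H5D__mpio_array_gatherv() code: when data is gathered to a single root rank, only that rank needs the receive buffer and the counts/displacements arrays.

    /* Sketch: gather variable-sized data to one root rank, allocating
     * receive-side memory on the root only (plain MPI, illustrative). */
    #include <stdio.h>
    #include <stdlib.h>
    #include <mpi.h>

    int main(int argc, char *argv[])
    {
        int rank, size, root = 0;
        MPI_Init(&argc, &argv);
        MPI_Comm_rank(MPI_COMM_WORLD, &rank);
        MPI_Comm_size(MPI_COMM_WORLD, &size);

        int  sendcount = rank + 1;                  /* per-rank amount varies */
        int *sendbuf   = malloc(sendcount * sizeof(int));
        for (int i = 0; i < sendcount; i++)
            sendbuf[i] = rank;

        int *recvcounts = NULL, *displs = NULL, *recvbuf = NULL;
        if (rank == root) {
            /* Only the root allocates receive-side bookkeeping; allocating
             * it on every rank is the wasteful behavior that was fixed. */
            recvcounts = malloc(size * sizeof(int));
            displs     = malloc(size * sizeof(int));
        }

        /* Root learns how much each rank will send */
        MPI_Gather(&sendcount, 1, MPI_INT, recvcounts, 1, MPI_INT,
                   root, MPI_COMM_WORLD);

        int total = 0;
        if (rank == root) {
            for (int i = 0; i < size; i++) {
                displs[i] = total;
                total    += recvcounts[i];
            }
            recvbuf = malloc(total * sizeof(int)); /* receive buffer, root only */
        }

        MPI_Gatherv(sendbuf, sendcount, MPI_INT,
                    recvbuf, recvcounts, displs, MPI_INT,
                    root, MPI_COMM_WORLD);

        if (rank == root)
            printf("root gathered %d elements\n", total);

        free(sendbuf); free(recvcounts); free(displs); free(recvbuf);
        MPI_Finalize();
        return 0;
    }
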
Configuration
-------------