[svn-r20469] Bug 1386 - allow dimension size to be zero even though it isn't unlimited. This is a follow-up checkin for r20440:

1. Added a test case that extends a dataset of zero dimension size and then shrinks it back to zero dimension size.
2. Updated the Makefile so the new data file is included in the cleanup list.

Tested on jam — a relatively simple change.
This commit is contained in:
Raymond Lu 2011-04-11 10:59:41 -05:00
parent 338beff4e4
commit a67dbbaf81
5 changed files with 71 additions and 10 deletions

2
configure vendored
View File

@ -1,5 +1,5 @@
#! /bin/sh
# From configure.in Id: configure.in 20315 2011-03-24 18:33:45Z songyulu .
# From configure.in Id: configure.in 20405 2011-04-04 02:28:07Z koziol .
# Guess values for system-dependent variables and create Makefiles.
# Generated by GNU Autoconf 2.68 for HDF5 1.9.81.
#

View File

@ -81,6 +81,11 @@ const H5O_msg_class_t H5O_MSG_EFL[1] = {{
* Programmer: Robb Matzke
* Tuesday, November 25, 1997
*
* Modification:
* Raymond Lu
* 11 April 2011
* We allow zero dimension size starting from the 1.8.7 release.
* The dataset size of external storage can be zero.
*-------------------------------------------------------------------------
*/
static void *
@ -156,7 +161,6 @@ H5O_efl_decode(H5F_t *f, hid_t dxpl_id, H5O_t UNUSED *open_oh,
/* Size */
H5F_DECODE_LENGTH (f, p, mesg->slot[u].size);
HDassert(mesg->slot[u].size > 0);
} /* end for */
if(H5HL_unprotect(heap) < 0)

View File

@ -108,7 +108,7 @@ CHECK_CLEANFILES+=accum.h5 cmpd_dset.h5 compact_dataset.h5 dataset.h5 dset_offse
max_compact_dataset.h5 simple.h5 set_local.h5 random_chunks.h5 \
huge_chunks.h5 chunk_cache.h5 big_chunk.h5 chunk_expand.h5 \
copy_dcpl_newfile.h5 extend.h5 istore.h5 extlinks*.h5 frspace.h5 links*.h5 \
sys_file1 tfile[1-5].h5 th5s[1-3].h5 lheap.h5 fheap.h5 ohdr.h5 \
sys_file1 tfile[1-5].h5 th5s[1-4].h5 lheap.h5 fheap.h5 ohdr.h5 \
stab.h5 extern_[1-3].h5 extern_[1-4][ab].raw gheap[0-4].h5 \
dt_arith[1-2] links.h5 links[0-6]*.h5 extlinks[0-15].h5 tmp \
big.data big[0-9][0-9][0-9][0-9][0-9].h5 \

View File

@ -714,7 +714,7 @@ CHECK_CLEANFILES = *.chkexe *.chklog *.clog accum.h5 cmpd_dset.h5 \
max_compact_dataset.h5 simple.h5 set_local.h5 random_chunks.h5 \
huge_chunks.h5 chunk_cache.h5 big_chunk.h5 chunk_expand.h5 \
copy_dcpl_newfile.h5 extend.h5 istore.h5 extlinks*.h5 \
frspace.h5 links*.h5 sys_file1 tfile[1-5].h5 th5s[1-3].h5 \
frspace.h5 links*.h5 sys_file1 tfile[1-5].h5 th5s[1-4].h5 \
lheap.h5 fheap.h5 ohdr.h5 stab.h5 extern_[1-3].h5 \
extern_[1-4][ab].raw gheap[0-4].h5 dt_arith[1-2] links.h5 \
links[0-6]*.h5 extlinks[0-15].h5 tmp big.data \

View File

@ -528,6 +528,7 @@ test_h5s_zero_dim(void)
hid_t attr; /* Attribute ID */
int rank; /* Logical rank of dataspace */
hsize_t dims1[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t extend_dims[] = {SPACE1_DIM1, SPACE1_DIM2, SPACE1_DIM3};
hsize_t chunk_dims[] = {SPACE1_DIM1, SPACE1_DIM2/3, SPACE1_DIM3};
hsize_t tdims[SPACE1_RANK]; /* Dimension array to test with */
int wdata[SPACE1_DIM2][SPACE1_DIM3];
@ -610,6 +611,12 @@ test_h5s_zero_dim(void)
rdata_short[i][j] = 7;
}
for(i=0; i<SPACE1_DIM1; i++)
for(j=0; j<SPACE1_DIM2; j++)
for(k=0; k<SPACE1_DIM3; k++)
wdata_real[i][j][k] = i + j + k;
/* Contiguous dataset */
dset1 = H5Dcreate2(fid1, BASICDATASET, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dset1, FAIL, "H5Dcreate2");
@ -721,6 +728,54 @@ test_h5s_zero_dim(void)
}
}
/* Now extend the dataset and make sure we can write data to it */
ret = H5Dset_extent(dset1, extend_dims);
CHECK(ret, FAIL, "H5Dset_extent");
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata_real);
CHECK(ret, FAIL, "H5Dwrite");
ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
CHECK(ret, FAIL, "H5Fflush");
ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata_real);
CHECK(ret, FAIL, "H5Dread");
/* Check results */
for(i=0; i<SPACE1_DIM1; i++) {
for(j=0; j<SPACE1_DIM2; j++) {
for(k=0; k<SPACE1_DIM3; k++) {
if(rdata_real[i][j][k] != wdata_real[i][j][k]) {
H5_FAILED();
printf("element [%d][%d][%d] is %d but should have been %d\n",
i, j, k, rdata_real[i][j][k], wdata_real[i][j][k]);
}
}
}
}
/* Now shrink the dataset to 0 dimension size and make sure no data is in it */
extend_dims[0] = 0;
ret = H5Dset_extent(dset1, extend_dims);
CHECK(ret, FAIL, "H5Dset_extent");
ret = H5Fflush(fid1, H5F_SCOPE_GLOBAL);
CHECK(ret, FAIL, "H5Fflush");
/* Try reading from the dataset (make certain our buffer is unmodified) */
ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
CHECK(ret, FAIL, "H5Dread");
/* Check results */
for(i=0; i<SPACE1_DIM2; i++)
for(j=0; j<SPACE1_DIM3; j++) {
if(rdata[i][j] != 7) {
H5_FAILED();
printf("element [%d][%d] is %d but should have been 7\n",
i, j, rdata[i][j]);
}
}
ret = H5Pclose(plist_id);
CHECK(ret, FAIL, "H5Pclose");
@ -883,12 +938,6 @@ test_h5s_zero_dim(void)
dset1 = H5Dcreate2(fid1, BASICDATASET4, H5T_NATIVE_INT, sid1, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
CHECK(dset1, FAIL, "H5Dcreate2");
for(i=0; i<SPACE1_DIM1; i++)
for(j=0; j<SPACE1_DIM2; j++)
for(k=0; k<SPACE1_DIM3; k++)
wdata_real[i][j][k] = i + j + k;
ret = H5Dwrite(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wdata_real);
CHECK(ret, FAIL, "H5Dwrite");
@ -969,6 +1018,13 @@ test_h5s_zero_dim(void)
nelem = H5Sget_simple_extent_npoints(sid1);
VERIFY(nelem, 0, "H5Sget_simple_extent_npoints");
/* Verify the dimension sizes are correct */
rank = H5Sget_simple_extent_dims(sid1, tdims, NULL);
CHECK(rank, FAIL, "H5Sget_simple_extent_dims");
VERIFY(tdims[0], 0, "H5Sget_simple_extent_dims");
VERIFY(tdims[1], SPACE1_DIM2, "H5Sget_simple_extent_dims");
VERIFY(tdims[2], SPACE1_DIM3, "H5Sget_simple_extent_dims");
/* Try reading from the dataset (make certain our buffer is unmodified) */
ret = H5Dread(dset1, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, rdata);
CHECK(ret, FAIL, "H5Dread");
@ -2139,4 +2195,5 @@ cleanup_h5s(void)
remove(DATAFILE);
remove(NULLFILE);
remove(BASICFILE);
remove(ZEROFILE);
}