[svn-r21801] Remove unintentional change to dsets.c from r21793.

Tested: durandal
This commit is contained in:
Neil Fortner 2011-12-05 10:44:04 -05:00
parent 51d81cb5ef
commit 414e34edb9

View File

@ -8066,121 +8066,6 @@ error:
return -1;
} /* end test_chunk_expand() */
/*-------------------------------------------------------------------------
 * Function: test_large_chunk_shrink
 *
 * Purpose: Verify that a chunked dataset whose chunk is larger than
 *          1 MB can be shrunk by more than 1 MB and then re-extended,
 *          with the re-extended region reading back as the fill value.
 *
 * Return: Success: 0
 *         Failure: -1
 *
 * Programmer: Neil Fortner
 *             Monday, November 28, 2011
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_large_chunk_shrink(hid_t fapl)
{
    char filename[FILENAME_BUF_SIZE];
    hid_t file_id = -1;          /* File ID */
    hid_t create_plist = -1;     /* Dataset creation property list ID */
    hid_t space_id = -1;         /* 1-D file dataspace ID */
    hid_t scalar_space_id = -1;  /* Scalar (memory) dataspace ID */
    hid_t dset_id = -1;          /* Dataset ID */
    hsize_t cur_dim, max_dim, chunk_dim; /* Dataset and chunk dimensions */
    hsize_t start;               /* Hyperslab offset */
    hsize_t count;               /* Hyperslab size */
    unsigned elem_out, elem_in;  /* Element written / element read */

    TESTING("shrinking large chunk");

    h5_fixname(FILENAME[10], fapl, filename, sizeof filename);

    /* Create the test file */
    if((file_id = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0) FAIL_STACK_ERROR

    /* Request a 2 MB chunk via a dataset creation property list */
    if((create_plist = H5Pcreate(H5P_DATASET_CREATE)) < 0) FAIL_STACK_ERROR
    chunk_dim = 2 * 1024 * 1024 / sizeof(unsigned);
    if(H5Pset_chunk(create_plist, 1, &chunk_dim) < 0) FAIL_STACK_ERROR

    /* Scalar dataspace, used as the memory space for single-element I/O */
    if((scalar_space_id = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR

    /* Extendible 1-D file dataspace, initially exactly one chunk (2 MB) long */
    cur_dim = 2 * 1024 * 1024 / sizeof(unsigned);
    max_dim = H5S_UNLIMITED;
    if((space_id = H5Screate_simple(1, &cur_dim, &max_dim)) < 0) FAIL_STACK_ERROR

    /* Create the 2 MB chunked dataset */
    if((dset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_UINT, space_id, H5P_DEFAULT, create_plist, H5P_DEFAULT)) < 0)
        FAIL_STACK_ERROR

    /* Select the last element of the dataset */
    start = cur_dim - 1;
    count = 1;
    if(H5Sselect_hyperslab(space_id, H5S_SELECT_SET, &start, NULL, &count, NULL) < 0) FAIL_STACK_ERROR

    /* An unwritten element must read back as the fill value (0) */
    elem_in = 1;
    if(H5Dread(dset_id, H5T_NATIVE_UINT, scalar_space_id, space_id, H5P_DEFAULT, &elem_in) < 0) FAIL_STACK_ERROR
    if(elem_in != 0) FAIL_PUTS_ERROR("invalid unwritten element read");

    /* Write a nonzero value to the selected element... */
    elem_out = 2;
    if(H5Dwrite(dset_id, H5T_NATIVE_UINT, scalar_space_id, space_id, H5P_DEFAULT, &elem_out) < 0) FAIL_STACK_ERROR

    /* ...and confirm it reads back unchanged */
    elem_in = elem_out + 1;
    if(H5Dread(dset_id, H5T_NATIVE_UINT, scalar_space_id, space_id, H5P_DEFAULT, &elem_in) < 0) FAIL_STACK_ERROR
    if(elem_in != elem_out) FAIL_PUTS_ERROR("invalid written element read");

    /* Shrink the dataset to 512 KB -- more than 1 MB smaller than the chunk */
    cur_dim = 512 * 1024 / sizeof(unsigned);
    if(H5Dset_extent(dset_id, &cur_dim) < 0) FAIL_STACK_ERROR

    /* Grow the dataset back to the full 2 MB */
    cur_dim = 2 * 1024 * 1024 / sizeof(unsigned);
    if(H5Dset_extent(dset_id, &cur_dim) < 0) FAIL_STACK_ERROR

    /* The written element was discarded by the shrink, so it must now
     * read back as the fill value again */
    elem_in = 1;
    if(H5Dread(dset_id, H5T_NATIVE_UINT, scalar_space_id, space_id, H5P_DEFAULT, &elem_in) < 0) FAIL_STACK_ERROR
    if(elem_in != 0) FAIL_PUTS_ERROR("invalid element read");

    /* Release all IDs */
    if(H5Sclose(space_id) < 0) FAIL_STACK_ERROR
    if(H5Sclose(scalar_space_id) < 0) FAIL_STACK_ERROR
    if(H5Dclose(dset_id) < 0) FAIL_STACK_ERROR
    if(H5Pclose(create_plist) < 0) FAIL_STACK_ERROR
    if(H5Fclose(file_id) < 0) FAIL_STACK_ERROR

    PASSED();

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Pclose(create_plist);
        H5Dclose(dset_id);
        H5Sclose(space_id);
        H5Sclose(scalar_space_id);
        H5Fclose(file_id);
    } H5E_END_TRY;
    return -1;
} /* end test_large_chunk_shrink() */
/*-------------------------------------------------------------------------
* Function: test_large_chunk_shrink