[svn-r15757] Bug #1312

Solution:
Check for the existence of chunks before pruning them; previously, an assertion failure was triggered when pruning was attempted on a chunked dataset whose chunks had never been written.

Tested: Windows, Linux, Solaris
Pedro Vicente Nunes 2008-10-02 14:56:45 -05:00
parent 4f7970f9f0
commit 8e97904f5f
2 changed files with 420 additions and 229 deletions
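
The substance of the change in H5D_set_extent() is a new guard on the chunk-pruning branch: chunks are pruned only when the dataset is chunked and its chunk index address is defined, i.e. chunks have actually been allocated in the file. Condensed from the diff below (HGOTO_ERROR is the library's usual error macro):

if(shrink && H5D_CHUNKED == dset->shared->layout.type &&
        H5F_addr_defined(dset->shared->layout.u.chunk.addr)) {
    /* Remove excess chunks only when a chunk index actually exists */
    if(H5D_chunk_prune_by_extent(dset, dxpl_id, curr_dims) < 0)
        HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to remove chunks")
} /* end if */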


@ -2313,9 +2313,9 @@ herr_t
H5D_set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
{
H5S_t *space; /* Dataset's dataspace */
int rank; /* Dataspace # of dimensions */
hsize_t curr_dims[H5O_LAYOUT_NDIMS]; /* Current dimension sizes */
htri_t changed; /* Whether the dataspace changed size */
herr_t ret_value = SUCCEED; /* Return value */
FUNC_ENTER_NOAPI_NOINIT(H5D_set_extent)
@ -2326,7 +2326,7 @@ H5D_set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
/* Check if we are allowed to modify this file */
if(0 == (H5F_get_intent(dset->oloc.file) & H5F_ACC_RDWR))
HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "no write intent on file")
/* Check if the filters in the DCPL will need to encode, and if so, can they? */
if(H5D_check_filters(dset) < 0)
@ -2337,20 +2337,22 @@ H5D_set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
/* Check if we are shrinking or expanding any of the dimensions */
if((rank = H5S_get_simple_extent_dims(space, curr_dims, NULL)) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTGET, FAIL, "can't get dataset dimensions")
/* Modify the size of the data space */
if((changed = H5S_set_extent(space, size)) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to modify size of data space")
/* Don't bother updating things, unless they've changed */
if(changed) {
if(changed)
{
hbool_t shrink = FALSE; /* Flag to indicate a dimension has shrank */
hbool_t expand = FALSE; /* Flag to indicate a dimension has grown */
unsigned u; /* Local index variable */
/* Determine if we are shrinking and/or expanding any dimensions */
for(u = 0; u < (unsigned)rank; u++) {
for(u = 0; u < (unsigned)rank; u++)
{
if(size[u] < curr_dims[u])
shrink = TRUE;
if(size[u] > curr_dims[u])
@ -2366,7 +2368,7 @@ H5D_set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
if(H5D_chunk_update_cache(dset, dxpl_id) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to update cached chunk indices")
/* Allocate space for the new parts of the dataset, if appropriate */
if(expand && dset->shared->dcpl_cache.fill.alloc_time == H5D_ALLOC_TIME_EARLY)
if(H5D_alloc_storage(dset, dxpl_id, H5D_ALLOC_EXTEND, FALSE) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to initialize dataset storage")
@ -2375,13 +2377,17 @@ H5D_set_extent(H5D_t *dset, const hsize_t *size, hid_t dxpl_id)
/*-------------------------------------------------------------------------
* Remove chunk information in the case of chunked datasets
* This removal takes place only in case we are shrinking the dataset
* and if the chunks are written
*-------------------------------------------------------------------------
*/
if(shrink && H5D_CHUNKED == dset->shared->layout.type) {
/* Remove excess chunks */
if(H5D_chunk_prune_by_extent(dset, dxpl_id, curr_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to remove chunks ")
} /* end if */
if( shrink &&
H5D_CHUNKED == dset->shared->layout.type &&
H5F_addr_defined(dset->shared->layout.u.chunk.addr))
{
/* Remove excess chunks */
if(H5D_chunk_prune_by_extent(dset, dxpl_id, curr_dims) < 0)
HGOTO_ERROR(H5E_DATASET, H5E_WRITEERROR, FAIL, "unable to remove chunks ")
} /* end if */
/* Mark the dataspace as dirty, for later writing to the file */
dset->shared->space_dirty = TRUE;
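
The guarded branch above is what the new "initialized chunks" test in the second file exercises. A minimal, self-contained sketch of that scenario, condensed from the test (the file name "repro.h5" is illustrative, and H5Dcreate2 assumes the 1.8-style API used by the test): a chunked dataset is created but never written, so no chunk index is allocated, and its extent is then shrunk to 0 x 0, which previously tripped the assertion.

#include "hdf5.h"

int main(void)
{
    hsize_t dims[2]    = { 90, 90 };
    hsize_t maxdims[2] = { H5S_UNLIMITED, H5S_UNLIMITED };
    hsize_t chunk[2]   = { 20, 20 };
    hsize_t zero[2]    = { 0, 0 };
    hid_t   fid, sid, dcpl, did;

    fid  = H5Fcreate("repro.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    sid  = H5Screate_simple(2, dims, maxdims);
    dcpl = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_chunk(dcpl, 2, chunk);

    /* Chunked dataset that is never written: no chunk B-tree address is defined */
    did = H5Dcreate2(fid, "Dataset3", H5T_NATIVE_INT, sid,
                     H5P_DEFAULT, dcpl, H5P_DEFAULT);

    /* Shrink the extent before any chunks exist; with the fix, pruning is skipped */
    if(H5Dset_extent(did, zero) < 0)
        return 1;

    H5Dclose(did);
    H5Sclose(sid);
    H5Pclose(dcpl);
    H5Fclose(fid);
    return 0;
}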


@ -39,15 +39,16 @@
int main( void )
{
hid_t file_id;
hid_t dataset_id=(-1);
hid_t space_id=(-1);
hid_t plist_id=(-1);
hid_t fcpl; /* File creation property list */
hid_t fid;
hid_t did=(-1);
hid_t sid=(-1);
hid_t dcpl=(-1);
hid_t fcpl;
hsize_t dims[RANK] = { 90, 90 };
hsize_t dims_new[RANK] = { 70, 70 };
hsize_t dims_chunk[RANK] = { 20, 20 };
hsize_t dims_out[RANK];
hsize_t dims3[RANK];
hsize_t maxdims[RANK] = { H5S_UNLIMITED, H5S_UNLIMITED };
int data[ 90 ][ 90 ];
int buf1[ 70 ][ 70 ];
@ -55,7 +56,7 @@ int main( void )
int i, j, n = 0;
int fillvalue = 1; /* Fill value for the dataset */
#ifdef H5_HAVE_FILTER_DEFLATE
hbool_t do_compress; /* Iterator for looping over compress/no compress */
#endif /* H5_HAVE_FILTER_DEFLATE */
@ -68,7 +69,8 @@ int main( void )
*-------------------------------------------------------------------------
*/
#ifdef H5_HAVE_FILTER_DEFLATE
for(do_compress = FALSE; do_compress <= TRUE; do_compress++) {
for (do_compress = FALSE; do_compress <= TRUE; do_compress++)
{
if(do_compress)
puts("Testing WITH compression on chunks.");
else
@ -80,18 +82,26 @@ int main( void )
TESTING("extend dataset create with fill value");
/* Create a new file using default properties. */
if((file_id = H5Fcreate("set_extent_create.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR;
if((fid = H5Fcreate("set_extent_create.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Create the data space with unlimited dimensions. */
if((space_id = H5Screate_simple(RANK, dims, maxdims)) < 0) TEST_ERROR;
if((sid = H5Screate_simple(RANK, dims, maxdims)) < 0)
TEST_ERROR;
/* Modify dataset creation properties, i.e. enable chunking. */
if((plist_id = H5Pcreate (H5P_DATASET_CREATE)) < 0) TEST_ERROR;
if(H5Pset_chunk(plist_id, RANK, dims_chunk) < 0) TEST_ERROR;
if(H5Pset_fill_value(plist_id, H5T_NATIVE_INT, &fillvalue) < 0) TEST_ERROR;
if((dcpl = H5Pcreate (H5P_DATASET_CREATE)) < 0)
TEST_ERROR;
if(H5Pset_chunk(dcpl, RANK, dims_chunk) < 0)
TEST_ERROR;
if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fillvalue) < 0)
TEST_ERROR;
#ifdef H5_HAVE_FILTER_DEFLATE
if(do_compress)
if(H5Pset_deflate(plist_id, 9) < 0) FAIL_STACK_ERROR
{
if(H5Pset_deflate(dcpl, 9) < 0)
FAIL_STACK_ERROR
}
#endif /* H5_HAVE_FILTER_DEFLATE */
@ -101,10 +111,12 @@ int main( void )
*/
/* Create a new dataset */
if((dataset_id = H5Dcreate2(file_id , "Dataset1", H5T_NATIVE_INT, space_id, H5P_DEFAULT, plist_id, H5P_DEFAULT)) < 0) TEST_ERROR;
if((did = H5Dcreate2(fid , "Dataset1", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Write the data. */
if(H5Dwrite(dataset_id , H5T_NATIVE_INT, space_id, H5S_ALL, H5P_DEFAULT, data) < 0) TEST_ERROR;
if(H5Dwrite(did , H5T_NATIVE_INT, sid, H5S_ALL, H5P_DEFAULT, data) < 0)
TEST_ERROR;
/*-------------------------------------------------------------------------
* Set new dimensions for the array; shrink it
@ -112,16 +124,21 @@ int main( void )
*/
/* Set new dimensions for the array. */
if(H5Dset_extent(dataset_id , dims_new) < 0) TEST_ERROR;
if(H5Dset_extent(did , dims_new) < 0)
TEST_ERROR;
/* Get the space. */
if((space_id = H5Dget_space(dataset_id)) < 0) TEST_ERROR;
if((sid = H5Dget_space(did)) < 0)
TEST_ERROR;
/* Get dimensions. */
if(H5Sget_simple_extent_dims(space_id, dims_out, NULL) < 0) TEST_ERROR;
if(H5Sget_simple_extent_dims(sid, dims_out, NULL) < 0)
TEST_ERROR;
if(dims_out[0] != dims_new[0]) TEST_ERROR;
if(dims_out[1] != dims_new[1]) TEST_ERROR;
if(dims_out[0] != dims_new[0])
TEST_ERROR;
if(dims_out[1] != dims_new[1])
TEST_ERROR;
/*-------------------------------------------------------------------------
@ -130,17 +147,23 @@ int main( void )
*/
/* Read the new dataset. */
if (H5Dread( dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf1 ) < 0) TEST_ERROR;
if (H5Dread( did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf1 ) < 0)
TEST_ERROR;
/* Compare the read array with the original array */
for( i = 0; i < (int)dims_out[0]; i++ )
{
for( j = 0; j < (int)dims_out[1]; j++ )
if ( buf1[i][j] != data[i][j] ) {
{
if ( buf1[i][j] != data[i][j] )
{
printf("buf1[%d][%d] = %d\n", i, j, buf1[i][j]);
printf("data[%d][%d] = %d\n", i, j, data[i][j]);
TEST_ERROR;
} /* end if */
}
}
}
/*-------------------------------------------------------------------------
@ -149,16 +172,21 @@ int main( void )
*/
/* Set new dimensions for the array. */
if(H5Dset_extent(dataset_id, dims) < 0) TEST_ERROR;
if(H5Dset_extent(did, dims) < 0)
TEST_ERROR;
/* Get the space. */
if((space_id = H5Dget_space(dataset_id)) < 0) TEST_ERROR;
if((sid = H5Dget_space(did)) < 0)
TEST_ERROR;
/* Get dimensions. */
if(H5Sget_simple_extent_dims(space_id, dims_out, NULL) < 0) TEST_ERROR;
if(H5Sget_simple_extent_dims(sid, dims_out, NULL) < 0)
TEST_ERROR;
if(dims_out[0] != dims[0]) TEST_ERROR;
if(dims_out[1] != dims[1]) TEST_ERROR;
if(dims_out[0] != dims[0])
TEST_ERROR;
if(dims_out[1] != dims[1])
TEST_ERROR;
/*-------------------------------------------------------------------------
@ -167,125 +195,26 @@ int main( void )
*/
/* Read the new dataset. */
if(H5Dread(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf2) < 0) TEST_ERROR;
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf2) < 0)
TEST_ERROR;
/* Compare the read array with the original array */
for(i = 0; i < (int)dims_out[0]; i++ )
{
for(j = 0; j < (int)dims_out[1]; j++ )
if(i >= 70 || j >= 70) {
{
if(i >= 70 || j >= 70)
{
if(buf2[i][j] != fillvalue) {
printf("buf1[%d][%d] = %d\n", i, j, buf1[i][j]);
printf("fillvalue = %d\n", fillvalue);
TEST_ERROR;
} /* end if */
} /* end if */
else {
if(buf2[i][j] != data[i][j]) TEST_ERROR;
}
/*-------------------------------------------------------------------------
* Close/release resources
*-------------------------------------------------------------------------
*/
if(H5Dclose(dataset_id) < 0) TEST_ERROR
if(H5Sclose(space_id) < 0) TEST_ERROR
if(H5Pclose(plist_id) < 0) TEST_ERROR
PASSED();
TESTING("extend dataset create without fill value");
/* Create the data space with unlimited dimensions. */
if((space_id = H5Screate_simple(RANK, dims, maxdims)) < 0) TEST_ERROR;
/* Modify dataset creation properties, i.e. enable chunking. */
if((plist_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;
if(H5Pset_chunk(plist_id, RANK, dims_chunk) < 0) TEST_ERROR;
if(H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0) TEST_ERROR;
#ifdef H5_HAVE_FILTER_DEFLATE
if(do_compress)
if(H5Pset_deflate(plist_id, 9) < 0) FAIL_STACK_ERROR
#endif /* H5_HAVE_FILTER_DEFLATE */
/*-------------------------------------------------------------------------
* Create and write one dataset
*-------------------------------------------------------------------------
*/
/* Create a new dataset */
if((dataset_id = H5Dcreate2(file_id , "Dataset2", H5T_NATIVE_INT, space_id, H5P_DEFAULT, plist_id, H5P_DEFAULT)) < 0) TEST_ERROR;
/* Write the data. */
if(H5Dwrite(dataset_id , H5T_NATIVE_INT, space_id, H5S_ALL, H5P_DEFAULT, data) < 0) TEST_ERROR;
/*-------------------------------------------------------------------------
* Set new dimensions for the array; shrink it
*-------------------------------------------------------------------------
*/
/* Set new dimensions for the array. */
if(H5Dset_extent(dataset_id , dims_new) < 0) TEST_ERROR;
/* Get the space. */
if((space_id = H5Dget_space(dataset_id)) < 0) TEST_ERROR;
/* Get dimensions. */
if(H5Sget_simple_extent_dims(space_id, dims_out, NULL) < 0) TEST_ERROR;
if(dims_out[0] != dims_new[0]) TEST_ERROR;
/*-------------------------------------------------------------------------
* Read
*-------------------------------------------------------------------------
*/
/* Read the new dataset. */
if (H5Dread( dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf1 ) < 0) TEST_ERROR;
/* Compare the read array with the original array */
for( i = 0; i < (int)dims_out[0]; i++ )
for( j = 0; j < (int)dims_out[1]; j++ )
if ( buf1[i][j] != data[i][j] ) TEST_ERROR;
/*-------------------------------------------------------------------------
* Set new dimensions for the array; expand it again
*-------------------------------------------------------------------------
*/
/* Set new dimensions for the array. */
if (H5Dset_extent( dataset_id , dims ) < 0) TEST_ERROR;
/* Get the space. */
if ((space_id = H5Dget_space( dataset_id )) < 0) TEST_ERROR;
/* Get dimensions. */
if (H5Sget_simple_extent_dims( space_id, dims_out, NULL ) < 0) TEST_ERROR;
if ( dims_out[0] != dims[0] ) TEST_ERROR;
/*-------------------------------------------------------------------------
* Read
*-------------------------------------------------------------------------
*/
/* Read the new dataset. */
if (H5Dread( dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf2 ) < 0) TEST_ERROR;
/* Compare the read array with the original array */
for( i = 0; i < (int)dims_out[0]; i++ ) {
for( j = 0; j < (int)dims_out[1]; j++ ) {
if ( i >= 70 || j >= 70 ) {
if ( buf2[i][j] != 0 ) TEST_ERROR;
}
else {
if ( buf2[i][j] != data[i][j] ) TEST_ERROR;
else
{
if(buf2[i][j] != data[i][j])
TEST_ERROR;
}
}
}
@ -296,11 +225,150 @@ int main( void )
*-------------------------------------------------------------------------
*/
H5Dclose( dataset_id );
H5Sclose( space_id );
H5Pclose( plist_id );
if(H5Dclose(did) < 0)
TEST_ERROR
if(H5Sclose(sid) < 0)
TEST_ERROR
if(H5Pclose(dcpl) < 0)
TEST_ERROR
H5Fclose( file_id );
PASSED();
TESTING("extend dataset create without fill value");
/* Create the data space with unlimited dimensions. */
if((sid = H5Screate_simple(RANK, dims, maxdims)) < 0)
TEST_ERROR;
/* Modify dataset creation properties, i.e. enable chunking. */
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
TEST_ERROR;
if(H5Pset_chunk(dcpl, RANK, dims_chunk) < 0)
TEST_ERROR;
if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0)
TEST_ERROR;
#ifdef H5_HAVE_FILTER_DEFLATE
if(do_compress)
if(H5Pset_deflate(dcpl, 9) < 0)
FAIL_STACK_ERROR
#endif /* H5_HAVE_FILTER_DEFLATE */
/*-------------------------------------------------------------------------
* Create and write one dataset
*-------------------------------------------------------------------------
*/
/* Create a new dataset */
if((did = H5Dcreate2(fid , "Dataset2", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Write the data. */
if(H5Dwrite(did , H5T_NATIVE_INT, sid, H5S_ALL, H5P_DEFAULT, data) < 0)
TEST_ERROR;
/*-------------------------------------------------------------------------
* Set new dimensions for the array; shrink it
*-------------------------------------------------------------------------
*/
/* Set new dimensions for the array. */
if(H5Dset_extent(did , dims_new) < 0)
TEST_ERROR;
/* Get the space. */
if((sid = H5Dget_space(did)) < 0)
TEST_ERROR;
/* Get dimensions. */
if(H5Sget_simple_extent_dims(sid, dims_out, NULL) < 0)
TEST_ERROR;
if(dims_out[0] != dims_new[0])
TEST_ERROR;
/*-------------------------------------------------------------------------
* Read
*-------------------------------------------------------------------------
*/
/* Read the new dataset. */
if (H5Dread( did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf1 ) < 0)
TEST_ERROR;
/* Compare the read array with the original array */
for( i = 0; i < (int)dims_out[0]; i++ )
{
for( j = 0; j < (int)dims_out[1]; j++ )
{
if ( buf1[i][j] != data[i][j] )
TEST_ERROR;
}
}
/*-------------------------------------------------------------------------
* Set new dimensions for the array; expand it again
*-------------------------------------------------------------------------
*/
/* Set new dimensions for the array. */
if (H5Dset_extent( did , dims ) < 0)
TEST_ERROR;
/* Get the space. */
if ((sid = H5Dget_space( did )) < 0)
TEST_ERROR;
/* Get dimensions. */
if (H5Sget_simple_extent_dims( sid, dims_out, NULL ) < 0)
TEST_ERROR;
if ( dims_out[0] != dims[0] )
TEST_ERROR;
/*-------------------------------------------------------------------------
* Read
*-------------------------------------------------------------------------
*/
/* Read the new dataset. */
if (H5Dread( did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf2 ) < 0)
TEST_ERROR;
/* Compare the read array with the original array */
for( i = 0; i < (int)dims_out[0]; i++ )
{
for( j = 0; j < (int)dims_out[1]; j++ )
{
if ( i >= 70 || j >= 70 ) {
if ( buf2[i][j] != 0 )
TEST_ERROR;
}
else {
if ( buf2[i][j] != data[i][j] )
TEST_ERROR;
}
}
}
/*-------------------------------------------------------------------------
* Close/release resources
*-------------------------------------------------------------------------
*/
if (H5Dclose(did) < 0)
TEST_ERROR
if (H5Sclose(sid) < 0)
TEST_ERROR
if (H5Pclose(dcpl) < 0)
TEST_ERROR
if (H5Fclose( fid ) < 0)
TEST_ERROR
PASSED();
@ -312,69 +380,91 @@ int main( void )
*/
/* Create a file creation property list */
if((fcpl = H5Pcreate(H5P_FILE_CREATE)) < 0) TEST_ERROR;
if((fcpl = H5Pcreate(H5P_FILE_CREATE)) < 0)
TEST_ERROR;
/* Set non-default indexed storage B-tree internal 'K' value */
if(H5Pset_istore_k(fcpl,ISTORE_IK) < 0) TEST_ERROR;
if(H5Pset_istore_k(fcpl,ISTORE_IK) < 0)
TEST_ERROR;
/* Create a new file using properties. */
if((file_id = H5Fcreate("set_extent_read.h5", H5F_ACC_TRUNC, fcpl, H5P_DEFAULT)) < 0) TEST_ERROR;
if((fid = H5Fcreate("set_extent_read.h5", H5F_ACC_TRUNC, fcpl, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Close property list */
if(H5Pclose(fcpl) < 0) TEST_ERROR;
if(H5Pclose(fcpl) < 0)
TEST_ERROR;
TESTING("extend dataset read with fill value");
/* Create the data space with unlimited dimensions. */
if((space_id = H5Screate_simple(RANK, dims, maxdims)) < 0) TEST_ERROR;
if((sid = H5Screate_simple(RANK, dims, maxdims)) < 0)
TEST_ERROR;
/* Modify dataset creation properties, i.e. enable chunking. */
if((plist_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;
if(H5Pset_chunk(plist_id, RANK, dims_chunk) < 0) TEST_ERROR;
if(H5Pset_fill_value(plist_id, H5T_NATIVE_INT, &fillvalue) < 0) TEST_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
TEST_ERROR;
if(H5Pset_chunk(dcpl, RANK, dims_chunk) < 0)
TEST_ERROR;
if(H5Pset_fill_value(dcpl, H5T_NATIVE_INT, &fillvalue) < 0)
TEST_ERROR;
#ifdef H5_HAVE_FILTER_DEFLATE
if(do_compress)
if(H5Pset_deflate(plist_id, 9) < 0) FAIL_STACK_ERROR
if(H5Pset_deflate(dcpl, 9) < 0)
FAIL_STACK_ERROR
#endif /* H5_HAVE_FILTER_DEFLATE */
/* Create a new dataset within the file using cparms creation properties. */
if((dataset_id = H5Dcreate2(file_id , "Dataset1", H5T_NATIVE_INT, space_id, H5P_DEFAULT, plist_id, H5P_DEFAULT)) < 0) TEST_ERROR;
if((did = H5Dcreate2(fid , "Dataset1", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Write the data. */
if(H5Dwrite(dataset_id , H5T_NATIVE_INT, space_id, H5S_ALL, H5P_DEFAULT, data) < 0) TEST_ERROR;
if(H5Dwrite(did , H5T_NATIVE_INT, sid, H5S_ALL, H5P_DEFAULT, data) < 0)
TEST_ERROR;
/* Close/release resources. */
if(H5Dclose(dataset_id) < 0) FAIL_STACK_ERROR
if(H5Sclose(space_id) < 0) FAIL_STACK_ERROR
if(H5Pclose(plist_id) < 0) FAIL_STACK_ERROR
if(H5Fclose(file_id) < 0) FAIL_STACK_ERROR
if(H5Dclose(did) < 0) FAIL_STACK_ERROR
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
/* Open the file */
if((file_id = H5Fopen("set_extent_read.h5", H5F_ACC_RDWR, H5P_DEFAULT)) < 0) TEST_ERROR;
if((fid = H5Fopen("set_extent_read.h5", H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Open the dataset */
if((dataset_id = H5Dopen2(file_id , "Dataset1", H5P_DEFAULT)) < 0) TEST_ERROR;
if((did = H5Dopen2(fid , "Dataset1", H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Set new dimensions for the array. */
if(H5Dset_extent(dataset_id, dims_new) < 0) TEST_ERROR;
if(H5Dset_extent(did, dims_new) < 0)
TEST_ERROR;
/* Get the space. */
if((space_id = H5Dget_space(dataset_id)) < 0) TEST_ERROR;
if((sid = H5Dget_space(did)) < 0)
TEST_ERROR;
/* Get dimensions. */
if(H5Sget_simple_extent_dims(space_id, dims_out, NULL) < 0) TEST_ERROR;
if(H5Sget_simple_extent_dims(sid, dims_out, NULL) < 0)
TEST_ERROR;
if(dims_out[0] != dims_new[0]) TEST_ERROR;
if(dims_out[0] != dims_new[0])
TEST_ERROR;
/* Read the new dataset. */
if(H5Dread(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf1) < 0) TEST_ERROR;
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf1) < 0)
TEST_ERROR;
/* Compare the read array with the original array */
for(i = 0; i < (int)dims_out[0]; i++)
{
for(j = 0; j < (int)dims_out[1]; j++)
{
if(buf1[i][j] != data[i][j])
TEST_ERROR;
}
}
/*-------------------------------------------------------------------------
* Set new dimensions for the array; expand it again
@ -382,27 +472,38 @@ int main( void )
*/
/* Set new dimensions for the array. */
if (H5Dset_extent( dataset_id , dims ) < 0) TEST_ERROR;
if (H5Dset_extent( did , dims ) < 0)
TEST_ERROR;
/* Get the space. */
if ((space_id = H5Dget_space( dataset_id )) < 0) TEST_ERROR;
if ((sid = H5Dget_space( did )) < 0)
TEST_ERROR;
/* Get dimensions. */
if (H5Sget_simple_extent_dims( space_id, dims_out, NULL ) < 0) TEST_ERROR;
if (H5Sget_simple_extent_dims( sid, dims_out, NULL ) < 0)
TEST_ERROR;
if ( dims_out[0] != dims[0] ) TEST_ERROR;
if ( dims_out[0] != dims[0] )
TEST_ERROR;
/* Read the new dataset. */
if (H5Dread( dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf2 ) < 0) TEST_ERROR;
if (H5Dread( did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf2 ) < 0)
TEST_ERROR;
/* Compare the read array with the original array */
for( i = 0; i < (int)dims_out[0]; i++ ) {
for( j = 0; j < (int)dims_out[1]; j++ ) {
if ( i >= 70 || j >= 70 ) {
if ( buf2[i][j] != fillvalue ) TEST_ERROR;
for( i = 0; i < (int)dims_out[0]; i++ )
{
for( j = 0; j < (int)dims_out[1]; j++ )
{
if ( i >= 70 || j >= 70 )
{
if ( buf2[i][j] != fillvalue )
TEST_ERROR;
}
else {
if ( buf2[i][j] != data[i][j] ) TEST_ERROR;
else
{
if ( buf2[i][j] != data[i][j] )
TEST_ERROR;
}
}
}
@ -413,8 +514,10 @@ int main( void )
*-------------------------------------------------------------------------
*/
H5Dclose( dataset_id );
H5Sclose( space_id );
if (H5Dclose(did) < 0)
TEST_ERROR
if (H5Sclose(sid) < 0)
TEST_ERROR
PASSED();
@ -422,55 +525,73 @@ int main( void )
TESTING("extend dataset read without fill value");
/* Create the data space with unlimited dimensions. */
if((space_id = H5Screate_simple(RANK, dims, maxdims)) < 0) TEST_ERROR;
if((sid = H5Screate_simple(RANK, dims, maxdims)) < 0)
TEST_ERROR;
/* Modify dataset creation properties, i.e. enable chunking. */
if((plist_id = H5Pcreate(H5P_DATASET_CREATE)) < 0) TEST_ERROR;
if(H5Pset_chunk(plist_id, RANK, dims_chunk) < 0) TEST_ERROR;
if(H5Pset_fill_time(plist_id, H5D_FILL_TIME_ALLOC) < 0) TEST_ERROR;
if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
TEST_ERROR;
if(H5Pset_chunk(dcpl, RANK, dims_chunk) < 0)
TEST_ERROR;
if(H5Pset_fill_time(dcpl, H5D_FILL_TIME_ALLOC) < 0)
TEST_ERROR;
#ifdef H5_HAVE_FILTER_DEFLATE
if(do_compress)
if(H5Pset_deflate(plist_id, 9) < 0) FAIL_STACK_ERROR
if(H5Pset_deflate(dcpl, 9) < 0)
FAIL_STACK_ERROR
#endif /* H5_HAVE_FILTER_DEFLATE */
/* Create a new dataset within the file using cparms creation properties. */
if((dataset_id = H5Dcreate2(file_id , "Dataset2", H5T_NATIVE_INT, space_id, H5P_DEFAULT, plist_id, H5P_DEFAULT)) < 0) TEST_ERROR;
if((did = H5Dcreate2(fid , "Dataset2", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Write the data. */
if(H5Dwrite(dataset_id , H5T_NATIVE_INT, space_id, H5S_ALL, H5P_DEFAULT, data) < 0) TEST_ERROR;
if(H5Dwrite(did , H5T_NATIVE_INT, sid, H5S_ALL, H5P_DEFAULT, data) < 0)
TEST_ERROR;
/* Close/release resources. */
if(H5Dclose(dataset_id) < 0) FAIL_STACK_ERROR
if(H5Sclose(space_id) < 0) FAIL_STACK_ERROR
if(H5Pclose(plist_id) < 0) FAIL_STACK_ERROR
if(H5Fclose(file_id) < 0) FAIL_STACK_ERROR
if(H5Dclose(did) < 0) FAIL_STACK_ERROR
if(H5Sclose(sid) < 0) FAIL_STACK_ERROR
if(H5Pclose(dcpl) < 0) FAIL_STACK_ERROR
if(H5Fclose(fid) < 0) FAIL_STACK_ERROR
/* Open the file */
if((file_id = H5Fopen("set_extent_read.h5", H5F_ACC_RDWR, H5P_DEFAULT)) < 0) TEST_ERROR;
if((fid = H5Fopen("set_extent_read.h5", H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Open the dataset */
if((dataset_id = H5Dopen2(file_id , "Dataset2", H5P_DEFAULT)) < 0) TEST_ERROR;
if((did = H5Dopen2(fid , "Dataset2", H5P_DEFAULT)) < 0)
TEST_ERROR;
/* Set new dimensions for the array. */
if(H5Dset_extent(dataset_id, dims_new) < 0) TEST_ERROR;
if(H5Dset_extent(did, dims_new) < 0)
TEST_ERROR;
/* Get the space. */
if((space_id = H5Dget_space(dataset_id)) < 0) TEST_ERROR;
if((sid = H5Dget_space(did)) < 0)
TEST_ERROR;
/* Get dimensions. */
if(H5Sget_simple_extent_dims(space_id, dims_out, NULL) < 0) TEST_ERROR;
if(H5Sget_simple_extent_dims(sid, dims_out, NULL) < 0)
TEST_ERROR;
if(dims_out[0] != dims_new[0]) TEST_ERROR;
if(dims_out[0] != dims_new[0])
TEST_ERROR;
/* Read the new dataset. */
if(H5Dread(dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf1) < 0) TEST_ERROR;
if(H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf1) < 0)
TEST_ERROR;
/* Compare the read array with the original array */
for(i = 0; i < (int)dims_out[0]; i++)
{
for(j = 0; j < (int)dims_out[1]; j++)
{
if(buf1[i][j] != data[i][j])
TEST_ERROR;
}
}
/*-------------------------------------------------------------------------
* Set new dimensions for the array; expand it again
@ -478,44 +599,108 @@ int main( void )
*/
/* Set new dimensions for the array. */
if (H5Dset_extent( dataset_id , dims ) < 0) TEST_ERROR;
if (H5Dset_extent( did , dims ) < 0)
TEST_ERROR;
/* Get the space. */
if ((space_id = H5Dget_space( dataset_id )) < 0) TEST_ERROR;
if ((sid = H5Dget_space( did )) < 0)
TEST_ERROR;
/* Get dimensions. */
if (H5Sget_simple_extent_dims( space_id, dims_out, NULL ) < 0) TEST_ERROR;
if (H5Sget_simple_extent_dims( sid, dims_out, NULL ) < 0)
TEST_ERROR;
if ( dims_out[0] != dims[0] ) TEST_ERROR;
if ( dims_out[0] != dims[0] )
TEST_ERROR;
/* Read the new dataset. */
if (H5Dread( dataset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf2 ) < 0) TEST_ERROR;
if (H5Dread( did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf2 ) < 0)
TEST_ERROR;
/* Compare the read array with the original array */
for( i = 0; i < (int)dims_out[0]; i++ ) {
for( j = 0; j < (int)dims_out[1]; j++ ) {
if ( i >= 70 || j >= 70 ) {
if ( buf2[i][j] != 0 ) TEST_ERROR;
for( i = 0; i < (int)dims_out[0]; i++ )
{
for( j = 0; j < (int)dims_out[1]; j++ )
{
if ( i >= 70 || j >= 70 )
{
if ( buf2[i][j] != 0 )
TEST_ERROR;
}
else {
if ( buf2[i][j] != data[i][j] ) TEST_ERROR;
else
{
if ( buf2[i][j] != data[i][j] )
TEST_ERROR;
}
}
}
/*-------------------------------------------------------------------------
* Close/release resources
* close
*-------------------------------------------------------------------------
*/
H5Dclose( dataset_id );
H5Sclose( space_id );
H5Fclose( file_id );
if (H5Dclose(did) < 0)
TEST_ERROR
if (H5Sclose(sid) < 0)
TEST_ERROR
if (H5Fclose( fid ) < 0)
TEST_ERROR
PASSED();
/*-------------------------------------------------------------------------
* test a dataset with initialized chunks
*-------------------------------------------------------------------------
*/
TESTING("initialized chunks");
dims3[ 0 ] = 90;
dims3[ 1 ] = 90;
if ((fid = H5Fopen("set_extent_create.h5", H5F_ACC_RDWR, H5P_DEFAULT)) < 0)
TEST_ERROR;
if ((sid = H5Screate_simple(RANK, dims3, maxdims)) < 0)
TEST_ERROR;
if ((dcpl = H5Pcreate (H5P_DATASET_CREATE)) < 0)
TEST_ERROR;
if (H5Pset_chunk(dcpl, RANK, dims_chunk) < 0)
TEST_ERROR;
#ifdef H5_HAVE_FILTER_DEFLATE
if(do_compress)
{
if(H5Pset_deflate(dcpl, 9) < 0)
FAIL_STACK_ERROR
}
#endif /* H5_HAVE_FILTER_DEFLATE */
/* create a new dataset */
if((did = H5Dcreate2(fid , "Dataset3", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
TEST_ERROR;
/* set new dimensions for the array */
dims3[ 0 ] = 0;
dims3[ 1 ] = 0;
if (H5Dset_extent( did , dims3 ) < 0)
TEST_ERROR;
if (H5Dclose(did) < 0)
TEST_ERROR
if (H5Sclose(sid) < 0)
TEST_ERROR
if (H5Pclose(dcpl) < 0)
TEST_ERROR
if (H5Fclose( fid ) < 0)
TEST_ERROR
PASSED();
#ifdef H5_HAVE_FILTER_DEFLATE
} /* end for */
#endif /* H5_HAVE_FILTER_DEFLATE */
@ -525,10 +710,10 @@ int main( void )
error:
H5Dclose( dataset_id );
H5Sclose( space_id );
H5Pclose( plist_id );
H5Fclose( file_id );
H5Dclose( did );
H5Sclose( sid );
H5Pclose( dcpl );
H5Fclose( fid );
H5_FAILED();
return 1;
}