Mirror of https://github.com/HDFGroup/hdf5.git (synced 2025-02-17 16:10:24 +08:00)
[svn-r22242] Issue 7934 - The data sieve buffer size was applied to every dataset in the file. This
could waste memory when a dataset is smaller than the sieve buffer size. The library now picks the smaller of the dataset size and the sieve buffer size from the file access property. Tested on jam, koala, and ostrich.
parent b797818c8b
commit 3c017d7d37
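For context, a minimal sketch of the scenario this commit addresses (file and dataset names are made up for illustration, not part of the commit): the sieve buffer size is a file-level setting on the file access property list, but an individual dataset can be far smaller than that setting.

#include "hdf5.h"

int
main(void)
{
    hid_t   fapl, file, space, dset;
    hsize_t dims[1] = {16};                     /* 16 ints = 64 bytes of raw data */
    int     data[16] = {0};

    /* Ask for a 1 MiB data sieve buffer on the file access property list */
    fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_sieve_buf_size(fapl, 1024 * 1024);

    file  = H5Fcreate("sieve_example.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);
    space = H5Screate_simple(1, dims, NULL);

    /* A tiny contiguous dataset: with this change the library's internal
     * sieve buffer for it is capped at the 64-byte dataset size instead of
     * the 1 MiB value from the property list */
    dset = H5Dcreate2(file, "small", H5T_NATIVE_INT, space, H5P_DEFAULT,
                      H5P_DEFAULT, H5P_DEFAULT);
    H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, data);

    H5Dclose(dset);
    H5Sclose(space);
    H5Fclose(file);
    H5Pclose(fapl);
    return 0;
}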
@@ -93,6 +93,11 @@ New Features

    Library:
    --------
    - The data sieve buffer size was for all the datasets in the file. It
      could waste memory if any dataset size is smaller than the sieve buffer
      size. Now the library picks the smaller one between the dataset size
      and the sieve buffer size from the file access property. See Issue 7934.
      (SLU - 2012/4/2)
    - I added a new parameter of object access property list to the function
      H5Rdereference (Issue 2763). It's called H5Rdereference2 now. The former
      H5Rdereference function has been deprecated to H5Rdereference1. (SLU -
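The H5Rdereference entry above describes a separate API change; a minimal sketch of the new call, assuming an already-open file containing a dataset "/data" (the dataset name and helper are illustrative only):

#include "hdf5.h"

/* Dereference an object reference with the new H5Rdereference2() call, which
 * takes an object access property list argument (H5P_DEFAULT here); the old
 * H5Rdereference() is now the deprecated H5Rdereference1(). */
static void
deref_example(hid_t file_id)
{
    hobj_ref_t ref;
    hid_t      dset;

    /* Create an object reference to the dataset "/data"
     * (-1: no dataspace is needed for an object reference) */
    H5Rcreate(&ref, file_id, "/data", H5R_OBJECT, -1);

    dset = H5Rdereference2(file_id, H5P_DEFAULT, H5R_OBJECT, &ref);
    H5Dclose(dset);
}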
@@ -420,6 +420,7 @@ H5D_contig_construct(H5F_t *f, H5D_t *dset)
    hsize_t nelmts;                     /* Number of elements in dataspace */
    size_t dt_size;                     /* Size of datatype */
    hsize_t tmp_size;                   /* Temporary holder for raw data size */
    size_t tmp_sieve_buf_size;          /* Temporary holder for sieve buffer size */
    hsize_t dim[H5O_LAYOUT_NDIMS];      /* Current size of data in elements */
    hsize_t max_dim[H5O_LAYOUT_NDIMS];  /* Maximum size of data in elements */
    int ndims;                          /* Rank of dataspace */
@@ -464,8 +465,15 @@ H5D_contig_construct(H5F_t *f, H5D_t *dset)
    /* Assign the dataset's contiguous storage size */
    dset->shared->layout.storage.u.contig.size = tmp_size;

    /* Get the sieve buffer size for this dataset */
    dset->shared->cache.contig.sieve_buf_size = H5F_SIEVE_BUF_SIZE(f);
    /* Get the sieve buffer size for the file */
    tmp_sieve_buf_size = H5F_SIEVE_BUF_SIZE(f);

    /* Adjust the sieve buffer size to the smaller one between the dataset size and the buffer size
     * from the file access property. (SLU - 2012/3/30) */
    if(tmp_size < tmp_sieve_buf_size)
        dset->shared->cache.contig.sieve_buf_size = tmp_size;
    else
        dset->shared->cache.contig.sieve_buf_size = tmp_sieve_buf_size;

done:
    FUNC_LEAVE_NOAPI(ret_value)
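The adjustment above amounts to taking the minimum of the two sizes; a standalone sketch of the rule (the helper name is hypothetical, not library code):

#include <stddef.h>
#include <stdint.h>

/* Cap the per-dataset sieve buffer at the dataset's raw-data size.
 * Comparing before assigning means the 64-bit dataset size is only
 * narrowed to size_t when it is known to fit. */
static size_t
cap_sieve_buf_size(uint64_t dataset_size, size_t file_sieve_buf_size)
{
    if(dataset_size < (uint64_t)file_sieve_buf_size)
        return (size_t)dataset_size;        /* small dataset: shrink the buffer */
    else
        return file_sieve_buf_size;         /* otherwise keep the FAPL value */
}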
@@ -965,6 +973,7 @@ H5D_contig_writevv_sieve_cb(hsize_t dst_off, hsize_t src_off, size_t len,
    /* Allocate room for the data sieve buffer */
    if(NULL == (dset_contig->sieve_buf = H5FL_BLK_MALLOC(sieve_buf, dset_contig->sieve_buf_size)))
        HGOTO_ERROR(H5E_DATASET, H5E_CANTALLOC, FAIL, "memory allocation failed")

#ifdef H5_CLEAR_MEMORY
    if(dset_contig->sieve_size > len)
        HDmemset(dset_contig->sieve_buf + len, 0, (dset_contig->sieve_size - len));
@@ -384,6 +384,10 @@ H5D_layout_oh_read(H5D_t *dataset, hid_t dxpl_id, hid_t dapl_id, H5P_genplist_t

    switch(dataset->shared->layout.type) {
        case H5D_CONTIGUOUS:
            {
                hsize_t tmp_size;               /* Temporary holder for raw data size */
                size_t tmp_sieve_buf_size;      /* Temporary holder for sieve buffer size */

                /* Compute the size of the contiguous storage for versions of the
                 * layout message less than version 3 because versions 1 & 2 would
                 * truncate the dimension sizes to 32-bits of information. - QAK 5/26/04
@@ -392,7 +396,6 @@ H5D_layout_oh_read(H5D_t *dataset, hid_t dxpl_id, hid_t dapl_id, H5P_genplist_t
                hssize_t snelmts;               /* Temporary holder for number of elements in dataspace */
                hsize_t nelmts;                 /* Number of elements in dataspace */
                size_t dt_size;                 /* Size of datatype */
                hsize_t tmp_size;               /* Temporary holder for raw data size */

                /* Retrieve the number of elements in the dataspace */
                if((snelmts = H5S_GET_EXTENT_NPOINTS(dataset->shared->space)) < 0)
@@ -412,10 +415,19 @@ H5D_layout_oh_read(H5D_t *dataset, hid_t dxpl_id, hid_t dapl_id, H5P_genplist_t

                    /* Assign the dataset's contiguous storage size */
                    dataset->shared->layout.storage.u.contig.size = tmp_size;
                } /* end if */
                } else
                    tmp_size = dataset->shared->layout.storage.u.contig.size;

                /* Get the sieve buffer size for this dataset */
                dataset->shared->cache.contig.sieve_buf_size = H5F_SIEVE_BUF_SIZE(dataset->oloc.file);
                /* Get the sieve buffer size for the file */
                tmp_sieve_buf_size = H5F_SIEVE_BUF_SIZE(dataset->oloc.file);

                /* Adjust the sieve buffer size to the smaller one between the dataset size and the buffer size
                 * from the file access property. (SLU - 2012/3/30) */
                if(tmp_size < tmp_sieve_buf_size)
                    dataset->shared->cache.contig.sieve_buf_size = tmp_size;
                else
                    dataset->shared->cache.contig.sieve_buf_size = tmp_sieve_buf_size;
            }
            break;

        case H5D_CHUNKED:
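The same capping is applied here in H5D_layout_oh_read() when an existing contiguous dataset is opened, not only when one is created in H5D_contig_construct(). The cap lives in the dataset's internal cache; the file-level property is not modified. A short sketch of querying that file-level setting (file and dataset names reuse the illustrative ones from the first example):

#include <stdio.h>
#include "hdf5.h"

static void
reopen_example(void)
{
    hid_t  file, dset, fapl;
    size_t sieve = 0;

    file = H5Fopen("sieve_example.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    dset = H5Dopen2(file, "small", H5P_DEFAULT);    /* cap applied on open */

    /* Reports the file-level sieve buffer size in effect for this open
     * (the library default here, since H5P_DEFAULT was used); the
     * per-dataset cap is internal and not visible through this API */
    fapl = H5Fget_access_plist(file);
    H5Pget_sieve_buf_size(fapl, &sieve);
    printf("file-level sieve buffer size: %zu bytes\n", sieve);

    H5Pclose(fapl);
    H5Dclose(dset);
    H5Fclose(file);
}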