[svn-r355] Changes since 19980417

----------------------

This checkin completes the compression stuff almost two weeks ahead of
schedule.  It should now all be working and documented.

./Makefile.in
./config/commence.in
	A distclean causes the distribution makefile to be copied to
	Makefile again.

./src/H5D.c
	Opening a compressed dataset now works properly.

./src/H5P.c
./src/H5Ppublic.h
	The H5Pset_compression() and H5Pget_compression() now work as
	documented.

./src/H5T.c
	Fixed a bug that caused the library to crash sometimes if
	debugging is turned on for the `t' package.

./src/H5Z.c
	Fixed a bug where the number of bytes attributed to
	compression overruns was too low in the debugging output.

./test/dsets.c
	More compression tests: reading of uninitialized data, opening
	an existing dataset and reading it, partial I/O which isn't
	aligned on chunk boundaries, and use of application-defined
	compression methods.

./MANIFEST
	Removed ./test/dspace.c.
This commit is contained in:
Robb Matzke 1998-04-21 15:32:07 -05:00
parent 54256b22c0
commit 91a34f543d
9 changed files with 267 additions and 40 deletions

View File

@ -195,7 +195,6 @@
./test/big.c
./test/cmpd_dset.c
./test/dsets.c
./test/dspace.c
./test/dtypes.c
./test/extend.c
./test/external.c

View File

@ -81,6 +81,7 @@ distclean:
$(RM) config/commence config/conclude config/depend
$(RM) config.cache config.log config.status src/H5config.h
$(RM) Makefile
$(CP) Makefile.dist Makefile
maintainer-clean:
@echo "This command is intented for maintainers to use;"

View File

@ -18,6 +18,7 @@ LIBS=@LIBS@
AR=@AR@
RANLIB=@RANLIB@
RM=rm -f
CP=cp
INSTALL=@INSTALL@
INSTALL_PROGRAM=@INSTALL_PROGRAM@
INSTALL_DATA=@INSTALL_DATA@

View File

@ -969,6 +969,14 @@ H5D_open(H5G_t *loc, const char *name)
HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, NULL,
"unable to load type or space info from dataset header");
}
/* Get the optional compression message */
if (NULL==H5O_read (&(dataset->ent), H5O_COMPRESS, 0,
&(dataset->create_parms->compress))) {
H5E_clear ();
HDmemset (&(dataset->create_parms->compress), 0,
sizeof(dataset->create_parms->compress));
}
/*
* Get the raw data layout info. It's actually stored in two locations:

View File

@ -1859,6 +1859,10 @@ H5Pget_preserve (hid_t plist_id)
* specific compression initialization functions like
* H5Pset_deflate().
*
* The FLAGS, CD_SIZE, and CLIENT_DATA are copied to the
* property list and eventually to the file and passed to the
* compression functions.
*
* Return: Success: SUCCEED
*
* Failure: FAIL
@ -1871,7 +1875,8 @@ H5Pget_preserve (hid_t plist_id)
*-------------------------------------------------------------------------
*/
herr_t
H5Pset_compression (hid_t plist_id, H5Z_method_t method)
H5Pset_compression (hid_t plist_id, H5Z_method_t method, unsigned int flags,
size_t cd_size, const void *client_data)
{
H5D_create_t *plist = NULL;
@ -1891,7 +1896,12 @@ H5Pset_compression (hid_t plist_id, H5Z_method_t method)
/* Clear any previous compression method info, then set new value */
H5O_reset (H5O_COMPRESS, &(plist->compress));
plist->compress.method = method;
plist->compress.flags = flags;
plist->compress.cd_size = cd_size;
if (cd_size) {
plist->compress.client_data = H5MM_xmalloc (cd_size);
HDmemcpy (plist->compress.client_data, client_data, cd_size);
}
FUNC_LEAVE (SUCCEED);
}
@ -1900,7 +1910,11 @@ H5Pset_compression (hid_t plist_id, H5Z_method_t method)
* Function: H5Pget_compression
*
* Purpose: Gets the compression method information from a dataset
* creation property list.
* creation property list. The CLIENT_DATA buffer is initially
* CD_SIZE bytes. On return, CLIENT_DATA will be initialized
* with at most that many bytes, and CD_SIZE will contain the
* actual size of the client data, which might be larger than
* its original value.
*
* Return: Success: Compression method.
*
@ -1914,7 +1928,8 @@ H5Pset_compression (hid_t plist_id, H5Z_method_t method)
*-------------------------------------------------------------------------
*/
H5Z_method_t
H5Pget_compression (hid_t plist_id)
H5Pget_compression (hid_t plist_id, unsigned int *flags/*out*/,
size_t *cd_size/*in,out*/, void *client_data/*out*/)
{
H5D_create_t *plist = NULL;
@ -1927,6 +1942,16 @@ H5Pget_compression (hid_t plist_id)
"not a dataset creation property list");
}
/* Output values */
if (flags) *flags = plist->compress.flags;
if (cd_size) {
if (*cd_size>0 && client_data) {
HDmemcpy (client_data, plist->compress.client_data,
MIN(plist->compress.cd_size, *cd_size));
}
*cd_size = plist->compress.cd_size;
}
FUNC_LEAVE (plist->compress.method);
}

View File

@ -88,8 +88,12 @@ herr_t H5Pset_buffer (hid_t plist_id, size_t size, void *tconv, void *bkg);
size_t H5Pget_buffer (hid_t plist_id, void **tconv/*out*/, void **bkg/*out*/);
herr_t H5Pset_preserve (hid_t plist_id, hbool_t status);
int H5Pget_preserve (hid_t plist_id);
herr_t H5Pset_compression (hid_t plist_id, H5Z_method_t method);
H5Z_method_t H5Pget_compression (hid_t plist_id);
herr_t H5Pset_compression (hid_t plist_id, H5Z_method_t method,
unsigned int flags, size_t cd_size,
const void *client_data);
H5Z_method_t H5Pget_compression (hid_t plist_id, unsigned int *flags/*out*/,
size_t *cd_size/*in,out*/,
void *client_data/*out*/);
herr_t H5Pset_deflate (hid_t plist_id, int level);
int H5Pget_deflate (hid_t plist_id);

View File

@ -3424,6 +3424,10 @@ H5T_find(const H5T_t *src, const H5T_t *dst, H5T_bkg_t need_bkg,
FUNC_ENTER(H5T_find, NULL);
if (!noop_cdata.stats) {
noop_cdata.stats = H5MM_xcalloc (1, sizeof(H5T_stats_t));
}
/* No-op case */
if (need_bkg<H5T_BKG_YES && 0==H5T_cmp(src, dst)) {
*pcdata = &noop_cdata;

View File

@ -268,7 +268,7 @@ H5Z_compress (const H5O_compress_t *comp, size_t nbytes, const void *src,
} else if (ret_value>=nbytes) {
/* Output is not smaller than input */
#ifdef H5Z_DEBUG
H5Z_g[method].comp.over += 1;
H5Z_g[method].comp.over += nbytes;
over = 1;
#endif
HGOTO_DONE (0);

View File

@ -36,6 +36,9 @@
#define DSET_SIMPLE_IO_NAME "simple_io"
#define DSET_TCONV_NAME "tconv"
#define DSET_COMPRESS_NAME "compressed"
#define DSET_BOGUS_NAME "bogus"
#define H5Z_BOGUS 255
/*-------------------------------------------------------------------------
@ -351,6 +354,33 @@ test_tconv(hid_t file)
return 0;
}
/*-------------------------------------------------------------------------
* Function: bogus
*
* Purpose: A bogus compression method.
*
* Return: Success: SRC_NBYTES, see compression documentation.
*
* Failure: 0
*
* Programmer: Robb Matzke
* Tuesday, April 21, 1998
*
* Modifications:
*
*-------------------------------------------------------------------------
*/
static size_t
bogus (unsigned int flags,
size_t cd_size, const void *client_data,
size_t src_nbytes, const void *src,
size_t dst_nbytes, void *dst/*out*/)
{
/* Identity "compression": copy the source bytes to DST unchanged and
 * return SRC_NBYTES (per the header comment above, a return of the
 * full source size means no size reduction was achieved).  FLAGS,
 * CD_SIZE, CLIENT_DATA, and DST_NBYTES exist only to satisfy the
 * H5Zregister() callback signature and are intentionally unused.
 * NOTE(review): assumes dst_nbytes >= src_nbytes -- confirm the
 * caller guarantees a large enough output buffer. */
memcpy (dst, src, src_nbytes);
return src_nbytes;
}
/*-------------------------------------------------------------------------
* Function: test_compression
@ -374,48 +404,89 @@ test_compression(hid_t file)
hid_t dataset, space, xfer, dc;
herr_t status;
int points[100][200], check[100][200];
int i, j, n;
hsize_t dims[2], chunk_size[2];
const hsize_t size[2] = {100, 200};
const hsize_t chunk_size[2] = {2, 25};
const hssize_t hs_offset[2] = {7, 30};
const hsize_t hs_size[2] = {4, 50};
hsize_t i, j, n;
void *tconv_buf = NULL;
printf("%-70s", "Testing compression");
fflush (stdout);
printf ("%-70s", "Testing compression (setup)");
fflush (stderr);
/* Initialize the dataset */
for (i = n = 0; i < 100; i++) {
for (j = 0; j < 100; j++) {
points[i][j] = n++;
}
}
/* Create the data space */
dims[0] = 100;
dims[1] = 200;
space = H5Screate_simple(2, dims, NULL);
space = H5Screate_simple(2, size, NULL);
assert(space>=0);
/* Create a small conversion buffer to test strip mining */
tconv_buf = malloc (1000);
xfer = H5Pcreate (H5P_DATASET_XFER);
assert (xfer>=0);
#if 0
tconv_buf = malloc (1000);
status = H5Pset_buffer (xfer, 1000, tconv_buf, NULL);
assert (status>=0);
#endif
/* Use chunked storage with compression */
dc = H5Pcreate (H5P_DATASET_CREATE);
chunk_size[0] = 2;
chunk_size[1] = 25;
H5Pset_chunk (dc, 2, chunk_size);
H5Pset_deflate (dc, 6);
/* Create the dataset */
dataset = H5Dcreate(file, DSET_COMPRESS_NAME, H5T_NATIVE_INT, space, dc);
assert(dataset >= 0);
puts (" PASSED");
/*----------------------------------------------------------------------
* STEP 1: Read uninitialized data. It should be zero.
*----------------------------------------------------------------------
*/
printf ("%-70s", "Testing compression (uninitialized read)");
fflush (stdout);
status = H5Dread (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
xfer, check);
if (status<0) goto error;
for (i=0; i<size[0]; i++) {
for (j=0; j<size[1]; j++) {
if (0!=check[i][j]) {
puts("*FAILED*");
printf(" Read a non-zero value.\n");
printf(" At index %lu,%lu\n",
(unsigned long)i, (unsigned long)j);
goto error;
}
}
}
puts (" PASSED");
/*----------------------------------------------------------------------
* STEP 2: Test compression by setting up a chunked dataset and writing
* to it.
*----------------------------------------------------------------------
*/
printf("%-70s", "Testing compression (write)");
fflush (stdout);
for (i=n=0; i<size[0]; i++) {
for (j=0; j<size[1]; j++) {
points[i][j] = n++;
}
}
/* Write the data to the dataset */
status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
xfer, points);
if (status<0) goto error;
puts (" PASSED");
/*----------------------------------------------------------------------
* STEP 3: Try to read the data we just wrote.
*----------------------------------------------------------------------
*/
printf ("%-70s", "Testing compression (read)");
fflush (stdout);
/* Read the dataset back */
status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
@ -423,23 +494,31 @@ test_compression(hid_t file)
if (status<0) goto error;
/* Check that the values read are the same as the values written */
for (i = 0; i < 100; i++) {
for (j = 0; j < 200; j++) {
for (i=0; i<size[0]; i++) {
for (j=0; j<size[1]; j++) {
if (points[i][j] != check[i][j]) {
puts("*FAILED*");
printf(" Read different values than written.\n");
printf(" At index %d,%d\n", i, j);
printf(" At index %lu,%lu\n",
(unsigned long)i, (unsigned long)j);
goto error;
}
}
}
puts (" PASSED");
/*
* Write some random data to the dataset, hopefully causing chunks to be
* reallocated as they grow.
/*----------------------------------------------------------------------
* STEP 4: Write new data over the top of the old data. The new data is
* random thus not very compressible, and will cause the chunks to move
* around as they grow. We only change values for the left half of the
* dataset although we rewrite the whole thing.
*----------------------------------------------------------------------
*/
for (i=0; i<100; i++) {
for (j=0; j<100; j++) {
printf ("%-70s", "Testing compression (modify)");
fflush (stdout);
for (i=0; i<size[0]; i++) {
for (j=0; j<size[1]/2; j++) {
points[i][j] = rand ();
}
}
@ -454,23 +533,129 @@ test_compression(hid_t file)
if (status<0) goto error;
/* Check that the values read are the same as the values written */
for (i = 0; i < 100; i++) {
for (j = 0; j < 200; j++) {
for (i=0; i<size[0]; i++) {
for (j=0; j<size[1]; j++) {
if (points[i][j] != check[i][j]) {
puts("*FAILED*");
printf(" Read different values than written.\n");
printf(" At index %d,%d\n", i, j);
printf(" At index %lu,%lu\n",
(unsigned long)i, (unsigned long)j);
goto error;
}
}
}
puts (" PASSED");
/*----------------------------------------------------------------------
* STEP 5: Close the dataset and then open it and read it again. This
* insures that the compression message is picked up properly from the
* object header.
*----------------------------------------------------------------------
*/
printf ("%-70s", "Testing compression (re-open)");
fflush (stdout);
H5Dclose (dataset);
dataset = H5Dopen (file, DSET_COMPRESS_NAME);
status = H5Dread(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
xfer, check);
if (status<0) goto error;
/* Check that the values read are the same as the values written */
for (i=0; i<size[0]; i++) {
for (j=0; j<size[1]; j++) {
if (points[i][j] != check[i][j]) {
puts("*FAILED*");
printf(" Read different values than written.\n");
printf(" At index %lu,%lu\n",
(unsigned long)i, (unsigned long)j);
goto error;
}
}
}
puts (" PASSED");
/*----------------------------------------------------------------------
* STEP 6: Test partial I/O by writing to and then reading from a
* hyperslab of the dataset. The hyperslab does not line up on chunk
* boundaries (we know that case already works from above tests).
*----------------------------------------------------------------------
*/
printf ("%-70s", "Testing compression (partial I/O)");
fflush (stderr);
for (i=0; i<hs_size[0]; i++) {
for (j=0; j<hs_size[1]; j++) {
points[hs_offset[0]+i][hs_offset[1]+j] = rand ();
}
}
H5Sset_hyperslab (space, hs_offset, hs_size, NULL);
status = H5Dwrite (dataset, H5T_NATIVE_INT, space, space, xfer, points);
if (status<0) goto error;
status = H5Dread (dataset, H5T_NATIVE_INT, space, space, xfer, check);
if (status<0) goto error;
/* Check that the values read are the same as the values written */
for (i=0; i<hs_size[0]; i++) {
for (j=0; j<hs_size[1]; j++) {
if (points[hs_offset[0]+i][hs_offset[1]+j] !=
check[hs_offset[0]+i][hs_offset[1]+j]) {
puts("*FAILED*");
printf(" Read different values than written.\n");
printf(" At index %lu,%lu\n",
(unsigned long)(hs_offset[0]+i),
(unsigned long)(hs_offset[1]+j));
goto error;
}
}
}
puts (" PASSED");
/*----------------------------------------------------------------------
* STEP 7: Register an application-defined compression method and use it
* to write and then read the dataset.
*----------------------------------------------------------------------
*/
printf ("%-70s", "Testing compression (app-defined method)");
fflush (stdout);
H5Zregister (H5Z_BOGUS, "bogus", bogus, bogus);
H5Pset_compression (dc, H5Z_BOGUS, 0, 0, NULL);
H5Dclose (dataset);
H5Sclose (space);
space = H5Screate_simple (2, size, NULL);
dataset = H5Dcreate (file, DSET_BOGUS_NAME, H5T_NATIVE_INT, space, dc);
assert (dataset>=0);
status = H5Dwrite (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
xfer, points);
if (status<0) goto error;
status = H5Dread (dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
xfer, check);
if (status<0) goto error;
for (i=0; i<size[0]; i++) {
for (j=0; j<size[1]; j++) {
if (points[i][j] != check[i][j]) {
puts("*FAILED*");
printf(" Read different values than written.\n");
printf(" At index %lu,%lu\n",
(unsigned long)i, (unsigned long)j);
goto error;
}
}
}
puts (" PASSED");
/*----------------------------------------------------------------------
* Cleanup
*----------------------------------------------------------------------
*/
H5Dclose(dataset);
puts(" PASSED");
return 0;
error: