Fix for the two issues reported in HDFFV-10051:

(1) Repeated open/close of a compact dataset fails due to the increment of ndims in the dataset structure on every open.
    --Fix: call H5D__chunk_set_sizes() only for chunked datasets.

(2) The layout "dirty" flag for a compact dataset is not reset properly after flushing the data at dataset close.
    --Fix: reset the "dirty" flag before flushing the message to the object header via H5O_msg_write().

Tested on moohan, kituo, platypus, ostrich, osx1010test, quail, emu.
This commit is contained in:
parent  b8d68f3160
commit  f1c9163c91
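For context, the first issue can be reproduced at the API level simply by opening and closing a compact dataset many times. The following is a minimal standalone sketch modeled on the scenario exercised by the new test_compact_open_close_dirty() test in this commit (and on the compactoc.c reproducer it references); the file name, dataset name, and iteration count are illustrative and not part of this commit.

/* Sketch of the HDFFV-10051 issue #1 scenario: repeatedly open/close
 * a compact dataset.  Names and sizes are illustrative only. */
#include "hdf5.h"
#include <stdio.h>

int
main(void)
{
    hid_t   fid, sid, dcpl, did;
    hsize_t dims[1] = {10};
    int     wbuf[10], i;

    for(i = 0; i < 10; i++)
        wbuf[i] = i;

    /* Create a file holding one compact dataset */
    if((fid = H5Fcreate("compact_repro.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT)) < 0) goto error;
    if((sid = H5Screate_simple(1, dims, NULL)) < 0) goto error;
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0) goto error;
    if(H5Pset_layout(dcpl, H5D_COMPACT) < 0) goto error;
    if((did = H5Dcreate2(fid, "compact_dset", H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0) goto error;
    if(H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0) goto error;
    if(H5Dclose(did) < 0) goto error;

    /* Before this fix, one of these re-opens eventually failed because
     * ndims in the dataset structure was incremented on every open. */
    for(i = 0; i < 20; i++) {
        if((did = H5Dopen2(fid, "compact_dset", H5P_DEFAULT)) < 0) goto error;
        if(H5Dclose(did) < 0) goto error;
    }

    H5Pclose(dcpl);
    H5Sclose(sid);
    H5Fclose(fid);
    printf("repeated open/close succeeded\n");
    return 0;

error:
    fprintf(stderr, "repeated open/close failed\n");
    return 1;
}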
@@ -368,9 +368,9 @@ H5D__compact_flush(H5D_t *dset, hid_t dxpl_id)

    /* Check if the buffered compact information is dirty */
    if(dset->shared->layout.storage.u.compact.dirty) {
        dset->shared->layout.storage.u.compact.dirty = FALSE;
        if(H5O_msg_write(&(dset->oloc), H5O_LAYOUT_ID, 0, H5O_UPDATE_TIME, &(dset->shared->layout), dxpl_id) < 0)
            HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to update layout message")
        dset->shared->layout.storage.u.compact.dirty = FALSE;
    } /* end if */

done:
@@ -593,6 +593,7 @@ herr_t
H5D__layout_oh_read(H5D_t *dataset, hid_t dxpl_id, hid_t dapl_id, H5P_genplist_t *plist)
{
    htri_t msg_exists;                  /* Whether a particular type of message exists */
    hbool_t layout_copied = FALSE;      /* Flag to indicate that layout message was copied */
    herr_t ret_value = SUCCEED;         /* Return value */

    FUNC_ENTER_PACKAGE
@@ -622,6 +623,7 @@ H5D__layout_oh_read(H5D_t *dataset, hid_t dxpl_id, hid_t dapl_id, H5P_genplist_t
     */
    if(NULL == H5O_msg_read(&(dataset->oloc), H5O_LAYOUT_ID, &(dataset->shared->layout), dxpl_id))
        HGOTO_ERROR(H5E_DATASET, H5E_CANTINIT, FAIL, "unable to read data layout message")
    layout_copied = TRUE;

    /* Check for external file list message (which might not exist) */
    if((msg_exists = H5O_msg_exists(&(dataset->oloc), H5O_EFL_ID, dxpl_id)) < 0)
@@ -655,10 +657,16 @@ H5D__layout_oh_read(H5D_t *dataset, hid_t dxpl_id, hid_t dapl_id, H5P_genplist_t
        HGOTO_ERROR(H5E_DATASET, H5E_CANTSET, FAIL, "can't set layout")

    /* Set chunk sizes */
    if(H5D__chunk_set_sizes(dataset) < 0)
        HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "unable to set chunk sizes")
    if(H5D_CHUNKED == dataset->shared->layout.type) {
        if(H5D__chunk_set_sizes(dataset) < 0)
            HGOTO_ERROR(H5E_DATASET, H5E_BADVALUE, FAIL, "unable to set chunk sizes")
    }

done:
    if(ret_value < 0 && layout_copied) {
        if(H5O_msg_reset(H5O_LAYOUT_ID, &dataset->shared->layout) < 0)
            HDONE_ERROR(H5E_DATASET, H5E_CANTRESET, FAIL, "unable to reset layout info")
    }
    FUNC_LEAVE_NOAPI(ret_value)
} /* end H5D__layout_oh_read() */

119  test/dsets.c
@@ -25,13 +25,16 @@
#define H5FD_TESTING

#define H5Z_FRIEND /*suppress error about including H5Zpkg */
#define H5D_FRIEND /*suppress error about including H5Dpkg */

#include "h5test.h"
#include "H5srcdir.h"
#include "H5Dpkg.h"
#include "H5FDpkg.h"
#include "H5VMprivate.h"
#include "H5Iprivate.h"
#include "H5Zpkg.h"
#include "H5Dpkg.h"
#ifdef H5_HAVE_SZLIB_H
#   include "szlib.h"
#endif
@@ -12637,7 +12640,122 @@ error:

} /* dls_01_main() */


/*-------------------------------------------------------------------------
 * Function:    test_compact_open_close_dirty
 *
 * Purpose:     Verify that the two issues reported in HDFFV-10051 are fixed:
 *              (1) Repeated open/close of a compact dataset fails due to the
 *                  increment of ndims in the dataset structure for every open.
 *              (2) layout "dirty" flag for a compact dataset is not reset
 *                  properly after flushing the data at dataset close.
 *              The test for issue #1 is based on compactoc.c attached
 *              to the jira issue HDFFV-10051
 *
 * Return:      Success:    0
 *              Failure:    -1
 *
 * Programmer:  Vailin Choi; April 2017
 *
 *-------------------------------------------------------------------------
 */
static herr_t
test_compact_open_close_dirty(hid_t fapl)
{
    hid_t fid = -1;                     /* File ID */
    hid_t did = -1;                     /* Dataset ID */
    hid_t sid = -1;                     /* Dataspace ID */
    hid_t dcpl = -1;                    /* Dataset creation property list */
    hsize_t dims[1] = {10};             /* Dimension */
    int wbuf[10];                       /* Data buffer */
    char filename[FILENAME_BUF_SIZE];   /* Filename */
    int i;                              /* Local index variable */
    H5D_t *dset = NULL;                 /* Internal dataset pointer */

    TESTING("compact dataset repeated open/close and dirty flag");

    /* Create a file */
    h5_fixname(FILENAME[1], fapl, filename, sizeof filename);
    if((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
        TEST_ERROR

    /* Initialize data */
    for(i = 0; i < 10; i++)
        wbuf[i] = i;

    /* Create dataspace */
    if((sid = H5Screate_simple(1, dims, NULL)) < 0)
        TEST_ERROR

    /* Set compact layout */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR
    if(H5Pset_layout(dcpl, H5D_COMPACT) < 0)
        TEST_ERROR
    if(H5Pset_alloc_time(dcpl, H5D_ALLOC_TIME_EARLY) < 0)
        TEST_ERROR

    /* Create a compact dataset */
    if((did = H5Dcreate2(fid, DSET_COMPACT_MAX_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR

    /* Write to the dataset */
    if(H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf) < 0)
        TEST_ERROR

    /* Close the dataset */
    if(H5Dclose(did) < 0)
        TEST_ERROR

    /* Verify the repeated open/close of the dataset will not fail */
    for(i = 0; i < 20; i++) {
        H5E_BEGIN_TRY {
            did = H5Dopen(fid, DSET_COMPACT_MAX_NAME, H5P_DEFAULT);
        } H5E_END_TRY;
        if(did < 0)
            TEST_ERROR
        if(H5Dclose(did) < 0)
            TEST_ERROR
    }

    /* Open the dataset */
    if((did = H5Dopen2(fid, DSET_COMPACT_MAX_NAME, H5P_DEFAULT)) < 0)
        TEST_ERROR

    /* Get the internal dataset pointer */
    if(NULL == (dset = (H5D_t *)H5I_object_verify(did, H5I_DATASET)))
        TEST_ERROR

    /* Verify that the "dirty" flag is false */
    if(dset->shared->layout.storage.u.compact.dirty)
        TEST_ERROR

    /* Close the dataset */
    if(H5Dclose(did) < 0) TEST_ERROR

    /* Close the dataspace */
    if(H5Sclose(sid) < 0)
        TEST_ERROR

    /* Close the dataset creation property list */
    if(H5Pclose(dcpl) < 0)
        TEST_ERROR

    /* Close the file */
    if(H5Fclose(fid) < 0)
        TEST_ERROR

    PASSED();
    return 0;

error:
    H5E_BEGIN_TRY {
        H5Sclose(sid);
        H5Pclose(dcpl);
        H5Dclose(did);
        H5Fclose(fid);
    } H5E_END_TRY;
    return -1;
} /* test_compact_open_close_dirty() */


/*-------------------------------------------------------------------------
@@ -12759,6 +12877,7 @@ main(void)
        nerrors += (test_simple_io(envval, my_fapl) < 0 ? 1 : 0);
        nerrors += (test_compact_io(my_fapl) < 0 ? 1 : 0);
        nerrors += (test_max_compact(my_fapl) < 0 ? 1 : 0);
        nerrors += (test_compact_open_close_dirty(my_fapl) < 0 ? 1 : 0);
        nerrors += (test_conv_buffer(file) < 0 ? 1 : 0);
        nerrors += (test_tconv(file) < 0 ? 1 : 0);
        nerrors += (test_filters(file, my_fapl) < 0 ? 1 : 0);