[svn-r14738] Description:
Bring r14737 back from the 1.8 branch: Fix bug which would incorrectly
encode the member offsets for compound datatypes whose size was between
256 & 511 bytes, when the "use the latest format" feature was enabled.

Tested on:
    Mac OS X/32 10.5.2 (amazon) w/debug
    FreeBSD/32 6.2 (duty) w/production
parent 6763f7c882
commit afd5021ef9
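Why the affected range is exactly 256-511 bytes: the old computation, `(H5V_log2_gen(limit) + 7) / 8`, treats `H5V_log2_gen(limit)` (i.e. floor(log2(limit))) as if it were the number of bits required and rounds that up to bytes, but a limit of 256 needs 9 bits, not 8. What is actually needed is the number of bytes that can hold every value from 0 up to the limit (here the datatype size). For limits of 256 through 511 the old formula yields 1 byte where 2 are required. A minimal standalone sketch of the arithmetic (plain C written for this note, with `floor_log2` standing in for the library's `H5V_log2_gen`):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* floor(log2(n)) for n >= 1; stands in for H5V_log2_gen */
static unsigned floor_log2(uint64_t n)
{
    unsigned r = 0;
    while (n >>= 1)
        r++;
    return r;
}

/* Old byte-count formula used before this commit */
static unsigned old_enc_size(uint64_t limit)
{
    return (floor_log2(limit) + 7) / 8;
}

/* New formula, matching H5V_limit_enc_size() below */
static unsigned new_enc_size(uint64_t limit)
{
    return (floor_log2(limit) / 8) + 1;
}

int main(void)
{
    uint64_t sizes[] = {255, 256, 300, 511, 512};
    size_t   i;

    for (i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++)
        printf("size %4llu: old = %u byte(s), new = %u byte(s)\n",
               (unsigned long long)sizes[i],
               old_enc_size(sizes[i]), new_enc_size(sizes[i]));

    /* For 256..511 the old formula yields 1 byte, but values up to the
     * limit need 2 bytes; the new formula returns 2. */
    assert(old_enc_size(300) == 1 && new_enc_size(300) == 2);
    return 0;
}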
@@ -70,6 +70,11 @@ Bug Fixes since HDF5-1.8.0 release

 Library
 -------
+    - Fixed possible file corruption bug when encoding datatype
+        descriptions for compound datatypes whose size was between
+        256 & 511 bytes and the file was opened with the "use the
+        latest format" property enabled (with H5Pset_libver_bounds).
+        (QAK - 2008/03/13)
     - Fixed bug in H5Aget_num_attrs() routine to handle invalid location
         ID correctly. (QAK - 2008/03/11)
@@ -203,7 +203,7 @@ HDmemset(shared->page, 0, shared->node_size);

     /* Compute size to store # of records in each node */
     /* (uses leaf # of records because its the largest) */
-    shared->max_nrec_size = (H5V_log2_gen((uint64_t)shared->node_info[0].max_nrec) + 7) / 8;
+    shared->max_nrec_size = H5V_limit_enc_size((uint64_t)shared->node_info[0].max_nrec);
     HDassert(shared->max_nrec_size <= H5B2_SIZEOF_RECORDS_PER_NODE);

     /* Initialize internal node info */
@@ -217,7 +217,7 @@ HDmemset(shared->page, 0, shared->node_size);

         shared->node_info[u].cum_max_nrec = ((shared->node_info[u].max_nrec + 1) *
             shared->node_info[u - 1].cum_max_nrec) + shared->node_info[u].max_nrec;
-        shared->node_info[u].cum_max_nrec_size = (H5V_log2_gen((uint64_t)shared->node_info[u].cum_max_nrec) + 7) / 8;
+        shared->node_info[u].cum_max_nrec_size = H5V_limit_enc_size((uint64_t)shared->node_info[u].cum_max_nrec);

         if((shared->node_info[u].nat_rec_fac = H5FL_fac_init(shared->type->nrec_size * shared->node_info[u].max_nrec)) == NULL)
             HGOTO_ERROR(H5E_RESOURCE, H5E_CANTINIT, FAIL, "can't create node native key block factory")
@@ -576,7 +576,7 @@ H5B2_split_root(H5F_t *f, hid_t dxpl_id, H5B2_t *bt2, unsigned *bt2_flags_ptr)
     shared->node_info[shared->depth].merge_nrec = (shared->node_info[shared->depth].max_nrec * shared->merge_percent) / 100;
     shared->node_info[shared->depth].cum_max_nrec = ((shared->node_info[shared->depth].max_nrec + 1) *
         shared->node_info[shared->depth - 1].cum_max_nrec) + shared->node_info[shared->depth].max_nrec;
-    shared->node_info[shared->depth].cum_max_nrec_size = (H5V_log2_gen((uint64_t)shared->node_info[shared->depth].cum_max_nrec) + 7) / 8;
+    shared->node_info[shared->depth].cum_max_nrec_size = H5V_limit_enc_size((uint64_t)shared->node_info[shared->depth].cum_max_nrec);
     if((shared->node_info[shared->depth].nat_rec_fac = H5FL_fac_init(shared->type->nrec_size * shared->node_info[shared->depth].max_nrec)) == NULL)
         HGOTO_ERROR(H5E_RESOURCE, H5E_CANTINIT, FAIL, "can't create node native key block factory")
     if((shared->node_info[shared->depth].node_ptr_fac = H5FL_fac_init(sizeof(H5B2_node_ptr_t) * (shared->node_info[shared->depth].max_nrec + 1))) == NULL)
@@ -606,7 +606,7 @@ HDfprintf(stderr, "%s: fspace->addr = %a, fs_addr = %a\n", FUNC, fspace->addr, f
        unsigned sect_cnt_size;         /* The size of the section size counts */

        /* Compute the size of the section counts */
-       sect_cnt_size = MAX(1, (H5V_log2_gen(fspace->serial_sect_count) + 7) / 8);
+       sect_cnt_size = H5V_limit_enc_size((uint64_t)fspace->serial_sect_count);
 #ifdef QAK
 HDfprintf(stderr, "%s: sect_cnt_size = %u\n", FUNC, sect_cnt_size);
 HDfprintf(stderr, "%s: fspace->sect_len_size = %u\n", FUNC, fspace->sect_len_size);
@@ -905,7 +905,7 @@ HDfprintf(stderr, "%s: sinfo->fspace->addr = %a\n", FUNC, sinfo->fspace->addr);
     /* Set up user data for iterator */
     udata.sinfo = sinfo;
     udata.p = &p;
-    udata.sect_cnt_size = MAX(1, (H5V_log2_gen(sinfo->fspace->serial_sect_count) + 7) / 8);
+    udata.sect_cnt_size = H5V_limit_enc_size((uint64_t)sinfo->fspace->serial_sect_count);
 #ifdef QAK
 HDfprintf(stderr, "%s: udata.sect_cnt_size = %u\n", FUNC, udata.sect_cnt_size);
 #endif /* QAK */
@@ -147,7 +147,7 @@ H5FS_sinfo_new(H5F_t *f, H5FS_t *fspace)
     sinfo->nbins = H5V_log2_gen(fspace->max_sect_size);
     sinfo->sect_prefix_size = H5FS_SINFO_PREFIX_SIZE(f);
     sinfo->sect_off_size = (fspace->max_sect_addr + 7) / 8;
-    sinfo->sect_len_size = (H5V_log2_gen(fspace->max_sect_size) + 7) / 8;
+    sinfo->sect_len_size = H5V_limit_enc_size((uint64_t)fspace->max_sect_size);
    sinfo->fspace = fspace;
 #ifdef QAK
 HDfprintf(stderr, "%s: sinfo->nbins = %u\n", FUNC, sinfo->nbins);
@@ -1300,7 +1300,7 @@ HDfprintf(stderr, "%s: fspace->sinfo->serial_size_count = %Zu\n", FUNC, fspace->
 HDfprintf(stderr, "%s: fspace->sinfo->serial_size_count = %Zu\n", FUNC, fspace->sinfo->serial_size_count);
 HDfprintf(stderr, "%s: fspace->serial_sect_count = %Hu\n", FUNC, fspace->serial_sect_count);
 #endif /* QAK */
-        sect_buf_size += fspace->sinfo->serial_size_count * MAX(1, ((H5V_log2_gen(fspace->serial_sect_count) + 7) / 8));
+        sect_buf_size += fspace->sinfo->serial_size_count * H5V_limit_enc_size((uint64_t)fspace->serial_sect_count);

        /* Size for each differently sized serializable section */
        sect_buf_size += fspace->sinfo->serial_size_count * fspace->sinfo->sect_len_size;
@@ -225,7 +225,7 @@ H5HF_hdr_finish_init_phase1(H5HF_hdr_t *hdr)

    /* Set the size of heap IDs */
    hdr->heap_len_size = MIN(hdr->man_dtable.max_dir_blk_off_size,
-            ((H5V_log2_gen((uint64_t)hdr->max_man_size) + 7) / 8));
+            H5V_limit_enc_size((uint64_t)hdr->max_man_size));

done:
    FUNC_LEAVE_NOAPI(ret_value)
@@ -232,10 +232,11 @@ H5O_dtype_decode_helper(H5F_t *f, const uint8_t **pp, H5T_t *dt)
         case H5T_COMPOUND:
             {
                 unsigned offset_nbytes;         /* Size needed to encode member offsets */
+                size_t max_memb_pos = 0;        /* Maximum member covered, so far */
                 unsigned j;

                 /* Compute the # of bytes required to store a member offset */
-                offset_nbytes = (H5V_log2_gen((uint64_t)dt->shared->size) + 7) / 8;
+                offset_nbytes = H5V_limit_enc_size((uint64_t)dt->shared->size);

                 /*
                  * Compound datatypes...
@@ -336,6 +337,18 @@ H5O_dtype_decode_helper(H5F_t *f, const uint8_t **pp, H5T_t *dt)
                 /* Set the field datatype (finally :-) */
                 dt->shared->u.compnd.memb[i].type = temp_type;

+                /* Check if this field overlaps with a prior field */
+                /* (probably indicates that the file is corrupt) */
+                if(i > 0 && dt->shared->u.compnd.memb[i].offset < max_memb_pos) {
+                    for(j = 0; j < i; j++)
+                        if(dt->shared->u.compnd.memb[i].offset >= dt->shared->u.compnd.memb[j].offset
+                                && dt->shared->u.compnd.memb[i].offset < (dt->shared->u.compnd.memb[j].offset + dt->shared->u.compnd.memb[j].size))
+                            HGOTO_ERROR(H5E_DATATYPE, H5E_CANTDECODE, FAIL, "member overlaps with previous member")
+                } /* end if */
+
+                /* Update the maximum member position covered */
+                max_memb_pos = MAX(max_memb_pos, (dt->shared->u.compnd.memb[i].offset + dt->shared->u.compnd.memb[i].size));
+
                 /* Check if the datatype stayed packed */
                 if(dt->shared->u.compnd.packed) {
                     /* Check if the member type is packed */
@@ -734,7 +747,7 @@ H5O_dtype_encode_helper(const H5F_t *f, uint8_t **pp, const H5T_t *dt)
                 unsigned offset_nbytes;         /* Size needed to encode member offsets */

                 /* Compute the # of bytes required to store a member offset */
-                offset_nbytes = (H5V_log2_gen((uint64_t)dt->shared->size) + 7) / 8;
+                offset_nbytes = H5V_limit_enc_size((uint64_t)dt->shared->size);

                 /*
                  * Compound datatypes...
@@ -1097,7 +1110,7 @@ H5O_dtype_size(const H5F_t *f, const void *_mesg)
                 unsigned offset_nbytes;         /* Size needed to encode member offsets */

                 /* Compute the # of bytes required to store a member offset */
-                offset_nbytes = (H5V_log2_gen((uint64_t)dt->shared->size) + 7) / 8;
+                offset_nbytes = H5V_limit_enc_size((uint64_t)dt->shared->size);

                 /* Compute the total size needed to encode compound datatype */
                 for(u = 0; u < dt->shared->u.compnd.nmembs; u++) {
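Why a one-byte shortfall corrupts the encoded type: with the "latest" compound format, each member offset is written in exactly `offset_nbytes` bytes, so an offset of, say, 300 squeezed into a single byte survives only modulo 256 and reads back as a different offset. A hedged illustration of that failure mode (generic little-endian helpers written for this note, not the library's actual encode/decode macros):

#include <assert.h>
#include <stdint.h>

/* Write the low 'nbytes' bytes of 'val', little-endian.
 * Illustrative stand-in for a variable-width encode step. */
static void encode_var(uint8_t *buf, uint64_t val, unsigned nbytes)
{
    unsigned i;
    for (i = 0; i < nbytes; i++)
        buf[i] = (uint8_t)(val >> (8 * i));
}

/* Read 'nbytes' bytes back into an integer, little-endian. */
static uint64_t decode_var(const uint8_t *buf, unsigned nbytes)
{
    uint64_t val = 0;
    unsigned i;
    for (i = 0; i < nbytes; i++)
        val |= (uint64_t)buf[i] << (8 * i);
    return val;
}

int main(void)
{
    uint8_t  buf[8] = {0};
    uint64_t offset = 300;      /* member offset in a ~400-byte compound */

    /* Old sizing: 1 byte for a 256-511 byte datatype -> offset truncated */
    encode_var(buf, offset, 1);
    assert(decode_var(buf, 1) == 44);   /* 300 % 256 */

    /* New sizing (2 bytes for this range) -> offset round-trips intact */
    encode_var(buf, offset, 2);
    assert(decode_var(buf, 2) == 300);
    return 0;
}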
@@ -410,5 +410,25 @@ H5V_log2_of2(uint32_t n)
     return(MultiplyDeBruijnBitPosition[(n * (uint32_t)0x077CB531UL) >> 27]);
 } /* H5V_log2_of2() */

+
+/*-------------------------------------------------------------------------
+ * Function:    H5V_limit_enc_size
+ *
+ * Purpose:     Determine the # of bytes needed to encode values within a
+ *              range from 0 to a given limit
+ *
+ * Return:      Number of bytes needed
+ *
+ * Programmer:  Quincey Koziol
+ *              Thursday, March 13, 2008
+ *
+ *-------------------------------------------------------------------------
+ */
+static H5_inline unsigned UNUSED
+H5V_limit_enc_size(uint64_t limit)
+{
+    return (H5V_log2_gen(limit) / 8) + 1;
+} /* end H5V_limit_enc_size() */
+
 #endif /* H5Vprivate_H */
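A quick way to see that the new expression matches the stated purpose: for any limit, the returned byte count must be able to represent every value from 0 to the limit, and (beyond the mandatory single byte) it should not be larger than necessary. A small self-contained property check, again using a portable `floor_log2` in place of `H5V_log2_gen`:

#include <assert.h>
#include <stdint.h>

static unsigned floor_log2(uint64_t n)      /* floor(log2(n)), n >= 1 */
{
    unsigned r = 0;
    while (n >>= 1)
        r++;
    return r;
}

static unsigned limit_enc_size(uint64_t limit)  /* mirrors H5V_limit_enc_size */
{
    return (floor_log2(limit) / 8) + 1;
}

int main(void)
{
    uint64_t limit;

    for (limit = 1; limit < (uint64_t)1 << 20; limit++) {
        unsigned nbytes = limit_enc_size(limit);

        /* 'limit' must fit in 'nbytes' bytes (guard avoids a 64-bit shift) */
        assert(nbytes == 8 || limit < ((uint64_t)1 << (8 * nbytes)));
        /* ...and must not already fit in one byte fewer, except that at
         * least one byte is always used */
        assert(nbytes == 1 || limit >= ((uint64_t)1 << (8 * (nbytes - 1))));
    }
    return 0;
}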
 test/dtypes.c | 108
@@ -90,6 +90,10 @@ typedef enum dtype_t {
 /* Constant for size of conversion buffer for int <-> float exception test */
 #define CONVERT_SIZE    4

+/* Constants for compound_13 test */
+#define COMPOUND13_ARRAY_SIZE   256
+#define COMPOUND13_ATTR_NAME    "attr"
+
 /* Count opaque conversions */
 static int num_opaque_conversions_g = 0;
@@ -2172,6 +2176,107 @@ test_compound_12(void)
     return 1;
 }

+
+/*-------------------------------------------------------------------------
+ * Function:    test_compound_13
+ *
+ * Purpose:     Tests compound datatypes whose size is at the boundary for
+ *              needing 2 bytes for the datatype size and "use the latest
+ *              format" flag is enabled so that the size of the offsets uses
+ *              the smallest # of bytes possible.
+ *
+ * Return:      Success:        0
+ *              Failure:        number of errors
+ *
+ * Programmer:  Quincey Koziol
+ *              Thursday, March 13, 2008
+ *
+ *-------------------------------------------------------------------------
+ */
+static int
+test_compound_13(void)
+{
+    struct s1 {
+        unsigned char x[COMPOUND13_ARRAY_SIZE + 1];
+        float y;
+    };
+    struct s1   data_out, data_in;
+    hid_t       fileid, grpid, typeid, array1_tid, spaceid, attid;
+    hid_t       fapl_id;
+    hsize_t     dims[1] = {COMPOUND13_ARRAY_SIZE + 1};
+    char        filename[1024];
+    unsigned    u;
+
+    TESTING("compound datatypes of boundary size with latest format");
+
+    /* Create some phony data. */
+    for(u = 0; u < COMPOUND13_ARRAY_SIZE + 1; u++)
+        data_out.x[u] = u;
+    data_out.y = 99.99;
+
+    /* Set latest_format in access property list to enable the latest
+     * compound datatype format.
+     */
+    if((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) FAIL_STACK_ERROR
+    if(H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0) FAIL_STACK_ERROR
+
+    /* Open file and get root group. */
+    h5_fixname(FILENAME[4], H5P_DEFAULT, filename, sizeof filename);
+    if((fileid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) FAIL_STACK_ERROR
+    if((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+
+    /* Create a compound type. */
+    if((typeid = H5Tcreate(H5T_COMPOUND, sizeof(struct s1))) < 0) FAIL_STACK_ERROR
+    if((array1_tid = H5Tarray_create2(H5T_NATIVE_UCHAR, 1, dims)) < 0) FAIL_STACK_ERROR
+    if(H5Tinsert(typeid, "x", HOFFSET(struct s1, x), array1_tid) < 0) FAIL_STACK_ERROR
+    if(H5Tinsert(typeid, "y", HOFFSET(struct s1, y), H5T_NATIVE_FLOAT) < 0) FAIL_STACK_ERROR
+
+    /* Create a space. */
+    if((spaceid = H5Screate(H5S_SCALAR)) < 0) FAIL_STACK_ERROR
+
+    /* Create an attribute of this compound type. */
+    if((attid = H5Acreate2(grpid, COMPOUND13_ATTR_NAME, typeid, spaceid, H5P_DEFAULT, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+
+    /* Write some data. */
+    if(H5Awrite(attid, typeid, &data_out) < 0) FAIL_STACK_ERROR
+
+    /* Release all resources. */
+    if(H5Aclose(attid) < 0) FAIL_STACK_ERROR
+    if(H5Tclose(array1_tid) < 0) FAIL_STACK_ERROR
+    if(H5Tclose(typeid) < 0) FAIL_STACK_ERROR
+    if(H5Sclose(spaceid) < 0) FAIL_STACK_ERROR
+    if(H5Gclose(grpid) < 0) FAIL_STACK_ERROR
+    if(H5Fclose(fileid) < 0) FAIL_STACK_ERROR
+    if(H5Pclose(fapl_id) < 0) FAIL_STACK_ERROR
+
+    /* Now open the file and read it. */
+    if((fileid = H5Fopen(filename, H5F_ACC_RDONLY, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+    if((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+    if((attid = H5Aopen(grpid, COMPOUND13_ATTR_NAME, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR
+    if((typeid = H5Aget_type(attid)) < 0) FAIL_STACK_ERROR
+    if(H5Tget_class(typeid) != H5T_COMPOUND) FAIL_STACK_ERROR
+    if(HOFFSET(struct s1, x) != H5Tget_member_offset(typeid, 0)) TEST_ERROR
+    if(HOFFSET(struct s1, y) != H5Tget_member_offset(typeid, 1)) TEST_ERROR
+    if(H5Aread(attid, typeid, &data_in) < 0) FAIL_STACK_ERROR
+
+    /* Check the data. */
+    for (u = 0; u < COMPOUND13_ARRAY_SIZE + 1; u++)
+        if(data_out.x[u] != data_in.x[u]) TEST_ERROR
+    if(data_out.y != data_in.y) TEST_ERROR
+
+    /* Release all resources. */
+    if(H5Aclose(attid) < 0) FAIL_STACK_ERROR
+    if(H5Tclose(typeid) < 0) FAIL_STACK_ERROR
+    if(H5Gclose(grpid) < 0) FAIL_STACK_ERROR
+    if(H5Fclose(fileid) < 0) FAIL_STACK_ERROR
+
+    PASSED();
+    return 0;
+
+error:
+    return 1;
+} /* end test_compound_13() */
+

 /*-------------------------------------------------------------------------
  * Function:    test_query
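For reference, the reason this particular struct exercises the bug: the `x` array holds COMPOUND13_ARRAY_SIZE + 1 = 257 bytes, so with the trailing `float` (plus whatever padding the compiler adds) `sizeof(struct s1)` falls in the 256-511 byte window where the old offset-size formula undercounted. A hedged sanity check, assuming the same struct layout as the test:

#include <assert.h>

#define COMPOUND13_ARRAY_SIZE 256   /* mirrors the constant added above */

struct s1 {
    unsigned char x[COMPOUND13_ARRAY_SIZE + 1];
    float y;
};

int main(void)
{
    /* 257 bytes of 'x' plus a float (plus padding) stays well inside the
     * 256-511 byte range that triggered the encoding bug. */
    assert(sizeof(struct s1) >= 256 && sizeof(struct s1) <= 511);
    return 0;
}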
@@ -4703,7 +4808,7 @@ test_deprec(hid_t fapl)

     /* Create an array datatype with an atomic base type */
     /* (dimension permutations allowed, but not stored) */
-    if((type = H5Tarray_create1(H5T_NATIVE_INT, rank, dims, perm)) < 0)
+    if((type = H5Tarray_create1(H5T_NATIVE_INT, (int)rank, dims, perm)) < 0)
         FAIL_STACK_ERROR

     /* Make certain that the correct classes can be detected */
@@ -4862,6 +4967,7 @@ main(void)
     nerrors += test_compound_10();
     nerrors += test_compound_11();
     nerrors += test_compound_12();
+    nerrors += test_compound_13();
     nerrors += test_conv_enum_1();
     nerrors += test_conv_enum_2();
     nerrors += test_conv_bitfield();