Mirror of https://github.com/HDFGroup/hdf5.git (synced 2025-04-12 17:31:09 +08:00)
[svn-r11771]

Purpose:
    Code cleanup

Description:
    Fix a bunch of warnings flagged by Windows compilers.

Platforms tested:
    FreeBSD 4.11 (sleipnir)
    Too minor to require h5committest
This commit is contained in:
    parent bfa65eed0f
    commit 88c15b1617
@@ -1022,7 +1022,6 @@ test_attr_dtype_shared(void)
     int data=8;                 /* Data to write */
     int rdata=0;                /* Read read in */
     H5G_stat_t statbuf;         /* Object's information */
     off_t empty_filesize;       /* Size of empty file */
     off_t filesize;             /* Size of file after modifications */

     /* Output message about test being performed */
@@ -91,9 +91,6 @@ nh5dopen_c (hid_t_f *loc_id, _fcd name, int_f *namelen, hid_t_f *dset_id)
     int c_namelen;
     hid_t c_loc_id;
     hid_t c_dset_id;
-    hid_t plist;
-    off_t offset;
-    hsize_t size;

     /*
      * Convert FORTRAN name to C name
@@ -320,7 +320,8 @@ nh5sget_select_elem_pointlist_c( hid_t_f *space_id ,hsize_t_f * startpoint,
     hid_t c_space_id;
     hsize_t c_num_points;
     hsize_t c_startpoint,* c_buf;
-    int i, rank;
+    int rank;
+    hsize_t i;

     c_space_id = *space_id;
     c_num_points = (hsize_t)* numpoints;
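The split of `int i, rank;` is the recurring loop-counter fix in this commit: the counter now has the same width and signedness as the hsize_t bound it runs to, so comparisons like `i < c_num_points` no longer mix a signed 32-bit int with an unsigned 64-bit count. A minimal sketch of the warning and its fix, with uint64_t standing in for hsize_t (an assumption; hsize_t itself needs the HDF5 headers):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t c_num_points = 4;          /* stand-in for an hsize_t count */

        /* With `int i`, `i < c_num_points` compares signed 32-bit against
         * unsigned 64-bit, which Windows compilers flag; giving the counter
         * the bound's own type removes the warning. */
        for (uint64_t i = 0; i < c_num_points; i++)
            printf("point %llu\n", (unsigned long long)i);

        return 0;
    }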
@@ -88,7 +88,7 @@
  * is set to 0 on success. On failure, returns 0 and
  * error is set to negative.
  */
-unsigned int PacketTable::GetPacketCount(int& error)
+hsize_t PacketTable::GetPacketCount(int& error)
 {
     hsize_t npackets;

@@ -75,7 +75,7 @@ public:
  * is set to 0 on success. On failure, returns 0 and
  * error is set to negative.
  */
-    unsigned int GetPacketCount(int& error);
+    hsize_t GetPacketCount(int& error);

     unsigned int GetPacketCount()
     {
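Both packet-table hunks widen the same API: the packet count lives in an hsize_t, so returning it through unsigned int would truncate once a table passes 2^32 records, and the implicit narrowing on return is exactly what the Windows compiler warns about. A standalone C sketch of the failure mode, again with uint64_t standing in for hsize_t:

    #include <stdint.h>
    #include <stdio.h>

    static uint64_t npackets = 5000000000ULL;   /* > UINT32_MAX */

    /* Old shape of the API: the 64-bit count is narrowed on return. */
    static unsigned int count_narrow(void) { return (unsigned int)npackets; }

    /* Fixed shape: the wide type is carried all the way to the caller. */
    static uint64_t count_wide(void) { return npackets; }

    int main(void)
    {
        printf("narrow: %u\n", count_narrow());    /* 705032704: wrong */
        printf("wide:   %llu\n", (unsigned long long)count_wide());
        return 0;
    }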
@@ -15,6 +15,7 @@
 /* This files contains C stubs for H5D Fortran APIs */

 #include "H5IM.h"
 #include "H5IMcc.h"
 #include "H5LTf90proto.h"
 #include "../../../fortran/src/H5f90i_gen.h"

@@ -674,9 +674,9 @@ nh5tbget_field_info_c(hid_t_f *loc_id,
 /* return values*/

 /* names array */
-    tmp = (char *)malloc(c_lenmax* (hsize_t_f) c_nfields + 1);
+    tmp = (char *)malloc(c_lenmax* (size_t) c_nfields + 1);
     tmp_p = tmp;
-    memset(tmp,' ', c_lenmax* (hsize_t_f) c_nfields);
+    memset(tmp,' ', c_lenmax* (size_t) c_nfields);
     tmp[c_lenmax*c_nfields] = '\0';
     for (i=0; i < c_nfields; i++) {
         memcpy(tmp_p, c_field_names[i], strlen(c_field_names[i]));
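malloc takes a size_t, so computing the allocation size in a wider Fortran-interop type like hsize_t_f draws a truncation warning on 32-bit Windows; casting the count to size_t first keeps the arithmetic in the allocator's own type. A sketch of the pattern with stand-in types (hsize_t_f is HDF5's Fortran-facing integer, approximated here by int64_t):

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>

    int main(void)
    {
        int64_t nfields = 3;        /* stand-in for the hsize_t_f count */
        size_t  lenmax  = 64;

        /* Keep the size computation in size_t, the type malloc expects;
         * the explicit cast documents the narrowing. */
        char *tmp = (char *)malloc(lenmax * (size_t)nfields + 1);
        if (tmp == NULL)
            return 1;
        memset(tmp, ' ', lenmax * (size_t)nfields);
        tmp[lenmax * (size_t)nfields] = '\0';

        free(tmp);
        return 0;
    }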
@@ -2003,7 +2003,6 @@ hid_t H5LTtext_to_dtype(const char *text)
 {
     extern int yyparse(void);
     hid_t type_id;
-    hsize_t i;

     input_len = strlen(text);
     myinput = strdup(text);
@@ -13,6 +13,7 @@
  * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

 #include <stdlib.h>
 #include <string.h>

 #include "H5LT.h"

@@ -280,7 +280,7 @@ H5BP_create_leaf(H5F_t *f, hid_t dxpl_id, H5RC_t *bpt_shared, H5BP_node_ptr_t *n

     /* Create record pool for leaf node */
     if((leaf->rec_pool = H5MP_create(H5BP_LEAF_POOL_PAGE, H5MP_FLG_DEFAULT)) == NULL)
-        HGOTO_ERROR(H5E_BTREE, H5E_NOSPACE, NULL, "can't allocate memory pool")
+        HGOTO_ERROR(H5E_BTREE, H5E_NOSPACE, FAIL, "can't allocate memory pool")

     /* Set number of records */
     leaf->nrec=0;
@@ -323,8 +323,10 @@ H5BP_insert_leaf(H5F_t *f, hid_t dxpl_id, H5RC_t *bpt_shared,
 {
     H5BP_leaf_t *leaf;          /* Pointer to leaf node */
     H5BP_shared_t *shared;      /* Pointer to B+ tree's shared information */
+#ifdef LATER
     int cmp;                    /* Comparison value of records */
     unsigned idx;               /* Location of record which matches key */
+#endif /* LATER */
     herr_t ret_value = SUCCEED;

     FUNC_ENTER_NOAPI_NOINIT(H5BP_insert_leaf)
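cmp and idx are declared for insertion logic that does not exist yet, and an unreferenced local is one of the noisiest Windows warnings (C4101). Fencing the declarations in #ifdef LATER, an HDF5 convention for parked code, lets the preprocessor drop them until they are needed. A trivial illustration:

    #include <stdio.h>

    int main(void)
    {
    #ifdef LATER
        int cmp;            /* comparison result, for the future insert path */
        unsigned idx;       /* match location, likewise unused for now */
    #endif /* LATER */

        puts("no unused-variable warning: the block compiles away");
        return 0;
    }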
@@ -1031,7 +1031,8 @@ H5D_contig_copy(H5F_t *f_src, H5O_layout_t *layout_src,

     /* Set up number of bytes to copy, and initial buffer size */
     total_nbytes = layout_src->u.contig.size;
-    buf_size = MIN(H5D_XFER_MAX_TEMP_BUF_DEF, total_nbytes);
+    H5_CHECK_OVERFLOW(total_nbytes,hsize_t,size_t);
+    buf_size = MIN(H5D_XFER_MAX_TEMP_BUF_DEF, (size_t)total_nbytes);

     /* If there's a source datatype, set up type conversion information */
     if(dt_src) {
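This is the commit's main idiom, check-then-cast: H5_CHECK_OVERFLOW (from H5private.h) verifies in debug builds that the hsize_t value actually fits in size_t, and the explicit (size_t) cast then tells the compiler the narrowing is deliberate. A simplified stand-in for the macro so the shape is visible outside the HDF5 tree; the real macro is more elaborate, so this is illustrative only:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Simplified stand-in for H5_CHECK_OVERFLOW(var, vartype, casttype):
     * assert (in debug builds) that the narrowing cast loses no bits;
     * with NDEBUG it compiles away entirely. */
    #define CHECK_OVERFLOW(var, vartype, casttype)          \
        do {                                                \
            casttype _tmp = (casttype)(var);                \
            assert((vartype)_tmp == (var));                 \
        } while (0)

    static size_t as_size_t(uint64_t nbytes)    /* uint64_t ~ hsize_t */
    {
        CHECK_OVERFLOW(nbytes, uint64_t, size_t);
        return (size_t)nbytes;  /* explicit cast: the warning is silenced */
    }

    int main(void)
    {
        printf("%zu\n", as_size_t(4096));
        return 0;
    }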
@@ -875,8 +875,8 @@ H5D_istore_iter_allocated (H5F_t UNUSED *f, hid_t UNUSED dxpl_id, const void *_l
  *
  * Failure: Negative
  *
- * Programmer: Robb Matzke
- *             Wednesday, April 21, 1999
+ * Programmer: Kent Yang
+ *             Tuesday, November 15, 2005
  *
  *-------------------------------------------------------------------------
  */
@@ -2356,27 +2356,25 @@ H5D_istore_allocated(H5D_t *dset, hid_t dxpl_id)
 done:
     FUNC_LEAVE_NOAPI(ret_value)
 } /* end H5D_istore_allocated() */


 /*-------------------------------------------------------------------------
  * Function: H5D_istore_chunkmap
  *
  * Purpose: obtain the chunk address and corresponding chunk index
  *
- *
- *
  * Return: Success: Non-negative on succeed.
  *
  *         Failure: negative value
  *
- * Programmer:
- *
+ * Programmer: Kent Yang
+ *             November 15, 2005
  *
  *-------------------------------------------------------------------------
  */
 herr_t
 H5D_istore_chunkmap(const H5D_io_info_t *io_info, hsize_t total_chunks,haddr_t chunk_addr[],hsize_t down_chunks[])
 {
     H5D_t *dset=io_info->dset;          /* Local pointer to dataset info */
     const H5D_rdcc_t *rdcc = &(dset->shared->cache.chunk);      /*raw data chunk cache */
     H5D_rdcc_ent_t *ent;                /*cache entry */
@@ -2425,8 +2423,7 @@ H5D_istore_chunkmap(const H5D_io_info_t *io_info, hsize_t total_chunks,haddr_t c

 done:
     FUNC_LEAVE_NOAPI(ret_value)
-} /* end H5D_istore_allocated() */
-
+} /* end H5D_istore_chunkmap() */


 /*-------------------------------------------------------------------------
@@ -195,7 +195,8 @@ H5G_link_build_table(H5O_loc_t *oloc, hid_t dxpl_id, H5G_link_table_t *ltable)
         HGOTO_ERROR(H5E_SYM, H5E_BADMESG, FAIL, "can't get link info")

     /* Set size of table */
-    ltable->nlinks = linfo.nlinks;
+    H5_CHECK_OVERFLOW(linfo.nlinks, hsize_t, size_t);
+    ltable->nlinks = (size_t)linfo.nlinks;

     /* Allocate space for the table entries */
     if(ltable->nlinks > 0) {
@@ -675,7 +675,8 @@ H5G_obj_remove(H5O_loc_t *oloc, const char *name, H5G_obj_t *obj_type, hid_t dxp
     udata.heap_addr = stab.heap_addr;
     udata.lnk_table = lnk_table;
     udata.nlinks = 0;
-    udata.max_links = linfo.nlinks;
+    H5_CHECK_OVERFLOW(linfo.nlinks, hsize_t, size_t);
+    udata.max_links = (size_t)linfo.nlinks;

     /* Iterate over the group members, building a table of equivalent link messages */
     if((ret_value = H5B_iterate(oloc->file, dxpl_id, H5B_SNODE,
@@ -4303,8 +4303,10 @@ H5O_copy_header_real(const H5O_loc_t *oloc_src,
     addr_map->is_locked = FALSE;

     /* Increment object header's reference count, if any descendents have created links to link to this object */
-    if(addr_map->inc_ref_count)
-        oh_dst->nlink += addr_map->inc_ref_count;
+    if(addr_map->inc_ref_count) {
+        H5_CHECK_OVERFLOW(addr_map->inc_ref_count, hsize_t, int);
+        oh_dst->nlink += (int)addr_map->inc_ref_count;
+    } /* end if */

     /* Insert destination object header in cache */
     if(H5AC_set(oloc_dst->file, dxpl_id, H5AC_OHDR, oloc_dst->addr, oh_dst, H5AC__DIRTIED_FLAG) < 0)
@@ -472,9 +472,9 @@ H5Z_class_t H5Z_SCALEOFFSET[1] = {{
    if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */        \
       H5Z_scaleoffset_get_filval_1(i, type, filval_buf, filval)                   \
       for(i = 0; i < d_nelmts; i++)                                               \
-         buf[i] = (buf[i] == (((type)1 << minbits) - 1))?filval:(buf[i] + minval);\
+         buf[i] = (type)((buf[i] == (((type)1 << minbits) - 1))?filval:(buf[i] + minval));\
    } else /* fill value undefined */                                              \
-      for(i = 0; i < d_nelmts; i++) buf[i] += minval; \
+      for(i = 0; i < d_nelmts; i++) buf[i] += (type)(minval); \
 }

 /* Postdecompress for signed integer type */
@@ -485,9 +485,9 @@ H5Z_class_t H5Z_SCALEOFFSET[1] = {{
    if(filavail == H5Z_SCALEOFFSET_FILL_DEFINED) { /* fill value defined */        \
       H5Z_scaleoffset_get_filval_1(i, type, filval_buf, filval)                   \
       for(i = 0; i < d_nelmts; i++)                                               \
-         buf[i] = ((unsigned type)buf[i] == (((unsigned type)1 << minbits) - 1)) ? filval : (buf[i] + minval);\
+         buf[i] = (type)(((unsigned type)buf[i] == (((unsigned type)1 << minbits) - 1)) ? filval : (buf[i] + minval));\
    } else /* fill value undefined */                                              \
-      for(i = 0; i < d_nelmts; i++) buf[i] += minval; \
+      for(i = 0; i < d_nelmts; i++) buf[i] += (type)(minval); \
 }

 /* Retrive minimum value of floating-point type */
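The two macro hunks fix the same C subtlety: in `buf[i] + minval`, both operands are promoted to int, so storing the sum back into a narrower buf element is an implicit int-to-small-type conversion (MSVC C4244). Because this code is generated for many element types through a macro, the fix casts the whole expression back to the macro's `type` parameter. The effect in miniature:

    #include <stdio.h>

    int main(void)
    {
        unsigned char buf[4] = {1, 2, 3, 4};
        unsigned char minval = 10;

        for (int i = 0; i < 4; i++)
            /* buf[i] + minval is computed as int (integer promotion);
             * the cast back to unsigned char marks the narrowing as
             * intentional instead of leaving C4244 to fire. */
            buf[i] = (unsigned char)(buf[i] + minval);

        printf("%u %u %u %u\n", buf[0], buf[1], buf[2], buf[3]);
        return 0;
    }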
@@ -1135,7 +1135,7 @@ H5Z_filter_scaleoffset (unsigned flags, size_t cd_nelmts, const unsigned cd_valu
       ((unsigned char *)outbuf)[4] = sizeof(unsigned long_long);

       for(i = 0; i < sizeof(unsigned long_long); i++)
-         ((unsigned char *)outbuf)[5+i] = (minval & ((unsigned long_long)0xff << i*8)) >> i*8;
+         ((unsigned char *)outbuf)[5+i] = (unsigned char)((minval & ((unsigned long_long)0xff << i*8)) >> i*8);

       /* special case: minbits equal to full precision */
       if(minbits == p.size * 8) {
test/dsets.c (78 changed lines)
@@ -2572,8 +2572,8 @@ test_nbit_int(hid_t file)
     /* Initialize data, assuming size of long_long >= size of int */
     for (i= 0;i< size[0]; i++)
       for (j = 0; j < size[1]; j++) {
-        orig_data[i][j] = ((long_long)HDrandom() %
-                           (long_long)HDpow(2.0, precision - 1)) << offset;
+        orig_data[i][j] = (int)(((long_long)HDrandom() %
+                           (long_long)HDpow(2.0, precision - 1)) << offset);

         /* even-numbered values are negtive */
         if((i*size[1]+j+1)%2 == 0)
@@ -2677,8 +2677,8 @@ test_nbit_float(hid_t file)
     /* orig_data[] are initialized to be within the range that can be represented by
      * dataset datatype (no precision loss during datatype conversion)
      */
-    float orig_data[2][5] = {{188384.00, 19.103516, -1.0831790e9, -84.242188,
-    5.2045898}, {-49140.000, 2350.2500, -3.2110596e-1, 6.4998865e-5, -0.0000000}};
+    float orig_data[2][5] = {{(float)188384.00, (float)19.103516, (float)-1.0831790e9, (float)-84.242188,
+    (float)5.2045898}, {(float)-49140.000, (float)2350.2500, (float)-3.2110596e-1, (float)6.4998865e-5, (float)-0.0000000}};
     float new_data[2][5];
     size_t precision, offset;
     hsize_t i, j;
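An unsuffixed constant like 19.103516 is a double in C, so every element of this float initializer narrows from double to float, and MSVC reports each one (C4305/C4244). The commit casts each literal; the equivalent modern spelling is an f suffix. A sketch:

    #include <stdio.h>

    int main(void)
    {
        float a = (float)19.103516;   /* the commit's style: explicit cast */
        float b = 19.103516f;         /* same effect: a genuine float literal */

        printf("%.6f %.6f\n", a, b);
        return 0;
    }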
@@ -2986,8 +2986,8 @@ test_nbit_array(hid_t file)
     for (j = 0; j < size[1]; j++)
       for (m = 0; m < adims[0]; m++)
         for (n = 0; n < adims[1]; n++)
-          orig_data[i][j][m][n] = ((long_long)HDrandom() %
-                                   (long_long)HDpow(2.0, precision)) << offset;
+          orig_data[i][j][m][n] = (unsigned int)(((long_long)HDrandom() %
+                                   (long_long)HDpow(2.0, precision)) << offset);
     PASSED();
 #else
     SKIPPED();
@@ -3097,8 +3097,8 @@ test_nbit_compound(hid_t file)
     hid_t dataset, space, dc;
     const hsize_t size[2] = {2, 5};
     const hsize_t chunk_size[2] = {2, 5};
-    const float float_val[2][5] = {{188384.00, 19.103516, -1.0831790e9, -84.242188,
-    5.2045898}, {-49140.000, 2350.2500, -3.2110596e-1, 6.4998865e-5, -0.0000000}};
+    const float float_val[2][5] = {{(float)188384.00, (float)19.103516, (float)-1.0831790e9, (float)-84.242188,
+    (float)5.2045898}, {(float)-49140.000, (float)2350.2500, (float)-3.2110596e-1, (float)6.4998865e-5, (float)-0.0000000}};
     atomic orig_data[2][5];
     atomic new_data[2][5];
     unsigned int i_mask, s_mask, c_mask;
@@ -3166,12 +3166,12 @@ test_nbit_compound(hid_t file)
     /* Initialize data, assuming size of long_long >= size of member datatypes */
     for (i= 0;i< size[0]; i++)
       for (j = 0; j < size[1]; j++) {
-        orig_data[i][j].i = ((long_long)HDrandom() %
-                             (long_long)HDpow(2.0, precision[0]-1)) << offset[0];
-        orig_data[i][j].c = ((long_long)HDrandom() %
-                             (long_long)HDpow(2.0, precision[1]-1)) << offset[1];
-        orig_data[i][j].s = ((long_long)HDrandom() %
-                             (long_long)HDpow(2.0, precision[2]-1)) << offset[2];
+        orig_data[i][j].i = (int)(((long_long)HDrandom() %
+                             (long_long)HDpow(2.0, precision[0]-1)) << offset[0]);
+        orig_data[i][j].c = (char)(((long_long)HDrandom() %
+                             (long_long)HDpow(2.0, precision[1]-1)) << offset[1]);
+        orig_data[i][j].s = (short)(((long_long)HDrandom() %
+                             (long_long)HDpow(2.0, precision[2]-1)) << offset[2]);
         orig_data[i][j].f = float_val[i][j];

         /* some even-numbered integer values are negtive */
@@ -3312,8 +3312,8 @@ test_nbit_compound_2(hid_t file)
     hid_t dataset, space, dc;
     const hsize_t size[2] = {2, 5};
     const hsize_t chunk_size[2] = {2, 5};
-    const float float_val[2][5] = {{188384.00, 19.103516, -1.0831790e9, -84.242188,
-    5.2045898}, {-49140.000, 2350.2500, -3.2110596e-1, 6.4998865e-5, -0.0000000}};
+    const float float_val[2][5] = {{(float)188384.00, (float)19.103516, (float)-1.0831790e9, (float)-84.242188,
+    (float)5.2045898}, {(float)-49140.000, (float)2350.2500, (float)-3.2110596e-1, (float)6.4998865e-5, (float)-0.0000000}};
     complex orig_data[2][5];
     complex new_data[2][5];
     unsigned int i_mask, s_mask, c_mask, b_mask;
@@ -3413,30 +3413,30 @@ test_nbit_compound_2(hid_t file)
     /* Initialize data, assuming size of long_long >= size of member datatypes */
     for (i= 0;i< size[0]; i++)
       for (j = 0; j < size[1]; j++) {
-        orig_data[i][j].a.i = ((long_long)HDrandom() %
-                               (long_long)HDpow(2.0, precision[0]-1)) << offset[0];
-        orig_data[i][j].a.c = ((long_long)HDrandom() %
-                               (long_long)HDpow(2.0, precision[1]-1)) << offset[1];
-        orig_data[i][j].a.s = -((long_long)HDrandom() %
-                               (long_long)HDpow(2.0, precision[2]-1)) << offset[2];
+        orig_data[i][j].a.i = (int)(((long_long)HDrandom() %
+                               (long_long)HDpow(2.0, precision[0]-1)) << offset[0]);
+        orig_data[i][j].a.c = (char)(((long_long)HDrandom() %
+                               (long_long)HDpow(2.0, precision[1]-1)) << offset[1]);
+        orig_data[i][j].a.s = (short)(-((long_long)HDrandom() %
+                               (long_long)HDpow(2.0, precision[2]-1)) << offset[2]);
         orig_data[i][j].a.f = float_val[i][j];

-        orig_data[i][j].v = ((long_long)HDrandom() %
-                             (long_long)HDpow(2.0, precision[3])) << offset[3];
+        orig_data[i][j].v = (unsigned int)(((long_long)HDrandom() %
+                             (long_long)HDpow(2.0, precision[3])) << offset[3]);

         for(m = 0; m < array_dims[0]; m++)
           for(n = 0; n < array_dims[1]; n++)
-            orig_data[i][j].b[m][n] = ((long_long)HDrandom() %
-                                       (long_long)HDpow(2.0, precision[4]-1)) << offset[4];
+            orig_data[i][j].b[m][n] = (char)(((long_long)HDrandom() %
+                                       (long_long)HDpow(2.0, precision[4]-1)) << offset[4]);

         for(m = 0; m < array_dims[0]; m++)
           for(n = 0; n < array_dims[1]; n++) {
-            orig_data[i][j].d[m][n].i = -((long_long)HDrandom() %
-                                          (long_long)HDpow(2.0, precision[0]-1)) << offset[0];
-            orig_data[i][j].d[m][n].c = ((long_long)HDrandom() %
-                                         (long_long)HDpow(2.0, precision[1]-1)) << offset[1];
-            orig_data[i][j].d[m][n].s = ((long_long)HDrandom() %
-                                         (long_long)HDpow(2.0, precision[2]-1)) << offset[2];
+            orig_data[i][j].d[m][n].i = (int)(-((long_long)HDrandom() %
+                                          (long_long)HDpow(2.0, precision[0]-1)) << offset[0]);
+            orig_data[i][j].d[m][n].c = (char)(((long_long)HDrandom() %
+                                         (long_long)HDpow(2.0, precision[1]-1)) << offset[1]);
+            orig_data[i][j].d[m][n].s = (short)(((long_long)HDrandom() %
+                                         (long_long)HDpow(2.0, precision[2]-1)) << offset[2]);
             orig_data[i][j].d[m][n].f = float_val[i][j];
           }
       }
@@ -3643,14 +3643,14 @@ test_nbit_compound_3(hid_t file)
       strcpy(orig_data[i].str, "fixed-length C string");
       orig_data[i].vl_str = strdup("variable-length C string");

-      orig_data[i].v.p = HDmalloc((i+1)*sizeof(unsigned int));
-      orig_data[i].v.len = i+1;
-      for(k = 0; k < (i+1); k++) ((unsigned int *)orig_data[i].v.p)[k] = i*100 + k;
+      orig_data[i].v.p = HDmalloc((size_t)(i+1)*sizeof(unsigned int));
+      orig_data[i].v.len = (size_t)i+1;
+      for(k = 0; k < (i+1); k++) ((unsigned int *)orig_data[i].v.p)[k] = (unsigned int)(i*100 + k);

       /* Create reference to the dataset "nbit_obj_ref" */
       if(H5Rcreate(&orig_data[i].r, file, "nbit_obj_ref", H5R_OBJECT, -1)<0) goto error;

-      for(j = 0; j < 5; j++) orig_data[i].o[j] = i + j;
+      for(j = 0; j < 5; j++) orig_data[i].o[j] = (unsigned char)(i + j);
    }

    PASSED();
@@ -3957,7 +3957,7 @@ test_scaleoffset_int_2(hid_t file)

    /* Initialize data of hyperslab */
    for (j = 0; j < size[1]; j++) {
-      orig_data[0][j] = (long_long)HDrandom() % 10000;
+      orig_data[0][j] = (int)HDrandom() % 10000;

       /* even-numbered values are negtive */
       if((j+1)%2 == 0)
@@ -4088,7 +4088,7 @@ test_scaleoffset_float(hid_t file)
    /* Initialize data */
    for (i= 0;i< size[0]; i++)
      for (j = 0; j < size[1]; j++) {
-       orig_data[i][j] = (HDrandom() % 100000) / 1000.0;
+       orig_data[i][j] = (float)((HDrandom() % 100000) / (float)1000.0);

        /* even-numbered values are negtive */
        if((i*size[1]+j+1)%2 == 0)
@@ -4238,7 +4238,7 @@ test_scaleoffset_float_2(hid_t file)

    /* Initialize data of hyperslab */
    for (j = 0; j < size[1]; j++) {
-      orig_data[0][j] = (HDrandom() % 100000) / 1000.0;
+      orig_data[0][j] = (float)((HDrandom() % 100000) / (float)1000.0);

       /* even-numbered values are negtive */
       if((j+1)%2 == 0)
@@ -714,9 +714,8 @@ static int test_particular_fp_integer(void)
     int dst_i;
     int fill_value = 13;
     int endian;                 /*endianess */
     char str[256];              /*message string */
     unsigned int fails_this_test = 0;
-    int i, j;
+    int j;

     TESTING("hard particular floating number -> integer conversions");

@@ -398,8 +398,8 @@ int test_poly(const hid_t dxpl_id_polynomial)
     {
         for(col = 0; col<COLS; col++)
         {
-            windchillC = (5/9.0)*(windchillFfloat[row][col] - 32);
-            polyflres[row][col] = (2.0+windchillC)*((windchillC-8.0)/2.0);
+            windchillC = (int)((5.0/9.0)*(windchillFfloat[row][col] - 32));
+            polyflres[row][col] = (float)((2.0+windchillC)*((windchillC-8.0)/2.0));
         }
     }

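windchillC is declared as int in these tests, so the Celsius polynomial, computed in double, is truncated on every assignment; the added casts state that the truncation (and the later double-to-float store) is intended. In isolation, assuming the same variable types as the test:

    #include <stdio.h>

    int main(void)
    {
        float windchillF = 40.0f;
        int   windchillC;
        float polyflres;

        /* double -> int truncation is deliberate, so cast rather than
         * letting the compiler warn; same for the double -> float store. */
        windchillC = (int)((5.0 / 9.0) * (windchillF - 32));
        polyflres  = (float)((2.0 + windchillC) * ((windchillC - 8.0) / 2.0));

        printf("C=%d poly=%f\n", windchillC, polyflres);
        return 0;
    }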
@@ -412,8 +412,8 @@ int test_poly(const hid_t dxpl_id_polynomial)
     {
         for(col = 0; col<COLS; col++)
         {
-            windchillC = (5/9.0)*(windchillFfloat[row][col] - 32);
-            polyflres[row][col] = (2+windchillC)*((windchillC-8)/2);
+            windchillC = (int)((5.0/9.0)*(windchillFfloat[row][col] - 32));
+            polyflres[row][col] = (float)((2+windchillC)*((windchillC-8)/2));
         }
     }

@@ -439,8 +439,8 @@ int test_copy(const hid_t dxpl_id_c_to_f_copy, const hid_t dxpl_id_polynomial_co
     {
         for(col = 0; col<COLS; col++)
         {
-            windchillC = (5/9.0)*(windchillFfloat[row][col] - 32);
-            polyflres[row][col] = (2+windchillC)*((windchillC-8)/2);
+            windchillC = (int)((5.0/9.0)*(windchillFfloat[row][col] - 32));
+            polyflres[row][col] = (float)((2+windchillC)*((windchillC-8)/2));
         }
     }

@@ -3896,7 +3896,7 @@ test_int_float_except(void)
     float buf_float[CONVERT_SIZE] = {INT_MIN, INT_MAX + 1.0, INT_MAX - 127.0, 4};
     int *intp;          /* Pointer to buffer, as integers */
     int buf2[CONVERT_SIZE] = {INT_MIN, INT_MAX, INT_MAX - 72, 0};
-    float buf2_float[CONVERT_SIZE] = {INT_MIN, INT_MAX, INT_MAX - 127.0, 0.0};
+    float buf2_float[CONVERT_SIZE] = {INT_MIN, INT_MAX, INT_MAX - 127.0, (float)0.0};
     int buf2_int[CONVERT_SIZE] = {INT_MIN, INT_MAX, INT_MAX - 127, 0};
     float *floatp;      /* Pointer to buffer #2, as floats */
     hid_t dxpl;         /* Dataset transfer property list */
@@ -33,8 +33,8 @@ typedef struct dtype_info_t {
 } dtype_info_t;

 typedef struct ohdr_info_t {
-    unsigned long total_size;          /* Total size of object headers */
-    unsigned long free_size;           /* Total free space in object headers */
+    hsize_t total_size;                /* Total size of object headers */
+    hsize_t free_size;                 /* Total free space in object headers */
 } ohdr_info_t;

 /* Info to pass to the iteration functions */
@@ -50,7 +50,7 @@ typedef struct iter_t {

     unsigned long max_depth;           /* Maximum depth of hierarchy */
     unsigned long max_links;           /* Maximum # of links to an object */
-    unsigned long max_fanout;          /* Maximum fanout from a group */
+    hsize_t max_fanout;                /* Maximum fanout from a group */
     unsigned long num_small_groups[SIZE_SMALL_GROUPS];  /* Size of small groups tracked */
     unsigned group_nbins;              /* Number of bins for group counts */
     unsigned long *group_bins;         /* Pointer to array of bins for group counts */
@@ -58,7 +58,7 @@ typedef struct iter_t {

     unsigned long max_dset_rank;       /* Maximum rank of dataset */
     unsigned long dset_rank_count[H5S_MAX_RANK];  /* Number of datasets of each rank */
-    unsigned long max_dset_dims;       /* Maximum dimension size of dataset */
+    hsize_t max_dset_dims;             /* Maximum dimension size of dataset */
     unsigned long small_dset_dims[SIZE_SMALL_DSETS];  /* Size of dimensions of small datasets tracked */
     unsigned long dset_layouts[H5D_NLAYOUTS];  /* Type of storage for each dataset */
     unsigned long dset_ntypes;         /* Number of diff. dataset datatypes found */
@@ -66,7 +66,7 @@ typedef struct iter_t {
     unsigned dset_dim_nbins;           /* Number of bins for dataset dimensions */
     unsigned long *dset_dim_bins;      /* Pointer to array of bins for dataset dimensions */
     ohdr_info_t dset_ohdr_info;        /* Object header information for datasets */
-    unsigned long dset_storage_size;   /* Size of raw data for datasets */
+    hsize_t dset_storage_size;         /* Size of raw data for datasets */
 } iter_t;

 /* Table containing object id and object name */
@@ -553,11 +553,11 @@ main(int argc, char *argv[])
     printf("\t# of unique other: %lu\n", iter.uniq_others);
     printf("\tMax. # of links to object: %lu\n", iter.max_links);
     printf("\tMax. depth of hierarchy: %lu\n", iter.max_depth);
-    printf("\tMax. # of objects in group: %lu\n", iter.max_fanout);
+    HDfprintf(stdout, "\tMax. # of objects in group: %Hu\n", iter.max_fanout);

     printf("Object header size: (total/unused)\n");
-    printf("\tGroups: %lu/%lu\n", iter.group_ohdr_info.total_size,iter.group_ohdr_info.free_size);
-    printf("\tDatasets: %lu/%lu\n", iter.dset_ohdr_info.total_size,iter.dset_ohdr_info.free_size);
+    HDfprintf(stdout, "\tGroups: %Hu/%Hu\n", iter.group_ohdr_info.total_size, iter.group_ohdr_info.free_size);
+    HDfprintf(stdout, "\tDatasets: %Hu/%Hu\n", iter.dset_ohdr_info.total_size, iter.dset_ohdr_info.free_size);

     printf("Small groups:\n");
     total = 0;
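%lu expects unsigned long, which is only 32 bits under Windows' LLP64 model, so it cannot print the counters once they become hsize_t; HDfprintf is HDF5's internal fprintf wrapper whose extra %Hu conversion handles hsize_t portably. Outside the library, C99's <inttypes.h> does the same job; a sketch with uint64_t standing in for hsize_t:

    #include <inttypes.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t dset_storage_size = 5000000000ULL;  /* does not fit in 32 bits */

        /* "%lu" would truncate on LLP64 Windows; PRIu64 expands to the
         * correct conversion for a 64-bit value on every platform. */
        printf("Total raw data size: %" PRIu64 "\n", dset_storage_size);
        return 0;
    }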
@@ -595,7 +595,7 @@ main(int argc, char *argv[])
         printf("\t\t# of dataset with rank %u: %lu\n", u, iter.dset_rank_count[u]);

     printf("1-D Dataset info:\n");
-    printf("\tMax. dimension size of 1-D datasets: %lu\n", iter.max_dset_dims);
+    HDfprintf(stdout, "\tMax. dimension size of 1-D datasets: %Hu\n", iter.max_dset_dims);
     printf("\tSmall 1-D datasets:\n");
     total = 0;
     for(u = 0; u < SIZE_SMALL_DSETS; u++) {
@@ -626,7 +626,7 @@ main(int argc, char *argv[])
     } /* end if */

     printf("Dataset storage info:\n");
-    printf("\tTotal raw data size: %lu\n", iter.dset_storage_size);
+    HDfprintf(stdout, "\tTotal raw data size: %Hu\n", iter.dset_storage_size);

     printf("Dataset layout info:\n");
     for(u = 0; u < H5D_NLAYOUTS; u++)