Mirror of https://github.com/HDFGroup/hdf5.git (synced 2025-04-24 17:51:25 +08:00)

Commit 2385b2b0ad (parent cc0165a973)

[svn-r27800] Minor warning fixes in the library tests.

Tested on: jam (too minor for h5committest)
@@ -120,7 +120,7 @@ static hsize_t values_used[WRT_N];
 static hsize_t
 randll(hsize_t limit, int current_index)
 {
-    hsize_t acc;
+    hsize_t acc = 0;
     int overlap = 1;
     int i;
     int tries = 0;
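Note: the fix above is the recurring pattern in this commit: give locals a defined initial value so the compiler cannot see a path on which they are read before being set. A minimal standalone sketch of that warning and fix, using a hypothetical helper rather than HDF5 code:

    #include <stdio.h>

    /* Hypothetical helper, not HDF5 code: 'total' is only assigned inside the
     * loop, so when n == 0 it is read uninitialized and compilers warn
     * "'total' may be used uninitialized in this function". */
    static long sum_vals(const long *vals, int n)
    {
        long total;                     /* uninitialized */
        int  i;

        for(i = 0; i < n; i++)
            total = (i == 0) ? vals[0] : total + vals[i];

        return total;                   /* undefined if the loop never ran */
    }

    /* The shape of the fix used throughout this commit: initialize at the
     * point of declaration so every path sees a defined value. */
    static long sum_vals_fixed(const long *vals, int n)
    {
        long total = 0;
        int  i;

        for(i = 0; i < n; i++)
            total += vals[i];

        return total;
    }

    int main(void)
    {
        long v[3] = {1, 2, 3};

        printf("%ld %ld\n", sum_vals(v, 3), sum_vals_fixed(v, 3));
        return 0;
    }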
test/dsets.c (102 changed lines)
@@ -1017,15 +1017,15 @@ test_conv_buffer(hid_t fid)
 
     /* Populate the data members */
     for(j = 0; j < DIM1; j++)
-      for(k = 0; k < DIM2; k++)
-        for(l = 0; l < DIM3; l++)
-          cf->a[j][k][l] = 10*(j+1) + l + k;
+        for(k = 0; k < DIM2; k++)
+            for(l = 0; l < DIM3; l++)
+                cf->a[j][k][l] = 10*(j+1) + l + k;
 
     for(j = 0; j < DIM2; j++)
-        cf->b[j] = (float)(100.0f*(j+1) + 0.01f*j);
+        cf->b[j] = 100.0f * (float)(j+1) + 0.01f * (float)j;
 
     for(j = 0; j < DIM3; j++)
-        cf->c[j] = 100.0f*(j+1) + 0.02f*j;
+        cf->c[j] = 100.0f * (float)(j+1) + 0.02f * (float)j;
 
 
     /* Create data space */
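Note: the cf->b / cf->c rewrites make the int-to-float conversions explicit instead of letting the loop index be converted implicitly inside the expression, which conversion-style warnings flag. A small standalone sketch of the same pattern, with illustrative names rather than the dsets.c buffers:

    #include <stdio.h>

    #define DIM 5   /* illustrative size, not the DIM2/DIM3 from dsets.c */

    int main(void)
    {
        float implicit_buf[DIM];
        float explicit_buf[DIM];
        int   j;

        for(j = 0; j < DIM; j++) {
            /* 'j' and 'j+1' are int operands in a float expression; the
             * implicit int -> float conversions are what conversion
             * warnings complain about. */
            implicit_buf[j] = (float)(100.0f*(j+1) + 0.01f*j);

            /* Casting each int operand explicitly states the intent; the
             * computed values are the same here. */
            explicit_buf[j] = 100.0f * (float)(j+1) + 0.01f * (float)j;
        }

        for(j = 0; j < DIM; j++)
            printf("%g %g\n", (double)implicit_buf[j], (double)explicit_buf[j]);

        return 0;
    }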
@@ -1102,7 +1102,7 @@ test_conv_buffer(hid_t fid)
     HDfree(cf);
     HDfree(cfrR);
     puts(" PASSED");
-    return(0);
+    return 0;
 
 error:
     return -1;
@@ -2692,7 +2692,8 @@ test_nbit_int(hid_t file)
     int new_data[2][5];
     unsigned int mask;
     size_t precision, offset;
-    size_t i, j;
+    double power;
+    size_t i, j;
 
     puts("Testing nbit filter");
     TESTING(" nbit int (setup)");
@@ -2725,8 +2726,8 @@ test_nbit_int(hid_t file)
     /* Initialize data, assuming size of long long >= size of int */
     for(i= 0;i< (size_t)size[0]; i++)
       for(j = 0; j < (size_t)size[1]; j++) {
-        orig_data[i][j] = (int)(((long long)HDrandom() %
-                          (long long)HDpow(2.0f, (double)(precision - 1))) << offset);
+        power = HDpow(2.0f, (double)(precision - 1));
+        orig_data[i][j] = (int)(((long long)HDrandom() % (long long)power) << offset);
 
         /* even-numbered values are negtive */
         if((i*size[1]+j+1)%2 == 0)
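Note: the nbit hunks all apply one transformation: hoist the HDpow() call into the new `double power` local, then perform a single, explicit conversion to long long inside the modulo expression. A standalone sketch of the before/after, using plain pow()/rand() in place of the library's HDpow()/HDrandom() wrappers and illustrative values:

    #include <math.h>
    #include <stdio.h>
    #include <stdlib.h>

    int main(void)
    {
        size_t    precision = 16;       /* illustrative values */
        size_t    offset    = 4;
        long long r         = (long long)rand();
        double    power;
        int       before, after;

        /* Before: pow() is buried inside a cast-heavy one-liner, so its
         * double result is converted to long long mid-expression, which
         * several compilers report as a possibly lossy conversion. */
        before = (int)((r % (long long)pow(2.0, (double)(precision - 1))) << offset);

        /* After: compute the bound once in a named double, then convert it
         * in one obvious cast. Same arithmetic, quieter compile. */
        power = pow(2.0, (double)(precision - 1));
        after = (int)((r % (long long)power) << offset);

        printf("before == after: %d\n", before == after);
        return 0;
    }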
@@ -3052,7 +3053,8 @@ test_nbit_array(hid_t file)
     unsigned int orig_data[2][5][3][2];
     unsigned int new_data[2][5][3][2];
     size_t precision, offset;
-    size_t i, j, m, n;
+    double power;
+    size_t i, j, m, n;
 
     TESTING(" nbit array (setup)");
 
@@ -3091,9 +3093,11 @@ test_nbit_array(hid_t file)
     for(i= 0;i< (size_t)size[0]; i++)
       for(j = 0; j < (size_t)size[1]; j++)
         for(m = 0; m < (size_t)adims[0]; m++)
-          for(n = 0; n < (size_t)adims[1]; n++)
+          for(n = 0; n < (size_t)adims[1]; n++) {
+            power = HDpow(2.0F, (double)precision);
             orig_data[i][j][m][n] = (unsigned int)(((long long)HDrandom() %
-                                    (long long)HDpow(2.0F, (double)precision)) << offset);
+                                    (long long)power) << offset);
+          } /* end for */
     PASSED();
 
     /*----------------------------------------------------------------------
@@ -3192,7 +3196,8 @@ test_nbit_compound(hid_t file)
     atomic orig_data[2][5];
     atomic new_data[2][5];
     unsigned int i_mask, s_mask, c_mask;
-    size_t i, j;
+    double power;
+    size_t i, j;
 
 
     TESTING(" nbit compound (setup)");
@@ -3251,12 +3256,12 @@ test_nbit_compound(hid_t file)
     /* Initialize data, assuming size of long long >= size of member datatypes */
     for(i= 0;i< (size_t)size[0]; i++)
       for(j = 0; j < (size_t)size[1]; j++) {
-        orig_data[i][j].i = (int)(((long long)HDrandom() %
-                            (long long)HDpow(2.0F, (double)(precision[0]-1))) << offset[0]);
-        orig_data[i][j].c = (char)(((long long)HDrandom() %
-                            (long long)HDpow(2.0F, (double)(precision[1]-1))) << offset[1]);
-        orig_data[i][j].s = (short)(((long long)HDrandom() %
-                            (long long)HDpow(2.0F, (double)(precision[2]-1))) << offset[2]);
+        power = HDpow(2.0F, (double)(precision[0]-1));
+        orig_data[i][j].i = (int)(((long long)HDrandom() % (long long)power) << offset[0]);
+        power = HDpow(2.0F, (double)(precision[1]-1));
+        orig_data[i][j].c = (char)(((long long)HDrandom() % (long long)power) << offset[1]);
+        power = HDpow(2.0F, (double)(precision[2]-1));
+        orig_data[i][j].s = (short)(((long long)HDrandom() % (long long)power) << offset[2]);
         orig_data[i][j].f = float_val[i][j];
 
         /* some even-numbered integer values are negtive */
@@ -3386,7 +3391,8 @@ test_nbit_compound_2(hid_t file)
     complex orig_data[2][5];
     complex new_data[2][5];
     unsigned int i_mask, s_mask, c_mask, b_mask;
-    size_t i, j, m, n, b_failed, d_failed;
+    double power;
+    size_t i, j, m, n, b_failed, d_failed;
 
 
     TESTING(" nbit compound complex (setup)");
@@ -3477,33 +3483,34 @@ test_nbit_compound_2(hid_t file)
     /* Initialize data, assuming size of long long >= size of member datatypes */
     for(i= 0;i< (size_t)size[0]; i++)
      for(j = 0; j < (size_t)size[1]; j++) {
-        orig_data[i][j].a.i = (int)(((long long)HDrandom() %
-                              (long long)HDpow(2.0F, (double)(precision[0]-1))) << offset[0]);
-        orig_data[i][j].a.c = (char)(((long long)HDrandom() %
-                              (long long)HDpow(2.0F, (double)(precision[1]-1))) << offset[1]);
-        orig_data[i][j].a.s = (short)(-((long long)HDrandom() %
-                              (long long)HDpow(2.0F, (double)(precision[2]-1))) << offset[2]);
+        power = HDpow(2.0F, (double)(precision[0]-1));
+        orig_data[i][j].a.i = (int)(((long long)HDrandom() % (long long)power) << offset[0]);
+        power = HDpow(2.0F, (double)(precision[1]-1));
+        orig_data[i][j].a.c = (char)(((long long)HDrandom() % (long long)power) << offset[1]);
+        power = HDpow(2.0F, (double)(precision[2]-1));
+        orig_data[i][j].a.s = (short)(-((long long)HDrandom() % (long long)power) << offset[2]);
         orig_data[i][j].a.f = float_val[i][j];
 
-        orig_data[i][j].v = (unsigned int)(((long long)HDrandom() %
-                            (long long)HDpow(2.0F, (double)precision[3])) << offset[3]);
-
-        for(m = 0; m < (size_t)array_dims[0]; m++)
-          for(n = 0; n < (size_t)array_dims[1]; n++)
-            orig_data[i][j].b[m][n] = (char)(((long long)HDrandom() %
-                                      (long long)HDpow(2.0F, (double)(precision[4]-1))) << offset[4]);
+        power = HDpow(2.0F, (double)precision[3]);
+        orig_data[i][j].v = (unsigned int)(((long long)HDrandom() % (long long)power) << offset[3]);
+
+        for(m = 0; m < (size_t)array_dims[0]; m++)
+          for(n = 0; n < (size_t)array_dims[1]; n++) {
+            power = HDpow(2.0F, (double)(precision[4]-1));
+            orig_data[i][j].b[m][n] = (char)(((long long)HDrandom() % (long long)power) << offset[4]);
+          } /* end for */
 
         for(m = 0; m < (size_t)array_dims[0]; m++)
           for(n = 0; n < (size_t)array_dims[1]; n++) {
-            orig_data[i][j].d[m][n].i = (int)(-((long long)HDrandom() %
-                                        (long long)HDpow(2.0F, (double)(precision[0]-1))) << offset[0]);
-            orig_data[i][j].d[m][n].c = (char)(((long long)HDrandom() %
-                                        (long long)HDpow(2.0F, (double)(precision[1]-1))) << offset[1]);
-            orig_data[i][j].d[m][n].s = (short)(((long long)HDrandom() %
-                                        (long long)HDpow(2.0F, (double)(precision[2]-1))) << offset[2]);
+            power = HDpow(2.0F, (double)(precision[0]-1));
+            orig_data[i][j].d[m][n].i = (int)(-((long long)HDrandom() % (long long)power) << offset[0]);
+            power = HDpow(2.0F, (double)(precision[1]-1));
+            orig_data[i][j].d[m][n].c = (char)(((long long)HDrandom() % (long long)power) << offset[1]);
+            power = HDpow(2.0F, (double)(precision[2]-1));
+            orig_data[i][j].d[m][n].s = (short)(((long long)HDrandom() % (long long)power) << offset[2]);
             orig_data[i][j].d[m][n].f = float_val[i][j];
-          }
-      }
+          } /* end for */
+      } /* end for */
 
     PASSED();
@@ -3648,7 +3655,8 @@ test_nbit_compound_3(hid_t file)
     const hsize_t chunk_size[1] = {5};
     atomic orig_data[5];
     atomic new_data[5];
-    size_t i, k, j;
+    double power;
+    size_t i, k, j;
 
 
     TESTING(" nbit compound with no-op type (setup)");
@@ -3695,8 +3703,9 @@ test_nbit_compound_3(hid_t file)
 
     /* Initialize data */
     for(i = 0; i < (size_t)size[0]; i++) {
+        power = HDpow(2.0F, 17.0F - 1.0F);
         HDmemset(&orig_data[i], 0, sizeof(orig_data[i]));
-        orig_data[i].i = HDrandom() % (long)HDpow(2.0F, 17.0F - 1.0F);
+        orig_data[i].i = HDrandom() % (long)power;
         HDstrcpy(orig_data[i].str, "fixed-length C string");
         orig_data[i].vl_str = HDstrdup("variable-length C string");
 
@@ -3815,6 +3824,7 @@ test_nbit_int_size(hid_t file)
     hsize_t dims[2], chunk_size[2];
     hsize_t dset_size = 0;
     int orig_data[DSET_DIM1][DSET_DIM2];
+    double power;
     int i, j;
     size_t precision, offset;
 
@@ -3865,8 +3875,10 @@ test_nbit_int_size(hid_t file)
  * corresponding to the memory datatype's precision and offset.
  */
    for (i=0; i < DSET_DIM1; i++)
-      for (j=0; j < DSET_DIM2; j++)
-         orig_data[i][j] = rand() % (int)pow((double)2, (double)(precision-1)) << offset;
+      for (j=0; j < DSET_DIM2; j++) {
+         power = HDpow(2.0F, (double)(precision-1));
+         orig_data[i][j] = HDrandom() % (int)power << offset;
+      } /* end for */
 
 
    /* Describe the dataspace. */
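Note: besides the power hoist, the hunk above also replaces the bare rand()/pow() calls with the library's HD-prefixed wrappers (HDrandom, HDpow), matching the rest of the nbit tests. The sketch below only illustrates the wrapper idea with stand-in macros; it is not the real HDF5 definitions:

    #include <math.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Stand-in wrappers to illustrate the pattern of the HD-prefixed calls:
     * one central definition decides which underlying C library routine every
     * test uses. These are NOT the real HDF5 macros, just a sketch. */
    #define MY_HDrandom()   rand()
    #define MY_HDpow(X, Y)  pow(X, Y)

    int main(void)
    {
        size_t precision = 8, offset = 2;   /* illustrative values */
        double power = MY_HDpow(2.0, (double)(precision - 1));
        int    value = MY_HDrandom() % (int)power << offset;

        printf("%d\n", value);
        return 0;
    }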
@@ -2859,7 +2859,7 @@ test_conv_flt_1 (const char *name, int run_test, hid_t src, hid_t dst)
 #endif
     unsigned char *hw=NULL; /*ptr to hardware-conv'd*/
     int underflow; /*underflow occurred */
-    int overflow; /*overflow occurred */
+    int overflow = 0; /*overflow occurred */
     int uflow=0; /*underflow debug counters*/
     size_t j, k; /*counters */
     int sendian; /* source type endianess */
@@ -467,7 +467,7 @@ test_poly(const hid_t dxpl_id_polynomial)
     for(row = 0; row < ROWS; row++)
         for(col = 0; col < COLS; col++) {
             windchillC = (int) ((5.0f / 9.0f) * (windchillFfloat[row][col] - 32));
-            polyflres[row][col] = (float) ((2.0f + windchillC) * ((windchillC - 8.0f) / 2.0f));
+            polyflres[row][col] = ((2.0f + (float)windchillC) * (((float)windchillC - 8.0f) / 2.0f));
         }
 
     TESTING("data transform, polynomial transform (int->float)")
test/mf.c (10 changed lines)
@@ -331,9 +331,9 @@ test_mf_eoa_shrink(const char *env_h5_drvr, hid_t fapl)
     hid_t fapl_new = -1; /* copy of fapl */
     char filename[FILENAME_LEN]; /* Filename to use */
     H5F_t *f = NULL; /* Internal file object pointer */
-    h5_stat_size_t file_size, new_file_size; /* file size */
+    h5_stat_size_t file_size = 0, new_file_size; /* file size */
     H5FD_mem_t type;
-    haddr_t addr;
+    haddr_t addr = 0;
     haddr_t ma_addr=HADDR_UNDEF, new_ma_addr=HADDR_UNDEF;
     hsize_t ma_size=0, new_ma_size=0;
     hbool_t contig_addr_vfd; /* Whether VFD used has a contigous address space */
@@ -3236,7 +3236,7 @@ test_mf_aggr_extend(const char *env_h5_drvr, hid_t fapl)
     hid_t file = -1; /* File ID */
     char filename[FILENAME_LEN]; /* Filename to use */
     H5F_t *f = NULL; /* Internal file object pointer */
-    h5_stat_size_t empty_size, file_size;
+    h5_stat_size_t empty_size = 0, file_size;
     H5FD_mem_t type, stype;
     haddr_t new_addr, addr, saddr;
     haddr_t ma_addr=HADDR_UNDEF, new_ma_addr=HADDR_UNDEF, sdata_addr=HADDR_UNDEF;
@@ -3528,7 +3528,7 @@ test_mf_aggr_absorb(const char *env_h5_drvr, hid_t fapl)
     hid_t file = -1; /* File ID */
     char filename[FILENAME_LEN]; /* Filename to use */
     H5F_t *f = NULL; /* Internal file object pointer */
-    h5_stat_size_t empty_size, file_size;
+    h5_stat_size_t empty_size = 0, file_size;
     H5FD_mem_t type, stype;
     haddr_t addr1, addr2, addr3, saddr1;
     haddr_t ma_addr=HADDR_UNDEF, new_ma_addr=HADDR_UNDEF;
@@ -3764,7 +3764,7 @@ static unsigned
 test_mf_align_eoa(const char *env_h5_drvr, hid_t fapl, hid_t new_fapl)
 {
     hid_t file = -1; /* File ID */
-    hid_t fapl1;
+    hid_t fapl1 = -1;
     char filename[FILENAME_LEN]; /* Filename to use */
     H5F_t *f = NULL; /* Internal file object pointer */
     h5_stat_size_t file_size, new_file_size;
test/tattr.c (12 changed lines)
@@ -846,7 +846,7 @@ test_attr_compound_read(hid_t fapl)
     t_class = H5Tget_class(field);
     VERIFY(t_class, H5T_INTEGER, "H5Tget_class");
     order = H5Tget_order(field);
-    VERIFY(order, H5Tget_order(H5T_NATIVE_INT), "H5Tget_order");
+    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_INT), H5T_order_t, "%d", "H5Tget_order");
     size = H5Tget_size(field);
     VERIFY(size, H5Tget_size(H5T_NATIVE_INT), "H5Tget_size");
     H5Tclose(field);
@@ -855,7 +855,7 @@ test_attr_compound_read(hid_t fapl)
     t_class = H5Tget_class(field);
     VERIFY(t_class, H5T_FLOAT, "H5Tget_class");
     order = H5Tget_order(field);
-    VERIFY(order, H5Tget_order(H5T_NATIVE_DOUBLE), "H5Tget_order");
+    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_DOUBLE), H5T_order_t, "%d", "H5Tget_order");
     size = H5Tget_size(field);
     VERIFY(size, H5Tget_size(H5T_NATIVE_DOUBLE), "H5Tget_size");
     H5Tclose(field);
@@ -864,7 +864,7 @@ test_attr_compound_read(hid_t fapl)
     t_class = H5Tget_class(field);
     VERIFY(t_class, H5T_INTEGER, "H5Tget_class");
     order = H5Tget_order(field);
-    VERIFY(order, H5Tget_order(H5T_NATIVE_SCHAR), "H5Tget_order");
+    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_SCHAR), H5T_order_t, "%d", "H5Tget_order");
     size = H5Tget_size(field);
     VERIFY(size, H5Tget_size(H5T_NATIVE_SCHAR), "H5Tget_size");
     H5Tclose(field);
@@ -1229,7 +1229,7 @@ test_attr_mult_read(hid_t fapl)
     t_class = H5Tget_class(type);
     VERIFY(t_class, H5T_INTEGER, "H5Tget_class");
     order = H5Tget_order(type);
-    VERIFY(order, H5Tget_order(H5T_NATIVE_INT), "H5Tget_order");
+    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_INT), H5T_order_t, "%d", "H5Tget_order");
     size = H5Tget_size(type);
     VERIFY(size, H5Tget_size(H5T_NATIVE_INT), "H5Tget_size");
     H5Tclose(type);
@@ -1284,7 +1284,7 @@ test_attr_mult_read(hid_t fapl)
     t_class = H5Tget_class(type);
     VERIFY(t_class, H5T_INTEGER, "H5Tget_class");
     order = H5Tget_order(type);
-    VERIFY(order, H5Tget_order(H5T_NATIVE_INT), "H5Tget_order");
+    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_INT), H5T_order_t, "%d", "H5Tget_order");
     size = H5Tget_size(type);
     VERIFY(size, H5Tget_size(H5T_NATIVE_INT), "H5Tget_size");
     H5Tclose(type);
@@ -1342,7 +1342,7 @@ test_attr_mult_read(hid_t fapl)
     t_class = H5Tget_class(type);
     VERIFY(t_class, H5T_FLOAT, "H5Tget_class");
     order = H5Tget_order(type);
-    VERIFY(order, H5Tget_order(H5T_NATIVE_DOUBLE), "H5Tget_order");
+    VERIFY_TYPE(order, H5Tget_order(H5T_NATIVE_DOUBLE), H5T_order_t, "%d", "H5Tget_order");
     size = H5Tget_size(type);
     VERIFY(size, H5Tget_size(H5T_NATIVE_DOUBLE), "H5Tget_size");
     H5Tclose(type);
@@ -164,7 +164,7 @@ test_h5o_close(void)
     /* Create the group and close it with H5Oclose */
     grp = H5Gcreate2(fid, "group", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
     CHECK(grp, FAIL, "H5Gcreate2");
-    VERIFY(H5Iget_type(grp), H5I_GROUP, "H5Iget_type");
+    VERIFY_TYPE(H5Iget_type(grp), H5I_GROUP, H5I_type_t, "%d", "H5Iget_type");
     ret = H5Oclose(grp);
     CHECK(ret, FAIL, "H5Oclose");
 
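Note: the VERIFY to VERIFY_TYPE changes in test/tattr.c and test_h5o_close pass the enum type (H5T_order_t, H5I_type_t) and a printf format to the check macro, so enum-valued results are no longer compared and reported through the generic integer path. A rough standalone sketch of why a type-aware check macro quiets such warnings; the macros below are illustrative, not the testhdf5.h definitions:

    #include <stdio.h>

    /* Illustrative enum standing in for H5T_order_t / H5I_type_t. */
    typedef enum { ORDER_ERROR = -1, ORDER_LE = 0, ORDER_BE = 1 } order_t;

    /* Generic check: everything is squeezed through long and printed with
     * %ld, so enum arguments go through extra conversions. */
    #define CHECK_EQ(x, val, where)                                        \
        do {                                                               \
            if((long)(x) != (long)(val))                                   \
                printf("*** %s: got %ld, expected %ld\n", (where),         \
                       (long)(x), (long)(val));                            \
        } while(0)

    /* Type-aware check: the caller names the type and the printf format, so
     * the comparison and the report stay in the value's own type, the way
     * VERIFY_TYPE is given H5T_order_t and "%d" in the hunks above. */
    #define CHECK_EQ_TYPE(x, val, type, fmt, where)                        \
        do {                                                               \
            if((type)(x) != (type)(val))                                   \
                printf("*** %s: got " fmt ", expected " fmt "\n", (where), \
                       (type)(x), (type)(val));                            \
        } while(0)

    int main(void)
    {
        order_t order = ORDER_LE;

        CHECK_EQ(order, ORDER_BE, "generic check");
        CHECK_EQ_TYPE(order, ORDER_BE, order_t, "%d", "typed check");
        return 0;
    }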
@@ -704,7 +704,7 @@ static void test_grp_memb_funcs(hid_t fapl)
     char dataset_name[NAMELEN]; /* dataset name */
     ssize_t name_len; /* Length of object's name */
     H5G_info_t ginfo; /* Buffer for querying object's info */
-    herr_t ret; /* Generic return value */
+    herr_t ret = SUCCEED; /* Generic return value */
 
     /* Output message about test being performed */
     MESSAGE(5, ("Testing Group Member Information Functionality\n"));