[svn-r26405] Removed compiler warnings, hl/examples: HDF5-237
tested: h5committest

commit 31ffa03914
parent 64f65d96d9
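The hunks below drop the `herr_t status` variables from the hl/examples programs and call the high-level routines without storing their return values, which silences the unused-variable warnings named in the commit message. As a minimal sketch only (not part of this commit; the file name below is hypothetical), the alternative way to quiet the same warning is to actually test the return value, since HDF5 calls report failure with a negative result:

    /* Minimal sketch, not part of this commit: check HDF5 return values
     * instead of assigning them to an unused 'status' variable. */
    #include <stdio.h>
    #include "hdf5.h"

    int
    main(void)
    {
        /* "check_return.h5" is a hypothetical file name used only for illustration */
        hid_t file_id = H5Fcreate("check_return.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

        if (file_id < 0) {
            fprintf(stderr, "H5Fcreate failed\n");
            return 1;
        }

        /* HDF5 calls return a negative value on failure; testing the return
         * keeps error handling without leaving an unused variable behind. */
        if (H5Fclose(file_id) < 0) {
            fprintf(stderr, "H5Fclose failed\n");
            return 1;
        }

        return 0;
    }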
@@ -24,7 +24,6 @@ unsigned char buf [ WIDTH*HEIGHT ];
 int main( void )
 {
     hid_t file_id;
-    herr_t status;
     hsize_t pal_dims[] = {PAL_ENTRIES,3};
     size_t i, j;
     int n, space;
@@ -56,16 +55,16 @@ int main( void )
     file_id = H5Fcreate( "ex_image1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* make the image */
-    status = H5IMmake_image_8bit( file_id, "image1", (hsize_t)WIDTH, (hsize_t)HEIGHT, buf );
+    H5IMmake_image_8bit( file_id, "image1", (hsize_t)WIDTH, (hsize_t)HEIGHT, buf );
 
     /* make a palette */
-    status = H5IMmake_palette( file_id, "pallete", pal_dims, pal );
+    H5IMmake_palette( file_id, "pallete", pal_dims, pal );
 
     /* attach the palette to the image */
-    status = H5IMlink_palette( file_id, "image1", "pallete" );
+    H5IMlink_palette( file_id, "image1", "pallete" );
 
     /* close the file. */
-    status = H5Fclose( file_id );
+    H5Fclose( file_id );
 
     return 0;
 
@@ -35,7 +35,7 @@ int main( void )
     hsize_t height; /* height of image */
     unsigned char pal[ PAL_ENTRIES * 3 ]; /* palette array */
     hsize_t pal_dims[2] = {PAL_ENTRIES,3}; /* palette dimensions */
-    herr_t status, i, n;
+    herr_t i, n;
 
     /* create a new HDF5 file using default properties. */
     file_id = H5Fcreate( "ex_image2.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
@@ -45,7 +45,7 @@ int main( void )
     goto out;
 
     /* make the image */
-    status=H5IMmake_image_8bit( file_id, IMAGE1_NAME, width, height, gbuf );
+    H5IMmake_image_8bit( file_id, IMAGE1_NAME, width, height, gbuf );
     if (gbuf) {
         free(gbuf);
         gbuf = NULL;
@@ -63,10 +63,10 @@ int main( void )
     }
 
     /* make a palette */
-    status=H5IMmake_palette( file_id, PAL_NAME, pal_dims, pal );
+    H5IMmake_palette( file_id, PAL_NAME, pal_dims, pal );
 
     /* attach the palette to the image */
-    status=H5IMlink_palette( file_id, IMAGE1_NAME, PAL_NAME );
+    H5IMlink_palette( file_id, IMAGE1_NAME, PAL_NAME );
 
     /*-------------------------------------------------------------------------
      * True color image example with pixel interlace
@@ -78,7 +78,7 @@ int main( void )
     goto out;
 
     /* make dataset */
-    status=H5IMmake_image_24bit( file_id, IMAGE2_NAME, width, height, "INTERLACE_PIXEL", gbuf );
+    H5IMmake_image_24bit( file_id, IMAGE2_NAME, width, height, "INTERLACE_PIXEL", gbuf );
 
     /* close the file. */
     H5Fclose( file_id );
@@ -25,16 +25,15 @@ int main( void )
     hid_t file_id;
     hsize_t dims[RANK]={2,3};
     int data[6]={1,2,3,4,5,6};
-    herr_t status;
 
     /* create a HDF5 file */
     file_id = H5Fcreate ("ex_lite1.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
 
     /* create and write an integer type dataset named "dset" */
-    status = H5LTmake_dataset(file_id,"/dset",RANK,dims,H5T_NATIVE_INT,data);
+    H5LTmake_dataset(file_id,"/dset",RANK,dims,H5T_NATIVE_INT,data);
 
     /* close file */
-    status = H5Fclose (file_id);
+    H5Fclose (file_id);
 
     return 0;
 }
@@ -21,17 +21,16 @@ int main( void )
     hid_t file_id;
     int data[6];
     hsize_t dims[2];
-    herr_t status;
     size_t i, j, nrow, n_values;
 
     /* open file from ex_lite1.c */
     file_id = H5Fopen ("ex_lite1.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
 
     /* read dataset */
-    status = H5LTread_dataset_int(file_id,"/dset",data);
+    H5LTread_dataset_int(file_id,"/dset",data);
 
     /* get the dimensions of the dataset */
-    status = H5LTget_dataset_info(file_id,"/dset",dims,NULL,NULL);
+    H5LTget_dataset_info(file_id,"/dset",dims,NULL,NULL);
 
     /* print it by rows */
     n_values = (size_t)(dims[0] * dims[1]);
@@ -44,7 +43,7 @@ int main( void )
     }
 
     /* close file */
-    status = H5Fclose (file_id);
+    H5Fclose (file_id);
 
     return 0;
 
@@ -26,7 +26,6 @@ int main( void )
     hid_t space_id;
     hsize_t dims[1] = { ATTR_SIZE };
     int data[ATTR_SIZE] = {1,2,3,4,5};
-    herr_t status;
     int i;
 
     /* create a file */
@@ -39,8 +38,8 @@ int main( void )
     dset_id = H5Dcreate2(file_id, "dset", H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
 
     /* close */
-    status = H5Dclose(dset_id);
-    status = H5Sclose(space_id);
+    H5Dclose(dset_id);
+    H5Sclose(space_id);
 
     /*-------------------------------------------------------------------------
      * example of H5LTset_attribute_int
@@ -48,7 +47,7 @@ int main( void )
     */
 
     /* create and write the attribute "attr1" on the dataset "dset" */
-    status = H5LTset_attribute_int(file_id, "dset", "attr1", data, ATTR_SIZE);
+    H5LTset_attribute_int(file_id, "dset", "attr1", data, ATTR_SIZE);
 
     /*-------------------------------------------------------------------------
      * example of H5LTget_attribute_int
@@ -56,14 +55,14 @@ int main( void )
     */
 
     /* get the attribute "attr1" from the dataset "dset" */
-    status = H5LTget_attribute_int(file_id, "dset", "attr1", data);
+    H5LTget_attribute_int(file_id, "dset", "attr1", data);
 
     for(i = 0; i < ATTR_SIZE; i++ )
         printf(" %d", data[i]);
     printf("\n");
 
     /* close file */
-    status = H5Fclose(file_id);
+    H5Fclose(file_id);
 
     return 0;
 }
@@ -80,7 +80,6 @@ int main( void )
     hsize_t chunk_size = 10;
     int *fill_data = NULL;
     int compress = 0;
-    herr_t status;
     int i;
 
     /* Initialize field_type */
@@ -100,7 +99,7 @@ int main( void )
     *-------------------------------------------------------------------------
     */
 
-    status=H5TBmake_table( "Table Title", file_id, TABLE_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title", file_id, TABLE_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
@@ -109,7 +108,7 @@ int main( void )
     *-------------------------------------------------------------------------
     */
 
-    status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
+    H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
 
     /* print it by rows */
     for (i=0; i<NRECORDS; i++) {
@@ -78,7 +78,6 @@ int main( void )
     hsize_t chunk_size = 10;
     int *fill_data = NULL;
     int compress = 0;
-    herr_t status;
     int i;
 
     /* Append particles */
@@ -99,16 +98,16 @@ int main( void )
     file_id = H5Fcreate( "ex_table_02.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* make a table */
-    status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
                            dst_size, field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
     /* append two records */
-    status=H5TBappend_records(file_id, TABLE_NAME,NRECORDS_ADD, dst_size, dst_offset, dst_sizes,
+    H5TBappend_records(file_id, TABLE_NAME,NRECORDS_ADD, dst_size, dst_offset, dst_sizes,
                               &particle_in );
 
     /* read the table */
-    status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
+    H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
 
     /* print it by rows */
     for (i=0; i<NRECORDS+NRECORDS_ADD; i++) {
@@ -71,7 +71,6 @@ int main( void )
     hsize_t chunk_size = 10;
     hsize_t start;    /* Record to start reading/writing */
     hsize_t nrecords; /* Number of records to read/write */
-    herr_t status;
     int i;
 
     /* Define 2 new particles to write */
@@ -92,7 +91,7 @@ int main( void )
     file_id = H5Fcreate( "ex_table_03.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make the table */
-    status=H5TBmake_table( "Table Title",
+    H5TBmake_table( "Table Title",
                            file_id,
                            TABLE_NAME,
                            NFIELDS,
@@ -110,11 +109,11 @@ int main( void )
     /* Overwrite 2 records starting at record 0 */
     start = 0;
     nrecords = NRECORDS_WRITE;
-    status=H5TBwrite_records( file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset,
+    H5TBwrite_records( file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset,
                               dst_sizes, particle_in);
 
     /* read the table */
-    status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
+    H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
 
     /* print it by rows */
     for (i=0; i<NRECORDS; i++) {
@@ -81,7 +81,6 @@ int main( void )
     hsize_t start;    /* Record to start reading/writing */
     hsize_t nrecords; /* Number of records to read/write */
     int compress = 0;
-    herr_t status;
     int i;
     Particle *p_data = NULL; /* Initially no data */
     float pressure_in [NRECORDS_ADD] = /* Define new values for the field "Pressure" */
@@ -118,24 +117,24 @@ int main( void )
     file_id = H5Fcreate( "ex_table_04.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make the table */
-    status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
     /* Write the pressure field starting at record 2 */
     start = 2;
     nrecords = NRECORDS_ADD;
-    status=H5TBwrite_fields_name( file_id, TABLE_NAME, "Pressure", start, nrecords,
+    H5TBwrite_fields_name( file_id, TABLE_NAME, "Pressure", start, nrecords,
                                   sizeof( float ), 0, field_sizes_pre, pressure_in );
 
     /* Write the new longitude and latitude information starting at record 2 */
     start = 2;
     nrecords = NRECORDS_ADD;
-    status=H5TBwrite_fields_name( file_id, TABLE_NAME, "Latitude,Longitude", start, nrecords,
+    H5TBwrite_fields_name( file_id, TABLE_NAME, "Latitude,Longitude", start, nrecords,
                                   sizeof( Position ), field_offset_pos, field_sizes_pos, position_in );
 
     /* read the table */
-    status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
+    H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
 
     /* print it by rows */
     for (i=0; i<NRECORDS; i++) {
@@ -51,13 +51,6 @@ int main( void )
         int longi;
     } Position;
 
-    /* Define a subset of Particle, with name and pressure fields */
-    typedef struct NamePressure
-    {
-        char name[16];
-        float pressure;
-    } NamePressure;
-
     /* Calculate the type_size and the offsets of our struct members */
     Particle dst_buf[NRECORDS];
     size_t dst_size = sizeof( Particle );
@@ -91,7 +84,6 @@ int main( void )
     hsize_t nfields;
     hsize_t start;    /* Record to start reading/writing */
     hsize_t nrecords; /* Number of records to read/write */
-    herr_t status;
     int i;
 
     /* Define new values for the field "Pressure" */
@@ -129,7 +121,7 @@ int main( void )
     file_id = H5Fcreate( "ex_table_05.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make the table */
-    status=H5TBmake_table( "Table Title", file_id, TABLE_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title", file_id, TABLE_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
@@ -137,7 +129,7 @@ int main( void )
     nfields = 1;
     start = 2;
     nrecords = NRECORDS_ADD;
-    status=H5TBwrite_fields_index( file_id, TABLE_NAME, nfields, field_index_pre, start, nrecords,
+    H5TBwrite_fields_index( file_id, TABLE_NAME, nfields, field_index_pre, start, nrecords,
                                    sizeof( float ), 0, field_sizes_pre, pressure_in );
 
 
@@ -145,12 +137,12 @@ int main( void )
     nfields = 2;
     start = 2;
     nrecords = NRECORDS_ADD;
-    status=H5TBwrite_fields_index( file_id, TABLE_NAME, nfields, field_index_pos, start, nrecords,
+    H5TBwrite_fields_index( file_id, TABLE_NAME, nfields, field_index_pos, start, nrecords,
                                    sizeof( Position ), field_offset_pos, field_sizes_pos, position_in );
 
 
     /* read the table */
-    status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
+    H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
 
     /* print it by rows */
     for (i=0; i<NRECORDS; i++) {
@@ -163,8 +155,7 @@ int main( void )
     printf ("\n");
   }
 
-
-    /* close type */
+    /* close type */
     H5Tclose( string_type );
 
     /* close the file */
@@ -60,7 +60,6 @@ int main( void )
     int compress = 0;
     hsize_t nfields_out;
     hsize_t nrecords_out;
-    herr_t status;
 
     /* Initialize field_type */
     string_type = H5Tcopy( H5T_C_S1 );
@@ -75,12 +74,12 @@ int main( void )
     file_id = H5Fcreate( "ex_table_06.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make a table */
-    status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,dst_size,
+    H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,dst_size,
                            field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, NULL);
 
     /* Get table info */
-    status=H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
+    H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
 
     /* print */
     printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
@@ -73,7 +73,6 @@ int main( void )
     hsize_t nrecords; /* Number of records to insert/delete */
     hsize_t nfields_out;
     hsize_t nrecords_out;
-    herr_t status;
 
     /* Initialize the field field_type */
     string_type = H5Tcopy( H5T_C_S1 );
@@ -88,17 +87,17 @@ int main( void )
     file_id = H5Fcreate( "ex_table_07.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make the table */
-    status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
     /* Delete records */
     start = 3;
     nrecords = 3;
-    status=H5TBdelete_record( file_id, TABLE_NAME, start, nrecords );
+    H5TBdelete_record( file_id, TABLE_NAME, start, nrecords );
 
     /* Get table info */
-    status=H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
+    H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
 
     /* print */
     printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
@@ -84,7 +84,6 @@ int main( void )
     int *fill_data = NULL;
     hsize_t start;    /* Record to start reading */
     hsize_t nrecords; /* Number of records to insert/delete */
-    herr_t status;
     hsize_t nfields_out;
     hsize_t nrecords_out;
     int i;
@@ -102,21 +101,21 @@ int main( void )
     file_id = H5Fcreate( "ex_table_08.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make the table */
-    status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
     /* Insert records */
     start = 3;
     nrecords = NRECORDS_INS;
-    status=H5TBinsert_record( file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset,
+    H5TBinsert_record( file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset,
                               dst_sizes, p_data_insert );
 
     /* read the table */
-    status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
+    H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
 
     /* get table info */
-    status=H5TBget_table_info(file_id,TABLE_NAME, &nfields_out, &nrecords_out );
+    H5TBget_table_info(file_id,TABLE_NAME, &nfields_out, &nrecords_out );
 
     /* print */
     printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
@@ -81,7 +81,6 @@ int main( void )
     hsize_t start1;   /* Record to start reading from 1st table */
     hsize_t nrecords; /* Number of records to insert */
     hsize_t start2;   /* Record to start writing in 2nd table */
-    herr_t status;
     int i;
     hsize_t nfields_out;
     hsize_t nrecords_out;
@@ -99,11 +98,11 @@ int main( void )
     file_id = H5Fcreate( "ex_table_09.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make 2 tables: TABLE2_NAME is empty */
-    status=H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
-    status=H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, NULL );
 
@@ -112,13 +111,13 @@ int main( void )
     start1 = 3;
     nrecords = NRECORDS_INS;
     start2 = 6;
-    status=H5TBadd_records_from( file_id, TABLE1_NAME, start1, nrecords, TABLE2_NAME, start2 );
+    H5TBadd_records_from( file_id, TABLE1_NAME, start1, nrecords, TABLE2_NAME, start2 );
 
     /* read TABLE2_NAME: it should have 2 more records now */
-    status=H5TBread_table( file_id, TABLE2_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
+    H5TBread_table( file_id, TABLE2_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
 
     /* Get table info */
-    status=H5TBget_table_info (file_id,TABLE2_NAME, &nfields_out, &nrecords_out );
+    H5TBget_table_info (file_id,TABLE2_NAME, &nfields_out, &nrecords_out );
 
     /* print */
     printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
@@ -77,7 +77,6 @@ int main( void )
     hsize_t chunk_size = 10;
     int compress = 0;
     int *fill_data = NULL;
-    herr_t status;
     hsize_t nfields_out;
     hsize_t nrecords_out;
     int i;
@@ -95,22 +94,22 @@ int main( void )
     file_id = H5Fcreate( "ex_table_10.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make two tables */
-    status=H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE1_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
-    status=H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE2_NAME,NFIELDS,NRECORDS,
                            dst_size,field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
     /* Combine the two tables into a third in the same file */
-    status=H5TBcombine_tables( file_id, TABLE1_NAME, file_id, TABLE2_NAME, TABLE3_NAME );
+    H5TBcombine_tables( file_id, TABLE1_NAME, file_id, TABLE2_NAME, TABLE3_NAME );
 
     /* read the combined table */
-    status=H5TBread_table( file_id, TABLE3_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
+    H5TBread_table( file_id, TABLE3_NAME, dst_size, dst_offset, dst_sizes, dst_buf );
 
     /* Get table info */
-    status=H5TBget_table_info (file_id,TABLE3_NAME, &nfields_out, &nrecords_out );
+    H5TBget_table_info (file_id,TABLE3_NAME, &nfields_out, &nrecords_out );
 
     /* print */
     printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
@@ -67,10 +67,9 @@ int main( void )
     hid_t file_id;
     hsize_t chunk_size = 10;
     int compress = 0;
-    Particle1 fill_data[1] = { "no data",-1,-1, -99.0f, -99.0 };
+    Particle1 fill_data[1] = { {"no data",-1,-1, -99.0f, -99.0} };
     int fill_data_new[1] = { -100 };
     hsize_t position;
-    herr_t status;
     hsize_t nfields_out;
     hsize_t nrecords_out;
 
@@ -91,17 +90,17 @@ int main( void )
     file_id = H5Fcreate( "ex_table_11.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make the table */
-    status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
+    H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,
                            dst_size1,field_names, dst_offset1, field_type,
                            chunk_size, fill_data, compress, p_data );
 
     /* Insert the new field at the end of the field list */
     position = NFIELDS;
-    status=H5TBinsert_field( file_id, TABLE_NAME, "New Field", field_type_new, position,
+    H5TBinsert_field( file_id, TABLE_NAME, "New Field", field_type_new, position,
                              fill_data_new, data );
 
     /* Get table info */
-    status=H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
+    H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
 
     /* print */
     printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);
@@ -70,7 +70,6 @@ int main( void )
     int compress = 0;
     Particle fill_data[1] =
         { {"no data",-1,-1, -99.0f, -99.0} };
-    herr_t status;
     hsize_t nfields_out;
     hsize_t nrecords_out;
 
@@ -87,15 +86,15 @@ int main( void )
     file_id = H5Fcreate( "ex_table_12.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );
 
     /* Make a table */
-    status=H5TBmake_table( "Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size,
+    H5TBmake_table( "Table Title", file_id, TABLE_NAME, NFIELDS, NRECORDS, dst_size,
                            field_names, dst_offset, field_type,
                            chunk_size, fill_data, compress, p_data );
 
     /* Delete the field */
-    status=H5TBdelete_field( file_id, TABLE_NAME, "Pressure" );
+    H5TBdelete_field( file_id, TABLE_NAME, "Pressure" );
 
     /* Get table info */
-    status=H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
+    H5TBget_table_info (file_id,TABLE_NAME, &nfields_out, &nrecords_out );
 
     /* print */
     printf ("Table has %d fields and %d records\n",(int)nfields_out,(int)nrecords_out);