[svn-r958] Purpose:

New feature

Solution:
    Testing of Extendable Dataset support in the h5toh4 converter when
    the extendable dimension is the first dimension.

Platform tested:
    Solaris2.5
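
For context: the converter maps an HDF5 dataset whose first dimension has maxdims[0] == H5S_UNLIMITED to an HDF4 SDS created with dim_sizes[0] = 0 (HDF4's SD_UNLIMITED), which is what the new tdset2.h5 test exercises. The C sketch below is illustrative only and is not part of the commit; the file name "example.hdf" and the dataset shape are placeholders, and it assumes the standard HDF4 mfhdf SD API.

/* Illustrative sketch (not part of this commit): create an HDF4 SDS whose
 * first dimension is unlimited, the way the converter does for an HDF5
 * dataset with maxdims[0] == H5S_UNLIMITED, then append along that axis. */
#include <stdio.h>
#include "mfhdf.h"

int main(void)
{
    int32 sd_id, sds_id;
    int32 dim_sizes[2], start[2], edges[2];
    int32 buf[10][20];
    int   i, j;

    for (i = 0; i < 10; i++)
        for (j = 0; j < 20; j++)
            buf[i][j] = j;

    sd_id = SDstart("example.hdf", DFACC_CREATE);   /* placeholder file name */

    dim_sizes[0] = SD_UNLIMITED;   /* 0: the first dimension grows on demand */
    dim_sizes[1] = 20;
    sds_id = SDcreate(sd_id, "dset1", DFNT_INT32, 2, dim_sizes);

    /* write the 10 records that currently exist */
    start[0] = 0;  start[1] = 0;
    edges[0] = 10; edges[1] = 20;
    SDwritedata(sds_id, start, NULL, edges, (VOIDP)buf);

    /* appending later is just another write starting past the current size */
    start[0] = 10;
    SDwritedata(sds_id, start, NULL, edges, (VOIDP)buf);

    SDendaccess(sds_id);
    SDend(sd_id);
    return 0;
}
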
This commit is contained in:
Paul Harten 1998-12-01 16:56:14 -05:00
parent 5bbc648241
commit bdc304d612
11 changed files with 936 additions and 238 deletions


@@ -534,295 +534,300 @@ hid_t fieldtype;
int32 order;
off_t offset;
sd_id = op_data->sd_id;
/* hard link */
if ((status = H5Gget_objinfo(did, ".", TRUE, &statbuf)) != SUCCEED ) {
fprintf(stderr,"Error: H5Gget_objinfo() did not work\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
return (status);
fprintf(stderr,"Error: H5Gget_objinfo() did not work\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
return (status);
}
if ((type = H5Dget_type(did)) <= 0) {
fprintf(stderr, "Error: H5Dget_type() didn't return appropriate value.\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
return status;
}
if ((space = H5Dget_space(did)) <= 0) {
fprintf(stderr, "Error: H5Dget_space() didn't return appropriate value.\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
return status;
}
if ((n_values = H5Sget_simple_extent_npoints(space)) <= 0) {
fprintf(stderr, "Error: H5Sget_simple_extent_npoints() returned inappropriate value.\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
return status;
}
if ((ndims = H5Sget_simple_extent_dims(space,dims,maxdims)) < 0 ) {
fprintf(stderr, "Error: Problems getting ndims, dims, and maxdims of dataset\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = ndims;
return status;
}
if ((class = H5Tget_class(type)) < 0 ) {
fprintf(stderr,"Error: problem with getting class\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = class;
return status;
}
switch (class) {
case H5T_INTEGER:
case H5T_FLOAT:
if ((h4_type = h5type_to_h4type(type)) == FAIL ) {
fprintf(stderr, "Error: Problems translating h5 type to h4 type\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
if ((mem_type = h4type_to_memtype(h4_type)) == FAIL ) {
fprintf(stderr, "Error: Problems translating h4 type to mem type\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
return status;
}
if ((typesize = H5Tget_size(mem_type)) <= 0) {
fprintf(stderr, "Error: H5Tget_size() didn't return appropriate value.\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
if ((buffer = HDmalloc(n_values*typesize)) == NULL) {
fprintf(stderr, "Error: Problems with HDmalloc of memory space\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
if ((status = H5Dread(did, mem_type, space, space, H5P_DEFAULT, buffer)) != SUCCEED) {
fprintf(stderr, "Error: Problems with H5Dread\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
for (i=0;i<ndims;i++) {
if (maxdims[i] == H5S_UNLIMITED) {
if ( i == 0 ) {
dim_sizes[0] = 0; /* this is how HDF4 communicates unlimited dimension */
} else {
dim_sizes[i] = (int32)dims[i];
}
} else {
dim_sizes[i] = (int32)maxdims[i];
}
start[i] = 0;
edges[i] = (int32)dims[i];
}
if ((sds_id = SDcreate(sd_id, name, h4_type, ndims, dim_sizes)) <= 0 ) {
fprintf(stderr, "Error: Unable to create SDS %s.\n",name);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
op_data->sds_id = sds_id;
if ((status = SDwritedata(sds_id, start, NULL, edges, buffer)) != SUCCEED ) {
fprintf(stderr, "Error: Unable to write SDS %s.\n",name);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
}
if ((status = H5Aiterate(did, NULL, (H5G_operator_t)convert_attr, op_data)) < 0 ) {
fprintf(stderr,"Error: iterate over attributes\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
}
if ((status = SDendaccess(sds_id)) != SUCCEED ) {
fprintf(stderr, "Error: Unable to end access to SDS %s.\n",name);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
}
break;
case H5T_TIME:
fprintf(stderr,"Error: H5T_TIME not yet implemented.\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
case H5T_STRING:
fprintf(stderr,"Error: H5T_STRING not yet implemented.\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
case H5T_BITFIELD:
fprintf(stderr,"Error: H5T_BITFIELD not yet implemented.\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
case H5T_OPAQUE:
fprintf(stderr,"Error: H5T_OPAQUE not yet implemented.\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
case H5T_COMPOUND:
if (ndims==1) {
if ((nmembers = H5Tget_nmembers(type)) <= 0 ) {
fprintf(stderr, "Error: Unable to get information about compound datatype %d\n",nmembers);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
for (idx=0;idx<nmembers;idx++) {
if ((ndimf = H5Tget_member_dims(type, idx, dimf, permf)) > 1 ) {
fprintf(stdout,"Warning: H5 datasets of H5T_COMPOUND type with ndims = 1, whose members\n");
fprintf(stdout,"Warning: of the H5T_COMPOUND type have rank > 1 are not converted.\n");
break;
}
}
hfile_id = op_data->hfile_id;
if ((vdata_id = VSattach(hfile_id, -1, "w")) <= 0 ) {
fprintf(stderr, "Error: Unable to create vdata %s.\n",name);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
op_data->vdata_id = vdata_id;
if ((status = VSsetname(vdata_id, name)) != SUCCEED ) {
fprintf(stderr, "Error: Unable to set vdata name %s.\n",name);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((status = VSsetclass(vdata_id, "HDF5")) != SUCCEED ) {
fprintf(stderr, "Error: Unable to set class on vdata %s\n", name);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
for (idx=0;idx<nmembers;idx++) {
if ((ndimf = H5Tget_member_dims(type, idx, dimf, NULL)) < 0 ) {
fprintf(stderr, "Error: field rank for H5T_COMPOUND type %d, idx %d < 0\n", type, idx);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((fieldname = H5Tget_member_name(type, idx)) == NULL ) {
fprintf(stderr, "Error: Unable to get fieldname for compound type %d, idx %d\n", type, idx);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((fieldtype = H5Tget_member_type(type, idx)) < 0 ) {
fprintf(stderr,"Error: H5 datasets of H5T_COMPOUND type with fieldtype %d, idx %d.\n",fieldtype,idx);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((h4_type = h5type_to_h4type(fieldtype)) < 0 ) {
fprintf(stderr, "Error: Problems translating h5 type to h4 type\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
/*
if ((mem_type = h4type_to_memtype(h4_type)) == FAIL ) {
fprintf(stderr, "Error: Problems translating h4 type to mem type\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
return FAIL;
}
*/
if (ndimf == 0 ) {
order = 1;
} else {
order = dimf[0];
}
if ((status = VSfdefine(vdata_id, fieldname, h4_type, order)) != SUCCEED ) {
fprintf(stderr, "Error: Unable to set field %d\n", idx);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
strcat(fieldname_list,fieldname);
if (idx<nmembers-1) {
strcat(fieldname_list,", ");
}
HDfree(fieldname);
}
if ((status = VSsetfields(vdata_id, fieldname_list)) != SUCCEED ) {
fprintf(stderr, "Error: Unable to set fieldname list %s\n", name);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((status = VSsetinterlace(vdata_id, FULL_INTERLACE)) != SUCCEED ) {
fprintf(stderr, "Error: Unable to set FULL_INTERLACE mode, status %d\n", (int)status);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((type2 = H5Tcopy(type)) <= 0 ) {
fprintf(stderr, "Error: H5Tcopy did not SUCCEED, type %d\n", type2);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((status = H5Tpack(type2)) != SUCCEED ) {
fprintf(stderr, "Error: H5Tpack did not SUCCEED, status %d\n", (int)status);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((recsize = H5Tget_size(type2)) <= 0 ) {
fprintf(stderr, "Error: Unable to get record size %d\n", (int)recsize);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
/*
Since the space is rank 1, n_records does not depend on maxdims.
*/
n_records = n_values;
if ((buffer = HDmalloc(n_records*recsize)) == NULL) {
fprintf(stderr, "Error: Problems with HDmalloc of memory space\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
if ((status = H5Dread(did, type2, space, space, H5P_DEFAULT, buffer)) != SUCCEED) {
fprintf(stderr, "Error: Problems with H5Aread\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
fprintf(stderr, "Error: Problems with H5Dread\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
status = FAIL;
break;
}
if ((record_pos = VSseek(vdata_id, 0)) != 0 ) {
fprintf(stderr, "Error: Could not seek the beginning of the Vdata, %d\n", (int)record_pos);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((num_of_recs = VSwrite(vdata_id, (void *)buffer, n_records, FULL_INTERLACE)) != n_records ) {
fprintf(stderr, "Error: Only able to write %d of %d records\n", (int)num_of_recs, (int)n_records);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
/* there are only vdata attributes, no field attributes */
if ((status = H5Aiterate(did, NULL, (H5G_operator_t)convert_attr, op_data)) < 0 ) {
fprintf(stderr,"Error: iterate over attributes\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
}
if ((status = VSdetach(vdata_id)) != SUCCEED ) {
fprintf(stderr, "Error: Unable to detach to vdata %s.\n",name);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
if ((status = H5Tclose(type2)) != SUCCEED ) {
fprintf(stderr, "Error: H5Tclose did not SUCCEED, status %d\n", (int)status);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
break;
}
} else {
fprintf(stdout,"Warning: H5 datasets of H5T_COMPOUND type with ndims > 1 are not converted.\n");
}
break;
default:
fprintf(stderr,"Error: %d class not found\n",class);
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
}
if ((status = H5Tclose(type)) < 0 ) {
fprintf(stderr,"Error: closing type\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
}
if ((status = H5Sclose(space)) < 0 ) {
fprintf(stderr,"Error: closing space\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_dataset", __FILE__, __LINE__);
}
HDfree(buffer);
return status;
}
@@ -889,7 +894,7 @@ int32 n_values;
if ((mem_type = h4type_to_memtype(h4_type)) == FAIL ) {
fprintf(stderr, "Error: Problems translating h4 type to mem type\n");
DEBUG_PRINT("Error detected in %s() [%s line %d]\n", "convert_attr", __FILE__, __LINE__);
status = FAIL;
return status;
}
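
As a companion to the H5T_COMPOUND branch above, the sketch below shows in miniature the HDF4 Vdata write path that branch drives: one VSfdefine per compound member, a VSsetfields call with the assembled field list, and a single full-interlace VSwrite of packed records (whose size corresponds to H5Tget_size on the H5Tpack'ed type). This is an illustrative sketch, not code from the commit; the struct, field names, and file name are placeholders, and it assumes an int32/float32 pair packs to 8 bytes.

/* Illustrative sketch (not part of this commit): write packed records to an
 * HDF4 Vdata, mirroring what convert_dataset() does for H5T_COMPOUND data. */
#include <stdio.h>
#include "hdf.h"

typedef struct {        /* packed record: 4 + 4 bytes, matching VSfdefine below */
    int32   a;
    float32 b;
} rec_t;

int main(void)
{
    int32 file_id, vdata_id;
    rec_t recs[6];
    int   i;

    for (i = 0; i < 6; i++) {
        recs[i].a = i;
        recs[i].b = (float32)(i * 1.1);
    }

    file_id = Hopen("example.hdf", DFACC_CREATE, 0);   /* placeholder file name */
    Vstart(file_id);

    vdata_id = VSattach(file_id, -1, "w");
    VSsetname(vdata_id, "dset2");
    VSsetclass(vdata_id, "HDF5");

    /* one VSfdefine per compound member, then the comma-separated field list */
    VSfdefine(vdata_id, "a_name", DFNT_INT32,   1);
    VSfdefine(vdata_id, "b_name", DFNT_FLOAT32, 1);
    VSsetfields(vdata_id, "a_name,b_name");

    /* one full-interlace write of all packed records */
    VSwrite(vdata_id, (uint8 *)recs, 6, FULL_INTERLACE);

    VSdetach(vdata_id);
    Vend(file_id);
    Hclose(file_id);
    return 0;
}
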


@@ -1,6 +1,7 @@
/*
* Generate the binary hdf5 files for the h5dump tests.
*/
#include <limits.h>
#include "hdf5.h"
#define FILE1 "tgroup.h5"
@@ -10,6 +11,8 @@
#define FILE5 "thlink.h5"
#define FILE6 "tcompound.h5"
#define FILE7 "tall.h5"
#define FILE8 "tdset2.h5"
#define FILE9 "tcompound2.h5"
static void test_group(void) {
hid_t fid, group;
@@ -90,6 +93,49 @@ int i, j;
H5Fclose(fid);
}
static void test_dataset2(void) {
hid_t fid, dataset, space, create_plist;
hsize_t dims[2];
hsize_t maxdims[2];
int dset1[10][20];
double dset2[30][10];
int i, j;
fid = H5Fcreate(FILE8, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
create_plist = H5Pcreate(H5P_DATASET_CREATE);
dims[0] = 5; dims[1] = 5;
H5Pset_chunk(create_plist, 2, dims);
/* dset1 */
dims[0] = 10; dims[1] = 20;
maxdims[0] = H5S_UNLIMITED; maxdims[1] = 20;
space = H5Screate_simple(2, dims, maxdims);
dataset = H5Dcreate(fid, "/dset1", H5T_STD_I32BE, space, create_plist);
for (i = 0; i < 10; i++)
for (j = 0; j < 20; j++)
dset1[i][j] = j;
H5Dwrite(dataset, H5T_STD_I32BE, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset1);
H5Sclose(space);
H5Dclose(dataset);
/* dset2 */
dims[0] = 30; dims[1] = 10;
maxdims[0] = 30; maxdims[1] = H5S_UNLIMITED;
space = H5Screate_simple(2, dims, maxdims);
dataset = H5Dcreate(fid, "/dset2", H5T_IEEE_F64BE, space, create_plist);
for (i = 0; i < 30; i++)
for (j = 0; j < 10; j++)
dset2[i][j] = j;
H5Dwrite(dataset, H5T_IEEE_F64BE, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2);
H5Sclose(space);
H5Dclose(dataset);
H5Fclose(fid);
}
static void test_attribute(void) {
hid_t fid, root, space, attr;
@@ -329,6 +375,156 @@ hsize_t sdim = 5;
}
/*
                /
     /     |      \      \
  dset1  group1  type1  type2
           |
         dset2
*/
static void test_compound_dt2(void) { /* test compound data type */
hid_t fid, group, dataset, space, type, create_plist;
typedef struct {
int a;
float b;
double c;
} dset1_t;
dset1_t dset1[10];
typedef struct {
int a;
float b;
} dset2_t;
dset2_t dset2[10];
typedef struct {
int a[4];
float b[5][6];
} dset3_t;
typedef struct {
int a;
float b;
} dset4_t;
dset4_t dset4[10];
typedef struct {
int a;
float b;
} dset5_t;
dset5_t dset5[10];
int i, ndims;
const int perm[2];
size_t dim[2];
hsize_t sdim, maxdim;
sdim = 10;
for (i = 0; i < (int)sdim; i++) {
dset1[i].a = i;
dset1[i].b = i*i;
dset1[i].c = 1./(i+1);
dset2[i].a = i;
dset2[i].b = i+ i*0.1;
dset4[i].a = i;
dset4[i].b = i*1.0;
dset5[i].a = i;
dset5[i].b = i*1.0;
}
fid = H5Fcreate(FILE9, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
create_plist = H5Pcreate(H5P_DATASET_CREATE);
sdim = 2;
H5Pset_chunk(create_plist, 1, &sdim);
sdim = 6;
maxdim = H5S_UNLIMITED;
space = H5Screate_simple(1, &sdim, &maxdim);
type = H5Tcreate (H5T_COMPOUND, sizeof(dset1[0]));
H5Tinsert(type, "a_name", HOFFSET(dset1_t, a), H5T_STD_I32BE);
H5Tinsert(type, "b_name", HOFFSET(dset1_t, b), H5T_IEEE_F32BE);
H5Tinsert(type, "c_name", HOFFSET(dset1_t, c), H5T_IEEE_F64BE);
dataset = H5Dcreate(fid, "/dset1", type, space, create_plist);
H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset1);
H5Tclose(type);
H5Sclose(space);
H5Dclose(dataset);
sdim = 6;
maxdim = 10;
space = H5Screate_simple(1, &sdim, &maxdim);
/* shared data type 1 */
type = H5Tcreate (H5T_COMPOUND, sizeof(dset2_t));
H5Tinsert(type, "int_name", HOFFSET(dset2_t, a), H5T_STD_I32BE);
H5Tinsert(type, "float_name", HOFFSET(dset2_t, b), H5T_IEEE_F32BE);
H5Tcommit(fid, "type1", type);
group = H5Gcreate (fid, "/group1", 0);
dataset = H5Dcreate(group, "dset2", type, space, create_plist);
H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset2);
H5Tclose(type);
H5Dclose(dataset);
/* shared data type 2 */
type = H5Tcreate (H5T_COMPOUND, sizeof(dset3_t));
ndims = 1; dim[0] = 4;
H5Tinsert_array(type, "int_array", HOFFSET(dset3_t, a), ndims, dim, perm, H5T_STD_I32BE);
ndims = 2; dim[0] = 5; dim[1] = 6;
H5Tinsert_array(type, "float_array", HOFFSET(dset3_t, b), ndims, dim, perm, H5T_STD_I32BE);
H5Tcommit(fid, "type2", type);
H5Tclose(type);
/* shared data type 3 */
type = H5Tcreate (H5T_COMPOUND, sizeof(dset4_t));
H5Tinsert(type, "int", HOFFSET(dset4_t, a), H5T_STD_I32BE);
H5Tinsert(type, "float", HOFFSET(dset4_t, b), H5T_IEEE_F32BE);
H5Tcommit(group, "type3", type);
dataset = H5Dcreate(group, "dset4", type, space, create_plist);
H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset4);
H5Tclose(type);
H5Dclose(dataset);
H5Gclose(group);
/* unnamed data type */
group = H5Gcreate (fid, "/group2", 0);
type = H5Tcreate (H5T_COMPOUND, sizeof(dset5_t));
H5Tinsert(type, "int", HOFFSET(dset5_t, a), H5T_STD_I32BE);
H5Tinsert(type, "float", HOFFSET(dset5_t, b), H5T_IEEE_F32BE);
H5Tcommit(group, "type4", type);
dataset = H5Dcreate(group, "dset5", type, space, create_plist);
H5Dwrite(dataset, type, H5S_ALL, H5S_ALL, H5P_DEFAULT, dset5);
H5Gunlink(group,"type4");
H5Tclose(type);
H5Dclose(dataset);
H5Sclose(space);
H5Gclose(group);
H5Fclose(fid);
}
/*
@@ -472,8 +668,10 @@ test_group();
test_attribute();
test_softlink();
test_dataset();
test_dataset2();
test_hardlink();
test_compound_dt();
test_compound_dt2();
test_all();
return 0;
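
A quick way to confirm that test_dataset2() really produced an extendable first dimension in tdset2.h5 is to reopen the file and inspect the dataspace. The check below is an illustrative sketch using the HDF5 API of this era, as seen throughout this diff (the two-argument H5Dopen; later releases add property-list arguments); it is not part of the commit.

/* Illustrative check (not part of this commit): verify that /dset1 in
 * tdset2.h5 has an unlimited first dimension. */
#include <stdio.h>
#include "hdf5.h"

int main(void)
{
    hid_t   fid, did, sid;
    hsize_t dims[2], maxdims[2];
    int     ndims;

    fid = H5Fopen("tdset2.h5", H5F_ACC_RDONLY, H5P_DEFAULT);
    did = H5Dopen(fid, "/dset1");          /* two-argument form of this era */
    sid = H5Dget_space(did);

    ndims = H5Sget_simple_extent_dims(sid, dims, maxdims);
    if (ndims == 2 && maxdims[0] == H5S_UNLIMITED)
        printf("dset1: first dimension is unlimited (currently %lu rows)\n",
               (unsigned long)dims[0]);
    else
        printf("dset1: first dimension is NOT unlimited\n");

    H5Sclose(sid);
    H5Dclose(did);
    H5Fclose(fid);
    return 0;
}
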

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,118 @@
File name: testfiles/tcompound2.hdf
Vgroup:0
tag = 1965; reference = 2;
name = /; class = HDF5;
number of entries = 2;
number of attributes = 0
Entries:-
#0 (Vgroup)
tag = 1965;reference = 4;
number of entries = 0;
name = group1; class = HDF5
number of attributes = 0
#1 (Vgroup)
tag = 1965;reference = 10;
number of entries = 0;
name = group2; class = HDF5
number of attributes = 0
Vgroup:1
tag = 1965; reference = 4;
name = group1; class = HDF5;
number of entries = 0;
number of attributes = 0
Entries:-
None.
Vgroup:2
tag = 1965; reference = 10;
name = group2; class = HDF5;
number of entries = 0;
number of attributes = 0
Entries:-
None.
Graphical representation of the file:-
(vg#: vgroup; vd: vdata)
vg0 -- vg1
-- vg2
vg1
vg2
File name: testfiles/tcompound2.hdf
Vdata: 0
tag = 1962; reference = 3;
number of records = 6; interlace = 0;
fields = [a_name, b_name, c_name];
record size (in bytes) = 16;
name = dset1; class = HDF5;
number of attributes = 0
- field index 0: [a_name], type=24, order=1
number of attributes = 0
- field index 1: [b_name], type=5, order=1
number of attributes = 0
- field index 2: [c_name], type=6, order=1
number of attributes = 0
Loc. Data
0 0 0.000000 1.000000 ; 1 1.000000 0.500000 ; 2 4.000000 0.333333 ;
3 3 9.000000 0.250000 ; 4 16.000000 0.200000 ; 5 25.000000 0.166667 ;
Vdata: 1
tag = 1962; reference = 5;
number of records = 6; interlace = 0;
fields = [int_name, float_name];
record size (in bytes) = 8;
name = dset2; class = HDF5;
number of attributes = 0
- field index 0: [int_name], type=24, order=1
number of attributes = 0
- field index 1: [float_name], type=5, order=1
number of attributes = 0
Loc. Data
0 0 0.000000 ; 1 1.100000 ; 2 2.200000 ;
3 3 3.300000 ; 4 4.400000 ; 5 5.500000 ;
Vdata: 2
tag = 1962; reference = 7;
number of records = 6; interlace = 0;
fields = [int, float];
record size (in bytes) = 8;
name = dset4; class = HDF5;
number of attributes = 0
- field index 0: [int], type=24, order=1
number of attributes = 0
- field index 1: [float], type=5, order=1
number of attributes = 0
Loc. Data
0 0 0.000000 ; 1 1.000000 ; 2 2.000000 ;
3 3 3.000000 ; 4 4.000000 ; 5 5.000000 ;
Vdata: 3
tag = 1962; reference = 11;
number of records = 6; interlace = 0;
fields = [int, float];
record size (in bytes) = 8;
name = dset5; class = HDF5;
number of attributes = 0
- field index 0: [int], type=24, order=1
number of attributes = 0
- field index 1: [float], type=5, order=1
number of attributes = 0
Loc. Data
0 0 0.000000 ; 1 1.000000 ; 2 2.000000 ;
3 3 3.000000 ; 4 4.000000 ; 5 5.000000 ;
File name: testfiles/tcompound2.hdf

Binary file not shown.

Binary file not shown.

373 tools/testfiles/tcompound2.dmp and tools/testfiles/tdset2.dmp Normal file

@@ -0,0 +1,373 @@
File name: testfiles/tdset2.hdf
Vgroup:0
tag = 1965; reference = 2;
name = /; class = HDF5;
number of entries = 0;
number of attributes = 0
Entries:-
None.
Vgroup:1
tag = 1965; reference = 8;
name = fakeDim0; class = UDim0.0;
number of entries = 1;
number of attributes = 0
Entries:-
#0 (Vdata)
tag = 1962; reference = 7;
number of records = 1; interlace = 0;
fields = [Values];
record size (in bytes) = 4;
name = fakeDim0; class = DimVal0.1;
total number of attributes = 0.
Vgroup:2
tag = 1965; reference = 10;
name = fakeDim1; class = Dim0.0;
number of entries = 1;
number of attributes = 0
Entries:-
#0 (Vdata)
tag = 1962; reference = 9;
number of records = 1; interlace = 0;
fields = [Values];
record size (in bytes) = 4;
name = fakeDim1; class = DimVal0.1;
total number of attributes = 0.
Vgroup:3
tag = 1965; reference = 12;
name = fakeDim2; class = Dim0.0;
number of entries = 1;
number of attributes = 0
Entries:-
#0 (Vdata)
tag = 1962; reference = 11;
number of records = 1; interlace = 0;
fields = [Values];
record size (in bytes) = 4;
name = fakeDim2; class = DimVal0.1;
total number of attributes = 0.
Vgroup:4
tag = 1965; reference = 14;
name = fakeDim3; class = Dim0.0;
number of entries = 1;
number of attributes = 0
Entries:-
#0 (Vdata)
tag = 1962; reference = 13;
number of records = 1; interlace = 0;
fields = [Values];
record size (in bytes) = 4;
name = fakeDim3; class = DimVal0.1;
total number of attributes = 0.
Vgroup:5
tag = 1965; reference = 16;
name = dset1; class = Var0.0;
number of entries = 6;
number of attributes = 0
Entries:-
#0 (Vgroup)
tag = 1965;reference = 8;
number of entries = 1;
name = fakeDim0; class = UDim0.0
number of attributes = 0
#1 (Vgroup)
tag = 1965;reference = 10;
number of entries = 1;
name = fakeDim1; class = Dim0.0
number of attributes = 0
#2 (Scientific Data)
tag = 702; reference = 4;
#3 (Number type)
tag = 106; reference = 15;
#4 (SciData dimension record)
tag = 701; reference = 15;
#5 (Numeric Data Group)
tag = 720; reference = 3;
Vgroup:6
tag = 1965; reference = 18;
name = dset2; class = Var0.0;
number of entries = 6;
number of attributes = 0
Entries:-
#0 (Vgroup)
tag = 1965;reference = 12;
number of entries = 1;
name = fakeDim2; class = Dim0.0
number of attributes = 0
#1 (Vgroup)
tag = 1965;reference = 14;
number of entries = 1;
name = fakeDim3; class = Dim0.0
number of attributes = 0
#2 (Scientific Data)
tag = 702; reference = 6;
#3 (Number type)
tag = 106; reference = 17;
#4 (SciData dimension record)
tag = 701; reference = 17;
#5 (Numeric Data Group)
tag = 720; reference = 5;
Vgroup:7
tag = 1965; reference = 19;
name = tdset2.hdf; class = CDF0.0;
number of entries = 6;
number of attributes = 0
Entries:-
#0 (Vgroup)
tag = 1965;reference = 8;
number of entries = 1;
name = fakeDim0; class = UDim0.0
number of attributes = 0
#1 (Vgroup)
tag = 1965;reference = 10;
number of entries = 1;
name = fakeDim1; class = Dim0.0
number of attributes = 0
#2 (Vgroup)
tag = 1965;reference = 12;
number of entries = 1;
name = fakeDim2; class = Dim0.0
number of attributes = 0
#3 (Vgroup)
tag = 1965;reference = 14;
number of entries = 1;
name = fakeDim3; class = Dim0.0
number of attributes = 0
#4 (Vgroup)
tag = 1965;reference = 16;
number of entries = 6;
name = dset1; class = Var0.0
number of attributes = 0
#5 (Vgroup)
tag = 1965;reference = 18;
number of entries = 6;
name = dset2; class = Var0.0
number of attributes = 0
Graphical representation of the file:-
(vg#: vgroup; vd: vdata)
vg0
vg1 -- vd
vg2 -- vd
vg3 -- vd
vg4 -- vd
vg5 -- vg1 -- vd
-- vg2 -- vd
-- Scientific Data
-- Number type
-- SciData dimension record
-- Numeric Data Group
vg6 -- vg3 -- vd
-- vg4 -- vd
-- Scientific Data
-- Number type
-- SciData dimension record
-- Numeric Data Group
vg7 -- vg1 -- vd
-- vg2 -- vd
-- vg3 -- vd
-- vg4 -- vd
-- vg5 -- vg1 -- vd
-- vg2 -- vd
-- Scientific Data
-- Number type
-- SciData dimension record
-- Numeric Data Group
-- vg6 -- vg3 -- vd
-- vg4 -- vd
-- Scientific Data
-- Number type
-- SciData dimension record
-- Numeric Data Group
File name: testfiles/tdset2.hdf
Vdata: 0
tag = 1962; reference = 7;
number of records = 1; interlace = 0;
fields = [Values];
record size (in bytes) = 4;
name = fakeDim0; class = DimVal0.1;
number of attributes = 0
- field index 0: [Values], type=24, order=1
number of attributes = 0
Loc. Data
0 1 ;
Vdata: 1
tag = 1962; reference = 9;
number of records = 1; interlace = 0;
fields = [Values];
record size (in bytes) = 4;
name = fakeDim1; class = DimVal0.1;
number of attributes = 0
- field index 0: [Values], type=24, order=1
number of attributes = 0
Loc. Data
0 20 ;
Vdata: 2
tag = 1962; reference = 11;
number of records = 1; interlace = 0;
fields = [Values];
record size (in bytes) = 4;
name = fakeDim2; class = DimVal0.1;
number of attributes = 0
- field index 0: [Values], type=24, order=1
number of attributes = 0
Loc. Data
0 30 ;
Vdata: 3
tag = 1962; reference = 13;
number of records = 1; interlace = 0;
fields = [Values];
record size (in bytes) = 4;
name = fakeDim3; class = DimVal0.1;
number of attributes = 0
- field index 0: [Values], type=24, order=1
number of attributes = 0
Loc. Data
0 10 ;
File name: testfiles/tdset2.hdf
Variable Name = dset1
Index = 0
Type= 32-bit signed integer
Ref. = 3
Rank = 2
Number of attributes = 0
Dim0: Name=fakeDim0
Size = UNLIMITED (currently 10)
Scale Type = number-type not set
Number of attributes = 0
Dim1: Name=fakeDim1
Size = 20
Scale Type = number-type not set
Number of attributes = 0
Data :
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19
Variable Name = dset2
Index = 1
Type= 64-bit floating point
Ref. = 5
Rank = 2
Number of attributes = 0
Dim0: Name=fakeDim2
Size = 30
Scale Type = number-type not set
Number of attributes = 0
Dim1: Name=fakeDim3
Size = 10
Scale Type = number-type not set
Number of attributes = 0
Data :
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000
0.000000 1.000000 2.000000 3.000000 4.000000 5.000000
6.000000 7.000000 8.000000 9.000000

BIN tools/testfiles/tdset2.h5 Normal file

Binary file not shown.

Binary file not shown.


@@ -236,21 +236,25 @@ MESG 3 "$TestName"
$RM ./testfiles/*.hdf ./testfiles/*.tmp
TEST tgroup.h5
TEST tdset.h5
TEST tdset2.h5
TEST tattr.h5
TEST tslink.h5
TEST thlink.h5
TEST tcompound.h5
TEST tcompound2.h5
TEST tall.h5
$RM ./testfiles/*.tmp
TEST tgroup.h5 tgroup.hdf
TEST tdset.h5 tdset.hdf
TEST tdset2.h5 tdset2.hdf
TEST tattr.h5 tattr.hdf
TEST tslink.h5 tslink.hdf
TEST thlink.h5 thlink.hdf
TEST tcompound.h5 tcompound.hdf
TEST tcompound2.h5 tcompound2.hdf
TEST tall.h5 tall.hdf
$RM ./testfiles/*.hdf ./testfiles/*.tmp
TEST -m tgroup.h5 tdset.h5 tdset2.h5 tattr.h5 tslink.h5 thlink.h5 tcompound.h5 tcompound2.h5 tall.h5
$RM ./testfiles/*.hdf ./testfiles/*.tmp
else
MESG 3 "$TestName <<<SKIPPED>>>"