Mirror of https://github.com/HDFGroup/hdf5.git, synced 2025-03-31 17:10:47 +08:00
develop JNI export references and java updates (#467)
* OESS-98 convert plugin option to FetchContent, add tests
* Fixes for pkcfg files because of plugin option
* OESS-98 fix tools test for plugins
* Keep doxygen comments under 100 chars long - format hint
* Whitespace
* HDFFV-11144 - Reclassify CMake messages
* HDFFV-11099/11100 added help text
* Reworked switch statement to compare string instead
* Fix typo
* Update CDash mode
* Correct name of threadsafe
* Correct option name
* Undo accidental commit
* Note LLVM 10 to 11 format default changes
* Update format plugin
* Undo clang-format version 11 changes
* One more correction
* Update supported platforms
* Revert whitespace changes
* Correct whitespace
* Changes from PR#3
* HDFFV-11213 added option to control gcc10 warnings diagnostics
* HDFFV-11212 Use the new references correctly in JNI utility and tests
* format source
* Fix typo
* Add new test file
* HDFFV-11212 - update test and remove unused arg
* Minor non-space formatting changes
* Use H5I_INVALID_ID instead of "-1"
* source formatting
* add missing testfile, update jni function
* Undo commit of debug code
* remove mislocated file
* Fix h5repack test for handling of fapls and id close
* Update h5diff test files usage text
* HDFFV-11212 add new ref tests for JNI export dataset
* src format update
* Remove blank line typo
* src format typo
* long double requires %Lg
* Another long double foramt specifer S.B. %Lg
* issue with t128bit test
* Windows issue with h5dump and type.
* Fix review issues
* refactor function nesting and fix error checks
* format fixes
* Remove untested functions and javadoc quiet comments
* Restore TRY block.
* Change string append errors to memory exception
* revert to H5_JNI_FATAL_ERROR - support functions need work
* Add assertion error for h5util functions
* remove duplicate function
* format fix
* Revert HD function error handling
* Update copyright comments
* GH #386 java folder copyright corrections
* Whitespace
* GH #359 implement and fix tools 1.6 API usage
* remove excessive comments
* Flip inits to correct ifdef section
* rework ifdef to be simpler
* format issue
* Reformat ifdef inits
* remove static attribute
* format compliance
* Update names
* Revert because logic relies on float not being int
* Changes noticed from creating merge of #412
* Double underscore change
* Correct compiler version variable used
* Remove header guard underscores
* Whitespace cleanup
* Split format source and commit changes on repo push
* remove pre-split setting
* Change windows TS to use older VS.
* correct window os name
* HDFFV-11212 JNI export util and Javadoc
* Suggested review changes
* Another change found
* Committing clang-format changes

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
parent 6f760f200d
commit 748da20bbc
@@ -512,8 +512,8 @@ public class H5 implements java.io.Serializable {
*
* @param file_export_name
*            The file name to export data into.
* @param file_name
*            The name of the HDF5 file containing the dataset.
* @param file_id
*            The identifier of the HDF5 file containing the dataset.
* @param object_path
*            The full path of the dataset to be exported.
* @param binary_order
@@ -525,9 +525,30 @@ public class H5 implements java.io.Serializable {
* @exception HDF5LibraryException
*                - Error from the HDF-5 Library.
**/
public synchronized static native void H5export_dataset(String file_export_name, String file_name,
public synchronized static native void H5export_dataset(String file_export_name, long file_id,
        String object_path, int binary_order) throws HDF5LibraryException;

/**
* H5export_attribute is a utility function to save data in a file.
*
* @param file_export_name
*            The file name to export data into.
* @param dataset_id
*            The identifier of the dataset containing the attribute.
* @param attribute_name
*            The attribute to be exported.
* @param binary_order
*            99 - export data as text.
*            1 - export data as binary Native Order.
*            2 - export data as binary Little Endian.
*            3 - export data as binary Big Endian.
*
* @exception HDF5LibraryException
*                - Error from the HDF-5 Library.
**/
public synchronized static native void H5export_attribute(String file_export_name, long dataset_id,
        String attribute_name, int binary_order) throws HDF5LibraryException;

/**
* H5is_library_threadsafe Checks to see if the library was built with thread-safety enabled.
*
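The reworked H5export_dataset takes an already-open file identifier (long file_id) instead of a file name, and H5export_attribute exports an attribute of an already-open dataset. A minimal usage sketch under the new signatures; the file, dataset, and attribute names below are hypothetical, and the try/finally wrapper is illustration only, not part of the change:

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class ExportSketch {
    public static void main(String[] args) throws Exception {
        long fid = HDF5Constants.H5I_INVALID_HID;
        long did = HDF5Constants.H5I_INVALID_HID;
        try {
            // The reworked H5export_dataset takes the open file id, not a file name.
            fid = H5.H5Fopen("example.h5", HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            H5.H5export_dataset("dset.txt", fid, "/dset", 99); // 99 = export as text

            // H5export_attribute exports an attribute attached to an open dataset.
            did = H5.H5Dopen(fid, "/dset", HDF5Constants.H5P_DEFAULT);
            H5.H5export_attribute("attr.txt", did, "attr", 99);
        }
        finally {
            if (did >= 0)
                H5.H5Dclose(did);
            if (fid >= 0)
                H5.H5Fclose(fid);
        }
    }
}

Passing an open identifier rather than a path means the export reuses however the caller opened the file; the updated tests, for example, open the test files read-only (H5F_ACC_RDONLY) and pass H5fid straight through.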
@@ -55,15 +55,15 @@ public class HDF5Constants {
public static final int H5_INDEX_CRT_ORDER = H5_INDEX_CRT_ORDER();
/** indices on links, number of indices defined */
public static final int H5_INDEX_N = H5_INDEX_N();
/** */
/** Common iteration orders, Unknown order */
public static final int H5_ITER_UNKNOWN = H5_ITER_UNKNOWN();
/** */
/** Common iteration orders, Increasing order */
public static final int H5_ITER_INC = H5_ITER_INC();
/** */
/** Common iteration orders, Decreasing order */
public static final int H5_ITER_DEC = H5_ITER_DEC();
/** */
/** Common iteration orders, No particular order, whatever is fastest */
public static final int H5_ITER_NATIVE = H5_ITER_NATIVE();
/** */
/** Common iteration orders, Number of iteration orders */
public static final int H5_ITER_N = H5_ITER_N();
/** */
public static final int H5AC_CURR_CACHE_CONFIG_VERSION = H5AC_CURR_CACHE_CONFIG_VERSION();
@@ -90,32 +90,44 @@ public class HDF5GroupInfo {
linklen = 0;
}

/** fileno accessors */
/** fileno accessors
* @return the file number if successful
*/
public long[] getFileno() {
return fileno;
}

/** accessors */
/** accessors
* @return the object number if successful
*/
public long[] getObjno() {
return objno;
}

/** accessors */
/** accessors
* @return type of group if successful
*/
public int getType() {
return type;
}

/** accessors */
/** accessors
* @return the number of links in the group if successful
*/
public int getNlink() {
return nlink;
}

/** accessors */
/** accessors
* @return the modified time value if successful
*/
public long getMtime() {
return mtime;
}

/** accessors */
/** accessors
* @return a length of link name if successful
*/
public int getLinklen() {
return linklen;
}
@@ -827,12 +827,10 @@ class ArrayDescriptor {
else if (NT == 'S') {
NTsize = 2;
}
else if ((NT == 'I')
|| (NT == 'F')) {
else if ((NT == 'I') || (NT == 'F')) {
NTsize = 4;
}
else if ((NT == 'J')
|| (NT == 'D')) {
else if ((NT == 'J') || (NT == 'D')) {
NTsize = 8;
}
else if (css.startsWith("Ljava.lang.Byte")) {
@@ -925,8 +923,8 @@ class ArrayDescriptor {
System.out.println("Type: " + theType);
System.out.println("Class: " + theClass);
System.out.println("NT: " + NT + " NTsize: " + NTsize);
System.out
.println("Array has " + dims + " dimensions (" + totalSize + " bytes, " + totalElements + " elements)");
System.out.println("Array has " + dims + " dimensions (" + totalSize
+ " bytes, " + totalElements + " elements)");
int i;
for (i = 0; i <= dims; i++) {
Class tc = objs[i].getClass();
@@ -20,9 +20,9 @@ import java.io.Serializable;
*/
public class H5_ih_info_t implements Serializable {
private static final long serialVersionUID = -142238015615462707L;
/** */
public long index_size; /* btree and/or list */
/** */
/** btree and/or list size of index */
public long index_size;
/** btree and/or list size of hp */
public long heap_size;

H5_ih_info_t (long index_size, long heap_size)
@@ -52,6 +52,8 @@ void * edata;
/* Local Prototypes */
/********************/

int h5str_region_dataset(JNIEnv *env, h5str_t *out_str, H5R_ref_t *ref_vp, int expand_data);

static int h5str_dump_region_blocks(JNIEnv *env, h5str_t *str, hid_t region, hid_t region_obj,
        int expand_data);
static int h5str_dump_region_points(JNIEnv *env, h5str_t *str, hid_t region, hid_t region_obj,
@ -1098,14 +1100,15 @@ h5str_sprintf(JNIEnv *env, h5str_t *out_str, hid_t container, hid_t tid, void *i
|
||||
}
|
||||
|
||||
case H5T_REFERENCE: {
|
||||
if (H5Tequal(tid, H5T_STD_REF)) {
|
||||
hid_t new_obj_id = H5I_INVALID_HID;
|
||||
H5O_type_t obj_type = -1; /* Object type */
|
||||
H5R_type_t ref_type; /* Reference type */
|
||||
/* H5T_STD_REF */
|
||||
hid_t new_obj_id = H5I_INVALID_HID;
|
||||
H5O_type_t obj_type = -1; /* Object type */
|
||||
H5R_type_t ref_type; /* Reference type */
|
||||
|
||||
H5R_ref_t *ref_vp = (H5R_ref_t *)cptr;
|
||||
H5R_ref_t *ref_vp = (H5R_ref_t *)cptr;
|
||||
|
||||
ref_type = H5Rget_type(ref_vp);
|
||||
ref_type = H5Rget_type(ref_vp);
|
||||
if (!h5str_is_zero(ref_vp, H5Tget_size(H5T_STD_REF))) {
|
||||
switch (ref_type) {
|
||||
case H5R_OBJECT1:
|
||||
if (H5Rget_obj_type3(ref_vp, H5P_DEFAULT, &obj_type) >= 0) {
|
||||
@ -1209,18 +1212,10 @@ h5str_sprintf(JNIEnv *env, h5str_t *out_str, hid_t container, hid_t tid, void *i
|
||||
default:
|
||||
break;
|
||||
} /* end switch */
|
||||
}
|
||||
|
||||
if (H5Rdestroy(ref_vp) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
}
|
||||
else if (H5Tequal(tid, H5T_STD_REF_DSETREG)) {
|
||||
/* (H5R_DSET_REG_REF_BUF_SIZE == typeSize) */
|
||||
if (H5Rdestroy(ref_vp) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
}
|
||||
else if (H5Tequal(tid, H5T_STD_REF_OBJ)) {
|
||||
/* (H5R_OBJ_REF_BUF_SIZE == typeSize) */
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
@ -2240,39 +2235,31 @@ h5str_render_bin_output(FILE *stream, hid_t container, hid_t tid, void *_mem, hs
|
||||
}
|
||||
|
||||
case H5T_REFERENCE: {
|
||||
if (H5Tequal(tid, H5T_STD_REF)) {
|
||||
hid_t region_id = H5I_INVALID_HID;
|
||||
hid_t region_space = H5I_INVALID_HID;
|
||||
H5S_sel_type region_type;
|
||||
hid_t region_id = H5I_INVALID_HID;
|
||||
hid_t region_space = H5I_INVALID_HID;
|
||||
H5S_sel_type region_type;
|
||||
|
||||
/* Region data */
|
||||
for (block_index = 0; block_index < block_nelmts; block_index++) {
|
||||
mem = ((unsigned char *)_mem) + block_index * size;
|
||||
if ((region_id = H5Ropen_object((H5R_ref_t *)mem, H5P_DEFAULT, H5P_DEFAULT)) < 0)
|
||||
continue;
|
||||
if ((region_space = H5Ropen_region((H5R_ref_t *)mem, H5P_DEFAULT, H5P_DEFAULT)) >= 0) {
|
||||
if (!h5str_is_zero(mem, H5Tget_size(H5T_STD_REF))) {
|
||||
region_type = H5Sget_select_type(region_space);
|
||||
if (region_type == H5S_SEL_POINTS)
|
||||
ret_value = render_bin_output_region_points(stream, region_space, region_id,
|
||||
container);
|
||||
else if (region_type == H5S_SEL_HYPERSLABS)
|
||||
ret_value = render_bin_output_region_blocks(stream, region_space, region_id,
|
||||
container);
|
||||
}
|
||||
H5Sclose(region_space);
|
||||
} /* end if (region_space >= 0) */
|
||||
H5Dclose(region_id);
|
||||
/* Region data */
|
||||
for (block_index = 0; block_index < block_nelmts; block_index++) {
|
||||
mem = ((unsigned char *)_mem) + block_index * size;
|
||||
if ((region_id = H5Ropen_object((H5R_ref_t *)mem, H5P_DEFAULT, H5P_DEFAULT)) < 0)
|
||||
continue;
|
||||
if ((region_space = H5Ropen_region((H5R_ref_t *)mem, H5P_DEFAULT, H5P_DEFAULT)) >= 0) {
|
||||
if (!h5str_is_zero(mem, H5Tget_size(H5T_STD_REF))) {
|
||||
region_type = H5Sget_select_type(region_space);
|
||||
if (region_type == H5S_SEL_POINTS)
|
||||
ret_value =
|
||||
render_bin_output_region_points(stream, region_space, region_id, container);
|
||||
else if (region_type == H5S_SEL_HYPERSLABS)
|
||||
ret_value =
|
||||
render_bin_output_region_blocks(stream, region_space, region_id, container);
|
||||
}
|
||||
H5Sclose(region_space);
|
||||
} /* end if (region_space >= 0) */
|
||||
H5Dclose(region_id);
|
||||
|
||||
if (ret_value < 0)
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (H5Tequal(tid, H5T_STD_REF_DSETREG)) {
|
||||
;
|
||||
}
|
||||
else if (H5Tequal(tid, H5T_STD_REF_OBJ)) {
|
||||
;
|
||||
if (ret_value < 0)
|
||||
break;
|
||||
}
|
||||
|
||||
break;
|
||||
@ -2762,11 +2749,11 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order)
|
||||
size_t p_type_nbytes; /* size of memory type */
|
||||
|
||||
/* Stripmine info */
|
||||
unsigned char *sm_buf = NULL; /* buffer for raw data */
|
||||
hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */
|
||||
hsize_t sm_nbytes; /* bytes per stripmine */
|
||||
hsize_t sm_nelmts; /* elements per stripmine */
|
||||
hid_t sm_space = H5I_INVALID_HID; /* stripmine data space */
|
||||
void * sm_buf = NULL; /* buffer for raw data */
|
||||
hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */
|
||||
hsize_t sm_nbytes; /* bytes per stripmine */
|
||||
hsize_t sm_nelmts; /* elements per stripmine */
|
||||
hid_t sm_space = H5I_INVALID_HID; /* stripmine data space */
|
||||
|
||||
/* Hyperslab info */
|
||||
hsize_t hs_offset[H5S_MAX_RANK]; /* starting offset */
|
||||
@ -2786,142 +2773,168 @@ h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order)
|
||||
if ((f_type = H5Dget_type(dset)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
switch (binary_order) {
|
||||
case 1: {
|
||||
if ((p_type = h5str_get_native_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 2: {
|
||||
if ((p_type = h5str_get_little_endian_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 3: {
|
||||
if ((p_type = h5str_get_big_endian_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
if ((p_type = H5Tcopy(f_type)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ((f_space = H5Dget_space(dset)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if ((ndims = H5Sget_simple_extent_ndims(f_space)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if ((size_t)ndims <= (sizeof(sm_size) / sizeof(sm_size[0]))) {
|
||||
if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
|
||||
if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (H5Tequal(f_type, H5T_STD_REF_DSETREG)) {
|
||||
p_nelmts = H5Sget_simple_extent_npoints(f_space);
|
||||
if (NULL ==
|
||||
(sm_buf = (H5R_ref_t *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts)))
|
||||
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf");
|
||||
|
||||
/* Read the data */
|
||||
if (H5Dread(dset, H5T_STD_REF, H5S_ALL, H5S_ALL, H5P_DEFAULT, sm_buf) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
/* Calculate the number of elements we're going to print */
|
||||
p_nelmts = 1;
|
||||
if (binary_order == 99) {
|
||||
if (h5str_dump_simple_data(ENVONLY, stream, dset, H5T_STD_REF, sm_buf, p_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
else {
|
||||
if (h5str_render_bin_output(stream, dset, H5T_STD_REF, sm_buf, p_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
}
|
||||
else {
|
||||
switch (binary_order) {
|
||||
case 1: {
|
||||
if ((p_type = h5str_get_native_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
if (ndims > 0) {
|
||||
for (i = 0; i < (size_t)ndims; i++)
|
||||
p_nelmts *= total_size[i];
|
||||
} /* end if */
|
||||
|
||||
if (p_nelmts > 0) {
|
||||
/* Check if we have VL data in the dataset's datatype */
|
||||
if (h5str_detect_vlen(p_type) != 0)
|
||||
vl_data = 1;
|
||||
|
||||
/*
|
||||
* Determine the strip mine size and allocate a buffer. The strip mine is
|
||||
* a hyperslab whose size is manageable.
|
||||
*/
|
||||
if (!(sm_nbytes = p_type_nbytes = H5Tget_size(p_type)))
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (ndims > 0) {
|
||||
for (i = (size_t)ndims; i > 0; --i) {
|
||||
hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
|
||||
if (size == 0) /* datum size > H5TOOLS_BUFSIZE */
|
||||
size = 1;
|
||||
sm_size[i - 1] = (((total_size[i - 1]) < (size)) ? (total_size[i - 1]) : (size));
|
||||
sm_nbytes *= sm_size[i - 1];
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
if (sm_nbytes > 0) {
|
||||
if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)sm_nbytes)))
|
||||
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_dset: failed to allocate sm_buf");
|
||||
case 2: {
|
||||
if ((p_type = h5str_get_little_endian_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
sm_nelmts = sm_nbytes / p_type_nbytes;
|
||||
break;
|
||||
}
|
||||
|
||||
if ((sm_space = H5Screate_simple(1, &sm_nelmts, NULL)) < 0)
|
||||
case 3: {
|
||||
if ((p_type = h5str_get_big_endian_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
if ((p_type = H5Tcopy(f_type)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
/* The stripmine loop */
|
||||
HDmemset(hs_offset, 0, sizeof hs_offset);
|
||||
HDmemset(zero, 0, sizeof zero);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
for (elmtno = 0; elmtno < p_nelmts; elmtno += hs_nelmts) {
|
||||
/* Calculate the hyperslab size */
|
||||
if (ndims > 0) {
|
||||
for (i = 0, hs_nelmts = 1; i < (size_t)ndims; i++) {
|
||||
hs_size[i] = (((total_size[i] - hs_offset[i]) < (sm_size[i]))
|
||||
? (total_size[i] - hs_offset[i])
|
||||
: (sm_size[i]));
|
||||
hs_nelmts *= hs_size[i];
|
||||
}
|
||||
if ((size_t)ndims <= (sizeof(sm_size) / sizeof(sm_size[0]))) {
|
||||
if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (H5Sselect_hyperslab(f_space, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
/* Calculate the number of elements we're going to print */
|
||||
p_nelmts = 1;
|
||||
|
||||
if (H5Sselect_hyperslab(sm_space, H5S_SELECT_SET, zero, NULL, &hs_nelmts, NULL) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
if (ndims > 0) {
|
||||
for (i = 0; i < (size_t)ndims; i++)
|
||||
p_nelmts *= total_size[i];
|
||||
} /* end if */
|
||||
|
||||
if (p_nelmts > 0) {
|
||||
/* Check if we have VL data in the dataset's datatype */
|
||||
if (h5str_detect_vlen(p_type) != 0)
|
||||
vl_data = 1;
|
||||
|
||||
/*
|
||||
* Determine the strip mine size and allocate a buffer. The strip mine is
|
||||
* a hyperslab whose size is manageable.
|
||||
*/
|
||||
if (!(sm_nbytes = p_type_nbytes = H5Tget_size(p_type)))
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (ndims > 0) {
|
||||
for (i = (size_t)ndims; i > 0; --i) {
|
||||
hsize_t size = H5TOOLS_BUFSIZE / sm_nbytes;
|
||||
if (size == 0) /* datum size > H5TOOLS_BUFSIZE */
|
||||
size = 1;
|
||||
sm_size[i - 1] = (((total_size[i - 1]) < (size)) ? (total_size[i - 1]) : (size));
|
||||
sm_nbytes *= sm_size[i - 1];
|
||||
}
|
||||
else {
|
||||
if (H5Sselect_all(f_space) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
}
|
||||
|
||||
if (H5Sselect_all(sm_space) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
if (sm_nbytes > 0) {
|
||||
if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)sm_nbytes)))
|
||||
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_dset: failed to allocate sm_buf");
|
||||
|
||||
hs_nelmts = 1;
|
||||
}
|
||||
sm_nelmts = sm_nbytes / p_type_nbytes;
|
||||
|
||||
/* Read the data */
|
||||
if (H5Dread(dset, p_type, sm_space, f_space, H5P_DEFAULT, sm_buf) < 0)
|
||||
if ((sm_space = H5Screate_simple(1, &sm_nelmts, NULL)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (binary_order == 99) {
|
||||
if (h5str_dump_simple_data(ENVONLY, stream, dset, p_type, sm_buf, hs_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
else {
|
||||
if (h5str_render_bin_output(stream, dset, p_type, sm_buf, hs_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
/* The stripmine loop */
|
||||
HDmemset(hs_offset, 0, sizeof hs_offset);
|
||||
HDmemset(zero, 0, sizeof zero);
|
||||
|
||||
/* Reclaim any VL memory, if necessary */
|
||||
if (vl_data) {
|
||||
if (H5Treclaim(p_type, sm_space, H5P_DEFAULT, sm_buf) < 0)
|
||||
for (elmtno = 0; elmtno < p_nelmts; elmtno += hs_nelmts) {
|
||||
/* Calculate the hyperslab size */
|
||||
if (ndims > 0) {
|
||||
for (i = 0, hs_nelmts = 1; i < (size_t)ndims; i++) {
|
||||
hs_size[i] = (((total_size[i] - hs_offset[i]) < (sm_size[i]))
|
||||
? (total_size[i] - hs_offset[i])
|
||||
: (sm_size[i]));
|
||||
hs_nelmts *= hs_size[i];
|
||||
}
|
||||
|
||||
if (H5Sselect_hyperslab(f_space, H5S_SELECT_SET, hs_offset, NULL, hs_size, NULL) <
|
||||
0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (H5Sselect_hyperslab(sm_space, H5S_SELECT_SET, zero, NULL, &hs_nelmts, NULL) <
|
||||
0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
}
|
||||
else {
|
||||
if (H5Sselect_all(f_space) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (H5Sselect_all(sm_space) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
hs_nelmts = 1;
|
||||
}
|
||||
|
||||
/* Read the data */
|
||||
if (H5Dread(dset, p_type, sm_space, f_space, H5P_DEFAULT, sm_buf) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
}
|
||||
|
||||
/* Calculate the next hyperslab offset */
|
||||
for (i = (size_t)ndims, carry = 1; i > 0 && carry; --i) {
|
||||
hs_offset[i - 1] += hs_size[i - 1];
|
||||
if (binary_order == 99) {
|
||||
if (h5str_dump_simple_data(ENVONLY, stream, dset, p_type, sm_buf, hs_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
else {
|
||||
if (h5str_render_bin_output(stream, dset, p_type, sm_buf, hs_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
|
||||
if (hs_offset[i - 1] == total_size[i - 1])
|
||||
hs_offset[i - 1] = 0;
|
||||
else
|
||||
carry = 0;
|
||||
/* Reclaim any VL memory, if necessary */
|
||||
if (vl_data) {
|
||||
if (H5Treclaim(p_type, sm_space, H5P_DEFAULT, sm_buf) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
}
|
||||
|
||||
/* Calculate the next hyperslab offset */
|
||||
for (i = (size_t)ndims, carry = 1; i > 0 && carry; --i) {
|
||||
hs_offset[i - 1] += hs_size[i - 1];
|
||||
|
||||
if (hs_offset[i - 1] == total_size[i - 1])
|
||||
hs_offset[i - 1] = 0;
|
||||
else
|
||||
carry = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -2945,6 +2958,148 @@ done:
|
||||
return ret_value;
|
||||
} /* end h5str_dump_simple_dset */
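In the binary branches above (binary_order 1, 2, 3) the converted elements are handed to h5str_render_bin_output, so the export file is expected to hold the elements' raw bytes back to back. A minimal sketch of reading such an export back, assuming a little-endian export (binary_order == 2) of 4-byte integers with no header; the file name is hypothetical:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Files;
import java.nio.file.Paths;

public class ReadBinaryExport {
    public static void main(String[] args) throws IOException {
        // Raw element bytes as assumed above: no header, one element after another.
        byte[] raw = Files.readAllBytes(Paths.get("dset_le.bin"));
        ByteBuffer buf = ByteBuffer.wrap(raw).order(ByteOrder.LITTLE_ENDIAN);
        int[] values = new int[raw.length / 4];
        for (int i = 0; i < values.length; i++)
            values[i] = buf.getInt(); // reinterpret each 4-byte little-endian element
    }
}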
|
||||
|
||||
int
|
||||
h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr_id, int binary_order)
|
||||
{
|
||||
hid_t f_space = H5I_INVALID_HID; /* file data space */
|
||||
hsize_t alloc_size;
|
||||
int ndims; /* rank of dataspace */
|
||||
unsigned i; /* counters */
|
||||
hsize_t total_size[H5S_MAX_RANK]; /* total size of dataset*/
|
||||
hsize_t p_nelmts; /* total selected elmts */
|
||||
|
||||
void * sm_buf = NULL; /* buffer for raw data */
|
||||
hsize_t sm_size[H5S_MAX_RANK]; /* stripmine size */
|
||||
|
||||
int ret_value = 0;
|
||||
|
||||
/* VL data special information */
|
||||
unsigned int vl_data = 0; /* contains VL datatypes */
|
||||
hid_t p_type = H5I_INVALID_HID;
|
||||
hid_t f_type = H5I_INVALID_HID;
|
||||
|
||||
if (attr_id < 0)
|
||||
H5_BAD_ARGUMENT_ERROR(ENVONLY, "h5str_dump_simple_mem: attr ID < 0");
|
||||
|
||||
if ((f_type = H5Aget_type(attr_id)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (H5I_INVALID_HID == (f_space = H5Aget_space(attr_id)))
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if ((ndims = H5Sget_simple_extent_ndims(f_space)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (H5Tequal(f_type, H5T_STD_REF_DSETREG)) {
|
||||
p_nelmts = H5Sget_simple_extent_npoints(f_space);
|
||||
if (NULL ==
|
||||
(sm_buf = (H5R_ref_t *)HDcalloc(MAX(sizeof(unsigned), sizeof(H5R_ref_t)), (size_t)p_nelmts)))
|
||||
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf");
|
||||
|
||||
/* Read the data */
|
||||
if (H5Aread(attr_id, H5T_STD_REF, sm_buf) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (binary_order == 99) {
|
||||
if (h5str_dump_simple_data(ENVONLY, stream, attr_id, H5T_STD_REF, sm_buf, p_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
else {
|
||||
if (h5str_render_bin_output(stream, attr_id, H5T_STD_REF, sm_buf, p_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
}
|
||||
else {
|
||||
switch (binary_order) {
|
||||
case 1: {
|
||||
if ((p_type = h5str_get_native_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 2: {
|
||||
if ((p_type = h5str_get_little_endian_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
case 3: {
|
||||
if ((p_type = h5str_get_big_endian_type(f_type)) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
default: {
|
||||
if ((p_type = H5Tcopy(f_type)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if ((size_t)ndims <= (sizeof(sm_size) / sizeof(sm_size[0]))) {
|
||||
if (H5Sget_simple_extent_dims(f_space, total_size, NULL) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
/* Calculate the number of elements we're going to print */
|
||||
p_nelmts = 1;
|
||||
|
||||
if (ndims > 0) {
|
||||
for (i = 0; i < (size_t)ndims; i++)
|
||||
p_nelmts *= total_size[i];
|
||||
} /* end if */
|
||||
|
||||
if (p_nelmts > 0) {
|
||||
/* Check if we have VL data in the dataset's datatype */
|
||||
if (h5str_detect_vlen(p_type) != 0)
|
||||
vl_data = 1;
|
||||
|
||||
alloc_size = p_nelmts * H5Tget_size(p_type);
|
||||
if (NULL == (sm_buf = (unsigned char *)HDmalloc((size_t)alloc_size)))
|
||||
H5_OUT_OF_MEMORY_ERROR(ENVONLY, "h5str_dump_simple_mem: failed to allocate sm_buf");
|
||||
|
||||
/* Read the data */
|
||||
if (H5Aread(attr_id, p_type, sm_buf) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
if (binary_order == 99) {
|
||||
if (h5str_dump_simple_data(ENVONLY, stream, attr_id, p_type, sm_buf, p_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
else {
|
||||
if (h5str_render_bin_output(stream, attr_id, p_type, sm_buf, p_nelmts) < 0)
|
||||
CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE);
|
||||
}
|
||||
|
||||
/* Reclaim any VL memory, if necessary */
|
||||
if (vl_data) {
|
||||
if (H5Treclaim(p_type, f_space, H5P_DEFAULT, sm_buf) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ret_value = SUCCEED;
|
||||
|
||||
done:
|
||||
if (sm_buf)
|
||||
HDfree(sm_buf);
|
||||
if (f_space >= 0)
|
||||
H5Sclose(f_space);
|
||||
if (p_type >= 0)
|
||||
H5Tclose(p_type);
|
||||
if (f_type >= 0)
|
||||
H5Tclose(f_type);
|
||||
|
||||
return ret_value;
|
||||
}
|
||||
|
||||
htri_t
|
||||
H5Tdetect_variable_str(hid_t tid)
|
||||
{
|
||||
@ -3007,7 +3162,6 @@ h5str_dump_simple_data(JNIEnv *env, FILE *stream, hid_t container, hid_t type, v
|
||||
H5_JNI_FATAL_ERROR(ENVONLY, "h5str_dump_simple_data: HDfprintf failure");
|
||||
}
|
||||
}
|
||||
|
||||
if (HDfprintf(stream, "%s", buffer.s) < 0)
|
||||
H5_JNI_FATAL_ERROR(ENVONLY, "h5str_dump_simple_data: HDfprintf failure");
|
||||
|
||||
@ -3577,18 +3731,16 @@ done:
|
||||
/*
|
||||
* Class: hdf_hdf5lib_H5
|
||||
* Method: H5export_dataset
|
||||
* Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)V
|
||||
* Signature: (Ljava/lang/String;JLjava/lang/String;I)V
|
||||
*/
|
||||
JNIEXPORT void JNICALL
|
||||
Java_hdf_hdf5lib_H5_H5export_1dataset(JNIEnv *env, jclass clss, jstring file_export_name, jstring file_name,
|
||||
Java_hdf_hdf5lib_H5_H5export_1dataset(JNIEnv *env, jclass clss, jstring file_export_name, jlong file_id,
|
||||
jstring object_path, jint binary_order)
|
||||
{
|
||||
const char *file_export = NULL;
|
||||
const char *object_name = NULL;
|
||||
const char *fileName = NULL;
|
||||
jboolean isCopy;
|
||||
herr_t ret_val = FAIL;
|
||||
hid_t file_id = H5I_INVALID_HID;
|
||||
hid_t dataset_id = H5I_INVALID_HID;
|
||||
FILE * stream = NULL;
|
||||
|
||||
@ -3597,17 +3749,9 @@ Java_hdf_hdf5lib_H5_H5export_1dataset(JNIEnv *env, jclass clss, jstring file_exp
|
||||
if (NULL == file_export_name)
|
||||
H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5export_dataset: file_export_name is NULL");
|
||||
|
||||
if (NULL == file_name)
|
||||
H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5export_dataset: file_name is NULL");
|
||||
|
||||
if (NULL == object_path)
|
||||
H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5export_dataset: object_path is NULL");
|
||||
|
||||
PIN_JAVA_STRING(ENVONLY, file_name, fileName, NULL, "H5export_dataset: file name not pinned");
|
||||
|
||||
if ((file_id = H5Fopen(fileName, (unsigned)H5F_ACC_RDWR, (hid_t)H5P_DEFAULT)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
PIN_JAVA_STRING(ENVONLY, object_path, object_name, &isCopy, "H5export_dataset: object_path not pinned");
|
||||
|
||||
if ((dataset_id = H5Dopen2(file_id, object_name, H5P_DEFAULT)) < 0)
|
||||
@ -3634,14 +3778,65 @@ done:
|
||||
UNPIN_JAVA_STRING(ENVONLY, file_export_name, file_export);
|
||||
if (object_name)
|
||||
UNPIN_JAVA_STRING(ENVONLY, object_path, object_name);
|
||||
if (fileName)
|
||||
UNPIN_JAVA_STRING(ENVONLY, file_name, fileName);
|
||||
if (dataset_id >= 0)
|
||||
H5Dclose(dataset_id);
|
||||
if (file_id >= 0)
|
||||
H5Fclose(file_id);
|
||||
} /* end Java_hdf_hdf5lib_H5_H5export_1dataset */
|
||||
|
||||
/*
|
||||
* Class: hdf_hdf5lib_H5
|
||||
* Method: H5export_attribute
|
||||
* Signature: (Ljava/lang/String;JLjava/lang/String;I)V
|
||||
*/
|
||||
JNIEXPORT void JNICALL
|
||||
Java_hdf_hdf5lib_H5_H5export_1attribute(JNIEnv *env, jclass clss, jstring file_export_name, jlong dset_id,
|
||||
jstring attribute_name, jint binary_order)
|
||||
{
|
||||
const char *file_export = NULL;
|
||||
const char *object_name = NULL;
|
||||
jboolean isCopy;
|
||||
herr_t ret_val = FAIL;
|
||||
hid_t attr_id = H5I_INVALID_HID;
|
||||
FILE * stream = NULL;
|
||||
|
||||
UNUSED(clss);
|
||||
|
||||
if (NULL == file_export_name)
|
||||
H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5export_dataset: file_export_name is NULL");
|
||||
|
||||
if (NULL == attribute_name)
|
||||
H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5export_dataset: object_path is NULL");
|
||||
|
||||
PIN_JAVA_STRING(ENVONLY, attribute_name, object_name, &isCopy,
|
||||
"H5export_dataset: object_path not pinned");
|
||||
|
||||
if ((attr_id = H5Aopen(dset_id, object_name, H5P_DEFAULT)) < 0)
|
||||
H5_LIBRARY_ERROR(ENVONLY);
|
||||
|
||||
PIN_JAVA_STRING(ENVONLY, file_export_name, file_export, NULL,
|
||||
"H5export_dataset: file_export name not pinned");
|
||||
|
||||
if (NULL == (stream = HDfopen(file_export, "w+")))
|
||||
H5_JNI_FATAL_ERROR(ENVONLY, "HDfopen failed");
|
||||
|
||||
if ((ret_val = h5str_dump_simple_mem(ENVONLY, stream, attr_id, binary_order)) < 0)
|
||||
H5_ASSERTION_ERROR(ENVONLY, "h5str_dump_simple_dset failed");
|
||||
|
||||
if (stream) {
|
||||
HDfclose(stream);
|
||||
stream = NULL;
|
||||
}
|
||||
|
||||
done:
|
||||
if (stream)
|
||||
HDfclose(stream);
|
||||
if (file_export)
|
||||
UNPIN_JAVA_STRING(ENVONLY, file_export_name, file_export);
|
||||
if (object_name)
|
||||
UNPIN_JAVA_STRING(ENVONLY, attribute_name, object_name);
|
||||
if (attr_id >= 0)
|
||||
H5Aclose(attr_id);
|
||||
} /* end Java_hdf_hdf5lib_H5_H5export_1attribute */
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
@@ -46,6 +46,7 @@ extern size_t h5str_sprintf(JNIEnv *env, h5str_t *out_str, hid_t container, hid_
        int expand_data);
extern void h5str_array_free(char **strs, size_t len);
extern int h5str_dump_simple_dset(JNIEnv *env, FILE *stream, hid_t dset, int binary_order);
extern int h5str_dump_simple_mem(JNIEnv *env, FILE *stream, hid_t attr, int binary_order);

extern htri_t H5Tdetect_variable_str(hid_t tid);

@@ -105,9 +106,16 @@ JNIEXPORT jint JNICALL Java_hdf_hdf5lib_H5_H5Gget_1obj_1info_1max(JNIEnv *, jcla
/*
* Class:     hdf_hdf5lib_H5
* Method:    H5export_dataset
* Signature: (Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;I)V
* Signature: (Ljava/lang/String;JLjava/lang/String;I)V
*/
JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5export_1dataset(JNIEnv *, jclass, jstring, jstring, jstring,
        jint);
JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5export_1dataset(JNIEnv *, jclass, jstring, jlong, jstring, jint);

/*
* Class:     hdf_hdf5lib_H5
* Method:    H5export_attribute
* Signature: (Ljava/lang/String;JLjava/lang/String;I)V
*/
JNIEXPORT void JNICALL Java_hdf_hdf5lib_H5_H5export_1attribute(JNIEnv *, jclass, jstring, jlong, jstring,
        jint);

#endif /* H5UTIL_H__ */
@ -96,6 +96,8 @@ HDFTEST_COPY_FILE("${PROJECT_SOURCE_DIR}/h5ex_g_iterate.orig" "${PROJECT_BINARY_
|
||||
HDFTEST_COPY_FILE("${PROJECT_SOURCE_DIR}/h5ex_g_iterate.orig" "${PROJECT_BINARY_DIR}/h5ex_g_iterateO2.hdf" "${HDF5_JAVA_TEST_LIB_TARGET}_files")
|
||||
HDFTEST_COPY_FILE("${HDF5_TOOLS_DIR}/testfiles/trefer_reg.h5" "${PROJECT_BINARY_DIR}/trefer_reg.h5" "${HDF5_JAVA_TEST_LIB_TARGET}_files")
|
||||
HDFTEST_COPY_FILE("${HDF5_TOOLS_DIR}/testfiles/trefer_attr.h5" "${PROJECT_BINARY_DIR}/trefer_attr.h5" "${HDF5_JAVA_TEST_LIB_TARGET}_files")
|
||||
HDFTEST_COPY_FILE("${HDF5_TOOLS_DIR}/testfiles/tdatareg.h5" "${PROJECT_BINARY_DIR}/tdatareg.h5" "${HDF5_JAVA_TEST_LIB_TARGET}_files")
|
||||
HDFTEST_COPY_FILE("${HDF5_TOOLS_DIR}/testfiles/tattrreg.h5" "${PROJECT_BINARY_DIR}/tattrreg.h5" "${HDF5_JAVA_TEST_LIB_TARGET}_files")
|
||||
|
||||
add_custom_target(${HDF5_JAVA_TEST_LIB_TARGET}_files ALL COMMENT "Copying files needed by ${HDF5_JAVA_TEST_LIB_TARGET} tests" DEPENDS ${${HDF5_JAVA_TEST_LIB_TARGET}_files_list})
|
||||
|
||||
@ -213,6 +215,8 @@ if (HDF5_TEST_JAVA AND HDF5_TEST_SERIAL)
|
||||
HDFTEST_COPY_FILE("${PROJECT_SOURCE_DIR}/h5ex_g_iterate.orig" "${PROJECT_BINARY_DIR}/${voltest}/h5ex_g_iterateO2.hdf" "${HDF5_JAVA_TEST_LIB_TARGET}_${voltest}_files")
|
||||
HDFTEST_COPY_FILE("${HDF5_TOOLS_DIR}/testfiles/trefer_reg.h5" "${PROJECT_BINARY_DIR}/${voltest}/trefer_reg.h5" "${HDF5_JAVA_TEST_LIB_TARGET}_${voltest}_files")
|
||||
HDFTEST_COPY_FILE("${HDF5_TOOLS_DIR}/testfiles/trefer_attr.h5" "${PROJECT_BINARY_DIR}/${voltest}/trefer_attr.h5" "${HDF5_JAVA_TEST_LIB_TARGET}_${voltest}_files")
|
||||
HDFTEST_COPY_FILE("${HDF5_TOOLS_DIR}/testfiles/tdatareg.h5" "${PROJECT_BINARY_DIR}/${voltest}/tdatareg.h5" "${HDF5_JAVA_TEST_LIB_TARGET}_${voltest}_files")
|
||||
HDFTEST_COPY_FILE("${HDF5_TOOLS_DIR}/testfiles/tattrreg.h5" "${PROJECT_BINARY_DIR}/${voltest}/tattrreg.h5" "${HDF5_JAVA_TEST_LIB_TARGET}_${voltest}_files")
|
||||
|
||||
add_custom_target(${HDF5_JAVA_TEST_LIB_TARGET}_${voltest}_files ALL COMMENT "Copying files needed by ${HDF5_JAVA_TEST_LIB_TARGET} tests" DEPENDS ${${HDF5_JAVA_TEST_LIB_TARGET}_${voltest}_files_list})
|
||||
|
||||
|
@ -49,9 +49,13 @@ public class TestH5 {
|
||||
@Rule public TestName testname = new TestName();
|
||||
private static final String H5_FILE = "testData.h5";
|
||||
private static final String EXPORT_FILE = "testExport.txt";
|
||||
private static final String H5_DREG_FILE = "trefer_reg.h5";
|
||||
private static final String EXPORT_DREG_FILE = "testExportReg.txt";
|
||||
private static final String H5_AREG_FILE = "trefer_attr.h5";
|
||||
private static final String H5_REGION_FILE = "trefer_reg.h5";
|
||||
private static final String EXPORT_REGION_FILE = "testExportReg.txt";
|
||||
private static final String H5_ATTR_FILE = "trefer_attr.h5";
|
||||
private static final String EXPORT_ATTR_FILE = "testExportAttr.txt";
|
||||
private static final String H5_DREG_FILE = "tdatareg.h5";
|
||||
private static final String EXPORT_DREG_FILE = "testExportDReg.txt";
|
||||
private static final String H5_AREG_FILE = "tattrreg.h5";
|
||||
private static final String EXPORT_AREG_FILE = "testExportAReg.txt";
|
||||
private static final int DIM_X = 4;
|
||||
private static final int DIM_Y = 6;
|
||||
@ -129,7 +133,7 @@ public class TestH5 {
|
||||
public void _openH5File(String filename, String dsetname) {
|
||||
try {
|
||||
H5fid = H5.H5Fopen(filename,
|
||||
HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
|
||||
HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
|
||||
}
|
||||
catch (Throwable err) {
|
||||
err.printStackTrace();
|
||||
@ -162,6 +166,7 @@ public class TestH5 {
|
||||
@After
|
||||
public void closeH5File() throws HDF5LibraryException {
|
||||
_closeH5File();
|
||||
assertTrue("H5 open ids is 0", H5.getOpenIDCount()==0);
|
||||
System.out.println();
|
||||
}
|
||||
|
||||
@ -415,8 +420,10 @@ public class TestH5 {
|
||||
|
||||
_closeH5File();
|
||||
|
||||
_openH5File(H5_FILE, "/dset");
|
||||
|
||||
try {
|
||||
H5.H5export_dataset(EXPORT_FILE, H5_FILE, "/dset", 99);
|
||||
H5.H5export_dataset(EXPORT_FILE, H5fid, "/dset", 99);
|
||||
}
|
||||
catch (HDF5LibraryException err) {
|
||||
err.printStackTrace();
|
||||
@ -453,6 +460,86 @@ public class TestH5 {
|
||||
_deleteH5file();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testH5export_region() {
|
||||
int[] dset_data_expect = {66, 69, 72, 75, 78, 81, 96, 99, 102, 105, 108,
|
||||
111, 126, 129, 132, 135, 138, 141, 156, 159, 162, 165, 168, 171,
|
||||
186, 189, 192, 195, 198, 201, 216, 219, 222, 225, 228, 231,
|
||||
207, 66, 252, 48, 84, 96, 12, 14, 213, 99};
|
||||
int[] dset_indata = new int[DIM_BLKS+DIM_PNTS];
|
||||
String objName = "/Dataset1";
|
||||
|
||||
_openH5File(H5_REGION_FILE, objName);
|
||||
|
||||
try {
|
||||
H5.H5export_dataset(EXPORT_REGION_FILE, H5fid, objName, 99);
|
||||
}
|
||||
catch (HDF5LibraryException err) {
|
||||
err.printStackTrace();
|
||||
fail("H5export_dataset failed: " + err);
|
||||
}
|
||||
|
||||
File file = new File(EXPORT_REGION_FILE);
|
||||
|
||||
try {
|
||||
Reader reader = new FileReader(EXPORT_REGION_FILE);
|
||||
StreamTokenizer streamTokenizer = new StreamTokenizer(reader);
|
||||
int indx = 0;
|
||||
while(streamTokenizer.nextToken() != StreamTokenizer.TT_EOF){
|
||||
if(streamTokenizer.ttype == StreamTokenizer.TT_NUMBER) {
|
||||
dset_indata[indx] = (int)streamTokenizer.nval;
|
||||
indx++;
|
||||
}
|
||||
}
|
||||
reader.close();
|
||||
}
|
||||
catch (IOException err) {
|
||||
err.printStackTrace();
|
||||
fail("read file failed: " + err);
|
||||
}
|
||||
for(int row = 0; row < DIM_X; row++)
|
||||
assertTrue("testH5export_region: <"+row+">"+dset_indata[row], dset_indata[row]==dset_data_expect[row]);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testH5export_attribute() {
|
||||
int[] dset_data_expect = {0, 3, 6, 9, 1, 4, 7, 10, 2, 5, 8, 11};
|
||||
int[] dset_indata = new int[DIM_ATTR];
|
||||
String objName = "/Dataset3";
|
||||
|
||||
_openH5File(H5_ATTR_FILE, objName);
|
||||
|
||||
try {
|
||||
H5.H5export_dataset(EXPORT_ATTR_FILE, H5did, objName, 99);
|
||||
}
|
||||
catch (HDF5LibraryException err) {
|
||||
err.printStackTrace();
|
||||
fail("H5export_dataset failed: " + err);
|
||||
}
|
||||
|
||||
File file = new File(EXPORT_ATTR_FILE);
|
||||
|
||||
try {
|
||||
Reader reader = new FileReader(EXPORT_ATTR_FILE);
|
||||
StreamTokenizer streamTokenizer = new StreamTokenizer(reader);
|
||||
int indx = 0;
|
||||
int jndx = 0;
|
||||
while(streamTokenizer.nextToken() != StreamTokenizer.TT_EOF){
|
||||
if(streamTokenizer.ttype == StreamTokenizer.TT_NUMBER) {
|
||||
dset_indata[indx] = (int)streamTokenizer.nval;
|
||||
indx++;
|
||||
}
|
||||
}
|
||||
reader.close();
|
||||
}
|
||||
catch (IOException err) {
|
||||
err.printStackTrace();
|
||||
fail("read file failed: " + err);
|
||||
}
|
||||
for(int row = 0; row < DIM_X; row++)
|
||||
assertTrue("testH5export_attribute: <"+row+">"+dset_indata[row], dset_indata[row]==dset_data_expect[row]);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testH5export_regdataset() {
|
||||
int[] dset_data_expect = {66, 69, 72, 75, 78, 81, 96, 99, 102, 105, 108,
|
||||
@ -465,7 +552,7 @@ public class TestH5 {
|
||||
_openH5File(H5_DREG_FILE, objName);
|
||||
|
||||
try {
|
||||
H5.H5export_dataset(EXPORT_DREG_FILE, H5_DREG_FILE, objName, 99);
|
||||
H5.H5export_dataset(EXPORT_DREG_FILE, H5fid, objName, 99);
|
||||
}
|
||||
catch (HDF5LibraryException err) {
|
||||
err.printStackTrace();
|
||||
@ -491,23 +578,27 @@ public class TestH5 {
|
||||
fail("read file failed: " + err);
|
||||
}
|
||||
for(int row = 0; row < DIM_X; row++)
|
||||
assertTrue("H5export_dataset: <"+row+">"+dset_indata[row], dset_indata[row]==dset_data_expect[row]);
|
||||
assertTrue("testH5export_regdataset: <"+row+">"+dset_indata[row], dset_indata[row]==dset_data_expect[row]);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testH5export_attrdataset() {
|
||||
int[] dset_data_expect = {0, 3, 6, 9, 1, 4, 7, 10, 2, 5, 8, 11};
|
||||
int[] dset_indata = new int[DIM_ATTR];
|
||||
String objName = "/Dataset3";
|
||||
int[] dset_data_expect = {66, 69, 72, 75, 78, 81, 96, 99, 102, 105, 108,
|
||||
111, 126, 129, 132, 135, 138, 141, 156, 159, 162, 165, 168, 171,
|
||||
186, 189, 192, 195, 198, 201, 216, 219, 222, 225, 228, 231,
|
||||
207, 66, 252, 48, 84, 96, 12, 14, 213, 99};
|
||||
int[] dset_indata = new int[DIM_BLKS+DIM_PNTS];
|
||||
String dsetName = "/Dataset1";
|
||||
String objName = "Attribute1";
|
||||
|
||||
_openH5File(H5_AREG_FILE, objName);
|
||||
_openH5File(H5_AREG_FILE, dsetName);
|
||||
|
||||
try {
|
||||
H5.H5export_dataset(EXPORT_AREG_FILE, H5_AREG_FILE, objName, 99);
|
||||
H5.H5export_attribute(EXPORT_AREG_FILE, H5did, objName, 99);
|
||||
}
|
||||
catch (HDF5LibraryException err) {
|
||||
err.printStackTrace();
|
||||
fail("H5export_dataset failed: " + err);
|
||||
fail("H5export_attribute failed: " + err);
|
||||
}
|
||||
|
||||
File file = new File(EXPORT_AREG_FILE);
|
||||
@ -530,6 +621,6 @@ public class TestH5 {
|
||||
fail("read file failed: " + err);
|
||||
}
|
||||
for(int row = 0; row < DIM_X; row++)
|
||||
assertTrue("H5export_dataset: <"+row+">"+dset_indata[row], dset_indata[row]==dset_data_expect[row]);
|
||||
assertTrue("testH5export_attrdataset: <"+row+">"+dset_indata[row], dset_indata[row]==dset_data_expect[row]);
|
||||
}
|
||||
}
|
||||
|
@ -197,7 +197,7 @@ public class TestH5D {
|
||||
private final void _openH5file(String filename, String dsetname, long dapl) {
|
||||
try {
|
||||
H5fid = H5.H5Fopen(filename,
|
||||
HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
|
||||
HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
|
||||
}
|
||||
catch (Throwable err) {
|
||||
err.printStackTrace();
|
||||
|
@ -53,7 +53,7 @@ public class TestH5Rref {
|
||||
public void openH5file(String filename, String dsetname) {
|
||||
try {
|
||||
H5fid = H5.H5Fopen(filename,
|
||||
HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
|
||||
HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
|
||||
}
|
||||
catch (Throwable err) {
|
||||
err.printStackTrace();
|
||||
|
@ -206,7 +206,7 @@ public class TestH5T {
|
||||
|
||||
@Test
|
||||
public void testH5Tenum_functions() {
|
||||
long filetype_id =HDF5Constants.H5I_INVALID_HID;
|
||||
long filetype_id = HDF5Constants.H5I_INVALID_HID;
|
||||
String enum_type ="Enum_type";
|
||||
byte[] enum_val = new byte[1];
|
||||
String enum_name = null;
|
||||
@ -398,7 +398,7 @@ public class TestH5T {
|
||||
|
||||
@Test
|
||||
public void testH5Tcompound_functions() {
|
||||
long filetype_id =HDF5Constants.H5I_INVALID_HID;
|
||||
long filetype_id = HDF5Constants.H5I_INVALID_HID;
|
||||
|
||||
// Create a compound datatype
|
||||
try {
|
||||
|
@@ -252,6 +252,8 @@ COPY_DATAFILES_TO_BLDDIR()
$CP -f $HDFTEST_HOME/h5ex_g_iterate.orig $BLDDIR/h5ex_g_iterateO2.hdf
$CP -f $TOOLS_TESTFILES/trefer_reg.h5 $BLDDIR/trefer_reg.h5
$CP -f $TOOLS_TESTFILES/trefer_attr.h5 $BLDDIR/trefer_attr.h5
$CP -f $TOOLS_TESTFILES/tdatareg.h5 $BLDDIR/tdatareg.h5
$CP -f $TOOLS_TESTFILES/tattrreg.h5 $BLDDIR/tattrreg.h5
}

CLEAN_DATAFILES_AND_BLDDIR()
@@ -1,4 +1,5 @@
JUnit version 4.11
.testH5export_region
.testH5get_libversion_null_param
.testJ2C
.testH5export_dataset
@@ -7,6 +8,7 @@ JUnit version 4.11
.testH5garbage_collect
.testH5error_off
.testH5export_regdataset
.testH5export_attribute
.serializeToDisk
.testH5open
.testH5check_version
@@ -15,5 +17,5 @@ JUnit version 4.11

Time: XXXX

OK (13 tests)
OK (15 tests)
src/H5Spkg.h
@@ -12,12 +12,12 @@
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/*
* Programmer:  Quincey Koziol
*              Thursday, September 28, 2000
* Programmer: Quincey Koziol
*             Thursday, September 28, 2000
*
* Purpose:     This file contains declarations which are visible only within
*              the H5S package. Source files outside the H5S package should
*              include H5Sprivate.h instead.
* Purpose:    This file contains declarations which are visible only within
*             the H5S package. Source files outside the H5S package should
*             include H5Sprivate.h instead.
*/
#if !(defined H5S_FRIEND || defined H5S_MODULE)
#error "Do not include this file outside the H5S package!"
@@ -3388,7 +3388,7 @@ print_pos(diff_opt_t *opts, hsize_t idx, size_t u)
H5TOOLS_DEBUG("... sset loop:%d with curr_pos:%ld (curr_idx:%ld) - c:%ld b:%ld s:%ld", j,
        curr_pos, curr_idx, cnt_idx, blk_idx, str_idx);
dim_size = opts->dims[j]; /* Current dimension size */
// elmnt_cnt *= dim_size; /* Total number of elements in dimension */
/* elmnt_cnt *= dim_size; /* Total number of elements in dimension */
H5TOOLS_DEBUG("... sset loop:%d with elmnt_cnt:%ld - (prev_dim_size:%ld - dim_size:%ld) "
        "- str_cnt:%ld",
        j, elmnt_cnt, prev_dim_size, dim_size, str_cnt);
@@ -3411,7 +3411,7 @@ print_pos(diff_opt_t *opts, hsize_t idx, size_t u)
        "(curr_idx:%ld - data_idx:%ld)",
        i, dim_size, str_cnt, curr_idx, data_idx);
}
next_idx += dim_size * str_cnt; // + prev_dim_size;
next_idx += dim_size * str_cnt;
H5TOOLS_DEBUG("... sset loop:%d with curr_idx:%ld (next_idx:%ld)", j, curr_idx, next_idx);
str_cnt = 0;
prev_str = str_idx;