// Mirrored from https://github.com/HDFGroup/hdf5.git (synced 2024-11-27 02:10:55 +08:00, commit 89fbe00dec)
// * commit '54957d37f5aa73912763dbb6e308555e863c43f4': copyright-header updates; new release_docs
//   files added to MANIFEST; Makefile.in / H5PL.c regenerated via autogen.sh; merge of
//   ~LRKNOX/hdf5_lrk:hdf5_1_10_1 into hdf5_1_10_1; copyright headers now point at the COPYING file URL.
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
package test;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.exceptions.HDF5LibraryException;

import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
public class TestH5PL {
|
|
@Rule public TestName testname = new TestName();
|
|
private static String FILENAME = "h5_dlopenChunk.h5";
|
|
private static String DATASETNAME = "DS1";
|
|
private static final int DIM_X = 6;
|
|
private static final int DIM_Y = 8;
|
|
private static final int CHUNK_X = 4;
|
|
private static final int CHUNK_Y = 4;
|
|
private static final int RANK = 2;
|
|
private static final int NDIMS = 2;
|
|
private static final int H5Z_FILTER_DYNLIB4 = 260;
|
|
|
|
@Before
|
|
public void checkOpenIDs() {
|
|
assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
|
|
System.out.print(testname.getMethodName());
|
|
}
|
|
@After
|
|
public void nextTestName() {
|
|
System.out.println();
|
|
}
|
|
|
|
@Test
|
|
public void TestH5PLplugins() {
|
|
try {
|
|
int plugin_flags = H5.H5PLget_loading_state();
|
|
assertTrue("H5.H5PLget_loading_state: "+plugin_flags, plugin_flags == HDF5Constants.H5PL_ALL_PLUGIN);
|
|
int new_setting = plugin_flags & ~HDF5Constants.H5PL_FILTER_PLUGIN;
|
|
H5.H5PLset_loading_state (new_setting);
|
|
int changed_flags = H5.H5PLget_loading_state();
|
|
assertTrue("H5.H5PLget_loading_state: "+changed_flags, changed_flags == new_setting);
|
|
H5.H5PLset_loading_state (plugin_flags);
|
|
changed_flags = H5.H5PLget_loading_state();
|
|
assertTrue("H5.H5PLget_loading_state: "+changed_flags, changed_flags == HDF5Constants.H5PL_ALL_PLUGIN);
|
|
}
|
|
catch (Throwable err) {
|
|
err.printStackTrace();
|
|
fail("TestH5PLplugins " + err);
|
|
}
|
|
}
|
|
|
|
@Test
|
|
public void TestH5PLpaths() {
|
|
try {
|
|
int original_entries = H5.H5PLsize();
|
|
H5.H5PLappend("path_one");
|
|
int plugin_entries = H5.H5PLsize();
|
|
assertTrue("H5.H5PLsize: "+plugin_entries, (original_entries+1) == plugin_entries);
|
|
H5.H5PLprepend("path_two");
|
|
plugin_entries = H5.H5PLsize();
|
|
assertTrue("H5.H5PLsize: "+plugin_entries, (original_entries+2) == plugin_entries);
|
|
H5.H5PLinsert("path_three", original_entries);
|
|
plugin_entries = H5.H5PLsize();
|
|
assertTrue("H5.H5PLsize: "+plugin_entries, (original_entries+3) == plugin_entries);
|
|
String first_path = H5.H5PLget(original_entries);
|
|
assertTrue("First path was : "+first_path + " ",first_path.compareToIgnoreCase("path_three")==0);
|
|
H5.H5PLreplace("path_four", original_entries);
|
|
first_path = H5.H5PLget(original_entries);
|
|
assertTrue("First path changed to : "+first_path + " ",first_path.compareToIgnoreCase("path_four")==0);
|
|
H5.H5PLremove(original_entries);
|
|
first_path = H5.H5PLget(original_entries);
|
|
assertTrue("First path now : "+first_path + " ",first_path.compareToIgnoreCase("path_two")==0);
|
|
plugin_entries = H5.H5PLsize();
|
|
assertTrue("H5.H5PLsize: "+plugin_entries, (original_entries+2) == plugin_entries);
|
|
}
|
|
catch (Throwable err) {
|
|
err.printStackTrace();
|
|
fail("TestH5PLpaths " + err);
|
|
}
|
|
}
|
|
|
|
@Ignore
|
|
public void TestH5PLdlopen() {
|
|
long file_id = -1;
|
|
long filespace_id = -1;
|
|
long dataset_id = -1;
|
|
long fapl_id = -1;
|
|
long dcpl_id = -1;
|
|
try {
|
|
int[] cd_values = {9, 0, 0, 0};
|
|
int[] libversion = {0, 0, 0};
|
|
long[] dims = { DIM_X, DIM_Y };
|
|
long[] chunk_dims = { CHUNK_X, CHUNK_Y };
|
|
int[][] dset_data = new int[DIM_X][DIM_Y];
|
|
int[] mdc_nelmts = {0};
|
|
long[] rdcc_nelmts = {0};
|
|
long[] rdcc_nbytes = {0};
|
|
double[] rdcc_w0 = {0};
|
|
|
|
// Initialize data to "1", to make it easier to see the selections.
|
|
for (int indx = 0; indx < DIM_X; indx++)
|
|
for (int jndx = 0; jndx < DIM_Y; jndx++)
|
|
dset_data[indx][jndx] = 1;
|
|
|
|
// Create a new file using default properties.
|
|
try {
|
|
file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
|
|
HDF5Constants.H5P_DEFAULT);
|
|
}
|
|
catch (Exception e) {
|
|
e.printStackTrace();
|
|
fail("TestH5PLdlopen H5Fcreate:" + e);
|
|
}
|
|
|
|
// Create dataspace. Setting maximum size to NULL sets the maximum
|
|
// size to be the current size.
|
|
try {
|
|
filespace_id = H5.H5Screate_simple(RANK, dims, null);
|
|
}
|
|
catch (Exception e) {
|
|
e.printStackTrace();
|
|
fail("TestH5PLdlopen H5Screate_simple:" + e);
|
|
}
|
|
|
|
// Create the dataset creation property list.
|
|
try {
|
|
dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
|
|
}
|
|
catch (Exception e) {
|
|
e.printStackTrace();
|
|
fail("TestH5PLdlopen H5Pcreate:" + e);
|
|
}
|
|
|
|
// Set the chunk size.
|
|
try {
|
|
if (dcpl_id >= 0)
|
|
H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
|
|
}
|
|
catch (Exception e) {
|
|
e.printStackTrace();
|
|
fail("TestH5PLdlopen H5Pset_chunk:" + e);
|
|
}
|
|
|
|
try {
|
|
H5.H5get_libversion(libversion);
|
|
cd_values[1] = libversion[0];
|
|
cd_values[2] = libversion[1];
|
|
cd_values[3] = libversion[2];
|
|
if (dcpl_id >= 0)
|
|
H5.H5Pset_filter(dcpl_id, H5Z_FILTER_DYNLIB4, HDF5Constants.H5Z_FLAG_MANDATORY, 4, cd_values);
|
|
}
|
|
catch (Exception e) {
|
|
e.printStackTrace();
|
|
fail("TestH5PLdlopen H5Pset_filter:" + e);
|
|
}
|
|
|
|
// Create the chunked dataset.
|
|
try {
|
|
if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
|
|
dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_NATIVE_INT, filespace_id,
|
|
HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
|
|
}
|
|
catch (Exception e) {
|
|
e.printStackTrace();
|
|
fail("TestH5PLdlopen H5Dcreate:" + e);
|
|
}
|
|
|
|
try {
|
|
if (dataset_id >= 0)
|
|
H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
|
|
HDF5Constants.H5S_ALL, dset_data);
|
|
}
|
|
catch (Exception e) {
|
|
e.printStackTrace();
|
|
fail("TestH5PLdlopen H5Dwrite:" + e);
|
|
}
|
|
}
|
|
catch (Throwable err) {
|
|
err.printStackTrace();
|
|
fail("TestH5PLdlopen " + err);
|
|
}
|
|
finally {
|
|
// End access to the dataset and release resources used by it.
|
|
if (dcpl_id >= 0)
|
|
try {H5.H5Pclose_class(dcpl_id);} catch (Throwable err) {}
|
|
if (dataset_id >= 0)
|
|
try {H5.H5Dclose(dataset_id);} catch (Throwable err) {}
|
|
if (filespace_id >= 0)
|
|
try {H5.H5Sclose(filespace_id);} catch (Throwable err) {}
|
|
if (file_id >= 0)
|
|
try {H5.H5Fclose(file_id);} catch (Throwable err) {}
|
|
}
|
|
}
|
|
}
|