/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the files COPYING and Copyright.html.  COPYING can be found at the root   *
 * of the source code distribution tree; Copyright.html can be found at the  *
 * root level of an installed copy of the electronic HDF5 document set and   *
 * is linked from the top-level documents page.  It can also be found at     *
 * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
 * access to either file, you may request a copy from help@hdfgroup.org.     *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/************************************************************
  This example shows how to read and write data to a compact
  dataset.  The program first writes integers to a compact
  dataset with dataspace dimensions of DIM_X x DIM_Y, then
  closes the file.  Next, it reopens the file, reads back
  the data, and outputs it to the screen.
 ************************************************************/

package examples.datasets;

import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class H5Ex_D_Compact {
    private static String FILENAME = "H5Ex_D_Compact.h5";
    private static String DATASETNAME = "DS1";
    private static final int DIM_X = 4;
    private static final int DIM_Y = 7;
    private static final int RANK = 2;

    // Values for the dataset storage layout.
    enum H5D_layout {
        H5D_LAYOUT_ERROR(-1), H5D_COMPACT(0), H5D_CONTIGUOUS(1), H5D_CHUNKED(2), H5D_NLAYOUTS(3);

        private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();

        static {
            for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
                lookup.put(s.getCode(), s);
        }

        private int code;

        H5D_layout(int layout_type) {
            this.code = layout_type;
        }

        public int getCode() {
            return this.code;
        }

        public static H5D_layout get(int code) {
            return lookup.get(code);
        }
    }

    private static void writeCompact() {
        long file_id = -1;
        long filespace_id = -1;
        long dataset_id = -1;
        long dcpl_id = -1;
        long[] dims = { DIM_X, DIM_Y };
        int[][] dset_data = new int[DIM_X][DIM_Y];

        // Initialize data.
        for (int indx = 0; indx < DIM_X; indx++)
            for (int jndx = 0; jndx < DIM_Y; jndx++)
                dset_data[indx][jndx] = indx * jndx - jndx;
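
        // The loop above fills dset_data[i][j] with i * j - j, so the rows
        // written (and later read back) are:
        //   [ 0 -1 -2 -3 -4 -5 -6 ]
        //   [ 0  0  0  0  0  0  0 ]
        //   [ 0  1  2  3  4  5  6 ]
        //   [ 0  2  4  6  8 10 12 ]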

        // Create a new file using default properties.
        try {
            file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
                    HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create dataspace. Passing null for the maximum size sets the maximum
        // size to be the current size.
        try {
            filespace_id = H5.H5Screate_simple(RANK, dims, null);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the dataset creation property list.
        try {
            dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
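
        // Note: compact storage keeps the dataset's raw data in the object
        // header, so the total amount of data is limited to about 64 KB per
        // dataset.  The 4 x 7 array of ints used here fits easily within that.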
        // Set the layout to compact.
        try {
            if (dcpl_id >= 0)
                H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Create the dataset using the compact dataset creation property list
        // (all other properties are the defaults).
        try {
            if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
                dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
                        HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
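
        // The buffer is described in memory as native ints (H5T_NATIVE_INT);
        // the library converts the values to the file datatype (H5T_STD_I32LE)
        // as they are written.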
        // Write the data to the dataset.
        try {
            if (dataset_id >= 0)
                H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // End access to the dataset and release resources used by it.
        try {
            if (dcpl_id >= 0)
                H5.H5Pclose(dcpl_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (filespace_id >= 0)
                H5.H5Sclose(filespace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static void readCompact() {
        long file_id = -1;
        long filespace_id = -1;
        long dataset_id = -1;
        long dcpl_id = -1;
        int[][] dset_data = new int[DIM_X][DIM_Y];

        // Open the file using the default properties.
        try {
            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Open an existing dataset.
        try {
            if (file_id >= 0)
                dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
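
        // H5Dget_create_plist returns a copy of the dataset's creation property
        // list; the caller is responsible for closing it, which is done further
        // below with H5Pclose.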
        // Retrieve the dataset creation property list.
        try {
            if (dataset_id >= 0)
                dcpl_id = H5.H5Dget_create_plist(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
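
        // For the file produced by writeCompact above, the layout reported here
        // is H5D_COMPACT.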
        // Print the storage layout.
        try {
            if (dcpl_id >= 0) {
                int layout_type = H5.H5Pget_layout(dcpl_id);
                System.out.print("Storage layout for " + DATASETNAME + " is: ");
                switch (H5D_layout.get(layout_type)) {
                case H5D_COMPACT:
                    System.out.println("H5D_COMPACT");
                    break;
                case H5D_CONTIGUOUS:
                    System.out.println("H5D_CONTIGUOUS");
                    break;
                case H5D_CHUNKED:
                    System.out.println("H5D_CHUNKED");
                    break;
                case H5D_LAYOUT_ERROR:
                    break;
                case H5D_NLAYOUTS:
                    break;
                default:
                    break;
                }
                System.out.println();
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Read the data using the default properties.
        try {
            if (dataset_id >= 0)
                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
                        HDF5Constants.H5P_DEFAULT, dset_data);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Output the data to the screen.
        System.out.println("Data for " + DATASETNAME + " is: ");
        for (int indx = 0; indx < DIM_X; indx++) {
            System.out.print(" [ ");
            for (int jndx = 0; jndx < DIM_Y; jndx++)
                System.out.print(dset_data[indx][jndx] + " ");
            System.out.println("]");
        }
        System.out.println();

        // End access to the dataset and release resources used by it.
        try {
            if (dcpl_id >= 0)
                H5.H5Pclose(dcpl_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        try {
            if (dataset_id >= 0)
                H5.H5Dclose(dataset_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
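
        // filespace_id is never assigned in this method (it stays -1), so the
        // close below is effectively a no-op; it is kept to mirror the cleanup
        // in writeCompact.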
        try {
            if (filespace_id >= 0)
                H5.H5Sclose(filespace_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }

        // Close the file.
        try {
            if (file_id >= 0)
                H5.H5Fclose(file_id);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) {
        H5Ex_D_Compact.writeCompact();
        H5Ex_D_Compact.readCompact();
    }
}