下面是一个Java读写.hdf5多数据集，且数据集下面有多个Attributes的例子：
```java
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;

import ncsa.hdf.hdf5lib.H5;
import ncsa.hdf.hdf5lib.HDF5Constants;
import ncsa.hdf.hdf5lib.exceptions.HDF5Exception;
public class HDF5Example {
public static void main(String[] args) {
String filename = "example.h5";
// Create a new HDF5 file
createHDF5File(filename);
// Write data to multiple datasets with attributes
writeDataToHDF5File(filename);
// Read data from multiple datasets with attributes
readDataFromHDF5File(filename);
}
public static void createHDF5File(String filename) {
try {
// Create a new HDF5 file
int file_id = H5.H5Fcreate(filename, H5.H5F_ACC_TRUNC, H5.H5P_DEFAULT, H5.H5P_DEFAULT);
H5.H5Fclose(file_id);
System.out.println("Created HDF5 file: " + filename);
} catch (HDF5Exception e) {
System.out.println("Error creating HDF5 file: " + e.getMessage());
}
}
public static void writeDataToHDF5File(String filename) {
try {
// Open the HDF5 file for writing
int file_id = H5.H5Fopen(filename, H5.H5F_ACC_RDWR, H5.H5P_DEFAULT);
// Create a group for the datasets
int group_id = H5.H5Gcreate(file_id, "/data", H5.H5P_DEFAULT, H5.H5P_DEFAULT, H5.H5P_DEFAULT);
// Write data to the first dataset
int[] data1 = {1, 2, 3, 4, 5};
long[] dims1 = {5};
int dataspace_id1 = H5.H5Screate_simple(1, dims1, null);
int dataset_id1 = H5.H5Dcreate(group_id, "dataset1", H5.H5T_STD_I32LE, dataspace_id1, H5.H5P_DEFAULT, H5.H5P_DEFAULT, H5.H5P_DEFAULT);
H5.H5Dwrite(dataset_id1, H5.H5T_NATIVE_INT, H5.H5S_ALL, H5.H5S_ALL, H5.H5P_DEFAULT, data1);
// Add attributes to the first dataset
int attribute_id1 = H5.H5Acreate(dataset_id1, "attribute1", H5.H5T_STD_I32LE, dataspace_id1, H5.H5P_DEFAULT, H5.H5P_DEFAULT);
int[] attribute_data1 = {1};
H5.H5Awrite(attribute_id1, H5.H5T_NATIVE_INT, attribute_data1);
int attribute_id2 = H5.H5Acreate(dataset_id1, "attribute2", H5.H5T_STD_I32LE, dataspace_id1, H5.H5P_DEFAULT, H5.H5P_DEFAULT);
int[] attribute_data2 = {2};
H5.H5Awrite(attribute_id2, H5.H5T_NATIVE_INT, attribute_data2);
// Close the first dataset
H5.H5Aclose(attribute_id1);
H5.H5Aclose(attribute_id2);
H5.H5Dclose(dataset_id1);
// Write data to the second dataset
float[] data2 = {1.1f, 2.2f, 3.3f, 4.4f, 5.5f};
long[] dims2 = {5};
int dataspace_id2 = H5.H5Screate_simple(1, dims2, null);
int dataset_id2 = H5.H5Dcreate(group_id, "dataset2", H5.H5T_IEEE_F32LE, dataspace_id2, H5.H5P_DEFAULT, H5.H5P_DEFAULT, H5.H5P_DEFAULT);
H5.H5Dwrite(dataset_id2, H5.H5T_NATIVE_FLOAT, H5.H5S_ALL, H5.H5S_ALL, H5.H5P_DEFAULT, data2);
// Add attributes to the second dataset
int attribute_id3 = H5.H5Acreate(dataset_id2, "attribute3", H5.H5T_IEEE_F32LE, dataspace_id2, H5.H5P_DEFAULT, H5.H5P_DEFAULT);
float[] attribute_data3 = {1.1f};
H5.H5Awrite(attribute_id3, H5.H5T_NATIVE_FLOAT, attribute_data3);
int attribute_id4 = H5.H5Acreate(dataset_id2, "attribute4", H5.H5T_IEEE_F32LE, dataspace_id2, H5.H5P_DEFAULT, H5.H5P_DEFAULT);
float[] attribute_data4 = {2.2f};
H5.H5Awrite(attribute_id4, H5.H5T_NATIVE_FLOAT, attribute_data4);
// Close the second dataset
H5.H5Aclose(attribute_id3);
H5.H5Aclose(attribute_id4);
H5.H5Dclose(dataset_id2);
// Close the group
H5.H5Gclose(group_id);
// Close the HDF5 file
H5.H5Fclose(file_id);
System.out.println("Wrote data to HDF5 file: " + filename);
} catch (HDF5Exception e) {
System.out.println("Error writing data to HDF5 file: " + e.getMessage());
}
}
public static void readDataFromHDF5File(String filename) {
try {
// Open the HDF5 file for reading
int file_id = H5.H5Fopen(filename, H5.H5F_ACC_RDONLY, H5.H5P_DEFAULT);
// Open the group for the datasets
int group_id = H5.H5Gopen(file_id, "/data", H5.H5P_DEFAULT);
// Read data from the first dataset
int dataset_id1 = H5.H5Dopen(group_id, "dataset1", H5.H5P_DEFAULT);
int dataspace_id1 = H5.H5Dget_space(dataset_id1);
long[] dims1 = new long[1];
H5.H5Sget_simple_extent_dims(dataspace_id1, dims1, null);
int[] data1 = new int[(int)dims1[0]];
H5.H5Dread(dataset_id1, H5.H5T_NATIVE_INT, H5.H5S_ALL, H5.H5S_ALL, H5.H5P_DEFAULT, data1);
// Read attributes from the first dataset
int attribute_id1 = H5.H5Aopen(dataset_id1, "attribute1", H5.H5P_DEFAULT);
int[] attribute_data1 = new int[1];
H5.H5Aread(attribute_id1, H5.H5T_NATIVE_INT, attribute_data1);
int attribute_id2 = H5.H5Aopen(dataset_id1, "attribute2", H5.H5P_DEFAULT);
int[] attribute_data2 = new int[1];
H5.H5Aread(attribute_id2, H5.H5T_NATIVE_INT, attribute_data2);
// Close the first dataset and attributes
H5.H5Aclose(attribute_id1);
H5.H5Aclose(attribute_id2);
H5.H5Dclose(dataset_id1);
// Print the first dataset and attributes
System.out.println("Data from dataset1: " + Arrays.toString(data1));
System.out.println("Attribute1 from dataset1: " + attribute_data1[0]);
System.out.println("Attribute2 from dataset1: " + attribute_data2[0]);
// Read data from the second dataset
int dataset_id2 = H5.H5Dopen(group_id, "dataset2", H5.H5P_DEFAULT);
int dataspace_id2 = H5.H5Dget_space(dataset_id2);
long[] dims2 = new long[1];
H5.H5Sget_simple_extent_dims(dataspace_id2, dims2, null);
float[] data2 = new float[(int)dims2[0]];
H5.H5Dread(dataset_id2, H5.H5T_NATIVE_FLOAT, H5.H5S_ALL, H5.H5S_ALL, H5.H5P_DEFAULT, data2);
// Read attributes from the second dataset
int attribute_id3 = H5.H5Aopen(dataset_id2, "attribute3", H5.H5P_DEFAULT);
float[] attribute_data3 = new float[1];
H5.H5Aread(attribute_id3, H5.H5T_NATIVE_FLOAT, attribute_data3);
int attribute_id4 = H5.H5Aopen(dataset_id2, "attribute4", H5.H5P_DEFAULT);
float[] attribute_data4 = new float[1];
H5.H5Aread(attribute_id4, H5.H5T_NATIVE_FLOAT, attribute_data4);
// Close the second dataset and attributes
H5.H5Aclose(attribute_id3);
H5.H5Aclose(attribute_id4);
H5.H5Dclose(dataset_id2);
// Print the second dataset and attributes
System.out.println("Data from dataset2: " + Arrays.toString(data2));
System.out.println("Attribute3 from dataset2: " + attribute_data3[0]);
System.out.println("Attribute4 from dataset2: " + attribute_data4[0]);
// Close the group
H5.H5Gclose(group_id);
// Close the HDF5 file
H5.H5Fclose(file_id);
System.out.println("Read data from HDF5 file: " + filename);
} catch (HDF5Exception e) {
System.out.println("Error reading data from HDF5 file: " + e.getMessage());
}
}
}
```
这个例子创建了一个名为“example.h5”的新HDF5文件,然后向其中写入两个数据集“dataset1”和“dataset2”,每个数据集下都有两个属性。然后,它从文件中读取数据集和属性,并将它们打印到控制台上。