author     Allen Byrne <byrn@hdfgroup.org>    2019-11-07 15:48:56 (GMT)
committer  David Young <dyoung@hdfgroup.org>  2020-05-20 14:17:26 (GMT)
commit     1e7387a5f86cb7ff4db1d1e168b9befe1db8d7da (patch)
tree       9f2edcfefb51dcb4d5b1bde3a315c275d78c896c /java/test/TestH5.java
parent     408146236d23351a5ae2e912cba02139ec78e233 (diff)
HDFFV-10876 Update h5dump and h5ls for new ref api.
Diffstat (limited to 'java/test/TestH5.java')
-rw-r--r--  java/test/TestH5.java  143
1 file changed, 143 insertions, 0 deletions
diff --git a/java/test/TestH5.java b/java/test/TestH5.java
index 2bee075..6a860f3 100644
--- a/java/test/TestH5.java
+++ b/java/test/TestH5.java
@@ -22,12 +22,17 @@ import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
+import java.io.Reader;
+import java.io.StreamTokenizer;
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
import org.junit.After;
import org.junit.Before;
@@ -50,6 +55,81 @@ public class TestH5 {
public void nextTestName() {
System.out.println();
}
+ private static final String H5_FILE = "testData.h5";
+ private static final String EXPORT_FILE = "testExport.txt";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ private static final int RANK = 2;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+
+ private final void _deleteFile(String filename) {
+ // The File constructor does not throw for a non-null name, so no guard is needed.
+ File file = new File(filename);
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5._createDataset: ", did > 0);
+
+ return did;
+ }
+
+ private final void _createH5File() {
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ public final void _closeH5File() throws HDF5LibraryException {
+ if (H5did >= 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ H5fid = -1;
+ H5dsid = -1;
+ H5did = -1;
+ }
+
+ public final void _deleteH5file() {
+ _deleteFile(H5_FILE);
+ }
/**
* Test method for {@link hdf.hdf5lib.H5#J2C(int)}.
@@ -268,4 +348,67 @@ public class TestH5 {
fail("Exception thrown during test: " + ex.toString());
}
}
+
+ @Test
+ public void testH5export_dataset() {
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+ int[][] dset_indata = new int[DIM_X][DIM_Y];
+ int FILLVAL = 99;
+
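+ // Create testData.h5 with a DIM_X-by-DIM_Y integer dataset named "dset".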
+ _createH5File();
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = FILLVAL;
+
+ try {
+ if (H5did >= 0)
+ H5.H5Dwrite(H5did, HDF5Constants.H5T_NATIVE_INT,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
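+ // Close the file before exporting; H5export_dataset opens it by name.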
+ _closeH5File();
+
+ try {
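+ // Export the dataset to a text file, which is parsed back below.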
+ H5.H5export_dataset(EXPORT_FILE, H5_FILE, "/dset", 99);
+ }
+ catch (HDF5LibraryException err) {
+ err.printStackTrace();
+ fail("H5export_dataset failed: " + err);
+ }
+
+ File file = new File(EXPORT_FILE);
+
+ try {
+ Reader reader = new FileReader(file);
+ StreamTokenizer streamTokenizer = new StreamTokenizer(reader);
+ int indx = 0;
+ int jndx = 0;
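+ // Scan the exported text and copy each numeric token back into dset_indata.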
+ while(streamTokenizer.nextToken() != StreamTokenizer.TT_EOF){
+ if(streamTokenizer.ttype == StreamTokenizer.TT_NUMBER) {
+ dset_indata[indx][jndx] = (int)streamTokenizer.nval;
+ jndx++;
+ if (jndx >= DIM_Y) {
+ jndx = 0;
+ indx++;
+ }
+ }
+ }
+ reader.close();
+ }
+ catch (IOException err) {
+ err.printStackTrace();
+ fail("read file failed: " + err);
+ }
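+ // Every value read back should equal FILLVAL.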
+ for(int row = 0; row < DIM_X; row++)
+ for(int col = 0; col < DIM_Y; col++) {
+ assertTrue("H5export_dataset: <"+row+","+col+">"+dset_indata[row][col]+"=99", dset_indata[row][col]==99);
+ }
+// _deleteH5file();
+ }
}
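
For reference, a minimal sketch of the library call the new test exercises, outside of JUnit. It assumes the hdf.hdf5lib bindings are on the classpath and that an HDF5 file like the testData.h5 created above exists with a dataset at "/dset"; the class name ExportDatasetSketch is only illustrative, and the final argument (99) simply mirrors the test, which reads the exported file back as plain text.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.exceptions.HDF5LibraryException;

public class ExportDatasetSketch {
    public static void main(String[] args) {
        try {
            // Dump the contents of "/dset" in testData.h5 to a text file,
            // using the same arguments as testH5export_dataset above.
            H5.H5export_dataset("testExport.txt", "testData.h5", "/dset", 99);
        }
        catch (HDF5LibraryException err) {
            err.printStackTrace();
        }
    }
}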