author    Allen Byrne <50328838+byrnHDF@users.noreply.github.com>    2021-04-15 12:59:01 (GMT)
committer GitHub <noreply@github.com>    2021-04-15 12:59:01 (GMT)
commit    e21f7aaac4a4d34a8a5aa1330fb2ed6814532cfb (patch)
tree      d43ff3c2b940cfbf3f16a67360bbe2070f3ec983 /java/test/TestH5Drw.java
parent    052eed29d8922b87d4aa2734e4366de461326dd0 (diff)
Tools long double updates (#522)
* OESS-98 convert plugin option to FetchContent, add tests
* Fixes for pkcfg files because of plugin option
* OESS-98 fix tools test for plugins
* Keep doxygen comments under 100 chars long - format hint
* Whitespace
* HDFFV-11144 - Reclassify CMake messages
* HDFFV-11099/11100 added help text
* Reworked switch statement to compare string instead
* Fix typo
* Update CDash mode
* Correct name of threadsafe
* Correct option name
* Undo accidental commit
* Note LLVM 10 to 11 format default changes
* Update format plugin
* Undo clang-format version 11 changes
* One more correction
* Update supported platforms
* Revert whitespace changes
* Correct whitespace
* Changes from PR#3
* HDFFV-11213 added option to control gcc10 warnings diagnostics
* HDFFV-11212 Use the new references correctly in JNI utility and tests
* format source
* Fix typo
* Add new test file
* HDFFV-11212 - update test and remove unused arg
* Minor non-space formatting changes
* Use H5I_INVALID_ID instead of "-1"
* source formatting
* add missing testfile, update jni function
* Undo commit of debug code
* remove mislocated file
* Fix h5repack test for handling of fapls and id close
* Update h5diff test files usage text
* HDFFV-11212 add new ref tests for JNI export dataset
* src format update
* Remove blank line typo
* src format typo
* long double requires %Lg
* Another long double format specifier should be %Lg
* issue with t128bit test
* Windows issue with h5dump and type
* Fix review issues
* refactor function nesting and fix error checks
* format fixes
* Remove untested functions and javadoc quiet comments
* Restore TRY block
* Change string append errors to memory exception
* revert to H5_JNI_FATAL_ERROR - support functions need work
* Add assertion error for h5util functions
* remove duplicate function
* format fix
* Revert HD function error handling
* Update copyright comments
* GH #386 java folder copyright corrections
* Whitespace
* GH #359 implement and fix tools 1.6 API usage
* remove excessive comments
* Flip inits to correct ifdef section
* rework ifdef to be simpler
* format issue
* Reformat ifdef inits
* remove static attribute
* format compliance
* Update names
* Revert because logic relies on float not being int
* Changes noticed from creating merge of #412
* Double underscore change
* Correct compiler version variable used
* Remove header guard underscores
* Whitespace cleanup
* Split format source and commit changes on repo push
* remove pre-split setting
* Change Windows TS to use older VS
* correct Windows OS name
* HDFFV-11212 JNI export util and Javadoc
* Suggested review changes
* Another change found
* Committing clang-format changes
* HDFFV-11113 long double in tools
* HDFFV-11113 add note
* Disable long double tests for now
* HDFFV-11228 remove arbitrary CMake warning groups
* Make each flag a string
* Some Javadoc warning fixes
* Updated javadoc fixes
* HDFFV-11229 Fix long double usage in tools and java: changed h5dump and h5ls to just print 128-bit for the long double type; added a test and file for datasets and attributes with all three float types
* Committing clang-format changes
* HDFFV-11229 Add release note
* HDFFV-11229 windows testfile needed
* fix typo
* Remove non-supported message text
* HDFFV-11229 - change ldouble test to check both native and general
* HDFFV-11229 add second file compare
* HDFFV-11229 fix reference file
* HDFFV-11229 autotools check two refs
* HDFFV-11229 revert back to removal of NATIVE_LDOUBLE in tools output
* Committing clang-format changes
* Update release note
* Update attribute type of ref file
* Change source of ninja for macs
* try port instead of brew
* Recommended is to use brew
* Undo non long double changes
* remove unneeded file

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
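The key Java-side change this commit tests is reading a 128-bit long double dataset through the JNI wrappers. Below is a minimal standalone sketch of that read path, assuming the tfloatsattrs.h5 file and DS128BITS dataset added by this commit; the class name LDoubleReadSketch and the hard-coded buffer shape are illustrative only and simply mirror the new test in the diff that follows.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

// Hypothetical sketch; mirrors testH5Dread_128bit_floats in the new test file below.
public class LDoubleReadSketch {
    public static void main(String[] args) throws Exception {
        long fid = H5.H5Fopen("tfloatsattrs.h5", HDF5Constants.H5F_ACC_RDONLY,
                              HDF5Constants.H5P_DEFAULT);
        long did = H5.H5Dopen(fid, "DS128BITS", HDF5Constants.H5P_DEFAULT);
        // Java has no 128-bit floating-point type, so the long double values
        // arrive as raw bytes; the 8 x 128 shape matches the test dataset.
        byte[][][] buf = new byte[8][128][8];
        H5.H5Dread(did, HDF5Constants.H5T_NATIVE_LDOUBLE, HDF5Constants.H5S_ALL,
                   HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, buf);
        H5.H5Dclose(did);
        H5.H5Fclose(fid);
    }
}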
Diffstat (limited to 'java/test/TestH5Drw.java')
-rw-r--r--  java/test/TestH5Drw.java  378
1 file changed, 378 insertions, 0 deletions
diff --git a/java/test/TestH5Drw.java b/java/test/TestH5Drw.java
new file mode 100644
index 0000000..69fb9d7
--- /dev/null
+++ b/java/test/TestH5Drw.java
@@ -0,0 +1,378 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.HDFNativeData;
+import hdf.hdf5lib.callbacks.H5D_iterate_cb;
+import hdf.hdf5lib.callbacks.H5D_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Drw {
+ @Rule public TestName testname = new TestName();
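+ // File and dataset names below match the contents of the pre-generated
+ // tintsattrs.h5 and tfloatsattrs.h5 test files opened by these tests.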
+ private static final String H5_INTS_FILE = "tintsattrs.h5";
+ private static final String H5_FLTS_FILE = "tfloatsattrs.h5";
+ private static final int DIM_X = 8;
+ private static final int DIM8_Y = 8;
+ private static final int DIM16_Y = 16;
+ private static final int DIM32_Y = 32;
+ private static final int DIM64_Y = 64;
+ private static final int DIM128_Y = 128;
+ private static final String DATASETU08 = "DU08BITS";
+ private static final String DATASETS08 = "DS08BITS";
+ private static final String DATASETU16 = "DU16BITS";
+ private static final String DATASETS16 = "DS16BITS";
+ private static final String DATASETU32 = "DU32BITS";
+ private static final String DATASETS32 = "DS32BITS";
+ private static final String DATASETU64 = "DU64BITS";
+ private static final String DATASETS64 = "DS64BITS";
+ private static final String DATASETF32 = "DS32BITS";
+ private static final String DATASETF64 = "DS64BITS";
+ private static final String DATASETF128 = "DS128BITS";
+ private static final int RANK = 2;
+ long H5fid = HDF5Constants.H5I_INVALID_HID;
+ long H5did = HDF5Constants.H5I_INVALID_HID;
+
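+ // Closes any dataset and file handles that are still open.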
+ private final void _closeH5file() throws HDF5LibraryException {
+ if (H5did >= 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ }
+
+ public void openH5file(String filename, String dsetname) {
+ try {
+ H5fid = H5.H5Fopen(filename,
+ HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Drw._openH5file: " + err);
+ }
+ assertTrue("TestH5Drw._openH5file: H5.H5Fopen: ", H5fid >= 0);
+ try {
+ H5did = H5.H5Dopen(H5fid, dsetname, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Drw._openH5file: " + err);
+ }
+ assertTrue("TestH5Drw._openH5file: H5.H5Dopen: ", H5did >= 0);
+ }
+
+ @After
+ public void closeH5file() throws HDF5LibraryException {
+ if (H5did >= 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ H5fid = HDF5Constants.H5I_INVALID_HID;
+ H5did = HDF5Constants.H5I_INVALID_HID;
+ System.out.println();
+ }
+
+ @Before
+ public void verifyCount()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0", H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+ }
+
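+ // Each integer read test reads the unsigned dataset first, then reopens and
+ // reads the signed dataset of the same width from the same file.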
+ @Test
+ public void testH5Dread_8bit_ints() {
+ byte[][] dset_data = new byte[DIM_X][DIM8_Y];
+
+ try {
+ openH5file(H5_INTS_FILE, DATASETU08);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Dread_8bit_ints: openH5file: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_UINT8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_8bit_ints: H5Dread: " + err);
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ H5.H5Dclose(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ H5did = H5.H5Dopen(H5fid, DATASETS08, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_8bit_ints: H5Dopen: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT8, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_8bit_ints: H5Dread: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Dread_16bit_ints() {
+ short[][] dset_data = new short[DIM_X][DIM16_Y];
+
+ try {
+ openH5file(H5_INTS_FILE, DATASETU16);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Dread_16bit_ints: openH5file: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_UINT16, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_16bit_ints: H5Dread: " + err);
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ H5.H5Dclose(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ H5did = H5.H5Dopen(H5fid, DATASETS16, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_16bit_ints: H5Dopen: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT16, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_16bit_ints: H5Dread: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Dread_32bit_ints() {
+ int[][] dset_data = new int[DIM_X][DIM32_Y];
+
+ try {
+ openH5file(H5_INTS_FILE, DATASETU32);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Dread_32bit_ints: openH5file: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_UINT32, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_32bit_ints: H5Dread: " + err);
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ H5.H5Dclose(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ H5did = H5.H5Dopen(H5fid, DATASETS32, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_32bit_ints: H5Dopen: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT32, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_32bit_ints: H5Dread: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Dread_64bit_ints() {
+ long[][] dset_data = new long[DIM_X][DIM64_Y];
+
+ try {
+ openH5file(H5_INTS_FILE, DATASETU64);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Dread_64bit_ints: openH5file: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_UINT64, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_64bit_ints: H5Dread: " + err);
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ H5.H5Dclose(H5did);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ H5did = H5.H5Dopen(H5fid, DATASETS64, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_64bit_ints: H5Dopen: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_INT64, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_64bit_ints: H5Dread: " + err);
+ }
+ }
+
+ @Test
+ public void testH5Dread_32bit_floats() {
+ float[][] dset_data = new float[DIM_X][DIM32_Y];
+
+ try {
+ openH5file(H5_FLTS_FILE, DATASETF32);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Dread_32bit_floats: openH5file: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_FLOAT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_32bit_floats: H5Dread: " + err);
+ }
+ for (int i = 0; i < DIM_X; i++)
+ assertTrue("testH5Dread_32bit_floats - H5.H5Dread: ", dset_data[i][0] == (32 - i));
+ }
+
+ @Test
+ public void testH5Dread_64bit_floats() {
+ double[][] dset_data = new double[DIM_X][DIM64_Y];
+
+ try {
+ openH5file(H5_FLTS_FILE, DATASETF64);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Dread_64bit_floats: openH5file: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_64bit_floats: H5Dread: " + err);
+ }
+ for (int i = 0; i < DIM_X; i++)
+ assertTrue("testH5Dread_64bit_floats - H5.H5Dread: ", dset_data[i][0] == (64 - i));
+ }
+
+ @Test
+ public void testH5Dread_128bit_floats() {
+ byte[][][] dset_data = new byte[DIM_X][DIM128_Y][8];
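+ // Java has no 128-bit floating-point type, so the long double values are read back as raw bytes.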
+
+ try {
+ openH5file(H5_FLTS_FILE, DATASETF128);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("testH5Dread_128bit_floats: openH5file: " + err);
+ }
+
+ // Read data.
+ try {
+ H5.H5Dread(H5did, HDF5Constants.H5T_NATIVE_LDOUBLE, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception err) {
+ err.printStackTrace();
+ fail("testH5Dread_128bit_floats: H5Dread: " + err);
+ }
+ }
+
+}