author | Allen Byrne <50328838+byrnHDF@users.noreply.github.com> | 2021-04-15 12:59:01 (GMT) |
---|---|---|
committer | GitHub <noreply@github.com> | 2021-04-15 12:59:01 (GMT) |
commit | e21f7aaac4a4d34a8a5aa1330fb2ed6814532cfb (patch) | |
tree | d43ff3c2b940cfbf3f16a67360bbe2070f3ec983 /java/test/TestH5Arw.java | |
parent | 052eed29d8922b87d4aa2734e4366de461326dd0 (diff) | |
Tools long double updates (#522)
* OESS-98 convert plugin option to FetchContent, add tests
* Fixes for pkcfg files because of plugin option
* OESS-98 fix tools test for plugins
* Keep doxygen comments under 100 chars long - format hint
* Whitespace
* HDFFV-11144 - Reclassify CMake messages
* HDFFV-11099/11100 added help text
* Reworked switch statement to compare string instead
* Fix typo
* Update CDash mode
* Correct name of threadsafe
* Correct option name
* Undo accidental commit
* Note LLVM 10 to 11 format default changes
* Update format plugin
* Undo clang-format version 11 changes
* One more correction
* Update supported platforms
* Revert whitespace changes
* Correct whitespace
* Changes from PR#3
* HDFFV-11213 added option to control gcc10 warnings diagnostics
* HDFFV-11212 Use the new references correctly in JNI utility and tests
* format source
* Fix typo
* Add new test file
* HDFFV-11212 - update test and remove unused arg
* Minor non-space formatting changes
* Use H5I_INVALID_ID instead of "-1"
* source formatting
* add missing testfile, update jni function
* Undo commit of debug code
* remove mislocated file
* Fix h5repack test for handling of fapls and id close
* Update h5diff test files usage text
* HDFFV-11212 add new ref tests for JNI export dataset
* src format update
* Remove blank line typo
* src format typo
* long double requires %Lg
* Another long double format specifier should be %Lg (see the sketch below this list)
* issue with t128bit test
* Windows issue with h5dump and type.
* Fix review issues
* refactor function nesting and fix error checks
* format fixes
* Remove untested functions and javadoc quiet comments
* Restore TRY block.
* Change string append errors to memory exception
* revert to H5_JNI_FATAL_ERROR - support functions need work
* Add assertion error for h5util functions
* remove duplicate function
* format fix
* Revert HD function error handling
* Update copyright comments
* GH #386 java folder copyright corrections
* Whitespace
* GH #359 implement and fix tools 1.6 API usage
* remove excessive comments
* Flip inits to correct ifdef section
* rework ifdef to be simpler
* format issue
* Reformat ifdef inits
* remove static attribute
* format compliance
* Update names
* Revert because logic relies on float not being int
* Changes noticed from creating merge of #412
* Double underscore change
* Correct compiler version variable used
* Remove header guard underscores
* Whitespace cleanup
* Split format source and commit changes on repo push
* remove pre-split setting
* Change windows TS to use older VS.
* Correct Windows OS name
* HDFFV-11212 JNI export util and Javadoc
* Suggested review changes
* Another change found
* Committing clang-format changes
* HDFFV-11113 long double in tools
* HDFFV-11113 add note
* Disable long double tests for now
* HDFFV-11228 remove arbitrary CMake warning groups.
* Make each flag a string
* Some Javadoc warning fixes
* Updated javadoc fixes
* # WARNING: head commit changed in the meantime
HDFFV-11229 Fix long double usage in tools and java
Changed h5dump and h5ls to just print 128-bit for long double type.
Added test and file for dataset and attributes with all three float
types.
* Committing clang-format changes
* HDFFV-11229 Add release note
* HDFFV-11229 windows testfile needed
* fix typo
* Remove unsupported message text
* HDFFV-11229 - change ldouble test to check both native and general
* HDFFV-11229 add second file compare
* HDFFV-11229 fix reference file
* HDFFV-11229 autotools check two refs
* HDFFV-11229 revert back to removal of NATIVE_LDOUBLE in tools output
* Committing clang-format changes
* Update release note
* Update attribute type of ref file
* Change source of ninja for macs
* try port instead of brew
* Recommended approach is to use brew.
* Undo non long double changes
* remove unneeded file
Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
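For readers unfamiliar with the `%Lg` items in the list above: in C's `printf` family the `g`/`e`/`f` conversions expect a `double`, and a `long double` argument needs the `L` length modifier; passing a `long double` through plain `%g` is undefined behavior. A minimal standalone sketch of the distinction (not taken from the h5dump/h5ls sources):

```c
#include <stdio.h>

int main(void)
{
    double      d  = 1.0 / 3.0;
    long double ld = 1.0L / 3.0L;

    printf("double:      %g\n", d);   /* %g consumes a double */
    printf("long double: %Lg\n", ld); /* long double requires the L length modifier */

    return 0;
}
```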
Diffstat (limited to 'java/test/TestH5Arw.java')
-rw-r--r-- | java/test/TestH5Arw.java | 449 |
1 file changed, 449 insertions, 0 deletions
```diff
diff --git a/java/test/TestH5Arw.java b/java/test/TestH5Arw.java
new file mode 100644
index 0000000..282b736
--- /dev/null
+++ b/java/test/TestH5Arw.java
@@ -0,0 +1,449 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group.                                                *
+ * All rights reserved.                                                       *
+ *                                                                            *
+ * This file is part of HDF5. The full HDF5 copyright notice, including       *
+ * terms governing use, modification, and redistribution, is contained in     *
+ * the COPYING file, which can be found at the root of the source code        *
+ * distribution tree, or in https://www.hdfgroup.org/licenses.                *
+ * If you do not have access to either file, you may request a copy from      *
+ * help@hdfgroup.org.                                                         *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.HDFNativeData;
+import hdf.hdf5lib.callbacks.H5A_iterate_cb;
+import hdf.hdf5lib.callbacks.H5A_iterate_t;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Arw {
+    @Rule public TestName testname = new TestName();
+    private static final String H5_INTS_FILE = "tintsattrs.h5";
+    private static final String H5_FLTS_FILE = "tfloatsattrs.h5";
+    private static final int DIM_X = 8;
+    private static final int DIM8_Y = 8;
+    private static final int DIM16_Y = 16;
+    private static final int DIM32_Y = 32;
+    private static final int DIM64_Y = 64;
+    private static final int DIM128_Y = 128;
+    private static final String DATASETU08 = "DU08BITS";
+    private static final String DATASETS08 = "DS08BITS";
+    private static final String DATASETU16 = "DU16BITS";
+    private static final String DATASETS16 = "DS16BITS";
+    private static final String DATASETU32 = "DU32BITS";
+    private static final String DATASETS32 = "DS32BITS";
+    private static final String DATASETU64 = "DU64BITS";
+    private static final String DATASETS64 = "DS64BITS";
+    private static final String DATASETF32 = "DS32BITS";
+    private static final String DATASETF64 = "DS64BITS";
+    private static final String DATASETF128 = "DS128BITS";
+    private static final int RANK = 2;
+    long H5fid = HDF5Constants.H5I_INVALID_HID;
+    long H5aid = HDF5Constants.H5I_INVALID_HID;
+    long H5did = HDF5Constants.H5I_INVALID_HID;
+
+    private final void _closeH5file() throws HDF5LibraryException {
+        if (H5aid >= 0)
+            try {H5.H5Aclose(H5aid);} catch (Exception ex) {}
+        if (H5did >= 0)
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+        if (H5fid > 0)
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+    }
+
+    public void openH5file(String filename, String dsetname) {
+        try {
+            H5fid = H5.H5Fopen(filename,
+                    HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5Arw._openH5file: " + err);
+        }
+        assertTrue("TestH5Arw._openH5file: H5.H5Fopen: ", H5fid >= 0);
+        try {
+            H5did = H5.H5Dopen(H5fid, dsetname, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5Arw._openH5file: " + err);
+        }
+        assertTrue("TestH5Arw._openH5file: H5.H5Dopen: ", H5did >= 0);
+        try {
+            H5aid = H5.H5Aopen(H5did, dsetname, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5Arw._openH5file: " + err);
+        }
+        assertTrue("TestH5Arw._openH5file: H5.H5Aopen: ", H5aid >= 0);
+    }
+
+    @After
+    public void closeH5file() throws HDF5LibraryException {
+        if (H5aid >= 0)
+            try {H5.H5Aclose(H5aid);} catch (Exception ex) {}
+        if (H5did >= 0)
+            try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+        if (H5fid > 0)
+            try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+        H5fid = HDF5Constants.H5I_INVALID_HID;
+        H5did = HDF5Constants.H5I_INVALID_HID;
+        H5aid = HDF5Constants.H5I_INVALID_HID;
+        System.out.println();
+    }
+
+    @Before
+    public void verifyCount()
+            throws NullPointerException, HDF5Exception {
+        assertTrue("H5 open ids is 0", H5.getOpenIDCount()==0);
+        System.out.print(testname.getMethodName());
+    }
+
+    @Test
+    public void testH5Aread_8bit_ints() {
+        byte[][] attr_data = new byte[DIM_X][DIM8_Y];
+
+        try {
+            openH5file(H5_INTS_FILE, DATASETU08);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Aread_8bit_ints: openH5file: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_UINT8, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_8bit_ints: H5Aread: " + err);
+        }
+
+        // End access to the attribute and release resources used by it.
+        try {
+            H5.H5Aclose(H5aid);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            H5.H5Dclose(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+
+        // Open an existing dataset.
+        try {
+            H5did = H5.H5Dopen(H5fid, DATASETS08, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_8bit_ints: H5Dopen: " + err);
+        }
+
+        // Open an existing attribute.
+        try {
+            H5aid = H5.H5Aopen(H5did, DATASETS08, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_8bit_ints: H5Aopen: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_INT8, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_8bit_ints: H5Aread: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Aread_16bit_ints() {
+        short[][] attr_data = new short[DIM_X][DIM16_Y];
+
+        try {
+            openH5file(H5_INTS_FILE, DATASETU16);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Aread_16bit_ints: openH5file: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_UINT16, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_16bit_ints: H5Aread: " + err);
+        }
+
+        // End access to the attribute and release resources used by it.
+        try {
+            H5.H5Aclose(H5aid);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            H5.H5Dclose(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+
+        // Open an existing dataset.
+        try {
+            H5did = H5.H5Dopen(H5fid, DATASETS16, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_16bit_ints: H5Dopen: " + err);
+        }
+
+        // Open an existing attribute.
+        try {
+            H5aid = H5.H5Aopen(H5did, DATASETS16, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_16bit_ints: H5Aopen: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_INT16, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_16bit_ints: H5Aread: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Aread_32bit_ints() {
+        int[][] attr_data = new int[DIM_X][DIM32_Y];
+
+        try {
+            openH5file(H5_INTS_FILE, DATASETU32);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Aread_32bit_ints: openH5file: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_UINT32, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_32bit_ints: H5Aread: " + err);
+        }
+
+        // End access to the attribute and release resources used by it.
+        try {
+            H5.H5Aclose(H5aid);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            H5.H5Dclose(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+
+        // Open an existing dataset.
+        try {
+            H5did = H5.H5Dopen(H5fid, DATASETS32, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_32bit_ints: H5Dopen: " + err);
+        }
+
+        // Open an existing attribute.
+        try {
+            H5aid = H5.H5Aopen(H5did, DATASETS32, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_32bit_ints: H5Aopen: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_INT32, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_32bit_ints: H5Aread: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Aread_64bit_ints() {
+        long[][] attr_data = new long[DIM_X][DIM64_Y];
+
+        try {
+            openH5file(H5_INTS_FILE, DATASETU64);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Aread_64bit_ints: openH5file: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_UINT64, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_64bit_ints: H5Aread: " + err);
+        }
+
+        // End access to the attribute and release resources used by it.
+        try {
+            H5.H5Aclose(H5aid);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+
+        // End access to the dataset and release resources used by it.
+        try {
+            H5.H5Dclose(H5did);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+        }
+
+        // Open an existing dataset.
+        try {
+            H5did = H5.H5Dopen(H5fid, DATASETS64, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_64bit_ints: H5Dopen: " + err);
+        }
+
+        // Open an existing attribute.
+        try {
+            H5aid = H5.H5Aopen(H5did, DATASETS64, HDF5Constants.H5P_DEFAULT);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_64bit_ints: H5Aopen: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_INT64, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_64bit_ints: H5Aread: " + err);
+        }
+    }
+
+    @Test
+    public void testH5Aread_32bit_floats() {
+        float[][] attr_data = new float[DIM_X][DIM32_Y];
+
+        try {
+            openH5file(H5_FLTS_FILE, DATASETF32);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Aread_32bit_floats: openH5file: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_FLOAT, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_32bit_floats: H5Aread: " + err);
+        }
+        for (int i = 0; i < DIM_X; i++)
+            assertTrue("testH5Aread_32bit_floats - H5.H5Aread: ", attr_data[i][0] == (32 - i));
+    }
+
+    @Test
+    public void testH5Aread_64bit_floats() {
+        double[][] attr_data = new double[DIM_X][DIM64_Y];
+
+        try {
+            openH5file(H5_FLTS_FILE, DATASETF64);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Aread_64bit_floats: openH5file: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_DOUBLE, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_64bit_floats: H5Aread: " + err);
+        }
+        for (int i = 0; i < DIM_X; i++)
+            assertTrue("testH5Aread_64bit_floats - H5.H5Aread: ", attr_data[i][0] == (64 - i));
+    }
+
+    @Test
+    public void testH5Aread_128bit_floats() {
+        byte[][][] attr_data = new byte[DIM_X][DIM128_Y][8];
+
+        try {
+            openH5file(H5_FLTS_FILE, DATASETF128);
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("testH5Aread_128bit_floats: openH5file: " + err);
+        }
+
+        // Read data.
+        try {
+            H5.H5Aread(H5aid, HDF5Constants.H5T_NATIVE_LDOUBLE, attr_data);
+        }
+        catch (Exception err) {
+            err.printStackTrace();
+            fail("testH5Aread_128bit_floats: H5Aread: " + err);
+        }
+    }
+
+}
```