author    | Allen Byrne <byrn@hdfgroup.org> | 2016-08-01 18:17:36 (GMT)
committer | Allen Byrne <byrn@hdfgroup.org> | 2016-08-01 18:17:36 (GMT)
commit    | 0e99aa203a9f5071940e04ec17922d7afe70cc78 (patch)
tree      | f328a427e331d4e1100c73143c976367ca30fd1a /java
parent    | bac4cf8e92b8cd033c40160bde0f2b27fa648a5c (diff)
download  | hdf5-0e99aa203a9f5071940e04ec17922d7afe70cc78.zip
          | hdf5-0e99aa203a9f5071940e04ec17922d7afe70cc78.tar.gz
          | hdf5-0e99aa203a9f5071940e04ec17922d7afe70cc78.tar.bz2
[svn-r30240] JAVA-1920: Create a filter plugin test with a filter that calls an HDF5 function.
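For context on what the new test depends on (this sketch is not part of the commit): TestH5PLdlopen can only succeed when HDF5 is allowed to load filter plugins dynamically and can locate the dynlib4 plugin at runtime, assumed here to be advertised through the HDF5_PLUGIN_PATH environment variable. Below is a minimal, hypothetical pre-check using the same H5PL calls exercised by the existing TestH5PLplugins test; the class name and the environment-variable assumption are illustrative only.

```java
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

// Hypothetical pre-check (not part of this commit): confirm that dynamic
// filter plugins are enabled before running a test that depends on one.
public class PluginPreCheck {
    public static void main(String[] args) throws Exception {
        // H5PLget_loading_state() reports which plugin classes HDF5 may load;
        // the filter-plugin bit must be set for the dynlib4 filter to be found.
        int plugin_flags = H5.H5PLget_loading_state();
        boolean filtersEnabled = (plugin_flags & HDF5Constants.H5PL_FILTER_PLUGIN) != 0;
        System.out.println("filter plugins enabled: " + filtersEnabled);

        // The plugin library itself is assumed to be discovered through the
        // HDF5_PLUGIN_PATH environment variable in the test environment.
        System.out.println("HDF5_PLUGIN_PATH = " + System.getenv("HDF5_PLUGIN_PATH"));
    }
}
```

Because the test registers the filter with H5Z_FLAG_MANDATORY, disabling filter-plugin loading (or an unset plugin path) would be expected to make the H5Dwrite call in the diff below fail rather than silently skip the filter.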
Diffstat (limited to 'java')
-rw-r--r-- | java/test/JUnit-interface.txt |   3
-rw-r--r-- | java/test/TestH5PL.java       | 122
2 files changed, 124 insertions, 1 deletion
diff --git a/java/test/JUnit-interface.txt b/java/test/JUnit-interface.txt
index cbd93e0..9765d08 100644
--- a/java/test/JUnit-interface.txt
+++ b/java/test/JUnit-interface.txt
@@ -628,13 +628,14 @@ JUnit version 4.11
 .testH5Ocomment_clear
 .testH5Ocopy_cur_not_exists
 .TestH5PLplugins
+.TestH5PLdlopen
 .testH5Zfilter_avail
 .testH5Zunregister_predefined
 .testH5Zget_filter_info
 
 Time: XXXX
 
-OK (632 tests)
+OK (633 tests)
 
 HDF5-DIAG: Error detected in HDF5 (version (number)) thread (IDs):
   #000: (file name) line (number) in H5Fopen(): can't set access and transfer property lists
diff --git a/java/test/TestH5PL.java b/java/test/TestH5PL.java
index afcb88a..6a3324a 100644
--- a/java/test/TestH5PL.java
+++ b/java/test/TestH5PL.java
@@ -29,6 +29,15 @@ import org.junit.rules.TestName;
 
 public class TestH5PL {
     @Rule public TestName testname = new TestName();
+    private static String FILENAME = "h5_dlopenChunk.h5";
+    private static String DATASETNAME = "DS1";
+    private static final int DIM_X = 6;
+    private static final int DIM_Y = 8;
+    private static final int CHUNK_X = 4;
+    private static final int CHUNK_Y = 4;
+    private static final int RANK = 2;
+    private static final int NDIMS = 2;
+    private static final int H5Z_FILTER_DYNLIB4 = 260;
 
     @Before
     public void checkOpenIDs() {
@@ -58,4 +67,117 @@ public class TestH5PL {
             fail("TestH5PLplugins " + err);
         }
     }
+
+    @Test
+    public void TestH5PLdlopen() {
+        try {
+            long file_id = -1;
+            long filespace_id = -1;
+            long dataset_id = -1;
+            long fapl_id = -1;
+            long dcpl_id = -1;
+            int[] cd_values = {9, 0, 0, 0};
+            int[] libversion = {0, 0, 0};
+            long[] dims = { DIM_X, DIM_Y };
+            long[] chunk_dims = { CHUNK_X, CHUNK_Y };
+            int[][] dset_data = new int[DIM_X][DIM_Y];
+            int[] mdc_nelmts = {0};
+            long[] rdcc_nelmts = {0};
+            long[] rdcc_nbytes = {0};
+            double[] rdcc_w0 = {0};
+
+            // Initialize data to "1", to make it easier to see the selections.
+            for (int indx = 0; indx < DIM_X; indx++)
+                for (int jndx = 0; jndx < DIM_Y; jndx++)
+                    dset_data[indx][jndx] = 1;
+
+            // Create a new file using default properties.
+            try {
+                file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+                        HDF5Constants.H5P_DEFAULT);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+                fail("TestH5PLdlopen H5Fcreate:" + e);
+            }
+
+            // Create dataspace. Setting maximum size to NULL sets the maximum
+            // size to be the current size.
+            try {
+                filespace_id = H5.H5Screate_simple(RANK, dims, null);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+                fail("TestH5PLdlopen H5Screate_simple:" + e);
+            }
+
+            // Create the dataset creation property list.
+            try {
+                dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+                fail("TestH5PLdlopen H5Pcreate:" + e);
+            }
+
+            // Set the chunk size.
+            try {
+                if (dcpl_id >= 0)
+                    H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+                fail("TestH5PLdlopen H5Pset_chunk:" + e);
+            }
+
+            try {
+                H5.H5get_libversion(libversion);
+                cd_values[1] = libversion[0];
+                cd_values[2] = libversion[1];
+                cd_values[3] = libversion[2];
+                if (dcpl_id >= 0)
+                    H5.H5Pset_filter(dcpl_id, H5Z_FILTER_DYNLIB4, HDF5Constants.H5Z_FLAG_MANDATORY, 4, cd_values);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+                fail("TestH5PLdlopen H5Pset_filter:" + e);
+            }
+
+            // Create the chunked dataset.
+            try {
+                if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+                    dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+                            HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+                fail("TestH5PLdlopen H5Dcreate:" + e);
+            }
+
+            try {
+                if (dataset_id >= 0)
+                    H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+                            HDF5Constants.H5S_ALL, dset_data);
+            }
+            catch (Exception e) {
+                e.printStackTrace();
+                fail("TestH5PLdlopen H5Dwrite:" + e);
+            }
+            finally {
+                // End access to the dataset and release resources used by it.
+                if (dcpl_id >= 0)
+                    try {H5.H5Pclose_class(dcpl_id);} catch (Throwable err) {}
+                if (dataset_id >= 0)
+                    try {H5.H5Dclose(dataset_id);} catch (Throwable err) {}
+                if (filespace_id >= 0)
+                    try {H5.H5Sclose(filespace_id);} catch (Throwable err) {}
+                if (file_id >= 0)
+                    try {H5.H5Fclose(file_id);} catch (Throwable err) {}
+            }
+        }
+        catch (Throwable err) {
+            err.printStackTrace();
+            fail("TestH5PLdlopen " + err);
+        }
+    }
 }
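A possible follow-up check, not part of this commit: read the dataset back so that the decode path of the dynlib4 plugin is exercised as well. This is a sketch only; the class name is hypothetical, the file and dataset names are taken from the constants in the diff above, and it assumes the file written by TestH5PLdlopen still exists and the plugin remains discoverable via HDF5_PLUGIN_PATH at read time.

```java
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

// Illustrative read-back of the file written by TestH5PLdlopen (assumed names).
public class ReadDlopenChunk {
    private static final String FILENAME = "h5_dlopenChunk.h5";
    private static final String DATASETNAME = "DS1";
    private static final int DIM_X = 6;
    private static final int DIM_Y = 8;

    public static void main(String[] args) {
        long file_id = -1;
        long dataset_id = -1;
        int[][] dset_data = new int[DIM_X][DIM_Y];
        try {
            file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
            dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
            // Reading the chunked dataset forces HDF5 to locate and apply the
            // dynlib4 filter plugin on the decode path.
            H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
            System.out.println("DS1[0][0] = " + dset_data[0][0]); // the test wrote 1s
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        finally {
            if (dataset_id >= 0)
                try { H5.H5Dclose(dataset_id); } catch (Throwable err) {}
            if (file_id >= 0)
                try { H5.H5Fclose(file_id); } catch (Throwable err) {}
        }
    }
}
```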