author     Larry Knox <lrknox@hdfgroup.org>    2019-07-25 16:36:37 (GMT)
committer  Larry Knox <lrknox@hdfgroup.org>    2019-07-25 16:47:12 (GMT)
commit     8008294578b5a133907d7ab1dd20e34735c54535 (patch)
tree       d1b9228d468afc05da9333567ea43a04bb0c4272 /java
parent     d3fdcd8a680ad0f8b21304b35e8564b774a88ef0 (diff)
Squashed commit of the following:
Merge changes from update_merged_S3_HDFS branch into develop.

commit d5034315aea88629929ac0c9c59ebfafd5f21a31
Merge: 9c48823 d3fdcd8
Author: Larry Knox <lrknox@hdfgroup.org>
Date:   Thu Jul 25 08:24:53 2019 -0500

    Merge branch 'develop' into update_merged_S3_HDFS
Diffstat (limited to 'java')
-rw-r--r--  java/examples/groups/JavaGroupExample.sh.in          |   2
-rw-r--r--  java/src/Makefile.am                                  |   2
-rw-r--r--  java/src/hdf/hdf5lib/CMakeLists.txt                   |   2
-rw-r--r--  java/src/hdf/hdf5lib/H5.java                          |  10
-rw-r--r--  java/src/hdf/hdf5lib/HDF5Constants.java               |   6
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java    | 102
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java    | 121
-rw-r--r--  java/src/jni/h5Constants.c                            |   4
-rw-r--r--  java/test/CMakeLists.txt                              |  14
-rw-r--r--  java/test/Makefile.am                                 |   2
-rw-r--r--  java/test/TestAll.java                                |   1
-rw-r--r--  java/test/TestH5Pfapl.java                            |   4
-rw-r--r--  java/test/TestH5Pfaplhdfs.java                        | 393
-rw-r--r--  java/test/TestH5Pfapls3.java                          | 406
-rw-r--r--  java/test/junit.sh.in                                 |  48
-rw-r--r--  java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt         |   9
-rw-r--r--  java/test/testfiles/JUnit-TestH5Pfapls3.txt           |  10
17 files changed, 1136 insertions, 0 deletions
diff --git a/java/examples/groups/JavaGroupExample.sh.in b/java/examples/groups/JavaGroupExample.sh.in
index 9d4673f..3ba512a 100644
--- a/java/examples/groups/JavaGroupExample.sh.in
+++ b/java/examples/groups/JavaGroupExample.sh.in
@@ -64,6 +64,8 @@ $HDFTEST_HOME/h5ex_g_iterate.h5
$HDFTEST_HOME/h5ex_g_visit.h5
"
LIST_DATA_FILES="
+$HDFTEST_HOME/h5ex_g_iterate.h5
+$HDFTEST_HOME/h5ex_g_visit.h5
$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Create.txt
$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Iterate.txt
$HDFTEST_HOME/../testfiles/examples.groups.H5Ex_G_Compact.txt
diff --git a/java/src/Makefile.am b/java/src/Makefile.am
index bd55c39..fcdeae9 100644
--- a/java/src/Makefile.am
+++ b/java/src/Makefile.am
@@ -98,6 +98,8 @@ hdf5_java_JAVA = \
${pkgpath}/structs/H5A_info_t.java \
${pkgpath}/structs/H5E_error2_t.java \
${pkgpath}/structs/H5F_info2_t.java \
+ ${pkgpath}/structs/H5FD_hdfs_fapl_t.java \
+ ${pkgpath}/structs/H5FD_ros3_fapl_t.java \
${pkgpath}/structs/H5G_info_t.java \
${pkgpath}/structs/H5L_info_t.java \
${pkgpath}/structs/H5O_info_t.java \
diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt
index c171ea8..be8f60a 100644
--- a/java/src/hdf/hdf5lib/CMakeLists.txt
+++ b/java/src/hdf/hdf5lib/CMakeLists.txt
@@ -73,6 +73,8 @@ set (HDF5_JAVA_HDF_HDF5_STRUCTS_SOURCES
structs/H5AC_cache_config_t.java
structs/H5E_error2_t.java
structs/H5F_info2_t.java
+ structs/H5FD_ros3_fapl_t.java
+ structs/H5FD_hdfs_fapl_t.java
structs/H5G_info_t.java
structs/H5L_info_t.java
structs/H5O_hdr_info_t.java
diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java
index a1a8ede..481ca8c 100644
--- a/java/src/hdf/hdf5lib/H5.java
+++ b/java/src/hdf/hdf5lib/H5.java
@@ -50,6 +50,8 @@ import hdf.hdf5lib.structs.H5AC_cache_config_t;
import hdf.hdf5lib.structs.H5A_info_t;
import hdf.hdf5lib.structs.H5E_error2_t;
import hdf.hdf5lib.structs.H5F_info2_t;
+import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;
+import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;
import hdf.hdf5lib.structs.H5G_info_t;
import hdf.hdf5lib.structs.H5L_info_t;
import hdf.hdf5lib.structs.H5O_info_t;
@@ -7796,6 +7798,10 @@ public class H5 implements java.io.Serializable {
public synchronized static native int H5Pset_fapl_family(long fapl_id, long memb_size, long memb_fapl_id)
throws HDF5LibraryException, NullPointerException;
+ public synchronized static native int H5Pset_fapl_hdfs(long fapl_id, H5FD_hdfs_fapl_t fapl_conf) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native H5FD_hdfs_fapl_t H5Pget_fapl_hdfs(long fapl_id) throws HDF5LibraryException, NullPointerException;
+
/**
* H5Pget_fapl_multi Sets up use of the multi I/O driver.
*
@@ -7880,6 +7886,10 @@ public class H5 implements java.io.Serializable {
public synchronized static native int H5Pset_fapl_windows(long fapl_id) throws HDF5LibraryException, NullPointerException;
+ public synchronized static native int H5Pset_fapl_ros3(long fapl_id, H5FD_ros3_fapl_t fapl_conf) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native H5FD_ros3_fapl_t H5Pget_fapl_ros3(long fapl_id) throws HDF5LibraryException, NullPointerException;
+
// /////// unimplemented ////////
// Generic property list routines //
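
[Note: The wrappers added above expose the C routines H5Pset_fapl_hdfs/H5Pget_fapl_hdfs and H5Pset_fapl_ros3/H5Pget_fapl_ros3 to Java. A minimal usage sketch follows; it is not part of this commit, the object URL is a placeholder, and anonymous (empty) credentials are assumed.]

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;

public class Ros3OpenSketch {
    public static void main(String[] args) throws Exception {
        // Create a file access property list and attach the read-only S3 driver.
        long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
        // Empty region/id/key requests anonymous (non-authenticating) access.
        H5.H5Pset_fapl_ros3(fapl, new H5FD_ros3_fapl_t());
        // Placeholder URL; the ROS3 VFD is read-only, so open with H5F_ACC_RDONLY.
        long fid = H5.H5Fopen("https://example-bucket.s3.amazonaws.com/data.h5",
                HDF5Constants.H5F_ACC_RDONLY, fapl);
        H5.H5Fclose(fid);
        H5.H5Pclose(fapl);
    }
}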
diff --git a/java/src/hdf/hdf5lib/HDF5Constants.java b/java/src/hdf/hdf5lib/HDF5Constants.java
index cb5ed22..2e80f2e 100644
--- a/java/src/hdf/hdf5lib/HDF5Constants.java
+++ b/java/src/hdf/hdf5lib/HDF5Constants.java
@@ -246,6 +246,8 @@ public class HDF5Constants {
public static final long H5FD_SEC2 = H5FD_SEC2();
public static final long H5FD_STDIO = H5FD_STDIO();
public static final long H5FD_WINDOWS = H5FD_WINDOWS();
+ public static final long H5FD_ROS3 = H5FD_ROS3();
+ public static final long H5FD_HDFS = H5FD_HDFS();
public static final int H5FD_LOG_LOC_READ = H5FD_LOG_LOC_READ();
public static final int H5FD_LOG_LOC_WRITE = H5FD_LOG_LOC_WRITE();
public static final int H5FD_LOG_LOC_SEEK = H5FD_LOG_LOC_SEEK();
@@ -1111,6 +1113,10 @@ public class HDF5Constants {
private static native final long H5FD_WINDOWS();
+ private static native final long H5FD_ROS3();
+
+ private static native final long H5FD_HDFS();
+
private static native final int H5FD_LOG_LOC_READ();
private static native final int H5FD_LOG_LOC_WRITE();
diff --git a/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java
new file mode 100644
index 0000000..f56a038
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java
@@ -0,0 +1,102 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Read-Only HDFS Virtual File Driver (VFD) *
+ * Copyright (c) 2018, The HDF Group. *
+ * *
+ * All rights reserved. *
+ * *
+ * NOTICE: *
+ * All information contained herein is, and remains, the property of The HDF *
+ * Group. The intellectual and technical concepts contained herein are *
+ * proprietary to The HDF Group. Dissemination of this information or *
+ * reproduction of this material is strictly forbidden unless prior written *
+ * permission is obtained from The HDF Group. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+/*
+ * Java representation of the HDFS VFD file access property list (fapl)
+ * structure.
+ *
+ * Used for the access of files hosted on the Hadoop Distributed File System.
+ */
+
+@SuppressWarnings("serial") // mute default serialUID warnings until someone knowledgeable comes along or something breaks horribly
+public class H5FD_hdfs_fapl_t implements Serializable {
+
+ private long version;
+ private String namenode_name;
+ private String user_name;
+ private String kerberos_ticket_cache;
+ private int namenode_port;
+ private int stream_buffer_size;
+
+ /**
+ * Create a fapl_t structure with the specified components.
+ */
+ public H5FD_hdfs_fapl_t(
+ String namenode_name,
+ int namenode_port,
+ String user_name,
+ String kerberos_ticket_cache,
+ int stream_buffer_size)
+ {
+ this.version = 1;
+ this.namenode_name = namenode_name;
+ this.namenode_port = namenode_port;
+ this.user_name = user_name;
+ this.kerberos_ticket_cache = kerberos_ticket_cache;
+ this.stream_buffer_size = stream_buffer_size;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (!(o instanceof H5FD_hdfs_fapl_t))
+ return false;
+
+ H5FD_hdfs_fapl_t other = (H5FD_hdfs_fapl_t)o;
+ if (this.version != other.version)
+ return false;
+ if (!this.namenode_name.equals(other.namenode_name))
+ return false;
+ if (this.namenode_port != other.namenode_port)
+ return false;
+ if (!this.user_name.equals(other.user_name))
+ return false;
+ if (!this.kerberos_ticket_cache.equals(other.kerberos_ticket_cache))
+ return false;
+ if (this.stream_buffer_size != other.stream_buffer_size)
+ return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ /* this is a _very bad_ hash algorithm for purposes of hashing! */
+ /* implemented to satisfy the "contract" regarding equality */
+ int k = (int)this.version;
+ k += this.namenode_name.length();
+ k += this.user_name.length();
+ k += this.kerberos_ticket_cache.length();
+ k += namenode_port;
+ k += stream_buffer_size;
+ return k;
+ }
+
+ @Override
+ public String toString() {
+ return "H5FD_hdfs_fapl_t (Version: " + this.version + ") {" +
+ "\n namenode_name: '" + this.namenode_name +
+ "'\n namenode_port: " + this.namenode_port +
+ "\n user_name: '" + this.user_name +
+ "'\n kerberos_ticket_cache: '" + this.kerberos_ticket_cache +
+ "'\n stream_buffer_size: " + this.stream_buffer_size +
+ "\n}\n";
+ }
+}
+
+
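[Note: The struct above carries the HDFS connection settings. A configuration sketch follows; it is not part of this commit, and the namenode host, port, user name, and ticket-cache path are placeholder values.]

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;

public class HdfsFaplSketch {
    public static void main(String[] args) throws Exception {
        long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
        // namenode name and port, user name, Kerberos ticket cache, stream buffer size
        H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t(
                "namenode.example.org", 8020, "hdfs-user", "/tmp/krb5cc_1000", 2048);
        H5.H5Pset_fapl_hdfs(fapl, config);
        // Read the driver configuration back to confirm what was set.
        System.out.println(H5.H5Pget_fapl_hdfs(fapl));
        H5.H5Pclose(fapl);
    }
}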
diff --git a/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java
new file mode 100644
index 0000000..6b086c3
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java
@@ -0,0 +1,121 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Read-Only S3 Virtual File Driver (VFD) *
+ * Copyright (c) 2017-2018, The HDF Group. *
+ * *
+ * All rights reserved. *
+ * *
+ * NOTICE: *
+ * All information contained herein is, and remains, the property of The HDF *
+ * Group. The intellectual and technical concepts contained herein are *
+ * proprietary to The HDF Group. Dissemination of this information or *
+ * reproduction of this material is strictly forbidden unless prior written *
+ * permission is obtained from The HDF Group. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+/*
+ * Java representation of the ROS3 VFD file access property list (fapl)
+ * structure.
+ *
+ * Used for the access of files hosted remotely on S3 by Amazon.
+ *
+ * For simplicity, implemented assuming that all ROS3 fapls have components:
+ * - version
+ * - aws_region
+ * - secret_id
+ * - secret_key
+ *
+ * Future implementations may be created to enable different fapl "shapes"
+ * depending on provided version.
+ *
+ * proposed:
+ *
+ * H5FD_ros3_fapl_t (super class, has only version field)
+ * H5FD_ros3_fapl_v1_t (extends super with Version 1 components)
+ * H5FD_ros3_fapl_v2_t (extends super with Version 2 components)
+ * and so on, for each version
+ *
+ * "super" is passed around, and is version-checked and re-cast as
+ * appropriate
+ */
+
+@SuppressWarnings("serial") // mute default serialUID warnings until someone knowledgeable comes along or something breaks horribly
+public class H5FD_ros3_fapl_t implements Serializable {
+
+ private long version;
+ private String aws_region;
+ private String secret_id;
+ private String secret_key;
+
+ /**
+ * Create a "default" fapl_t structure, for anonymous access.
+ */
+ public H5FD_ros3_fapl_t () {
+ /* H5FD_ros3_fapl_t("", "", ""); */ /* defer */
+ this.version = 1;
+ this.aws_region = "";
+ this.secret_id = "";
+ this.secret_key = "";
+ }
+
+ /**
+ * Create a fapl_t structure with the specified components.
+ * If all are the empty string, is anonymous (non-authenticating).
+ * Region and ID must both be supplied for authentication.
+ *
+ * @param region "aws region" for authenticating request
+ * @param id "secret id" or "access id" for authenticating request
+ * @param key "secret key" or "access key" for authenticating request
+ */
+ public H5FD_ros3_fapl_t (String region, String id, String key) {
+ this.version = 1; /* must equal H5FD__CURR_ROS3_FAPL_T_VERSION */
+ /* as found in H5FDros3.h */
+ this.aws_region = region;
+ this.secret_id = id;
+ this.secret_key = key;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (!(o instanceof H5FD_ros3_fapl_t))
+ return false;
+
+ H5FD_ros3_fapl_t other = (H5FD_ros3_fapl_t)o;
+ if (this.version != other.version)
+ return false;
+ if (!this.aws_region.equals(other.aws_region))
+ return false;
+ if (!this.secret_key.equals(other.secret_key))
+ return false;
+ if (!this.secret_id.equals(other.secret_id))
+ return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ /* this is a _very bad_ hash algorithm for purposes of hashing! */
+ /* implemented to satisfy the "contract" regarding equality */
+ int k = (int)this.version;
+ k += this.aws_region.length();
+ k += this.secret_id.length();
+ k += this.secret_key.length();
+ return k;
+ }
+
+ @Override
+ public String toString() {
+ return "H5FD_ros3_fapl_t (Version:" + this.version + ") {" +
+ "\n aws_region : " + this.aws_region +
+ "\n secret_id : " + this.secret_id +
+ "\n secret_key : " + this.secret_key +
+ "\n}\n";
+ }
+}
+
+
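[Note: The comment in this file proposes splitting the fapl into a version-only super class with per-version subclasses. A rough sketch of that shape follows; the class names are hypothetical and not part of this commit.]

package hdf.hdf5lib.structs;

// Hypothetical super class holding only the version field, as the comment proposes.
public class H5FD_ros3_fapl_base_t implements java.io.Serializable {
    protected long version;
    public long getVersion() { return version; }
}

// Hypothetical Version 1 subclass carrying the components of the current H5FD_ros3_fapl_t.
class H5FD_ros3_fapl_v1_t extends H5FD_ros3_fapl_base_t {
    String aws_region;
    String secret_id;
    String secret_key;
    H5FD_ros3_fapl_v1_t(String region, String id, String key) {
        this.version = 1;   // must match H5FD__CURR_ROS3_FAPL_T_VERSION in H5FDros3.h
        this.aws_region = region;
        this.secret_id = id;
        this.secret_key = key;
    }
}

Per the comment, the base type would be passed around, version-checked, and re-cast as appropriate.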
diff --git a/java/src/jni/h5Constants.c b/java/src/jni/h5Constants.c
index b9e320f..ac55a13 100644
--- a/java/src/jni/h5Constants.c
+++ b/java/src/jni/h5Constants.c
@@ -453,6 +453,8 @@ Java_hdf_hdf5lib_HDF5Constants_H5FD_1DIRECT(JNIEnv *env, jclass cls) {
JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1FAMILY(JNIEnv *env, jclass cls) { return H5FD_FAMILY; }
JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_HDF5Constants_H5FD_1HDFS(JNIEnv *env, jclass cls) { return H5FD_HDFS; }
+JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG(JNIEnv *env, jclass cls) { return H5FD_LOG; }
JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1MPIO(JNIEnv *env, jclass cls) { return H5FD_MPIO; }
@@ -461,6 +463,8 @@ Java_hdf_hdf5lib_HDF5Constants_H5FD_1MULTI(JNIEnv *env, jclass cls) { return H5F
JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1SEC2(JNIEnv *env, jclass cls) { return H5FD_SEC2; }
JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_HDF5Constants_H5FD_1ROS3(JNIEnv *env, jclass cls) { return H5FD_ROS3; }
+JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1STDIO(JNIEnv *env, jclass cls) { return H5FD_STDIO; }
JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1WINDOWS(JNIEnv *env, jclass cls) {
diff --git a/java/test/CMakeLists.txt b/java/test/CMakeLists.txt
index 3298a47..dfb6e72 100644
--- a/java/test/CMakeLists.txt
+++ b/java/test/CMakeLists.txt
@@ -51,6 +51,20 @@ if (NOT HDF5_ENABLE_DEBUG_APIS)
)
endif ()
+if (HDF5_ENABLE_ROS3_VFD)
+ set (HDF5_JAVA_TEST_SOURCES
+ ${HDF5_JAVA_TEST_SOURCES}
+ TestH5Pfapls3
+ )
+endif ()
+
+if (HDF5_ENABLE_HDFS)
+ set (HDF5_JAVA_TEST_SOURCES
+ ${HDF5_JAVA_TEST_SOURCES}
+ TestH5Pfaplhdfs
+ )
+endif ()
+
set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_LIB_DIR}/junit.jar;${HDF5_JAVA_LIB_DIR}/hamcrest-core.jar;${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_SIMPLE_JAR}")
foreach (test_file ${HDF5_JAVA_TEST_SOURCES})
diff --git a/java/test/Makefile.am b/java/test/Makefile.am
index af99d92..c375938 100644
--- a/java/test/Makefile.am
+++ b/java/test/Makefile.am
@@ -61,6 +61,8 @@ noinst_JAVA = \
TestH5P.java \
TestH5PData.java \
TestH5Pfapl.java \
+ TestH5Pfaplhdfs.java \
+ TestH5Pfapls3.java \
TestH5Pvirtual.java \
TestH5Plist.java \
TestH5A.java \
diff --git a/java/test/TestAll.java b/java/test/TestAll.java
index 13cb597..c7c206c 100644
--- a/java/test/TestAll.java
+++ b/java/test/TestAll.java
@@ -27,6 +27,7 @@ import org.junit.runners.Suite;
TestH5Lparams.class, TestH5Lbasic.class, TestH5Lcreate.class,
TestH5R.class,
TestH5P.class, TestH5PData.class, TestH5Pfapl.class, TestH5Pvirtual.class, TestH5Plist.class,
+ TestH5Pfapls3.class, TestH5Pfaplhdfs.class,
TestH5A.class,
TestH5Oparams.class, TestH5Obasic.class, TestH5Ocopy.class, TestH5Ocreate.class,
TestH5PL.class, TestH5Z.class
diff --git a/java/test/TestH5Pfapl.java b/java/test/TestH5Pfapl.java
index 10a79dd..81a7ecb 100644
--- a/java/test/TestH5Pfapl.java
+++ b/java/test/TestH5Pfapl.java
@@ -15,6 +15,7 @@ package test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -28,6 +29,8 @@ import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;
import hdf.hdf5lib.exceptions.HDF5PropertyListInterfaceException;
import hdf.hdf5lib.structs.H5AC_cache_config_t;
+import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;
+import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;
import org.junit.After;
import org.junit.Before;
@@ -1398,4 +1401,5 @@ public class TestH5Pfapl {
fail("H5P_evict_on_close: " + err);
}
}
+
}
diff --git a/java/test/TestH5Pfaplhdfs.java b/java/test/TestH5Pfaplhdfs.java
new file mode 100644
index 0000000..2b0a808
--- /dev/null
+++ b/java/test/TestH5Pfaplhdfs.java
@@ -0,0 +1,393 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.exceptions.HDF5PropertyListInterfaceException;
+import hdf.hdf5lib.structs.H5AC_cache_config_t;
+import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;
+import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Pfaplhdfs {
+ @Rule public TestName testname = new TestName();
+
+ private static final String H5_FILE = "testPf.h5";
+ private static final String H5_LOG_FILE = "testPf.log";
+ private static final String H5_FAMILY_FILE = "testPf%05d";
+ private static final String H5_MULTI_FILE = "testPfmulti";
+ private static char MULTI_LETTERS[] = {'X','s','b','r','g','l','o'};
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ private static final int DIMF_X = 12;
+ private static final int DIMF_Y = 18;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long H5Fdsid = -1;
+ long H5Fdid = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+ long fapl_id = -1;
+ long plapl_id = -1;
+ long dapl_id = -1;
+ long plist_id = -1;
+ long btplist_id = -1;
+ long[] H5Fdims = { DIMF_X, DIMF_Y };
+ double windchillF[][] =
+ {{36.0, 31.0, 25.0, 19.0, 13.0, 7.0, 1.0, -5.0, -11.0, -16.0, -22.0, -28.0, -34.0, -40.0, -46.0, -52.0, -57.0, -63.0},
+ {34.0, 27.0, 21.0, 15.0, 9.0, 3.0, -4.0, -10.0, -16.0, -22.0, -28.0, -35.0, -41.0, -47.0, -53.0, -59.0, -66.0, -72.0},
+ {32.0, 25.0, 19.0, 13.0, 6.0, 0.0, -7.0, -13.0, -19.0, -26.0, -32.0, -39.0, -45.0, -51.0, -58.0, -64.0, -71.0, -77.0},
+ {30.0, 24.0, 17.0, 11.0, 4.0, -2.0, -9.0, -15.0, -22.0, -29.0, -35.0, -42.0, -48.0, -55.0, -61.0, -68.0, -74.0, -81.0},
+ {29.0, 23.0, 16.0, 9.0, 3.0, -4.0, -11.0, -17.0, -24.0, -31.0, -37.0, -44.0, -51.0, -58.0, -64.0, -71.0, -78.0, -84.0},
+ {28.0, 22.0, 15.0, 8.0, 1.0, -5.0, -12.0, -19.0, -26.0, -33.0, -39.0, -46.0, -53.0, -60.0, -67.0, -73.0, -80.0, -87.0},
+ {28.0, 21.0, 14.0, 7.0, 0.0, -7.0, -14.0, -21.0, -27.0, -34.0, -41.0, -48.0, -55.0, -62.0, -69.0, -76.0, -82.0, -89.0},
+ {27.0, 20.0, 13.0, 6.0, -1.0, -8.0, -15.0, -22.0, -29.0, -36.0, -43.0, -50.0, -57.0, -64.0, -71.0, -78.0, -84.0, -91.0},
+ {26.0, 19.0, 12.0, 5.0, -2.0, -9.0, -16.0, -23.0, -30.0, -37.0, -44.0, -51.0, -58.0, -65.0, -72.0, -79.0, -86.0, -93.0},
+ {26.0, 19.0, 12.0, 4.0, -3.0, -10.0, -17.0, -24.0, -31.0, -38.0, -45.0, -52.0, -60.0, -67.0, -74.0, -81.0, -88.0, -95.0},
+ {25.0, 18.0, 11.0, 4.0, -3.0, -11.0, -18.0, -25.0, -32.0, -39.0, -46.0, -54.0, -61.0, -68.0, -75.0, -82.0, -89.0, -97.0},
+ {25.0, 17.0, 10.0, 3.0, -4.0, -11.0, -19.0, -26.0, -33.0, -40.0, -48.0, -55.0, -62.0, -69.0, -76.0, -84.0, -91.0, -98.0}
+ };
+
+ private final void _deleteFile(String filename) {
+ File file = null;
+ try {
+ file = new File(filename);
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final void _deleteLogFile() {
+ File file = null;
+ try {
+ file = new File(H5_LOG_FILE);
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final void _deleteFamilyFile() {
+ File file = null;
+ for(int indx = 0; ;indx++) {
+ java.text.DecimalFormat myFormat = new java.text.DecimalFormat("00000");
+ try {
+ file = new File("test"+myFormat.format(new Integer(indx))+".h5");
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ else
+ return;
+ }
+ }
+
+ private final void _deleteMultiFile() {
+ File file = null;
+ for(int indx = 1;indx<7;indx++) {
+ try {
+ file = new File(H5_MULTI_FILE+"-"+MULTI_LETTERS[indx]+".h5");
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5Pfapl._createDataset: ", did > 0);
+
+ return did;
+ }
+
+ private final void _createFloatDataset() {
+ try {
+ H5Fdsid = H5.H5Screate_simple(2, H5Fdims, null);
+ H5Fdid = H5.H5Dcreate(H5fid, "dsfloat", HDF5Constants.H5T_NATIVE_FLOAT, H5Fdsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5Pfapl._createFloatDataset: ", H5Fdid > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5multiFileDS() {
+ try {
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5File(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5familyFile(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_FAMILY_FILE+".h5", HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5multiFile(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_MULTI_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ public void deleteH5file() {
+ _deleteFile(H5_FILE);
+ }
+
+ public void deleteH5familyfile() {
+ _deleteFamilyFile();
+ }
+
+ public void deleteH5multifile() {
+ _deleteMultiFile();
+ }
+
+ @Before
+ public void createFileAccess()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(fapl_id > 0);
+ try {
+ plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plapl_id > 0);
+ try {
+ plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ btplist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plist_id > 0);
+ assertTrue(btplist_id > 0);
+ assertTrue(dapl_id > 0);
+ }
+
+ @After
+ public void deleteFileAccess() throws HDF5LibraryException {
+ if (fapl_id > 0)
+ try {H5.H5Pclose(fapl_id);} catch (Exception ex) {}
+ if (plapl_id > 0)
+ try {H5.H5Pclose(plapl_id);} catch (Exception ex) {}
+ if (dapl_id > 0)
+ try {H5.H5Pclose(dapl_id);} catch (Exception ex) {}
+ if (plist_id > 0)
+ try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+ if (btplist_id > 0)
+ try {H5.H5Pclose(btplist_id);} catch (Exception ex) {}
+
+ if (H5Fdsid > 0)
+ try {H5.H5Sclose(H5Fdsid);} catch (Exception ex) {}
+ if (H5Fdid > 0)
+ try {H5.H5Dclose(H5Fdid);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ @Test
+ public void testHDFS_fapl()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_HDFS < 0)
+ throw new HDF5LibraryException("skip");
+
+ String nodename = "blues";
+ int nodeport = 12345;
+ String username = "sparticus";
+ String kerbcache = "/dev/null";
+ int streamsize = 1024;
+
+ final H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t(
+ nodename,
+ nodeport,
+ username,
+ kerbcache,
+ streamsize
+ );
+ assertTrue("setting fapl should succeed",
+ -1 < H5.H5Pset_fapl_hdfs(fapl_id, config));
+
+ assertEquals("driver types should match",
+ HDF5Constants.H5FD_HDFS,
+ H5.H5Pget_driver(fapl_id));
+
+ H5FD_hdfs_fapl_t copy = H5.H5Pget_fapl_hdfs(fapl_id);
+ assertEquals("fapl contents should match",
+ new H5FD_hdfs_fapl_t(
+ nodename,
+ nodeport,
+ username,
+ kerbcache,
+ streamsize),
+ copy);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_fapl_hdfs_invalid_fapl_id()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_HDFS < 0)
+ throw new HDF5LibraryException("skip");
+ H5FD_hdfs_fapl_t fails = H5.H5Pget_fapl_hdfs(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_fapl_hdfs_fapl_id_of_wrong_driver_type()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_HDFS < 0)
+ throw new HDF5LibraryException("skip");
+ if (HDF5Constants.H5FD_SEC2 < 0 )
+ throw new HDF5LibraryException("skip");
+ /* TODO: for now, test against a sec2 fapl only */
+
+ H5.H5Pset_fapl_sec2(fapl_id);
+ assertEquals("fapl_id was not set properly",
+ HDF5Constants.H5FD_SEC2,
+ H5.H5Pget_driver(fapl_id));
+ H5FD_hdfs_fapl_t fails = H5.H5Pget_fapl_hdfs(fapl_id);
+ }
+
+}
diff --git a/java/test/TestH5Pfapls3.java b/java/test/TestH5Pfapls3.java
new file mode 100644
index 0000000..3107bc8
--- /dev/null
+++ b/java/test/TestH5Pfapls3.java
@@ -0,0 +1,406 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.exceptions.HDF5PropertyListInterfaceException;
+import hdf.hdf5lib.structs.H5AC_cache_config_t;
+import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;
+import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Pfapls3 {
+ @Rule public TestName testname = new TestName();
+
+ private static final String H5_FILE = "testPf.h5";
+ private static final String H5_LOG_FILE = "testPf.log";
+ private static final String H5_FAMILY_FILE = "testPf%05d";
+ private static final String H5_MULTI_FILE = "testPfmulti";
+ private static char MULTI_LETTERS[] = {'X','s','b','r','g','l','o'};
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 6;
+ private static final int DIMF_X = 12;
+ private static final int DIMF_Y = 18;
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long H5Fdsid = -1;
+ long H5Fdid = -1;
+ long[] H5dims = { DIM_X, DIM_Y };
+ long fapl_id = -1;
+ long plapl_id = -1;
+ long dapl_id = -1;
+ long plist_id = -1;
+ long btplist_id = -1;
+ long[] H5Fdims = { DIMF_X, DIMF_Y };
+ double windchillF[][] =
+ {{36.0, 31.0, 25.0, 19.0, 13.0, 7.0, 1.0, -5.0, -11.0, -16.0, -22.0, -28.0, -34.0, -40.0, -46.0, -52.0, -57.0, -63.0},
+ {34.0, 27.0, 21.0, 15.0, 9.0, 3.0, -4.0, -10.0, -16.0, -22.0, -28.0, -35.0, -41.0, -47.0, -53.0, -59.0, -66.0, -72.0},
+ {32.0, 25.0, 19.0, 13.0, 6.0, 0.0, -7.0, -13.0, -19.0, -26.0, -32.0, -39.0, -45.0, -51.0, -58.0, -64.0, -71.0, -77.0},
+ {30.0, 24.0, 17.0, 11.0, 4.0, -2.0, -9.0, -15.0, -22.0, -29.0, -35.0, -42.0, -48.0, -55.0, -61.0, -68.0, -74.0, -81.0},
+ {29.0, 23.0, 16.0, 9.0, 3.0, -4.0, -11.0, -17.0, -24.0, -31.0, -37.0, -44.0, -51.0, -58.0, -64.0, -71.0, -78.0, -84.0},
+ {28.0, 22.0, 15.0, 8.0, 1.0, -5.0, -12.0, -19.0, -26.0, -33.0, -39.0, -46.0, -53.0, -60.0, -67.0, -73.0, -80.0, -87.0},
+ {28.0, 21.0, 14.0, 7.0, 0.0, -7.0, -14.0, -21.0, -27.0, -34.0, -41.0, -48.0, -55.0, -62.0, -69.0, -76.0, -82.0, -89.0},
+ {27.0, 20.0, 13.0, 6.0, -1.0, -8.0, -15.0, -22.0, -29.0, -36.0, -43.0, -50.0, -57.0, -64.0, -71.0, -78.0, -84.0, -91.0},
+ {26.0, 19.0, 12.0, 5.0, -2.0, -9.0, -16.0, -23.0, -30.0, -37.0, -44.0, -51.0, -58.0, -65.0, -72.0, -79.0, -86.0, -93.0},
+ {26.0, 19.0, 12.0, 4.0, -3.0, -10.0, -17.0, -24.0, -31.0, -38.0, -45.0, -52.0, -60.0, -67.0, -74.0, -81.0, -88.0, -95.0},
+ {25.0, 18.0, 11.0, 4.0, -3.0, -11.0, -18.0, -25.0, -32.0, -39.0, -46.0, -54.0, -61.0, -68.0, -75.0, -82.0, -89.0, -97.0},
+ {25.0, 17.0, 10.0, 3.0, -4.0, -11.0, -19.0, -26.0, -33.0, -40.0, -48.0, -55.0, -62.0, -69.0, -76.0, -84.0, -91.0, -98.0}
+ };
+
+ private final void _deleteFile(String filename) {
+ File file = null;
+ try {
+ file = new File(filename);
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final void _deleteLogFile() {
+ File file = null;
+ try {
+ file = new File(H5_LOG_FILE);
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+
+ private final void _deleteFamilyFile() {
+ File file = null;
+ for(int indx = 0; ;indx++) {
+ java.text.DecimalFormat myFormat = new java.text.DecimalFormat("00000");
+ try {
+ file = new File("test"+myFormat.format(new Integer(indx))+".h5");
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ else
+ return;
+ }
+ }
+
+ private final void _deleteMultiFile() {
+ File file = null;
+ for(int indx = 1;indx<7;indx++) {
+ try {
+ file = new File(H5_MULTI_FILE+"-"+MULTI_LETTERS[indx]+".h5");
+ }
+ catch (Throwable err) {}
+
+ if (file.exists()) {
+ try {file.delete();} catch (SecurityException e) {}
+ }
+ }
+ }
+
+ private final long _createDataset(long fid, long dsid, String name, long dapl) {
+ long did = -1;
+ try {
+ did = H5.H5Dcreate(fid, name, HDF5Constants.H5T_STD_I32BE, dsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, dapl);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5Pfapl._createDataset: ", did > 0);
+
+ return did;
+ }
+
+ private final void _createFloatDataset() {
+ try {
+ H5Fdsid = H5.H5Screate_simple(2, H5Fdims, null);
+ H5Fdid = H5.H5Dcreate(H5fid, "dsfloat", HDF5Constants.H5T_NATIVE_FLOAT, H5Fdsid,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("H5.H5Dcreate: " + err);
+ }
+ assertTrue("TestH5Pfapl._createFloatDataset: ", H5Fdid > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5multiFileDS() {
+ try {
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5File(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5familyFile(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_FAMILY_FILE+".h5", HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ H5did = _createDataset(H5fid, H5dsid, "dset", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+ assertTrue("TestH5Pfapl.createH5file: _createDataset: ", H5did > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ private final void _createH5multiFile(long fapl) {
+ try {
+ H5fid = H5.H5Fcreate(H5_MULTI_FILE, HDF5Constants.H5F_ACC_TRUNC,
+ HDF5Constants.H5P_DEFAULT, fapl);
+ H5dsid = H5.H5Screate_simple(2, H5dims, null);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createH5file: " + err);
+ }
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Fcreate: ", H5fid > 0);
+ assertTrue("TestH5Pfapl.createH5file: H5.H5Screate_simple: ", H5dsid > 0);
+
+ try {
+ H5.H5Fflush(H5fid, HDF5Constants.H5F_SCOPE_LOCAL);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ public void deleteH5file() {
+ _deleteFile(H5_FILE);
+ }
+
+ public void deleteH5familyfile() {
+ _deleteFamilyFile();
+ }
+
+ public void deleteH5multifile() {
+ _deleteMultiFile();
+ }
+
+ @Before
+ public void createFileAccess()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(fapl_id > 0);
+ try {
+ plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plapl_id > 0);
+ try {
+ plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ btplist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plist_id > 0);
+ assertTrue(btplist_id > 0);
+ assertTrue(dapl_id > 0);
+ }
+
+ @After
+ public void deleteFileAccess() throws HDF5LibraryException {
+ if (fapl_id > 0)
+ try {H5.H5Pclose(fapl_id);} catch (Exception ex) {}
+ if (plapl_id > 0)
+ try {H5.H5Pclose(plapl_id);} catch (Exception ex) {}
+ if (dapl_id > 0)
+ try {H5.H5Pclose(dapl_id);} catch (Exception ex) {}
+ if (plist_id > 0)
+ try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+ if (btplist_id > 0)
+ try {H5.H5Pclose(btplist_id);} catch (Exception ex) {}
+
+ if (H5Fdsid > 0)
+ try {H5.H5Sclose(H5Fdsid);} catch (Exception ex) {}
+ if (H5Fdid > 0)
+ try {H5.H5Dclose(H5Fdid);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Pset_fapl_ros3()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_ROS3 < 0)
+ return;
+
+ final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t();
+ assertEquals("Default fapl has unexpected contents",
+ new H5FD_ros3_fapl_t("", "", ""),
+ config);
+
+ H5.H5Pset_fapl_ros3(fapl_id, config);
+
+ assertEquals("driver types don't match",
+ HDF5Constants.H5FD_ROS3,
+ H5.H5Pget_driver(fapl_id));
+
+ /* get_fapl_ros3 can throw exception in error cases */
+ H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl_id);
+ assertEquals("contents of fapl set and get don't match",
+ new H5FD_ros3_fapl_t("", "", ""),
+ copy);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_fapl_ros3_invalid_fapl_id()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_ROS3 < 0)
+ throw new HDF5LibraryException("skip");
+ H5FD_ros3_fapl_t fails = H5.H5Pget_fapl_ros3(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_fapl_ros3_fapl_id_of_wrong_driver_type()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_ROS3 < 0)
+ throw new HDF5LibraryException("skip");
+ if (HDF5Constants.H5FD_SEC2 < 0 )
+ throw new HDF5LibraryException("skip");
+ /* TODO: for now, test against a sec2 fapl only */
+
+ H5.H5Pset_fapl_sec2(fapl_id);
+ assertEquals("fapl_id was not set properly",
+ HDF5Constants.H5FD_SEC2,
+ H5.H5Pget_driver(fapl_id));
+ H5FD_ros3_fapl_t fails = H5.H5Pget_fapl_ros3(fapl_id);
+ }
+
+ @Test
+ public void testH5Pset_fapl_ros3_specified()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_ROS3 < 0)
+ return;
+
+ String region = "us-east-1";
+ String acc_id = "my_access_id";
+ String acc_key = "my_access_key";
+
+ final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t(
+ region,
+ acc_id,
+ acc_key);
+ H5.H5Pset_fapl_ros3(fapl_id, config);
+ assertEquals("driver types don't match",
+ HDF5Constants.H5FD_ROS3,
+ H5.H5Pget_driver(fapl_id));
+
+ H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl_id);
+ assertEquals("contents of fapl set and get don't match",
+ new H5FD_ros3_fapl_t(region, acc_id, acc_key),
+ copy);
+ }
+
+}
diff --git a/java/test/junit.sh.in b/java/test/junit.sh.in
index 7fb1bae..008c89c 100644
--- a/java/test/junit.sh.in
+++ b/java/test/junit.sh.in
@@ -18,6 +18,8 @@ srcdir=@srcdir@
USE_FILTER_SZIP="@USE_FILTER_SZIP@"
USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
+USE_ROS3_VFD="@HAVE_ROS3_VFD@"
+USE_HDFS_VFD="@HAVE_LIBHDFS@"
TESTNAME=JUnitInterface
EXIT_SUCCESS=0
@@ -93,6 +95,8 @@ $HDFTEST_HOME/testfiles/JUnit-TestH5R.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5P.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5PData.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5Pfapl.txt
+$HDFTEST_HOME/testfiles/JUnit-TestH5Pfapls3.txt
+$HDFTEST_HOME/testfiles/JUnit-TestH5Pfaplhdfs.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5Pvirtual.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5Plist.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5A.txt
@@ -1096,6 +1100,50 @@ if test $USE_FILTER_SZIP = "yes"; then
test yes = "$verbose" && $DIFF JUnit-TestH5Giterate.txt JUnit-TestH5Giterate.out |sed 's/^/ /'
fi
fi
+if test $USE_ROS3_VFD = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfapls3"
+ TESTING JUnit-TestH5Pfapls3
+ ($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfapls3 > JUnit-TestH5Pfapls3.ext)
+
+ # Extract file name, line number, version and thread IDs because they may be different
+ sed -e 's/thread [0-9]*/thread (IDs)/' -e 's/: .*\.c /: (file name) /' \
+ -e 's/line [0-9]*/line (number)/' \
+ -e 's/Time: [0-9]*\.[0-9]*/Time: XXXX/' \
+ -e 's/v[1-9]*\.[0-9]*\./version (number)\./' \
+ -e 's/[1-9]*\.[0-9]*\.[0-9]*[^)]*/version (number)/' \
+ JUnit-TestH5Pfapls3.ext > JUnit-TestH5Pfapls3.out
+
+ if diff JUnit-TestH5Pfapls3.out JUnit-TestH5Pfapls3.txt > /dev/null; then
+ echo " PASSED JUnit-TestH5Pfapls3"
+ else
+ echo "**FAILED** JUnit-TestH5Pfapls3"
+ echo " Expected result differs from actual result"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF JUnit-TestH5Pfapls3.txt JUnit-TestH5Pfapls3.out |sed 's/^/ /'
+ fi
+fi
+if test $USE_HDFS_VFD = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfaplhdfs"
+ TESTING JUnit-TestH5Pfaplhdfs
+ ($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfaplhdfs > JUnit-TestH5Pfaplhdfs.ext)
+
+ # Extract file name, line number, version and thread IDs because they may be different
+ sed -e 's/thread [0-9]*/thread (IDs)/' -e 's/: .*\.c /: (file name) /' \
+ -e 's/line [0-9]*/line (number)/' \
+ -e 's/Time: [0-9]*\.[0-9]*/Time: XXXX/' \
+ -e 's/v[1-9]*\.[0-9]*\./version (number)\./' \
+ -e 's/[1-9]*\.[0-9]*\.[0-9]*[^)]*/version (number)/' \
+ JUnit-TestH5Pfaplhdfs.ext > JUnit-TestH5Pfaplhdfs.out
+
+ if diff JUnit-TestH5Pfaplhdfs.out JUnit-TestH5Pfaplhdfs.txt > /dev/null; then
+ echo " PASSED JUnit-TestH5Pfaplhdfs"
+ else
+ echo "**FAILED** JUnit-TestH5Pfaplhdfs"
+ echo " Expected result differs from actual result"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF JUnit-TestH5Pfaplhdfs.txt JUnit-TestH5Pfaplhdfs.out |sed 's/^/ /'
+ fi
+fi
# Clean up temporary files/directories
diff --git a/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt b/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt
new file mode 100644
index 0000000..47a00a4
--- /dev/null
+++ b/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt
@@ -0,0 +1,9 @@
+JUnit version 4.11
+.testH5Pget_fapl_hdfs_invalid_fapl_id
+.testH5Pget_fapl_hdfs_fapl_id_of_wrong_driver_type
+.testHDFS_fapl
+
+Time: XXXX
+
+OK (3 tests)
+
diff --git a/java/test/testfiles/JUnit-TestH5Pfapls3.txt b/java/test/testfiles/JUnit-TestH5Pfapls3.txt
new file mode 100644
index 0000000..3f46342
--- /dev/null
+++ b/java/test/testfiles/JUnit-TestH5Pfapls3.txt
@@ -0,0 +1,10 @@
+JUnit version 4.11
+.testH5Pset_fapl_ros3_specified
+.testH5Pset_fapl_ros3
+.testH5Pget_fapl_ros3_invalid_fapl_id
+.testH5Pget_fapl_ros3_fapl_id_of_wrong_driver_type
+
+Time: XXXX
+
+OK (4 tests)
+