author     Allen Byrne <byrn@hdfgroup.org>    2019-07-29 21:10:45 (GMT)
committer  Allen Byrne <byrn@hdfgroup.org>    2019-07-29 21:10:45 (GMT)
commit     517cadac2647bf325720c74285381045fb84be30 (patch)
tree       fa3b7712ca6f4370cb989ae669c9425f09c21e76 /java
parent     da2ec6163d4a8f76ef9c32518b5a0b5810f2d63f (diff)
parent     f97e11e7635a0cd8728d4604ca5dceb3925ba44c (diff)
Merging in latest from upstream (HDFFV/hdf5:refs/heads/develop)
* commit 'f97e11e7635a0cd8728d4604ca5dceb3925ba44c':
  Update comment and check for strtoumax.
  Modify CMakeLists.txt file for renamed h5tools_test_utils files.
  Add HD to string functions.
  Switched strtoul to strtoumax in H5FDs3comms.c.
  Removed unused functions and variables in s3 and hdfs TestH5Pfapl*.java.
  Update Copyright headers.
  Squashed commit of the following:
Diffstat (limited to 'java')
-rw-r--r--  java/src/Makefile.am                                |   2
-rw-r--r--  java/src/hdf/hdf5lib/CMakeLists.txt                 |   2
-rw-r--r--  java/src/hdf/hdf5lib/H5.java                        |  10
-rw-r--r--  java/src/hdf/hdf5lib/HDF5Constants.java             |   6
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java  | 102
-rw-r--r--  java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java  | 121
-rw-r--r--  java/src/jni/h5Constants.c                          |   4
-rw-r--r--  java/test/CMakeLists.txt                            |  14
-rw-r--r--  java/test/Makefile.am                               |   2
-rw-r--r--  java/test/TestAll.java                              |   1
-rw-r--r--  java/test/TestH5Pfapl.java                          |   4
-rw-r--r--  java/test/TestH5Pfaplhdfs.java                      | 181
-rw-r--r--  java/test/TestH5Pfapls3.java                        | 194
-rw-r--r--  java/test/junit.sh.in                               |  48
-rw-r--r--  java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt       |   9
-rw-r--r--  java/test/testfiles/JUnit-TestH5Pfapls3.txt         |  10
16 files changed, 710 insertions(+), 0 deletions(-)
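
Taken together, the changes below add read-only S3 (ROS3) and HDFS support to the Java bindings: two new fapl struct classes, four new H5P native methods, two new driver constants, and matching JUnit tests. A minimal sketch of how the ROS3 pieces might be used together, assuming the native library was built with the ROS3 VFD; the object URL is a placeholder and the open pattern follows the C library's ROS3 usage, only the fapl calls are added by this diff:

    // Sketch only: the S3 URL is illustrative; only H5Pset_fapl_ros3 and the
    // H5FD_ROS3 constant come from this commit.
    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;

    public class Ros3ReadSketch {
        public static void main(String[] args) throws Exception {
            if (HDF5Constants.H5FD_ROS3 < 0) {
                System.err.println("native library built without the ROS3 VFD");
                return;
            }
            long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
            try {
                // Anonymous (non-authenticating) access; pass region/id/key to authenticate.
                H5.H5Pset_fapl_ros3(fapl, new H5FD_ros3_fapl_t());
                long fid = H5.H5Fopen("https://mybucket.s3.amazonaws.com/data.h5",
                                      HDF5Constants.H5F_ACC_RDONLY, fapl);
                try {
                    // ... read-only dataset access goes here ...
                }
                finally {
                    H5.H5Fclose(fid);
                }
            }
            finally {
                H5.H5Pclose(fapl);
            }
        }
    }
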
diff --git a/java/src/Makefile.am b/java/src/Makefile.am
index bd55c39..fcdeae9 100644
--- a/java/src/Makefile.am
+++ b/java/src/Makefile.am
@@ -98,6 +98,8 @@ hdf5_java_JAVA = \
${pkgpath}/structs/H5A_info_t.java \
${pkgpath}/structs/H5E_error2_t.java \
${pkgpath}/structs/H5F_info2_t.java \
+ ${pkgpath}/structs/H5FD_hdfs_fapl_t.java \
+ ${pkgpath}/structs/H5FD_ros3_fapl_t.java \
${pkgpath}/structs/H5G_info_t.java \
${pkgpath}/structs/H5L_info_t.java \
${pkgpath}/structs/H5O_info_t.java \
diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt
index c171ea8..be8f60a 100644
--- a/java/src/hdf/hdf5lib/CMakeLists.txt
+++ b/java/src/hdf/hdf5lib/CMakeLists.txt
@@ -73,6 +73,8 @@ set (HDF5_JAVA_HDF_HDF5_STRUCTS_SOURCES
structs/H5AC_cache_config_t.java
structs/H5E_error2_t.java
structs/H5F_info2_t.java
+ structs/H5FD_ros3_fapl_t.java
+ structs/H5FD_hdfs_fapl_t.java
structs/H5G_info_t.java
structs/H5L_info_t.java
structs/H5O_hdr_info_t.java
diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java
index a1a8ede..481ca8c 100644
--- a/java/src/hdf/hdf5lib/H5.java
+++ b/java/src/hdf/hdf5lib/H5.java
@@ -50,6 +50,8 @@ import hdf.hdf5lib.structs.H5AC_cache_config_t;
import hdf.hdf5lib.structs.H5A_info_t;
import hdf.hdf5lib.structs.H5E_error2_t;
import hdf.hdf5lib.structs.H5F_info2_t;
+import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;
+import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;
import hdf.hdf5lib.structs.H5G_info_t;
import hdf.hdf5lib.structs.H5L_info_t;
import hdf.hdf5lib.structs.H5O_info_t;
@@ -7796,6 +7798,10 @@ public class H5 implements java.io.Serializable {
public synchronized static native int H5Pset_fapl_family(long fapl_id, long memb_size, long memb_fapl_id)
throws HDF5LibraryException, NullPointerException;
+ public synchronized static native int H5Pset_fapl_hdfs(long fapl_id, H5FD_hdfs_fapl_t fapl_conf) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native H5FD_hdfs_fapl_t H5Pget_fapl_hdfs(long fapl_id) throws HDF5LibraryException, NullPointerException;
+
/**
* H5Pget_fapl_multi Sets up use of the multi I/O driver.
*
@@ -7880,6 +7886,10 @@ public class H5 implements java.io.Serializable {
public synchronized static native int H5Pset_fapl_windows(long fapl_id) throws HDF5LibraryException, NullPointerException;
+ public synchronized static native int H5Pset_fapl_ros3(long fapl_id, H5FD_ros3_fapl_t fapl_conf) throws HDF5LibraryException, NullPointerException;
+
+ public synchronized static native H5FD_ros3_fapl_t H5Pget_fapl_ros3(long fapl_id) throws HDF5LibraryException, NullPointerException;
+
// /////// unimplemented ////////
// Generic property list routines //
diff --git a/java/src/hdf/hdf5lib/HDF5Constants.java b/java/src/hdf/hdf5lib/HDF5Constants.java
index cb5ed22..2e80f2e 100644
--- a/java/src/hdf/hdf5lib/HDF5Constants.java
+++ b/java/src/hdf/hdf5lib/HDF5Constants.java
@@ -246,6 +246,8 @@ public class HDF5Constants {
public static final long H5FD_SEC2 = H5FD_SEC2();
public static final long H5FD_STDIO = H5FD_STDIO();
public static final long H5FD_WINDOWS = H5FD_WINDOWS();
+ public static final long H5FD_ROS3 = H5FD_ROS3();
+ public static final long H5FD_HDFS = H5FD_HDFS();
public static final int H5FD_LOG_LOC_READ = H5FD_LOG_LOC_READ();
public static final int H5FD_LOG_LOC_WRITE = H5FD_LOG_LOC_WRITE();
public static final int H5FD_LOG_LOC_SEEK = H5FD_LOG_LOC_SEEK();
@@ -1111,6 +1113,10 @@ public class HDF5Constants {
private static native final long H5FD_WINDOWS();
+ private static native final long H5FD_ROS3();
+
+ private static native final long H5FD_HDFS();
+
private static native final int H5FD_LOG_LOC_READ();
private static native final int H5FD_LOG_LOC_WRITE();
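
As the new tests later in this diff show, H5FD_ROS3 and H5FD_HDFS come back negative when the corresponding VFD was not compiled into the native library, so callers can probe availability before touching the new fapl routines. A small sketch of that guard (the class name is illustrative):

    // Sketch: mirrors the "driver not present" check used by the new
    // TestH5Pfapls3 and TestH5Pfaplhdfs tests below.
    import hdf.hdf5lib.HDF5Constants;

    public final class CloudVfdAvailability {
        public static boolean ros3Available() {
            return HDF5Constants.H5FD_ROS3 >= 0;
        }
        public static boolean hdfsAvailable() {
            return HDF5Constants.H5FD_HDFS >= 0;
        }
    }
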
diff --git a/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java
new file mode 100644
index 0000000..f56a038
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java
@@ -0,0 +1,102 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Read-Only HDFS Virtual File Driver (VFD) *
+ * Copyright (c) 2018, The HDF Group. *
+ * *
+ * All rights reserved. *
+ * *
+ * NOTICE: *
+ * All information contained herein is, and remains, the property of The HDF *
+ * Group. The intellectual and technical concepts contained herein are *
+ * proprietary to The HDF Group. Dissemination of this information or *
+ * reproduction of this material is strictly forbidden unless prior written *
+ * permission is obtained from The HDF Group. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+/*
+ * Java representation of the HDFS VFD file access property list (fapl)
+ * structure.
+ *
+ * Used for the access of files hosted on the Hadoop Distributed File System.
+ */
+
+@SuppressWarnings("serial") // mute default serialUID warnings until someone knowledgeable comes along or something breaks horribly
+public class H5FD_hdfs_fapl_t implements Serializable {
+
+ private long version;
+ private String namenode_name;
+ private String user_name;
+ private String kerberos_ticket_cache;
+ private int namenode_port;
+ private int stream_buffer_size;
+
+ /**
+ * Create a fapl_t structure with the specified components.
+ */
+ public H5FD_hdfs_fapl_t(
+ String namenode_name,
+ int namenode_port,
+ String user_name,
+ String kerberos_ticket_cache,
+ int stream_buffer_size)
+ {
+ this.version = 1;
+ this.namenode_name = namenode_name;
+ this.namenode_port = namenode_port;
+ this.user_name = user_name;
+ this.kerberos_ticket_cache = kerberos_ticket_cache;
+ this.stream_buffer_size = stream_buffer_size;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (!(o instanceof H5FD_hdfs_fapl_t))
+ return false;
+
+ H5FD_hdfs_fapl_t other = (H5FD_hdfs_fapl_t)o;
+ if (this.version != other.version)
+ return false;
+ if (!this.namenode_name.equals(other.namenode_name))
+ return false;
+ if (this.namenode_port != other.namenode_port)
+ return false;
+ if (!this.user_name.equals(other.user_name))
+ return false;
+ if (!this.kerberos_ticket_cache.equals(other.kerberos_ticket_cache))
+ return false;
+ if (this.stream_buffer_size != other.stream_buffer_size)
+ return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ /* this is a _very bad_ hash algorithm for purposes of hashing! */
+ /* implemented to satisfy the "contract" regarding equality */
+ int k = (int)this.version;
+ k += this.namenode_name.length();
+ k += this.user_name.length();
+ k += this.kerberos_ticket_cache.length();
+ k += namenode_port;
+ k += stream_buffer_size;
+ return k;
+ }
+
+ @Override
+ public String toString() {
+ return "H5FD_hdfs_fapl_t (Version: " + this.version + ") {" +
+ "\n namenode_name: '" + this.namenode_name +
+ "'\n namenode_port: " + this.namenode_port +
+ "\n user_name: '" + this.user_name +
+ "'\n kerberos_ticket_cache: '" + this.kerberos_ticket_cache +
+ "'\n stream_buffer_size: " + this.stream_buffer_size +
+ "\n}\n";
+ }
+}
+
+
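
A short usage sketch for the struct above, applied with the new H5Pset_fapl_hdfs binding from H5.java. The snippet assumes it runs inside a method with the usual hdf.hdf5lib imports, that the HDFS VFD is present, and every HDFS-specific value is a placeholder:

    // Sketch only: hostname, port, user, ticket cache and buffer size are
    // illustrative values, not defaults of the library.
    long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
    H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t(
            "namenode.example.org",   // namenode_name
            8020,                     // namenode_port
            "hdfsuser",               // user_name
            "/tmp/krb5cc_1000",       // kerberos_ticket_cache
            2048);                    // stream_buffer_size
    H5.H5Pset_fapl_hdfs(fapl, config);
    assert H5.H5Pget_driver(fapl) == HDF5Constants.H5FD_HDFS;
    H5.H5Pclose(fapl);
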
diff --git a/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java
new file mode 100644
index 0000000..6b086c3
--- /dev/null
+++ b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java
@@ -0,0 +1,121 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Read-Only S3 Virtual File Driver (VFD) *
+ * Copyright (c) 2017-2018, The HDF Group. *
+ * *
+ * All rights reserved. *
+ * *
+ * NOTICE: *
+ * All information contained herein is, and remains, the property of The HDF *
+ * Group. The intellectual and technical concepts contained herein are *
+ * proprietary to The HDF Group. Dissemination of this information or *
+ * reproduction of this material is strictly forbidden unless prior written *
+ * permission is obtained from The HDF Group. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package hdf.hdf5lib.structs;
+
+import java.io.Serializable;
+
+/*
+ * Java representation of the ROS3 VFD file access property list (fapl)
+ * structure.
+ *
+ * Used for the access of files hosted remotely on S3 by Amazon.
+ *
+ * For simplicity, implemented assuming that all ROS3 fapls have components:
+ * - version
+ * - aws_region
+ * - secret_id
+ * - secret_key
+ *
+ * Future implementations may be created to enable different fapl "shapes"
+ * depending on provided version.
+ *
+ * proposed:
+ *
+ * H5FD_ros3_fapl_t (super class, has only version field)
+ * H5FD_ros3_fapl_v1_t (extends super with Version 1 components)
+ * H5FD_ros3_fapl_v2_t (extends super with Version 2 components)
+ * and so on, for each version
+ *
+ * "super" is passed around, and is version-checked and re-cast as
+ * appropriate
+ */
+
+@SuppressWarnings("serial") // mute default serialUID warnings until someone knowledgeable comes along or something breaks horribly
+public class H5FD_ros3_fapl_t implements Serializable {
+
+ private long version;
+ private String aws_region;
+ private String secret_id;
+ private String secret_key;
+
+ /**
+ * Create a "default" fapl_t structure, for anonymous access.
+ */
+ public H5FD_ros3_fapl_t () {
+ /* H5FD_ros3_fapl_t("", "", ""); */ /* defer */
+ this.version = 1;
+ this.aws_region = "";
+ this.secret_id = "";
+ this.secret_key = "";
+ }
+
+ /**
+ * Create a fapl_t structure with the specified components.
+ * If all are the empty string, is anonymous (non-authenticating).
+ * Region and ID must both be supplied for authentication.
+ *
+ * @param region "aws region" for authenticating request
+ * @param id "secret id" or "access id" for authenticating request
+ * @param key "secret key" or "access key" for authenticating request
+ */
+ public H5FD_ros3_fapl_t (String region, String id, String key) {
+ this.version = 1; /* must equal H5FD__CURR_ROS3_FAPL_T_VERSION */
+ /* as found in H5FDros3.h */
+ this.aws_region = region;
+ this.secret_id = id;
+ this.secret_key = key;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null)
+ return false;
+ if (!(o instanceof H5FD_ros3_fapl_t))
+ return false;
+
+ H5FD_ros3_fapl_t other = (H5FD_ros3_fapl_t)o;
+ if (this.version != other.version)
+ return false;
+ if (!this.aws_region.equals(other.aws_region))
+ return false;
+ if (!this.secret_key.equals(other.secret_key))
+ return false;
+ if (!this.secret_id.equals(other.secret_id))
+ return false;
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ /* this is a _very bad_ hash algorithm for purposes of hashing! */
+ /* implemented to satisfy the "contract" regarding equality */
+ int k = (int)this.version;
+ k += this.aws_region.length();
+ k += this.secret_id.length();
+ k += this.secret_key.length();
+ return k;
+ }
+
+ @Override
+ public String toString() {
+ return "H5FD_ros3_fapl_t (Version:" + this.version + ") {" +
+ "\n aws_region : " + this.aws_region +
+ "\n secret_id : " + this.secret_id +
+ "\n secret_key : " + this.secret_key +
+ "\n}\n";
+ }
+}
+
+
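
The no-argument constructor above is equivalent to the three-argument form with empty strings, which the new TestH5Pfapls3 relies on. A minimal set/get round trip through the new bindings, reusing the illustrative credentials from that test:

    // Sketch: set a credentialed ROS3 fapl and read it back; assumes the usual
    // hdf.hdf5lib imports and a library built with the ROS3 VFD.
    long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
    H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t("us-east-1", "my_access_id", "my_access_key");
    H5.H5Pset_fapl_ros3(fapl, config);

    H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl);  // throws HDF5LibraryException on error
    System.out.println(copy);                           // pretty-printed by the toString() above
    H5.H5Pclose(fapl);
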
diff --git a/java/src/jni/h5Constants.c b/java/src/jni/h5Constants.c
index b9e320f..ac55a13 100644
--- a/java/src/jni/h5Constants.c
+++ b/java/src/jni/h5Constants.c
@@ -453,6 +453,8 @@ Java_hdf_hdf5lib_HDF5Constants_H5FD_1DIRECT(JNIEnv *env, jclass cls) {
JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1FAMILY(JNIEnv *env, jclass cls) { return H5FD_FAMILY; }
JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_HDF5Constants_H5FD_1HDFS(JNIEnv *env, jclass cls) { return H5FD_HDFS; }
+JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG(JNIEnv *env, jclass cls) { return H5FD_LOG; }
JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1MPIO(JNIEnv *env, jclass cls) { return H5FD_MPIO; }
@@ -461,6 +463,8 @@ Java_hdf_hdf5lib_HDF5Constants_H5FD_1MULTI(JNIEnv *env, jclass cls) { return H5F
JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1SEC2(JNIEnv *env, jclass cls) { return H5FD_SEC2; }
JNIEXPORT jlong JNICALL
+Java_hdf_hdf5lib_HDF5Constants_H5FD_1ROS3(JNIEnv *env, jclass cls) { return H5FD_ROS3; }
+JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1STDIO(JNIEnv *env, jclass cls) { return H5FD_STDIO; }
JNIEXPORT jlong JNICALL
Java_hdf_hdf5lib_HDF5Constants_H5FD_1WINDOWS(JNIEnv *env, jclass cls) {
diff --git a/java/test/CMakeLists.txt b/java/test/CMakeLists.txt
index 3298a47..dfb6e72 100644
--- a/java/test/CMakeLists.txt
+++ b/java/test/CMakeLists.txt
@@ -51,6 +51,20 @@ if (NOT HDF5_ENABLE_DEBUG_APIS)
)
endif ()
+if (HDF5_ENABLE_ROS3_VFD)
+ set (HDF5_JAVA_TEST_SOURCES
+ ${HDF5_JAVA_TEST_SOURCES}
+ TestH5Pfapls3
+ )
+endif ()
+
+if (HDF5_ENABLE_HDFS)
+ set (HDF5_JAVA_TEST_SOURCES
+ ${HDF5_JAVA_TEST_SOURCES}
+ TestH5Pfaplhdfs
+ )
+endif ()
+
set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_LIB_DIR}/junit.jar;${HDF5_JAVA_LIB_DIR}/hamcrest-core.jar;${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_SIMPLE_JAR}")
foreach (test_file ${HDF5_JAVA_TEST_SOURCES})
diff --git a/java/test/Makefile.am b/java/test/Makefile.am
index af99d92..c375938 100644
--- a/java/test/Makefile.am
+++ b/java/test/Makefile.am
@@ -61,6 +61,8 @@ noinst_JAVA = \
TestH5P.java \
TestH5PData.java \
TestH5Pfapl.java \
+ TestH5Pfaplhdfs.java \
+ TestH5Pfapls3.java \
TestH5Pvirtual.java \
TestH5Plist.java \
TestH5A.java \
diff --git a/java/test/TestAll.java b/java/test/TestAll.java
index 13cb597..c7c206c 100644
--- a/java/test/TestAll.java
+++ b/java/test/TestAll.java
@@ -27,6 +27,7 @@ import org.junit.runners.Suite;
TestH5Lparams.class, TestH5Lbasic.class, TestH5Lcreate.class,
TestH5R.class,
TestH5P.class, TestH5PData.class, TestH5Pfapl.class, TestH5Pvirtual.class, TestH5Plist.class,
+ TestH5Pfapls3.class, TestH5Pfaplhdfs.class,
TestH5A.class,
TestH5Oparams.class, TestH5Obasic.class, TestH5Ocopy.class, TestH5Ocreate.class,
TestH5PL.class, TestH5Z.class
diff --git a/java/test/TestH5Pfapl.java b/java/test/TestH5Pfapl.java
index 10a79dd..81a7ecb 100644
--- a/java/test/TestH5Pfapl.java
+++ b/java/test/TestH5Pfapl.java
@@ -15,6 +15,7 @@ package test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -28,6 +29,8 @@ import hdf.hdf5lib.exceptions.HDF5Exception;
import hdf.hdf5lib.exceptions.HDF5LibraryException;
import hdf.hdf5lib.exceptions.HDF5PropertyListInterfaceException;
import hdf.hdf5lib.structs.H5AC_cache_config_t;
+import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;
+import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;
import org.junit.After;
import org.junit.Before;
@@ -1398,4 +1401,5 @@ public class TestH5Pfapl {
fail("H5P_evict_on_close: " + err);
}
}
+
}
diff --git a/java/test/TestH5Pfaplhdfs.java b/java/test/TestH5Pfaplhdfs.java
new file mode 100644
index 0000000..30d326e
--- /dev/null
+++ b/java/test/TestH5Pfaplhdfs.java
@@ -0,0 +1,181 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.exceptions.HDF5PropertyListInterfaceException;
+import hdf.hdf5lib.structs.H5AC_cache_config_t;
+import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;
+import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Pfaplhdfs {
+ @Rule public TestName testname = new TestName();
+
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long H5Fdsid = -1;
+ long H5Fdid = -1;
+ long fapl_id = -1;
+ long plapl_id = -1;
+ long dapl_id = -1;
+ long plist_id = -1;
+ long btplist_id = -1;
+
+ @Before
+ public void createFileAccess()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(fapl_id > 0);
+ try {
+ plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plapl_id > 0);
+ try {
+ plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ btplist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plist_id > 0);
+ assertTrue(btplist_id > 0);
+ assertTrue(dapl_id > 0);
+ }
+
+ @After
+ public void deleteFileAccess() throws HDF5LibraryException {
+ if (fapl_id > 0)
+ try {H5.H5Pclose(fapl_id);} catch (Exception ex) {}
+ if (plapl_id > 0)
+ try {H5.H5Pclose(plapl_id);} catch (Exception ex) {}
+ if (dapl_id > 0)
+ try {H5.H5Pclose(dapl_id);} catch (Exception ex) {}
+ if (plist_id > 0)
+ try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+ if (btplist_id > 0)
+ try {H5.H5Pclose(btplist_id);} catch (Exception ex) {}
+
+ if (H5Fdsid > 0)
+ try {H5.H5Sclose(H5Fdsid);} catch (Exception ex) {}
+ if (H5Fdid > 0)
+ try {H5.H5Dclose(H5Fdid);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ @Test
+ public void testHDFS_fapl()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_HDFS < 0)
+ throw new HDF5LibraryException("skip");
+
+ String nodename = "blues";
+ int nodeport = 12345;
+ String username = "sparticus";
+ String kerbcache = "/dev/null";
+ int streamsize = 1024;
+
+ final H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t(
+ nodename,
+ nodeport,
+ username,
+ kerbcache,
+ streamsize
+ );
+ assertTrue("setting fapl should succeed",
+ -1 < H5.H5Pset_fapl_hdfs(fapl_id, config));
+
+ assertEquals("driver types should match",
+ HDF5Constants.H5FD_HDFS,
+ H5.H5Pget_driver(fapl_id));
+
+ H5FD_hdfs_fapl_t copy = H5.H5Pget_fapl_hdfs(fapl_id);
+ assertEquals("fapl contents should match",
+ new H5FD_hdfs_fapl_t(
+ nodename,
+ nodeport,
+ username,
+ kerbcache,
+ streamsize),
+ copy);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_fapl_hdfs_invalid_fapl_id()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_HDFS < 0)
+ throw new HDF5LibraryException("skip");
+ H5FD_hdfs_fapl_t fails = H5.H5Pget_fapl_hdfs(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_fapl_hdfs_fapl_id_of_wrong_driver_type()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_HDFS < 0)
+ throw new HDF5LibraryException("skip");
+ if (HDF5Constants.H5FD_SEC2 < 0 )
+ throw new HDF5LibraryException("skip");
+ /* TODO: for now, test against a sec2 fapl only */
+
+ H5.H5Pset_fapl_sec2(fapl_id);
+ assertEquals("fapl_id was not set properly",
+ HDF5Constants.H5FD_SEC2,
+ H5.H5Pget_driver(fapl_id));
+ H5FD_hdfs_fapl_t fails = H5.H5Pget_fapl_hdfs(fapl_id);
+ }
+
+}
diff --git a/java/test/TestH5Pfapls3.java b/java/test/TestH5Pfapls3.java
new file mode 100644
index 0000000..00a2a73
--- /dev/null
+++ b/java/test/TestH5Pfapls3.java
@@ -0,0 +1,194 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+package test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.text.DecimalFormat;
+import java.text.NumberFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.exceptions.HDF5Exception;
+import hdf.hdf5lib.exceptions.HDF5LibraryException;
+import hdf.hdf5lib.exceptions.HDF5PropertyListInterfaceException;
+import hdf.hdf5lib.structs.H5AC_cache_config_t;
+import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;
+import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TestName;
+
+public class TestH5Pfapls3 {
+ @Rule public TestName testname = new TestName();
+
+ long H5fid = -1;
+ long H5dsid = -1;
+ long H5did = -1;
+ long H5Fdsid = -1;
+ long H5Fdid = -1;
+ long fapl_id = -1;
+ long plapl_id = -1;
+ long dapl_id = -1;
+ long plist_id = -1;
+ long btplist_id = -1;
+
+ @Before
+ public void createFileAccess()
+ throws NullPointerException, HDF5Exception {
+ assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0);
+ System.out.print(testname.getMethodName());
+
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(fapl_id > 0);
+ try {
+ plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plapl_id > 0);
+ try {
+ plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ btplist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ fail("TestH5Pfapl.createFileAccess: " + err);
+ }
+ assertTrue(plist_id > 0);
+ assertTrue(btplist_id > 0);
+ assertTrue(dapl_id > 0);
+ }
+
+ @After
+ public void deleteFileAccess() throws HDF5LibraryException {
+ if (fapl_id > 0)
+ try {H5.H5Pclose(fapl_id);} catch (Exception ex) {}
+ if (plapl_id > 0)
+ try {H5.H5Pclose(plapl_id);} catch (Exception ex) {}
+ if (dapl_id > 0)
+ try {H5.H5Pclose(dapl_id);} catch (Exception ex) {}
+ if (plist_id > 0)
+ try {H5.H5Pclose(plist_id);} catch (Exception ex) {}
+ if (btplist_id > 0)
+ try {H5.H5Pclose(btplist_id);} catch (Exception ex) {}
+
+ if (H5Fdsid > 0)
+ try {H5.H5Sclose(H5Fdsid);} catch (Exception ex) {}
+ if (H5Fdid > 0)
+ try {H5.H5Dclose(H5Fdid);} catch (Exception ex) {}
+ if (H5dsid > 0)
+ try {H5.H5Sclose(H5dsid);} catch (Exception ex) {}
+ if (H5did > 0)
+ try {H5.H5Dclose(H5did);} catch (Exception ex) {}
+ if (H5fid > 0)
+ try {H5.H5Fclose(H5fid);} catch (Exception ex) {}
+ System.out.println();
+ }
+
+ @Test
+ public void testH5Pset_fapl_ros3()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_ROS3 < 0)
+ return;
+
+ final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t();
+ assertEquals("Default fapl has unexpected contents",
+ new H5FD_ros3_fapl_t("", "", ""),
+ config);
+
+ H5.H5Pset_fapl_ros3(fapl_id, config);
+
+ assertEquals("driver types don't match",
+ HDF5Constants.H5FD_ROS3,
+ H5.H5Pget_driver(fapl_id));
+
+ /* get_fapl_ros3 can throw exception in error cases */
+ H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl_id);
+ assertEquals("contents of fapl set and get don't match",
+ new H5FD_ros3_fapl_t("", "", ""),
+ copy);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_fapl_ros3_invalid_fapl_id()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_ROS3 < 0)
+ throw new HDF5LibraryException("skip");
+ H5FD_ros3_fapl_t fails = H5.H5Pget_fapl_ros3(-1);
+ }
+
+ @Test(expected = HDF5LibraryException.class)
+ public void testH5Pget_fapl_ros3_fapl_id_of_wrong_driver_type()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_ROS3 < 0)
+ throw new HDF5LibraryException("skip");
+ if (HDF5Constants.H5FD_SEC2 < 0 )
+ throw new HDF5LibraryException("skip");
+ /* TODO: for now, test against a sec2 fapl only */
+
+ H5.H5Pset_fapl_sec2(fapl_id);
+ assertEquals("fapl_id was not set properly",
+ HDF5Constants.H5FD_SEC2,
+ H5.H5Pget_driver(fapl_id));
+ H5FD_ros3_fapl_t fails = H5.H5Pget_fapl_ros3(fapl_id);
+ }
+
+ @Test
+ public void testH5Pset_fapl_ros3_specified()
+ throws Exception
+ {
+ if (HDF5Constants.H5FD_ROS3 < 0)
+ return;
+
+ String region = "us-east-1";
+ String acc_id = "my_access_id";
+ String acc_key = "my_access_key";
+
+ final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t(
+ region,
+ acc_id,
+ acc_key);
+ H5.H5Pset_fapl_ros3(fapl_id, config);
+ assertEquals("driver types don't match",
+ HDF5Constants.H5FD_ROS3,
+ H5.H5Pget_driver(fapl_id));
+
+ H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl_id);
+ assertEquals("contents of fapl set and get don't match",
+ new H5FD_ros3_fapl_t(region, acc_id, acc_key),
+ copy);
+ }
+
+}
diff --git a/java/test/junit.sh.in b/java/test/junit.sh.in
index 7fb1bae..008c89c 100644
--- a/java/test/junit.sh.in
+++ b/java/test/junit.sh.in
@@ -18,6 +18,8 @@ srcdir=@srcdir@
USE_FILTER_SZIP="@USE_FILTER_SZIP@"
USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
+USE_ROS3_VFD="@HAVE_ROS3_VFD@"
+USE_HDFS_VFD="@HAVE_LIBHDFS@"
TESTNAME=JUnitInterface
EXIT_SUCCESS=0
@@ -93,6 +95,8 @@ $HDFTEST_HOME/testfiles/JUnit-TestH5R.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5P.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5PData.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5Pfapl.txt
+$HDFTEST_HOME/testfiles/JUnit-TestH5Pfapls3.txt
+$HDFTEST_HOME/testfiles/JUnit-TestH5Pfaplhdfs.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5Pvirtual.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5Plist.txt
$HDFTEST_HOME/testfiles/JUnit-TestH5A.txt
@@ -1096,6 +1100,50 @@ if test $USE_FILTER_SZIP = "yes"; then
test yes = "$verbose" && $DIFF JUnit-TestH5Giterate.txt JUnit-TestH5Giterate.out |sed 's/^/ /'
fi
fi
+if test $ROS3_VFD = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfapls3"
+ TESTING JUnit-TestH5Pfapls3
+ ($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfapls3 > JUnit-TestH5Pfapls3.ext)
+
+ # Extract file name, line number, version and thread IDs because they may be different
+ sed -e 's/thread [0-9]*/thread (IDs)/' -e 's/: .*\.c /: (file name) /' \
+ -e 's/line [0-9]*/line (number)/' \
+ -e 's/Time: [0-9]*\.[0-9]*/Time: XXXX/' \
+ -e 's/v[1-9]*\.[0-9]*\./version (number)\./' \
+ -e 's/[1-9]*\.[0-9]*\.[0-9]*[^)]*/version (number)/' \
+ JUnit-TestH5Pfapls3.ext > JUnit-TestH5Pfapls3.out
+
+ if diff JUnit-TestH5Pfapls3.out JUnit-TestH5Pfapls3.txt > /dev/null; then
+ echo " PASSED JUnit-TestH5Pfapls3"
+ else
+ echo "**FAILED** JUnit-TestH5Pfapls3"
+ echo " Expected result differs from actual result"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF JUnit-TestH5Pfapls3.txt JUnit-TestH5Pfapls3.out |sed 's/^/ /'
+ fi
+fi
+if test $HAVE_LIBHDFS = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfaplhdfs"
+ TESTING JUnit-TestH5Pfaplhdfs
+ ($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfaplhdfs > JUnit-TestH5Pfaplhdfs.ext)
+
+ # Extract file name, line number, version and thread IDs because they may be different
+ sed -e 's/thread [0-9]*/thread (IDs)/' -e 's/: .*\.c /: (file name) /' \
+ -e 's/line [0-9]*/line (number)/' \
+ -e 's/Time: [0-9]*\.[0-9]*/Time: XXXX/' \
+ -e 's/v[1-9]*\.[0-9]*\./version (number)\./' \
+ -e 's/[1-9]*\.[0-9]*\.[0-9]*[^)]*/version (number)/' \
+ JUnit-TestH5Pfaplhdfs.ext > JUnit-TestH5Pfaplhdfs.out
+
+ if diff JUnit-TestH5Pfaplhdfs.out JUnit-TestH5Pfaplhdfs.txt > /dev/null; then
+ echo " PASSED JUnit-TestH5Pfaplhdfs"
+ else
+ echo "**FAILED** JUnit-TestH5Pfaplhdfs"
+ echo " Expected result differs from actual result"
+ nerrors="`expr $nerrors + 1`"
+ test yes = "$verbose" && $DIFF JUnit-TestH5Pfaplhdfs.txt JUnit-TestH5Pfaplhdfs.out |sed 's/^/ /'
+ fi
+fi
# Clean up temporary files/directories
diff --git a/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt b/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt
new file mode 100644
index 0000000..47a00a4
--- /dev/null
+++ b/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt
@@ -0,0 +1,9 @@
+JUnit version 4.11
+.testH5Pget_fapl_hdfs_invalid_fapl_id
+.testH5Pget_fapl_hdfs_fapl_id_of_wrong_driver_type
+.testHDFS_fapl
+
+Time: XXXX
+
+OK (3 tests)
+
diff --git a/java/test/testfiles/JUnit-TestH5Pfapls3.txt b/java/test/testfiles/JUnit-TestH5Pfapls3.txt
new file mode 100644
index 0000000..3f46342
--- /dev/null
+++ b/java/test/testfiles/JUnit-TestH5Pfapls3.txt
@@ -0,0 +1,10 @@
+JUnit version 4.11
+.testH5Pset_fapl_ros3_specified
+.testH5Pset_fapl_ros3
+.testH5Pget_fapl_ros3_invalid_fapl_id
+.testH5Pget_fapl_ros3_fapl_id_of_wrong_driver_type
+
+Time: XXXX
+
+OK (4 tests)
+