Diffstat (limited to 'java')
-rw-r--r-- | java/src/Makefile.am | 2
-rw-r--r-- | java/src/hdf/hdf5lib/CMakeLists.txt | 2
-rw-r--r-- | java/src/hdf/hdf5lib/H5.java | 10
-rw-r--r-- | java/src/hdf/hdf5lib/HDF5Constants.java | 6
-rw-r--r-- | java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java | 102
-rw-r--r-- | java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java | 123
-rw-r--r-- | java/src/jni/h5Constants.c | 4
-rw-r--r-- | java/src/jni/h5fImp.c | 4
-rw-r--r-- | java/src/jni/h5pFAPLImp.c | 334
-rw-r--r-- | java/src/jni/h5pFAPLImp.h | 36
-rw-r--r-- | java/test/CMakeLists.txt | 14
-rw-r--r-- | java/test/Makefile.am | 2
-rw-r--r-- | java/test/TestAll.java | 1
-rw-r--r-- | java/test/TestH5Pfaplhdfs.java | 138
-rw-r--r-- | java/test/TestH5Pfapls3.java | 159
-rw-r--r-- | java/test/junit.sh.in | 48
-rw-r--r-- | java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt | 9
-rw-r--r-- | java/test/testfiles/JUnit-TestH5Pfapls3.txt | 10
18 files changed, 1004 insertions, 0 deletions
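
For orientation before the diff itself: the changeset introduces the H5FD_ros3_fapl_t struct and the H5.H5Pset_fapl_ros3 / H5.H5Pget_fapl_ros3 natives as new public Java API. The sketch below is illustrative only and is not part of the diff; it assumes a library built with the read-only S3 VFD enabled (otherwise the natives raise an exception, as the tests further down exercise), and the region and credential strings are placeholders.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5FD_ros3_fapl_t;

    public class Ros3FaplExample {
        public static void main(String[] args) throws Exception {
            /* File access property list that will carry the ROS3 driver settings. */
            long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
            try {
                /* Placeholder credentials; the no-argument constructor gives anonymous access. */
                H5FD_ros3_fapl_t config =
                        new H5FD_ros3_fapl_t("us-east-1", "my_access_id", "my_access_key");
                H5.H5Pset_fapl_ros3(fapl, config);

                /* Read the configuration back to confirm the driver took effect. */
                if (H5.H5Pget_driver(fapl) == HDF5Constants.H5FD_ROS3)
                    System.out.println(H5.H5Pget_fapl_ros3(fapl));
            }
            finally {
                H5.H5Pclose(fapl);
            }
        }
    }
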
diff --git a/java/src/Makefile.am b/java/src/Makefile.am index bd55c39..fcdeae9 100644 --- a/java/src/Makefile.am +++ b/java/src/Makefile.am @@ -98,6 +98,8 @@ hdf5_java_JAVA = \ ${pkgpath}/structs/H5A_info_t.java \ ${pkgpath}/structs/H5E_error2_t.java \ ${pkgpath}/structs/H5F_info2_t.java \ + ${pkgpath}/structs/H5FD_hdfs_fapl_t.java \ + ${pkgpath}/structs/H5FD_ros3_fapl_t.java \ ${pkgpath}/structs/H5G_info_t.java \ ${pkgpath}/structs/H5L_info_t.java \ ${pkgpath}/structs/H5O_info_t.java \ diff --git a/java/src/hdf/hdf5lib/CMakeLists.txt b/java/src/hdf/hdf5lib/CMakeLists.txt index c171ea8..be8f60a 100644 --- a/java/src/hdf/hdf5lib/CMakeLists.txt +++ b/java/src/hdf/hdf5lib/CMakeLists.txt @@ -73,6 +73,8 @@ set (HDF5_JAVA_HDF_HDF5_STRUCTS_SOURCES structs/H5AC_cache_config_t.java structs/H5E_error2_t.java structs/H5F_info2_t.java + structs/H5FD_ros3_fapl_t.java + structs/H5FD_hdfs_fapl_t.java structs/H5G_info_t.java structs/H5L_info_t.java structs/H5O_hdr_info_t.java diff --git a/java/src/hdf/hdf5lib/H5.java b/java/src/hdf/hdf5lib/H5.java index 6e37d77..2a76b89 100644 --- a/java/src/hdf/hdf5lib/H5.java +++ b/java/src/hdf/hdf5lib/H5.java @@ -50,6 +50,8 @@ import hdf.hdf5lib.structs.H5AC_cache_config_t; import hdf.hdf5lib.structs.H5A_info_t; import hdf.hdf5lib.structs.H5E_error2_t; import hdf.hdf5lib.structs.H5F_info2_t; +import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t; +import hdf.hdf5lib.structs.H5FD_ros3_fapl_t; import hdf.hdf5lib.structs.H5G_info_t; import hdf.hdf5lib.structs.H5L_info_t; import hdf.hdf5lib.structs.H5O_info_t; @@ -7759,6 +7761,10 @@ public class H5 implements java.io.Serializable { public synchronized static native int H5Pset_fapl_family(long fapl_id, long memb_size, long memb_fapl_id) throws HDF5LibraryException, NullPointerException; + public synchronized static native int H5Pset_fapl_hdfs(long fapl_id, H5FD_hdfs_fapl_t fapl_conf) throws HDF5LibraryException, NullPointerException; + + public synchronized static native H5FD_hdfs_fapl_t H5Pget_fapl_hdfs(long fapl_id) throws HDF5LibraryException, NullPointerException; + /** * H5Pget_fapl_multi Sets up use of the multi I/O driver. 
* @@ -7843,6 +7849,10 @@ public class H5 implements java.io.Serializable { public synchronized static native int H5Pset_fapl_windows(long fapl_id) throws HDF5LibraryException, NullPointerException; + public synchronized static native int H5Pset_fapl_ros3(long fapl_id, H5FD_ros3_fapl_t fapl_conf) throws HDF5LibraryException, NullPointerException; + + public synchronized static native H5FD_ros3_fapl_t H5Pget_fapl_ros3(long fapl_id) throws HDF5LibraryException, NullPointerException; + // /////// unimplemented //////// // Generic property list routines // diff --git a/java/src/hdf/hdf5lib/HDF5Constants.java b/java/src/hdf/hdf5lib/HDF5Constants.java index 7eddac0..3f8e5d1 100644 --- a/java/src/hdf/hdf5lib/HDF5Constants.java +++ b/java/src/hdf/hdf5lib/HDF5Constants.java @@ -238,6 +238,8 @@ public class HDF5Constants { public static final long H5FD_SEC2 = H5FD_SEC2(); public static final long H5FD_STDIO = H5FD_STDIO(); public static final long H5FD_WINDOWS = H5FD_WINDOWS(); + public static final long H5FD_ROS3 = H5FD_ROS3(); + public static final long H5FD_HDFS = H5FD_HDFS(); public static final int H5FD_LOG_LOC_READ = H5FD_LOG_LOC_READ(); public static final int H5FD_LOG_LOC_WRITE = H5FD_LOG_LOC_WRITE(); public static final int H5FD_LOG_LOC_SEEK = H5FD_LOG_LOC_SEEK(); @@ -1072,6 +1074,10 @@ public class HDF5Constants { private static native final long H5FD_WINDOWS(); + private static native final long H5FD_ROS3(); + + private static native final long H5FD_HDFS(); + private static native final int H5FD_LOG_LOC_READ(); private static native final int H5FD_LOG_LOC_WRITE(); diff --git a/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java new file mode 100644 index 0000000..9fcff2e --- /dev/null +++ b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java @@ -0,0 +1,102 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Read-Only HDFS Virtual File Driver (VFD) * + * Copyright (c) 2018, The HDF Group. * + * * + * All rights reserved. * + * * + * NOTICE: * + * All information contained herein is, and remains, the property of The HDF * + * Group. The intellectual and technical concepts contained herein are * + * proprietary to The HDF Group. Dissemination of this information or * + * reproduction of this material is strictly forbidden unless prior written * + * permission is obtained from The HDF Group. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +package hdf.hdf5lib.structs; + +import java.io.Serializable; + +/* + * Java representation of the HDFS VFD file access property list (fapl) + * structure. + * + * Used for the access of files hosted on the Hadoop Distributed File System. + */ + +public class H5FD_hdfs_fapl_t implements Serializable { + private static final long serialVersionUID = 2072473407027648309L; + + private int version; + private String namenode_name; + private int namenode_port; + private String user_name; + private String kerberos_ticket_cache; + private int stream_buffer_size; + + /* + * Create a fapl_t structure with the specified components. 
+ */ + public H5FD_hdfs_fapl_t( + String namenode_name, + int namenode_port, + String user_name, + String kerberos_ticket_cache, + int stream_buffer_size) + { + this.version = 1; + this.namenode_name = namenode_name; + this.namenode_port = namenode_port; + this.user_name = user_name; + this.kerberos_ticket_cache = kerberos_ticket_cache; + this.stream_buffer_size = stream_buffer_size; + } + + @Override + public boolean equals(Object o) { + if (o == null) + return false; + if (!(o instanceof H5FD_hdfs_fapl_t)) + return false; + + H5FD_hdfs_fapl_t other = (H5FD_hdfs_fapl_t)o; + if (this.version != other.version) + return false; + if (!this.namenode_name.equals(other.namenode_name)) + return false; + if (this.namenode_port != other.namenode_port) + return false; + if (!this.user_name.equals(other.user_name)) + return false; + if (!this.kerberos_ticket_cache.equals(other.kerberos_ticket_cache)) + return false; + if (this.stream_buffer_size != other.stream_buffer_size) + return false; + return true; + } + + @Override + public int hashCode() { + /* this is a _very bad_ hash algorithm for purposes of hashing! */ + /* implemented to satisfy the "contract" regarding equality */ + int k = (int)this.version; + k += this.namenode_name.length(); + k += this.user_name.length(); + k += this.kerberos_ticket_cache.length(); + k += namenode_port; + k += stream_buffer_size; + return k; + } + + @Override + public String toString() { + return "H5FD_hdfs_fapl_t (Version: " + this.version + ") {" + + "\n namenode_name: '" + this.namenode_name + + "'\n namenode_port: " + this.namenode_port + + "\n user_name: '" + this.user_name + + "'\n kerberos_ticket_cache: '" + this.kerberos_ticket_cache + + "'\n stream_buffer_size: " + this.stream_buffer_size + + "\n}\n"; + } +} + + diff --git a/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java new file mode 100644 index 0000000..a899e10 --- /dev/null +++ b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java @@ -0,0 +1,123 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Read-Only S3 Virtual File Driver (VFD) * + * Copyright (c) 2017-2018, The HDF Group. * + * * + * All rights reserved. * + * * + * NOTICE: * + * All information contained herein is, and remains, the property of The HDF * + * Group. The intellectual and technical concepts contained herein are * + * proprietary to The HDF Group. Dissemination of this information or * + * reproduction of this material is strictly forbidden unless prior written * + * permission is obtained from The HDF Group. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +package hdf.hdf5lib.structs; + +import java.io.Serializable; + +/* + * Java representation of the ROS3 VFD file access property list (fapl) + * structure. + * + * Used for the access of files hosted remotely on S3 by Amazon. + * + * For simplicity, implemented assuming that all ROS3 fapls have components: + * - version + * - authenticate + * - aws_region + * - secret_id + * - secret_key + * + * Future implementations may be created to enable different fapl "shapes" + * depending on provided version. 
+ * + * proposed: + * + * H5FD_ros3_fapl_t (super class, has only version field) + * H5FD_ros3_fapl_v1_t (extends super with Version 1 components) + * H5FD_ros3_fapl_v2_t (extends super with Version 2 components) + * and so on, for each version + * + * "super" is passed around, and is version-checked and re-cast as + * appropriate + */ + +public class H5FD_ros3_fapl_t implements Serializable { + private static final long serialVersionUID = 8985533001471224030L; + + private int version; + private boolean authenticate; + private String aws_region; + private String secret_id; + private String secret_key; + + /** + * Create a "default" fapl_t structure, for anonymous access. + */ + public H5FD_ros3_fapl_t () { + /* H5FD_ros3_fapl_t("", "", ""); */ /* defer */ + this.version = 1; + this.aws_region = ""; + this.secret_id = ""; + this.secret_key = ""; + } + + /** + * Create a fapl_t structure with the specified components. + * If all are the empty string, is anonymous (non-authenticating). + * Region and ID must both be supplied for authentication. + * + * @param region "aws region" for authenticating request + * @param id "secret id" or "access id" for authenticating request + * @param key "secret key" or "access key" for authenticating request + */ + public H5FD_ros3_fapl_t (String region, String id, String key) { + this.version = 1; /* must equal H5FD_CURR_ROS3_FAPL_T_VERSION */ + /* as found in H5FDros3.h */ + this.aws_region = region; + this.secret_id = id; + this.secret_key = key; + } + + @Override + public boolean equals(Object o) { + if (o == null) + return false; + if (!(o instanceof H5FD_ros3_fapl_t)) + return false; + + H5FD_ros3_fapl_t other = (H5FD_ros3_fapl_t)o; + if (this.version != other.version) + return false; + if (!this.aws_region.equals(other.aws_region)) + return false; + if (!this.secret_key.equals(other.secret_key)) + return false; + if (!this.secret_id.equals(other.secret_id)) + return false; + return true; + } + + @Override + public int hashCode() { + /* this is a _very bad_ hash algorithm for purposes of hashing! 
*/ + /* implemented to satisfy the "contract" regarding equality */ + int k = (int)this.version; + k += this.aws_region.length(); + k += this.secret_id.length(); + k += this.secret_key.length(); + return k; + } + + @Override + public String toString() { + return "H5FD_ros3_fapl_t (Version:" + this.version + ") {" + + "\n aws_region : " + this.aws_region + + "\n secret_id : " + this.secret_id + + "\n secret_key : " + this.secret_key + + "\n}\n"; + } +} + + diff --git a/java/src/jni/h5Constants.c b/java/src/jni/h5Constants.c index 1ea549e..f1bd951 100644 --- a/java/src/jni/h5Constants.c +++ b/java/src/jni/h5Constants.c @@ -438,6 +438,8 @@ Java_hdf_hdf5lib_HDF5Constants_H5FD_1DIRECT(JNIEnv *env, jclass cls) { JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1FAMILY(JNIEnv *env, jclass cls) { return H5FD_FAMILY; } JNIEXPORT jlong JNICALL +Java_hdf_hdf5lib_HDF5Constants_H5FD_1HDFS(JNIEnv *env, jclass cls) { return H5FD_HDFS; } +JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1LOG(JNIEnv *env, jclass cls) { return H5FD_LOG; } JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1MPIO(JNIEnv *env, jclass cls) { return H5FD_MPIO; } @@ -446,6 +448,8 @@ Java_hdf_hdf5lib_HDF5Constants_H5FD_1MULTI(JNIEnv *env, jclass cls) { return H5F JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1SEC2(JNIEnv *env, jclass cls) { return H5FD_SEC2; } JNIEXPORT jlong JNICALL +Java_hdf_hdf5lib_HDF5Constants_H5FD_1ROS3(JNIEnv *env, jclass cls) { return H5FD_ROS3; } +JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1STDIO(JNIEnv *env, jclass cls) { return H5FD_STDIO; } JNIEXPORT jlong JNICALL Java_hdf_hdf5lib_HDF5Constants_H5FD_1WINDOWS(JNIEnv *env, jclass cls) { diff --git a/java/src/jni/h5fImp.c b/java/src/jni/h5fImp.c index 115e8f4..2924d8f 100644 --- a/java/src/jni/h5fImp.c +++ b/java/src/jni/h5fImp.c @@ -161,6 +161,10 @@ Java_hdf_hdf5lib_H5_H5Fis_1hdf5 UNUSED(clss); +#ifdef H5_NO_DEPRECATED_SYMBOLS + H5_UNIMPLEMENTED(ENVONLY, "H5Fis_hdf5: not implemented"); +#endif + if (NULL == name) H5_NULL_ARGUMENT_ERROR(ENVONLY, "H5Fis_hdf5: file name is NULL"); diff --git a/java/src/jni/h5pFAPLImp.c b/java/src/jni/h5pFAPLImp.c index acfc853..006707a 100644 --- a/java/src/jni/h5pFAPLImp.c +++ b/java/src/jni/h5pFAPLImp.c @@ -370,6 +370,179 @@ done: return (jlong)offset; } /* end Java_hdf_hdf5lib_H5_H5Pget_1family_1offset */ +/* Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_hdfs + * Signature: (J)Lhdf/hdf5lib/structs/H5FD_hdfs_fapl_t; + */ +JNIEXPORT jobject JNICALL +Java_hdf_hdf5lib_H5_H5Pget_1fapl_1hdfs + (JNIEnv *env, jclass clss, jlong fapl_id) +{ +#ifdef H5_HAVE_LIBHDFS + H5FD_hdfs_fapl_t fa; + jvalue args[5]; + jint j_namenode_port = 0; + jstring j_namenode_name = NULL; + jstring j_user_name = NULL; + jstring j_kerb_cache_path = NULL; + jint j_stream_buffer_size = 0; +#endif /* H5_HAVE_LIBHDFS */ + jobject ret_obj = NULL; + + UNUSED(clss); + +#ifdef H5_HAVE_LIBHDFS + if (H5Pget_fapl_hdfs((hid_t)fapl_id, &fa) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + if (NULL != fa.namenode_name) { + if (NULL == (j_namenode_name = ENVPTR->NewStringUTF(ENVONLY, fa.namenode_name))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create namenode_name string"); + } + } + args[0].l = j_namenode_name; + + args[1].i = (jint)fa.namenode_port; + + if (NULL != fa.user_name) { + if (NULL == (j_user_name = ENVPTR->NewStringUTF(ENVONLY, fa.user_name))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, 
"H5Pget_fapl_hdfs: out of memory - can't create user_name string"); + } + } + args[2].l = j_user_name; + + if (NULL != fa.kerberos_ticket_cache) { + if (NULL == (j_kerb_cache_path = ENVPTR->NewStringUTF(ENVONLY, fa.kerberos_ticket_cache))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create kerberos_ticket_cache string"); + } + } + args[3].l = j_kerb_cache_path; + + args[4].i = (jint)fa.stream_buffer_size; + + CALL_CONSTRUCTOR(ENVONLY, "hdf/hdf5lib/structs/H5FD_hdfs_fapl_t", "(Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;I)V", args, ret_obj); +#else + H5_UNIMPLEMENTED(ENVONLY, "H5Pget_fapl_hdfs: not implemented"); +#endif /* H5_HAVE_LIBHDFS */ + +done: + return ret_obj; +} /* end Java_hdf_hdf5lib_H5_H5Pget_1fapl_1hdfs */ + +/* + * Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_hdfs + * Signature: (JLhdf/hdf5lib/structs/H5FD_hdfs_fapl_t;)V + */ +JNIEXPORT void JNICALL +Java_hdf_hdf5lib_H5_H5Pset_1fapl_1hdfs + (JNIEnv *env, jclass clss, jlong fapl_id, jobject fapl_config) +{ +#ifdef H5_HAVE_LIBHDFS + H5FD_hdfs_fapl_t instance; + const char *str = NULL; + jfieldID fid; + jstring j_str; + jclass cls; +#endif /* H5_HAVE_LIBHDFS */ + + UNUSED(clss); + +#ifdef H5_HAVE_LIBHDFS + HDmemset(&instance, 0, sizeof(H5FD_hdfs_fapl_t)); + + if (NULL == (cls = ENVPTR->GetObjectClass(ENVONLY, fapl_config))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "version", "I"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + instance.version = ENVPTR->GetIntField(ENVONLY, fapl_config, fid); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "namenode_name", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5FDset_fapl_hdfs: fapl_config namenode_name not pinned"); + + HDstrncpy(instance.namenode_name, str, H5FD__HDFS_NODE_NAME_SPACE + 1); + instance.namenode_name[H5FD__HDFS_NODE_NAME_SPACE] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.namenode_name, 0, H5FD__HDFS_NODE_NAME_SPACE + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "namenode_port", "I"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + instance.namenode_port = ENVPTR->GetIntField(ENVONLY, fapl_config, fid); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "user_name", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5FDset_fapl_hdfs: fapl_config user_name not pinned"); + + HDstrncpy(instance.user_name, str, H5FD__HDFS_USER_NAME_SPACE + 1); + instance.user_name[H5FD__HDFS_USER_NAME_SPACE] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.user_name, 0, H5FD__HDFS_USER_NAME_SPACE + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "kerberos_ticket_cache", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, 
NULL, "H5FDset_fapl_hdfs: fapl_config kerberos_ticket_cache not pinned"); + + HDstrncpy(instance.kerberos_ticket_cache, str, H5FD__HDFS_KERB_CACHE_PATH_SPACE + 1); + instance.kerberos_ticket_cache[H5FD__HDFS_KERB_CACHE_PATH_SPACE] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.kerberos_ticket_cache, 0, H5FD__HDFS_KERB_CACHE_PATH_SPACE + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "stream_buffer_size", "I"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + instance.stream_buffer_size = ENVPTR->GetIntField(ENVONLY, fapl_config, fid); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (H5Pset_fapl_hdfs((hid_t)fapl_id, &instance) < 0) + H5_LIBRARY_ERROR(ENVONLY); +#else + H5_UNIMPLEMENTED(ENVONLY, "H5Pset_fapl_hdfs: not implemented"); +#endif /* H5_HAVE_LIBHDFS */ + +done: + /* NOP */; +#ifdef H5_HAVE_LIBHDFS + if (str) + UNPIN_JAVA_STRING(ENVONLY, j_str, str); +#endif /* H5_HAVE_LIBHDFS */ +} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1hdfs */ + /* * Class: hdf_hdf5lib_H5 * Method: H5Pset_fapl_log @@ -617,6 +790,167 @@ done: */ /* + * Class: hdf5_hdf5lib_H5 + * Method: H5Pget_fapl_ros3 + * Signature: (J)Lhdf/hdf5lib/structs/H5FD_ros3_fapl_t; + */ +JNIEXPORT jobject JNICALL +Java_hdf_hdf5lib_H5_H5Pget_1fapl_1ros3 + (JNIEnv *env, jclass clss, jlong fapl_id) +{ +#ifdef H5_HAVE_ROS3_VFD + H5FD_ros3_fapl_t fa; + jvalue args[3]; + jstring j_aws = NULL; + jstring j_id = NULL; + jstring j_key = NULL; +#endif /* H5_HAVE_ROS3_VFD */ + jobject ret_obj = NULL; + + UNUSED(clss); + +#ifdef H5_HAVE_ROS3_VFD + /* pass fapl and fapl_t instance into library get_fapl */ + /* store fapl details in ros3_fapl_t instance `fa` */ + if (H5Pget_fapl_ros3((hid_t)fapl_id, &fa) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + if (NULL != fa.aws_region) { + if (NULL == (j_aws = ENVPTR->NewStringUTF(ENVONLY, fa.aws_region))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create aws_region string"); + } + } + args[0].l = j_aws; + + if (NULL != fa.secret_id) { + if (NULL == (j_id = ENVPTR->NewStringUTF(ENVONLY, fa.secret_id))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_id string"); + } + } + args[1].l = j_id; + + if (NULL != fa.secret_key) { + if (NULL == (j_key = ENVPTR->NewStringUTF(ENVONLY, fa.secret_key))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_key string"); + } + } + args[2].l = j_key; + + CALL_CONSTRUCTOR(ENVONLY, "hdf/hdf5lib/structs/H5FD_ros3_fapl_t", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V", args, ret_obj); +#else + H5_UNIMPLEMENTED(ENVONLY, "H5Pget_fapl_ros3: not implemented"); +#endif /* H5_HAVE_ROS3_VFD */ + +done: + return ret_obj; +} /* end Java_hdf_hdf5lib_H5_H5Pget_1fapl_1ros3 */ + +/* + * Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_ros3 + * Signature: (JLhdf/hdf5lib/structs/H5FD_ros3_fapl_t;)V + */ +JNIEXPORT void JNICALL +Java_hdf_hdf5lib_H5_H5Pset_1fapl_1ros3 + (JNIEnv *env, jclass clss, jlong fapl_id, jobject fapl_config) +{ +#ifdef H5_HAVE_ROS3_VFD + H5FD_ros3_fapl_t instance; + const char *str = NULL; + jfieldID fid; + jstring j_str; + jclass cls; +#endif /* H5_HAVE_ROS3_VFD */ + + UNUSED(clss); + +#ifdef H5_HAVE_ROS3_VFD + HDmemset(&instance, 0, sizeof(H5FD_ros3_fapl_t)); + + if (NULL == (cls = ENVPTR->GetObjectClass(ENVONLY, fapl_config))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if 
(NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "version", "I"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + instance.version = ENVPTR->GetIntField(ENVONLY, fapl_config, fid); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "aws_region", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5Pset_fapl_ros3: fapl_config aws_region not pinned"); + + HDstrncpy(instance.aws_region, str, H5FD_ROS3_MAX_REGION_LEN + 1); + instance.aws_region[H5FD_ROS3_MAX_REGION_LEN] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.aws_region, 0, H5FD_ROS3_MAX_REGION_LEN + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "secret_id", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5Pset_fapl_ros3: fapl_config secret_id not pinned"); + + HDstrncpy(instance.secret_id, str, H5FD_ROS3_MAX_SECRET_ID_LEN + 1); + instance.secret_id[H5FD_ROS3_MAX_SECRET_ID_LEN] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.secret_id, 0, H5FD_ROS3_MAX_SECRET_ID_LEN + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "secret_key", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5Pset_fapl_ros3: fapl_config secret_key not pinned"); + + HDstrncpy(instance.secret_key, str, H5FD_ROS3_MAX_SECRET_KEY_LEN + 1); + instance.secret_key[H5FD_ROS3_MAX_SECRET_KEY_LEN] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.secret_key, 0, H5FD_ROS3_MAX_SECRET_KEY_LEN + 1); + + if (instance.aws_region[0] != '\0' && instance.secret_id[0] !='\0' && instance.secret_key[0] !='\0') + instance.authenticate = TRUE; + + if (H5Pset_fapl_ros3((hid_t)fapl_id, &instance) < 0) + H5_LIBRARY_ERROR(ENVONLY); +#else + H5_UNIMPLEMENTED(ENVONLY, "H5Pset_fapl_ros3: not implemented"); +#endif /* H5_HAVE_ROS3_VFD */ + +done: + /* NOP */; +#ifdef H5_HAVE_ROS3_VFD + if (str) + UNPIN_JAVA_STRING(ENVONLY, j_str, str); +#endif /* H5_HAVE_LIBHDFS */ +} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1ros3 */ + +/* * Class: hdf_hdf5lib_H5 * Method: H5Pset_fapl_split * Signature: (JLjava/lang/String;JLjava/lang/String;J)V diff --git a/java/src/jni/h5pFAPLImp.h b/java/src/jni/h5pFAPLImp.h index 28b1d95..9b353e6 100644 --- a/java/src/jni/h5pFAPLImp.h +++ b/java/src/jni/h5pFAPLImp.h @@ -137,6 +137,24 @@ Java_hdf_hdf5lib_H5_H5Pget_1family_1offset /* * Class: hdf_hdf5lib_H5 + * Method: H5Pget_fapl_hdfs + * Signature: (J)Lhdf/hdf5lib/structs/H5FD_hdfs_fapl_t; + */ +JNIEXPORT jobject JNICALL +Java_hdf_hdf5lib_H5_H5Pget_1fapl_1hdfs +(JNIEnv *, jclass, jlong); + +/* + * Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_hdfs + * Signature: (JLhdf/hdf5lib/structs/H5FD_hdfs_fapl_t;)V + */ +JNIEXPORT void JNICALL +Java_hdf_hdf5lib_H5_H5Pset_1fapl_1hdfs +(JNIEnv *, jclass, jlong, jobject); + +/* + * Class: hdf_hdf5lib_H5 * Method: H5Pset_fapl_log * Signature: 
(JLjava/lang/String;JJ)V */ @@ -188,6 +206,24 @@ Java_hdf_hdf5lib_H5_H5Pget_1fapl_1multi /* * Class: hdf_hdf5lib_H5 + * Method: H5Pget_fapl_ros3 + * Signature: (J)Lhdf/hdf5lib/structs/H5FD_ros3_fapl_t; + */ +JNIEXPORT jobject JNICALL +Java_hdf_hdf5lib_H5_H5Pget_1fapl_1ros3 +(JNIEnv *, jclass, jlong); + +/* + * Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_ros3 + * Signature: (JLhdf/hdf5lib/structs/H5FD_ros3_fapl_t;)V + */ +JNIEXPORT void JNICALL +Java_hdf_hdf5lib_H5_H5Pset_1fapl_1ros3 +(JNIEnv *, jclass, jlong, jobject); + +/* + * Class: hdf_hdf5lib_H5 * Method: H5Pset_fapl_split * Signature: (JLjava/lang/String;JLjava/lang/String;J)V */ diff --git a/java/test/CMakeLists.txt b/java/test/CMakeLists.txt index b119dd2..d44bc2f 100644 --- a/java/test/CMakeLists.txt +++ b/java/test/CMakeLists.txt @@ -50,6 +50,20 @@ if (NOT HDF5_ENABLE_DEBUG_APIS) ) endif () +if (HDF5_ENABLE_ROS3_VFD) + set (HDF5_JAVA_TEST_SOURCES + ${HDF5_JAVA_TEST_SOURCES} + TestH5Pfapls3 + ) +endif () + +if (HDF5_ENABLE_HDFS) + set (HDF5_JAVA_TEST_SOURCES + ${HDF5_JAVA_TEST_SOURCES} + TestH5Pfaplhdfs + ) +endif () + set (CMAKE_JAVA_INCLUDE_PATH "${HDF5_JAVA_LIB_DIR}/junit.jar;${HDF5_JAVA_LIB_DIR}/hamcrest-core.jar;${HDF5_JAVA_JARS};${HDF5_JAVA_LOGGING_JAR};${HDF5_JAVA_LOGGING_SIMPLE_JAR}") foreach (test_file ${HDF5_JAVA_TEST_SOURCES}) diff --git a/java/test/Makefile.am b/java/test/Makefile.am index 6635ef7..e6c9b16 100644 --- a/java/test/Makefile.am +++ b/java/test/Makefile.am @@ -61,6 +61,8 @@ noinst_JAVA = \ TestH5P.java \ TestH5PData.java \ TestH5Pfapl.java \ + TestH5Pfaplhdfs.java \ + TestH5Pfapls3.java \ TestH5Pvirtual.java \ TestH5Plist.java \ TestH5A.java \ diff --git a/java/test/TestAll.java b/java/test/TestAll.java index 13cb597..c7c206c 100644 --- a/java/test/TestAll.java +++ b/java/test/TestAll.java @@ -27,6 +27,7 @@ import org.junit.runners.Suite; TestH5Lparams.class, TestH5Lbasic.class, TestH5Lcreate.class, TestH5R.class, TestH5P.class, TestH5PData.class, TestH5Pfapl.class, TestH5Pvirtual.class, TestH5Plist.class, + TestH5Pfapls3.class, TestH5Pfaplhdfs.class, TestH5A.class, TestH5Oparams.class, TestH5Obasic.class, TestH5Ocopy.class, TestH5Ocreate.class, TestH5PL.class, TestH5Z.class diff --git a/java/test/TestH5Pfaplhdfs.java b/java/test/TestH5Pfaplhdfs.java new file mode 100644 index 0000000..b0d42d8 --- /dev/null +++ b/java/test/TestH5Pfaplhdfs.java @@ -0,0 +1,138 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. 
* + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +package test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.exceptions.HDF5LibraryException; +import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t; + +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; + +public class TestH5Pfaplhdfs { + @Rule public TestName testname = new TestName(); + + long fapl_id = -1; + long plapl_id = -1; + long dapl_id = -1; + long plist_id = -1; + long btplist_id = -1; + + @Before + public void createFileAccess() throws NullPointerException, HDF5Exception + { + assertTrue("H5 open ids is 0", H5.getOpenIDCount() == 0); + System.out.print(testname.getMethodName()); + + try { + fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); + } + catch (Throwable err) { + err.printStackTrace(); + fail("TestH5Pfapl.createFileAccess: " + err); + } + assertTrue(fapl_id > 0); + try { + plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS); + } + catch (Throwable err) { + err.printStackTrace(); + fail("TestH5Pfapl.createFileAccess: " + err); + } + assertTrue(plapl_id > 0); + try { + plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER); + btplist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER); + dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS); + } + catch (Throwable err) { + err.printStackTrace(); + fail("TestH5Pfapl.createFileAccess: " + err); + } + assertTrue(plist_id > 0); + assertTrue(btplist_id > 0); + assertTrue(dapl_id > 0); + } + + @After + public void deleteFileAccess() throws HDF5LibraryException + { + if (fapl_id > 0) + try {H5.H5Pclose(fapl_id);} catch (Exception ex) {} + if (plapl_id > 0) + try {H5.H5Pclose(plapl_id);} catch (Exception ex) {} + if (dapl_id > 0) + try {H5.H5Pclose(dapl_id);} catch (Exception ex) {} + if (plist_id > 0) + try {H5.H5Pclose(plist_id);} catch (Exception ex) {} + if (btplist_id > 0) + try {H5.H5Pclose(btplist_id);} catch (Exception ex) {} + System.out.println(); + } + + @Test + public void testHDFS_fapl() throws Exception + { + if (HDF5Constants.H5FD_HDFS < 0) + throw new HDF5LibraryException("skip"); + + String nodename = "blues"; + int nodeport = 12345; + String username = "sparticus"; + String kerbcache = "/dev/null"; + int streamsize = 1024; + + final H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t(nodename, nodeport, username, kerbcache, streamsize); + assertTrue("setting fapl should succeed", -1 < H5.H5Pset_fapl_hdfs(fapl_id, config)); + + assertEquals("driver types should match", HDF5Constants.H5FD_HDFS, H5.H5Pget_driver(fapl_id)); + + H5FD_hdfs_fapl_t copy = H5.H5Pget_fapl_hdfs(fapl_id); + assertEquals("fapl contents should match", new H5FD_hdfs_fapl_t(nodename, nodeport, username, kerbcache, streamsize), copy); + } + + @Test(expected = HDF5LibraryException.class) + public void testH5Pget_fapl_hdfs_invalid_fapl_id() throws Exception + { + if (HDF5Constants.H5FD_HDFS < 0) + throw new HDF5LibraryException("skip"); + H5FD_hdfs_fapl_t fails = H5.H5Pget_fapl_hdfs(-1); + } + + @Test(expected = HDF5LibraryException.class) + public void testH5Pget_fapl_hdfs_fapl_id_of_wrong_driver_type() throws Exception + { + if (HDF5Constants.H5FD_HDFS < 0) + throw new 
HDF5LibraryException("skip"); + if (HDF5Constants.H5FD_SEC2 < 0 ) + throw new HDF5LibraryException("skip"); + /* TODO: for now, test against a sec2 fapl only */ + + H5.H5Pset_fapl_sec2(fapl_id); + assertEquals("fapl_id was not set properly", HDF5Constants.H5FD_SEC2, H5.H5Pget_driver(fapl_id)); + H5FD_hdfs_fapl_t fails = H5.H5Pget_fapl_hdfs(fapl_id); + } + +} diff --git a/java/test/TestH5Pfapls3.java b/java/test/TestH5Pfapls3.java new file mode 100644 index 0000000..ba10524 --- /dev/null +++ b/java/test/TestH5Pfapls3.java @@ -0,0 +1,159 @@ +/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * + * Copyright by The HDF Group. * + * Copyright by the Board of Trustees of the University of Illinois. * + * All rights reserved. * + * * + * This file is part of HDF5. The full HDF5 copyright notice, including * + * terms governing use, modification, and redistribution, is contained in * + * the COPYING file, which can be found at the root of the source code * + * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases. * + * If you do not have access to either file, you may request a copy from * + * help@hdfgroup.org. * + * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ + +package test; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import hdf.hdf5lib.H5; +import hdf.hdf5lib.HDF5Constants; +import hdf.hdf5lib.exceptions.HDF5Exception; +import hdf.hdf5lib.exceptions.HDF5LibraryException; +import hdf.hdf5lib.structs.H5FD_ros3_fapl_t; + +import org.junit.After; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.TestName; + +public class TestH5Pfapls3 { + @Rule public TestName testname = new TestName(); + + long fapl_id = -1; + long plapl_id = -1; + long dapl_id = -1; + long plist_id = -1; + long btplist_id = -1; + + @Before + public void createFileAccess() throws NullPointerException, HDF5Exception + { + assertTrue("H5 open ids is 0", H5.getOpenIDCount() == 0); + System.out.print(testname.getMethodName()); + + try { + fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS); + } + catch (Throwable err) { + err.printStackTrace(); + fail("TestH5Pfapl.createFileAccess: " + err); + } + assertTrue(fapl_id > 0); + try { + plapl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_ACCESS); + } + catch (Throwable err) { + err.printStackTrace(); + fail("TestH5Pfapl.createFileAccess: " + err); + } + assertTrue(plapl_id > 0); + try { + plist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER); + btplist_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER); + dapl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_ACCESS); + } + catch (Throwable err) { + err.printStackTrace(); + fail("TestH5Pfapl.createFileAccess: " + err); + } + assertTrue(plist_id > 0); + assertTrue(btplist_id > 0); + assertTrue(dapl_id > 0); + } + + @After + public void deleteFileAccess() throws HDF5LibraryException + { + if (fapl_id > 0) + try {H5.H5Pclose(fapl_id);} catch (Exception ex) {} + if (plapl_id > 0) + try {H5.H5Pclose(plapl_id);} catch (Exception ex) {} + if (dapl_id > 0) + try {H5.H5Pclose(dapl_id);} catch (Exception ex) {} + if (plist_id > 0) + try {H5.H5Pclose(plist_id);} catch (Exception ex) {} + if (btplist_id > 0) + try {H5.H5Pclose(btplist_id);} catch (Exception ex) {} + System.out.println(); + } + + @Test + public void testH5Pset_fapl_ros3() throws Exception + { 
+ if (HDF5Constants.H5FD_ROS3 < 0) + return; + + final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t(); + assertEquals("Default fapl has unexpected contents", + new H5FD_ros3_fapl_t("", "", ""), config); + + H5.H5Pset_fapl_ros3(fapl_id, config); + + assertEquals("driver types don't match", + HDF5Constants.H5FD_ROS3, H5.H5Pget_driver(fapl_id)); + + /* get_fapl_ros3 can throw exception in error cases */ + H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl_id); + assertEquals("contents of fapl set and get don't match", + new H5FD_ros3_fapl_t("", "", ""), copy); + } + + @Test(expected = HDF5LibraryException.class) + public void testH5Pget_fapl_ros3_invalid_fapl_id() throws Exception + { + if (HDF5Constants.H5FD_ROS3 < 0) + throw new HDF5LibraryException("skip"); + H5FD_ros3_fapl_t fails = H5.H5Pget_fapl_ros3(-1); + } + + @Test(expected = HDF5LibraryException.class) + public void testH5Pget_fapl_ros3_fapl_id_of_wrong_driver_type() throws Exception + { + if (HDF5Constants.H5FD_ROS3 < 0) + throw new HDF5LibraryException("skip"); + if (HDF5Constants.H5FD_SEC2 < 0 ) + throw new HDF5LibraryException("skip"); + /* TODO: for now, test against a sec2 fapl only */ + + H5.H5Pset_fapl_sec2(fapl_id); + assertEquals("fapl_id was not set properly", + HDF5Constants.H5FD_SEC2, H5.H5Pget_driver(fapl_id)); + H5FD_ros3_fapl_t fails = H5.H5Pget_fapl_ros3(fapl_id); + } + + @Test + public void testH5Pset_fapl_ros3_specified() throws Exception + { + if (HDF5Constants.H5FD_ROS3 < 0) + return; + + String region = "us-east-1"; + String acc_id = "my_access_id"; + String acc_key = "my_access_key"; + + final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t(region, acc_id, acc_key); + H5.H5Pset_fapl_ros3(fapl_id, config); + assertEquals("driver types don't match", + HDF5Constants.H5FD_ROS3, H5.H5Pget_driver(fapl_id)); + + H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl_id); + assertEquals("contents of fapl set and get don't match", + new H5FD_ros3_fapl_t(region, acc_id, acc_key), copy); + } + +} diff --git a/java/test/junit.sh.in b/java/test/junit.sh.in index 1a9b93f..96ea796 100644 --- a/java/test/junit.sh.in +++ b/java/test/junit.sh.in @@ -19,6 +19,8 @@ prefix=@prefix@ USE_FILTER_SZIP="@USE_FILTER_SZIP@" USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@" +USE_ROS3_VFD="@HAVE_ROS3_VFD@" +USE_HDFS_VFD="@HAVE_LIBHDFS@" TESTNAME=JUnitInterface EXIT_SUCCESS=0 @@ -94,6 +96,8 @@ $HDFTEST_HOME/testfiles/JUnit-TestH5R.txt $HDFTEST_HOME/testfiles/JUnit-TestH5P.txt $HDFTEST_HOME/testfiles/JUnit-TestH5PData.txt $HDFTEST_HOME/testfiles/JUnit-TestH5Pfapl.txt +$HDFTEST_HOME/testfiles/JUnit-TestH5Pfapls3.txt +$HDFTEST_HOME/testfiles/JUnit-TestH5Pfaplhdfs.txt $HDFTEST_HOME/testfiles/JUnit-TestH5Pvirtual.txt $HDFTEST_HOME/testfiles/JUnit-TestH5Plist.txt $HDFTEST_HOME/testfiles/JUnit-TestH5A.txt @@ -1081,6 +1085,50 @@ if test $USE_FILTER_SZIP = "yes"; then test yes = "$verbose" && $DIFF JUnit-TestH5Giterate.txt JUnit-TestH5Giterate.out |sed 's/^/ /' fi fi +if test "X$ROS3_VFD" = "Xyes"; then + echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfapls3" + TESTING JUnit-TestH5Pfapls3 + ($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfapls3 > JUnit-TestH5Pfapls3.ext) + + # Extract file name, line number, version and thread IDs because they may be different + sed -e 's/thread [0-9]*/thread (IDs)/' -e 's/: .*\.c /: 
(file name) /' \ + -e 's/line [0-9]*/line (number)/' \ + -e 's/Time: [0-9]*\.[0-9]*/Time: XXXX/' \ + -e 's/v[1-9]*\.[0-9]*\./version (number)\./' \ + -e 's/[1-9]*\.[0-9]*\.[0-9]*[^)]*/version (number)/' \ + JUnit-TestH5Pfapls3.ext > JUnit-TestH5Pfapls3.out + + if diff JUnit-TestH5Pfapls3.out JUnit-TestH5Pfapls3.txt > /dev/null; then + echo " PASSED JUnit-TestH5Pfapls3" + else + echo "**FAILED** JUnit-TestH5Pfapls3" + echo " Expected result differs from actual result" + nerrors="`expr $nerrors + 1`" + test yes = "$verbose" && $DIFF JUnit-TestH5Pfapls3.txt JUnit-TestH5Pfapls3.out |sed 's/^/ /' + fi +fi +if test "X$HAVE_LIBHDFS" = "Xyes"; then + echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfaplhdfs" + TESTING JUnit-TestH5Pfaplhdfs + ($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Dorg.slf4j.simpleLogger.defaultLog=trace -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH -ea org.junit.runner.JUnitCore test.TestH5Pfaplhdfs > JUnit-TestH5Pfaplhdfs.ext) + + # Extract file name, line number, version and thread IDs because they may be different + sed -e 's/thread [0-9]*/thread (IDs)/' -e 's/: .*\.c /: (file name) /' \ + -e 's/line [0-9]*/line (number)/' \ + -e 's/Time: [0-9]*\.[0-9]*/Time: XXXX/' \ + -e 's/v[1-9]*\.[0-9]*\./version (number)\./' \ + -e 's/[1-9]*\.[0-9]*\.[0-9]*[^)]*/version (number)/' \ + JUnit-TestH5Pfaplhdfs.ext > JUnit-TestH5Pfaplhdfs.out + + if diff JUnit-TestH5Pfaplhdfs.out JUnit-TestH5Pfaplhdfs.txt > /dev/null; then + echo " PASSED JUnit-TestH5Pfaplhdfs" + else + echo "**FAILED** JUnit-TestH5Pfaplhdfs" + echo " Expected result differs from actual result" + nerrors="`expr $nerrors + 1`" + test yes = "$verbose" && $DIFF JUnit-TestH5Pfaplhdfs.txt JUnit-TestH5Pfaplhdfs.out |sed 's/^/ /' + fi +fi # Clean up temporary files/directories diff --git a/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt b/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt new file mode 100644 index 0000000..47a00a4 --- /dev/null +++ b/java/test/testfiles/JUnit-TestH5Pfaplhdfs.txt @@ -0,0 +1,9 @@ +JUnit version 4.11 +.testH5Pget_fapl_hdfs_invalid_fapl_id +.testH5Pget_fapl_hdfs_fapl_id_of_wrong_driver_type +.testHDFS_fapl + +Time: XXXX + +OK (3 tests) + diff --git a/java/test/testfiles/JUnit-TestH5Pfapls3.txt b/java/test/testfiles/JUnit-TestH5Pfapls3.txt new file mode 100644 index 0000000..3f46342 --- /dev/null +++ b/java/test/testfiles/JUnit-TestH5Pfapls3.txt @@ -0,0 +1,10 @@ +JUnit version 4.11 +.testH5Pset_fapl_ros3_specified +.testH5Pset_fapl_ros3 +.testH5Pget_fapl_ros3_invalid_fapl_id +.testH5Pget_fapl_ros3_fapl_id_of_wrong_driver_type + +Time: XXXX + +OK (4 tests) + |
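
Closing note: the HDFS side of the new API follows the same pattern as the ROS3 example above. A minimal sketch, not part of the diff, assuming a build configured with libhdfs support; the namenode host, port, user name, Kerberos ticket cache path, and stream buffer size are all placeholder values mirroring the constructor added in H5FD_hdfs_fapl_t.java.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;
    import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;

    public class HdfsFaplExample {
        public static void main(String[] args) throws Exception {
            long fapl = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
            try {
                /* Placeholder connection parameters for a read-only HDFS-hosted file. */
                H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t(
                        "namenode.example.org", 8020, "hdfs-user", "/tmp/krb5cc_1000", 2048);
                H5.H5Pset_fapl_hdfs(fapl, config);

                /* Verify the driver and echo the stored configuration. */
                if (H5.H5Pget_driver(fapl) == HDF5Constants.H5FD_HDFS)
                    System.out.println(H5.H5Pget_fapl_hdfs(fapl));
            }
            finally {
                H5.H5Pclose(fapl);
            }
        }
    }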