author      Allen Byrne <byrn@hdfgroup.org>   2019-09-05 19:35:33 (GMT)
committer   Allen Byrne <byrn@hdfgroup.org>   2019-09-05 19:35:33 (GMT)
commit      dcab85c83b72bd41fcefd8d989b3d17db8b44981 (patch)
tree        37c044719e1a85f994f4bca0a2b808df4abf6798 /java
parent      e3e9cf04b07310873c0916448229a0d45eb83c11 (diff)
Add back missing Java implementation
Diffstat (limited to 'java')
 java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java |   6
 java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java |  20
 java/src/jni/h5pFAPLImp.c                          | 328
 java/src/jni/h5pFAPLImp.h                          |  36
 java/test/TestH5Pfaplhdfs.java                     |  69
 java/test/TestH5Pfapls3.java                       |  67

 6 files changed, 407 insertions(+), 119 deletions(-)
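The diff below restores the HDFS and ROS3 file access property list (fapl) wrappers: the Java structs, the JNI get/set implementations, and their tests. As orientation before the diff, here is a minimal stand-alone sketch of the HDFS call sequence, assuming a build with the HDFS VFD enabled; the namenode host, port, user name, ticket cache path, and buffer size are illustrative placeholders, and the H5Pcreate/H5Pclose calls are the usual property-list bookkeeping from the Java bindings. The constructor argument order follows the JNI signature restored below.

```java
import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;
import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t;

public class HdfsFaplSketch {
    public static void main(String[] args) throws Exception {
        // The HDFS VFD is optional; its driver constant is negative when absent.
        if (HDF5Constants.H5FD_HDFS < 0) {
            System.out.println("HDFS VFD not available in this build");
            return;
        }

        long faplId = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
        try {
            // Argument order matches the restored JNI constructor signature:
            // (namenode_name, namenode_port, user_name, kerberos_ticket_cache, stream_buffer_size)
            H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t(
                    "namenode.example.org", 8020, "hdfs-user", "/tmp/krb5cc_0", 2048);
            H5.H5Pset_fapl_hdfs(faplId, config);

            // Round-trip: H5Pget_fapl_hdfs rebuilds the Java struct from the C fapl.
            H5FD_hdfs_fapl_t copy = H5.H5Pget_fapl_hdfs(faplId);
            System.out.println("driver id: " + H5.H5Pget_driver(faplId) + ", config: " + copy);
        } finally {
            H5.H5Pclose(faplId);
        }
    }
}
```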
diff --git a/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java index c13473c..9fcff2e 100644 --- a/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java +++ b/java/src/hdf/hdf5lib/structs/H5FD_hdfs_fapl_t.java @@ -23,14 +23,14 @@ import java.io.Serializable; * Used for the access of files hosted on the Hadoop Distributed File System. */ -@SuppressWarnings("serial") // mute default serialUID warnings until someone knowledgeable comes along or something breaks horribly public class H5FD_hdfs_fapl_t implements Serializable { + private static final long serialVersionUID = 2072473407027648309L; - private long version; + private int version; private String namenode_name; + private int namenode_port; private String user_name; private String kerberos_ticket_cache; - private int namenode_port; private int stream_buffer_size; /* diff --git a/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java index 881aad3..a899e10 100644 --- a/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java +++ b/java/src/hdf/hdf5lib/structs/H5FD_ros3_fapl_t.java @@ -17,13 +17,14 @@ package hdf.hdf5lib.structs; import java.io.Serializable; /* - * Java representation of the ROS3 VFD file access property list (fapl) + * Java representation of the ROS3 VFD file access property list (fapl) * structure. * * Used for the access of files hosted remotely on S3 by Amazon. - * + * * For simplicity, implemented assuming that all ROS3 fapls have components: * - version + * - authenticate * - aws_region * - secret_id * - secret_key @@ -38,14 +39,15 @@ import java.io.Serializable; * H5FD_ros3_fapl_v2_t (extends super with Version 2 components) * and so on, for each version * - * "super" is passed around, and is version-checked and re-cast as + * "super" is passed around, and is version-checked and re-cast as * appropriate */ -@SuppressWarnings("serial") // mute default serialUID warnings until someone knowledgeable comes along or something breaks horribly public class H5FD_ros3_fapl_t implements Serializable { + private static final long serialVersionUID = 8985533001471224030L; - private long version; + private int version; + private boolean authenticate; private String aws_region; private String secret_id; private String secret_key; @@ -111,10 +113,10 @@ public class H5FD_ros3_fapl_t implements Serializable { @Override public String toString() { return "H5FD_ros3_fapl_t (Version:" + this.version + ") {" + - "\n aws_region : " + this.aws_region + - "\n secret_id : " + this.secret_id + - "\n secret_key : " + this.secret_key + - "\n}\n"; + "\n aws_region : " + this.aws_region + + "\n secret_id : " + this.secret_id + + "\n secret_key : " + this.secret_key + + "\n}\n"; } } diff --git a/java/src/jni/h5pFAPLImp.c b/java/src/jni/h5pFAPLImp.c index acfc853..4241758 100644 --- a/java/src/jni/h5pFAPLImp.c +++ b/java/src/jni/h5pFAPLImp.c @@ -370,6 +370,176 @@ done: return (jlong)offset; } /* end Java_hdf_hdf5lib_H5_H5Pget_1family_1offset */ +/* Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_hdfs + * Signature: (J)Lhdf/hdf5lib/structs/H5FD_hdfs_fapl_t; + */ +JNIEXPORT jobject JNICALL +Java_hdf_hdf5lib_H5_H5Pget_1fapl_1hdfs + (JNIEnv *env, jclass clss, jlong fapl_id) +{ +#ifdef H5_HAVE_LIBHDFS + H5FD_hdfs_fapl_t fa; + jvalue args[5]; + jint j_namenode_port = 0; + jstring j_namenode_name = NULL; + jstring j_user_name = NULL; + jstring j_kerb_cache_path = NULL; + jint j_stream_buffer_size = 0; +#endif /* H5_HAVE_LIBHDFS */ + jobject ret_obj = NULL; + + 
UNUSED(clss); + +#ifdef H5_HAVE_LIBHDFS + if (H5Pget_fapl_hdfs((hid_t)fapl_id, &fa) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + if (NULL != fa.namenode_name) { + if (NULL == (j_namenode_name = ENVPTR->NewStringUTF(ENVONLY, fa.namenode_name))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create namenode_name string"); + } + } + args[0].l = j_namenode_name; + + args[1].i = (jint)fa.namenode_port; + + if (NULL != fa.user_name) { + if (NULL == (j_user_name = ENVPTR->NewStringUTF(ENVONLY, fa.user_name))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create user_name string"); + } + } + args[2].l = j_user_name; + + if (NULL != fa.kerberos_ticket_cache) { + if (NULL == (j_kerb_cache_path = ENVPTR->NewStringUTF(ENVONLY, fa.kerberos_ticket_cache))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_hdfs: out of memory - can't create kerberos_ticket_cache string"); + } + } + args[3].l = j_kerb_cache_path; + + args[4].i = (jint)fa.stream_buffer_size; + + CALL_CONSTRUCTOR(ENVONLY, "hdf/hdf5lib/structs/H5FD_hdfs_fapl_t", "(Ljava/lang/String;ILjava/lang/String;Ljava/lang/String;I)V", args, ret_obj); + +done: +#else + H5_UNIMPLEMENTED(ENVONLY, "H5Pget_fapl_hdfs: not implemented"); +#endif /* H5_HAVE_LIBHDFS */ + return ret_obj; +} /* end Java_hdf_hdf5lib_H5_H5Pget_1fapl_1hdfs */ + +/* + * Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_hdfs + * Signature: (JLhdf/hdf5lib/structs/H5FD_hdfs_fapl_t;)V + */ +JNIEXPORT void JNICALL +Java_hdf_hdf5lib_H5_H5Pset_1fapl_1hdfs + (JNIEnv *env, jclass clss, jlong fapl_id, jobject fapl_config) +{ +#ifdef H5_HAVE_LIBHDFS + H5FD_hdfs_fapl_t instance; + const char *str = NULL; + jfieldID fid; + jstring j_str; + jclass cls; +#endif /* H5_HAVE_LIBHDFS */ + + UNUSED(clss); + +#ifdef H5_HAVE_LIBHDFS + HDmemset(&instance, 0, sizeof(H5FD_hdfs_fapl_t)); + + if (NULL == (cls = ENVPTR->GetObjectClass(ENVONLY, fapl_config))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "version", "I"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + instance.version = ENVPTR->GetIntField(ENVONLY, fapl_config, fid); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "namenode_name", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5FDset_fapl_hdfs: fapl_config namenode_name not pinned"); + + HDstrncpy(instance.namenode_name, str, H5FD__HDFS_NODE_NAME_SPACE + 1); + instance.namenode_name[H5FD__HDFS_NODE_NAME_SPACE] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.namenode_name, 0, H5FD__HDFS_NODE_NAME_SPACE + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "namenode_port", "I"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + instance.namenode_port = ENVPTR->GetIntField(ENVONLY, fapl_config, fid); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "user_name", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, 
"H5FDset_fapl_hdfs: fapl_config user_name not pinned"); + + HDstrncpy(instance.user_name, str, H5FD__HDFS_USER_NAME_SPACE + 1); + instance.user_name[H5FD__HDFS_USER_NAME_SPACE] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.user_name, 0, H5FD__HDFS_USER_NAME_SPACE + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "kerberos_ticket_cache", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5FDset_fapl_hdfs: fapl_config kerberos_ticket_cache not pinned"); + + HDstrncpy(instance.kerberos_ticket_cache, str, H5FD__HDFS_KERB_CACHE_PATH_SPACE + 1); + instance.kerberos_ticket_cache[H5FD__HDFS_KERB_CACHE_PATH_SPACE] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.kerberos_ticket_cache, 0, H5FD__HDFS_KERB_CACHE_PATH_SPACE + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "stream_buffer_size", "I"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + instance.stream_buffer_size = ENVPTR->GetIntField(ENVONLY, fapl_config, fid); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (H5Pset_fapl_hdfs((hid_t) fapl_id, &instance) < 0) + H5_LIBRARY_ERROR(ENVONLY); + +done: + if (str) + UNPIN_JAVA_STRING(ENVONLY, j_str, str); +#else + H5_UNIMPLEMENTED(ENVONLY, "H5Pset_fapl_hdfs: not implemented"); +#endif /* H5_HAVE_LIBHDFS */ +} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1hdfs */ + /* * Class: hdf_hdf5lib_H5 * Method: H5Pset_fapl_log @@ -617,6 +787,164 @@ done: */ /* + * Class: hdf5_hdf5lib_H5 + * Method: H5Pget_fapl_ros3 + * Signature: (J)Lhdf/hdf5lib/structs/H5FD_ros3_fapl_t; + */ +JNIEXPORT jobject JNICALL +Java_hdf_hdf5lib_H5_H5Pget_1fapl_1ros3 + (JNIEnv *env, jclass clss, jlong fapl_id) +{ +#ifdef H5_HAVE_ROS3_VFD + H5FD_ros3_fapl_t fa; + jvalue args[3]; + jstring j_aws = NULL; + jstring j_id = NULL; + jstring j_key = NULL; +#endif /* H5_HAVE_ROS3_VFD */ + jobject ret_obj = NULL; + + UNUSED(clss); + +#ifdef H5_HAVE_ROS3_VFD + /* pass fapl and fapl_t instance into library get_fapl */ + /* store fapl details in ros3_fapl_t instance `fa` */ + if (H5Pget_fapl_ros3((hid_t)loc_id, &fa) < 0) + H5_LIBRARY_ERROR(ENVONLY); + + if (NULL != fa.aws_region) { + if (NULL == (j_aws = ENVPTR->NewStringUTF(ENVONLY, fa.aws_region))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create aws_region string"); + } + } + args[0].l = j_aws; + + if (NULL != fa.secret_id) { + if (NULL == (j_id = ENVPTR->NewStringUTF(ENVONLY, fa.secret_id))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_id string"); + } + } + args[1].l = j_id; + + if (NULL != fa.secret_key) { + if (NULL == (j_key = ENVPTR->NewStringUTF(ENVONLY, fa.secret_key))) { + CHECK_JNI_EXCEPTION(ENVONLY, JNI_TRUE); + H5_JNI_FATAL_ERROR(ENVONLY, "H5Pget_fapl_ros3: out of memory - can't create secret_key string"); + } + } + args[2].l = j_key; + + CALL_CONSTRUCTOR(ENVONLY, "hdf/hdf5lib/structs/H5FD_ros3_fapl_t", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V", args, ret_obj); + +done: +#else + H5_UNIMPLEMENTED(ENVONLY, "H5Pget_fapl_ros3: not implemented"); +#endif /* H5_HAVE_ROS3_VFD */ + return ret_obj; +} /* end Java_hdf_hdf5lib_H5_H5Pget_1fapl_1ros3 */ + +/* + * Class: hdf_hdf5lib_H5 + * Method: 
H5Pset_fapl_ros3 + * Signature: (JLhdf/hdf5lib/structs/H5FD_ros3_fapl_t;)V + */ +JNIEXPORT void JNICALL +Java_hdf_hdf5lib_H5_H5Pset_1fapl_1ros3 + (JNIEnv *env, jclass clss, jlong fapl_id, jobject fapl_config) +{ +#ifdef H5_HAVE_ROS3_VFD + H5FD_ros3_fapl_t instance; + const char *str = NULL; + jfieldID fid; + jstring j_str; + jclass cls; +#endif /* H5_HAVE_ROS3_VFD */ + + UNUSED(clss); + +#ifdef H5_HAVE_ROS3_VFD + HDmemset(&instance, 0, sizeof(H5FD_ros3_fapl_t)); + + if (NULL == (cls = ENVPTR->GetObjectClass(ENVONLY, fapl_config))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "version", "I"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + instance.version = ENVPTR->GetIntField(ENVONLY, fapl_config, fid); + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "aws_region", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5Pset_fapl_ros3: fapl_config aws_region not pinned"); + + HDstrncpy(instance.aws_region, str, H5FD_ROS3_MAX_REGION_LEN + 1); + instance.aws_region[H5FD_ROS3_MAX_REGION_LEN] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.aws_region, 0, H5FD_ROS3_MAX_REGION_LEN + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "secret_id", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5Pset_fapl_ros3: fapl_config secret_id not pinned"); + + HDstrncpy(instance.secret_id, str, H5FD_ROS3_MAX_SECRET_ID_LEN + 1); + instance.secret_id[H5FD_ROS3_MAX_SECRET_ID_LEN] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.secret_id, 0, H5FD_ROS3_MAX_SECRET_ID_LEN + 1); + + if (NULL == (fid = ENVPTR->GetFieldID(ENVONLY, cls, "secret_key", "Ljava/lang/String;"))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (NULL == (j_str = (jstring)ENVPTR->GetObjectField(ENVONLY, fapl_config, fid))) + CHECK_JNI_EXCEPTION(ENVONLY, JNI_FALSE); + + if (j_str) { + PIN_JAVA_STRING(ENVONLY, j_str, str, NULL, "H5Pset_fapl_ros3: fapl_config secret_key not pinned"); + + HDstrncpy(instance.secret_key, str, H5FD_ROS3_MAX_SECRET_KEY_LEN + 1); + instance.secret_key[H5FD_ROS3_MAX_SECRET_KEY_LEN] = '\0'; + + UNPIN_JAVA_STRING(ENVONLY, j_str, str); + str = NULL; + } + else + HDmemset(instance.secret_key, 0, H5FD_ROS3_MAX_SECRET_KEY_LEN + 1); + + if (instance.aws_region[0] != '\0' && instance.secret_id[0] !='\0' && instance.secret_key[0] !='\0') + instance.authenticate = TRUE; + + if (H5Pset_fapl_ros3((hid_t)fapl_id, &instance) < 0) + H5_LIBRARY_ERROR(ENVONLY); + +done: + if (str) + UNPIN_JAVA_STRING(ENVONLY, j_str, str); +#else + H5_UNIMPLEMENTED(ENVONLY, "H5Pset_fapl_ros3: not implemented"); +#endif /* H5_HAVE_ROS3_VFD */ +} /* end Java_hdf_hdf5lib_H5_H5Pset_1fapl_1ros3 */ + +/* * Class: hdf_hdf5lib_H5 * Method: H5Pset_fapl_split * Signature: (JLjava/lang/String;JLjava/lang/String;J)V diff --git a/java/src/jni/h5pFAPLImp.h b/java/src/jni/h5pFAPLImp.h index 28b1d95..9b353e6 100644 --- a/java/src/jni/h5pFAPLImp.h +++ b/java/src/jni/h5pFAPLImp.h @@ -137,6 +137,24 @@ Java_hdf_hdf5lib_H5_H5Pget_1family_1offset /* * 
Class: hdf_hdf5lib_H5 + * Method: H5Pget_fapl_hdfs + * Signature: (J)Lhdf/hdf5lib/structs/H5FD_hdfs_fapl_t; + */ +JNIEXPORT jobject JNICALL +Java_hdf_hdf5lib_H5_H5Pget_1fapl_1hdfs +(JNIEnv *, jclass, jlong); + +/* + * Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_hdfs + * Signature: (JLhdf/hdf5lib/structs/H5FD_hdfs_fapl_t;)V + */ +JNIEXPORT void JNICALL +Java_hdf_hdf5lib_H5_H5Pset_1fapl_1hdfs +(JNIEnv *, jclass, jlong, jobject); + +/* + * Class: hdf_hdf5lib_H5 * Method: H5Pset_fapl_log * Signature: (JLjava/lang/String;JJ)V */ @@ -188,6 +206,24 @@ Java_hdf_hdf5lib_H5_H5Pget_1fapl_1multi /* * Class: hdf_hdf5lib_H5 + * Method: H5Pget_fapl_ros3 + * Signature: (J)Lhdf/hdf5lib/structs/H5FD_ros3_fapl_t; + */ +JNIEXPORT jobject JNICALL +Java_hdf_hdf5lib_H5_H5Pget_1fapl_1ros3 +(JNIEnv *, jclass, jlong); + +/* + * Class: hdf_hdf5lib_H5 + * Method: H5Pset_fapl_ros3 + * Signature: (JLhdf/hdf5lib/structs/H5FD_ros3_fapl_t;)V + */ +JNIEXPORT void JNICALL +Java_hdf_hdf5lib_H5_H5Pset_1fapl_1ros3 +(JNIEnv *, jclass, jlong, jobject); + +/* + * Class: hdf_hdf5lib_H5 * Method: H5Pset_fapl_split * Signature: (JLjava/lang/String;JLjava/lang/String;J)V */ diff --git a/java/test/TestH5Pfaplhdfs.java b/java/test/TestH5Pfaplhdfs.java index 30d326e..b0d42d8 100644 --- a/java/test/TestH5Pfaplhdfs.java +++ b/java/test/TestH5Pfaplhdfs.java @@ -19,18 +19,11 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import java.io.File; -import java.text.DecimalFormat; -import java.text.NumberFormat; - import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants; import hdf.hdf5lib.exceptions.HDF5Exception; import hdf.hdf5lib.exceptions.HDF5LibraryException; -import hdf.hdf5lib.exceptions.HDF5PropertyListInterfaceException; -import hdf.hdf5lib.structs.H5AC_cache_config_t; import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t; -import hdf.hdf5lib.structs.H5FD_ros3_fapl_t; import org.junit.After; import org.junit.Before; @@ -41,11 +34,6 @@ import org.junit.rules.TestName; public class TestH5Pfaplhdfs { @Rule public TestName testname = new TestName(); - long H5fid = -1; - long H5dsid = -1; - long H5did = -1; - long H5Fdsid = -1; - long H5Fdid = -1; long fapl_id = -1; long plapl_id = -1; long dapl_id = -1; @@ -53,9 +41,9 @@ public class TestH5Pfaplhdfs { long btplist_id = -1; @Before - public void createFileAccess() - throws NullPointerException, HDF5Exception { - assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0); + public void createFileAccess() throws NullPointerException, HDF5Exception + { + assertTrue("H5 open ids is 0", H5.getOpenIDCount() == 0); System.out.print(testname.getMethodName()); try { @@ -89,7 +77,8 @@ public class TestH5Pfaplhdfs { } @After - public void deleteFileAccess() throws HDF5LibraryException { + public void deleteFileAccess() throws HDF5LibraryException + { if (fapl_id > 0) try {H5.H5Pclose(fapl_id);} catch (Exception ex) {} if (plapl_id > 0) @@ -100,23 +89,11 @@ public class TestH5Pfaplhdfs { try {H5.H5Pclose(plist_id);} catch (Exception ex) {} if (btplist_id > 0) try {H5.H5Pclose(btplist_id);} catch (Exception ex) {} - - if (H5Fdsid > 0) - try {H5.H5Sclose(H5Fdsid);} catch (Exception ex) {} - if (H5Fdid > 0) - try {H5.H5Dclose(H5Fdid);} catch (Exception ex) {} - if (H5dsid > 0) - try {H5.H5Sclose(H5dsid);} catch (Exception ex) {} - if (H5did > 0) - try {H5.H5Dclose(H5did);} catch (Exception ex) {} - if (H5fid > 0) - try {H5.H5Fclose(H5fid);} catch (Exception ex) {} System.out.println(); } @Test - public void testHDFS_fapl() - throws Exception 
+ public void testHDFS_fapl() throws Exception { if (HDF5Constants.H5FD_HDFS < 0) throw new HDF5LibraryException("skip"); @@ -127,34 +104,17 @@ public class TestH5Pfaplhdfs { String kerbcache = "/dev/null"; int streamsize = 1024; - final H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t( - nodename, - nodeport, - username, - kerbcache, - streamsize - ); - assertTrue("setting fapl should succeed", - -1 < H5.H5Pset_fapl_hdfs(fapl_id, config)); + final H5FD_hdfs_fapl_t config = new H5FD_hdfs_fapl_t(nodename, nodeport, username, kerbcache, streamsize); + assertTrue("setting fapl should succeed", -1 < H5.H5Pset_fapl_hdfs(fapl_id, config)); - assertEquals("driver types should match", - HDF5Constants.H5FD_HDFS, - H5.H5Pget_driver(fapl_id)); + assertEquals("driver types should match", HDF5Constants.H5FD_HDFS, H5.H5Pget_driver(fapl_id)); H5FD_hdfs_fapl_t copy = H5.H5Pget_fapl_hdfs(fapl_id); - assertEquals("fapl contents should match", - new H5FD_hdfs_fapl_t( - nodename, - nodeport, - username, - kerbcache, - streamsize), - copy); + assertEquals("fapl contents should match", new H5FD_hdfs_fapl_t(nodename, nodeport, username, kerbcache, streamsize), copy); } @Test(expected = HDF5LibraryException.class) - public void testH5Pget_fapl_hdfs_invalid_fapl_id() - throws Exception + public void testH5Pget_fapl_hdfs_invalid_fapl_id() throws Exception { if (HDF5Constants.H5FD_HDFS < 0) throw new HDF5LibraryException("skip"); @@ -162,8 +122,7 @@ public class TestH5Pfaplhdfs { } @Test(expected = HDF5LibraryException.class) - public void testH5Pget_fapl_hdfs_fapl_id_of_wrong_driver_type() - throws Exception + public void testH5Pget_fapl_hdfs_fapl_id_of_wrong_driver_type() throws Exception { if (HDF5Constants.H5FD_HDFS < 0) throw new HDF5LibraryException("skip"); @@ -172,9 +131,7 @@ public class TestH5Pfaplhdfs { /* TODO: for now, test against a sec2 fapl only */ H5.H5Pset_fapl_sec2(fapl_id); - assertEquals("fapl_id was not set properly", - HDF5Constants.H5FD_SEC2, - H5.H5Pget_driver(fapl_id)); + assertEquals("fapl_id was not set properly", HDF5Constants.H5FD_SEC2, H5.H5Pget_driver(fapl_id)); H5FD_hdfs_fapl_t fails = H5.H5Pget_fapl_hdfs(fapl_id); } diff --git a/java/test/TestH5Pfapls3.java b/java/test/TestH5Pfapls3.java index 00a2a73..ba10524 100644 --- a/java/test/TestH5Pfapls3.java +++ b/java/test/TestH5Pfapls3.java @@ -19,17 +19,10 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import java.io.File; -import java.text.DecimalFormat; -import java.text.NumberFormat; - import hdf.hdf5lib.H5; import hdf.hdf5lib.HDF5Constants; import hdf.hdf5lib.exceptions.HDF5Exception; import hdf.hdf5lib.exceptions.HDF5LibraryException; -import hdf.hdf5lib.exceptions.HDF5PropertyListInterfaceException; -import hdf.hdf5lib.structs.H5AC_cache_config_t; -import hdf.hdf5lib.structs.H5FD_hdfs_fapl_t; import hdf.hdf5lib.structs.H5FD_ros3_fapl_t; import org.junit.After; @@ -41,11 +34,6 @@ import org.junit.rules.TestName; public class TestH5Pfapls3 { @Rule public TestName testname = new TestName(); - long H5fid = -1; - long H5dsid = -1; - long H5did = -1; - long H5Fdsid = -1; - long H5Fdid = -1; long fapl_id = -1; long plapl_id = -1; long dapl_id = -1; @@ -53,9 +41,9 @@ public class TestH5Pfapls3 { long btplist_id = -1; @Before - public void createFileAccess() - throws NullPointerException, HDF5Exception { - assertTrue("H5 open ids is 0",H5.getOpenIDCount()==0); + public void createFileAccess() throws NullPointerException, HDF5Exception + { + assertTrue("H5 open 
ids is 0", H5.getOpenIDCount() == 0); System.out.print(testname.getMethodName()); try { @@ -89,7 +77,8 @@ public class TestH5Pfapls3 { } @After - public void deleteFileAccess() throws HDF5LibraryException { + public void deleteFileAccess() throws HDF5LibraryException + { if (fapl_id > 0) try {H5.H5Pclose(fapl_id);} catch (Exception ex) {} if (plapl_id > 0) @@ -100,48 +89,32 @@ public class TestH5Pfapls3 { try {H5.H5Pclose(plist_id);} catch (Exception ex) {} if (btplist_id > 0) try {H5.H5Pclose(btplist_id);} catch (Exception ex) {} - - if (H5Fdsid > 0) - try {H5.H5Sclose(H5Fdsid);} catch (Exception ex) {} - if (H5Fdid > 0) - try {H5.H5Dclose(H5Fdid);} catch (Exception ex) {} - if (H5dsid > 0) - try {H5.H5Sclose(H5dsid);} catch (Exception ex) {} - if (H5did > 0) - try {H5.H5Dclose(H5did);} catch (Exception ex) {} - if (H5fid > 0) - try {H5.H5Fclose(H5fid);} catch (Exception ex) {} System.out.println(); } @Test - public void testH5Pset_fapl_ros3() - throws Exception + public void testH5Pset_fapl_ros3() throws Exception { if (HDF5Constants.H5FD_ROS3 < 0) return; final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t(); assertEquals("Default fapl has unexpected contents", - new H5FD_ros3_fapl_t("", "", ""), - config); + new H5FD_ros3_fapl_t("", "", ""), config); H5.H5Pset_fapl_ros3(fapl_id, config); assertEquals("driver types don't match", - HDF5Constants.H5FD_ROS3, - H5.H5Pget_driver(fapl_id)); + HDF5Constants.H5FD_ROS3, H5.H5Pget_driver(fapl_id)); /* get_fapl_ros3 can throw exception in error cases */ H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl_id); assertEquals("contents of fapl set and get don't match", - new H5FD_ros3_fapl_t("", "", ""), - copy); + new H5FD_ros3_fapl_t("", "", ""), copy); } @Test(expected = HDF5LibraryException.class) - public void testH5Pget_fapl_ros3_invalid_fapl_id() - throws Exception + public void testH5Pget_fapl_ros3_invalid_fapl_id() throws Exception { if (HDF5Constants.H5FD_ROS3 < 0) throw new HDF5LibraryException("skip"); @@ -149,8 +122,7 @@ public class TestH5Pfapls3 { } @Test(expected = HDF5LibraryException.class) - public void testH5Pget_fapl_ros3_fapl_id_of_wrong_driver_type() - throws Exception + public void testH5Pget_fapl_ros3_fapl_id_of_wrong_driver_type() throws Exception { if (HDF5Constants.H5FD_ROS3 < 0) throw new HDF5LibraryException("skip"); @@ -160,14 +132,12 @@ public class TestH5Pfapls3 { H5.H5Pset_fapl_sec2(fapl_id); assertEquals("fapl_id was not set properly", - HDF5Constants.H5FD_SEC2, - H5.H5Pget_driver(fapl_id)); + HDF5Constants.H5FD_SEC2, H5.H5Pget_driver(fapl_id)); H5FD_ros3_fapl_t fails = H5.H5Pget_fapl_ros3(fapl_id); } @Test - public void testH5Pset_fapl_ros3_specified() - throws Exception + public void testH5Pset_fapl_ros3_specified() throws Exception { if (HDF5Constants.H5FD_ROS3 < 0) return; @@ -176,19 +146,14 @@ public class TestH5Pfapls3 { String acc_id = "my_access_id"; String acc_key = "my_access_key"; - final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t( - region, - acc_id, - acc_key); + final H5FD_ros3_fapl_t config = new H5FD_ros3_fapl_t(region, acc_id, acc_key); H5.H5Pset_fapl_ros3(fapl_id, config); assertEquals("driver types don't match", - HDF5Constants.H5FD_ROS3, - H5.H5Pget_driver(fapl_id)); + HDF5Constants.H5FD_ROS3, H5.H5Pget_driver(fapl_id)); H5FD_ros3_fapl_t copy = H5.H5Pget_fapl_ros3(fapl_id); assertEquals("contents of fapl set and get don't match", - new H5FD_ros3_fapl_t(region, acc_id, acc_key), - copy); + new H5FD_ros3_fapl_t(region, acc_id, acc_key), copy); } } |