path: root/HDF5Examples/JAVA/H5J
author     Allen Byrne <50328838+byrnHDF@users.noreply.github.com>  2023-11-27 21:30:15 (GMT)
committer  GitHub <noreply@github.com>  2023-11-27 21:30:15 (GMT)
commit     fc88fcde1091cf12c1e88c783a14ee0f1cffe31c (patch)
tree       91b88b62cd30ed37ee9227e43989e95035be43c3 /HDF5Examples/JAVA/H5J
parent     a067bf71f57723d2dfca7dfe2ffd9ea502eccd4f (diff)
Develop merge examples (#3851)
* Merge examples repo into library
* Change grepTest to be more fault-tolerant
* Update examples macro file
* Exclude all Fortran examples from doxygen
Diffstat (limited to 'HDF5Examples/JAVA/H5J')
-rw-r--r--  HDF5Examples/JAVA/H5J/110/HDF5FileStructure.java              340
-rw-r--r--  HDF5Examples/JAVA/H5J/CMakeLists.txt                           92
-rw-r--r--  HDF5Examples/JAVA/H5J/H5_CreateGroupAbsoluteRelative.java     114
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5AttributeCreate.java                278
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5DatasetCreate.java                  192
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5DatasetRead.java                    235
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5FileCreate.java                      57
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5FileStructure.java                  348
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5GroupCreate.java                    138
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5GroupDatasetCreate.java             204
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5SubsetSelect.java                   264
-rw-r--r--  HDF5Examples/JAVA/H5J/Java_sourcefiles.cmake                   21
-rw-r--r--  HDF5Examples/JAVA/H5J/Makefile.am                              55
-rw-r--r--  HDF5Examples/JAVA/H5J/runExample.sh.in                        299
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5AttributeCreate.txt        2
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetCreate.txt          0
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetRead.txt           47
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileCreate.txt             0
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileStructure.txt          6
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupCreate.txt            0
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupDatasetCreate.txt     0
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5SubsetSelect.txt           9
22 files changed, 2701 insertions, 0 deletions
diff --git a/HDF5Examples/JAVA/H5J/110/HDF5FileStructure.java b/HDF5Examples/JAVA/H5J/110/HDF5FileStructure.java
new file mode 100644
index 0000000..cddad57
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/110/HDF5FileStructure.java
@@ -0,0 +1,340 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to retrieve the HDF5 file structure using
+ * the "HDF Native Package (Java)". The example creates the group structure and
+ * datasets, and prints out the file structure:
+ *
+ * <pre>
+ * "/" (root)
+ * integer arrays
+ * 2D 32-bit integer 20x10
+ * 3D unsigned 8-bit integer 20x10x5
+ * float arrays
+ * 2D 64-bit double 20x10
+ * 3D 32-bit float 20x10x5
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5FileStructure {
+ private static String fname = "HDF5FileStructure.h5";
+ private static long[] dims2D = {20, 10};
+ private static long[] dims3D = {20, 10, 5};
+
+ public static void main(String args[]) throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open the group, obtaining a new handle.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gopen(file_id, "/", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ printGroup(group_id, "/", "");
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Recursively print a group and its members.
+ *
+ * @throws Exception
+ */
+ private static void printGroup(long g_id, String gname, String indent) throws Exception
+ {
+ if (g_id < 0)
+ return;
+
+ H5G_info_t members = H5.H5Gget_info(g_id);
+ String objNames[] = new String[(int)members.nlinks];
+ int objTypes[] = new int[(int)members.nlinks];
+ int lnkTypes[] = new int[(int)members.nlinks];
+ long objRefs[] = new long[(int)members.nlinks];
+ int names_found = 0;
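+        // Retrieve the name, object type, link type and object reference of every
+        // link in this group in a single call, ordered by name.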
+ try {
+ names_found = H5.H5Gget_obj_info_all(g_id, null, objNames, objTypes, lnkTypes, objRefs,
+ HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+
+ indent += " ";
+ for (int i = 0; i < names_found; i++) {
+ System.out.println(indent + objNames[i]);
+ long group_id = -1;
+ if (objTypes[i] == HDF5Constants.H5O_TYPE_GROUP) {
+ // Open the group, obtaining a new handle.
+ try {
+ if (g_id >= 0)
+ group_id = H5.H5Gopen(g_id, objNames[i], HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ if (group_id >= 0)
+ printGroup(group_id, objNames[i], indent);
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ /**
+     * Create the file and add groups and datasets into the file; this is the
+     * same as javaExample.HDF5DatasetCreate.
+ *
+ * @see javaExample.HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ long group_id1 = HDF5Constants.H5I_INVALID_HID;
+ long group_id2 = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create groups in the file.
+ try {
+ if (file_id >= 0) {
+ group_id1 = H5.H5Gcreate(file_id,
+ "/"
+ + "integer arrays",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+                group_id2 = H5.H5Gcreate(file_id,
+ "/"
+ + "float arrays",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the datasets.
+ try {
+ dataspace_id1 = H5.H5Screate_simple(2, dims2D, null);
+ dataspace_id2 = H5.H5Screate_simple(3, dims3D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 32-bit (4 bytes) integer dataset of 20 by 10
+ try {
+ if ((file_id >= 0) && (dataspace_id1 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "integer arrays"
+ + "/"
+ + "2D 32-bit integer 20x10",
+ HDF5Constants.H5T_STD_I32LE, dataspace_id1, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 8-bit (1 byte) unsigned integer dataset of 20 by 10 by 5
+ try {
+ if ((file_id >= 0) && (dataspace_id2 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "integer arrays"
+ + "/"
+ + "3D 8-bit unsigned integer 20x10x5",
+                                 HDF5Constants.H5T_STD_U8LE, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 64-bit (8 bytes) double dataset of 20 by 10
+ try {
+ if ((file_id >= 0) && (dataspace_id1 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "float arrays"
+ + "/"
+ + "2D 64-bit double 20x10",
+ HDF5Constants.H5T_NATIVE_DOUBLE, dataspace_id1, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 32-bit (4 bytes) float dataset of 20 by 10 by 5
+ try {
+ if ((file_id >= 0) && (dataspace_id2 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "float arrays"
+ + "/"
+ + "3D 32-bit float 20x10x5",
+ HDF5Constants.H5T_NATIVE_FLOAT, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space.
+ try {
+ if (dataspace_id1 >= 0)
+ H5.H5Sclose(dataspace_id1);
+ dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ if (dataspace_id2 >= 0)
+ H5.H5Sclose(dataspace_id2);
+ dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (group_id1 >= 0)
+ H5.H5Gclose(group_id1);
+ if (group_id2 >= 0)
+ H5.H5Gclose(group_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5J/CMakeLists.txt b/HDF5Examples/JAVA/H5J/CMakeLists.txt
new file mode 100644
index 0000000..8f1c195
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/CMakeLists.txt
@@ -0,0 +1,92 @@
+cmake_minimum_required (VERSION 3.18)
+project (HDF5Examples_JAVA_INTRO Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDFJAVA_LIB_DIR}
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH2}
+)
+
+#-----------------------------------------------------------------------------
+# Define Sources
+#-----------------------------------------------------------------------------
+include (Java_sourcefiles.cmake)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else ()
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif ()
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach ()
+
+foreach (HCP_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ get_filename_component (_HCP_FILE ${HCP_JAR} NAME)
+ set (HDFJAVA_CLASSJARS "${_HCP_FILE} ${HDFJAVA_CLASSJARS}")
+endforeach ()
+
+foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+ "Main-Class: ${example_name}
+Class-Path: ${HDFJAVA_CLASSJARS}
+"
+ )
+ add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+ get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+endforeach ()
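+# Each example is packaged into its own jar whose manifest names the example class
+# as Main-Class and lists the HDF5 Java jars on the Class-Path, so the jar can be
+# run directly by the Java runtime (assuming the HDF5 native library directory is
+# supplied on java.library.path, as the test harness below arranges).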
+
+if (H5EX_BUILD_TESTING)
+ macro (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+ -D "TEST_PROGRAM=${resultfile}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
+ -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
+ -D "TEST_OUTPUT=${PROJECT_BINARY_DIR}/${resultfile}.out"
+ -D "TEST_REFERENCE=${resultfile}.txt"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_SKIP_COMPARE=TRUE"
+ -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif ()
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${resultfile}")
+ endmacro ()
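+  # The macro above runs one example jar through jrunTest.cmake, capturing its
+  # output in <example>.out; output comparison is skipped (TEST_SKIP_COMPARE),
+  # so the test only checks the example's exit code against TEST_EXPECT.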
+
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_INTRO_BINARY_DIR}/${example_name}.h5
+ ${example_name}.out
+ ${example_name}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif ()
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}.txt
+ ${PROJECT_BINARY_DIR}/${example_name}.txt
+ )
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects)
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects")
+ ADD_H5_TEST (${example_name} 0)
+ endforeach ()
+
+endif ()
diff --git a/HDF5Examples/JAVA/H5J/H5_CreateGroupAbsoluteRelative.java b/HDF5Examples/JAVA/H5J/H5_CreateGroupAbsoluteRelative.java
new file mode 100644
index 0000000..934242d
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/H5_CreateGroupAbsoluteRelative.java
@@ -0,0 +1,114 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating groups using absolute and relative names.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_CreateGroupAbsoluteRelative {
+ private static String FILENAME = "H5_CreateGroupAbsoluteRelative.h5";
+ private static String GROUPNAME = "MyGroup";
+ private static String GROUPNAME_A = "GroupA";
+ private static String GROUPNAME_B = "GroupB";
+
+ private static void CreateGroupAbsoluteAndRelative()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group1_id = HDF5Constants.H5I_INVALID_HID;
+ long group2_id = HDF5Constants.H5I_INVALID_HID;
+ long group3_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group named "/MyGroup" in the file.
+ try {
+ if (file_id >= 0)
+ group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create group "GroupA" in group "MyGroup" using an absolute name.
+ try {
+ if (file_id >= 0)
+ group2_id =
+ H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create group "GroupB" in group "MyGroup" using a relative name.
+ try {
+ if (group1_id >= 0)
+ group3_id = H5.H5Gcreate(group1_id, GROUPNAME_B, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group3.
+ try {
+ if (group3_id >= 0)
+ H5.H5Gclose(group3_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group2.
+ try {
+ if (group2_id >= 0)
+ H5.H5Gclose(group2_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group1.
+ try {
+ if (group1_id >= 0)
+ H5.H5Gclose(group1_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5_CreateGroupAbsoluteRelative.CreateGroupAbsoluteAndRelative();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5AttributeCreate.java b/HDF5Examples/JAVA/H5J/HDF5AttributeCreate.java
new file mode 100644
index 0000000..faa2418
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5AttributeCreate.java
@@ -0,0 +1,278 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create/read/write an HDF5 attribute
+ * using the "HDF Native Package (Java)". The example creates an attribute, and
+ * reads and writes the attribute value:
+ *
+ * <pre>
+ * "/" (root)
+ * 2D 32-bit integer 20x10
+ * (attribute: name="data range", value=[0, 10000])
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5AttributeCreate {
+ private static String fname = "HDF5AttributeCreate.h5";
+ private static String dsname = "2D 32-bit integer 20x10";
+ private static String attrname = "data range";
+ private static long[] dims2D = {20, 10};
+
+ private static void CreateDatasetAttribute()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, dsname, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ long[] attrDims = {2}; // 1D of size two
+ int[] attrValue = {0, 10000}; // attribute value
+
+ // Create the data space for the attribute.
+ try {
+ dataspace_id = H5.H5Screate_simple(1, attrDims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
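+        // Note: the attribute is created with a big-endian 32-bit file datatype
+        // (H5T_STD_I32BE), while it is written from and read back into Java ints
+        // using the native memory type (H5T_NATIVE_INT); the library converts
+        // between the two representations.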
+ // Create a dataset attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, attrname, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Write the attribute data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, attrValue);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the attribute.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", attrname, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, attrDims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate an array to hold the attribute values.
+ int[] attrData = new int[(int)attrDims[0]];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_INT, attrData);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // print out attribute value
+ System.out.println(attrname);
+ System.out.println(attrData[0] + " " + attrData[1]);
+
+ // Close the dataspace.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+     * Create the file and add a dataset into the file; this is the
+     * same as javaExample.HDF5DatasetCreate.
+ *
+ * @see javaExample.HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, dsname, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // set the data values
+ int[] dataIn = new int[20 * 10];
+ for (int i = 0; i < 20; i++) {
+ for (int j = 0; j < 10; j++) {
+ dataIn[i * 10 + j] = i * 100 + j;
+ }
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataIn);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5AttributeCreate.CreateDatasetAttribute(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5DatasetCreate.java b/HDF5Examples/JAVA/H5J/HDF5DatasetCreate.java
new file mode 100644
index 0000000..05fea5f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5DatasetCreate.java
@@ -0,0 +1,192 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create HDF5 datasets using the
+ * "HDF Native Package (Java)". The example creates the group structure and
+ * datasets:
+ *
+ * <pre>
+ * "/" (root)
+ *     g1
+ *         2D 32-bit integer 20x10
+ *         3D 8-bit unsigned integer 20x10x5
+ *     g2
+ *         2D 64-bit double 20x10
+ *         3D 32-bit float 20x10x5
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5DatasetCreate {
+ private static String fname = "HDF5DatasetCreate.h5";
+ private static long[] dims2D = {20, 10};
+ private static long[] dims3D = {20, 10, 5};
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id1 = HDF5Constants.H5I_INVALID_HID;
+ long group_id2 = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0) {
+ group_id1 = H5.H5Gcreate(file_id, "g1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ group_id2 = H5.H5Gcreate(file_id, "g2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the 2D dataset.
+ try {
+ dataspace_id1 = H5.H5Screate_simple(2, dims2D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the 3D dataset.
+ try {
+ dataspace_id2 = H5.H5Screate_simple(3, dims3D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 32-bit (4 bytes) integer dataset of 20 by 10
+ try {
+ if ((group_id1 >= 0) && (dataspace_id1 >= 0)) {
+ dataset_id = H5.H5Dcreate(
+ group_id1, "2D 32-bit integer 20x10", HDF5Constants.H5T_NATIVE_INT32, dataspace_id1,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 8-bit (1 byte) unsigned integer dataset of 20 by 10 by 5
+ try {
+ if ((group_id1 >= 0) && (dataspace_id2 >= 0)) {
+ dataset_id =
+ H5.H5Dcreate(group_id1, "3D 8-bit unsigned integer 20x10x5",
+                                 HDF5Constants.H5T_NATIVE_UINT8, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 64-bit (8 bytes) double dataset of 20 by 10
+ try {
+ if ((group_id2 >= 0) && (dataspace_id1 >= 0)) {
+ dataset_id = H5.H5Dcreate(
+ group_id2, "2D 64-bit double 20x10", HDF5Constants.H5T_NATIVE_DOUBLE, dataspace_id1,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 32-bit (4 bytes) float dataset of 20 by 10 by 5
+ try {
+ if ((group_id2 >= 0) && (dataspace_id2 >= 0)) {
+ dataset_id = H5.H5Dcreate(
+ group_id2, "3D 32-bit float 20x10x5", HDF5Constants.H5T_NATIVE_FLOAT, dataspace_id2,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id2 >= 0)
+ H5.H5Sclose(dataspace_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataspace_id1 >= 0)
+ H5.H5Sclose(dataspace_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (group_id2 >= 0)
+ H5.H5Gclose(group_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (group_id1 >= 0)
+ H5.H5Gclose(group_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5DatasetCreate.CreateDataset(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5DatasetRead.java b/HDF5Examples/JAVA/H5J/HDF5DatasetRead.java
new file mode 100644
index 0000000..e0771a8
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5DatasetRead.java
@@ -0,0 +1,235 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to read/write HDF5 datasets using the
+ * "HDF Native Package (Java)". The example creates an integer dataset, and
+ * reads and writes data values:
+ *
+ * <pre>
+ * "/" (root)
+ * 2D 32-bit integer 20x10
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5DatasetRead {
+ private static String fname = "HDF5DatasetRead.h5";
+ private static String dsname = "2D 32-bit integer 20x10";
+ private static long[] dims2D = {20, 10};
+
+ private static void ReadWriteDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, dsname, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate a two-dimensional array to hold the dataset elements.
+ int[][] dataRead = new int[(int)dims2D[0]][(int)(dims2D[1])];
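+        // Passing H5S_ALL for both the memory and the file dataspace reads the
+        // entire 20x10 dataset straight into the Java int[][] buffer.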
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataRead);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // print out the data values
+ System.out.println("\n\nOriginal Data Values");
+ for (int i = 0; i < 20; i++) {
+ System.out.print("\n" + dataRead[i][0]);
+ for (int j = 1; j < 10; j++) {
+ System.out.print(", " + dataRead[i][j]);
+ }
+ }
+
+ // change data value and write it to file.
+ for (int i = 0; i < 20; i++) {
+ for (int j = 0; j < 10; j++) {
+ dataRead[i][j]++;
+ }
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataRead);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // reload the data value
+ int[][] dataModified = new int[(int)dims2D[0]][(int)(dims2D[1])];
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataModified);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // print out the modified data values
+ System.out.println("\n\nModified Data Values");
+ for (int i = 0; i < 20; i++) {
+ System.out.print("\n" + dataModified[i][0]);
+ for (int j = 1; j < 10; j++) {
+ System.out.print(", " + dataModified[i][j]);
+ }
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+     * Create the file and add a dataset into the file; this is the
+     * same as HDF5DatasetCreate.
+     *
+     * @see HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, dsname, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // set the data values
+ int[] dataIn = new int[20 * 10];
+ for (int i = 0; i < 20; i++) {
+ for (int j = 0; j < 10; j++) {
+ dataIn[i * 10 + j] = i * 100 + j;
+ }
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataIn);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5DatasetRead.ReadWriteDataset(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5FileCreate.java b/HDF5Examples/JAVA/H5J/HDF5FileCreate.java
new file mode 100644
index 0000000..fbfc247
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5FileCreate.java
@@ -0,0 +1,57 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: This example shows how to create an empty HDF5 file using the
+ * "HDF Native Package (Java)". If the file (HDF5FileCreate.h5) already exists, it
+ * will be truncated to zero length.
+ * </p>
+ */
+public class HDF5FileCreate {
+ // The name of the file we'll create.
+ private static String fname = "HDF5FileCreate.h5";
+
+ private static void CreateFile()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5FileCreate.CreateFile(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5FileStructure.java b/HDF5Examples/JAVA/H5J/HDF5FileStructure.java
new file mode 100644
index 0000000..8ea80a8
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5FileStructure.java
@@ -0,0 +1,348 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+import hdf.hdf5lib.structs.H5O_token_t;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to retrieve the HDF5 file structure using
+ * the "HDF Native Package (Java)". The example creates the group structure and
+ * datasets, and prints out the file structure:
+ *
+ * <pre>
+ * "/" (root)
+ * integer arrays
+ * 2D 32-bit integer 20x10
+ * 3D unsigned 8-bit integer 20x10x5
+ * float arrays
+ * 2D 64-bit double 20x10
+ * 3D 32-bit float 20x10x5
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5FileStructure {
+ private static String fname = "HDF5FileStructure.h5";
+
+ private static void FileStructure()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open the group, obtaining a new handle.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gopen(file_id, "/", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ printGroup(group_id, "/", "");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Recursively print a group and its members.
+ *
+ * @throws Exception
+ */
+ private static void printGroup(long g_id, String gname, String indent) throws Exception
+ {
+ if (g_id < 0)
+ return;
+
+ H5G_info_t members = H5.H5Gget_info(g_id);
+ String objNames[] = new String[(int)members.nlinks];
+ int objTypes[] = new int[(int)members.nlinks];
+ int lnkTypes[] = new int[(int)members.nlinks];
+ H5O_token_t objTokens[] = new H5O_token_t[(int)members.nlinks];
+ int names_found = 0;
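+        // Retrieve the name, object type, link type and object token of every
+        // link in this group in a single call, ordered by name.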
+ try {
+ names_found = H5.H5Gget_obj_info_all(g_id, null, objNames, objTypes, lnkTypes, objTokens,
+ HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+
+ indent += " ";
+ for (int i = 0; i < names_found; i++) {
+ System.out.println(indent + objNames[i]);
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ if (objTypes[i] == HDF5Constants.H5O_TYPE_GROUP) {
+ // Open the group, obtaining a new handle.
+ try {
+ if (g_id >= 0)
+ group_id = H5.H5Gopen(g_id, objNames[i], HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ if (group_id >= 0)
+ printGroup(group_id, objNames[i], indent);
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ /**
+     * Create the file and add groups and datasets into the file; this is the
+     * same as javaExample.HDF5DatasetCreate.
+ *
+ * @see javaExample.HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long[] dims2D = {20, 10};
+ long[] dims3D = {20, 10, 5};
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ long group_id1 = HDF5Constants.H5I_INVALID_HID;
+ long group_id2 = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create groups in the file.
+ try {
+ if (file_id >= 0) {
+ group_id1 = H5.H5Gcreate(file_id,
+ "/"
+ + "integer arrays",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+                group_id2 = H5.H5Gcreate(file_id,
+ "/"
+ + "float arrays",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the datasets.
+ try {
+ dataspace_id1 = H5.H5Screate_simple(2, dims2D, null);
+ dataspace_id2 = H5.H5Screate_simple(3, dims3D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 32-bit (4 bytes) integer dataset of 20 by 10
+ try {
+ if ((file_id >= 0) && (dataspace_id1 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "integer arrays"
+ + "/"
+ + "2D 32-bit integer 20x10",
+ HDF5Constants.H5T_STD_I32LE, dataspace_id1, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 8-bit (1 byte) unsigned integer dataset of 20 by 10 by 5
+ try {
+ if ((file_id >= 0) && (dataspace_id2 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "integer arrays"
+ + "/"
+ + "3D 8-bit unsigned integer 20x10x5",
+                                 HDF5Constants.H5T_STD_U8LE, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 64-bit (8 bytes) double dataset of 20 by 10
+ try {
+ if ((file_id >= 0) && (dataspace_id1 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "float arrays"
+ + "/"
+ + "2D 64-bit double 20x10",
+ HDF5Constants.H5T_NATIVE_DOUBLE, dataspace_id1, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 32-bit (4 bytes) float dataset of 20 by 10 by 5
+ try {
+ if ((file_id >= 0) && (dataspace_id2 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "float arrays"
+ + "/"
+ + "3D 32-bit float 20x10x5",
+ HDF5Constants.H5T_NATIVE_FLOAT, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space.
+ try {
+ if (dataspace_id1 >= 0)
+ H5.H5Sclose(dataspace_id1);
+ dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ if (dataspace_id2 >= 0)
+ H5.H5Sclose(dataspace_id2);
+ dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (group_id1 >= 0)
+ H5.H5Gclose(group_id1);
+ if (group_id2 >= 0)
+ H5.H5Gclose(group_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5FileStructure.FileStructure(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5GroupCreate.java b/HDF5Examples/JAVA/H5J/HDF5GroupCreate.java
new file mode 100644
index 0000000..4a31c8f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5GroupCreate.java
@@ -0,0 +1,138 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create HDF5 groups using the
+ * "HDF Native Package (Java)". The example creates the group structure:
+ *
+ * <pre>
+ * "/" (root)
+ * g1
+ * g11
+ * g12
+ * g2
+ * g21
+ * g22
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5GroupCreate {
+ private static String fname = "HDF5GroupCreate.h5";
+
+ private static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long subgroup_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id1 = HDF5Constants.H5I_INVALID_HID;
+ long group_id2 = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0) {
+ group_id1 = H5.H5Gcreate(file_id, "g1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (group_id1 >= 0) {
+ subgroup_id = H5.H5Gcreate(group_id1, "g11", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ if (subgroup_id >= 0)
+ H5.H5Gclose(subgroup_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ subgroup_id = H5.H5Gcreate(group_id1, "g12", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ if (subgroup_id >= 0)
+ H5.H5Gclose(subgroup_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ group_id2 = H5.H5Gcreate(file_id, "g2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (group_id2 >= 0) {
+ subgroup_id = H5.H5Gcreate(group_id2, "g21", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ if (subgroup_id >= 0)
+ H5.H5Gclose(subgroup_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ subgroup_id = H5.H5Gcreate(group_id2, "g22", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ if (subgroup_id >= 0)
+ H5.H5Gclose(subgroup_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (group_id2 >= 0)
+ H5.H5Gclose(group_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (group_id1 >= 0)
+ H5.H5Gclose(group_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5GroupCreate.CreateGroup(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5GroupDatasetCreate.java b/HDF5Examples/JAVA/H5J/HDF5GroupDatasetCreate.java
new file mode 100644
index 0000000..b89cd9c
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5GroupDatasetCreate.java
@@ -0,0 +1,204 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Create two datasets within groups.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class HDF5GroupDatasetCreate {
+ private static String FILENAME = "HDF5GroupDatasetCreate.h5";
+ private static String GROUPNAME = "MyGroup";
+ private static String GROUPNAME_A = "GroupA";
+ private static String DATASETNAME1 = "dset1";
+ private static String DATASETNAME2 = "dset2";
+ private static final int DIM1_X = 3;
+ private static final int DIM1_Y = 3;
+ private static final int DIM2_X = 2;
+ private static final int DIM2_Y = 10;
+
+ private static void h5_crtgrpd()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long group1_id = HDF5Constants.H5I_INVALID_HID;
+ long group2_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset1_data = new int[DIM1_X][DIM1_Y];
+ int[][] dset2_data = new int[DIM2_X][DIM2_Y];
+ long[] dims1 = {DIM1_X, DIM1_Y};
+ long[] dims2 = {DIM2_X, DIM2_Y};
+
+ // Initialize the first dataset.
+ for (int indx = 0; indx < DIM1_X; indx++)
+ for (int jndx = 0; jndx < DIM1_Y; jndx++)
+ dset1_data[indx][jndx] = jndx + 1;
+
+ // Initialize the second dataset.
+ for (int indx = 0; indx < DIM2_X; indx++)
+ for (int jndx = 0; jndx < DIM2_Y; jndx++)
+ dset2_data[indx][jndx] = jndx + 1;
+
+ // Create a file.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ // Create a group named "/MyGroup" in the file.
+ if (file_id >= 0) {
+ group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+                // Create group "GroupA" in group "MyGroup" using an absolute name.
+ if (group1_id >= 0) {
+ group2_id =
+ H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (group2_id >= 0)
+ H5.H5Gclose(group2_id);
+ }
+ if (group1_id >= 0)
+ H5.H5Gclose(group1_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the first dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims1, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset in group "MyGroup".
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(
+ file_id, "/" + GROUPNAME + "/" + DATASETNAME1, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the first dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset1_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space for the first dataset.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the first dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing group of the specified file.
+ try {
+ if (file_id >= 0)
+ group_id =
+ H5.H5Gopen(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the second dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create the second dataset in group "GroupA".
+ try {
+ if ((group_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(group_id, DATASETNAME2, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the second dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset2_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space for the second dataset.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the second dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5GroupDatasetCreate.h5_crtgrpd(); }
+}
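
A minimal read-back sketch (not part of this commit) showing how the file written by HDF5GroupDatasetCreate could be verified. The file name, the dataset path "/MyGroup/Group_A/dset2", and the 2x10 dimensions are assumptions based on the constants used in the example above; only H5 calls already used in these examples appear.

    import hdf.hdf5lib.H5;
    import hdf.hdf5lib.HDF5Constants;

    public class ReadBackGroupDataset {
        public static void main(String[] args)
        {
            // Assumed names/dimensions; adjust to match FILENAME, GROUPNAME,
            // GROUPNAME_A, DATASETNAME2 and dims2 in the example above.
            String fname  = "HDF5GroupDatasetCreate.h5";
            String dspath = "/MyGroup/Group_A/dset2";
            int[][] data  = new int[2][10];

            long file_id    = HDF5Constants.H5I_INVALID_HID;
            long dataset_id = HDF5Constants.H5I_INVALID_HID;
            try {
                file_id    = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
                dataset_id = H5.H5Dopen(file_id, dspath, HDF5Constants.H5P_DEFAULT);
                // Read the whole dataset back into the Java array.
                H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                           HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);
                System.out.println("dset2[0][0] = " + data[0][0]); // expected 1 (jndx + 1)
            }
            catch (Exception e) {
                e.printStackTrace();
            }
            finally {
                try { if (dataset_id >= 0) H5.H5Dclose(dataset_id); } catch (Exception e) { e.printStackTrace(); }
                try { if (file_id >= 0) H5.H5Fclose(file_id); } catch (Exception e) { e.printStackTrace(); }
            }
        }
    }
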
diff --git a/HDF5Examples/JAVA/H5J/HDF5SubsetSelect.java b/HDF5Examples/JAVA/H5J/HDF5SubsetSelect.java
new file mode 100644
index 0000000..a00f5be
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5SubsetSelect.java
@@ -0,0 +1,264 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to select a subset using the
+ * "HDF Native Package (Java)". The example creates an integer dataset and reads
+ * a subset of the dataset:
+ *
+ * <pre>
+ * "/" (root)
+ * 2D 32-bit integer 20x10
+ * </pre>
+ *
+ * The whole 20x10 data set is
+ *
+ * <pre>
+ * 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009
+ * 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109
+ * 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209
+ * 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309
+ * 1400, 1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409
+ * 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509
+ * 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609
+ * 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1707, 1708, 1709
+ * 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809
+ * 1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909
+ * 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ * 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2108, 2109
+ * 2200, 2201, 2202, 2203, 2204, 2205, 2206, 2207, 2208, 2209
+ * 2300, 2301, 2302, 2303, 2304, 2305, 2306, 2307, 2308, 2309
+ * 2400, 2401, 2402, 2403, 2404, 2405, 2406, 2407, 2408, 2409
+ * 2500, 2501, 2502, 2503, 2504, 2505, 2506, 2507, 2508, 2509
+ * 2600, 2601, 2602, 2603, 2604, 2605, 2606, 2607, 2608, 2609
+ * 2700, 2701, 2702, 2703, 2704, 2705, 2706, 2707, 2708, 2709
+ * 2800, 2801, 2802, 2803, 2804, 2805, 2806, 2807, 2808, 2809
+ * 2900, 2901, 2902, 2903, 2904, 2905, 2906, 2907, 2908, 2909
+ * </pre>
+ *
+ * Subset: start=(4, 2), stride=(3, 2) and count=(5, 3). The subset values are:
+ *
+ * <pre>
+ * 1402,1404,1406
+ * 1702,1704,1706
+ * 2002,2004,2006
+ * 2302,2304,2306
+ * 2602,2604,2606
+ * </pre>
+ *
+ * </p>
+ *
+ * @author Peter X. Cao
+ * @version 2.4
+ */
+public class HDF5SubsetSelect {
+ private static String fname = "HDF5SubsetSelect.h5";
+ private static String dsname = "2D 32-bit integer 20x10";
+ private static long[] dims2D = {20, 10};
+
+ private static void SubsetSelect()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long memspace_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, dsname, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array to hold the elements of the
+ // dataset subset that will be read.
+ int[][] dataRead = new int[5][3];
+
+ // Define and select the hyperslab to use for reading.
+ try {
+ if (dataset_id >= 0) {
+ filespace_id = H5.H5Dget_space(dataset_id);
+
+ long[] start = {4, 2};
+ long[] stride = {3, 2};
+ long[] count = {5, 3};
+ long[] block = null;
+
+ if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
+
+ memspace_id = H5.H5Screate_simple(2, count, null);
+ // Read the data using the previously defined hyperslab.
+ if ((dataset_id >= 0) && (filespace_id >= 0) && (memspace_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, memspace_id, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dataRead);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // print out the data values
+ System.out.println("\n\nSubset Data Values");
+ for (int i = 0; i < 5; i++) {
+ System.out.print("\n" + dataRead[i][0]);
+ for (int j = 1; j < 3; j++) {
+ System.out.print("," + dataRead[i][j]);
+ }
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Create the file and add groups and a dataset to it; this is the
+ * same as javaExample.HDF5DatasetCreate.
+ *
+ * @see javaExample.HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, dsname, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // set the data values
+ int[] dataIn = new int[20 * 10];
+ for (int i = 0; i < 20; i++) {
+ for (int j = 0; j < 10; j++) {
+ dataIn[i * 10 + j] = 1000 + i * 100 + j;
+ }
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataIn);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5SubsetSelect.SubsetSelect(); }
+}
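
The subset values listed in the javadoc above follow directly from the fill pattern used in createFile() (1000 + row*100 + col) and the hyperslab parameters. Below is a small standalone sketch (not part of this commit) that reproduces the printed subset with plain arithmetic, no HDF5 calls needed; running it prints the same five rows shown in tfiles/110/HDF5SubsetSelect.txt.

    public class HyperslabIndexCheck {
        public static void main(String[] args)
        {
            // Hyperslab parameters copied from HDF5SubsetSelect; block is null
            // there, which HDF5 treats as a block size of 1 in each dimension.
            long[] start  = {4, 2};
            long[] stride = {3, 2};
            long[] count  = {5, 3};

            for (int i = 0; i < count[0]; i++) {
                StringBuilder line = new StringBuilder();
                for (int j = 0; j < count[1]; j++) {
                    long row = start[0] + i * stride[0];
                    long col = start[1] + j * stride[1];
                    if (j > 0)
                        line.append(",");
                    // Same fill pattern as createFile(): 1000 + row*100 + col
                    line.append(1000 + row * 100 + col);
                }
                System.out.println(line); // 1402,1404,1406 ... 2602,2604,2606
            }
        }
    }
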
diff --git a/HDF5Examples/JAVA/H5J/Java_sourcefiles.cmake b/HDF5Examples/JAVA/H5J/Java_sourcefiles.cmake
new file mode 100644
index 0000000..0a8a40b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/Java_sourcefiles.cmake
@@ -0,0 +1,21 @@
+#-----------------------------------------------------------------------------
+# Define Sources, one file per application
+#-----------------------------------------------------------------------------
+set (HDF_JAVA_EXAMPLES
+ HDF5FileCreate.java
+ HDF5GroupCreate.java
+ HDF5DatasetCreate.java
+ HDF5AttributeCreate.java
+ HDF5DatasetRead.java
+ HDF5GroupDatasetCreate.java
+ HDF5SubsetSelect.java
+)
+if (NOT ${${EXAMPLE_VARNAME}_USE_110_API} AND ${H5_LIBVER_DIR} EQUAL 110)
+ set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES}
+ 110/HDF5FileStructure.java
+ )
+else ()
+ set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES}
+ HDF5FileStructure.java
+ )
+endif ()
diff --git a/HDF5Examples/JAVA/H5J/Makefile.am b/HDF5Examples/JAVA/H5J/Makefile.am
new file mode 100644
index 0000000..6b615a2
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/Makefile.am
@@ -0,0 +1,55 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+
+JAVAROOT = .classes
+
+classes:
+ $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/intro
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)intro.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ HDF5AttributeCreate.java \
+ HDF5DatasetCreate.java \
+ HDF5DatasetRead.java \
+ HDF5FileCreate.java \
+ HDF5FileStructure.java \
+ HDF5GroupCreate.java \
+ HDF5SubsetSelect.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+TESTS = runExample.sh
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
diff --git a/HDF5Examples/JAVA/H5J/runExample.sh.in b/HDF5Examples/JAVA/H5J/runExample.sh.in
new file mode 100644
index 0000000..3a42b2b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/runExample.sh.in
@@ -0,0 +1,299 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+TESTNAME=EX_Intro
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+BASENAME='basename'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+BLDLIBDIR="./lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/JAVA/intro"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@intro.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test directory.
+# NOTE: Keep this framework to add/remove test files.
+# This list is also used to check that the files exist.
+# Lines that begin with '#' (no leading space) are treated as comments.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$top_builddir/lib/libhdf5.*
+$top_builddir/lib/libhdf5_java.*
+$top_builddir/JAVA/intro/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/tfiles/114/HDF5AttributeCreate.txt
+$HDFTEST_HOME/tfiles/114/HDF5DatasetCreate.txt
+$HDFTEST_HOME/tfiles/114/HDF5DatasetRead.txt
+$HDFTEST_HOME/tfiles/114/HDF5FileCreate.txt
+$HDFTEST_HOME/tfiles/114/HDF5FileStructure.txt
+$HDFTEST_HOME/tfiles/114/HDF5GroupCreate.txt
+$HDFTEST_HOME/tfiles/114/HDF5SubsetSelect.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # Copy the test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # skip cp if srcdir is the same as destdir;
+ # this occurs when build/test is performed in the source dir
+ # and would make cp fail
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -fR $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out the exit below to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ BNAME=`$BASENAME $tstfile`
+ if [ "$BNAME" = "libhdf5_java.dylib" ]; then
+ COPIED_LIBHDF5_JAVA=1
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # skip rm if srcdir is the same as destdir;
+ # this occurs when build/test is performed in the source dir
+ # and the rm would remove source files
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # Copy the test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # skip cp if srcdir is the same as destdir;
+ # this occurs when build/test is performed in the source dir
+ # and would make cp fail
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out the exit below to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ # skip rm if srcdir is the same as destdir;
+ # this occurs when build/test is performed in the source dir
+ # and the rm would remove source files
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDDIR/*.txt
+ $RM $BLDDIR/*.out
+ fi
+}
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+export JAVAEXE
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5AttributeCreate"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5AttributeCreate > HDF5AttributeCreate.out)
+if diff HDF5AttributeCreate.out HDF5AttributeCreate.txt > /dev/null; then
+ echo " PASSED HDF5AttributeCreate"
+else
+ echo "**FAILED** HDF5AttributeCreate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5DatasetCreate"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5DatasetCreate > HDF5DatasetCreate.out)
+if diff HDF5DatasetCreate.out HDF5DatasetCreate.txt > /dev/null; then
+ echo " PASSED HDF5DatasetCreate"
+else
+ echo "**FAILED** HDF5DatasetCreate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5DatasetRead"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5DatasetRead > HDF5DatasetRead.out)
+if diff HDF5DatasetRead.out HDF5DatasetRead.txt > /dev/null; then
+ echo " PASSED HDF5DatasetRead"
+else
+ echo "**FAILED** HDF5DatasetRead"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5FileCreate"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5FileCreate > HDF5FileCreate.out)
+if diff HDF5FileCreate.out HDF5FileCreate.txt > /dev/null; then
+ echo " PASSED HDF5FileCreate"
+else
+ echo "**FAILED** HDF5FileCreate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5FileStructure"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5FileStructure > HDF5FileStructure.out)
+if diff HDF5FileStructure.out HDF5FileStructure.txt > /dev/null; then
+ echo " PASSED HDF5FileStructure"
+else
+ echo "**FAILED** HDF5FileStructure"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5GroupCreate"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5GroupCreate > HDF5GroupCreate.out)
+if diff HDF5GroupCreate.out HDF5GroupCreate.txt > /dev/null; then
+ echo " PASSED HDF5GroupCreate"
+else
+ echo "**FAILED** HDF5GroupCreate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5SubsetSelect"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5SubsetSelect > HDF5SubsetSelect.out)
+if diff HDF5SubsetSelect.out HDF5SubsetSelect.txt > /dev/null; then
+ echo " PASSED HDF5SubsetSelect"
+else
+ echo "**FAILED** HDF5SubsetSelect"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5AttributeCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5AttributeCreate.txt
new file mode 100644
index 0000000..e45aa6b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5AttributeCreate.txt
@@ -0,0 +1,2 @@
+data range
+0 10000
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetCreate.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetCreate.txt
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetRead.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetRead.txt
new file mode 100644
index 0000000..078410f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetRead.txt
@@ -0,0 +1,47 @@
+
+
+Original Data Values
+
+0, 1, 2, 3, 4, 5, 6, 7, 8, 9
+100, 101, 102, 103, 104, 105, 106, 107, 108, 109
+200, 201, 202, 203, 204, 205, 206, 207, 208, 209
+300, 301, 302, 303, 304, 305, 306, 307, 308, 309
+400, 401, 402, 403, 404, 405, 406, 407, 408, 409
+500, 501, 502, 503, 504, 505, 506, 507, 508, 509
+600, 601, 602, 603, 604, 605, 606, 607, 608, 609
+700, 701, 702, 703, 704, 705, 706, 707, 708, 709
+800, 801, 802, 803, 804, 805, 806, 807, 808, 809
+900, 901, 902, 903, 904, 905, 906, 907, 908, 909
+1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009
+1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109
+1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209
+1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309
+1400, 1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409
+1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509
+1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609
+1700, 1701, 1702, 1703, 1704, 1705, 1706, 1707, 1708, 1709
+1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809
+1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909
+
+Modified Data Values
+
+1, 2, 3, 4, 5, 6, 7, 8, 9, 10
+101, 102, 103, 104, 105, 106, 107, 108, 109, 110
+201, 202, 203, 204, 205, 206, 207, 208, 209, 210
+301, 302, 303, 304, 305, 306, 307, 308, 309, 310
+401, 402, 403, 404, 405, 406, 407, 408, 409, 410
+501, 502, 503, 504, 505, 506, 507, 508, 509, 510
+601, 602, 603, 604, 605, 606, 607, 608, 609, 610
+701, 702, 703, 704, 705, 706, 707, 708, 709, 710
+801, 802, 803, 804, 805, 806, 807, 808, 809, 810
+901, 902, 903, 904, 905, 906, 907, 908, 909, 910
+1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010
+1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110
+1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210
+1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310
+1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409, 1410
+1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510
+1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610
+1701, 1702, 1703, 1704, 1705, 1706, 1707, 1708, 1709, 1710
+1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810
+1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910 \ No newline at end of file
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileCreate.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileCreate.txt
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileStructure.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileStructure.txt
new file mode 100644
index 0000000..820bd9f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileStructure.txt
@@ -0,0 +1,6 @@
+ float arrays
+ 2D 64-bit double 20x10
+ 3D 32-bit float 20x10x5
+ integer arrays
+ 2D 32-bit integer 20x10
+ 3D 8-bit unsigned integer 20x10x5
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupCreate.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupCreate.txt
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupDatasetCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupDatasetCreate.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupDatasetCreate.txt
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5SubsetSelect.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5SubsetSelect.txt
new file mode 100644
index 0000000..93bec79
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5SubsetSelect.txt
@@ -0,0 +1,9 @@
+
+
+Subset Data Values
+
+1402,1404,1406
+1702,1704,1706
+2002,2004,2006
+2302,2304,2306
+2602,2604,2606 \ No newline at end of file