author     Allen Byrne <50328838+byrnHDF@users.noreply.github.com>  2023-11-27 21:30:15 (GMT)
committer  GitHub <noreply@github.com>  2023-11-27 21:30:15 (GMT)
commit     fc88fcde1091cf12c1e88c783a14ee0f1cffe31c (patch)
tree       91b88b62cd30ed37ee9227e43989e95035be43c3 /HDF5Examples/JAVA
parent     a067bf71f57723d2dfca7dfe2ffd9ea502eccd4f (diff)
Develop merge examples (#3851)
* Merge examples repo into library
* Change grepTest to be more fault-tolerant
* Update examples macro file
* Exclude all Fortran examples from doxygen
Diffstat (limited to 'HDF5Examples/JAVA')
-rw-r--r--  HDF5Examples/JAVA/CMakeLists.txt  7
-rw-r--r--  HDF5Examples/JAVA/H5D/CMakeLists.txt  92
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Alloc.java  293
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Checksum.java  349
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Chunk.java  370
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Compact.java  290
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_External.java  235
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_FillValue.java  239
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Gzip.java  335
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Hyperslab.java  271
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Nbit.java  303
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_ReadWrite.java  178
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Shuffle.java  373
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Sofloat.java  361
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Soint.java  335
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Szip.java  336
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_Transform.java  249
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedAdd.java  392
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java  504
-rw-r--r--  HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedMod.java  378
-rw-r--r--  HDF5Examples/JAVA/H5D/JavaDatasetExample.sh.in  493
-rw-r--r--  HDF5Examples/JAVA/H5D/Java_sourcefiles.cmake  38
-rw-r--r--  HDF5Examples/JAVA/H5D/Makefile.am  75
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Alloc.txt  16
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Checksum.txt  3
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Chunk.txt  26
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Compact.txt  8
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_External.txt  7
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_FillValue.txt  20
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Gzip.txt  3
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Hyperslab.txt  24
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Nbit.txt  3
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_ReadWrite.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Shuffle.txt  5
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Sofloat.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Soint.txt  3
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Szip.txt  3
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Transform.txt  15
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedAdd.txt  14
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedGzip.txt  16
-rw-r--r--  HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedMod.txt  14
-rw-r--r--  HDF5Examples/JAVA/H5G/110/H5Ex_G_Intermediate.java  125
-rw-r--r--  HDF5Examples/JAVA/H5G/110/H5Ex_G_Iterate.java  109
-rw-r--r--  HDF5Examples/JAVA/H5G/110/H5Ex_G_Traverse.java  166
-rw-r--r--  HDF5Examples/JAVA/H5G/110/H5Ex_G_Visit.java  153
-rw-r--r--  HDF5Examples/JAVA/H5G/110/h5ex_g_iterate.h5  bin 0 -> 2928 bytes
-rw-r--r--  HDF5Examples/JAVA/H5G/110/h5ex_g_visit.h5  bin 0 -> 6312 bytes
-rw-r--r--  HDF5Examples/JAVA/H5G/CMakeLists.txt  92
-rw-r--r--  HDF5Examples/JAVA/H5G/H5Ex_G_Compact.java  259
-rw-r--r--  HDF5Examples/JAVA/H5G/H5Ex_G_Corder.java  116
-rw-r--r--  HDF5Examples/JAVA/H5G/H5Ex_G_Create.java  86
-rw-r--r--  HDF5Examples/JAVA/H5G/H5Ex_G_Intermediate.java  125
-rw-r--r--  HDF5Examples/JAVA/H5G/H5Ex_G_Iterate.java  110
-rw-r--r--  HDF5Examples/JAVA/H5G/H5Ex_G_Phase.java  234
-rw-r--r--  HDF5Examples/JAVA/H5G/H5Ex_G_Traverse.java  166
-rw-r--r--  HDF5Examples/JAVA/H5G/H5Ex_G_Visit.java  153
-rw-r--r--  HDF5Examples/JAVA/H5G/JavaGroupExample.sh.in  377
-rw-r--r--  HDF5Examples/JAVA/H5G/Java_sourcefiles.cmake  24
-rw-r--r--  HDF5Examples/JAVA/H5G/Makefile.am  65
-rw-r--r--  HDF5Examples/JAVA/H5G/h5ex_g_iterate.h5  bin 0 -> 2928 bytes
-rw-r--r--  HDF5Examples/JAVA/H5G/h5ex_g_visit.h5  bin 0 -> 6312 bytes
-rw-r--r--  HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Compact.txt  5
-rw-r--r--  HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Corder.txt  10
-rw-r--r--  HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Create.txt  0
-rw-r--r--  HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Intermediate.txt  5
-rw-r--r--  HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Iterate.txt  5
-rw-r--r--  HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Phase.txt  15
-rw-r--r--  HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Visit.txt  19
-rw-r--r--  HDF5Examples/JAVA/H5J/110/HDF5FileStructure.java  340
-rw-r--r--  HDF5Examples/JAVA/H5J/CMakeLists.txt  92
-rw-r--r--  HDF5Examples/JAVA/H5J/H5_CreateGroupAbsoluteRelative.java  114
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5AttributeCreate.java  278
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5DatasetCreate.java  192
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5DatasetRead.java  235
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5FileCreate.java  57
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5FileStructure.java  348
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5GroupCreate.java  138
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5GroupDatasetCreate.java  204
-rw-r--r--  HDF5Examples/JAVA/H5J/HDF5SubsetSelect.java  264
-rw-r--r--  HDF5Examples/JAVA/H5J/Java_sourcefiles.cmake  21
-rw-r--r--  HDF5Examples/JAVA/H5J/Makefile.am  55
-rw-r--r--  HDF5Examples/JAVA/H5J/runExample.sh.in  299
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5AttributeCreate.txt  2
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetCreate.txt  0
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetRead.txt  47
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileCreate.txt  0
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileStructure.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupCreate.txt  0
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupDatasetCreate.txt  0
-rw-r--r--  HDF5Examples/JAVA/H5J/tfiles/110/HDF5SubsetSelect.txt  9
-rw-r--r--  HDF5Examples/JAVA/H5T/110/H5Ex_T_ObjectReference.java  341
-rw-r--r--  HDF5Examples/JAVA/H5T/110/H5Ex_T_ObjectReferenceAttribute.java  381
-rw-r--r--  HDF5Examples/JAVA/H5T/CMakeLists.txt  92
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_Array.java  278
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_ArrayAttribute.java  318
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_Bit.java  223
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_BitAttribute.java  264
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_Commit.java  258
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_Compound.java  460
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_CompoundAttribute.java  502
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_Float.java  225
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_FloatAttribute.java  262
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_Integer.java  222
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_IntegerAttribute.java  260
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_ObjectReference.java  323
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_ObjectReferenceAttribute.java  366
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_Opaque.java  266
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_OpaqueAttribute.java  303
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_RegionReference.java  315
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_RegionReferenceAttribute.java  340
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_String.java  307
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_StringAttribute.java  347
-rw-r--r--  HDF5Examples/JAVA/H5T/H5Ex_T_VLString.java  135
-rw-r--r--  HDF5Examples/JAVA/H5T/JavaDatatypeExample.sh.in  447
-rw-r--r--  HDF5Examples/JAVA/H5T/Java_sourcefiles.cmake  36
-rw-r--r--  HDF5Examples/JAVA/H5T/Makefile.am  75
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Array.txt  21
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ArrayAttribute.txt  21
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Bit.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_BitAttribute.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Commit.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Compound.txt  25
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_CompoundAttribute.txt  25
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Float.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_FloatAttribute.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Integer.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_IntegerAttribute.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ObjectReference.txt  4
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ObjectReferenceAttribute.txt  4
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Opaque.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_OpaqueAttribute.txt  6
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_RegionReference.txt  4
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_RegionReferenceAttribute.txt  4
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_String.txt  5
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_StringAttribute.txt  5
-rw-r--r--  HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_VLString.txt  4
-rw-r--r--  HDF5Examples/JAVA/Makefile.am  28
137 files changed, 19358 insertions, 0 deletions
diff --git a/HDF5Examples/JAVA/CMakeLists.txt b/HDF5Examples/JAVA/CMakeLists.txt
new file mode 100644
index 0000000..5063e5b
--- /dev/null
+++ b/HDF5Examples/JAVA/CMakeLists.txt
@@ -0,0 +1,7 @@
+cmake_minimum_required (VERSION 3.18)
+project (HDFJAVA_EXAMPLES Java)
+
+add_subdirectory (H5D)
+add_subdirectory (H5T)
+add_subdirectory (H5G)
+add_subdirectory (H5J)
diff --git a/HDF5Examples/JAVA/H5D/CMakeLists.txt b/HDF5Examples/JAVA/H5D/CMakeLists.txt
new file mode 100644
index 0000000..60c35dc
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/CMakeLists.txt
@@ -0,0 +1,92 @@
+cmake_minimum_required (VERSION 3.18)
+project (HDF5Examples_JAVA_H5D Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDFJAVA_LIB_DIR}
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH2}
+)
+
+#-----------------------------------------------------------------------------
+# Define Sources
+#-----------------------------------------------------------------------------
+include (Java_sourcefiles.cmake)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else ()
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif ()
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach ()
+
+foreach (HCP_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ get_filename_component (_HCP_FILE ${HCP_JAR} NAME)
+ set (HDFJAVA_CLASSJARS "${_HCP_FILE} ${HDFJAVA_CLASSJARS}")
+endforeach ()
+
+foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+ "Main-Class: ${example_name}
+Class-Path: ${HDFJAVA_CLASSJARS}
+"
+ )
+ add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+ get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+endforeach ()
+
+if (H5EX_BUILD_TESTING)
+ macro (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+ -D "TEST_PROGRAM=${resultfile}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
+ -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
+ -D "TEST_OUTPUT=${PROJECT_BINARY_DIR}/${resultfile}.out"
+ -D "TEST_REFERENCE=${resultfile}.txt"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_SKIP_COMPARE=TRUE"
+ -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif ()
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${resultfile}")
+ endmacro ()
+
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${PROJECT_BINARY_DIR}/${example_name}.h5
+ ${example_name}.out
+ ${example_name}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif ()
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}.txt
+ ${PROJECT_BINARY_DIR}/${example_name}.txt
+ )
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects)
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects")
+ ADD_H5_TEST (${example_name} 0)
+ endforeach ()
+
+endif ()
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Alloc.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Alloc.java
new file mode 100644
index 0000000..09bb180
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Alloc.java
@@ -0,0 +1,293 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the space allocation time
+ for a dataset. The program first creates two datasets,
+ one with the default allocation time (late) and one with
+ early allocation time, and displays whether each has been
+ allocated and its allocation size. Next, it writes data
+ to the datasets, and again displays whether each has been
+ allocated and its allocation size.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Alloc {
+ private static String FILENAME = "H5Ex_D_Alloc.h5";
+ private static String DATASETNAME1 = "DS1";
+ private static String DATASETNAME2 = "DS2";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int FILLVAL = 99;
+ private static final int RANK = 2;
+
+ // Values for the status of space allocation
+ enum H5D_space_status {
+ H5D_SPACE_STATUS_ERROR(-1),
+ H5D_SPACE_STATUS_NOT_ALLOCATED(0),
+ H5D_SPACE_STATUS_PART_ALLOCATED(1),
+ H5D_SPACE_STATUS_ALLOCATED(2);
+ private static final Map<Integer, H5D_space_status> lookup = new HashMap<Integer, H5D_space_status>();
+
+ static
+ {
+ for (H5D_space_status s : EnumSet.allOf(H5D_space_status.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_space_status(int space_status) { this.code = space_status; }
+
+ public int getCode() { return this.code; }
+
+ public static H5D_space_status get(int code) { return lookup.get(code); }
+ }
+
+ private static void allocation()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id1 = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id2 = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+ int space_status = 0;
+ long storage_size = 0;
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = FILLVAL;
+
+ // Create a file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Set the allocation time to "early". This allocates space for
+        // the dataset as soon as it is created, instead of waiting for
+        // the first write.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ System.out.println("Creating datasets...");
+ System.out.println(DATASETNAME1 + " has allocation time H5D_ALLOC_TIME_LATE");
+ System.out.println(DATASETNAME2 + " has allocation time H5D_ALLOC_TIME_EARLY");
+ System.out.println();
+
+        // Create the first dataset using the default dataset creation property list.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id1 = H5.H5Dcreate(file_id, DATASETNAME1, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create the second dataset using the dataset creation property list with early allocation.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id2 = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print space status and storage size for dset1.
+ try {
+ if (dataset_id1 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id1 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ String the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
+
+ // Retrieve and print space status and storage size for dset2.
+ try {
+ if (dataset_id2 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
+ System.out.println();
+
+ System.out.println("Writing data...");
+ System.out.println();
+
+ // Write the data to the datasets.
+ try {
+ if (dataset_id1 >= 0)
+ H5.H5Dwrite(dataset_id1, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ H5.H5Dwrite(dataset_id2, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data[0]);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print space status and storage size for dset1.
+ try {
+ if (dataset_id1 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id1 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME1 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME1 + " is: " + storage_size + " bytes.");
+
+ // Retrieve and print space status and storage size for dset2.
+ try {
+ if (dataset_id2 >= 0)
+ space_status = H5.H5Dget_space_status(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id2 >= 0)
+ storage_size = H5.H5Dget_storage_size(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ the_space = " ";
+ if (H5D_space_status.get(space_status) != H5D_space_status.H5D_SPACE_STATUS_ALLOCATED)
+ the_space += "not ";
+ System.out.println("Space for " + DATASETNAME2 + " has" + the_space + "been allocated.");
+ System.out.println("Storage size for " + DATASETNAME2 + " is: " + storage_size + " bytes.");
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id1 >= 0)
+ H5.H5Dclose(dataset_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id2 >= 0)
+ H5.H5Dclose(dataset_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_D_Alloc.allocation(); }
+}
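The 293-line example above wraps every library call in its own try/catch; stripped of that scaffolding, the allocation-time behavior comes down to one property-list call. Below is a minimal sketch, not part of the patch, using the same hdf.hdf5lib calls as the example; the class name, file name, and dataset name are placeholders, and error handling is omitted for brevity.

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class AllocSketch { // hypothetical condensed version of H5Ex_D_Alloc
    public static void main(String[] args) throws Exception {
        long file_id  = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long space_id = H5.H5Screate_simple(2, new long[] {4, 7}, null);

        // The one non-default step: request early allocation in the DCPL.
        long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);

        long dset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_NATIVE_INT, space_id,
                                    HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);

        // With early allocation, storage exists before the first write.
        System.out.println("space status: " + H5.H5Dget_space_status(dset_id));
        System.out.println("storage size: " + H5.H5Dget_storage_size(dset_id) + " bytes");

        H5.H5Dclose(dset_id);
        H5.H5Pclose(dcpl_id);
        H5.H5Sclose(space_id);
        H5.H5Fclose(file_id);
    }
}

A dataset created without the H5Pset_alloc_time call (like DS1 in the example above) reports zero storage until data is actually written.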
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Checksum.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Checksum.java
new file mode 100644
index 0000000..f4716ad
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Checksum.java
@@ -0,0 +1,349 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Fletcher32 checksum filter. The program first
+ checks whether the Fletcher32 filter is available and, if
+ it is, writes integers to a dataset using Fletcher32 and
+ closes the file. Next, it reopens the file, reads back
+ the data, checks whether the filter detected an error, and
+ outputs the type of filter and the maximum value in the
+ dataset to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Checksum {
+ private static String FILENAME = "H5Ex_D_Checksum.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+    // Values for the filter identifier
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(-1),
+ H5Z_FILTER_NONE(0),
+ H5Z_FILTER_DEFLATE(1),
+ H5Z_FILTER_SHUFFLE(2),
+ H5Z_FILTER_FLETCHER32(3),
+ H5Z_FILTER_SZIP(4),
+ H5Z_FILTER_NBIT(5),
+ H5Z_FILTER_SCALEOFFSET(6),
+ H5Z_FILTER_RESERVED(256),
+ H5Z_FILTER_MAX(65535);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+        H5Z_filter(int filter_id) { this.code = filter_id; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
+ private static boolean checkFletcher32Filter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(H5Z_filter.H5Z_FILTER_FLETCHER32.getCode());
+ if (available == 0) {
+                System.out.println("Fletcher32 filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_FLETCHER32);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+                System.out.println("Fletcher32 filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeChecksum()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create the dataset creation property list, add the Fletcher32 filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_fletcher32(dcpl_id);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readChecksum()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ // Check if the read was successful. Normally we do not perform
+ // error checking in these examples for the sake of clarity, but in
+ // this case we will make an exception because this is how the
+                // Fletcher32 checksum filter reports data errors.
+ if (status < 0) {
+ System.out.print("Dataset read failed!");
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ // Check if the Fletcher32 filter is available and can be used for
+ // both encoding and decoding. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the hdf5 library.
+ if (H5Ex_D_Checksum.checkFletcher32Filter()) {
+ H5Ex_D_Checksum.writeChecksum();
+ H5Ex_D_Checksum.readChecksum();
+ }
+ }
+}
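Because Fletcher32 is an optional filter, the example checks its availability before use; the rest reduces to adding the filter and a chunk size to the DCPL. A minimal write-side sketch under the same assumptions as the previous sketch (placeholder names, no per-call error handling):

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class ChecksumSketch { // hypothetical condensed version of H5Ex_D_Checksum
    public static void main(String[] args) throws Exception {
        // Bail out if the optional Fletcher32 filter is not built in.
        if (H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_FLETCHER32) == 0)
            return;

        long file_id  = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long space_id = H5.H5Screate_simple(2, new long[] {32, 64}, null);

        // Filters operate per chunk, so the DCPL needs both the filter
        // and a chunked layout.
        long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        H5.H5Pset_fletcher32(dcpl_id);
        H5.H5Pset_chunk(dcpl_id, 2, new long[] {4, 8});

        long dset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_STD_I32LE, space_id,
                                    HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        int[][] data = new int[32][64]; // checksummed transparently on write
        H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);

        H5.H5Dclose(dset_id);
        H5.H5Pclose(dcpl_id);
        H5.H5Sclose(space_id);
        H5.H5Fclose(file_id);
    }
}

On the read side no extra calls are needed: a failed checksum surfaces as a failed H5Dread, which is why the example above checks that call's return status.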
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Chunk.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Chunk.java
new file mode 100644
index 0000000..6ca2608
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Chunk.java
@@ -0,0 +1,370 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create a chunked dataset. The
+ program first writes integers in a hyperslab selection to
+ a chunked dataset with dataspace dimensions of DIM_XxDIM_Y
+ and chunk size of CHUNK_XxCHUNK_Y, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen. Finally it reads the data again
+ using a different hyperslab selection, and outputs
+ the result to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Chunk {
+ private static String FILENAME = "H5Ex_D_Chunk.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 6;
+ private static final int DIM_Y = 8;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+    // Values for the dataset storage layout
+ enum H5D_layout {
+ H5D_LAYOUT_ERROR(-1),
+ H5D_COMPACT(0),
+ H5D_CONTIGUOUS(1),
+ H5D_CHUNKED(2),
+ H5D_VIRTUAL(3),
+ H5D_NLAYOUTS(4);
+ private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
+
+ static
+ {
+ for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_layout(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5D_layout get(int code) { return lookup.get(code); }
+ }
+
+ private static void writeChunk()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data to "1", to make it easier to see the selections.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 1;
+
+ // Print the data to the screen.
+ System.out.println("Original Data:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the chunked dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Define and select the first part of the hyperslab selection.
+ long[] start = {0, 0};
+ long[] stride = {3, 3};
+ long[] count = {2, 3};
+ long[] block = {2, 2};
+ try {
+ if ((filespace_id >= 0))
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Define and select the second part of the hyperslab selection,
+ // which is subtracted from the first selection by the use of
+ // H5S_SELECT_NOTB
+ block[0] = 1;
+ block[1] = 1;
+ try {
+ if ((filespace_id >= 0)) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count,
+ block);
+
+ // Write the data to the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readChunk()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Print the storage layout.
+ try {
+ if (dcpl_id >= 0) {
+ int layout_type = H5.H5Pget_layout(dcpl_id);
+ System.out.print("Storage layout for " + DATASETNAME + " is: ");
+ switch (H5D_layout.get(layout_type)) {
+ case H5D_COMPACT:
+ System.out.println("H5D_COMPACT");
+ break;
+ case H5D_CONTIGUOUS:
+ System.out.println("H5D_CONTIGUOUS");
+ break;
+ case H5D_CHUNKED:
+ System.out.println("H5D_CHUNKED");
+ break;
+ case H5D_VIRTUAL:
+ System.out.println("H5D_VIRTUAL");
+ break;
+ case H5D_LAYOUT_ERROR:
+ break;
+ case H5D_NLAYOUTS:
+ break;
+ default:
+ break;
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+        System.out.println("Data as written to disk by hyperslabs:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Initialize the read array.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 0;
+
+ // Define and select the hyperslab to use for reading.
+ try {
+ if (dataset_id >= 0) {
+ filespace_id = H5.H5Dget_space(dataset_id);
+
+ long[] start = {0, 1};
+ long[] stride = {4, 4};
+ long[] count = {2, 2};
+ long[] block = {2, 3};
+
+ if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
+
+ // Read the data using the previously defined hyperslab.
+ if ((dataset_id >= 0) && (filespace_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+        System.out.println("Data as read from disk by hyperslab:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_D_Chunk.writeChunk();
+ H5Ex_D_Chunk.readChunk();
+ }
+}
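The chunking and hyperslab machinery above condenses to two ideas: the chunk shape lives in the DCPL, and a selection on the file dataspace restricts what H5Dwrite touches. A minimal sketch under the same assumptions as the earlier ones (placeholder names, no per-call error handling; the H5S_SELECT_NOTB subtraction from the example is omitted):

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class ChunkSketch { // hypothetical condensed version of H5Ex_D_Chunk
    public static void main(String[] args) throws Exception {
        long file_id  = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long space_id = H5.H5Screate_simple(2, new long[] {6, 8}, null);

        // The chunk shape is a creation-time property of the dataset.
        long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        H5.H5Pset_chunk(dcpl_id, 2, new long[] {4, 4});

        long dset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_STD_I32LE, space_id,
                                    HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);

        // Select a strided pattern of 2x2 blocks, then write through it;
        // as in the example, passing H5S_ALL for the memory space reuses
        // the file-space selection shape.
        H5.H5Sselect_hyperslab(space_id, HDF5Constants.H5S_SELECT_SET,
                               new long[] {0, 0}, new long[] {3, 3},
                               new long[] {2, 3}, new long[] {2, 2});
        int[][] data = new int[6][8];
        H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    space_id, HDF5Constants.H5P_DEFAULT, data);

        H5.H5Dclose(dset_id);
        H5.H5Pclose(dcpl_id);
        H5.H5Sclose(space_id);
        H5.H5Fclose(file_id);
    }
}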
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Compact.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Compact.java
new file mode 100644
index 0000000..cd70926
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Compact.java
@@ -0,0 +1,290 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a compact
+ dataset. The program first writes integers to a compact
+ dataset with dataspace dimensions of DIM_XxDIM_Y, then
+ closes the file. Next, it reopens the file, reads back
+ the data, and outputs it to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Compact {
+ private static String FILENAME = "H5Ex_D_Compact.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+
+    // Values for the dataset storage layout
+ enum H5D_layout {
+ H5D_LAYOUT_ERROR(-1),
+ H5D_COMPACT(0),
+ H5D_CONTIGUOUS(1),
+ H5D_CHUNKED(2),
+ H5D_VIRTUAL(3),
+ H5D_NLAYOUTS(4);
+ private static final Map<Integer, H5D_layout> lookup = new HashMap<Integer, H5D_layout>();
+
+ static
+ {
+ for (H5D_layout s : EnumSet.allOf(H5D_layout.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5D_layout(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5D_layout get(int code) { return lookup.get(code); }
+ }
+
+ private static void writeCompact()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the layout to compact.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_layout(dcpl_id, H5D_layout.H5D_COMPACT.getCode());
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create the dataset using the dataset creation property list (compact layout).
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readCompact()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file and dataset using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Print the storage layout.
+ try {
+ if (dcpl_id >= 0) {
+ int layout_type = H5.H5Pget_layout(dcpl_id);
+ System.out.print("Storage layout for " + DATASETNAME + " is: ");
+ switch (H5D_layout.get(layout_type)) {
+ case H5D_COMPACT:
+ System.out.println("H5D_COMPACT");
+ break;
+ case H5D_CONTIGUOUS:
+ System.out.println("H5D_CONTIGUOUS");
+ break;
+ case H5D_CHUNKED:
+ System.out.println("H5D_CHUNKED");
+ break;
+ case H5D_VIRTUAL:
+ System.out.println("H5D_VIRTUAL");
+ break;
+ case H5D_LAYOUT_ERROR:
+ break;
+ case H5D_NLAYOUTS:
+ break;
+ default:
+ break;
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data for " + DATASETNAME + " is: ");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_D_Compact.writeCompact();
+ H5Ex_D_Compact.readCompact();
+ }
+}
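Compact layout is again a single DCPL call; the payoff is that the raw data is stored inline in the dataset's object header, which only makes sense for small datasets (the file format caps compact storage at 64 KiB). A minimal sketch with placeholder names; note that HDF5Constants.H5D_COMPACT is assumed here to carry the same code (0) that the example's private enum maps it to:

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class CompactSketch { // hypothetical condensed version of H5Ex_D_Compact
    public static void main(String[] args) throws Exception {
        long file_id  = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long space_id = H5.H5Screate_simple(2, new long[] {4, 7}, null);

        // Store the raw data inline in the object header.
        long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        H5.H5Pset_layout(dcpl_id, HDF5Constants.H5D_COMPACT); // assumed constant name

        long dset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_STD_I32LE, space_id,
                                    HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        int[][] data = new int[4][7];
        H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);

        H5.H5Dclose(dset_id);
        H5.H5Pclose(dcpl_id);
        H5.H5Sclose(space_id);
        H5.H5Fclose(file_id);
    }
}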
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_External.java b/HDF5Examples/JAVA/H5D/H5Ex_D_External.java
new file mode 100644
index 0000000..ebccc7a
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_External.java
@@ -0,0 +1,235 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to an
+ external dataset. The program first writes integers to an
+ external dataset with dataspace dimensions of DIM_XxDIM_Y,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs the name of the external data
+ file and the data to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_External {
+ private static String FILENAME = "H5Ex_D_External.h5";
+ private static String EXTERNALNAME = "H5Ex_D_External.data";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+ private static final int NAME_BUF_SIZE = 32;
+
+ private static void writeExternal()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Set the external file in the dataset creation property list.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_external(dcpl_id, EXTERNALNAME, 0, HDF5Constants.H5F_UNLIMITED);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the dataset.
+ try {
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readExternal()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+ String[] Xname = new String[1];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the name of the external file.
+ long[] Xsize = new long[NAME_BUF_SIZE];
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pget_external(dcpl_id, 0, Xsize.length, Xname, Xsize);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ System.out.println(DATASETNAME + " is stored in file: " + Xname[0]);
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_D_External.writeExternal();
+ H5Ex_D_External.readExternal();
+ }
+}
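External storage likewise hangs off the DCPL: H5Pset_external names a flat file, a starting offset, and a size, and subsequent reads and writes go through the HDF5 API unchanged while the raw bytes land in that file. A minimal sketch under the same assumptions as the earlier ones (placeholder names, no per-call error handling):

import hdf.hdf5lib.H5;
import hdf.hdf5lib.HDF5Constants;

public class ExternalSketch { // hypothetical condensed version of H5Ex_D_External
    public static void main(String[] args) throws Exception {
        long file_id  = H5.H5Fcreate("sketch.h5", HDF5Constants.H5F_ACC_TRUNC,
                                     HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
        long space_id = H5.H5Screate_simple(2, new long[] {4, 7}, null);

        // Raw data goes to sketch.data, starting at offset 0, unlimited size.
        long dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
        H5.H5Pset_external(dcpl_id, "sketch.data", 0, HDF5Constants.H5F_UNLIMITED);

        long dset_id = H5.H5Dcreate(file_id, "DS1", HDF5Constants.H5T_STD_I32LE, space_id,
                                    HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
        int[][] data = new int[4][7];
        H5.H5Dwrite(dset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
                    HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, data);

        H5.H5Dclose(dset_id);
        H5.H5Pclose(dcpl_id);
        H5.H5Sclose(space_id);
        H5.H5Fclose(file_id);
    }
}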
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_FillValue.java b/HDF5Examples/JAVA/H5D/H5Ex_D_FillValue.java
new file mode 100644
index 0000000..84de844
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_FillValue.java
@@ -0,0 +1,239 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the fill value for a
+ dataset. The program first sets the fill value to
+ FILLVAL, creates a dataset with dimensions of DIM_XxDIM_Y,
+ reads from the uninitialized dataset, and outputs the
+ contents to the screen. Next, it writes integers to the
+ dataset, reads the data back, and outputs it to the
+ screen. Finally it extends the dataset, reads from it,
+ and outputs the result to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_FillValue {
+ private static String FILENAME = "H5Ex_D_FillValue.h5";
+ private static String DATASETNAME = "ExtendibleArray";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+ private static final int FILLVAL = 99;
+
+ private static void fillValue()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
+ int[][] write_dset_data = new int[DIM_X][DIM_Y];
+ int[][] read_dset_data = new int[DIM_X][DIM_Y];
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ write_dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the fill value for the dataset
+ try {
+ int[] fill_value = {FILLVAL};
+ if (dcpl_id >= 0)
+ H5.H5Pset_fill_value(dcpl_id, HDF5Constants.H5T_NATIVE_INT, fill_value);
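+            // The wrapper takes the fill value as a one-element array whose
+            // Java type corresponds to the HDF5 datatype given (int[] for
+            // H5T_NATIVE_INT here).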
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the allocation time to "early". This way we can be sure
+ // that reading from the dataset immediately after creation will
+ // return the fill value.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_alloc_time(dcpl_id, HDF5Constants.H5D_ALLOC_TIME_EARLY);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset using the dataset creation property list.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read values from the dataset, which has not been written to yet.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, read_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before being written to:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(read_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, write_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data back.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, read_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after being written to:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(read_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
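+
+        // Hedged sketch (extra verification, not in the original flow):
+        // re-query the dataspace to confirm the extent is now EDIM_X x EDIM_Y.
+        try {
+            if (dataset_id >= 0) {
+                long space_id = H5.H5Dget_space(dataset_id);
+                long[] newdims = new long[NDIMS];
+                H5.H5Sget_simple_extent_dims(space_id, newdims, null);
+                System.out.println("Extent after H5Dset_extent: " + newdims[0] + " x " + newdims[1]);
+                H5.H5Sclose(space_id);
+            }
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }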
+
+ // Read from the extended dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < EDIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ System.out.print(extend_dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_D_FillValue.fillValue(); }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Gzip.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Gzip.java
new file mode 100644
index 0000000..da91015
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Gzip.java
@@ -0,0 +1,335 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using gzip compression (also called zlib or deflate). The
+ program first checks if gzip compression is available,
+ then if it is it writes integers to a dataset using gzip,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs the type of compression and the
+ maximum value in the dataset to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Gzip {
+ private static String FILENAME = "H5Ex_D_Gzip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+    // Values identifying the H5Z filter type
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
+ private static boolean checkGzipFilter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeGzip()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the gzip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_deflate(dcpl_id, 9);
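+                // Deflate levels range from 0 (no compression) to 9 (best
+                // compression, slowest); level 9 favors file size over speed.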
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readGzip()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
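+                // (The literal 120 is simply the capacity of the filter-name
+                // buffer handed to the wrapper.)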
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+        // optional part of the HDF5 library.
+ if (H5Ex_D_Gzip.checkGzipFilter()) {
+ H5Ex_D_Gzip.writeGzip();
+ H5Ex_D_Gzip.readGzip();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Hyperslab.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Hyperslab.java
new file mode 100644
index 0000000..3803f1f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Hyperslab.java
@@ -0,0 +1,271 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a
+  dataset by hyperslabs. The program first writes integers
+ in a hyperslab selection to a dataset with dataspace
+ dimensions of DIM_XxDIM_Y, then closes the file. Next, it
+ reopens the file, reads back the data, and outputs it to
+  the screen. Finally, it reads the data again using a
+ different hyperslab selection, and outputs the result to
+ the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Hyperslab {
+ private static String FILENAME = "H5Ex_D_Hyperslab.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 6;
+ private static final int DIM_Y = 8;
+ private static final int RANK = 2;
+
+ private static void writeHyperslab()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data to "1", to make it easier to see the selections.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 1;
+
+ // Print the data to the screen.
+ System.out.println("Original Data:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset. We will use all default properties for this example.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Define and select the first part of the hyperslab selection.
+ long[] start = {0, 0};
+ long[] stride = {3, 3};
+ long[] count = {2, 3};
+ long[] block = {2, 2};
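+        // This selects six 2x2 blocks: rows {0,3} by columns {0,3,6} of the
+        // 6x8 dataspace.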
+ try {
+            if (filespace_id >= 0)
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Define and select the second part of the hyperslab selection,
+ // which is subtracted from the first selection by the use of
+ // H5S_SELECT_NOTB
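+        // (H5S_SELECT_NOTB forms a set difference: it keeps the elements of
+        // the current selection that are not in the new one, so each 2x2
+        // block loses its upper-left element.)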
+ block[0] = 1;
+ block[1] = 1;
+ try {
+            if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_NOTB, start, stride, count,
+ block);
+
+ // Write the data to the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readHyperslab()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written to disk by hyberslabs:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Initialize the read array.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = 0;
+
+ // Define and select the hyperslab to use for reading.
+ try {
+ if (dataset_id >= 0) {
+ filespace_id = H5.H5Dget_space(dataset_id);
+
+ long[] start = {0, 1};
+ long[] stride = {4, 4};
+ long[] count = {2, 2};
+ long[] block = {2, 3};
+
+ if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
+
+ // Read the data using the previously defined hyperslab.
+ if ((dataset_id >= 0) && (filespace_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ filespace_id, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as read from disk by hyberslab:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_D_Hyperslab.writeHyperslab();
+ H5Ex_D_Hyperslab.readHyperslab();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Nbit.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Nbit.java
new file mode 100644
index 0000000..e26894f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Nbit.java
@@ -0,0 +1,303 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the N-Bit filter. The program first checks if the
+ N-Bit filter is available, then if it is it writes integers
+ to a dataset using N-Bit, then closes the file. Next, it
+ reopens the file, reads back the data, and outputs the type
+ of filter and the maximum value in the dataset to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Nbit {
+ private static String FILENAME = "H5Ex_D_Nbit.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+    // Values identifying the H5Z filter type
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
+ private static boolean checkNbitFilter()
+ {
+ try {
+ // Check if N-Bit compression is available and can be used for both compression and decompression.
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_NBIT);
+ if (available == 0) {
+ System.out.println("N-Bit filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_NBIT);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("N-Bit filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ try {
+ // Create a new file using the default properties.
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+
+ // Create the datatype to use with the N-Bit filter. It has an uncompressed size of 32 bits,
+ // but will have a size of 16 bits after being packed by the N-Bit filter.
+ dtype_id = H5.H5Tcopy(HDF5Constants.H5T_STD_I32LE);
+ H5.H5Tset_precision(dtype_id, 16);
+ H5.H5Tset_offset(dtype_id, 5);
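+            // Precision 16 with offset 5 marks bits 5..20 of each 32-bit
+            // word as significant; the N-Bit filter packs only those bits.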
+
+ // Create the dataset creation property list, add the N-Bit filter and set the chunk size.
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ H5.H5Pset_nbit(dcpl_id);
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+
+ // Create the dataset.
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, dtype_id, filespace_id, HDF5Constants.H5P_DEFAULT,
+ dcpl_id, HDF5Constants.H5P_DEFAULT);
+
+ // Write the data to the dataset.
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ if (dtype_id >= 0)
+ H5.H5Tclose(dtype_id);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ private static void readData() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ int status = H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ // Check if the read was successful.
+ if (status < 0)
+ System.out.print("Dataset read failed!");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ /*
+ * Check if N-Bit compression is available and can be used for both compression and decompression.
+ * Normally we do not perform error checking in these examples for the sake of clarity, but in this
+         * case we will make an exception because this filter is an optional part of the HDF5 library.
+ */
+ try {
+ if (H5Ex_D_Nbit.checkNbitFilter()) {
+ H5Ex_D_Nbit.writeData();
+ H5Ex_D_Nbit.readData();
+ }
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_ReadWrite.java b/HDF5Examples/JAVA/H5D/H5Ex_D_ReadWrite.java
new file mode 100644
index 0000000..f850252
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_ReadWrite.java
@@ -0,0 +1,178 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+
+ This example shows how to read and write data to a
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_XxDIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_ReadWrite {
+ private static String FILENAME = "H5Ex_D_ReadWrite.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int RANK = 2;
+
+ private static void WriteDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset. We will use all default properties for this example.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_D_ReadWrite.WriteDataset();
+ H5Ex_D_ReadWrite.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Shuffle.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Shuffle.java
new file mode 100644
index 0000000..13f5eed
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Shuffle.java
@@ -0,0 +1,373 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the shuffle filter with gzip compression. The
+ program first checks if the shuffle and gzip filters are
+ available, then if they are it writes integers to a
+ dataset using shuffle+gzip, then closes the file. Next,
+ it reopens the file, reads back the data, and outputs the
+ types of filters and the maximum value in the dataset to
+ the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Shuffle {
+ private static String FILENAME = "H5Ex_D_Shuffle.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+    // Values identifying the H5Z filter type
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
+ private static boolean checkGzipFilter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static boolean checkShuffleFilter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ if (available == 0) {
+ System.out.println("Shuffle filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SHUFFLE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Shuffle filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeShuffle()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the shuffle
+ // filter and the gzip compression filter.
+        // The order in which the filters are added here is significant:
+        // compression is much more effective when the shuffle filter is
+        // applied first. Filters are invoked, when data is written, in the
+        // order in which they were added to the property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_shuffle(dcpl_id);
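+                // Shuffle regroups the bytes of each chunk so that all first
+                // bytes are stored together, then all second bytes, and so
+                // on; the resulting byte planes deflate far better.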
+ H5.H5Pset_deflate(dcpl_id, 9);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readShuffle()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the number of filters, and retrieve and print the
+ // type of each.
+ try {
+ if (dcpl_id >= 0) {
+ int nfilters = H5.H5Pget_nfilters(dcpl_id);
+ for (int indx = 0; indx < nfilters; indx++) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, indx, flags, cd_nelmts, cd_values, 120,
+ filter_name, filter_config);
+ System.out.print("Filter " + indx + ": Type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+        // optional part of the HDF5 library.
+ // Similarly, check for availability of the shuffle filter.
+ if (H5Ex_D_Shuffle.checkGzipFilter() && H5Ex_D_Shuffle.checkShuffleFilter()) {
+ H5Ex_D_Shuffle.writeShuffle();
+ H5Ex_D_Shuffle.readShuffle();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Sofloat.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Sofloat.java
new file mode 100644
index 0000000..17f6e30
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Sofloat.java
@@ -0,0 +1,361 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Scale-Offset filter. The program first checks
+ if the Scale-Offset filter is available, then if it is it
+ writes floating point numbers to a dataset using
+  Scale-Offset, then closes the file. Next, it reopens the
+ file, reads back the data, and outputs the type of filter
+ and the maximum value in the dataset to the screen.
+ ************************************************************/
+
+import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Sofloat {
+
+ private static String FILENAME = "H5Ex_D_Sofloat.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+    // Values identifying the H5Z filter type
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
+ private static boolean checkScaleoffsetFilter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (available == 0) {
+ System.out.println("Scale-Offset filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Scale-Offset filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ double[][] dset_data = new double[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ double x = indx;
+ double y = jndx;
+ dset_data[indx][jndx] = (x + 1) / (y + 0.3) + y;
+ }
+
+        // Find the maximum and minimum values in the write buffer, to
+        // compare with the data read back later.
+ double max = dset_data[0][0];
+ double min = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ if (min > dset_data[indx][jndx])
+ min = dset_data[indx][jndx];
+ }
+
+        // Print the maximum and minimum values.
+ DecimalFormat df = new DecimalFormat("#,##0.000000", new DecimalFormatSymbols(Locale.US));
+ System.out.println("Maximum value in write buffer is: " + df.format(max));
+ System.out.println("Minimum value in write buffer is: " + df.format(min));
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the Scale-Offset
+ // filter and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
+ H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_FLOAT_DSCALE, 2);
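+                // With H5Z_SO_FLOAT_DSCALE the scale factor (2 here) is the
+                // number of decimal digits to preserve, i.e. values are
+                // stored to a precision of 0.01.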
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close file
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ double[][] dset_data = new double[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read correctly.
+ double max = dset_data[0][0];
+ double min = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ if (min > dset_data[indx][jndx])
+ min = dset_data[indx][jndx];
+ }
+
+ // Print the maximum value.
+ DecimalFormat df = new DecimalFormat("#,##0.000000", new DecimalFormatSymbols(Locale.US));
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + df.format(max));
+ System.out.println("Minimum value in " + DATASETNAME + " is: " + df.format(min));
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+
+ // Check if Scale-Offset compression is available and can be used
+ // for both compression and decompression. Normally we do not
+ // perform error checking in these examples for the sake of
+ // clarity, but in this case we will make an exception because this
+ // filter is an optional part of the hdf5 library.
+ if (H5Ex_D_Sofloat.checkScaleoffsetFilter()) {
+ H5Ex_D_Sofloat.writeData();
+ H5Ex_D_Sofloat.readData();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Soint.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Soint.java
new file mode 100644
index 0000000..5691a8c
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Soint.java
@@ -0,0 +1,335 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using the Scale-Offset filter. The program first checks
+ whether the Scale-Offset filter is available; if it is, it
+ writes integers to a dataset using Scale-Offset, then
+ closes the file. Next, it reopens the file, reads back the
+ data, and outputs the type of filter and the maximum value
+ in the dataset to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Soint {
+
+ private static String FILENAME = "H5Ex_D_Soint.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values for the filter type returned by H5Pget_filter
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
+ private static boolean checkScaleoffsetFilter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (available == 0) {
+ System.out.println("Scale-Offset filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SCALEOFFSET);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("Scale-Offset filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the Scale-Offset
+ // filter and set the chunk size.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
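+ // H5Z_SO_INT_MINBITS_DEFAULT lets the library compute the minimum
+ // number of bits needed to store each integer value losslessly.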
+ H5.H5Pset_scaleoffset(dcpl_id, HDF5Constants.H5Z_SO_INT,
+ HDF5Constants.H5Z_SO_INT_MINBITS_DEFAULT);
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close file
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++) {
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+
+ // Check if Scale-Offset compression is available and can be used
+ // for both compression and decompression. Normally we do not
+ // perform error checking in these examples for the sake of
+ // clarity, but in this case we will make an exception because this
+ // filter is an optional part of the hdf5 library.
+ if (H5Ex_D_Soint.checkScaleoffsetFilter()) {
+ H5Ex_D_Soint.writeData();
+ H5Ex_D_Soint.readData();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Szip.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Szip.java
new file mode 100644
index 0000000..158df2c
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Szip.java
@@ -0,0 +1,336 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using szip compression. The program first checks whether
+ szip compression is available; if it is, it writes
+ integers to a dataset using szip, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs the type of compression and the maximum value in
+ the dataset to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Szip {
+ private static String FILENAME = "H5Ex_D_Szip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 32;
+ private static final int DIM_Y = 64;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 8;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values for the filter type returned by H5Pget_filter
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
+ private static boolean checkSzipFilter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_SZIP);
+ if (available == 0) {
+ System.out.println("szip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_SZIP);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("szip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeSzip()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the szip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
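+ // H5_SZIP_NN_OPTION_MASK selects szip's nearest-neighbor coding;
+ // the second parameter (8) is the number of pixels per block.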
+ H5.H5Pset_szip(dcpl_id, HDF5Constants.H5_SZIP_NN_OPTION_MASK, 8);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readSzip()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ case H5Z_FILTER_NBIT:
+ System.out.println("H5Z_FILTER_NBIT");
+ break;
+ case H5Z_FILTER_SCALEOFFSET:
+ System.out.println("H5Z_FILTER_SCALEOFFSET");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Find the maximum value in the dataset, to verify that it was read
+ // correctly.
+ int max = dset_data[0][0];
+ for (int indx = 0; indx < DIM_X; indx++) {
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ if (max < dset_data[indx][jndx])
+ max = dset_data[indx][jndx];
+ }
+ // Print the maximum value.
+ System.out.println("Maximum value in " + DATASETNAME + " is: " + max);
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ // Check if szip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the hdf5 library.
+ if (H5Ex_D_Szip.checkSzipFilter()) {
+ H5Ex_D_Szip.writeSzip();
+ H5Ex_D_Szip.readSzip();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_Transform.java b/HDF5Examples/JAVA/H5D/H5Ex_D_Transform.java
new file mode 100644
index 0000000..9ad7e79
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_Transform.java
@@ -0,0 +1,249 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write data to a dataset
+ using a data transform expression. The program first
+ writes integers to a dataset using the transform
+ expression TRANSFORM, then closes the file. Next, it
+ reopens the file, reads back the data without a transform,
+ and outputs the data to the screen. Finally it reads the
+ data using the transform expression RTRANSFORM and outputs
+ the results to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_Transform {
+
+ private static String FILE = "H5Ex_D_Transform.h5";
+ private static String DATASET = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static String TRANSFORM = "x+1";
+ private static String RTRANSFORM = "x-1";
+
+ private static void writeData()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dxpl_id = HDF5Constants.H5I_INVALID_HID;
+
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize data.
+ for (int i = 0; i < DIM_X; i++)
+ for (int j = 0; j < DIM_Y; j++)
+ dset_data[i][j] = i * j - j;
+
+ // Output the data to the screen.
+ System.out.println("Original Data:");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Create a new file using the default properties.
+ try {
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(2, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset transfer property list and define the transform expression.
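+ // The expression is applied element-by-element during the transfer,
+ // with "x" standing for each value being written.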
+ try {
+ dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ if (dxpl_id >= 0)
+ H5.H5Pset_data_transform(dxpl_id, TRANSFORM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset using the default properties. Unfortunately we must save as
+ // a native type or the transform operation will fail.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASET, HDF5Constants.H5T_NATIVE_INT, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset using the dataset transfer property list.
+ try {
+ if ((dataset_id >= 0) && (dxpl_id >= 0))
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, dxpl_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dxpl_id >= 0)
+ H5.H5Pclose(dxpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readData()
+ {
+
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dxpl_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Open an existing file using the default properties.
+ try {
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASET, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written with transform '" + TRANSFORM + "'");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Create the dataset transfer property list and define the transform expression.
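+ // Reading with RTRANSFORM ("x-1") inverts the "x+1" applied at write
+ // time, recovering the original values.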
+ try {
+ dxpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_XFER);
+ if (dxpl_id >= 0)
+ H5.H5Pset_data_transform(dxpl_id, RTRANSFORM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read the data using the dataset transfer property list.
+ try {
+ if ((dataset_id >= 0) && (dxpl_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, dxpl_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Data as written with transform '" + TRANSFORM + "' and read with transform '" +
+ RTRANSFORM + "'");
+ for (int i = 0; i < DIM_X; i++) {
+ System.out.print(" [");
+ for (int j = 0; j < DIM_Y; j++)
+ System.out.print(" " + dset_data[i][j] + " ");
+ System.out.println("]");
+ }
+
+ // Close and release resources.
+ try {
+ if (dxpl_id >= 0)
+ H5.H5Pclose(dxpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_D_Transform.writeData();
+ H5Ex_D_Transform.readData();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedAdd.java b/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedAdd.java
new file mode 100644
index 0000000..13b9295
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedAdd.java
@@ -0,0 +1,392 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_XxDIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data,
+ outputs it to the screen, extends the dataset, and writes
+ new data to the extended portions of the dataset. Finally
+ it reopens the file again, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedAdd {
+ private static String FILENAME = "H5Ex_D_UnlimitedAdd.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
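+ // H5S_UNLIMITED lets each dimension grow without bound; such datasets
+ // must use chunked storage.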
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
+ long[] start = {0, 0};
+ long[] count = new long[2];
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the dataspace and allocate memory for the read buffer. This is a
+ // two-dimensional dataset, so the dynamic allocation must be done
+ // in steps.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
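+ // H5Dset_extent grows the dataset to extdims; elements already
+ // written keep their values.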
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Select the entire dataspace.
+ try {
+ if (dataspace_id >= 0) {
+ H5.H5Sselect_all(dataspace_id);
+
+ // Subtract a hyperslab reflecting the original dimensions from the
+ // selection. The selection now contains only the newly extended
+ // portions of the dataset.
+ count[0] = dims[0];
+ count[1] = dims[1];
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
+
+ // Write the data to the selected portion of the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_D_UnlimitedAdd.writeUnlimited();
+ H5Ex_D_UnlimitedAdd.extendUnlimited();
+ H5Ex_D_UnlimitedAdd.readUnlimited();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java b/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java
new file mode 100644
index 0000000..d15bbf8
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedGzip.java
@@ -0,0 +1,504 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset with gzip compression. The program first writes
+ integers to a gzip compressed dataset with dataspace
+ dimensions of DIM_XxDIM_Y, then closes the file. Next, it
+ reopens the file, reads back the data, outputs it to the
+ screen, extends the dataset, and writes new data to the
+ extended portions of the dataset. Finally it reopens the
+ file again, reads back the data, and outputs it to the
+ screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedGzip {
+ private static String FILENAME = "H5Ex_D_UnlimitedGzip.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ // Values for the filter type returned by H5Pget_filter
+ enum H5Z_filter {
+ H5Z_FILTER_ERROR(HDF5Constants.H5Z_FILTER_ERROR),
+ H5Z_FILTER_NONE(HDF5Constants.H5Z_FILTER_NONE),
+ H5Z_FILTER_DEFLATE(HDF5Constants.H5Z_FILTER_DEFLATE),
+ H5Z_FILTER_SHUFFLE(HDF5Constants.H5Z_FILTER_SHUFFLE),
+ H5Z_FILTER_FLETCHER32(HDF5Constants.H5Z_FILTER_FLETCHER32),
+ H5Z_FILTER_SZIP(HDF5Constants.H5Z_FILTER_SZIP),
+ H5Z_FILTER_NBIT(HDF5Constants.H5Z_FILTER_NBIT),
+ H5Z_FILTER_SCALEOFFSET(HDF5Constants.H5Z_FILTER_SCALEOFFSET),
+ H5Z_FILTER_RESERVED(HDF5Constants.H5Z_FILTER_RESERVED),
+ H5Z_FILTER_MAX(HDF5Constants.H5Z_FILTER_MAX);
+ private static final Map<Integer, H5Z_filter> lookup = new HashMap<Integer, H5Z_filter>();
+
+ static
+ {
+ for (H5Z_filter s : EnumSet.allOf(H5Z_filter.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5Z_filter(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5Z_filter get(int code) { return lookup.get(code); }
+ }
+
+ private static boolean checkGzipFilter()
+ {
+ try {
+ int available = H5.H5Zfilter_avail(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (available == 0) {
+ System.out.println("gzip filter not available.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ int filter_info = H5.H5Zget_filter_info(HDF5Constants.H5Z_FILTER_DEFLATE);
+ if (((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_ENCODE_ENABLED) == 0) ||
+ ((filter_info & HDF5Constants.H5Z_FILTER_CONFIG_DECODE_ENABLED) == 0)) {
+ System.out.println("gzip filter not available for encoding and decoding.");
+ return false;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ return true;
+ }
+
+ private static void writeUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list, add the gzip compression
+ // filter.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ if (dcpl_id >= 0) {
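+ // Compression level 9 requests the maximum (and slowest) gzip
+ // compression.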
+ H5.H5Pset_deflate(dcpl_id, 9);
+ // Set the chunk size.
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
+ long[] start = {0, 0};
+ long[] count = new long[2];
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the dataspace and allocate memory for the read buffer. This is a
+ // two-dimensional dataset, so the dynamic allocation must be done
+ // in steps.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
+ // Select the entire dataspace.
+ try {
+ if (dataspace_id >= 0) {
+ H5.H5Sselect_all(dataspace_id);
+
+ // Subtract a hyperslab reflecting the original dimensions from the
+ // selection. The selection now contains only the newly extended
+ // portions of the dataset.
+ count[0] = dims[0];
+ count[1] = dims[1];
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_NOTB, start, null, count, null);
+
+ // Write the data to the selected portion of the dataset.
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve the dataset creation property list.
+ try {
+ if (dataset_id >= 0)
+ dcpl_id = H5.H5Dget_create_plist(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Retrieve and print the filter type. Here we only retrieve the
+ // first filter because we know that we only added one filter.
+ try {
+ if (dcpl_id >= 0) {
+ // Java lib requires a valid filter_name object and cd_values
+ int[] flags = {0};
+ long[] cd_nelmts = {1};
+ int[] cd_values = {0};
+ String[] filter_name = {""};
+ int[] filter_config = {0};
+ int filter_type = -1;
+ filter_type = H5.H5Pget_filter(dcpl_id, 0, flags, cd_nelmts, cd_values, 120, filter_name,
+ filter_config);
+ System.out.print("Filter type is: ");
+ switch (H5Z_filter.get(filter_type)) {
+ case H5Z_FILTER_DEFLATE:
+ System.out.println("H5Z_FILTER_DEFLATE");
+ break;
+ case H5Z_FILTER_SHUFFLE:
+ System.out.println("H5Z_FILTER_SHUFFLE");
+ break;
+ case H5Z_FILTER_FLETCHER32:
+ System.out.println("H5Z_FILTER_FLETCHER32");
+ break;
+ case H5Z_FILTER_SZIP:
+ System.out.println("H5Z_FILTER_SZIP");
+ break;
+ default:
+ System.out.println("H5Z_FILTER_ERROR");
+ }
+ System.out.println();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Allocate array of pointers to rows.
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ // Check if gzip compression is available and can be used for both
+ // compression and decompression. Normally we do not perform error
+ // checking in these examples for the sake of clarity, but in this
+ // case we will make an exception because this filter is an
+ // optional part of the hdf5 library.
+ if (H5Ex_D_UnlimitedGzip.checkGzipFilter()) {
+ H5Ex_D_UnlimitedGzip.writeUnlimited();
+ H5Ex_D_UnlimitedGzip.extendUnlimited();
+ H5Ex_D_UnlimitedGzip.readUnlimited();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedMod.java b/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedMod.java
new file mode 100644
index 0000000..3adc116
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/H5Ex_D_UnlimitedMod.java
@@ -0,0 +1,378 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and extend an unlimited
+ dataset. The program first writes integers to a dataset
+ with dataspace dimensions of DIM_XxDIM_Y, then closes the
+ file. Next, it reopens the file, reads back the data,
+ outputs it to the screen, extends the dataset, and writes
+ new data to the entire extended dataset. Finally it
+ reopens the file again, reads back the data, and outputs it
+ to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_D_UnlimitedMod {
+ private static String FILENAME = "H5Ex_D_UnlimitedMod.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM_X = 4;
+ private static final int DIM_Y = 7;
+ private static final int EDIM_X = 6;
+ private static final int EDIM_Y = 10;
+ private static final int CHUNK_X = 4;
+ private static final int CHUNK_Y = 4;
+ private static final int RANK = 2;
+ private static final int NDIMS = 2;
+
+ private static void writeUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dcpl_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] chunk_dims = {CHUNK_X, CHUNK_Y};
+ long[] maxdims = {HDF5Constants.H5S_UNLIMITED, HDF5Constants.H5S_UNLIMITED};
+ int[][] dset_data = new int[DIM_X][DIM_Y];
+
+ // Initialize the dataset.
+ for (int indx = 0; indx < DIM_X; indx++)
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ dset_data[indx][jndx] = indx * jndx - jndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace with unlimited dimensions.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, maxdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset creation property list.
+ try {
+ dcpl_id = H5.H5Pcreate(HDF5Constants.H5P_DATASET_CREATE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set the chunk size.
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pset_chunk(dcpl_id, NDIMS, chunk_dims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the unlimited dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (dcpl_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, dcpl_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dcpl_id >= 0)
+ H5.H5Pclose(dcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void extendUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ long[] extdims = {EDIM_X, EDIM_Y};
+ int[][] dset_data;
+ int[][] extend_dset_data = new int[EDIM_X][EDIM_Y];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the dataspace and allocate memory for the read buffer. This is a
+ // two-dimensional dataset, so the buffer dimensions are taken from the
+ // current dataspace extent.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate the two-dimensional read buffer using the current dimensions.
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset before extension:");
+ for (int indx = 0; indx < DIM_X; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < DIM_Y; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Extend the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dset_extent(dataset_id, extdims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
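+ // Note: H5Dset_extent only changes the dataset's extent; values already
+ // written are preserved, and new elements read as the fill value (0 by
+ // default for integers) until they are written.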
+
+ // Retrieve the dataspace for the newly extended dataset.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Initialize data for writing to the extended dataset.
+ for (int indx = 0; indx < EDIM_X; indx++)
+ for (int jndx = 0; jndx < EDIM_Y; jndx++)
+ extend_dset_data[indx][jndx] = jndx;
+
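+ // The write below passes the refreshed file dataspace, whose default
+ // selection covers the entire extended extent, so every element is
+ // replaced.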
+ // Write the data to the extended dataset.
+ try {
+ if ((dataspace_id >= 0) && (dataset_id >= 0))
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, extend_dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readUnlimited()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM_X, DIM_Y};
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for the read buffer as before.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Allocate the two-dimensional read buffer using the current dimensions.
+ dset_data = new int[(int)dims[0]][(int)dims[1]];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Dataset after extension:");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [ ");
+ for (int jndx = 0; jndx < dims[1]; jndx++)
+ System.out.print(dset_data[indx][jndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_D_UnlimitedMod.writeUnlimited();
+ H5Ex_D_UnlimitedMod.extendUnlimited();
+ H5Ex_D_UnlimitedMod.readUnlimited();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5D/JavaDatasetExample.sh.in b/HDF5Examples/JAVA/H5D/JavaDatasetExample.sh.in
new file mode 100644
index 0000000..c2699a3
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/JavaDatasetExample.sh.in
@@ -0,0 +1,493 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+IS_DARWIN="@H5_IS_DARWIN@"
+
+USE_FILTER_SZIP="@USE_FILTER_SZIP@"
+USE_FILTER_DEFLATE="@USE_FILTER_DEFLATE@"
+
+TESTNAME=EX_Datasets
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+BASENAME='basename'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDDIR="."
+BLDLIBDIR="$BLDDIR/testlibs"
+HDFTEST_HOME="$top_srcdir/java/examples/datasets"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@datasets.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All library files are copied from the source directory to the test
+# directory.
+# NOTE: Keep this framework when adding or removing test files.
+#       This list is also used to check that the files exist.
+#       A line starting with '#' (no leading space) is treated as a comment.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Alloc.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Checksum.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Chunk.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Compact.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_External.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_FillValue.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Gzip.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Hyperslab.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_ReadWrite.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Shuffle.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Szip.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_UnlimitedAdd.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_UnlimitedGzip.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_UnlimitedMod.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Nbit.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Transform.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Sofloat.txt
+$HDFTEST_HOME/../tfiles/examples.datasets.H5Ex_D_Soint.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+COPY_JARTESTFILES="$LIST_JAR_TESTFILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # Copy the library files; -f forces a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip the copy if srcdir and destdir are the same directory (compared
+ # by inode); this happens when building/testing in the source
+ # directory, where cp onto itself would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -fR $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this exit to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ BNAME=`$BASENAME $tstfile`
+ if [ "$BNAME" = "libhdf5_java.dylib" ]; then
+ COPIED_LIBHDF5_JAVA=1
+ fi
+ fi
+ fi
+ done
+ if [ "$IS_DARWIN" = "yes" ] && [ "${COPIED_LIBHDF5_JAVA:-0}" -eq 1 ]; then
+ (cd $BLDLIBDIR; \
+ install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \
+ exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \
+ echo $exist_path; \
+ install_name_tool -change $exist_path @rpath/libhdf5.dylib libhdf5_java.dylib)
+ fi
+ # Copy the jar files; -f forces a fresh copy.
+ for tstfile in $COPY_JARTESTFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip the copy if srcdir and destdir are the same directory (compared
+ # by inode); this happens when building/testing in the source
+ # directory, where cp onto itself would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -fR $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this exit to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # Skip the rm if srcdir and destdir are the same directory (compared
+ # by inode); this happens when building/testing in the source
+ # directory, where the rm would delete the library sources.
+ SDIR=$HDFLIB_HOME
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM -rf $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # Copy the expected-output data files; -f forces a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip the copy if srcdir and destdir are the same directory (compared
+ # by inode); this happens when building/testing in the source
+ # directory, where cp onto itself would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this exit to CREATE the expected file.
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/examples.datasets.H5Ex_D_*.txt
+ $RM $BLDDIR/H5Ex_D_*.out
+ $RM $BLDDIR/H5Ex_D_*.h5
+ $RM $BLDDIR/H5Ex_D_External.data
+}
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+  TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
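+# Every test below follows the same pattern (H5Ex_D_Foo stands in for an
+# example class name, not a real example):
+#
+#   $RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Foo > H5Ex_D_Foo.out
+#   diff H5Ex_D_Foo.out examples.datasets.H5Ex_D_Foo.txt
+#
+# An empty diff counts as PASSED; anything else increments nerrors.
+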
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc"
+TESTING examples.datasets.H5Ex_D_Alloc
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Alloc > H5Ex_D_Alloc.out
+)
+if diff H5Ex_D_Alloc.out examples.datasets.H5Ex_D_Alloc.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Alloc"
+else
+ echo "**FAILED** datasets.H5Ex_D_Alloc"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum"
+TESTING examples.datasets.H5Ex_D_Checksum
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Checksum > H5Ex_D_Checksum.out
+)
+if diff H5Ex_D_Checksum.out examples.datasets.H5Ex_D_Checksum.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Checksum"
+else
+ echo "**FAILED** datasets.H5Ex_D_Checksum"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk"
+TESTING examples.datasets.H5Ex_D_Chunk
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Chunk > H5Ex_D_Chunk.out
+)
+if diff H5Ex_D_Chunk.out examples.datasets.H5Ex_D_Chunk.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Chunk"
+else
+ echo "**FAILED** datasets.H5Ex_D_Chunk"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact"
+TESTING examples.datasets.H5Ex_D_Compact
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Compact > H5Ex_D_Compact.out
+)
+if diff H5Ex_D_Compact.out examples.datasets.H5Ex_D_Compact.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Compact"
+else
+ echo "**FAILED** datasets.H5Ex_D_Compact"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External"
+TESTING examples.datasets.H5Ex_D_External
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_External > H5Ex_D_External.out
+)
+if diff H5Ex_D_External.out examples.datasets.H5Ex_D_External.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_External"
+else
+ echo "**FAILED** datasets.H5Ex_D_External"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue"
+TESTING examples.datasets.H5Ex_D_FillValue
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_FillValue > H5Ex_D_FillValue.out
+)
+if diff H5Ex_D_FillValue.out examples.datasets.H5Ex_D_FillValue.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_FillValue"
+else
+ echo "**FAILED** datasets.H5Ex_D_FillValue"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+if test "$USE_FILTER_DEFLATE" = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip"
+ TESTING examples.datasets.H5Ex_D_Gzip
+ (
+ $RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Gzip > H5Ex_D_Gzip.out
+ )
+ if diff H5Ex_D_Gzip.out examples.datasets.H5Ex_D_Gzip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Gzip"
+ else
+ echo "**FAILED** datasets.H5Ex_D_Gzip"
+ nerrors="`expr $nerrors + 1`"
+ fi
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab"
+TESTING examples.datasets.H5Ex_D_Hyperslab
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Hyperslab > H5Ex_D_Hyperslab.out
+)
+if diff H5Ex_D_Hyperslab.out examples.datasets.H5Ex_D_Hyperslab.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Hyperslab"
+else
+ echo "**FAILED** datasets.H5Ex_D_Hyperslab"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite"
+TESTING examples.datasets.H5Ex_D_ReadWrite
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_ReadWrite > H5Ex_D_ReadWrite.out
+)
+if diff H5Ex_D_ReadWrite.out examples.datasets.H5Ex_D_ReadWrite.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_ReadWrite"
+else
+ echo "**FAILED** datasets.H5Ex_D_ReadWrite"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+if test "$USE_FILTER_DEFLATE" = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle"
+ TESTING examples.datasets.H5Ex_D_Shuffle
+ (
+ $RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Shuffle > H5Ex_D_Shuffle.out
+ )
+ if diff H5Ex_D_Shuffle.out examples.datasets.H5Ex_D_Shuffle.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Shuffle"
+ else
+ echo "**FAILED** datasets.H5Ex_D_Shuffle"
+ nerrors="`expr $nerrors + 1`"
+ fi
+fi
+
+if test "$USE_FILTER_SZIP" = "yes"; then
+ echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip"
+ TESTING examples.datasets.H5Ex_D_Szip
+ (
+ $RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Szip > H5Ex_D_Szip.out
+ )
+ if diff H5Ex_D_Szip.out examples.datasets.H5Ex_D_Szip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Szip"
+ else
+ echo "**FAILED** datasets.H5Ex_D_Szip"
+ nerrors="`expr $nerrors + 1`"
+ fi
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd"
+TESTING examples.datasets.H5Ex_D_UnlimitedAdd
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedAdd > H5Ex_D_UnlimitedAdd.out
+)
+if diff H5Ex_D_UnlimitedAdd.out examples.datasets.H5Ex_D_UnlimitedAdd.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedAdd"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedAdd"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip"
+TESTING examples.datasets.H5Ex_D_UnlimitedGzip
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedGzip > H5Ex_D_UnlimitedGzip.out
+)
+if diff H5Ex_D_UnlimitedGzip.out examples.datasets.H5Ex_D_UnlimitedGzip.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedGzip"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedGzip"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod"
+TESTING examples.datasets.H5Ex_D_UnlimitedMod
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_UnlimitedMod > H5Ex_D_UnlimitedMod.out
+)
+if diff H5Ex_D_UnlimitedMod.out examples.datasets.H5Ex_D_UnlimitedMod.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_UnlimitedMod"
+else
+ echo "**FAILED** datasets.H5Ex_D_UnlimitedMod"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit"
+TESTING examples.datasets.H5Ex_D_Nbit
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Nbit > H5Ex_D_Nbit.out
+)
+if diff H5Ex_D_Nbit.out examples.datasets.H5Ex_D_Nbit.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Nbit"
+else
+ echo "**FAILED** datasets.H5Ex_D_Nbit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform"
+TESTING examples.datasets.H5Ex_D_Transform
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Transform > H5Ex_D_Transform.out
+)
+if diff H5Ex_D_Transform.out examples.datasets.H5Ex_D_Transform.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Transform"
+else
+ echo "**FAILED** datasets.H5Ex_D_Transform"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat"
+TESTING examples.datasets.H5Ex_D_Sofloat
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Sofloat > H5Ex_D_Sofloat.out
+)
+if diff H5Ex_D_Sofloat.out examples.datasets.H5Ex_D_Sofloat.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Sofloat"
+else
+ echo "**FAILED** datasets.H5Ex_D_Sofloat"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint"
+TESTING examples.datasets.H5Ex_D_Soint
+(
+$RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datasets.H5Ex_D_Soint > H5Ex_D_Soint.out
+)
+if diff H5Ex_D_Soint.out examples.datasets.H5Ex_D_Soint.txt > /dev/null; then
+ echo " PASSED datasets.H5Ex_D_Soint"
+else
+ echo "**FAILED** datasets.H5Ex_D_Soint"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
diff --git a/HDF5Examples/JAVA/H5D/Java_sourcefiles.cmake b/HDF5Examples/JAVA/H5D/Java_sourcefiles.cmake
new file mode 100644
index 0000000..2977d85
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/Java_sourcefiles.cmake
@@ -0,0 +1,38 @@
+#-----------------------------------------------------------------------------
+# Define Sources, one file per application
+#-----------------------------------------------------------------------------
+set (HDF_JAVA_EXAMPLES
+ H5Ex_D_Alloc.java
+ H5Ex_D_Checksum.java
+ H5Ex_D_Chunk.java
+ H5Ex_D_Compact.java
+ H5Ex_D_External.java
+ H5Ex_D_FillValue.java
+ H5Ex_D_Hyperslab.java
+ H5Ex_D_ReadWrite.java
+ H5Ex_D_UnlimitedAdd.java
+ H5Ex_D_UnlimitedMod.java
+ H5Ex_D_Nbit.java
+ H5Ex_D_Transform.java
+ H5Ex_D_Sofloat.java
+ H5Ex_D_Soint.java
+)
+
+set (HDF_JAVA_ZLIB_EXAMPLES
+ H5Ex_D_Gzip.java
+ H5Ex_D_Shuffle.java
+ H5Ex_D_UnlimitedGzip.java
+)
+
+set (HDF_JAVA_SZIP_EXAMPLES
+ H5Ex_D_Szip.java
+)
+
+# Add the filter examples only when the corresponding compression support
+# is enabled in the HDF5 build.
+if (${HDF5_ENABLE_Z_LIB_SUPPORT})
+  set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES} ${HDF_JAVA_ZLIB_EXAMPLES})
+endif ()
+
+if (${HDF5_ENABLE_SZIP_SUPPORT})
+  set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES} ${HDF_JAVA_SZIP_EXAMPLES})
+endif ()
diff --git a/HDF5Examples/JAVA/H5D/Makefile.am b/HDF5Examples/JAVA/H5D/Makefile.am
new file mode 100644
index 0000000..abcf64b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/Makefile.am
@@ -0,0 +1,75 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/datasets
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)datasets.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5Ex_D_Alloc.java \
+ H5Ex_D_Checksum.java \
+ H5Ex_D_Chunk.java \
+ H5Ex_D_Compact.java \
+ H5Ex_D_External.java \
+ H5Ex_D_FillValue.java \
+ H5Ex_D_Gzip.java \
+ H5Ex_D_Hyperslab.java \
+ H5Ex_D_ReadWrite.java \
+ H5Ex_D_Shuffle.java \
+ H5Ex_D_Szip.java \
+ H5Ex_D_UnlimitedAdd.java \
+ H5Ex_D_UnlimitedGzip.java \
+ H5Ex_D_UnlimitedMod.java \
+ H5Ex_D_Nbit.java \
+ H5Ex_D_Transform.java \
+ H5Ex_D_Sofloat.java \
+ H5Ex_D_Soint.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = JavaDatasetExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class JavaDatasetExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Alloc.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Alloc.txt
new file mode 100644
index 0000000..6fd810b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Alloc.txt
@@ -0,0 +1,16 @@
+Creating datasets...
+DS1 has allocation time H5D_ALLOC_TIME_LATE
+DS2 has allocation time H5D_ALLOC_TIME_EARLY
+
+Space for DS1 has not been allocated.
+Storage size for DS1 is: 0 bytes.
+Space for DS2 has been allocated.
+Storage size for DS2 is: 112 bytes.
+
+Writing data...
+
+Space for DS1 has been allocated.
+Storage size for DS1 is: 112 bytes.
+Space for DS2 has been allocated.
+Storage size for DS2 is: 112 bytes.
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Checksum.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Checksum.txt
new file mode 100644
index 0000000..676aebb
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Checksum.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_FLETCHER32
+
+Maximum value in DS1 is: 1890
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Chunk.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Chunk.txt
new file mode 100644
index 0000000..5f4c2de
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Chunk.txt
@@ -0,0 +1,26 @@
+Original Data:
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+
+Storage layout for DS1 is: H5D_CHUNKED
+
+Data as written to disk by hyberslabs:
+ [ 0 1 0 0 1 0 0 1 ]
+ [ 1 1 0 1 1 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 1 0 0 1 0 0 1 ]
+ [ 1 1 0 1 1 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+
+Data as read from disk by hyberslab:
+ [ 0 1 0 0 0 0 0 1 ]
+ [ 0 1 0 1 0 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 1 0 1 0 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Compact.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Compact.txt
new file mode 100644
index 0000000..e34f3c1
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Compact.txt
@@ -0,0 +1,8 @@
+Storage layout for DS1 is: H5D_COMPACT
+
+Data for DS1 is:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_External.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_External.txt
new file mode 100644
index 0000000..5878149
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_External.txt
@@ -0,0 +1,7 @@
+DS1 is stored in file: H5Ex_D_External.data
+DS1:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_FillValue.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_FillValue.txt
new file mode 100644
index 0000000..68d826b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_FillValue.txt
@@ -0,0 +1,20 @@
+Dataset before being written to:
+ [ 99 99 99 99 99 99 99 ]
+ [ 99 99 99 99 99 99 99 ]
+ [ 99 99 99 99 99 99 99 ]
+ [ 99 99 99 99 99 99 99 ]
+
+Dataset after being written to:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
+Dataset after extension:
+ [ 0 -1 -2 -3 -4 -5 -6 99 99 99 ]
+ [ 0 0 0 0 0 0 0 99 99 99 ]
+ [ 0 1 2 3 4 5 6 99 99 99 ]
+ [ 0 2 4 6 8 10 12 99 99 99 ]
+ [ 99 99 99 99 99 99 99 99 99 99 ]
+ [ 99 99 99 99 99 99 99 99 99 99 ]
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Gzip.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Gzip.txt
new file mode 100644
index 0000000..255a561
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Gzip.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_DEFLATE
+
+Maximum value in DS1 is: 1890
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Hyperslab.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Hyperslab.txt
new file mode 100644
index 0000000..823dfcc
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Hyperslab.txt
@@ -0,0 +1,24 @@
+Original Data:
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+ [ 1 1 1 1 1 1 1 1 ]
+
+Data as written to disk by hyberslabs:
+ [ 0 1 0 0 1 0 0 1 ]
+ [ 1 1 0 1 1 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 1 0 0 1 0 0 1 ]
+ [ 1 1 0 1 1 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+
+Data as read from disk by hyberslab:
+ [ 0 1 0 0 0 0 0 1 ]
+ [ 0 1 0 1 0 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 0 0 0 0 0 0 0 ]
+ [ 0 1 0 1 0 0 1 1 ]
+ [ 0 0 0 0 0 0 0 0 ]
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Nbit.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Nbit.txt
new file mode 100644
index 0000000..a768ba0
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Nbit.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_NBIT
+
+Maximum value in DS1 is: 1890
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_ReadWrite.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_ReadWrite.txt
new file mode 100644
index 0000000..e021029
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_ReadWrite.txt
@@ -0,0 +1,6 @@
+DS1:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Shuffle.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Shuffle.txt
new file mode 100644
index 0000000..ea95f11
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Shuffle.txt
@@ -0,0 +1,5 @@
+Filter 0: Type is: H5Z_FILTER_SHUFFLE
+
+Filter 1: Type is: H5Z_FILTER_DEFLATE
+
+Maximum value in DS1 is: 1890
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Sofloat.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Sofloat.txt
new file mode 100644
index 0000000..9025ce2
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Sofloat.txt
@@ -0,0 +1,6 @@
+Maximum value in write buffer is: 106.666667
+Minimum value in write buffer is: 1.769231
+Filter type is: H5Z_FILTER_SCALEOFFSET
+
+Maximum value in DS1 is: 106.661698
+Minimum value in DS1 is: 1.769231
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Soint.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Soint.txt
new file mode 100644
index 0000000..48d0d8c
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Soint.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_SCALEOFFSET
+
+Maximum value in DS1 is: 1890
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Szip.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Szip.txt
new file mode 100644
index 0000000..a1c0d19
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Szip.txt
@@ -0,0 +1,3 @@
+Filter type is: H5Z_FILTER_SZIP
+
+Maximum value in DS1 is: 1890
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Transform.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Transform.txt
new file mode 100644
index 0000000..05257bc
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_Transform.txt
@@ -0,0 +1,15 @@
+Original Data:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+Data as written with transform 'x+1'
+ [ 1 0 -1 -2 -3 -4 -5 ]
+ [ 1 1 1 1 1 1 1 ]
+ [ 1 2 3 4 5 6 7 ]
+ [ 1 3 5 7 9 11 13 ]
+Data as written with transform 'x+1' and read with transform 'x-1'
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedAdd.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedAdd.txt
new file mode 100644
index 0000000..d3a7281
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedAdd.txt
@@ -0,0 +1,14 @@
+Dataset before extension:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
+Dataset after extension:
+ [ 0 -1 -2 -3 -4 -5 -6 7 8 9 ]
+ [ 0 0 0 0 0 0 0 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 2 4 6 8 10 12 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedGzip.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedGzip.txt
new file mode 100644
index 0000000..9e36281
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedGzip.txt
@@ -0,0 +1,16 @@
+Dataset before extension:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
+Filter type is: H5Z_FILTER_DEFLATE
+
+Dataset after extension:
+ [ 0 -1 -2 -3 -4 -5 -6 7 8 9 ]
+ [ 0 0 0 0 0 0 0 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 2 4 6 8 10 12 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+
diff --git a/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedMod.txt b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedMod.txt
new file mode 100644
index 0000000..15eee16
--- /dev/null
+++ b/HDF5Examples/JAVA/H5D/tfiles/110/H5Ex_D_UnlimitedMod.txt
@@ -0,0 +1,14 @@
+Dataset before extension:
+ [ 0 -1 -2 -3 -4 -5 -6 ]
+ [ 0 0 0 0 0 0 0 ]
+ [ 0 1 2 3 4 5 6 ]
+ [ 0 2 4 6 8 10 12 ]
+
+Dataset after extension:
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+ [ 0 1 2 3 4 5 6 7 8 9 ]
+
diff --git a/HDF5Examples/JAVA/H5G/110/H5Ex_G_Intermediate.java b/HDF5Examples/JAVA/H5G/110/H5Ex_G_Intermediate.java
new file mode 100644
index 0000000..cf6d1b4
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/110/H5Ex_G_Intermediate.java
@@ -0,0 +1,125 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create intermediate groups with
+ a single call to H5Gcreate.
+ ************************************************************/
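+/*
+ * For contrast, a sketch without the link creation property list: each
+ * level would need its own call, e.g. H5Gcreate(file_id, "/G1", ...),
+ * then "/G1/G2", then "/G1/G2/G3" (argument lists elided).
+ */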
+
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+public class H5Ex_G_Intermediate {
+
+ private static String FILE = "H5Ex_G_Intermediate.h5";
+
+ private void CreateGroup() throws Exception
+ {
+
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long gcpl_id = HDF5Constants.H5I_INVALID_HID;
+
+ try {
+ // Create a new file_id using the default properties.
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create group_id creation property list and set it to allow creation of intermediate group_ids.
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+ H5.H5Pset_create_intermediate_group(gcpl_id, true);
+
+ /*
+ * Create the group_id /G1/G2/G3. Note that /G1 and /G1/G2 do not exist yet. This call would cause
+ * an error if we did not use the previously created property list.
+ */
+ group_id = H5.H5Gcreate(file_id, "/G1/G2/G3", gcpl_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ // Print all the objects in the file_ids to show that intermediate group_ids have been created.
+ System.out.println("Objects in the file_id:");
+
+ H5O_iterate_t iter_data = new H5O_iter_data();
+ H5O_iterate_cb iter_cb = new H5O_iter_callback();
+
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb,
+ iter_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (gcpl_id >= 0)
+ H5.H5Pclose(gcpl_id);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ try {
+ (new H5Ex_G_Intermediate()).CreateGroup();
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ private class idata {
+ public String link_name = null;
+ public int link_type = -1;
+
+ idata(String name, int type)
+ {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+
+ private class H5O_iter_data implements H5O_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5O_iter_callback implements H5O_iterate_cb {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data)
+ {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data)op_data).iterdata.add(id);
+
+ System.out.print("/"); /* Print root group in object path */
+
+ // Check if the current object is the root group, and if not print the full path name and type.
+
+ if (name.charAt(0) == '.') /* Root group, do not print '.' */
+ System.out.println(" (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_GROUP)
+ System.out.println(name + " (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_DATASET)
+ System.out.println(name + " (Dataset)");
+ else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE)
+ System.out.println(name + " (Datatype)");
+ else
+ System.out.println(name + " (Unknown)");
+
+ return 0;
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5G/110/H5Ex_G_Iterate.java b/HDF5Examples/JAVA/H5G/110/H5Ex_G_Iterate.java
new file mode 100644
index 0000000..9359483
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/110/H5Ex_G_Iterate.java
@@ -0,0 +1,109 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to iterate over group members using
+ H5Gget_obj_info_all.
+ ************************************************************/
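+/*
+ * H5Gget_obj_info_all is a convenience routine of the HDF5 Java wrapper:
+ * a single call fills parallel arrays with each group member's name,
+ * object type, link type, and object reference, as used in do_iterate().
+ */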
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_G_Iterate {
+ private static String FILENAME = "groups/h5ex_g_iterate.h5";
+ private static String DATASETNAME = "/";
+
+ enum H5O_type {
+ H5O_TYPE_UNKNOWN(-1), // Unknown object type
+ H5O_TYPE_GROUP(0), // Object is a group
+ H5O_TYPE_DATASET(1), // Object is a dataset
+ H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
+ H5O_TYPE_NTYPES(3); // Number of different object types
+ private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
+
+ static
+ {
+ for (H5O_type s : EnumSet.allOf(H5O_type.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5O_type(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5O_type get(int code) { return lookup.get(code); }
+ }
+
+ private static void do_iterate()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Open a file using default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Begin iteration.
+ System.out.println("Objects in root group:");
+ try {
+ if (file_id >= 0) {
+ int count = (int)H5.H5Gn_members(file_id, DATASETNAME);
+ String[] oname = new String[count];
+ int[] otype = new int[count];
+ int[] ltype = new int[count];
+ long[] orefs = new long[count];
+ H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, orefs,
+ HDF5Constants.H5_INDEX_NAME);
+
+ // Get type of the object and display its name and type.
+ for (int indx = 0; indx < otype.length; indx++) {
+ switch (H5O_type.get(otype[indx])) {
+ case H5O_TYPE_GROUP:
+ System.out.println(" Group: " + oname[indx]);
+ break;
+ case H5O_TYPE_DATASET:
+ System.out.println(" Dataset: " + oname[indx]);
+ break;
+ case H5O_TYPE_NAMED_DATATYPE:
+ System.out.println(" Datatype: " + oname[indx]);
+ break;
+ default:
+ System.out.println(" Unknown: " + oname[indx]);
+ }
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_G_Iterate.do_iterate(); }
+}
diff --git a/HDF5Examples/JAVA/H5G/110/H5Ex_G_Traverse.java b/HDF5Examples/JAVA/H5G/110/H5Ex_G_Traverse.java
new file mode 100644
index 0000000..0ef0e39
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/110/H5Ex_G_Traverse.java
@@ -0,0 +1,166 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+This example shows a way to recursively traverse the file
+using H5Literate. The method shown here guarantees that
+the recursion will not enter an infinite loop, but does
+not prevent objects from being visited more than once.
+The program prints the directory structure of the file
+specified in FILE. The default file used by this example
+implements the structure described in the User Guide,
+chapter 4, figure 26.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.structs.H5L_info_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+import examples.groups.H5Ex_G_Iterate.H5O_type;
+
+class opdata implements H5L_iterate_t {
+ int recurs;
+ opdata prev;
+ long addr;
+}
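+
+/*
+ * Each opdata record links back to its parent's record through "prev", so
+ * the chain holds the address of every group on the current recursion path;
+ * group_check() walks this chain to detect cycles.
+ */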
+
+public class H5Ex_G_Traverse {
+
+ private static String FILE = "h5ex_g_traverse.h5";
+ public static H5L_iterate_cb iter_cb = new H5L_iter_callbackT();
+
+ private static void OpenGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ H5O_info_t infobuf;
+ opdata od = new opdata();
+
+ // Open file and initialize the operator data structure.
+ try {
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ if (file_id >= 0) {
+ infobuf = H5.H5Oget_info(file_id);
+ od.recurs = 0;
+ od.prev = null;
+ od.addr = infobuf.addr;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Print the root group and formatting, begin iteration.
+ try {
+ System.out.println("/ {");
+ H5.H5Literate(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb,
+ od);
+ System.out.println("}");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_G_Traverse.OpenGroup(); }
+}
+
+class H5L_iter_callbackT implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data)
+ {
+
+ H5O_info_t infobuf;
+ int return_val = 0;
+ opdata od = (opdata)op_data; // Type conversion
+ int spaces = 2 * (od.recurs + 1); // Number of white spaces to prepend to output.
+
+ // Get type of the object and display its name and type.
+ // The name of the object is passed to this function by the Library.
+ try {
+ infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
+
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" "); // Format output.
+ switch (H5O_type.get(infobuf.type)) {
+ case H5O_TYPE_GROUP:
+ System.out.println("Group: " + name + " { ");
+ // Check group address against linked list of operator
+ // data structures. We will always run the check, as the
+ // reference count cannot be relied upon if there are
+ // symbolic links, and H5Oget_info_by_name always follows
+ // symbolic links. Alternatively we could use H5Lget_info
+ // and never recurse on groups discovered by symbolic
+ // links, however it could still fail if an object's
+ // reference count was manually manipulated with
+ // H5Odecr_refcount.
+ if (group_check(od, infobuf.addr)) {
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" ");
+ System.out.println(" Warning: Loop detected!");
+ }
+ else {
+ // Initialize new object of type opdata and begin
+ // recursive iteration on the discovered
+ // group. The new opdata is given a pointer to the
+ // current one.
+ opdata nextod = new opdata();
+ nextod.recurs = od.recurs + 1;
+ nextod.prev = od;
+ nextod.addr = infobuf.addr;
+ H5L_iterate_cb iter_cb2 = new H5L_iter_callbackT();
+ return_val = H5.H5Literate_by_name(group, name, HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb2, nextod,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" ");
+ System.out.println("}");
+ break;
+ case H5O_TYPE_DATASET:
+ System.out.println("Dataset: " + name);
+ break;
+ case H5O_TYPE_NAMED_DATATYPE:
+ System.out.println("Datatype: " + name);
+ break;
+ default:
+ System.out.println("Unknown: " + name);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ return return_val;
+ }
+
+ public boolean group_check(opdata od, long target_addr)
+ {
+ if (od.addr == target_addr)
+ return true; // Addresses match
+ else if (od.recurs == 0)
+ return false; // Root group reached with no matches
+ else
+ return group_check(od.prev, target_addr); // Recursively examine the next node
+ }
+}
diff --git a/HDF5Examples/JAVA/H5G/110/H5Ex_G_Visit.java b/HDF5Examples/JAVA/H5G/110/H5Ex_G_Visit.java
new file mode 100644
index 0000000..8e81ff3
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/110/H5Ex_G_Visit.java
@@ -0,0 +1,153 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to recursively traverse a file
+ using H5Ovisit and H5Lvisit. The program prints all of
+ the objects in the file specified in FILE, then prints all
+ of the links in that file. The default file used by this
+ example implements the structure described in the User
+ Guide, chapter 4, figure 26.
+ ************************************************************/
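+/*
+ * H5Ovisit reports each object exactly once, however many links point to
+ * it, while H5Lvisit reports every link; an object reachable through
+ * several links can therefore appear more than once in the second listing.
+ */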
+
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5L_iterate_cb;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.callbacks.H5O_iterate_cb;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.structs.H5L_info_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+public class H5Ex_G_Visit {
+
+ private static String FILE = "groups/h5ex_g_visit.h5";
+
+ public static void main(String[] args)
+ {
+ try {
+ (new H5Ex_G_Visit()).VisitGroup();
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ private void VisitGroup() throws Exception
+ {
+
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+
+ try {
+ // Open file
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+
+ // Begin iteration using H5Ovisit
+ System.out.println("Objects in the file:");
+ H5O_iterate_t iter_data = new H5O_iter_data();
+ H5O_iterate_cb iter_cb = new H5O_iter_callback();
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb,
+ iter_data);
+ System.out.println();
+ // Repeat the same process using H5Lvisit
+ H5L_iterate_t iter_data2 = new H5L_iter_data();
+ H5L_iterate_cb iter_cb2 = new H5L_iter_callback();
+ System.out.println("Links in the file:");
+ H5.H5Lvisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb2,
+ iter_data2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ /************************************************************
+ * Operator function for H5Lvisit. This function simply retrieves the info
+ * for the object the current link points to, and calls the operator
+ * function for H5Ovisit.
+ ************************************************************/
+
+ private class idata {
+ public String link_name = null;
+ public int link_type = -1;
+
+ idata(String name, int type)
+ {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+
+ private class H5L_iter_data implements H5L_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5L_iter_callback implements H5L_iterate_cb {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_t op_data)
+ {
+
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data)op_data).iterdata.add(id);
+
+ H5O_info_t infobuf;
+ int ret = 0;
+ try {
+ // Get type of the object and display its name and type. The name of the object is passed to
+ // this function by the Library.
+ infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
+ H5O_iterate_cb iter_cbO = new H5O_iter_callback();
+ H5O_iterate_t iter_dataO = new H5O_iter_data();
+ ret = iter_cbO.callback(group, name, infobuf, iter_dataO);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ return ret;
+ }
+ }
+
+ private class H5O_iter_data implements H5O_iterate_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5O_iter_callback implements H5O_iterate_cb {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_t op_data)
+ {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data)op_data).iterdata.add(id);
+
+ System.out.print("/"); /* Print root group in object path */
+
+ // Check if the current object is the root group, and if not print the full path name and type.
+
+ if (name.charAt(0) == '.') /* Root group, do not print '.' */
+ System.out.println(" (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_GROUP)
+ System.out.println(name + " (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_DATASET)
+ System.out.println(name + " (Dataset)");
+ else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE)
+ System.out.println(name + " (Datatype)");
+ else
+ System.out.println(name + " (Unknown)");
+
+ return 0;
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5G/110/h5ex_g_iterate.h5 b/HDF5Examples/JAVA/H5G/110/h5ex_g_iterate.h5
new file mode 100644
index 0000000..e462703
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/110/h5ex_g_iterate.h5
Binary files differ
diff --git a/HDF5Examples/JAVA/H5G/110/h5ex_g_visit.h5 b/HDF5Examples/JAVA/H5G/110/h5ex_g_visit.h5
new file mode 100644
index 0000000..d8267b1
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/110/h5ex_g_visit.h5
Binary files differ
diff --git a/HDF5Examples/JAVA/H5G/CMakeLists.txt b/HDF5Examples/JAVA/H5G/CMakeLists.txt
new file mode 100644
index 0000000..5f47d59
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/CMakeLists.txt
@@ -0,0 +1,92 @@
+cmake_minimum_required (VERSION 3.18)
+project (HDF5Examples_JAVA_GROUPS Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDFJAVA_LIB_DIR}
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH2}
+)
+
+#-----------------------------------------------------------------------------
+# Define Sources
+#-----------------------------------------------------------------------------
+include (Java_sourcefiles.cmake)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else ()
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif ()
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach ()
+
+foreach (HCP_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ get_filename_component (_HCP_FILE ${HCP_JAR} NAME)
+ set (HDFJAVA_CLASSJARS "${_HCP_FILE} ${HDFJAVA_CLASSJARS}")
+endforeach ()
+
+foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+ "Main-Class: ${example_name}
+Class-Path: ${HDFJAVA_CLASSJARS}
+"
+ )
+ add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+ get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+endforeach ()
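+
+# Each example above is packaged as its own jar with a Main-Class manifest
+# entry so the jrunTest.cmake launcher used below can run it directly.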
+
+if (H5EX_BUILD_TESTING)
+ macro (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+ -D "TEST_PROGRAM=${resultfile}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
+ -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
+ -D "TEST_OUTPUT=${PROJECT_BINARY_DIR}/${resultfile}.out"
+ -D "TEST_REFERENCE=${resultfile}.txt"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_SKIP_COMPARE=TRUE"
+ -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif ()
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${resultfile}")
+ endmacro ()
+
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${PROJECT_BINARY_DIR}/${example_name}.h5
+ ${example_name}.out
+ ${example_name}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif ()
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}.txt
+ ${PROJECT_BINARY_DIR}/${example_name}.txt
+ )
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects)
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects")
+ ADD_H5_TEST (${example_name} 0)
+ endforeach ()
+
+endif ()
diff --git a/HDF5Examples/JAVA/H5G/H5Ex_G_Compact.java b/HDF5Examples/JAVA/H5G/H5Ex_G_Compact.java
new file mode 100644
index 0000000..865040e
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/H5Ex_G_Compact.java
@@ -0,0 +1,259 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating a file and printing the group storage layout.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+public class H5Ex_G_Compact {
+
+ private static final String FILE1 = "H5Ex_G_Compact1.h5";
+ private static final String FILE2 = "H5Ex_G_Compact2.h5";
+ private static final String GROUP = "G1";
+
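+    // Maps the native storage-type codes reported in H5G_info_t.storage_type
+    // to readable constants.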
+ enum H5G_storage {
+ H5G_STORAGE_TYPE_UNKNOWN(-1),
+ H5G_STORAGE_TYPE_SYMBOL_TABLE(0),
+ H5G_STORAGE_TYPE_COMPACT(1),
+ H5G_STORAGE_TYPE_DENSE(2);
+
+ private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
+
+ static
+ {
+ for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_storage(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5G_storage get(int code) { return lookup.get(code); }
+ }
+
+ public static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long fapl_id = HDF5Constants.H5I_INVALID_HID;
+ H5G_info_t ginfo;
+ long size;
+
+ // Create file 1. This file will use original format groups.
+ try {
+ file_id = H5.H5Fcreate(FILE1, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+        // Create a group in file 1.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type.
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print("Group storage type for " + FILE1 + " is: ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close file 1.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Re-open file 1 to get the correct file size.
+ try {
+ file_id = H5.H5Fopen(FILE1, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain and print the file size.
+ try {
+ if (file_id >= 0) {
+ size = H5.H5Fget_filesize(file_id);
+ System.out.println("File size for " + FILE1 + " is: " + size + " bytes");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close FILE1.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Set file access property list to allow the latest file format.
+ // This will allow the library to create new compact format groups.
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ if (fapl_id >= 0)
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST,
+ HDF5Constants.H5F_LIBVER_LATEST);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ System.out.println();
+ // Create file 2 using the new file access property list.
+ try {
+ file_id = H5.H5Fcreate(FILE2, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+        // Create a group in file 2.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUP, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type.
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print("Group storage type for " + FILE2 + " is: ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Close file 2.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Re-open file 2 to get the correct file size.
+ try {
+ file_id = H5.H5Fopen(FILE2, HDF5Constants.H5F_ACC_RDONLY, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain and print the file size.
+ try {
+ if (file_id >= 0) {
+ size = H5.H5Fget_filesize(file_id);
+ System.out.println("File size for " + FILE2 + " is: " + size + " bytes");
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close FILE2.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_G_Compact.CreateGroup(); }
+}
diff --git a/HDF5Examples/JAVA/H5G/H5Ex_G_Corder.java b/HDF5Examples/JAVA/H5G/H5Ex_G_Corder.java
new file mode 100644
index 0000000..23d1840
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/H5Ex_G_Corder.java
@@ -0,0 +1,116 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+/************************************************************
+ Creating a file with link creation order tracking and traversing
+ the groups in alphabetical and creation order.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+public class H5Ex_G_Corder {
+ private static String FILE = "H5Ex_G_Corder.h5";
+
+ private static void CreateGroup() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long subgroup_id = HDF5Constants.H5I_INVALID_HID;
+ long gcpl_id = HDF5Constants.H5I_INVALID_HID;
+ int status;
+ H5G_info_t ginfo;
+ int i;
+ String name;
+
+ try {
+ // Create a new file using default properties.
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+ // Create group creation property list and enable link creation order tracking.
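+            // H5P_CRT_ORDER_TRACKED records link creation order in the group;
+            // H5P_CRT_ORDER_INDEXED additionally builds an index on it so links
+            // can be looked up by creation order (INDEXED requires TRACKED).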
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+            status = H5.H5Pset_link_creation_order(gcpl_id, HDF5Constants.H5P_CRT_ORDER_TRACKED |
+                                                                HDF5Constants.H5P_CRT_ORDER_INDEXED);
+
+ // Create primary group using the property list.
+ if (status >= 0)
+ group_id = H5.H5Gcreate(file_id, "index_group", HDF5Constants.H5P_DEFAULT, gcpl_id,
+ HDF5Constants.H5P_DEFAULT);
+
+ try {
+ /*
+ * Create subgroups in the primary group. These will be tracked by creation order. Note that
+ * these groups do not have to have the creation order tracking property set.
+ */
+ subgroup_id = H5.H5Gcreate(group_id, "H", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "D", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "F", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+ subgroup_id = H5.H5Gcreate(group_id, "5", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ status = H5.H5Gclose(subgroup_id);
+
+ // Get group info.
+ ginfo = H5.H5Gget_info(group_id);
+
+ // Traverse links in the primary group using alphabetical indices (H5_INDEX_NAME).
+ System.out.println("Traversing group using alphabetical indices:");
+ for (i = 0; i < ginfo.nlinks; i++) {
+ // Retrieve the name of the ith link in a group
+ name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT);
+ System.out.println("Index " + i + ": " + name);
+ }
+
+ // Traverse links in the primary group by creation order (H5_INDEX_CRT_ORDER).
+ System.out.println("Traversing group using creation order indices:");
+ for (i = 0; i < ginfo.nlinks; i++) {
+ // Retrieve the name of the ith link in a group
+ name = H5.H5Lget_name_by_idx(group_id, ".", HDF5Constants.H5_INDEX_CRT_ORDER,
+ HDF5Constants.H5_ITER_INC, i, HDF5Constants.H5P_DEFAULT);
+ System.out.println("Index " + i + ": " + name);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (gcpl_id >= 0)
+ H5.H5Pclose(gcpl_id);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ try {
+ H5Ex_G_Corder.CreateGroup();
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5G/H5Ex_G_Create.java b/HDF5Examples/JAVA/H5G/H5Ex_G_Create.java
new file mode 100644
index 0000000..b842af6
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/H5Ex_G_Create.java
@@ -0,0 +1,86 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create, open, and close a group.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_G_Create {
+ private static String FILENAME = "H5Ex_G_Create.h5";
+ private static String GROUPNAME = "G1";
+
+ private static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group. The handle "group" can no longer be used.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Re-open the group, obtaining a new handle.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gopen(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_G_Create.CreateGroup(); }
+}
diff --git a/HDF5Examples/JAVA/H5G/H5Ex_G_Intermediate.java b/HDF5Examples/JAVA/H5G/H5Ex_G_Intermediate.java
new file mode 100644
index 0000000..5461725
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/H5Ex_G_Intermediate.java
@@ -0,0 +1,125 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create intermediate groups with
+ a single call to H5Gcreate.
+ ************************************************************/
+
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5O_iterate_opdata_t;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+public class H5Ex_G_Intermediate {
+
+ private static String FILE = "H5Ex_G_Intermediate.h5";
+
+ private void CreateGroup() throws Exception
+ {
+
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long gcpl_id = HDF5Constants.H5I_INVALID_HID;
+
+ try {
+            // Create a new file using the default properties.
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+
+            // Create a link creation property list and set it to allow creation of intermediate groups.
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_LINK_CREATE);
+ H5.H5Pset_create_intermediate_group(gcpl_id, true);
+
+ /*
+             * Create the group /G1/G2/G3. Note that /G1 and /G1/G2 do not exist yet. This call would cause
+ * an error if we did not use the previously created property list.
+ */
+ group_id = H5.H5Gcreate(file_id, "/G1/G2/G3", gcpl_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+            // Print all the objects in the file to show that intermediate groups have been created.
+ System.out.println("Objects in the file_id:");
+
+            H5O_iterate_opdata_t iter_data = new H5O_iter_data();
+ H5O_iterate_t iter_cb = new H5O_iter_callback();
+
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb,
+ iter_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (gcpl_id >= 0)
+ H5.H5Pclose(gcpl_id);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ try {
+ (new H5Ex_G_Intermediate()).CreateGroup();
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ private class idata {
+ public String link_name = null;
+ public int link_type = -1;
+
+ idata(String name, int type)
+ {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+
+ private class H5O_iter_data implements H5O_iterate_opdata_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5O_iter_callback implements H5O_iterate_t {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata_t op_data)
+ {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data)op_data).iterdata.add(id);
+
+ System.out.print("/"); /* Print root group in object path */
+
+ // Check if the current object is the root group, and if not print the full path name and type.
+
+ if (name.charAt(0) == '.') /* Root group, do not print '.' */
+ System.out.println(" (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_GROUP)
+ System.out.println(name + " (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_DATASET)
+ System.out.println(name + " (Dataset)");
+ else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE)
+ System.out.println(name + " (Datatype)");
+ else
+ System.out.println(name + " (Unknown)");
+
+ return 0;
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5G/H5Ex_G_Iterate.java b/HDF5Examples/JAVA/H5G/H5Ex_G_Iterate.java
new file mode 100644
index 0000000..86d9eb4
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/H5Ex_G_Iterate.java
@@ -0,0 +1,110 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to iterate over group members using
+ H5Gget_obj_info_all.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5O_token_t;
+
+public class H5Ex_G_Iterate {
+ private static String FILENAME = "groups/h5ex_g_iterate.h5";
+ private static String DATASETNAME = "/";
+
+ enum H5O_type {
+ H5O_TYPE_UNKNOWN(-1), // Unknown object type
+ H5O_TYPE_GROUP(0), // Object is a group
+ H5O_TYPE_DATASET(1), // Object is a dataset
+ H5O_TYPE_NAMED_DATATYPE(2), // Object is a named data type
+ H5O_TYPE_NTYPES(3); // Number of different object types
+ private static final Map<Integer, H5O_type> lookup = new HashMap<Integer, H5O_type>();
+
+ static
+ {
+ for (H5O_type s : EnumSet.allOf(H5O_type.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5O_type(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5O_type get(int code) { return lookup.get(code); }
+ }
+
+ private static void do_iterate()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Open a file using default properties.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Begin iteration.
+ System.out.println("Objects in root group:");
+ try {
+ if (file_id >= 0) {
+ int count = (int)H5.H5Gn_members(file_id, DATASETNAME);
+ String[] oname = new String[count];
+ int[] otype = new int[count];
+ int[] ltype = new int[count];
+ H5O_token_t[] otokens = new H5O_token_t[count];
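+                // H5Gget_obj_info_all fills these parallel arrays with each
+                // member's name, object type, link type, and object token.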
+ H5.H5Gget_obj_info_all(file_id, DATASETNAME, oname, otype, ltype, otokens,
+ HDF5Constants.H5_INDEX_NAME);
+
+ // Get type of the object and display its name and type.
+ for (int indx = 0; indx < otype.length; indx++) {
+ switch (H5O_type.get(otype[indx])) {
+ case H5O_TYPE_GROUP:
+ System.out.println(" Group: " + oname[indx]);
+ break;
+ case H5O_TYPE_DATASET:
+ System.out.println(" Dataset: " + oname[indx]);
+ break;
+ case H5O_TYPE_NAMED_DATATYPE:
+ System.out.println(" Datatype: " + oname[indx]);
+ break;
+ default:
+ System.out.println(" Unknown: " + oname[indx]);
+ }
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_G_Iterate.do_iterate(); }
+}
diff --git a/HDF5Examples/JAVA/H5G/H5Ex_G_Phase.java b/HDF5Examples/JAVA/H5G/H5Ex_G_Phase.java
new file mode 100644
index 0000000..b21fc09
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/H5Ex_G_Phase.java
@@ -0,0 +1,234 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to set the conditions for
+ conversion between compact and dense (indexed) groups.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+public class H5Ex_G_Phase {
+ private static String FILE = "H5Ex_G_Phase.h5";
+ private static int MAX_GROUPS = 7;
+ private static int MAX_COMPACT = 5;
+ private static int MIN_DENSE = 3;
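+    // Phase-change thresholds: a group's link storage converts from compact
+    // to dense when it grows above MAX_COMPACT links, and back to compact
+    // when it shrinks below MIN_DENSE.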
+
+ enum H5G_storage {
+ H5G_STORAGE_TYPE_UNKNOWN(-1),
+ H5G_STORAGE_TYPE_SYMBOL_TABLE(0),
+ H5G_STORAGE_TYPE_COMPACT(1),
+ H5G_STORAGE_TYPE_DENSE(2);
+
+ private static final Map<Integer, H5G_storage> lookup = new HashMap<Integer, H5G_storage>();
+
+ static
+ {
+ for (H5G_storage s : EnumSet.allOf(H5G_storage.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_storage(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5G_storage get(int code) { return lookup.get(code); }
+ }
+
+ private static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long subgroup_id = HDF5Constants.H5I_INVALID_HID;
+ long fapl_id = HDF5Constants.H5I_INVALID_HID;
+ long gcpl_id = HDF5Constants.H5I_INVALID_HID;
+ H5G_info_t ginfo;
+        String name = "G0"; // Name of the subgroup
+ int i;
+
+        // Set file access property list to allow the latest file format. This will allow the library to
+        // create new format groups.
+ try {
+ fapl_id = H5.H5Pcreate(HDF5Constants.H5P_FILE_ACCESS);
+ if (fapl_id >= 0)
+ H5.H5Pset_libver_bounds(fapl_id, HDF5Constants.H5F_LIBVER_LATEST,
+ HDF5Constants.H5F_LIBVER_LATEST);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create group access property list and set the phase change conditions.
+ try {
+ gcpl_id = H5.H5Pcreate(HDF5Constants.H5P_GROUP_CREATE);
+ if (gcpl_id >= 0)
+ H5.H5Pset_link_phase_change(gcpl_id, MAX_COMPACT, MIN_DENSE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a new file using the default properties.
+ try {
+ if (fapl_id >= 0)
+ file_id = H5.H5Fcreate(FILE, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT, fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create primary group.
+ try {
+ if ((file_id >= 0) && (gcpl_id >= 0))
+ group_id = H5.H5Gcreate(file_id, name, HDF5Constants.H5P_DEFAULT, gcpl_id,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Add subgroups to "group" one at a time, printing the storage type for "group" after each
+        // subgroup is created.
+ for (i = 1; i <= MAX_GROUPS; i++) {
+ // Define the subgroup name and create the subgroup.
+ char append = (char)(((char)i) + '0');
+            name = name + append; /* G01, G012, G0123, etc. */
+ try {
+ if (group_id >= 0) {
+ subgroup_id = H5.H5Gcreate(group_id, name, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Gclose(subgroup_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Obtain the group info and print the group storage type
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") +
+ ": Storage type is ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ System.out.println();
+
+        // Delete subgroups one at a time, printing the storage type for "group" after each subgroup is deleted.
+ for (i = MAX_GROUPS; i >= 1; i--) {
+ // Define the subgroup name and delete the subgroup.
+ try {
+ H5.H5Ldelete(group_id, name, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
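+            // Trim the last appended character so "name" matches the next,
+            // shorter subgroup link to delete (e.g. G01234567 -> G0123456).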
+ name = name.substring(0, i + 1);
+
+ // Obtain the group info and print the group storage type
+ try {
+ if (group_id >= 0) {
+ ginfo = H5.H5Gget_info(group_id);
+ System.out.print(ginfo.nlinks + " Group" + (ginfo.nlinks == 1 ? " " : "s") +
+ ": Storage type is ");
+ switch (H5G_storage.get(ginfo.storage_type)) {
+ case H5G_STORAGE_TYPE_COMPACT:
+ System.out.println("H5G_STORAGE_TYPE_COMPACT"); // New compact format
+ break;
+ case H5G_STORAGE_TYPE_DENSE:
+ System.out.println("H5G_STORAGE_TYPE_DENSE"); // New dense (indexed) format
+ break;
+ case H5G_STORAGE_TYPE_SYMBOL_TABLE:
+ System.out.println("H5G_STORAGE_TYPE_SYMBOL_TABLE"); // Original format
+ break;
+ case H5G_STORAGE_TYPE_UNKNOWN:
+ System.out.println("H5G_STORAGE_TYPE_UNKNOWN");
+ break;
+ default:
+ System.out.println("Storage Type Invalid");
+ break;
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // Close and release resources
+ try {
+ if (fapl_id >= 0)
+ H5.H5Pclose(fapl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (gcpl_id >= 0)
+ H5.H5Pclose(gcpl_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_G_Phase.CreateGroup(); }
+}
diff --git a/HDF5Examples/JAVA/H5G/H5Ex_G_Traverse.java b/HDF5Examples/JAVA/H5G/H5Ex_G_Traverse.java
new file mode 100644
index 0000000..ba1955b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/H5Ex_G_Traverse.java
@@ -0,0 +1,166 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+This example shows a way to recursively traverse the file
+using H5Literate. The method shown here guarantees that
+the recursion will not enter an infinite loop, but does
+not prevent objects from being visited more than once.
+The program prints the directory structure of the file
+specified in FILE. The default file used by this example
+implements the structure described in the User's Guide,
+chapter 4, figure 26.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5L_iterate_opdata_t;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.structs.H5L_info_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+import hdf.hdf5lib.structs.H5O_token_t;
+
+import examples.groups.H5Ex_G_Iterate.H5O_type;
+
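+// Operator data for the link iteration: each level links back to its parent,
+// forming a chain the callback walks to detect cycles by object token.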
+class opdata implements H5L_iterate_opdata_t {
+ int recurs;
+ opdata prev;
+ H5O_token_t obj_token;
+}
+
+public class H5Ex_G_Traverse {
+
+ private static String FILE = "h5ex_g_traverse.h5";
+ public static H5L_iterate_t iter_cb = new H5L_iter_callbackT();
+
+ private static void OpenGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ H5O_info_t infobuf;
+ opdata od = new opdata();
+
+ // Open file and initialize the operator data structure.
+ try {
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ if (file_id >= 0) {
+ infobuf = H5.H5Oget_info(file_id);
+ od.recurs = 0;
+ od.prev = null;
+ od.obj_token = infobuf.token;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Print the root group, then begin iteration.
+ try {
+ System.out.println("/ {");
+ H5.H5Literate(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb,
+ od);
+ System.out.println("}");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close and release resources.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { H5Ex_G_Traverse.OpenGroup(); }
+}
+
+class H5L_iter_callbackT implements H5L_iterate_t {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_opdata_t op_data)
+ {
+
+ H5O_info_t infobuf;
+ int return_val = 0;
+ opdata od = (opdata)op_data; // Type conversion
+ int spaces = 2 * (od.recurs + 1); // Number of white spaces to prepend to output.
+
+ // Get type of the object and display its name and type.
+ // The name of the object is passed to this function by the Library.
+ try {
+ infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
+
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" "); // Format output.
+ switch (H5O_type.get(infobuf.type)) {
+ case H5O_TYPE_GROUP:
+ System.out.println("Group: " + name + " { ");
+ // Check group object token against linked list of operator
+ // data structures. We will always run the check, as the
+ // reference count cannot be relied upon if there are
+ // symbolic links, and H5Oget_info_by_name always follows
+ // symbolic links. Alternatively we could use H5Lget_info
+ // and never recurse on groups discovered by symbolic
+ // links, however it could still fail if an object's
+ // reference count was manually manipulated with
+ // H5Odecr_refcount.
+ if (group_check(od, infobuf.token)) {
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" ");
+ System.out.println(" Warning: Loop detected!");
+ }
+ else {
+ // Initialize new object of type opdata and begin
+ // recursive iteration on the discovered
+ // group. The new opdata is given a pointer to the
+ // current one.
+ opdata nextod = new opdata();
+ nextod.recurs = od.recurs + 1;
+ nextod.prev = od;
+ nextod.obj_token = infobuf.token;
+ H5L_iterate_t iter_cb2 = new H5L_iter_callbackT();
+ return_val = H5.H5Literate_by_name(group, name, HDF5Constants.H5_INDEX_NAME,
+ HDF5Constants.H5_ITER_NATIVE, 0L, iter_cb2, nextod,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ for (int i = 0; i < spaces; i++)
+ System.out.print(" ");
+ System.out.println("}");
+ break;
+ case H5O_TYPE_DATASET:
+ System.out.println("Dataset: " + name);
+ break;
+ case H5O_TYPE_NAMED_DATATYPE:
+ System.out.println("Datatype: " + name);
+ break;
+ default:
+ System.out.println("Unknown: " + name);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ return return_val;
+ }
+
+ public boolean group_check(opdata od, H5O_token_t target_token)
+ {
+ if (od.obj_token.equals(target_token))
+ return true; // Object tokens match
+ else if (od.recurs == 0)
+ return false; // Root group reached with no matches
+ else
+ return group_check(od.prev, target_token); // Recursively examine the next node
+ }
+}
diff --git a/HDF5Examples/JAVA/H5G/H5Ex_G_Visit.java b/HDF5Examples/JAVA/H5G/H5Ex_G_Visit.java
new file mode 100644
index 0000000..7d37c59
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/H5Ex_G_Visit.java
@@ -0,0 +1,153 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to recursively traverse a file
+ using H5Ovisit and H5Lvisit. The program prints all of
+ the objects in the file specified in FILE, then prints all
+ of the links in that file. The default file used by this
+ example implements the structure described in the User
+ Guide, chapter 4, figure 26.
+ ************************************************************/
+
+import java.util.ArrayList;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.callbacks.H5L_iterate_opdata_t;
+import hdf.hdf5lib.callbacks.H5L_iterate_t;
+import hdf.hdf5lib.callbacks.H5O_iterate_opdata_t;
+import hdf.hdf5lib.callbacks.H5O_iterate_t;
+import hdf.hdf5lib.structs.H5L_info_t;
+import hdf.hdf5lib.structs.H5O_info_t;
+
+public class H5Ex_G_Visit {
+
+ private static String FILE = "groups/h5ex_g_visit.h5";
+
+ public static void main(String[] args)
+ {
+ try {
+ (new H5Ex_G_Visit()).VisitGroup();
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ private void VisitGroup() throws Exception
+ {
+
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+
+ try {
+ // Open file
+ file_id = H5.H5Fopen(FILE, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+
+ // Begin iteration using H5Ovisit
+ System.out.println("Objects in the file:");
+ H5O_iterate_opdata_t iter_data = new H5O_iter_data();
+ H5O_iterate_t iter_cb = new H5O_iter_callback();
+ H5.H5Ovisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb,
+ iter_data);
+ System.out.println();
+ // Repeat the same process using H5Lvisit
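+            // Unlike H5Ovisit, which reports each object exactly once,
+            // H5Lvisit reports every link, so the two listings can differ.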
+ H5L_iterate_opdata_t iter_data2 = new H5L_iter_data();
+ H5L_iterate_t iter_cb2 = new H5L_iter_callback();
+ System.out.println("Links in the file:");
+ H5.H5Lvisit(file_id, HDF5Constants.H5_INDEX_NAME, HDF5Constants.H5_ITER_NATIVE, iter_cb2,
+ iter_data2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ // Close and release resources.
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ }
+
+ /************************************************************
+     * Operator function for H5Lvisit. This function simply retrieves the info for the object the
+     * current link points to, and calls the operator function for H5Ovisit.
+ ************************************************************/
+
+ private class idata {
+ public String link_name = null;
+ public int link_type = -1;
+
+ idata(String name, int type)
+ {
+ this.link_name = name;
+ this.link_type = type;
+ }
+ }
+
+ private class H5L_iter_data implements H5L_iterate_opdata_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5L_iter_callback implements H5L_iterate_t {
+ public int callback(long group, String name, H5L_info_t info, H5L_iterate_opdata_t op_data)
+ {
+
+ idata id = new idata(name, info.type);
+ ((H5L_iter_data)op_data).iterdata.add(id);
+
+ H5O_info_t infobuf;
+ int ret = 0;
+ try {
+ // Get type of the object and display its name and type. The name of the object is passed to
+ // this function by the Library.
+ infobuf = H5.H5Oget_info_by_name(group, name, HDF5Constants.H5P_DEFAULT);
+ H5O_iterate_t iter_cbO = new H5O_iter_callback();
+ H5O_iterate_opdata_t iter_dataO = new H5O_iter_data();
+ ret = iter_cbO.callback(group, name, infobuf, iter_dataO);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ return ret;
+ }
+ }
+
+ private class H5O_iter_data implements H5O_iterate_opdata_t {
+ public ArrayList<idata> iterdata = new ArrayList<idata>();
+ }
+
+ private class H5O_iter_callback implements H5O_iterate_t {
+ public int callback(long group, String name, H5O_info_t info, H5O_iterate_opdata_t op_data)
+ {
+ idata id = new idata(name, info.type);
+ ((H5O_iter_data)op_data).iterdata.add(id);
+
+ System.out.print("/"); /* Print root group in object path */
+
+ // Check if the current object is the root group, and if not print the full path name and type.
+
+ if (name.charAt(0) == '.') /* Root group, do not print '.' */
+ System.out.println(" (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_GROUP)
+ System.out.println(name + " (Group)");
+ else if (info.type == HDF5Constants.H5O_TYPE_DATASET)
+ System.out.println(name + " (Dataset)");
+ else if (info.type == HDF5Constants.H5O_TYPE_NAMED_DATATYPE)
+ System.out.println(name + " (Datatype)");
+ else
+ System.out.println(name + " (Unknown)");
+
+ return 0;
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5G/JavaGroupExample.sh.in b/HDF5Examples/JAVA/H5G/JavaGroupExample.sh.in
new file mode 100644
index 0000000..933f35e
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/JavaGroupExample.sh.in
@@ -0,0 +1,377 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+IS_DARWIN="@H5_IS_DARWIN@"
+
+TESTNAME=EX_Groups
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+BASENAME='basename'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDDIR="."
+BLDLIBDIR="$BLDDIR/testlibs"
+BLDITERDIR="./groups"
+HDFTEST_HOME="$top_srcdir/java/examples/groups"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@groups.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+test -d $BLDITERDIR || mkdir -p $BLDITERDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test directory.
+# NOTE: Keep this framework to add/remove test files.
+#       This list is also used to check that the files exist.
+#       Lines starting with '#' (no leading space) are treated as comments.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_ITER_FILES="
+$HDFTEST_HOME/h5ex_g_iterate.h5
+$HDFTEST_HOME/h5ex_g_visit.h5
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../tfiles/examples.groups.H5Ex_G_Create.txt
+$HDFTEST_HOME/../tfiles/examples.groups.H5Ex_G_Iterate.txt
+$HDFTEST_HOME/../tfiles/examples.groups.H5Ex_G_Compact.txt
+$HDFTEST_HOME/../tfiles/examples.groups.H5Ex_G_Corder.txt
+$HDFTEST_HOME/../tfiles/examples.groups.H5Ex_G_Intermediate.txt
+$HDFTEST_HOME/../tfiles/examples.groups.H5Ex_G_Phase.txt
+$HDFTEST_HOME/../tfiles/examples.groups.H5Ex_G_Visit.txt
+"
+
+#
+# Copy files from the source directories to the test directory
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+COPY_JARTESTFILES="$LIST_JAR_TESTFILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+    # Copy test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+       # Skip cp if srcdir is the same as destdir; this occurs when the
+       # build/test is performed in the source directory and would make
+       # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -fR $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+            # Comment this out to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ BNAME=`$BASENAME $tstfile`
+ if [ "$BNAME" = "libhdf5_java.dylib" ]; then
+ COPIED_LIBHDF5_JAVA=1
+ fi
+ fi
+ fi
+ done
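+    # On macOS, fix up the JNI library's install names so libhdf5_java.dylib
+    # resolves libhdf5.dylib via @rpath from the test library directory.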
+ if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then
+ (cd $BLDLIBDIR; \
+ install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \
+ exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \
+ echo $exist_path; \
+ install_name_tool -change $exist_path @rpath/libhdf5.dylib libhdf5_java.dylib)
+ fi
+    # Copy jar files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_JARTESTFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+       # Skip cp if srcdir is the same as destdir; this occurs when the
+       # build/test is performed in the source directory and would make
+       # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -fR $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+            # Comment this out to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+    # Skip rm if srcdir is the same as destdir; this occurs when the
+    # build/test is performed in the source directory, where removing
+    # would delete the source files.
+ SDIR=$HDFLIB_HOME
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM -rf $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+    # Copy test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+       # Skip cp if srcdir is the same as destdir; this occurs when the
+       # build/test is performed in the source directory and would make
+       # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+            # Comment this out to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/examples.groups.H5Ex_G_*.txt
+ $RM $BLDDIR/H5Ex_G_*.out
+ $RM $BLDDIR/H5Ex_G_*.h5
+}
+
+COPY_ITERFILES="$LIST_ITER_FILES"
+
+COPY_ITERFILES_TO_BLDITERDIR()
+{
+    # Copy test files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_ITERFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+       # Skip cp if srcdir is the same as destdir; this occurs when the
+       # build/test is performed in the source directory and would make
+       # cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDITERDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDITERDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+            # Comment this out to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_ITERFILES_AND_BLDITERDIR()
+{
+    # Skip rm if srcdir is the same as destdir; this occurs when the
+    # build/test is performed in the source directory, where removing
+    # would delete the source files.
+ SDIR=`$DIRNAME $HDFTEST_HOME/h5ex_g_iterate.h5`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDITERDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDITERDIR
+ fi
+}
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+COPY_ITERFILES_TO_BLDITERDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Create"
+TESTING examples.groups.H5Ex_G_Create
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Create > H5Ex_G_Create.out)
+if diff H5Ex_G_Create.out examples.groups.H5Ex_G_Create.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Create"
+else
+ echo "**FAILED** groups.H5Ex_G_Create"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Iterate"
+TESTING examples.groups.H5Ex_G_Iterate
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Iterate > H5Ex_G_Iterate.out)
+if diff H5Ex_G_Iterate.out examples.groups.H5Ex_G_Iterate.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Iterate"
+else
+ echo "**FAILED** groups.H5Ex_G_Iterate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Compact"
+TESTING examples.groups.H5Ex_G_Compact
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Compact > H5Ex_G_Compact.out)
+if diff H5Ex_G_Compact.out examples.groups.H5Ex_G_Compact.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Compact"
+else
+ echo "**FAILED** groups.H5Ex_G_Compact"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Corder"
+TESTING examples.groups.H5Ex_G_Corder
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Corder > H5Ex_G_Corder.out)
+if diff H5Ex_G_Corder.out examples.groups.H5Ex_G_Corder.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Corder"
+else
+ echo "**FAILED** groups.H5Ex_G_Corder"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Intermediate"
+TESTING examples.groups.H5Ex_G_Intermediate
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Intermediate > H5Ex_G_Intermediate.out)
+if diff H5Ex_G_Intermediate.out examples.groups.H5Ex_G_Intermediate.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Intermediate"
+else
+ echo "**FAILED** groups.H5Ex_G_Intermediate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Phase"
+TESTING examples.groups.H5Ex_G_Phase
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Phase > H5Ex_G_Phase.out)
+if diff H5Ex_G_Phase.out examples.groups.H5Ex_G_Phase.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Phase"
+else
+ echo "**FAILED** groups.H5Ex_G_Phase"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Visit"
+TESTING examples.groups.H5Ex_G_Visit
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.groups.H5Ex_G_Visit > H5Ex_G_Visit.out)
+if diff H5Ex_G_Visit.out examples.groups.H5Ex_G_Visit.txt > /dev/null; then
+ echo " PASSED groups.H5Ex_G_Visit"
+else
+ echo "**FAILED** groups.H5Ex_G_Visit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_ITERFILES_AND_BLDITERDIR
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
diff --git a/HDF5Examples/JAVA/H5G/Java_sourcefiles.cmake b/HDF5Examples/JAVA/H5G/Java_sourcefiles.cmake
new file mode 100644
index 0000000..d5142bd
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/Java_sourcefiles.cmake
@@ -0,0 +1,24 @@
+#-----------------------------------------------------------------------------
+# Define Sources, one file per application
+#-----------------------------------------------------------------------------
+set (HDF_JAVA_EXAMPLES
+ H5Ex_G_Create.java
+ H5Ex_G_Compact.java
+ H5Ex_G_Corder.java
+ H5Ex_G_Phase.java
+)
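+# HDF5 1.10 builds with no explicit compatibility API selected use the 110/
+# variants of the iterate/intermediate/visit examples; other post-1.8 builds
+# use the current versions below.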
+if (${H5_LIBVER_DIR} GREATER 18)
+ if ((${H5_LIBVER_DIR} EQUAL 110) AND NOT (${${EXAMPLE_VARNAME}_USE_16_API} OR ${${EXAMPLE_VARNAME}_USE_18_API} OR ${${EXAMPLE_VARNAME}_USE_110_API}))
+ set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES}
+ 110/H5Ex_G_Iterate.java
+ 110/H5Ex_G_Intermediate.java
+ 110/H5Ex_G_Visit.java
+ )
+ else ()
+ set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES}
+ H5Ex_G_Iterate.java
+ H5Ex_G_Intermediate.java
+ H5Ex_G_Visit.java
+ )
+ endif ()
+endif ()
diff --git a/HDF5Examples/JAVA/H5G/Makefile.am b/HDF5Examples/JAVA/H5G/Makefile.am
new file mode 100644
index 0000000..d3b59cf
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/Makefile.am
@@ -0,0 +1,65 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/groups
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)groups.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5Ex_G_Create.java \
+ H5Ex_G_Iterate.java \
+ H5Ex_G_Compact.java \
+ H5Ex_G_Corder.java \
+ H5Ex_G_Intermediate.java \
+ H5Ex_G_Phase.java \
+ H5Ex_G_Visit.java
+
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = JavaGroupExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class JavaGroupExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
diff --git a/HDF5Examples/JAVA/H5G/h5ex_g_iterate.h5 b/HDF5Examples/JAVA/H5G/h5ex_g_iterate.h5
new file mode 100644
index 0000000..e462703
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/h5ex_g_iterate.h5
Binary files differ
diff --git a/HDF5Examples/JAVA/H5G/h5ex_g_visit.h5 b/HDF5Examples/JAVA/H5G/h5ex_g_visit.h5
new file mode 100644
index 0000000..d8267b1
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/h5ex_g_visit.h5
Binary files differ
diff --git a/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Compact.txt b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Compact.txt
new file mode 100644
index 0000000..0a88d3f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Compact.txt
@@ -0,0 +1,5 @@
+Group storage type for H5Ex_G_Compact1.h5 is: H5G_STORAGE_TYPE_SYMBOL_TABLE
+File size for H5Ex_G_Compact1.h5 is: 1832 bytes
+
+Group storage type for H5Ex_G_Compact2.h5 is: H5G_STORAGE_TYPE_COMPACT
+File size for H5Ex_G_Compact2.h5 is: 342 bytes
diff --git a/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Corder.txt b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Corder.txt
new file mode 100644
index 0000000..2d959fc
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Corder.txt
@@ -0,0 +1,10 @@
+Traversing group using alphabetical indices:
+Index 0: 5
+Index 1: D
+Index 2: F
+Index 3: H
+Traversing group using creation order indices:
+Index 0: H
+Index 1: D
+Index 2: F
+Index 3: 5
diff --git a/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Create.txt b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Create.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Create.txt
diff --git a/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Intermediate.txt b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Intermediate.txt
new file mode 100644
index 0000000..65a0fc2
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Intermediate.txt
@@ -0,0 +1,5 @@
+Objects in the file_id:
+/ (Group)
+/G1 (Group)
+/G1/G2 (Group)
+/G1/G2/G3 (Group)
diff --git a/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Iterate.txt b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Iterate.txt
new file mode 100644
index 0000000..66a4ae9
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Iterate.txt
@@ -0,0 +1,5 @@
+Objects in root group:
+ Dataset: DS1
+ Datatype: DT1
+ Group: G1
+ Dataset: L1
diff --git a/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Phase.txt b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Phase.txt
new file mode 100644
index 0000000..9e666d4
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Phase.txt
@@ -0,0 +1,15 @@
+1 Group : Storage type is H5G_STORAGE_TYPE_COMPACT
+2 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+3 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+4 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+5 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+6 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+7 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+
+6 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+5 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+4 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+3 Groups: Storage type is H5G_STORAGE_TYPE_DENSE
+2 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
+1 Group : Storage type is H5G_STORAGE_TYPE_COMPACT
+0 Groups: Storage type is H5G_STORAGE_TYPE_COMPACT
diff --git a/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Visit.txt b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Visit.txt
new file mode 100644
index 0000000..126a588
--- /dev/null
+++ b/HDF5Examples/JAVA/H5G/tfiles/110/H5Ex_G_Visit.txt
@@ -0,0 +1,19 @@
+Objects in the file:
+/ (Group)
+/group1 (Group)
+/group1/dset1 (Dataset)
+/group1/group3 (Group)
+/group1/group3/group4 (Group)
+/group1/group3/group4/group1 (Group)
+/group1/group3/group4/group2 (Group)
+
+Links in the file:
+/group1 (Group)
+/group1/dset1 (Dataset)
+/group1/group3 (Group)
+/group1/group3/dset2 (Dataset)
+/group1/group3/group4 (Group)
+/group1/group3/group4/group1 (Group)
+/group1/group3/group4/group1/group5 (Group)
+/group1/group3/group4/group2 (Group)
+/group2 (Group)
diff --git a/HDF5Examples/JAVA/H5J/110/HDF5FileStructure.java b/HDF5Examples/JAVA/H5J/110/HDF5FileStructure.java
new file mode 100644
index 0000000..cddad57
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/110/HDF5FileStructure.java
@@ -0,0 +1,340 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to retrieve the HDF5 file structure using the
+ * "HDF Native Package (Java)". The example creates the group structure and
+ * datasets, then prints out the file structure:
+ *
+ * <pre>
+ * "/" (root)
+ * integer arrays
+ * 2D 32-bit integer 20x10
+ * 3D unsigned 8-bit integer 20x10x5
+ * float arrays
+ * 2D 64-bit double 20x10
+ * 3D 32-bit float 20x10x5
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5FileStructure {
+ private static String fname = "HDF5FileStructure.h5";
+ private static long[] dims2D = {20, 10};
+ private static long[] dims3D = {20, 10, 5};
+
+ public static void main(String args[]) throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open the group, obtaining a new handle.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gopen(file_id, "/", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ printGroup(group_id, "/", "");
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Recursively print a group and its members.
+ *
+ * @throws Exception
+ */
+ private static void printGroup(long g_id, String gname, String indent) throws Exception
+ {
+ if (g_id < 0)
+ return;
+
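+ // Query the link count for this group, then retrieve every member's name, object type, and link type in one call.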
+ H5G_info_t members = H5.H5Gget_info(g_id);
+ String objNames[] = new String[(int)members.nlinks];
+ int objTypes[] = new int[(int)members.nlinks];
+ int lnkTypes[] = new int[(int)members.nlinks];
+ long objRefs[] = new long[(int)members.nlinks];
+ int names_found = 0;
+ try {
+ names_found = H5.H5Gget_obj_info_all(g_id, null, objNames, objTypes, lnkTypes, objRefs,
+ HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+
+ indent += " ";
+ for (int i = 0; i < names_found; i++) {
+ System.out.println(indent + objNames[i]);
+ long group_id = -1;
+ if (objTypes[i] == HDF5Constants.H5O_TYPE_GROUP) {
+ // Open the group, obtaining a new handle.
+ try {
+ if (g_id >= 0)
+ group_id = H5.H5Gopen(g_id, objNames[i], HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ if (group_id >= 0)
+ printGroup(group_id, objNames[i], indent);
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ /**
+ * Create the file and add groups and datasets into the file, which is the
+ * same as javaExample.HDF5DatasetCreate.
+ *
+ * @see javaExample.HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ long group_id1 = HDF5Constants.H5I_INVALID_HID;
+ long group_id2 = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create groups in the file.
+ try {
+ if (file_id >= 0) {
+ group_id1 = H5.H5Gcreate(file_id,
+ "/"
+ + "integer arrays",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ group_id2 = H5.H5Gcreate(file_id,
+ "/"
+ + "float arrays",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the datasets.
+ try {
+ dataspace_id1 = H5.H5Screate_simple(2, dims2D, null);
+ dataspace_id2 = H5.H5Screate_simple(3, dims3D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 32-bit (4 bytes) integer dataset of 20 by 10
+ try {
+ if ((file_id >= 0) && (dataspace_id1 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "integer arrays"
+ + "/"
+ + "2D 32-bit integer 20x10",
+ HDF5Constants.H5T_STD_I32LE, dataspace_id1, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 8-bit (1 byte) unsigned integer dataset of 20 by 10 by 5
+ try {
+ if ((file_id >= 0) && (dataspace_id2 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "integer arrays"
+ + "/"
+ + "3D 8-bit unsigned integer 20x10x5",
+ HDF5Constants.H5T_STD_U8LE, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 64-bit (8 bytes) double dataset of 20 by 10
+ try {
+ if ((file_id >= 0) && (dataspace_id1 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "float arrays"
+ + "/"
+ + "2D 64-bit double 20x10",
+ HDF5Constants.H5T_NATIVE_DOUBLE, dataspace_id1, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 32-bit (4 bytes) float dataset of 20 by 10 by 5
+ try {
+ if ((file_id >= 0) && (dataspace_id2 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "float arrays"
+ + "/"
+ + "3D 32-bit float 20x10x5",
+ HDF5Constants.H5T_NATIVE_FLOAT, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space.
+ try {
+ if (dataspace_id1 >= 0)
+ H5.H5Sclose(dataspace_id1);
+ dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ if (dataspace_id2 >= 0)
+ H5.H5Sclose(dataspace_id2);
+ dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (group_id1 >= 0)
+ H5.H5Gclose(group_id1);
+ if (group_id2 >= 0)
+ H5.H5Gclose(group_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/HDF5Examples/JAVA/H5J/CMakeLists.txt b/HDF5Examples/JAVA/H5J/CMakeLists.txt
new file mode 100644
index 0000000..8f1c195
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/CMakeLists.txt
@@ -0,0 +1,92 @@
+cmake_minimum_required (VERSION 3.18)
+project (HDF5Examples_JAVA_INTRO Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDFJAVA_LIB_DIR}
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH2}
+)
+
+#-----------------------------------------------------------------------------
+# Define Sources
+#-----------------------------------------------------------------------------
+include (Java_sourcefiles.cmake)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else ()
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif ()
+
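+# Join the Java include paths into a classpath using the platform separator.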
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach ()
+
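+# Collect the bare jar file names for the manifest Class-Path entry.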
+foreach (HCP_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ get_filename_component (_HCP_FILE ${HCP_JAR} NAME)
+ set (HDFJAVA_CLASSJARS "${_HCP_FILE} ${HDFJAVA_CLASSJARS}")
+endforeach ()
+
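+# Build one jar per example, with a manifest naming that example's main class.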
+foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+ "Main-Class: ${example_name}
+Class-Path: ${HDFJAVA_CLASSJARS}
+"
+ )
+ add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+ get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+endforeach ()
+
+if (H5EX_BUILD_TESTING)
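+ # Run a built example jar via jrunTest.cmake and check its exit status against resultcode.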
+ macro (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+ -D "TEST_PROGRAM=${resultfile}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
+ -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
+ -D "TEST_OUTPUT=${PROJECT_BINARY_DIR}/${resultfile}.out"
+ -D "TEST_REFERENCE=${resultfile}.txt"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_SKIP_COMPARE=TRUE"
+ -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif ()
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${resultfile}")
+ endmacro ()
+
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${HDFJAVA_EXAMPLES_INTRO_BINARY_DIR}/${example_name}.h5
+ ${example_name}.out
+ ${example_name}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif ()
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}.txt
+ ${PROJECT_BINARY_DIR}/${example_name}.txt
+ )
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects)
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects")
+ ADD_H5_TEST (${example_name} 0)
+ endforeach ()
+
+endif ()
diff --git a/HDF5Examples/JAVA/H5J/H5_CreateGroupAbsoluteRelative.java b/HDF5Examples/JAVA/H5J/H5_CreateGroupAbsoluteRelative.java
new file mode 100644
index 0000000..934242d
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/H5_CreateGroupAbsoluteRelative.java
@@ -0,0 +1,114 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Creating groups using absolute and relative names.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5_CreateGroupAbsoluteRelative {
+ private static String FILENAME = "H5_CreateGroupAbsoluteRelative.h5";
+ private static String GROUPNAME = "MyGroup";
+ private static String GROUPNAME_A = "GroupA";
+ private static String GROUPNAME_B = "GroupB";
+
+ private static void CreateGroupAbsoluteAndRelative()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group1_id = HDF5Constants.H5I_INVALID_HID;
+ long group2_id = HDF5Constants.H5I_INVALID_HID;
+ long group3_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group named "/MyGroup" in the file.
+ try {
+ if (file_id >= 0)
+ group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create group "Group_A" in group "MyGroup" using absolute name.
+ try {
+ if (file_id >= 0)
+ group2_id =
+ H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create group "Group_B" in group "MyGroup" using relative name.
+ try {
+ if (group1_id >= 0)
+ group3_id = H5.H5Gcreate(group1_id, GROUPNAME_B, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close group3.
+ try {
+ if (group3_id >= 0)
+ H5.H5Gclose(group3_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close group2.
+ try {
+ if (group2_id >= 0)
+ H5.H5Gclose(group2_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close group1.
+ try {
+ if (group1_id >= 0)
+ H5.H5Gclose(group1_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5_CreateGroupAbsoluteRelative.CreateGroupAbsoluteAndRelative();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5AttributeCreate.java b/HDF5Examples/JAVA/H5J/HDF5AttributeCreate.java
new file mode 100644
index 0000000..faa2418
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5AttributeCreate.java
@@ -0,0 +1,278 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create, read, and write an HDF5 attribute
+ * using the "HDF Native Package (Java)". The example creates an attribute, then
+ * writes and reads back the attribute value:
+ *
+ * <pre>
+ * "/" (root)
+ * 2D 32-bit integer 20x10
+ * (attribute: name="data range", value=[0, 10000])
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5AttributeCreate {
+ private static String fname = "HDF5AttributeCreate.h5";
+ private static String dsname = "2D 32-bit integer 20x10";
+ private static String attrname = "data range";
+ private static long[] dims2D = {20, 10};
+
+ private static void CreateDatasetAttribute()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create the file with the dataset that the attribute will be attached to.
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, dsname, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
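+ // The attribute stores the valid data range as a pair of integers.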
+ long[] attrDims = {2}; // 1D of size two
+ int[] attrValue = {0, 10000}; // attribute value
+
+ // Create the data space for the attribute.
+ try {
+ dataspace_id = H5.H5Screate_simple(1, attrDims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a dataset attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, attrname, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Write the attribute data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, attrValue);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the attribute.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
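+ // Reopen the attribute by name so its value can be read back.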
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", attrname, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, attrDims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a buffer, sized from the attribute dataspace, for the values read back.
+ int[] attrData = new int[(int)attrDims[0]];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_INT, attrData);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // print out attribute value
+ System.out.println(attrname);
+ System.out.println(attrData[0] + " " + attrData[1]);
+
+ // Close the dataspace.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Create the file and add a dataset to it; this mirrors
+ * javaExample.HDF5DatasetCreate.
+ *
+ * @see javaExample.HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, dsname, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // set the data values
+ int[] dataIn = new int[20 * 10];
+ for (int i = 0; i < 20; i++) {
+ for (int j = 0; j < 10; j++) {
+ dataIn[i * 10 + j] = i * 100 + j;
+ }
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataIn);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5AttributeCreate.CreateDatasetAttribute(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5DatasetCreate.java b/HDF5Examples/JAVA/H5J/HDF5DatasetCreate.java
new file mode 100644
index 0000000..05fea5f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5DatasetCreate.java
@@ -0,0 +1,192 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create HDF5 datasets using the
+ * "HDF Native Package (Java)". The example creates the group structure and
+ * datasets:
+ *
+ * <pre>
+ * "/" (root)
+ *     g1
+ *         2D 32-bit integer 20x10
+ *         3D 8-bit unsigned integer 20x10x5
+ *     g2
+ *         2D 64-bit double 20x10
+ *         3D 32-bit float 20x10x5
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5DatasetCreate {
+ private static String fname = "HDF5DatasetCreate.h5";
+ private static long[] dims2D = {20, 10};
+ private static long[] dims3D = {20, 10, 5};
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id1 = HDF5Constants.H5I_INVALID_HID;
+ long group_id2 = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0) {
+ group_id1 = H5.H5Gcreate(file_id, "g1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ group_id2 = H5.H5Gcreate(file_id, "g2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the 2D dataset.
+ try {
+ dataspace_id1 = H5.H5Screate_simple(2, dims2D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the 3D dataset.
+ try {
+ dataspace_id2 = H5.H5Screate_simple(3, dims3D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 32-bit (4 bytes) integer dataset of 20 by 10
+ try {
+ if ((group_id1 >= 0) && (dataspace_id1 >= 0)) {
+ dataset_id = H5.H5Dcreate(
+ group_id1, "2D 32-bit integer 20x10", HDF5Constants.H5T_NATIVE_INT32, dataspace_id1,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 8-bit (1 byte) unsigned integer dataset of 20 by 10 by 5
+ try {
+ if ((group_id1 >= 0) && (dataspace_id2 >= 0)) {
+ dataset_id =
+ H5.H5Dcreate(group_id1, "3D 8-bit unsigned integer 20x10x5",
+ HDF5Constants.H5T_NATIVE_UINT8, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 64-bit (8 bytes) double dataset of 20 by 10
+ try {
+ if ((group_id2 >= 0) && (dataspace_id1 >= 0)) {
+ dataset_id = H5.H5Dcreate(
+ group_id2, "2D 64-bit double 20x10", HDF5Constants.H5T_NATIVE_DOUBLE, dataspace_id1,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 32-bit (4 bytes) float dataset of 20 by 10 by 5
+ try {
+ if ((group_id2 >= 0) && (dataspace_id2 >= 0)) {
+ dataset_id = H5.H5Dcreate(
+ group_id2, "3D 32-bit float 20x10x5", HDF5Constants.H5T_NATIVE_FLOAT, dataspace_id2,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id2 >= 0)
+ H5.H5Sclose(dataspace_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (dataspace_id1 >= 0)
+ H5.H5Sclose(dataspace_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (group_id2 >= 0)
+ H5.H5Gclose(group_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (group_id1 >= 0)
+ H5.H5Gclose(group_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5DatasetCreate.CreateDataset(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5DatasetRead.java b/HDF5Examples/JAVA/H5J/HDF5DatasetRead.java
new file mode 100644
index 0000000..e0771a8
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5DatasetRead.java
@@ -0,0 +1,235 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to read/write HDF5 datasets using the
+ * "HDF Native Package (Java)". The example creates an integer dataset, then
+ * reads and writes data values:
+ *
+ * <pre>
+ * "/" (root)
+ * 2D 32-bit integer 20x10
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5DatasetRead {
+ private static String fname = "HDF5DatasetRead.h5";
+ private static String dsname = "2D 32-bit integer 20x10";
+ private static long[] dims2D = {20, 10};
+
+ private static void ReadWriteDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, dsname, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array to hold the dataset elements.
+ int[][] dataRead = new int[(int)dims2D[0]][(int)(dims2D[1])];
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataRead);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // print out the data values
+ System.out.println("\n\nOriginal Data Values");
+ for (int i = 0; i < 20; i++) {
+ System.out.print("\n" + dataRead[i][0]);
+ for (int j = 1; j < 10; j++) {
+ System.out.print(", " + dataRead[i][j]);
+ }
+ }
+
+ // change data value and write it to file.
+ for (int i = 0; i < 20; i++) {
+ for (int j = 0; j < 10; j++) {
+ dataRead[i][j]++;
+ }
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataRead);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // reload the data value
+ int[][] dataModified = new int[(int)dims2D[0]][(int)(dims2D[1])];
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataModified);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // print out the modified data values
+ System.out.println("\n\nModified Data Values");
+ for (int i = 0; i < 20; i++) {
+ System.out.print("\n" + dataModified[i][0]);
+ for (int j = 1; j < 10; j++) {
+ System.out.print(", " + dataModified[i][j]);
+ }
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Create the file and add a dataset to it; this mirrors
+ * HDF5DatasetCreate.
+ *
+ * @see HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, dsname, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // set the data values
+ int[] dataIn = new int[20 * 10];
+ for (int i = 0; i < 20; i++) {
+ for (int j = 0; j < 10; j++) {
+ dataIn[i * 10 + j] = i * 100 + j;
+ }
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataIn);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5DatasetRead.ReadWriteDataset(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5FileCreate.java b/HDF5Examples/JAVA/H5J/HDF5FileCreate.java
new file mode 100644
index 0000000..fbfc247
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5FileCreate.java
@@ -0,0 +1,57 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: This example shows how to create an empty HDF5 file using the
+ * "HDF Native Package (Java)". If the file (H5FileCreate.h5) already exists, it
+ * will be truncated to zero length.
+ * </p>
+ */
+public class HDF5FileCreate {
+ // The name of the file we'll create.
+ private static String fname = "HDF5FileCreate.h5";
+
+ private static void CreateFile()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5FileCreate.CreateFile(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5FileStructure.java b/HDF5Examples/JAVA/H5J/HDF5FileStructure.java
new file mode 100644
index 0000000..8ea80a8
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5FileStructure.java
@@ -0,0 +1,348 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+import hdf.hdf5lib.structs.H5G_info_t;
+import hdf.hdf5lib.structs.H5O_token_t;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to retrieve the HDF5 file structure using the
+ * "HDF Native Package (Java)". The example creates the group structure and
+ * datasets, then prints out the file structure:
+ *
+ * <pre>
+ * "/" (root)
+ * integer arrays
+ * 2D 32-bit integer 20x10
+ * 3D unsigned 8-bit integer 20x10x5
+ * float arrays
+ * 2D 64-bit double 20x10
+ * 3D 32-bit float 20x10x5
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5FileStructure {
+ private static String fname = "HDF5FileStructure.h5";
+
+ private static void FileStructure()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open the group, obtaining a new handle.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gopen(file_id, "/", HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ printGroup(group_id, "/", "");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Recursively print a group and its members.
+ *
+ * @throws Exception
+ */
+ private static void printGroup(long g_id, String gname, String indent) throws Exception
+ {
+ if (g_id < 0)
+ return;
+
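+ // Query the link count for this group, then retrieve every member's name, object type, and token in one call.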
+ H5G_info_t members = H5.H5Gget_info(g_id);
+ String objNames[] = new String[(int)members.nlinks];
+ int objTypes[] = new int[(int)members.nlinks];
+ int lnkTypes[] = new int[(int)members.nlinks];
+ H5O_token_t objTokens[] = new H5O_token_t[(int)members.nlinks];
+ int names_found = 0;
+ try {
+ names_found = H5.H5Gget_obj_info_all(g_id, null, objNames, objTypes, lnkTypes, objTokens,
+ HDF5Constants.H5_INDEX_NAME);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+
+ indent += " ";
+ for (int i = 0; i < names_found; i++) {
+ System.out.println(indent + objNames[i]);
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ if (objTypes[i] == HDF5Constants.H5O_TYPE_GROUP) {
+ // Open the group, obtaining a new handle.
+ try {
+ if (g_id >= 0)
+ group_id = H5.H5Gopen(g_id, objNames[i], HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ if (group_id >= 0)
+ printGroup(group_id, objNames[i], indent);
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+
+ /**
+ * Create the file and add groups and datasets into the file, which is the
+ * same as javaExample.HDF5DatasetCreate.
+ *
+ * @see javaExample.HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long[] dims2D = {20, 10};
+ long[] dims3D = {20, 10, 5};
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ long group_id1 = HDF5Constants.H5I_INVALID_HID;
+ long group_id2 = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create groups in the file.
+ try {
+ if (file_id >= 0) {
+ group_id1 = H5.H5Gcreate(file_id,
+ "/"
+ + "integer arrays",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ group_id2 = H5.H5Gcreate(file_id,
+ "/"
+ + "float arrays",
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the datasets.
+ try {
+ dataspace_id1 = H5.H5Screate_simple(2, dims2D, null);
+ dataspace_id2 = H5.H5Screate_simple(3, dims3D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 32-bit (4 bytes) integer dataset of 20 by 10
+ try {
+ if ((file_id >= 0) && (dataspace_id1 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "integer arrays"
+ + "/"
+ + "2D 32-bit integer 20x10",
+ HDF5Constants.H5T_STD_I32LE, dataspace_id1, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 8-bit (1 byte) unsigned integer dataset of 20 by 10 by 5
+ try {
+ if ((file_id >= 0) && (dataspace_id2 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "integer arrays"
+ + "/"
+ + "3D 8-bit unsigned integer 20x10x5",
+ HDF5Constants.H5T_STD_U8LE, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 2D 64-bit (8 bytes) double dataset of 20 by 10
+ try {
+ if ((file_id >= 0) && (dataspace_id1 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "float arrays"
+ + "/"
+ + "2D 64-bit double 20x10",
+ HDF5Constants.H5T_NATIVE_DOUBLE, dataspace_id1, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // create 3D 32-bit (4 bytes) float dataset of 20 by 10 by 5
+ try {
+ if ((file_id >= 0) && (dataspace_id2 >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id,
+ "/"
+ + "float arrays"
+ + "/"
+ + "3D 32-bit float 20x10x5",
+ HDF5Constants.H5T_NATIVE_FLOAT, dataspace_id2, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space.
+ try {
+ if (dataspace_id1 >= 0)
+ H5.H5Sclose(dataspace_id1);
+ dataspace_id1 = HDF5Constants.H5I_INVALID_HID;
+ if (dataspace_id2 >= 0)
+ H5.H5Sclose(dataspace_id2);
+ dataspace_id2 = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (group_id1 >= 0)
+ H5.H5Gclose(group_id1);
+ if (group_id2 >= 0)
+ H5.H5Gclose(group_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5FileStructure.FileStructure(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5GroupCreate.java b/HDF5Examples/JAVA/H5J/HDF5GroupCreate.java
new file mode 100644
index 0000000..4a31c8f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5GroupCreate.java
@@ -0,0 +1,138 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to create HDF5 groups using the
+ * "HDF Native Package (Java)". The example created the group structure:
+ *
+ * <pre>
+ * "/" (root)
+ * g1
+ * g11
+ * g12
+ * g2
+ * g21
+ * g22
+ * </pre>
+ *
+ * </p>
+ */
+public class HDF5GroupCreate {
+ private static String fname = "HDF5GroupCreate.h5";
+
+ private static void CreateGroup()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long subgroup_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id1 = HDF5Constants.H5I_INVALID_HID;
+ long group_id2 = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ System.err.println("Failed to create file:" + fname);
+ return;
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0) {
+ group_id1 = H5.H5Gcreate(file_id, "g1", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (group_id1 >= 0) {
+ subgroup_id = H5.H5Gcreate(group_id1, "g11", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ if (subgroup_id >= 0)
+ H5.H5Gclose(subgroup_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ subgroup_id = H5.H5Gcreate(group_id1, "g12", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ if (subgroup_id >= 0)
+ H5.H5Gclose(subgroup_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ group_id2 = H5.H5Gcreate(file_id, "g2", HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (group_id2 >= 0) {
+ subgroup_id = H5.H5Gcreate(group_id2, "g21", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ if (subgroup_id >= 0)
+ H5.H5Gclose(subgroup_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ subgroup_id = H5.H5Gcreate(group_id2, "g22", HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ try {
+ if (subgroup_id >= 0)
+ H5.H5Gclose(subgroup_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the groups.
+ try {
+ if (group_id2 >= 0)
+ H5.H5Gclose(group_id2);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ if (group_id1 >= 0)
+ H5.H5Gclose(group_id1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5GroupCreate.CreateGroup(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5GroupDatasetCreate.java b/HDF5Examples/JAVA/H5J/HDF5GroupDatasetCreate.java
new file mode 100644
index 0000000..b89cd9c
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5GroupDatasetCreate.java
@@ -0,0 +1,204 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ Create two datasets within groups.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class HDF5GroupDatasetCreate {
+ private static String FILENAME = "HDF5GroupDatasetCreate.h5";
+ private static String GROUPNAME = "MyGroup";
+ private static String GROUPNAME_A = "GroupA";
+ private static String DATASETNAME1 = "dset1";
+ private static String DATASETNAME2 = "dset2";
+ private static final int DIM1_X = 3;
+ private static final int DIM1_Y = 3;
+ private static final int DIM2_X = 2;
+ private static final int DIM2_Y = 10;
+
+ private static void h5_crtgrpd()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long group1_id = HDF5Constants.H5I_INVALID_HID;
+ long group2_id = HDF5Constants.H5I_INVALID_HID;
+ int[][] dset1_data = new int[DIM1_X][DIM1_Y];
+ int[][] dset2_data = new int[DIM2_X][DIM2_Y];
+ long[] dims1 = {DIM1_X, DIM1_Y};
+ long[] dims2 = {DIM2_X, DIM2_Y};
+
+ // Initialize the first dataset.
+ for (int indx = 0; indx < DIM1_X; indx++)
+ for (int jndx = 0; jndx < DIM1_Y; jndx++)
+ dset1_data[indx][jndx] = jndx + 1;
+
+ // Initialize the second dataset.
+ for (int indx = 0; indx < DIM2_X; indx++)
+ for (int jndx = 0; jndx < DIM2_Y; jndx++)
+ dset2_data[indx][jndx] = jndx + 1;
+
+ // Create a file.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ // Create a group named "/MyGroup" in the file.
+ if (file_id >= 0) {
+ group1_id = H5.H5Gcreate(file_id, "/" + GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ // Create group "Group_A" in group "MyGroup" using absolute name.
+ if (group1_id >= 0) {
+ group2_id =
+ H5.H5Gcreate(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (group2_id >= 0)
+ H5.H5Gclose(group2_id);
+ }
+ if (group1_id >= 0)
+ H5.H5Gclose(group1_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the first dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims1, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset in group "MyGroup".
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(
+ file_id, "/" + GROUPNAME + "/" + DATASETNAME1, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the first dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset1_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space for the first dataset.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the first dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing group of the specified file.
+ try {
+ if (file_id >= 0)
+ group_id =
+ H5.H5Gopen(file_id, "/" + GROUPNAME + "/" + GROUPNAME_A, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the second dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the second dataset in group "Group_A".
+ try {
+ if ((group_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(group_id, DATASETNAME2, HDF5Constants.H5T_STD_I32BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the second dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset2_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the data space for the second dataset.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the second dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the group.
+ try {
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5GroupDatasetCreate.h5_crtgrpd(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/HDF5SubsetSelect.java b/HDF5Examples/JAVA/H5J/HDF5SubsetSelect.java
new file mode 100644
index 0000000..a00f5be
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/HDF5SubsetSelect.java
@@ -0,0 +1,264 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * Copyright by the Board of Trustees of the University of Illinois. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the files COPYING and Copyright.html. COPYING can be found at the root *
+ * of the source code distribution tree; Copyright.html can be found at the *
+ * root level of an installed copy of the electronic HDF5 document set and *
+ * is linked from the top-level documents page. It can also be found at *
+ * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have *
+ * access to either file, you may request a copy from help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+/**
+ * <p>
+ * Title: HDF Native Package (Java) Example
+ * </p>
+ * <p>
+ * Description: this example shows how to select a subset using the
+ * "HDF Native Package (Java)". The example creates an integer dataset and
+ * reads a subset of it:
+ *
+ * <pre>
+ * "/" (root)
+ * 2D 32-bit integer 20x10
+ * </pre>
+ *
+ * The whole 20x10 data set is
+ *
+ * <pre>
+ * 1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009
+ * 1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109
+ * 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209
+ * 1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309
+ * 1400, 1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409
+ * 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509
+ * 1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609
+ * 1700, 1701, 1702, 1703, 1704, 1705, 1706, 1707, 1708, 1709
+ * 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809
+ * 1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909
+ * 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ * 2100, 2101, 2102, 2103, 2104, 2105, 2106, 2107, 2108, 2109
+ * 2200, 2201, 2202, 2203, 2204, 2205, 2206, 2207, 2208, 2209
+ * 2300, 2301, 2302, 2303, 2304, 2305, 2306, 2307, 2308, 2309
+ * 2400, 2401, 2402, 2403, 2404, 2405, 2406, 2407, 2408, 2409
+ * 2500, 2501, 2502, 2503, 2504, 2505, 2506, 2507, 2508, 2509
+ * 2600, 2601, 2602, 2603, 2604, 2605, 2606, 2607, 2608, 2609
+ * 2700, 2701, 2702, 2703, 2704, 2705, 2706, 2707, 2708, 2709
+ * 2800, 2801, 2802, 2803, 2804, 2805, 2806, 2807, 2808, 2809
+ * 2900, 2901, 2902, 2903, 2904, 2905, 2906, 2907, 2908, 2909
+ * </pre>
+ *
+ * Subset: start=(4, 2), size=(5, 3) and stride=(3, 2). The subset values are:
+ *
+ * <pre>
+ * 1402,1404,1406
+ * 1702,1704,1706
+ * 2002,2004,2006
+ * 2302,2304,2306
+ * 2602,2604,2606
+ * </pre>
+ *
+ * </p>
+ *
+ * @author Peter X. Cao
+ * @version 2.4
+ */
+public class HDF5SubsetSelect {
+ private static String fname = "HDF5SubsetSelect.h5";
+ private static String dsname = "2D 32-bit integer 20x10";
+ private static long[] dims2D = {20, 10};
+
+ private static void SubsetSelect()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long memspace_id = HDF5Constants.H5I_INVALID_HID;
+
+ // create the file and add groups and dataset into the file
+ try {
+ createFile();
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open file using the default properties.
+ try {
+ file_id = H5.H5Fopen(fname, HDF5Constants.H5F_ACC_RDWR, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open dataset using the default properties.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, dsname, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional array to hold the subset of the dataset.
+ int[][] dataRead = new int[5][3];
+
+ // Define and select the hyperslab to use for reading.
+ try {
+ if (dataset_id >= 0) {
+ filespace_id = H5.H5Dget_space(dataset_id);
+
+ long[] start = {4, 2};
+ long[] stride = {3, 2};
+ long[] count = {5, 3};
+ long[] block = null;
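+ // A null block defaults to 1x1 blocks, so subset element (i, j) maps to
+ // source element (start[0] + i*stride[0], start[1] + j*stride[1]):
+ // rows 4, 7, 10, 13, 16 and columns 2, 4, 6 of the 20x10 dataset.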
+
+ if (filespace_id >= 0) {
+ H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count,
+ block);
+
+ memspace_id = H5.H5Screate_simple(2, count, null);
+ // Read the data using the previously defined hyperslab.
+ if ((dataset_id >= 0) && (filespace_id >= 0) && (memspace_id >= 0))
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, memspace_id, filespace_id,
+ HDF5Constants.H5P_DEFAULT, dataRead);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // print out the data values
+ System.out.println("\n\nSubset Data Values");
+ for (int i = 0; i < 5; i++) {
+ System.out.print("\n" + dataRead[i][0]);
+ for (int j = 1; j < 3; j++) {
+ System.out.print("," + dataRead[i][j]);
+ }
+ }
+
+ // Close the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Create the file and add groups and a dataset into the file; this is the
+ * same as javaExample.HDF5DatasetCreate.
+ *
+ * @see javaExample.HDF5DatasetCreate
+ * @throws Exception
+ */
+ private static void createFile() throws Exception
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(fname, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the data space for the dataset.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2D, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, dsname, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // set the data values
+ int[] dataIn = new int[20 * 10];
+ for (int i = 0; i < 20; i++) {
+ for (int j = 0; j < 10; j++) {
+ dataIn[i * 10 + j] = 1000 + i * 100 + j;
+ }
+ }
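+ // Element (i, j) holds 1000 + i*100 + j, matching the table shown in the
+ // class javadoc above.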
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dataIn);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args) { HDF5SubsetSelect.SubsetSelect(); }
+}
diff --git a/HDF5Examples/JAVA/H5J/Java_sourcefiles.cmake b/HDF5Examples/JAVA/H5J/Java_sourcefiles.cmake
new file mode 100644
index 0000000..0a8a40b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/Java_sourcefiles.cmake
@@ -0,0 +1,21 @@
+#-----------------------------------------------------------------------------
+# Define Sources, one file per application
+#-----------------------------------------------------------------------------
+set (HDF_JAVA_EXAMPLES
+ HDF5FileCreate.java
+ HDF5GroupCreate.java
+ HDF5DatasetCreate.java
+ HDF5AttributeCreate.java
+ HDF5DatasetRead.java
+ HDF5GroupDatasetCreate.java
+ HDF5SubsetSelect.java
+)
+if (NOT ${${EXAMPLE_VARNAME}_USE_110_API} AND ${H5_LIBVER_DIR} EQUAL 110)
+ set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES}
+ 110/HDF5FileStructure.java
+ )
+else ()
+ set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES}
+ HDF5FileStructure.java
+ )
+endif ()
diff --git a/HDF5Examples/JAVA/H5J/Makefile.am b/HDF5Examples/JAVA/H5J/Makefile.am
new file mode 100644
index 0000000..6b615a2
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/Makefile.am
@@ -0,0 +1,55 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+
+JAVAROOT = .classes
+
+classes:
+ $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/intro
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)intro.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ HDF5AttributeCreate.java \
+ HDF5DatasetCreate.java \
+ HDF5DatasetRead.java \
+ HDF5FileCreate.java \
+ HDF5FileStructure.java \
+ HDF5GroupCreate.java \
+ HDF5SubsetSelect.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+TESTS = runExample.sh
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class runExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
diff --git a/HDF5Examples/JAVA/H5J/runExample.sh.in b/HDF5Examples/JAVA/H5J/runExample.sh.in
new file mode 100644
index 0000000..3a42b2b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/runExample.sh.in
@@ -0,0 +1,299 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# Copyright by the Board of Trustees of the University of Illinois.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the files COPYING and Copyright.html. COPYING can be found at the root
+# of the source code distribution tree; Copyright.html can be found at the
+# root level of an installed copy of the electronic HDF5 document set and
+# is linked from the top-level documents page. It can also be found at
+# http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have
+# access to either file, you may request a copy from help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+
+TESTNAME=EX_Intro
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+BASENAME='basename'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+BLDLIBDIR="./lib"
+BLDDIR="."
+HDFTEST_HOME="$top_srcdir/JAVA/intro"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@intro.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test
+# directory.
+# NOTE: Keep this framework to add/remove test files.
+# This list is also used to check for existence.
+# A leading '#' (without a space) marks a list entry as a comment.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$top_builddir/lib/libhdf5.*
+$top_builddir/lib/libhdf5_java.*
+$top_builddir/JAVA/intro/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/tfiles/114/HDF5AttributeCreate.txt
+$HDFTEST_HOME/tfiles/114/HDF5DatasetCreate.txt
+$HDFTEST_HOME/tfiles/114/HDF5DatasetRead.txt
+$HDFTEST_HOME/tfiles/114/HDF5FileCreate.txt
+$HDFTEST_HOME/tfiles/114/HDF5FileStructure.txt
+$HDFTEST_HOME/tfiles/114/HDF5GroupCreate.txt
+$HDFTEST_HOME/tfiles/114/HDF5SubsetSelect.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+ # Copy test files, using -f to make sure we get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir and would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
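+ # Comparing inode numbers catches the case where the two paths name the
+ # same directory even when spelled differently (e.g. through symlinks).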
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -fR $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this to CREATE expected file
+ exit $EXIT_FAILURE
+ fi
+ BNAME=`$BASENAME $tstfile`
+ if [ "$BNAME" = "libhdf5_java.dylib" ]; then
+ COPIED_LIBHDF5_JAVA=1
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+ # Copy test files, using -f to make sure we get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+ # Skip cp if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir and would make cp fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+ # Comment out this to CREATE expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ # Skip rm if srcdir is the same as destdir; this occurs when the
+ # build/test is performed in the source dir.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM $BLDDIR/*.txt
+ $RM $BLDDIR/*.out
+ fi
+}
+
+# Print a one-line message left-justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+export JAVAEXE
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$TESTJARFILE""
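+# The classpath must contain both the HDF5 Java wrapper jar and the jar
+# holding the compiled example classes.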
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ Darwin)
+ DYLD_LIBRARY_PATH=$BLDLIBDIR:$DYLD_LIBRARY_PATH
+ export DYLD_LIBRARY_PATH
+ LD_LIBRARY_PATH=$DYLD_LIBRARY_PATH
+ ;;
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5AttributeCreate"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5AttributeCreate > HDF5AttributeCreate.out)
+if diff HDF5AttributeCreate.out HDF5AttributeCreate.txt > /dev/null; then
+ echo " PASSED HDF5AttributeCreate"
+else
+ echo "**FAILED** HDF5AttributeCreate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5DatasetCreate"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5DatasetCreate > HDF5DatasetCreate.out)
+if diff HDF5DatasetCreate.out HDF5DatasetCreate.txt > /dev/null; then
+ echo " PASSED HDF5DatasetCreate"
+else
+ echo "**FAILED** HDF5DatasetCreate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5DatasetRead"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5DatasetRead > HDF5DatasetRead.out)
+if diff HDF5DatasetRead.out HDF5DatasetRead.txt > /dev/null; then
+ echo " PASSED HDF5DatasetRead"
+else
+ echo "**FAILED** HDF5DatasetRead"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5FileCreate"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5FileCreate > HDF5FileCreate.out)
+if diff HDF5FileCreate.out HDF5FileCreate.txt > /dev/null; then
+ echo " PASSED HDF5FileCreate"
+else
+ echo "**FAILED** HDF5FileCreate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5FileStructure"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5FileStructure > HDF5FileStructure.out)
+if diff HDF5FileStructure.out HDF5FileStructure.txt > /dev/null; then
+ echo " PASSED HDF5FileStructure"
+else
+ echo "**FAILED** HDF5FileStructure"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5GroupCreate"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5GroupCreate > HDF5GroupCreate.out)
+if diff HDF5GroupCreate.out HDF5GroupCreate.txt > /dev/null; then
+ echo " PASSED HDF5GroupCreate"
+else
+ echo "**FAILED** HDF5GroupCreate"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5SubsetSelect"
+($JAVAEXE -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH HDF5SubsetSelect > HDF5SubsetSelect.out)
+if diff HDF5SubsetSelect.out HDF5SubsetSelect.txt > /dev/null; then
+ echo " PASSED HDF5SubsetSelect"
+else
+ echo "**FAILED** HDF5SubsetSelect"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5AttributeCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5AttributeCreate.txt
new file mode 100644
index 0000000..e45aa6b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5AttributeCreate.txt
@@ -0,0 +1,2 @@
+data range
+0 10000
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetCreate.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetCreate.txt
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetRead.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetRead.txt
new file mode 100644
index 0000000..078410f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5DatasetRead.txt
@@ -0,0 +1,47 @@
+
+
+Original Data Values
+
+0, 1, 2, 3, 4, 5, 6, 7, 8, 9
+100, 101, 102, 103, 104, 105, 106, 107, 108, 109
+200, 201, 202, 203, 204, 205, 206, 207, 208, 209
+300, 301, 302, 303, 304, 305, 306, 307, 308, 309
+400, 401, 402, 403, 404, 405, 406, 407, 408, 409
+500, 501, 502, 503, 504, 505, 506, 507, 508, 509
+600, 601, 602, 603, 604, 605, 606, 607, 608, 609
+700, 701, 702, 703, 704, 705, 706, 707, 708, 709
+800, 801, 802, 803, 804, 805, 806, 807, 808, 809
+900, 901, 902, 903, 904, 905, 906, 907, 908, 909
+1000, 1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009
+1100, 1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109
+1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209
+1300, 1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309
+1400, 1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409
+1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509
+1600, 1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609
+1700, 1701, 1702, 1703, 1704, 1705, 1706, 1707, 1708, 1709
+1800, 1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809
+1900, 1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909
+
+Modified Data Values
+
+1, 2, 3, 4, 5, 6, 7, 8, 9, 10
+101, 102, 103, 104, 105, 106, 107, 108, 109, 110
+201, 202, 203, 204, 205, 206, 207, 208, 209, 210
+301, 302, 303, 304, 305, 306, 307, 308, 309, 310
+401, 402, 403, 404, 405, 406, 407, 408, 409, 410
+501, 502, 503, 504, 505, 506, 507, 508, 509, 510
+601, 602, 603, 604, 605, 606, 607, 608, 609, 610
+701, 702, 703, 704, 705, 706, 707, 708, 709, 710
+801, 802, 803, 804, 805, 806, 807, 808, 809, 810
+901, 902, 903, 904, 905, 906, 907, 908, 909, 910
+1001, 1002, 1003, 1004, 1005, 1006, 1007, 1008, 1009, 1010
+1101, 1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109, 1110
+1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210
+1301, 1302, 1303, 1304, 1305, 1306, 1307, 1308, 1309, 1310
+1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409, 1410
+1501, 1502, 1503, 1504, 1505, 1506, 1507, 1508, 1509, 1510
+1601, 1602, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610
+1701, 1702, 1703, 1704, 1705, 1706, 1707, 1708, 1709, 1710
+1801, 1802, 1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810
+1901, 1902, 1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910 \ No newline at end of file
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileCreate.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileCreate.txt
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileStructure.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileStructure.txt
new file mode 100644
index 0000000..820bd9f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5FileStructure.txt
@@ -0,0 +1,6 @@
+ float arrays
+ 2D 64-bit double 20x10
+ 3D 32-bit float 20x10x5
+ integer arrays
+ 2D 32-bit integer 20x10
+ 3D 8-bit unsigned integer 20x10x5
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupCreate.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupCreate.txt
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupDatasetCreate.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupDatasetCreate.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5GroupDatasetCreate.txt
diff --git a/HDF5Examples/JAVA/H5J/tfiles/110/HDF5SubsetSelect.txt b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5SubsetSelect.txt
new file mode 100644
index 0000000..93bec79
--- /dev/null
+++ b/HDF5Examples/JAVA/H5J/tfiles/110/HDF5SubsetSelect.txt
@@ -0,0 +1,9 @@
+
+
+Subset Data Values
+
+1402,1404,1406
+1702,1704,1706
+2002,2004,2006
+2302,2304,2306
+2602,2604,2606 \ No newline at end of file
diff --git a/HDF5Examples/JAVA/H5T/110/H5Ex_T_ObjectReference.java b/HDF5Examples/JAVA/H5T/110/H5Ex_T_ObjectReference.java
new file mode 100644
index 0000000..0e147d8
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/110/H5Ex_T_ObjectReference.java
@@ -0,0 +1,341 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write object references
+ to a dataset. The program first creates objects in the
+ file and writes references to those objects to a dataset
+ with a dataspace of DIM0, then closes the file. Next, it
+ reopens the file, dereferences the references, and outputs
+ the names of their targets to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_ObjectReference {
+ private static String FILENAME = "H5Ex_T_ObjectReference.h5";
+ private static String DATASETNAME = "DS1";
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
+
+ // Object types that can be returned by H5Rget_obj_type
+ enum H5G_obj {
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
+ private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
+
+ static
+ {
+ for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_obj(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5G_obj get(int code) { return lookup.get(code); }
+ }
+
+ private static void writeObjRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][8];
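+ // Each H5R_OBJECT reference occupies 8 bytes in HDF5 1.x, hence the
+ // byte[DIM0][8] buffer.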
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if ((file_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ group_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create references to the previously created objects. Passing -1
+ // as space_id causes this parameter to be ignored. Other values
+ // besides valid dataspaces result in an error.
+ try {
+ if (file_id >= 0) {
+ byte[] rbuf0 = H5.H5Rcreate(file_id, GROUPNAME, HDF5Constants.H5R_OBJECT, -1);
+ byte[] rbuf1 = H5.H5Rcreate(file_id, DATASETNAME2, HDF5Constants.H5R_OBJECT, -1);
+ for (int indx = 0; indx < 8; indx++) {
+ dset_data[0][indx] = rbuf0[indx];
+ dset_data[1][indx] = rbuf1[indx];
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_REF_OBJ, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the object references to it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF_OBJ, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readObjRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional byte array to hold the object references
+ // read from the dataset.
+ dset_data = new byte[(int)dims[0]][8];
+
+ // Read the data using the default properties.
+ try {
+ if (dataset_id >= 0) {
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF_OBJ, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + "[" + indx + "]:");
+ System.out.print(" ->");
+ // Open the referenced object, get its name and type.
+ try {
+ if (dataset_id >= 0) {
+ object_id = H5.H5Rdereference(dataset_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5R_OBJECT, dset_data[indx]);
+ object_type = H5.H5Rget_obj_type(dataset_id, HDF5Constants.H5R_OBJECT, dset_data[indx]);
+ }
+ String obj_name = null;
+ if (object_type >= 0) {
+ // Get the length of the name and retrieve the name.
+ obj_name = H5.H5Iget_name(object_id);
+ }
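+ // Close the dereferenced object with the close call matching its
+ // type (group, dataset, or named datatype).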
+ if ((object_id >= 0) && (object_type >= -1)) {
+ switch (H5G_obj.get(object_type)) {
+ case H5G_GROUP:
+ System.out.print("H5G_GROUP");
+ try {
+ if (object_id >= 0)
+ H5.H5Gclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ case H5G_DATASET:
+ System.out.print("H5G_DATASET");
+ try {
+ if (object_id >= 0)
+ H5.H5Dclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ case H5G_TYPE:
+ System.out.print("H5G_TYPE");
+ try {
+ if (object_id >= 0)
+ H5.H5Tclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ default:
+ System.out.print("UNHANDLED");
+ }
+ }
+ // Print the name.
+ System.out.println(": " + obj_name);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ // Write object references to a dataset, then reopen the file,
+ // dereference them, and print the name and type of each target.
+ H5Ex_T_ObjectReference.writeObjRef();
+ H5Ex_T_ObjectReference.readObjRef();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/110/H5Ex_T_ObjectReferenceAttribute.java b/HDF5Examples/JAVA/H5T/110/H5Ex_T_ObjectReferenceAttribute.java
new file mode 100644
index 0000000..9818dba
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/110/H5Ex_T_ObjectReferenceAttribute.java
@@ -0,0 +1,381 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write object references
+ to an attribute. The program first creates objects in the
+ file and writes references to those objects to an
+ attribute with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, dereferences the references,
+ and outputs the names of their targets to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_ObjectReferenceAttribute {
+ private static String FILENAME = "H5Ex_T_ObjectReferenceAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
+
+ // Object types that can be returned by H5Rget_obj_type
+ enum H5G_obj {
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
+ private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
+
+ static
+ {
+ for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_obj(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5G_obj get(int code) { return lookup.get(code); }
+ }
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][8];
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if ((file_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ group_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create references to the previously created objects. Passing -1
+ // as space_id causes this parameter to be ignored. Other values
+ // besides valid dataspaces result in an error.
+ try {
+ if (file_id >= 0) {
+ byte[] rbuf0 = H5.H5Rcreate(file_id, GROUPNAME, HDF5Constants.H5R_OBJECT, -1);
+ byte[] rbuf1 = H5.H5Rcreate(file_id, DATASETNAME2, HDF5Constants.H5R_OBJECT, -1);
+ for (int indx = 0; indx < 8; indx++) {
+ dset_data[0][indx] = rbuf0[indx];
+ dset_data[1][indx] = rbuf1[indx];
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace to serve as the parent
+ // for the attribute.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF_OBJ, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the dataset.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_STD_REF_OBJ, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional byte array to hold the object references
+ // read from the attribute.
+ dset_data = new byte[(int)dims[0]][8];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_STD_REF_OBJ, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(ATTRIBUTENAME + "[" + indx + "]:");
+ System.out.print(" ->");
+ // Open the referenced object, get its name and type.
+ try {
+ if (dataset_id >= 0) {
+ object_id = H5.H5Rdereference(dataset_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5R_OBJECT, dset_data[indx]);
+ object_type = H5.H5Rget_obj_type(dataset_id, HDF5Constants.H5R_OBJECT, dset_data[indx]);
+ }
+ String obj_name = null;
+ if (object_type >= 0) {
+ // Get the length of the name and retrieve the name.
+ obj_name = H5.H5Iget_name(object_id);
+ }
+ if ((object_id >= 0) && (object_type >= -1)) {
+ switch (H5G_obj.get(object_type)) {
+ case H5G_GROUP:
+ System.out.print("H5G_GROUP");
+ try {
+ if (object_id >= 0)
+ H5.H5Gclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ case H5G_DATASET:
+ System.out.print("H5G_DATASET");
+ try {
+ if (object_id >= 0)
+ H5.H5Dclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ case H5G_TYPE:
+ System.out.print("H5G_TYPE");
+ try {
+ if (object_id >= 0)
+ H5.H5Tclose(object_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ break;
+ default:
+ System.out.print("UNHANDLED");
+ }
+ }
+ // Print the name.
+ System.out.println(": " + obj_name);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_ObjectReferenceAttribute.CreateDataset();
+ // Now we begin the read section of this example. The attribute is
+ // assumed to have the same name and rank, but can have any size, so
+ // the read buffer is allocated only after querying the dataspace.
+ H5Ex_T_ObjectReferenceAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/CMakeLists.txt b/HDF5Examples/JAVA/H5T/CMakeLists.txt
new file mode 100644
index 0000000..a779a53
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/CMakeLists.txt
@@ -0,0 +1,92 @@
+cmake_minimum_required (VERSION 3.18)
+project (HDF5Examples_JAVA_H5T Java)
+
+set (CMAKE_VERBOSE_MAKEFILE 1)
+
+INCLUDE_DIRECTORIES (
+ ${HDFJAVA_LIB_DIR}
+ ${JAVA_INCLUDE_PATH}
+ ${JAVA_INCLUDE_PATH2}
+)
+
+#-----------------------------------------------------------------------------
+# Define Sources
+#-----------------------------------------------------------------------------
+include (Java_sourcefiles.cmake)
+
+if (WIN32)
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ";")
+else ()
+ set (CMAKE_JAVA_INCLUDE_FLAG_SEP ":")
+endif ()
+
+set (CMAKE_JAVA_CLASSPATH ".")
+foreach (CMAKE_INCLUDE_PATH ${CMAKE_JAVA_INCLUDE_PATH})
+ set (CMAKE_JAVA_CLASSPATH "${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${CMAKE_INCLUDE_PATH}")
+endforeach ()
+
+foreach (HCP_JAR ${CMAKE_JAVA_INCLUDE_PATH})
+ get_filename_component (_HCP_FILE ${HCP_JAR} NAME)
+ set (HDFJAVA_CLASSJARS "${_HCP_FILE} ${HDFJAVA_CLASSJARS}")
+endforeach ()
+
+foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ file (WRITE ${PROJECT_BINARY_DIR}/Manifest.txt
+ "Main-Class: ${example_name}
+Class-Path: ${HDFJAVA_CLASSJARS}
+"
+ )
+ add_jar (${EXAMPLE_VARNAME}_${example_name} SOURCES ${example} MANIFEST ${PROJECT_BINARY_DIR}/Manifest.txt)
+ get_target_property (${EXAMPLE_VARNAME}_${example_name}_JAR_FILE ${EXAMPLE_VARNAME}_${example_name} JAR_FILE)
+endforeach ()
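+# Each example is packaged into its own jar; the manifest written above
+# records the example's main class and the wrapper-jar classpath.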
+
+if (H5EX_BUILD_TESTING)
+ macro (ADD_H5_TEST resultfile resultcode)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${resultfile}
+ COMMAND "${CMAKE_COMMAND}"
+ -D "TEST_TESTER=${CMAKE_Java_RUNTIME}"
+ -D "TEST_PROGRAM=${resultfile}"
+ -D "TEST_ARGS:STRING=${ARGN}"
+ -D "TEST_CLASSPATH:STRING=${CMAKE_JAVA_CLASSPATH}${CMAKE_JAVA_INCLUDE_FLAG_SEP}${${EXAMPLE_VARNAME}_${resultfile}_JAR_FILE}"
+ -D "TEST_LIBRARY_DIRECTORY=${CMAKE_TEST_LIB_DIRECTORY}"
+ -D "TEST_FOLDER=${PROJECT_BINARY_DIR}"
+ -D "TEST_OUTPUT=${PROJECT_BINARY_DIR}/${resultfile}.out"
+ -D "TEST_REFERENCE=${resultfile}.txt"
+ -D "TEST_EXPECT=${resultcode}"
+ -D "TEST_SKIP_COMPARE=TRUE"
+ -P "${${EXAMPLE_PACKAGE_NAME}_RESOURCES_DIR}/jrunTest.cmake"
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${resultfile} PROPERTIES DEPENDS ${last_test})
+ endif ()
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${resultfile}")
+ endmacro ()
+
+ foreach (example ${HDF_JAVA_EXAMPLES})
+ get_filename_component (example_name ${example} NAME_WE)
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E remove
+ ${PROJECT_BINARY_DIR}/${example_name}.h5
+ ${example_name}.out
+ ${example_name}.out.err
+ )
+ if (NOT "${last_test}" STREQUAL "")
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects PROPERTIES DEPENDS ${last_test})
+ endif ()
+ add_test (
+ NAME ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects
+ COMMAND ${CMAKE_COMMAND}
+ -E copy_if_different
+ ${PROJECT_SOURCE_DIR}/tfiles/110/${example_name}.txt
+ ${PROJECT_BINARY_DIR}/${example_name}.txt
+ )
+ set_tests_properties (${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects PROPERTIES DEPENDS ${EXAMPLE_VARNAME}_jnative-h5-${example_name}-clearall-objects)
+ set (last_test "${EXAMPLE_VARNAME}_jnative-h5-${example_name}-copy-objects")
+ ADD_H5_TEST (${example_name} 0)
+ endforeach ()
+
+endif ()
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_Array.java b/HDF5Examples/JAVA/H5T/H5Ex_T_Array.java
new file mode 100644
index 0000000..489367b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_Array.java
@@ -0,0 +1,278 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write array datatypes
+ to a dataset. The program first writes integer arrays of
+ dimension ADIM0 x ADIM1 to a dataset with a dataspace of
+ DIM0, then closes the file. Next, it reopens the file,
+ reads back the data, and outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Array {
+ private static String FILENAME = "H5Ex_T_Array.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int ADIM0 = 3;
+ private static final int ADIM1 = 5;
+ private static final int RANK = 1;
+ private static final int NDIMS = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
+ int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
+
+ // Initialize data. indx is the element in the dataspace, jndx and kndx the
+ // elements within the array datatype.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < ADIM0; jndx++)
+ for (int kndx = 0; kndx < ADIM1; kndx++)
+ dset_data[indx][jndx][kndx] = indx * jndx - jndx * kndx + indx * kndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create array datatypes for file.
+ try {
+ filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create array datatypes for memory.
+ try {
+ memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
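+ // The file datatype (64-bit little-endian) and the memory datatype
+ // (native int) differ; the library converts between them during I/O.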
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the dataset.
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
+ int[][][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype.
+ try {
+ if (dataset_id >= 0)
+ filetype_id = H5.H5Dget_type(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype's dimensions.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tget_array_dims(filetype_id, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
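+ // adims now holds the array dimensions stored in the file datatype, so
+ // the read buffer below can be sized to match.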
+
+ // Allocate a three-dimensional array to hold the dataset elements,
+ // each of which is an ADIM0 x ADIM1 integer array.
+ dset_data = new int[(int)dims[0]][(int)(adims[0])][(int)(adims[1])];
+
+ // Create array datatypes for memory.
+ try {
+ memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, 2, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read data.
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + " [" + indx + "]:");
+ for (int jndx = 0; jndx < adims[0]; jndx++) {
+ System.out.print(" [");
+ for (int kndx = 0; kndx < adims[1]; kndx++)
+ System.out.print(dset_data[indx][jndx][kndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_Array.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset has the same name and rank, but can have any size,
+ // so the read buffer is allocated only after querying the datatype.
+ H5Ex_T_Array.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_ArrayAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_ArrayAttribute.java
new file mode 100644
index 0000000..9a2aca5
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_ArrayAttribute.java
@@ -0,0 +1,318 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write array datatypes
+ to an attribute. The program first writes integer arrays
+ of dimension ADIM0xADIM1 to an attribute with a dataspace
+ of DIM0, then closes the file. Next, it reopens the
+ file, reads back the data, and outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_ArrayAttribute {
+ private static String FILENAME = "H5Ex_T_ArrayAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int ADIM0 = 3;
+ private static final int ADIM1 = 5;
+ private static final int RANK = 1;
+ private static final int NDIMS = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
+ int[][][] dset_data = new int[DIM0][ADIM0][ADIM1];
+
+ // Initialize data. indx is the element in the dataspace, jndx and kndx the
+ // elements within the array datatype.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < ADIM0; jndx++)
+ for (int kndx = 0; kndx < ADIM1; kndx++)
+ dset_data[indx][jndx][kndx] = indx * jndx - jndx * kndx + indx * kndx;
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create array datatypes for file.
+ try {
+ filetype_id = H5.H5Tarray_create(HDF5Constants.H5T_STD_I64LE, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create array datatypes for memory.
+ try {
+ memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the array data to the attribute.
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Awrite(attribute_id, memtype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] adims = {ADIM0, ADIM1};
+ int[][][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype.
+ try {
+ if (attribute_id >= 0)
+ filetype_id = H5.H5Aget_type(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype's dimensions.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tget_array_dims(filetype_id, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a three-dimensional buffer for the attribute elements, using the
+ // array dimensions just read from the file datatype.
+ dset_data = new int[(int)dims[0]][(int)(adims[0])][(int)(adims[1])];
+
+ // Create array datatypes for memory.
+ try {
+ memtype_id = H5.H5Tarray_create(HDF5Constants.H5T_NATIVE_INT, NDIMS, adims);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read data.
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Aread(attribute_id, memtype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(ATTRIBUTENAME + " [" + indx + "]:");
+ for (int jndx = 0; jndx < adims[0]; jndx++) {
+ System.out.print(" [");
+ for (int kndx = 0; kndx < adims[1]; kndx++)
+ System.out.print(dset_data[indx][jndx][kndx] + " ");
+ System.out.println("]");
+ }
+ System.out.println();
+ }
+ System.out.println();
+
+ // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_ArrayAttribute.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the attribute and array datatype have the same rank, but can have
+ // any size, so a new buffer is allocated to hold the data that is
+ // read back.
+ H5Ex_T_ArrayAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_Bit.java b/HDF5Examples/JAVA/H5T/H5Ex_T_Bit.java
new file mode 100644
index 0000000..54a467e
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_Bit.java
@@ -0,0 +1,223 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write bitfield
+ datatypes to a dataset. The program first writes bit
+ fields to a dataset with a dataspace of DIM0xDIM1, then
+ closes the file. Next, it reopens the file, reads back
+ the data, and outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Bit {
+ private static String FILENAME = "H5Ex_T_Bit.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ int[][] dset_data = new int[DIM0][DIM1];
+
+ // Initialize data.
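+ // Each element packs four 2-bit fields into one byte:
+ // "A" in bits 0-1, "B" in bits 2-3, "C" in bits 4-5, "D" in bits 6-7.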
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = 0;
+ dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
+ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
+ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
+ dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
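+ // The file stores big-endian bitfields (H5T_STD_B8BE); writing with the
+ // native bitfield type lets the library convert byte order as needed.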
+ // Write the bitfield data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional buffer for the dataset elements, using the
+ // dimensions just read from the dataspace.
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
+
+ // Read data.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_B8, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
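+ // Unpack each 2-bit field by shifting it down and masking with 0x03.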
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}");
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_Bit.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset has the same name and rank, but can have any size,
+ // so a new buffer is allocated to hold the data that is read back.
+ H5Ex_T_Bit.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_BitAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_BitAttribute.java
new file mode 100644
index 0000000..5be1b91
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_BitAttribute.java
@@ -0,0 +1,264 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write bitfield
+ datatypes to an attribute. The program first writes bit
+ fields to an attribute with a dataspace of DIM0xDIM1, then
+ closes the file. Next, it reopens the file, reads back
+ the data, and outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_BitAttribute {
+ private static String FILENAME = "H5Ex_T_BitAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ int[][] dset_data = new int[DIM0][DIM1];
+
+ // Initialize data.
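+ // Pack four 2-bit fields per byte, exactly as in H5Ex_T_Bit.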
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = 0;
+ dset_data[indx][jndx] |= (indx * jndx - jndx) & 0x03; /* Field "A" */
+ dset_data[indx][jndx] |= (indx & 0x03) << 2; /* Field "B" */
+ dset_data[indx][jndx] |= (jndx & 0x03) << 4; /* Field "C" */
+ dset_data[indx][jndx] |= ((indx + jndx) & 0x03) << 6; /* Field "D" */
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_B8BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the bitfield data to the attribute.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate a two-dimensional buffer for the attribute elements, using the
+ // dimensions just read from the dataspace.
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_B8, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println(ATTRIBUTENAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print("{" + (dset_data[indx][jndx] & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 2) & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 4) & 0x03) + ", ");
+ System.out.print(((dset_data[indx][jndx] >> 6) & 0x03) + "}");
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_BitAttribute.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the attribute has the same name and rank, but can have any size,
+ // so a new buffer is allocated to hold the data that is read back.
+ H5Ex_T_BitAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_Commit.java b/HDF5Examples/JAVA/H5T/H5Ex_T_Commit.java
new file mode 100644
index 0000000..cd26a96
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_Commit.java
@@ -0,0 +1,258 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to commit a named datatype to a
+ file, and read back that datatype. The program first
+ defines a compound datatype, commits it to a file, then
+ closes the file. Next, it reopens the file, opens the
+ datatype, and outputs the names of its fields to the
+ screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Commit {
+ private static String FILENAME = "H5Ex_T_Commit.h5";
+ private static String DATATYPENAME = "Sensor_Type";
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
+ protected final static int MAXSTRINGSIZE = 80;
+
+ // Values for the various classes of datatypes
+ enum H5T_class {
+ H5T_NO_CLASS(HDF5Constants.H5T_NO_CLASS), // error
+ H5T_INTEGER(HDF5Constants.H5T_INTEGER), // integer types
+ H5T_FLOAT(HDF5Constants.H5T_FLOAT), // floating-point types
+ H5T_TIME(HDF5Constants.H5T_TIME), // date and time types
+ H5T_STRING(HDF5Constants.H5T_STRING), // character string types
+ H5T_BITFIELD(HDF5Constants.H5T_BITFIELD), // bit field types
+ H5T_OPAQUE(HDF5Constants.H5T_OPAQUE), // opaque types
+ H5T_COMPOUND(HDF5Constants.H5T_COMPOUND), // compound types
+ H5T_REFERENCE(HDF5Constants.H5T_REFERENCE), // reference types
+ H5T_ENUM(HDF5Constants.H5T_ENUM), // enumeration types
+ H5T_VLEN(HDF5Constants.H5T_VLEN), // Variable-Length types
+ H5T_ARRAY(HDF5Constants.H5T_ARRAY), // Array types
+ H5T_NCLASSES(11); // this must be last
+
+ private static final Map<Long, H5T_class> lookup = new HashMap<Long, H5T_class>();
+
+ static
+ {
+ for (H5T_class s : EnumSet.allOf(H5T_class.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private long code;
+
+ H5T_class(long layout_type) { this.code = layout_type; }
+
+ public long getCode() { return this.code; }
+
+ public static H5T_class get(long typeclass_id) { return lookup.get(typeclass_id); }
+ }
+
+ // The supporting Sensor_Datatype class.
+ private static class Sensor_Datatype {
+ static int numberMembers = 4;
+ static int[] memberDims = {1, 1, 1, 1};
+
+ String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
+
+ // Data size is the storage size for the members, not the object.
+ static long getDataSize()
+ {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return data_size;
+ }
+
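+ // Member offsets are packed back-to-back: each member starts
+ // where the previous one ends.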
+ static int getOffset(int memberItem)
+ {
+ int data_offset = 0;
+ for (int indx = 0; indx < memberItem; indx++)
+ data_offset += memberStorage[indx];
+ return data_offset;
+ }
+ }
+
+ private static void CreateDataType()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ Sensor_Datatype datatypes = new Sensor_Datatype();
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for the file. Because the standard
+ // types we are using for the file may have different sizes than
+ // the corresponding native types, we must manually calculate the
+ // offset of each member.
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (filetype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = datatypes.memberFileTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(filetype_id, datatypes.memberNames[indx], Sensor_Datatype.getOffset(indx),
+ type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Commit the compound datatype to the file, creating a named datatype.
+ try {
+ if ((file_id >= 0) && (filetype_id >= 0))
+ H5.H5Tcommit(file_id, DATATYPENAME, filetype_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the str type.
+ try {
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataType()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long typeclass_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open named datatype.
+ try {
+ if (file_id >= 0)
+ filetype_id = H5.H5Topen(file_id, DATATYPENAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Named datatype: " + DATATYPENAME + ":");
+
+ // Get datatype class. If it isn't compound, we won't print anything.
+ try {
+ if (filetype_id >= 0)
+ typeclass_id = H5.H5Tget_class(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ // Read data.
+ try {
+ if (H5T_class.get(typeclass_id) == H5T_class.H5T_COMPOUND) {
+ System.out.println(" Class: H5T_COMPOUND");
+ int nmembs = H5.H5Tget_nmembers(filetype_id);
+ // Iterate over compound datatype members.
+ for (int indx = 0; indx < nmembs; indx++) {
+ String member_name = H5.H5Tget_member_name(filetype_id, indx);
+ System.out.println(" " + member_name);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_Commit.CreateDataType();
+ // Now we begin the read section of this example. Here we reopen
+ // the file, open the committed datatype, and print the names of
+ // its compound members.
+ H5Ex_T_Commit.ReadDataType();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_Compound.java b/HDF5Examples/JAVA/H5T/H5Ex_T_Compound.java
new file mode 100644
index 0000000..21aeabc
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_Compound.java
@@ -0,0 +1,460 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write compound
+ datatypes to a dataset. The program first writes
+ compound structures to a dataset with a dataspace of DIM0,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs it to the screen.
+ ************************************************************/
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Compound {
+ private static String FILENAME = "H5Ex_T_Compound.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int RANK = 1;
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
+ protected final static int MAXSTRINGSIZE = 80;
+
+ static class Sensor_Datatype {
+ static int numberMembers = 4;
+ static int[] memberDims = {1, 1, 1, 1};
+
+ static String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ static long[] memberMemTypes = {HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE};
+ static long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
+
+ // Data size is the storage size for the members.
+ static long getTotalDataSize()
+ {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return DIM0 * data_size;
+ }
+
+ static long getDataSize()
+ {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return data_size;
+ }
+
+ static int getOffset(int memberItem)
+ {
+ int data_offset = 0;
+ for (int indx = 0; indx < memberItem; indx++)
+ data_offset += memberStorage[indx];
+ return data_offset;
+ }
+ }
+
+ static class Sensor {
+ public int serial_no;
+ public String location;
+ public double temperature;
+ public double pressure;
+
+ Sensor(int serial_no, String location, double temperature, double pressure)
+ {
+ this.serial_no = serial_no;
+ this.location = location;
+ this.temperature = temperature;
+ this.pressure = pressure;
+ }
+
+ Sensor(List data)
+ {
+ this.serial_no = (int)data.get(0);
+ this.location = (String)data.get(1);
+ this.temperature = (double)data.get(2);
+ this.pressure = (double)data.get(3);
+ }
+
+ Sensor(ByteBuffer databuf, int dbposition) { readBuffer(databuf, dbposition); }
+
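+ // Serialize this sensor into the packed compound layout starting at dbposition.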
+ void writeBuffer(ByteBuffer databuf, int dbposition)
+ {
+ databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
+ byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
+ int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
+ for (int ndx = 0; ndx < arraylen; ndx++)
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
+ // Zero-pad the remainder of the fixed-length string field.
+ for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, (byte)0);
+ databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
+ databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
+ }
+
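+ // Deserialize a sensor from the packed compound layout; the buffer is
+ // duplicated for the string field so the original position is untouched.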
+ void readBuffer(ByteBuffer databuf, int dbposition)
+ {
+ this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
+ ByteBuffer stringbuf = databuf.duplicate();
+ stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
+ stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
+ byte[] bytearr = new byte[stringbuf.remaining()];
+ stringbuf.get(bytearr);
+ this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
+ this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
+ this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
+ }
+
+ List get()
+ {
+ List data = new ArrayList<>();
+ data.add(this.serial_no);
+ data.add(this.location);
+ data.add(this.temperature);
+ data.add(this.pressure);
+ return data;
+ }
+
+ void put(List data)
+ {
+ this.serial_no = (int)data.get(0);
+ this.location = (String)data.get(1);
+ this.temperature = (double)data.get(2);
+ this.pressure = (double)data.get(3);
+ }
+
+ @Override
+ public String toString()
+ {
+ return String.format("Serial number : " + serial_no + "%n"
+ + "Location : " + location + "%n"
+ + "Temperature (F) : " + temperature + "%n"
+ + "Pressure (inHg) : " + pressure + "%n");
+ }
+ }
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ ArrayList[] object_data = new ArrayList[DIM0];
+ byte[] dset_data = null;
+
+ // Initialize data.
+ object_data[0] = (ArrayList) new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57).get();
+ object_data[1] = (ArrayList) new Sensor(1184, new String("Intake"), 55.12, 22.95).get();
+ object_data[2] = (ArrayList) new Sensor(1027, new String("Intake manifold"), 103.55, 31.23).get();
+ object_data[3] = (ArrayList) new Sensor(1313, new String("Exhaust manifold"), 1252.89, 84.11).get();
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for memory.
+ try {
+ memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (memtype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberMemTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for the file. Because the standard
+ // types we are using for the file may have different sizes than
+ // the corresponding native types, we must manually calculate the
+ // offset of each member.
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (filetype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberFileTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the compound data to the dataset.
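+ // Each element of object_data is the field list produced by Sensor.get();
+ // H5DwriteVL maps those lists onto the compound memory type.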
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5DwriteVL(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, (Object[])object_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ Sensor[] object_data2 = new Sensor[(int)dims[0]];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for memory.
+ try {
+ memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (memtype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberMemTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ ArrayList[] object_data = new ArrayList[(int)dims[0]];
+
+ // Read data.
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5DreadVL(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, (Object[])object_data);
+
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
+ object_data2[indx] = new Sensor(object_data[indx]);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + " [" + indx + "]:");
+ System.out.println(object_data2[indx].toString());
+ }
+ System.out.println();
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_Compound.CreateDataset();
+ // Now we begin the read section of this example. Here we assume
+ // the dataset has the same name and rank, but can have any size,
+ // so a new buffer is allocated to hold the data that is read back.
+ H5Ex_T_Compound.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_CompoundAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_CompoundAttribute.java
new file mode 100644
index 0000000..a33faee
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_CompoundAttribute.java
@@ -0,0 +1,502 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write compound
+ datatypes to an attribute. The program first writes
+ compound structures to an attribute with a dataspace of
+ DIM0, then closes the file. Next, it reopens the file,
+ reads back the data, and outputs it to the screen.
+ ************************************************************/
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_CompoundAttribute {
+ private static String FILENAME = "H5Ex_T_CompoundAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int RANK = 1;
+ protected static final int INTEGERSIZE = 4;
+ protected static final int DOUBLESIZE = 8;
+ protected final static int MAXSTRINGSIZE = 80;
+
+ static class Sensor_Datatype {
+ static int numberMembers = 4;
+ static int[] memberDims = {1, 1, 1, 1};
+
+ static String[] memberNames = {"Serial number", "Location", "Temperature (F)", "Pressure (inHg)"};
+ static long[] memberMemTypes = {HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5T_NATIVE_DOUBLE};
+ static long[] memberFileTypes = {HDF5Constants.H5T_STD_I32BE, HDF5Constants.H5T_C_S1,
+ HDF5Constants.H5T_IEEE_F64BE, HDF5Constants.H5T_IEEE_F64BE};
+ static int[] memberStorage = {INTEGERSIZE, MAXSTRINGSIZE, DOUBLESIZE, DOUBLESIZE};
+
+ // Data size is the storage size for the members, not the object.
+ static long getTotalDataSize()
+ {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return DIM0 * data_size;
+ }
+
+ static long getDataSize()
+ {
+ long data_size = 0;
+ for (int indx = 0; indx < numberMembers; indx++)
+ data_size += memberStorage[indx] * memberDims[indx];
+ return data_size;
+ }
+
+ static int getOffset(int memberItem)
+ {
+ int data_offset = 0;
+ for (int indx = 0; indx < memberItem; indx++)
+ data_offset += memberStorage[indx];
+ return data_offset;
+ }
+ }
+
+ static class Sensor {
+ public int serial_no;
+ public String location;
+ public double temperature;
+ public double pressure;
+
+ Sensor(int serial_no, String location, double temperature, double pressure)
+ {
+ this.serial_no = serial_no;
+ this.location = location;
+ this.temperature = temperature;
+ this.pressure = pressure;
+ }
+
+ Sensor(List data)
+ {
+ this.serial_no = (int)data.get(0);
+ this.location = (String)data.get(1);
+ this.temperature = (double)data.get(2);
+ this.pressure = (double)data.get(3);
+ }
+
+ Sensor(ByteBuffer databuf, int dbposition) { readBuffer(databuf, dbposition); }
+
+ void writeBuffer(ByteBuffer databuf, int dbposition)
+ {
+ databuf.putInt(dbposition + Sensor_Datatype.getOffset(0), serial_no);
+ byte[] temp_str = location.getBytes(Charset.forName("UTF-8"));
+ int arraylen = (temp_str.length > MAXSTRINGSIZE) ? MAXSTRINGSIZE : temp_str.length;
+ for (int ndx = 0; ndx < arraylen; ndx++)
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, temp_str[ndx]);
+ // Zero-pad the remainder of the fixed-length string field.
+ for (int ndx = arraylen; ndx < MAXSTRINGSIZE; ndx++)
+ databuf.put(dbposition + Sensor_Datatype.getOffset(1) + ndx, (byte)0);
+ databuf.putDouble(dbposition + Sensor_Datatype.getOffset(2), temperature);
+ databuf.putDouble(dbposition + Sensor_Datatype.getOffset(3), pressure);
+ }
+
+ void readBuffer(ByteBuffer databuf, int dbposition)
+ {
+ this.serial_no = databuf.getInt(dbposition + Sensor_Datatype.getOffset(0));
+ ByteBuffer stringbuf = databuf.duplicate();
+ stringbuf.position(dbposition + Sensor_Datatype.getOffset(1));
+ stringbuf.limit(dbposition + Sensor_Datatype.getOffset(1) + MAXSTRINGSIZE);
+ byte[] bytearr = new byte[stringbuf.remaining()];
+ stringbuf.get(bytearr);
+ this.location = new String(bytearr, Charset.forName("UTF-8")).trim();
+ this.temperature = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(2));
+ this.pressure = databuf.getDouble(dbposition + Sensor_Datatype.getOffset(3));
+ }
+
+ List get()
+ {
+ List data = new ArrayList<>();
+ data.add(this.serial_no);
+ data.add(this.location);
+ data.add(this.temperature);
+ data.add(this.pressure);
+ return data;
+ }
+
+ void put(List data)
+ {
+ this.serial_no = (int)data.get(0);
+ this.location = (String)data.get(1);
+ this.temperature = (double)data.get(2);
+ this.pressure = (double)data.get(3);
+ }
+
+ @Override
+ public String toString()
+ {
+ return String.format("Serial number : " + serial_no + "%n"
+ + "Location : " + location + "%n"
+ + "Temperature (F) : " + temperature + "%n"
+ + "Pressure (inHg) : " + pressure + "%n");
+ }
+ }
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ ArrayList[] object_data = new ArrayList[DIM0];
+ byte[] dset_data = null;
+
+ // Initialize data.
+ object_data[0] = (ArrayList) new Sensor(1153, new String("Exterior (static)"), 53.23, 24.57).get();
+ object_data[1] = (ArrayList) new Sensor(1184, new String("Intake"), 55.12, 22.95).get();
+ object_data[2] = (ArrayList) new Sensor(1027, new String("Intake manifold"), 103.55, 31.23).get();
+ object_data[3] = (ArrayList) new Sensor(1313, new String("Exhaust manifold"), 1252.89, 84.11).get();
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for memory.
+ try {
+ memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (memtype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberMemTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for the file. Because the standard
+ // types we are using for the file may have different sizes than
+ // the corresponding native types, we must manually calculate the
+ // offset of each member.
+ try {
+ filetype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (filetype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberFileTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(filetype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the compound data.
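+ // As in H5Ex_T_Compound, each object_data element is a Sensor field list
+ // that H5AwriteVL maps onto the compound memory type.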
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5AwriteVL(attribute_id, memtype_id, (Object[])object_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long strtype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ Sensor[] object_data2 = new Sensor[(int)dims[0]];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create string datatype.
+ try {
+ strtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (strtype_id >= 0)
+ H5.H5Tset_size(strtype_id, MAXSTRINGSIZE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the compound datatype for memory.
+ try {
+ memtype_id = H5.H5Tcreate(HDF5Constants.H5T_COMPOUND, Sensor_Datatype.getDataSize());
+ if (memtype_id >= 0) {
+ for (int indx = 0; indx < Sensor_Datatype.numberMembers; indx++) {
+ long type_id = Sensor_Datatype.memberMemTypes[indx];
+ if (type_id == HDF5Constants.H5T_C_S1)
+ type_id = strtype_id;
+ H5.H5Tinsert(memtype_id, Sensor_Datatype.memberNames[indx],
+ Sensor_Datatype.getOffset(indx), type_id);
+ }
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ ArrayList[] object_data = new ArrayList[(int)dims[0]];
+
+ // Read data.
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5AreadVL(attribute_id, memtype_id, (Object[])object_data);
+
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
+ object_data2[indx] = new Sensor(object_data[indx]);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(ATTRIBUTENAME + " [" + indx + "]:");
+ System.out.println(object_data2[indx].toString());
+ }
+ System.out.println();
+
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (strtype_id >= 0)
+ H5.H5Tclose(strtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_CompoundAttribute.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the attribute has the same name and rank, but can have any size.
+        // Therefore we allocate a new Java array to read the data into.
+ H5Ex_T_CompoundAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_Float.java b/HDF5Examples/JAVA/H5T/H5Ex_T_Float.java
new file mode 100644
index 0000000..e062588
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_Float.java
@@ -0,0 +1,225 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write floating point
+ datatypes to a dataset. The program first writes floating
+ point numbers to a dataset with a dataspace of DIM0xDIM1,
+ then closes the file. Next, it reopens the file, reads
+ back the data, and outputs it to the screen.
+ ************************************************************/
+
+import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
+import java.util.Locale;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Float {
+ private static String FILENAME = "H5Ex_T_Float.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ double[][] dset_data = new double[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = indx / (jndx + 0.5) + jndx;
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset and write the floating point data to it. In
+ // this example we will save the data as 64 bit little endian IEEE
+ // floating point numbers, regardless of the native type. The HDF5
+ // library automatically converts between different floating point
+ // types.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ double[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate a two-dimensional array to hold the elements of the dataset.
+ dset_data = new double[(int)dims[0]][(int)(dims[1])];
+
+ // Read data.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_DOUBLE, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ DecimalFormat df = new DecimalFormat("#,##0.0000", new DecimalFormatSymbols(Locale.US));
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print(" " + df.format(dset_data[indx][jndx]));
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_Float.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the dataset has the same name and rank, but can have any size.
+        // Therefore we allocate a new Java array to read the data into.
+ H5Ex_T_Float.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_FloatAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_FloatAttribute.java
new file mode 100644
index 0000000..ffb8467
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_FloatAttribute.java
@@ -0,0 +1,262 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write floating point
+ datatypes to an attribute. The program first writes
+ floating point numbers to an attribute with a dataspace of
+ DIM0xDIM1, then closes the file. Next, it reopens the
+ file, reads back the data, and outputs it to the screen.
+ ************************************************************/
+
+import java.text.DecimalFormat;
+import java.text.DecimalFormatSymbols;
+import java.util.Locale;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_FloatAttribute {
+ private static String FILENAME = "H5Ex_T_FloatAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ double[][] dset_data = new double[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = indx / (jndx + 0.5) + jndx;
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_IEEE_F64LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Write the attribute data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ double[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate a two-dimensional array to hold the elements of the attribute.
+ dset_data = new double[(int)dims[0]][(int)(dims[1])];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_DOUBLE, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ DecimalFormat df = new DecimalFormat("#,##0.0000", new DecimalFormatSymbols(Locale.US));
+ System.out.println(ATTRIBUTENAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print(" " + df.format(dset_data[indx][jndx]));
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_FloatAttribute.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the attribute has the same name and rank, but can have any size.
+        // Therefore we allocate a new Java array to read the data into.
+ H5Ex_T_FloatAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_Integer.java b/HDF5Examples/JAVA/H5T/H5Ex_T_Integer.java
new file mode 100644
index 0000000..afae2b0
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_Integer.java
@@ -0,0 +1,222 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write integer datatypes
+ to a dataset. The program first writes integers to a
+ dataset with a dataspace of DIM0xDIM1, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+import java.text.DecimalFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Integer {
+ private static String FILENAME = "H5Ex_T_Integer.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ int[][] dset_data = new int[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = indx * jndx - jndx;
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset and write the integer data to it. In this
+ // example we will save the data as 64 bit big endian integers,
+ // regardless of the native integer type. The HDF5 library
+ // automatically converts between different integer types.
+ try {
+ if ((file_id >= 0) && (dataspace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate a two-dimensional array to hold the elements of the dataset.
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
+
+ // Read data.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_NATIVE_INT, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ DecimalFormat df = new DecimalFormat("#,##0");
+ System.out.println(DATASETNAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print(" " + df.format(dset_data[indx][jndx]));
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_Integer.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the dataset has the same name and rank, but can have any size.
+        // Therefore we allocate a new Java array to read the data into.
+ H5Ex_T_Integer.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_IntegerAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_IntegerAttribute.java
new file mode 100644
index 0000000..be4a878
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_IntegerAttribute.java
@@ -0,0 +1,260 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write integer datatypes
+ to an attribute. The program first writes integers to an
+ attribute with a dataspace of DIM0xDIM1, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+import java.text.DecimalFormat;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_IntegerAttribute {
+ private static String FILENAME = "H5Ex_T_IntegerAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int DIM1 = 7;
+ private static final int RANK = 2;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ int[][] dset_data = new int[DIM0][DIM1];
+
+ // Initialize data.
+ for (int indx = 0; indx < DIM0; indx++)
+ for (int jndx = 0; jndx < DIM1; jndx++) {
+ dset_data[indx][jndx] = indx * jndx - jndx;
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_I64BE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Write the attribute data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_NATIVE_INT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0, DIM1};
+ int[][] dset_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Allocate a two-dimensional array to hold the elements of the attribute.
+ dset_data = new int[(int)dims[0]][(int)(dims[1])];
+
+ // Read data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_NATIVE_INT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ DecimalFormat df = new DecimalFormat("#,##0");
+ System.out.println(ATTRIBUTENAME + ":");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(" [");
+ for (int jndx = 0; jndx < dims[1]; jndx++) {
+ System.out.print(" " + df.format(dset_data[indx][jndx]));
+ }
+ System.out.println("]");
+ }
+ System.out.println();
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_IntegerAttribute.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the attribute has the same name and rank, but can have any size.
+        // Therefore we allocate a new Java array to read the data into.
+ H5Ex_T_IntegerAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_ObjectReference.java b/HDF5Examples/JAVA/H5T/H5Ex_T_ObjectReference.java
new file mode 100644
index 0000000..2b61794
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_ObjectReference.java
@@ -0,0 +1,323 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write object references
+ to a dataset. The program first creates objects in the
+ file and writes references to those objects to a dataset
+ with a dataspace of DIM0, then closes the file. Next, it
+ reopens the file, dereferences the references, and outputs
+ the names of their targets to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_ObjectReference {
+ private static String FILENAME = "H5Ex_T_ObjectReference.h5";
+ private static String DATASETNAME = "DS1";
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
+
+    // Object type codes for dereferenced object references
+ enum H5G_obj {
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
+ private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
+
+ static
+ {
+ for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_obj(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5G_obj get(int code) { return lookup.get(code); }
+ }
+
+ private static void writeObjRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
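+        // Each object reference is stored in an opaque buffer of H5R_REF_BUF_SIZE bytes.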
+ byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if ((file_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ group_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
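+        // Create object references to the group and dataset created above.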
+ try {
+ if (file_id >= 0) {
+ try {
+ dset_data[0] = H5.H5Rcreate_object(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+
+ try {
+ dset_data[1] = H5.H5Rcreate_object(file_id, DATASETNAME2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ filespace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset.
+ try {
+ if ((file_id >= 0) && (filespace_id >= 0))
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_REF, filespace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the object references to it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Rdestroy(dset_data[1]);
+ }
+ catch (Exception ex) {
+ }
+ try {
+ H5.H5Rdestroy(dset_data[0]);
+ }
+ catch (Exception ex) {
+ }
+ }
+
+ // End access to the dataset and release resources used by it.
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readObjRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+
+ // Open an existing dataset.
+ try {
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+
+ try {
+ // Get dataspace and allocate memory for read buffer.
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+
+ // Read data.
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + "[" + indx + "]:");
+ System.out.print(" ->");
+ // Open the referenced object, get its name and type.
+ try {
+ object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ try {
+ object_type = H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
+ String obj_name = null;
+ if (object_type >= 0) {
+ // Get the name.
+ obj_name = H5.H5Iget_name(object_id);
+ }
+ if ((object_id >= 0) && (object_type >= -1)) {
+ switch (H5G_obj.get(object_type)) {
+ case H5G_GROUP:
+ System.out.print("H5G_GROUP");
+ break;
+ case H5G_DATASET:
+ System.out.print("H5G_DATASET");
+ break;
+ case H5G_TYPE:
+ System.out.print("H5G_TYPE");
+ break;
+ default:
+ System.out.print("UNHANDLED");
+ }
+ }
+ // Print the name.
+ System.out.println(": " + obj_name);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Oclose(object_id);
+ }
+ catch (Exception e) {
+ }
+ }
+ }
+ catch (Exception e4) {
+ e4.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e4) {
+ }
+ }
+ } // end for
+ }
+ catch (Exception e3) {
+ e3.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e3) {
+ }
+ }
+ }
+ catch (Exception e2) {
+ e2.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e2) {
+ }
+ }
+ }
+ catch (Exception e1) {
+ e1.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e1) {
+ }
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_ObjectReference.writeObjRef();
+ H5Ex_T_ObjectReference.readObjRef();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_ObjectReferenceAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_ObjectReferenceAttribute.java
new file mode 100644
index 0000000..d2117bd
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_ObjectReferenceAttribute.java
@@ -0,0 +1,366 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write object references
+ to an attribute. The program first creates objects in the
+ file and writes references to those objects to an
+ attribute with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, dereferences the references,
+ and outputs the names of their targets to the screen.
+ ************************************************************/
+
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_ObjectReferenceAttribute {
+ private static String FILENAME = "H5Ex_T_ObjectReferenceAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int RANK = 1;
+
+    // Object type codes for dereferenced object references
+ enum H5G_obj {
+ H5G_UNKNOWN(HDF5Constants.H5O_TYPE_UNKNOWN), /* Unknown object type */
+ H5G_GROUP(HDF5Constants.H5O_TYPE_GROUP), /* Object is a group */
+ H5G_DATASET(HDF5Constants.H5O_TYPE_DATASET), /* Object is a dataset */
+ H5G_TYPE(HDF5Constants.H5O_TYPE_NAMED_DATATYPE); /* Object is a named data type */
+ private static final Map<Integer, H5G_obj> lookup = new HashMap<Integer, H5G_obj>();
+
+ static
+ {
+ for (H5G_obj s : EnumSet.allOf(H5G_obj.class))
+ lookup.put(s.getCode(), s);
+ }
+
+ private int code;
+
+ H5G_obj(int layout_type) { this.code = layout_type; }
+
+ public int getCode() { return this.code; }
+
+ public static H5G_obj get(int code) { return lookup.get(code); }
+ }
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if ((file_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ dataset_id = HDF5Constants.H5I_INVALID_HID;
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create a group in the file.
+ try {
+ if (file_id >= 0)
+ group_id = H5.H5Gcreate(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ if (group_id >= 0)
+ H5.H5Gclose(group_id);
+ group_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
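+        // Store references to the newly created group and dataset.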
+ try {
+ if (file_id >= 0) {
+ try {
+ dset_data[0] = H5.H5Rcreate_object(file_id, GROUPNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+
+ try {
+ dset_data[1] = H5.H5Rcreate_object(file_id, DATASETNAME2, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Throwable err) {
+ err.printStackTrace();
+ }
+ }
+
+ // Create dataset with a scalar dataspace to serve as the parent
+ // for the attribute.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0))
+ attribute_id =
+ H5.H5Acreate(dataset_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+            // Write the attribute data.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_STD_REF, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Rdestroy(dset_data[1]);
+ }
+ catch (Exception ex) {
+ }
+ try {
+ H5.H5Rdestroy(dset_data[0]);
+ }
+ catch (Exception ex) {
+ }
+ }
+
+        // End access to the attribute and release resources used by it.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+
+ // Open an existing dataset.
+ try {
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+
+ try {
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+
+ // Read data.
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_STD_REF, dset_data);
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(ATTRIBUTENAME + "[" + indx + "]:");
+ System.out.print(" ->");
+ // Open the referenced object, get its name and type.
+ try {
+ object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ try {
+ object_type =
+ H5.H5Rget_obj_type3(dset_data[indx], HDF5Constants.H5R_OBJECT);
+ String obj_name = null;
+ if (object_type >= 0) {
+ // Get the name.
+ obj_name = H5.H5Iget_name(object_id);
+ }
+ if ((object_id >= 0) && (object_type >= -1)) {
+ switch (H5G_obj.get(object_type)) {
+ case H5G_GROUP:
+ System.out.print("H5G_GROUP");
+ break;
+ case H5G_DATASET:
+ System.out.print("H5G_DATASET");
+ break;
+ case H5G_TYPE:
+ System.out.print("H5G_TYPE");
+ break;
+ default:
+ System.out.print("UNHANDLED");
+ }
+ }
+ // Print the name.
+ System.out.println(": " + obj_name);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Oclose(object_id);
+ }
+ catch (Exception e) {
+ }
+ }
+ }
+ catch (Exception e5) {
+ e5.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e5) {
+ }
+ }
+ } // end for
+ }
+ catch (Exception e4) {
+ e4.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e3) {
+ }
+ }
+ }
+ catch (Exception e3) {
+ e3.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e4) {
+ }
+ }
+ }
+ catch (Exception e2) {
+ e2.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e2) {
+ }
+ }
+ }
+ catch (Exception e1) {
+ e1.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e1) {
+ }
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_ObjectReferenceAttribute.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the attribute has the same name and rank, but can have any size.
+        // Therefore we allocate a new Java array to read the data into.
+ H5Ex_T_ObjectReferenceAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_Opaque.java b/HDF5Examples/JAVA/H5T/H5Ex_T_Opaque.java
new file mode 100644
index 0000000..c9628d6
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_Opaque.java
@@ -0,0 +1,266 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write opaque datatypes
+ to a dataset. The program first writes opaque data to a
+ dataset with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_Opaque {
+ private static String FILENAME = "H5Ex_T_Opaque.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int LEN = 7;
+ private static final int RANK = 1;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[] dset_data = new byte[DIM0 * LEN];
+ byte[] str_data = {'O', 'P', 'A', 'Q', 'U', 'E'};
+
+ // Initialize data.
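+        // Each element holds the characters "OPAQUE" followed by the element index as an ASCII digit.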
+ for (int indx = 0; indx < DIM0; indx++) {
+ for (int jndx = 0; jndx < LEN - 1; jndx++)
+ dset_data[jndx + indx * LEN] = str_data[jndx];
+ dset_data[LEN - 1 + indx * LEN] = (byte)(indx + '0');
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create opaque datatype and set the tag to something appropriate.
+ // For this example we will write and view the data as a character
+ // array.
+ try {
+ datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN);
+ if (datatype_id >= 0)
+ H5.H5Tset_tag(datatype_id, "Character array");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Create the dataset and write the opaque data to it using the
+        // opaque datatype created above.
+ try {
+ if ((file_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, datatype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the opaque data to the dataset.
+ try {
+ if ((dataset_id >= 0) && (datatype_id >= 0))
+ H5.H5Dwrite(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+        long type_len     = -1;
+ long[] dims = {DIM0};
+ byte[] dset_data;
+ String tag_name = null;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Get the datatype and its properties.
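+        // The opaque type's size and tag are needed to size the read buffer and label the output.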
+ try {
+ if (dataset_id >= 0)
+ datatype_id = H5.H5Dget_type(dataset_id);
+ if (datatype_id >= 0) {
+ type_len = H5.H5Tget_size(datatype_id);
+ tag_name = H5.H5Tget_tag(datatype_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate buffer.
+ dset_data = new byte[(int)(dims[0] * type_len)];
+
+ // Read data.
+ try {
+ if ((dataset_id >= 0) && (datatype_id >= 0))
+ H5.H5Dread(dataset_id, datatype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Datatype tag for " + DATASETNAME + " is: \"" + tag_name + "\"");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(DATASETNAME + "[" + indx + "]: ");
+ for (int jndx = 0; jndx < type_len; jndx++) {
+ char temp = (char)dset_data[jndx + indx * (int)type_len];
+ System.out.print(temp);
+ }
+ System.out.println("");
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_Opaque.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the dataset has the same name and rank, but can have any size.
+        // Therefore we allocate a new Java array to read the data into.
+ H5Ex_T_Opaque.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_OpaqueAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_OpaqueAttribute.java
new file mode 100644
index 0000000..02f7bd5
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_OpaqueAttribute.java
@@ -0,0 +1,303 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write opaque datatypes
+ to an attribute. The program first writes opaque data to
+ an attribute with a dataspace of DIM0, then closes the
+ file. Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_OpaqueAttribute {
+ private static String FILENAME = "H5Ex_T_OpaqueAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int LEN = 7;
+ private static final int RANK = 1;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[] dset_data = new byte[DIM0 * LEN];
+ byte[] str_data = {'O', 'P', 'A', 'Q', 'U', 'E'};
+
+ // Initialize data.
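+        // Each element is the string "OPAQUE" terminated by the element index as an ASCII digit.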
+ for (int indx = 0; indx < DIM0; indx++) {
+ for (int jndx = 0; jndx < LEN - 1; jndx++)
+ dset_data[jndx + indx * LEN] = str_data[jndx];
+ dset_data[LEN - 1 + indx * LEN] = (byte)(indx + '0');
+ }
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create opaque datatype and set the tag to something appropriate.
+ // For this example we will write and view the data as a character
+ // array.
+ try {
+ datatype_id = H5.H5Tcreate(HDF5Constants.H5T_OPAQUE, (long)LEN);
+ if (datatype_id >= 0)
+ H5.H5Tset_tag(datatype_id, "Character array");
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute and write the array data to it.
+ try {
+ if ((dataset_id >= 0) && (datatype_id >= 0) && (dataspace_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, datatype_id, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Write the opaque data to the attribute.
+ try {
+ if ((attribute_id >= 0) && (datatype_id >= 0))
+ H5.H5Awrite(attribute_id, datatype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // End access to the attribute and dataset and release the resources used by them.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long datatype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long type_len = -1;
+ long[] dims = {DIM0};
+ byte[] dset_data;
+ String tag_name = null;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Get the datatype and its properties (size and tag).
+ try {
+ if (attribute_id >= 0)
+ datatype_id = H5.H5Aget_type(attribute_id);
+ if (datatype_id >= 0) {
+ type_len = H5.H5Tget_size(datatype_id);
+ tag_name = H5.H5Tget_tag(datatype_id);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate buffer.
+ dset_data = new byte[(int)(dims[0] * type_len)];
+
+ // Read data.
+ try {
+ if ((attribute_id >= 0) && (datatype_id >= 0))
+ H5.H5Aread(attribute_id, datatype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ System.out.println("Datatype tag for " + ATTRIBUTENAME + " is: \"" + tag_name + "\"");
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.print(ATTRIBUTENAME + "[" + indx + "]: ");
+ for (int jndx = 0; jndx < type_len; jndx++) {
+ char temp = (char)dset_data[jndx + indx * (int)type_len];
+ System.out.print(temp);
+ }
+ System.out.println("");
+ }
+ System.out.println();
+
+        // End access to the attribute and dataset and release the resources used by them.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (datatype_id >= 0)
+ H5.H5Tclose(datatype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_OpaqueAttribute.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the dataset has the same name and rank, but can have any size.
+        // Therefore we must allocate a new array of the correct size to
+        // read the data back in.
+ H5Ex_T_OpaqueAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_RegionReference.java b/HDF5Examples/JAVA/H5T/H5Ex_T_RegionReference.java
new file mode 100644
index 0000000..0c7f6ab
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_RegionReference.java
@@ -0,0 +1,315 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write region references
+ to a dataset. The program first creates a dataset of
+ character data, then writes references to a point selection
+ and a hyperslab selection in that dataset to a second
+ dataset with a dataspace of DIM0, and closes the file.
+ Next, it reopens the file, dereferences the region
+ references, and outputs the name of the referenced dataset
+ and the selected characters to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_RegionReference {
+ private static String FILENAME = "H5Ex_T_RegionReference.h5";
+ private static String DATASETNAME = "DS1";
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int DS2DIM0 = 3;
+ private static final int DS2DIM1 = 16;
+ private static final int RANK = 1;
+
+ private static void writeRegRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] dims2 = {DS2DIM0, DS2DIM1};
+ // data buffer for writing region reference
+ byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
+ // data buffer for writing dataset
+ byte[][] write_data = new byte[DS2DIM0][DS2DIM1];
+ StringBuffer[] str_data = {new StringBuffer("The quick brown"), new StringBuffer("fox jumps over "),
+ new StringBuffer("the 5 lazy dogs")};
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with character data.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2, null);
+ if ((file_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I8LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ for (int indx = 0; indx < DS2DIM0; indx++) {
+ for (int jndx = 0; jndx < DS2DIM1; jndx++) {
+ if (jndx < str_data[indx].length())
+ write_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
+ else
+ write_data[indx][jndx] = 0;
+ }
+ }
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_CHAR, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, write_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create reference to a list of elements in dset2.
+ try {
+ long[][] coords = {{0, 1}, {2, 11}, {1, 0}, {2, 4}};
+
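+            // Each pair is a (row, column) index into DS2; the four points
+            // select the characters 'h', 'd', 'f', and '5'.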
+ H5.H5Sselect_elements(dataspace_id, HDF5Constants.H5S_SELECT_SET, 4, coords);
+ if (file_id >= 0)
+ dset_data[0] =
+ H5.H5Rcreate_region(file_id, DATASETNAME2, dataspace_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+
+ // Create reference to a hyperslab in dset2.
+ try {
+ long[] start = {0, 0}; // Starting location of hyperslab
+ long[] stride = {2, 11}; // Stride of hyperslab
+ long[] count = {2, 2}; // Element count of hyperslab
+ long[] block = {1, 3}; // Block size of hyperslab
+
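+            // Rows 0 and 2, columns 0-2 and 11-13: the blocks cover the
+            // substrings "The", "row", "the", and "dog" in DS2.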
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+            if (file_id >= 0)
+                dset_data[1] =
+                    H5.H5Rcreate_region(file_id, DATASETNAME2, dataspace_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ }
+
+ // Create the dataset and write the region references to it.
+ try {
+ dataspace_id = H5.H5Screate_simple(1, dims, null);
+ if ((file_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_REF, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5.H5Rdestroy(dset_data[0]);
+ }
+ catch (Exception ex) {
+ }
+
+ try {
+ H5.H5Rdestroy(dset_data[1]);
+ }
+ catch (Exception ex) {
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readRegRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long region_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
+ StringBuffer str_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+
+ // Open an existing dataset.
+ try {
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+
+ try {
+ // Get dataspace and allocate memory for read buffer.
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+
+ // Read data.
+ H5.H5Dread(dataset_id, HDF5Constants.H5T_STD_REF, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, dset_data);
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + "[" + indx + "]:");
+ System.out.print(" ->");
+ // Open the referenced object.
+ try {
+ object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ try {
+ String obj_name = H5.H5Iget_name(object_id);
+
+ region_id = H5.H5Ropen_region(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if ((object_id >= 0) && (region_id >= 0)) {
+ try {
+ long reg_npoints = H5.H5Sget_select_npoints(region_id);
+ long[] dims2 = new long[1];
+                                    dims2[0] = reg_npoints;
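+                                    // Size a 1-D memory space to the number of
+                                    // points in the referenced selection.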
+ dataspace_id = H5.H5Screate_simple(1, dims2, null);
+
+ // Read data.
+ byte[] refbuf = new byte[(int)reg_npoints + 1];
+ H5.H5Dread(object_id, HDF5Constants.H5T_NATIVE_CHAR, dataspace_id,
+ region_id, HDF5Constants.H5P_DEFAULT, refbuf);
+ refbuf[(int)reg_npoints] = 0;
+ str_data = new StringBuffer(new String(refbuf).trim());
+
+ System.out.println(" " + obj_name + ": " + str_data);
+ }
+ catch (Throwable err2) {
+ err2.printStackTrace();
+ }
+ }
+ }
+ catch (Throwable err1) {
+ err1.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Sclose(region_id);
+ }
+ catch (Exception ex) {
+ }
+ }
+ }
+ catch (Throwable err0) {
+ err0.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Dclose(object_id);
+ }
+ catch (Exception ex) {
+ }
+ }
+ try {
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e4) {
+ }
+ } // end for
+ }
+ catch (Exception e4) {
+ e4.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Sclose(dataspace_id);
+ for (int indx = 0; indx < dims[0]; indx++)
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e4) {
+ }
+ }
+ }
+ catch (Exception e3) {
+ e3.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e3) {
+ }
+ }
+ }
+ catch (Exception e2) {
+ e2.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e2) {
+ }
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_RegionReference.writeRegRef();
+ H5Ex_T_RegionReference.readRegRef();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_RegionReferenceAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_RegionReferenceAttribute.java
new file mode 100644
index 0000000..f09f15f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_RegionReferenceAttribute.java
@@ -0,0 +1,340 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write region references
+ to an attribute. The program first creates a dataset of
+ character data, then writes references to a point selection
+ and a hyperslab selection in that dataset to an attribute
+ with a dataspace of DIM0, and closes the file. Next, it
+ reopens the file, dereferences the region references, and
+ outputs the name of the referenced dataset and the selected
+ characters to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_RegionReferenceAttribute {
+ private static String FILENAME = "H5Ex_T_RegionReferenceAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static String DATASETNAME2 = "DS2";
+ private static String GROUPNAME = "G1";
+ private static final int DIM0 = 2;
+ private static final int DS2DIM0 = 3;
+ private static final int DS2DIM1 = 16;
+ private static final int RANK = 1;
+
+ private static void writeRegRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long filespace_id = HDF5Constants.H5I_INVALID_HID;
+ long group_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ long[] dims2 = {DS2DIM0, DS2DIM1};
+ // data buffer for writing region reference
+ byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
+ // data buffer for writing dataset
+ byte[][] write_data = new byte[DS2DIM0][DS2DIM1];
+ StringBuffer[] str_data = {new StringBuffer("The quick brown"), new StringBuffer("fox jumps over "),
+ new StringBuffer("the 5 lazy dogs")};
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with character data.
+ try {
+ dataspace_id = H5.H5Screate_simple(2, dims2, null);
+ if ((file_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME2, HDF5Constants.H5T_STD_I8LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ for (int indx = 0; indx < DS2DIM0; indx++) {
+ for (int jndx = 0; jndx < DS2DIM1; jndx++) {
+ if (jndx < str_data[indx].length())
+ write_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
+ else
+ write_data[indx][jndx] = 0;
+ }
+ }
+ H5.H5Dwrite(dataset_id, HDF5Constants.H5T_NATIVE_CHAR, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5S_ALL, HDF5Constants.H5P_DEFAULT, write_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create reference to a list of elements in dset2.
+ try {
+ long[][] coords = {{0, 1}, {2, 11}, {1, 0}, {2, 4}};
+
+ H5.H5Sselect_elements(dataspace_id, HDF5Constants.H5S_SELECT_SET, 4, coords);
+ if (file_id >= 0)
+ dset_data[0] =
+ H5.H5Rcreate_region(file_id, DATASETNAME2, dataspace_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception ex) {
+ ex.printStackTrace();
+ }
+
+ // Create reference to a hyperslab in dset2.
+ try {
+ long[] start = {0, 0}; // Starting location of hyperslab
+ long[] stride = {2, 11}; // Stride of hyperslab
+ long[] count = {2, 2}; // Element count of hyperslab
+ long[] block = {1, 3}; // Block size of hyperslab
+
+ H5.H5Sselect_hyperslab(dataspace_id, HDF5Constants.H5S_SELECT_SET, start, stride, count, block);
+            if (file_id >= 0)
+                dset_data[1] =
+                    H5.H5Rcreate_region(file_id, DATASETNAME2, dataspace_id, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ }
+
+ // Create dataset with a null dataspace to serve as the parent for the attribute.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_NULL);
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ }
+
+ // Create the attribute and write the region references to it.
+ try {
+ dataspace_id = H5.H5Screate_simple(1, dims, null);
+        if ((file_id >= 0) && (dataspace_id >= 0)) {
+ attribute_id = H5.H5Acreate(file_id, ATTRIBUTENAME, HDF5Constants.H5T_STD_REF, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ H5.H5Awrite(attribute_id, HDF5Constants.H5T_STD_REF, dset_data);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5.H5Rdestroy(dset_data[0]);
+ }
+ catch (Exception ex) {
+ }
+
+ try {
+ H5.H5Rdestroy(dset_data[1]);
+ }
+ catch (Exception ex) {
+ }
+
+        // End access to the attribute and dataset and release the resources used by them.
+ try {
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ }
+
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (filespace_id >= 0)
+ H5.H5Sclose(filespace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readRegRef()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ int object_type = -1;
+ long object_id = HDF5Constants.H5I_INVALID_HID;
+ long region_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][HDF5Constants.H5R_REF_BUF_SIZE];
+ StringBuffer str_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+
+ // Open an existing attribute.
+ try {
+ attribute_id = H5.H5Aopen(file_id, ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT);
+
+ try {
+ // Get dataspace and allocate memory for read buffer.
+ dataspace_id = H5.H5Aget_space(attribute_id);
+                    H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+
+ // Read data.
+ H5.H5Aread(attribute_id, HDF5Constants.H5T_STD_REF, dset_data);
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+                        System.out.println(ATTRIBUTENAME + "[" + indx + "]:");
+ System.out.print(" ->");
+ // Open the referenced object.
+ try {
+ object_id = H5.H5Ropen_object(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ try {
+ String obj_name = H5.H5Iget_name(object_id);
+
+ region_id = H5.H5Ropen_region(dset_data[indx], HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ if ((object_id >= 0) && (region_id >= 0)) {
+ try {
+ long reg_npoints = H5.H5Sget_select_npoints(region_id);
+ long[] dims2 = new long[1];
+                                    dims2[0] = reg_npoints;
+ dataspace_id = H5.H5Screate_simple(1, dims2, null);
+
+ // Read data.
+ byte[] refbuf = new byte[(int)reg_npoints + 1];
+ H5.H5Dread(object_id, HDF5Constants.H5T_NATIVE_CHAR, dataspace_id,
+ region_id, HDF5Constants.H5P_DEFAULT, refbuf);
+ refbuf[(int)reg_npoints] = 0;
+ str_data = new StringBuffer(new String(refbuf).trim());
+
+ System.out.println(" " + obj_name + ": " + str_data);
+ }
+ catch (Throwable err2) {
+ err2.printStackTrace();
+ }
+ }
+ }
+ catch (Throwable err1) {
+ err1.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Sclose(region_id);
+ }
+ catch (Exception ex) {
+ }
+ }
+ }
+ catch (Throwable err0) {
+ err0.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Dclose(object_id);
+ }
+ catch (Exception ex) {
+ }
+ }
+ try {
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e4) {
+ }
+ } // end for
+ }
+ catch (Exception e4) {
+ e4.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Sclose(dataspace_id);
+ for (int indx = 0; indx < dims[0]; indx++)
+ H5.H5Rdestroy(dset_data[indx]);
+ }
+ catch (Exception e4) {
+ }
+ }
+ }
+ catch (Exception e3) {
+ e3.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e3) {
+ }
+ }
+ }
+ catch (Exception e2) {
+ e2.printStackTrace();
+ }
+ finally {
+ try {
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e2) {
+ }
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_RegionReferenceAttribute.writeRegRef();
+ H5Ex_T_RegionReferenceAttribute.readRegRef();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_String.java b/HDF5Examples/JAVA/H5T/H5Ex_T_String.java
new file mode 100644
index 0000000..fa16464
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_String.java
@@ -0,0 +1,307 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write string datatypes
+ to a dataset. The program first writes strings to a
+ dataset with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_String {
+ private static String FILENAME = "H5Ex_T_String.h5";
+ private static String DATASETNAME = "DS1";
+ private static final int DIM0 = 4;
+ private static final int SDIM = 8;
+ private static final int RANK = 1;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][SDIM];
+ StringBuffer[] str_data = {new StringBuffer("Parting"), new StringBuffer("is such"),
+ new StringBuffer("sweet"), new StringBuffer("sorrow.")};
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create file and memory datatypes. For this example we will save
+ // the strings as FORTRAN strings, therefore they do not need space
+ // for the null terminator in the file.
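+    // (H5T_FORTRAN_S1 is space padded, so a file size of SDIM - 1 bytes
+    // holds the longest string, while the C-style memory type below keeps
+    // one extra byte for the terminating null.)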
+ try {
+ filetype_id = H5.H5Tcopy(HDF5Constants.H5T_FORTRAN_S1);
+ if (filetype_id >= 0)
+ H5.H5Tset_size(filetype_id, SDIM - 1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (memtype_id >= 0)
+ H5.H5Tset_size(memtype_id, SDIM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset and write the string data to it.
+ try {
+ if ((file_id >= 0) && (filetype_id >= 0) && (dataspace_id >= 0))
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, filetype_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ for (int indx = 0; indx < DIM0; indx++) {
+ for (int jndx = 0; jndx < SDIM; jndx++) {
+ if (jndx < str_data[indx].length())
+ dset_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
+ else
+ dset_data[indx][jndx] = 0;
+ }
+ }
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dwrite(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long sdim = 0;
+ long[] dims = {DIM0};
+ byte[][] dset_data;
+ StringBuffer[] str_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype and its size.
+ try {
+ if (dataset_id >= 0)
+ filetype_id = H5.H5Dget_type(dataset_id);
+ if (filetype_id >= 0) {
+ sdim = H5.H5Tget_size(filetype_id);
+ sdim++; // Make room for null terminator
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (dataset_id >= 0)
+ dataspace_id = H5.H5Dget_space(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate space for data.
+ dset_data = new byte[(int)dims[0]][(int)sdim];
+ str_data = new StringBuffer[(int)dims[0]];
+
+ // Create the memory datatype.
+ try {
+ memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (memtype_id >= 0)
+ H5.H5Tset_size(memtype_id, sdim);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read data.
+ try {
+ if ((dataset_id >= 0) && (memtype_id >= 0))
+ H5.H5Dread(dataset_id, memtype_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, dset_data);
+ byte[] tempbuf = new byte[(int)sdim];
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
+ for (int jndx = 0; jndx < sdim; jndx++) {
+ tempbuf[jndx] = dset_data[indx][jndx];
+ }
+ str_data[indx] = new StringBuffer(new String(tempbuf).trim());
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
+ }
+ System.out.println();
+
+ // End access to the dataset and release resources used by it.
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_String.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the dataset has the same name and rank, but can have any size.
+        // Therefore we must allocate a new array of the correct size to
+        // read the data back in.
+ H5Ex_T_String.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_StringAttribute.java b/HDF5Examples/JAVA/H5T/H5Ex_T_StringAttribute.java
new file mode 100644
index 0000000..62f4aa0
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_StringAttribute.java
@@ -0,0 +1,347 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to read and write string datatypes
+ to an attribute. The program first writes strings to an
+ attribute with a dataspace of DIM0, then closes the file.
+ Next, it reopens the file, reads back the data, and
+ outputs it to the screen.
+ ************************************************************/
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_StringAttribute {
+ private static String FILENAME = "H5Ex_T_StringAttribute.h5";
+ private static String DATASETNAME = "DS1";
+ private static String ATTRIBUTENAME = "A1";
+ private static final int DIM0 = 4;
+ private static final int SDIM = 8;
+ private static final int RANK = 1;
+
+ private static void CreateDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long[] dims = {DIM0};
+ byte[][] dset_data = new byte[DIM0][SDIM];
+ StringBuffer[] str_data = {new StringBuffer("Parting"), new StringBuffer("is such"),
+ new StringBuffer("sweet"), new StringBuffer("sorrow.")};
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create file and memory datatypes. For this example we will save
+ // the strings as FORTRAN strings, therefore they do not need space
+ // for the null terminator in the file.
+ try {
+ filetype_id = H5.H5Tcopy(HDF5Constants.H5T_FORTRAN_S1);
+ if (filetype_id >= 0)
+ H5.H5Tset_size(filetype_id, SDIM - 1);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ try {
+ memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (memtype_id >= 0)
+ H5.H5Tset_size(memtype_id, SDIM);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataset with a scalar dataspace.
+ try {
+ dataspace_id = H5.H5Screate(HDF5Constants.H5S_SCALAR);
+ if (dataspace_id >= 0) {
+ dataset_id = H5.H5Dcreate(file_id, DATASETNAME, HDF5Constants.H5T_STD_I32LE, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ H5.H5Sclose(dataspace_id);
+ dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(RANK, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the attribute.
+ try {
+ if ((dataset_id >= 0) && (dataspace_id >= 0) && (filetype_id >= 0))
+ attribute_id = H5.H5Acreate(dataset_id, ATTRIBUTENAME, filetype_id, dataspace_id,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // Pack the strings into fixed-size, null-padded buffers and write them to the attribute.
+ try {
+ for (int indx = 0; indx < DIM0; indx++) {
+ for (int jndx = 0; jndx < SDIM; jndx++) {
+ if (jndx < str_data[indx].length())
+ dset_data[indx][jndx] = (byte)str_data[indx].charAt(jndx);
+ else
+ dset_data[indx][jndx] = 0;
+ }
+ }
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Awrite(attribute_id, memtype_id, dset_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+        // End access to the attribute and dataset and release the resources used by them.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void ReadDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long filetype_id = HDF5Constants.H5I_INVALID_HID;
+ long memtype_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ long attribute_id = HDF5Constants.H5I_INVALID_HID;
+ long sdim = 0;
+ long[] dims = {DIM0};
+ byte[][] dset_data;
+ StringBuffer[] str_data;
+
+ // Open an existing file.
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Open an existing dataset.
+ try {
+ if (file_id >= 0)
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ attribute_id = H5.H5Aopen_by_name(dataset_id, ".", ATTRIBUTENAME, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get the datatype and its size.
+ try {
+ if (attribute_id >= 0)
+ filetype_id = H5.H5Aget_type(attribute_id);
+ if (filetype_id >= 0) {
+ sdim = H5.H5Tget_size(filetype_id);
+ sdim++; // Make room for null terminator
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Get dataspace and allocate memory for read buffer.
+ try {
+ if (attribute_id >= 0)
+ dataspace_id = H5.H5Aget_space(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sget_simple_extent_dims(dataspace_id, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Allocate space for data.
+ dset_data = new byte[(int)dims[0]][(int)sdim];
+ str_data = new StringBuffer[(int)dims[0]];
+
+ // Create the memory datatype.
+ try {
+ memtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ if (memtype_id >= 0)
+ H5.H5Tset_size(memtype_id, sdim);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Read data.
+ try {
+ if ((attribute_id >= 0) && (memtype_id >= 0))
+ H5.H5Aread(attribute_id, memtype_id, dset_data);
+ byte[] tempbuf = new byte[(int)sdim];
+ for (int indx = 0; indx < (int)dims[0]; indx++) {
+ for (int jndx = 0; jndx < sdim; jndx++) {
+ tempbuf[jndx] = dset_data[indx][jndx];
+ }
+ str_data[indx] = new StringBuffer(new String(tempbuf).trim());
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Output the data to the screen.
+ for (int indx = 0; indx < dims[0]; indx++) {
+ System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
+ }
+ System.out.println();
+
+        // End access to the attribute and dataset and release the resources used by them.
+ try {
+ if (attribute_id >= 0)
+ H5.H5Aclose(attribute_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ if (dataset_id >= 0)
+ H5.H5Dclose(dataset_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the data space.
+ try {
+ if (dataspace_id >= 0)
+ H5.H5Sclose(dataspace_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the file type.
+ try {
+ if (filetype_id >= 0)
+ H5.H5Tclose(filetype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Terminate access to the mem type.
+ try {
+ if (memtype_id >= 0)
+ H5.H5Tclose(memtype_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Close the file.
+ try {
+ if (file_id >= 0)
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_StringAttribute.CreateDataset();
+        // Now we begin the read section of this example. Here we assume
+        // the dataset has the same name and rank, but can have any size.
+        // Therefore we must allocate a new array of the correct size to
+        // read the data back in.
+ H5Ex_T_StringAttribute.ReadDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/H5Ex_T_VLString.java b/HDF5Examples/JAVA/H5T/H5Ex_T_VLString.java
new file mode 100644
index 0000000..d4d74e7
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/H5Ex_T_VLString.java
@@ -0,0 +1,135 @@
+/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
+ * Copyright by The HDF Group. *
+ * All rights reserved. *
+ * *
+ * This file is part of HDF5. The full HDF5 copyright notice, including *
+ * terms governing use, modification, and redistribution, is contained in *
+ * the COPYING file, which can be found at the root of the source code *
+ * distribution tree, or in https://www.hdfgroup.org/licenses. *
+ * If you do not have access to either file, you may request a copy from *
+ * help@hdfgroup.org. *
+ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
+
+/************************************************************
+ This example shows how to create and write a dataset of
+ variable-length strings, then read the strings back and
+ output them to the screen.
+ ************************************************************/
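+
+/* Expected console output, derived from the str_data values written in
+   createDataset():
+
+       DS1 [0]: Parting
+       DS1 [1]: is such
+       DS1 [2]: sweet
+       DS1 [3]: sorrow.
+ */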
+
+import hdf.hdf5lib.H5;
+import hdf.hdf5lib.HDF5Constants;
+
+public class H5Ex_T_VLString {
+ private static String FILENAME = "H5Ex_T_VLString.h5";
+ private static String DATASETNAME = "DS1";
+
+ private static void createDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long type_id = HDF5Constants.H5I_INVALID_HID;
+ long dataspace_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ int rank = 1;
+ String[] str_data = {"Parting", "is such", "sweet", "sorrow."};
+ long[] dims = {str_data.length};
+
+ // Create a new file using default properties.
+ try {
+ file_id = H5.H5Fcreate(FILENAME, HDF5Constants.H5F_ACC_TRUNC, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
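+        // Create a variable-length string datatype: copy the C string type
+        // and set its size to H5T_VARIABLE so each element can differ in length.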
+ try {
+ type_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
+ H5.H5Tset_size(type_id, HDF5Constants.H5T_VARIABLE);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create dataspace. Setting maximum size to NULL sets the maximum
+ // size to be the current size.
+ try {
+ dataspace_id = H5.H5Screate_simple(rank, dims, null);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Create the dataset and write the string data to it.
+ try {
+ if ((file_id >= 0) && (type_id >= 0) && (dataspace_id >= 0)) {
+ dataset_id =
+ H5.H5Dcreate(file_id, DATASETNAME, type_id, dataspace_id, HDF5Constants.H5P_DEFAULT,
+ HDF5Constants.H5P_DEFAULT, HDF5Constants.H5P_DEFAULT);
+ }
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // Write the data to the dataset.
+ try {
+ if (dataset_id >= 0)
+ H5.H5DwriteVL(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ H5.H5Sclose(dataspace_id);
+ H5.H5Tclose(type_id);
+ H5.H5Dclose(dataset_id);
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ private static void readDataset()
+ {
+ long file_id = HDF5Constants.H5I_INVALID_HID;
+ long type_id = HDF5Constants.H5I_INVALID_HID;
+ long dataset_id = HDF5Constants.H5I_INVALID_HID;
+ String[] str_data = {"", "", "", ""};
+
+ try {
+ file_id = H5.H5Fopen(FILENAME, HDF5Constants.H5F_ACC_RDONLY, HDF5Constants.H5P_DEFAULT);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ try {
+ dataset_id = H5.H5Dopen(file_id, DATASETNAME, HDF5Constants.H5P_DEFAULT);
+ type_id = H5.H5Dget_type(dataset_id);
+ H5.H5DreadVL(dataset_id, type_id, HDF5Constants.H5S_ALL, HDF5Constants.H5S_ALL,
+ HDF5Constants.H5P_DEFAULT, str_data);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ for (int indx = 0; indx < str_data.length; indx++)
+ System.out.println(DATASETNAME + " [" + indx + "]: " + str_data[indx]);
+
+ try {
+ H5.H5Tclose(type_id);
+ H5.H5Dclose(dataset_id);
+ H5.H5Fclose(file_id);
+ }
+ catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public static void main(String[] args)
+ {
+ H5Ex_T_VLString.createDataset();
+ H5Ex_T_VLString.readDataset();
+ }
+}
diff --git a/HDF5Examples/JAVA/H5T/JavaDatatypeExample.sh.in b/HDF5Examples/JAVA/H5T/JavaDatatypeExample.sh.in
new file mode 100644
index 0000000..7683798
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/JavaDatatypeExample.sh.in
@@ -0,0 +1,447 @@
+#! /bin/sh
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
+
+top_builddir=@top_builddir@
+top_srcdir=@top_srcdir@
+srcdir=@srcdir@
+IS_DARWIN="@H5_IS_DARWIN@"
+
+TESTNAME=EX_Datatypes
+EXIT_SUCCESS=0
+EXIT_FAILURE=1
+
+# Set up default variable values if not supplied by the user.
+RM='rm -rf'
+CMP='cmp'
+DIFF='diff -c'
+CP='cp'
+DIRNAME='dirname'
+BASENAME='basename'
+LS='ls'
+AWK='awk'
+
+nerrors=0
+
+# where the libs exist
+HDFLIB_HOME="$top_srcdir/java/lib"
+BLDDIR="."
+BLDLIBDIR="$BLDDIR/testlibs"
+HDFTEST_HOME="$top_srcdir/java/examples/datatypes"
+JARFILE=jar@PACKAGE_TARNAME@-@PACKAGE_VERSION@.jar
+TESTJARFILE=jar@PACKAGE_TARNAME@datatypes.jar
+test -d $BLDLIBDIR || mkdir -p $BLDLIBDIR
+
+######################################################################
+# library files
+# --------------------------------------------------------------------
+# All the library files are copied from the source directory to the test
+# directory.
+# NOTE: Keep this framework when adding/removing test files.
+#       This list is also used to check that the files exist.
+#       Lines beginning with '#' (no leading space) are treated as comments.
+# --------------------------------------------------------------------
+LIST_LIBRARY_FILES="
+$top_builddir/src/.libs/libhdf5.*
+$top_builddir/java/src/jni/.libs/libhdf5_java.*
+$top_builddir/java/src/$JARFILE
+"
+LIST_DATA_FILES="
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_Array.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_ArrayAttribute.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_Bit.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_BitAttribute.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_Commit.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_Compound.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_CompoundAttribute.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_Float.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_FloatAttribute.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_Integer.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_IntegerAttribute.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_ObjectReference.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_Opaque.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_OpaqueAttribute.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_String.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_StringAttribute.txt
+$HDFTEST_HOME/../tfiles/examples.datatypes.H5Ex_T_VLString.txt
+"
+
+#
+# copy files from source dirs to test dir
+#
+COPY_LIBFILES="$LIST_LIBRARY_FILES"
+COPY_JARTESTFILES="$LIST_JAR_TESTFILES"
+
+COPY_LIBFILES_TO_BLDLIBDIR()
+{
+    # Copy the library files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_LIBFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+          # Skip cp if srcdir is the same as destdir; this occurs when the
+          # build/test is performed in the source directory, where the cp
+          # would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -fR $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+                # Comment out the exit below to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ BNAME=`$BASENAME $tstfile`
+ if [ "$BNAME" = "libhdf5_java.dylib" ]; then
+ COPIED_LIBHDF5_JAVA=1
+ fi
+ fi
+ fi
+ done
+ if [[ "$IS_DARWIN" = "yes" ]] && [[ $COPIED_LIBHDF5_JAVA -eq 1 ]]; then
+ (cd $BLDLIBDIR; \
+ install_name_tool -add_rpath @loader_path libhdf5_java.dylib; \
+ exist_path=` otool -l libhdf5_java.dylib | grep libhdf5 | grep -v java | awk '{print $2}'`; \
+ echo $exist_path; \
+ install_name_tool -change $exist_path @rpath/libhdf5.dylib libhdf5_java.dylib)
+ fi
+    # Copy the jar files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_JARTESTFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+          # Skip cp if srcdir is the same as destdir; this occurs when the
+          # build/test is performed in the source directory, where the cp
+          # would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -fR $tstfile $BLDLIBDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+                # Comment out the exit below to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_LIBFILES_AND_BLDLIBDIR()
+{
+    # Skip rm if srcdir is the same as destdir; this occurs when the
+    # build/test is performed in the source directory, where the rm
+    # would remove the source files.
+ SDIR=$HDFLIB_HOME
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDLIBDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $RM -rf $BLDLIBDIR
+ fi
+}
+
+COPY_DATAFILES="$LIST_DATA_FILES"
+
+COPY_DATAFILES_TO_BLDDIR()
+{
+    # Copy the expected-output data files. Use -f to make sure we get a fresh copy.
+ for tstfile in $COPY_DATAFILES
+ do
+ # ignore '#' comment
+ echo $tstfile | tr -d ' ' | grep '^#' > /dev/null
+ RET=$?
+ if [ $RET -eq 1 ]; then
+          # Skip cp if srcdir is the same as destdir; this occurs when the
+          # build/test is performed in the source directory, where the cp
+          # would fail.
+ SDIR=`$DIRNAME $tstfile`
+ INODE_SDIR=`$LS -i -d $SDIR | $AWK -F' ' '{print $1}'`
+ INODE_DDIR=`$LS -i -d $BLDDIR | $AWK -F' ' '{print $1}'`
+ if [ "$INODE_SDIR" != "$INODE_DDIR" ]; then
+ $CP -f $tstfile $BLDDIR
+ if [ $? -ne 0 ]; then
+ echo "Error: FAILED to copy $tstfile ."
+
+                # Comment out the exit below to CREATE the expected file
+ exit $EXIT_FAILURE
+ fi
+ fi
+ fi
+ done
+}
+
+CLEAN_DATAFILES_AND_BLDDIR()
+{
+ $RM $BLDDIR/examples.datatypes.H5Ex_T_*.txt
+ $RM $BLDDIR/H5Ex_T_*.out
+ $RM $BLDDIR/H5Ex_T_*.h5
+}
+
+# Print a one-line message left justified in a field of 70 characters
+# beginning with the word "Testing".
+#
+TESTING() {
+ SPACES=" "
+ echo "Testing $* $SPACES" | cut -c1-70 | tr -d '\012'
+}
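+# For example, `TESTING examples.datatypes.H5Ex_T_Array` prints the line
+# "Testing examples.datatypes.H5Ex_T_Array" padded to 70 columns with no
+# trailing newline, so the PASSED/FAILED result can be appended to it.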
+
+# where Java is installed (requires jdk1.7.x)
+JAVAEXE=@JAVA@
+JAVAEXEFLAGS=@H5_JAVAFLAGS@
+
+###############################################################################
+# DO NOT MODIFY BELOW THIS LINE
+###############################################################################
+
+# prepare for test
+COPY_LIBFILES_TO_BLDLIBDIR
+COPY_DATAFILES_TO_BLDDIR
+
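+# The class path holds the current directory, the copied HDF5 Java jar, and
+# the jar containing these example classes.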
+CPATH=".:"$BLDLIBDIR"/"$JARFILE":"$TESTJARFILE""
+
+TEST=/usr/bin/test
+if [ ! -x /usr/bin/test ]
+then
+    TEST=`which test`
+fi
+
+if $TEST -z "$CLASSPATH"; then
+ CLASSPATH=""
+fi
+CLASSPATH=$CPATH":"$CLASSPATH
+export CLASSPATH
+
+if $TEST -n "$JAVAPATH" ; then
+ PATH=$JAVAPATH":"$PATH
+ export PATH
+fi
+
+if $TEST -e /bin/uname; then
+ os_name=`/bin/uname -s`
+elif $TEST -e /usr/bin/uname; then
+ os_name=`/usr/bin/uname -s`
+else
+ os_name=unknown
+fi
+
+if $TEST -z "$LD_LIBRARY_PATH" ; then
+ LD_LIBRARY_PATH=""
+fi
+
+case $os_name in
+ *)
+ LD_LIBRARY_PATH=$BLDLIBDIR:$LD_LIBRARY_PATH
+ ;;
+esac
+
+export LD_LIBRARY_PATH
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Array"
+TESTING examples.datatypes.H5Ex_T_Array
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Array > H5Ex_T_Array.out)
+if diff H5Ex_T_Array.out examples.datatypes.H5Ex_T_Array.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Array"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Array"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ArrayAttribute"
+TESTING examples.datatypes.H5Ex_T_ArrayAttribute
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ArrayAttribute > H5Ex_T_ArrayAttribute.out)
+if diff H5Ex_T_ArrayAttribute.out examples.datatypes.H5Ex_T_ArrayAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_ArrayAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_ArrayAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Bit"
+TESTING examples.datatypes.H5Ex_T_Bit
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Bit > H5Ex_T_Bit.out)
+if diff H5Ex_T_Bit.out examples.datatypes.H5Ex_T_Bit.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Bit"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Bit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_BitAttribute"
+TESTING examples.datatypes.H5Ex_T_BitAttribute
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_BitAttribute > H5Ex_T_BitAttribute.out)
+if diff H5Ex_T_BitAttribute.out examples.datatypes.H5Ex_T_BitAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_BitAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_BitAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Commit"
+TESTING examples.datatypes.H5Ex_T_Commit
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Commit > H5Ex_T_Commit.out)
+if diff H5Ex_T_Commit.out examples.datatypes.H5Ex_T_Commit.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Commit"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Commit"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Compound"
+TESTING examples.datatypes.H5Ex_T_Compound
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Compound > H5Ex_T_Compound.out)
+if diff H5Ex_T_Compound.out examples.datatypes.H5Ex_T_Compound.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Compound"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Compound"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_CompoundAttribute"
+TESTING examples.datatypes.H5Ex_T_CompoundAttribute
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_CompoundAttribute > H5Ex_T_CompoundAttribute.out)
+if diff H5Ex_T_CompoundAttribute.out examples.datatypes.H5Ex_T_CompoundAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_CompoundAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_CompoundAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Float"
+TESTING examples.datatypes.H5Ex_T_Float
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Float > H5Ex_T_Float.out)
+if diff H5Ex_T_Float.out examples.datatypes.H5Ex_T_Float.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Float"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Float"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_FloatAttribute"
+TESTING examples.datatypes.H5Ex_T_FloatAttribute
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_FloatAttribute > H5Ex_T_FloatAttribute.out)
+if diff H5Ex_T_FloatAttribute.out examples.datatypes.H5Ex_T_FloatAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_FloatAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_FloatAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Integer"
+TESTING examples.datatypes.H5Ex_T_Integer
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Integer > H5Ex_T_Integer.out)
+if diff H5Ex_T_Integer.out examples.datatypes.H5Ex_T_Integer.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Integer"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Integer"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_IntegerAttribute"
+TESTING examples.datatypes.H5Ex_T_IntegerAttribute
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_IntegerAttribute > H5Ex_T_IntegerAttribute.out)
+if diff H5Ex_T_IntegerAttribute.out examples.datatypes.H5Ex_T_IntegerAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_IntegerAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_IntegerAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReference"
+TESTING examples.datatypes.H5Ex_T_ObjectReference
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReference > H5Ex_T_ObjectReference.out)
+if diff H5Ex_T_ObjectReference.out examples.datatypes.H5Ex_T_ObjectReference.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_ObjectReference"
+else
+ echo "**FAILED** datatypes.H5Ex_T_ObjectReference"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReferenceAttribute"
+TESTING examples.datatypes.H5Ex_T_ObjectReferenceAttribute
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_ObjectReferenceAttribute > H5Ex_T_ObjectReferenceAttribute.out)
+if diff H5Ex_T_ObjectReferenceAttribute.out examples.datatypes.H5Ex_T_ObjectReferenceAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_ObjectReferenceAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_ObjectReferenceAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Opaque"
+TESTING examples.datatypes.H5Ex_T_Opaque
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_Opaque > H5Ex_T_Opaque.out)
+if diff H5Ex_T_Opaque.out examples.datatypes.H5Ex_T_Opaque.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_Opaque"
+else
+ echo "**FAILED** datatypes.H5Ex_T_Opaque"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_OpaqueAttribute"
+TESTING examples.datatypes.H5Ex_T_OpaqueAttribute
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_OpaqueAttribute > H5Ex_T_OpaqueAttribute.out)
+if diff H5Ex_T_OpaqueAttribute.out examples.datatypes.H5Ex_T_OpaqueAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_OpaqueAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_OpaqueAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_String"
+TESTING examples.datatypes.H5Ex_T_String
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_String > H5Ex_T_String.out)
+if diff H5Ex_T_String.out examples.datatypes.H5Ex_T_String.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_String"
+else
+ echo "**FAILED** datatypes.H5Ex_T_String"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_StringAttribute"
+TESTING examples.datatypes.H5Ex_T_StringAttribute
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_StringAttribute > H5Ex_T_StringAttribute.out)
+if diff H5Ex_T_StringAttribute.out examples.datatypes.H5Ex_T_StringAttribute.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_StringAttribute"
+else
+ echo "**FAILED** datatypes.H5Ex_T_StringAttribute"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_VLString"
+TESTING examples.datatypes.H5Ex_T_VLString
+($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH examples.datatypes.H5Ex_T_VLString > H5Ex_T_VLString.out)
+if diff H5Ex_T_VLString.out examples.datatypes.H5Ex_T_VLString.txt > /dev/null; then
+ echo " PASSED datatypes.H5Ex_T_VLString"
+else
+ echo "**FAILED** datatypes.H5Ex_T_VLString"
+ nerrors="`expr $nerrors + 1`"
+fi
+
+# Clean up temporary files/directories
+CLEAN_LIBFILES_AND_BLDLIBDIR
+CLEAN_DATAFILES_AND_BLDDIR
+
+# Report test results and exit
+if test $nerrors -eq 0 ; then
+ echo "All $TESTNAME tests passed."
+ exit $EXIT_SUCCESS
+else
+ echo "$TESTNAME tests failed with $nerrors errors."
+ exit $EXIT_FAILURE
+fi
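
Each test above inlines the same run/diff/report sequence: launch the example class with the JNI library path and classpath, capture stdout, diff it against the expected .txt fixture, and bump nerrors on mismatch. A minimal sketch of that pattern factored into a helper function — the function name is illustrative, not part of the script, and it relies on the variables and the TESTING macro defined earlier in the file:

    # Sketch only: behaves like one inlined test block above.
    # Assumes JAVAEXE, JAVAEXEFLAGS, BLDLIBDIR, CLASSPATH, RUNSERIAL,
    # nerrors, and the TESTING macro are already set up by this script.
    run_datatype_example() {
        example="examples.datatypes.$1"
        echo "$JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH $example"
        TESTING $example
        ($RUNSERIAL $JAVAEXE $JAVAEXEFLAGS -Xmx1024M -Djava.library.path=$BLDLIBDIR -cp $CLASSPATH $example > "$1.out")
        if diff "$1.out" "$example.txt" > /dev/null; then
            echo "    PASSED datatypes.$1"
        else
            echo "**FAILED** datatypes.$1"
            nerrors="`expr $nerrors + 1`"
        fi
    }

    run_datatype_example H5Ex_T_Bit
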
diff --git a/HDF5Examples/JAVA/H5T/Java_sourcefiles.cmake b/HDF5Examples/JAVA/H5T/Java_sourcefiles.cmake
new file mode 100644
index 0000000..43e90c2
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/Java_sourcefiles.cmake
@@ -0,0 +1,36 @@
+#-----------------------------------------------------------------------------
+# Define Sources, one file per application
+#-----------------------------------------------------------------------------
+set (HDF_JAVA_EXAMPLES
+ H5Ex_T_Array.java
+ H5Ex_T_ArrayAttribute.java
+ H5Ex_T_Bit.java
+ H5Ex_T_BitAttribute.java
+ H5Ex_T_Commit.java
+ H5Ex_T_Compound.java
+ H5Ex_T_CompoundAttribute.java
+ H5Ex_T_Float.java
+ H5Ex_T_FloatAttribute.java
+ H5Ex_T_Integer.java
+ H5Ex_T_IntegerAttribute.java
+ H5Ex_T_Opaque.java
+ H5Ex_T_OpaqueAttribute.java
+ H5Ex_T_String.java
+ H5Ex_T_StringAttribute.java
+ H5Ex_T_VLString.java
+)
+if (${H5_LIBVER_DIR} GREATER 18)
+ if (${H5_LIBVER_DIR} EQUAL 110)
+ set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES}
+ 110/H5Ex_T_ObjectReference.java
+ 110/H5Ex_T_ObjectReferenceAttribute.java
+ )
+ else ()
+ set (HDF_JAVA_EXAMPLES ${HDF_JAVA_EXAMPLES}
+ H5Ex_T_ObjectReference.java
+ H5Ex_T_ObjectReferenceAttribute.java
+ H5Ex_T_RegionReference.java
+ H5Ex_T_RegionReferenceAttribute.java
+ )
+ endif ()
+endif ()
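
The version gate above controls which reference examples are compiled: H5_LIBVER_DIR values of 18 or lower (the 1.8 line) skip the reference examples entirely, 110 picks up the 1.10-specific variants under the 110/ subdirectory, and anything newer additionally builds the region-reference examples. A hedged sketch of exercising the gate at configure time — in the real build H5_LIBVER_DIR may be derived internally from the detected library version rather than passed on the command line:

    # Assumption: overriding H5_LIBVER_DIR as a CMake cache entry.
    cmake -DH5_LIBVER_DIR=110 -B build -S .   # selects the 110/ ObjectReference variants
    cmake -DH5_LIBVER_DIR=114 -B build -S .   # adds the RegionReference examples
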
diff --git a/HDF5Examples/JAVA/H5T/Makefile.am b/HDF5Examples/JAVA/H5T/Makefile.am
new file mode 100644
index 0000000..2e744ab
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/Makefile.am
@@ -0,0 +1,75 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+# Mark this directory as part of the JNI API
+JAVA_API=yes
+
+JAVAROOT = .classes
+
+classes:
+ test -d $(@D)/$(JAVAROOT) || $(MKDIR_P) $(@D)/$(JAVAROOT)
+
+pkgpath = examples/datatypes
+hdfjarfile = jar$(PACKAGE_TARNAME)-$(PACKAGE_VERSION).jar
+CLASSPATH_ENV=CLASSPATH=.:$(JAVAROOT):$(top_builddir)/java/src/$(hdfjarfile):$$CLASSPATH
+
+jarfile = jar$(PACKAGE_TARNAME)datatypes.jar
+
+AM_JAVACFLAGS = $(H5_JAVACFLAGS) -deprecation
+
+TESTPACKAGE =
+
+noinst_JAVA = \
+ H5Ex_T_Array.java \
+ H5Ex_T_ArrayAttribute.java \
+ H5Ex_T_Bit.java \
+ H5Ex_T_BitAttribute.java \
+ H5Ex_T_Commit.java \
+ H5Ex_T_Compound.java \
+ H5Ex_T_CompoundAttribute.java \
+ H5Ex_T_Float.java \
+ H5Ex_T_FloatAttribute.java \
+ H5Ex_T_Integer.java \
+ H5Ex_T_IntegerAttribute.java \
+ H5Ex_T_ObjectReference.java \
+ H5Ex_T_ObjectReferenceAttribute.java \
+ H5Ex_T_Opaque.java \
+ H5Ex_T_OpaqueAttribute.java \
+ H5Ex_T_String.java \
+ H5Ex_T_StringAttribute.java \
+ H5Ex_T_VLString.java
+
+$(jarfile): classnoinst.stamp classes
+ $(JAR) cvf $@ -C $(JAVAROOT)/ $(pkgpath)
+
+noinst_DATA = $(jarfile)
+
+.PHONY: classes
+
+check_SCRIPTS = JavaDatatypeExample.sh
+TEST_SCRIPT = $(check_SCRIPTS)
+
+CLEANFILES = classnoinst.stamp $(jarfile) $(JAVAROOT)/$(pkgpath)/*.class JavaDatatypeExample.sh
+
+clean:
+ rm -rf $(JAVAROOT)/*
+ rm -f $(jarfile)
+ rm -f classnoinst.stamp
+
+include $(top_srcdir)/config/conclude.am
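
With check_SCRIPTS and TEST_SCRIPT wired up as above, the usual automake entry points build the example jar and drive the generated test script. A short usage sketch, assuming an in-tree automake build and that config/conclude.am hooks TEST_SCRIPT into the check target as elsewhere in the HDF5 build:

    # From the configured build tree:
    cd HDF5Examples/JAVA/H5T
    make          # compiles the examples into .classes and packages the datatypes jar
    make check    # generates JavaDatatypeExample.sh and runs the datatype tests
    make clean    # removes the class files, the jar, and the stamp file
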
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Array.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Array.txt
new file mode 100644
index 0000000..7bcd8fa
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Array.txt
@@ -0,0 +1,21 @@
+DS1 [0]:
+ [0 0 0 0 0 ]
+ [0 -1 -2 -3 -4 ]
+ [0 -2 -4 -6 -8 ]
+
+DS1 [1]:
+ [0 1 2 3 4 ]
+ [1 1 1 1 1 ]
+ [2 1 0 -1 -2 ]
+
+DS1 [2]:
+ [0 2 4 6 8 ]
+ [2 3 4 5 6 ]
+ [4 4 4 4 4 ]
+
+DS1 [3]:
+ [0 3 6 9 12 ]
+ [3 5 7 9 11 ]
+ [6 7 8 9 10 ]
+
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ArrayAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ArrayAttribute.txt
new file mode 100644
index 0000000..7d27c0b
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ArrayAttribute.txt
@@ -0,0 +1,21 @@
+A1 [0]:
+ [0 0 0 0 0 ]
+ [0 -1 -2 -3 -4 ]
+ [0 -2 -4 -6 -8 ]
+
+A1 [1]:
+ [0 1 2 3 4 ]
+ [1 1 1 1 1 ]
+ [2 1 0 -1 -2 ]
+
+A1 [2]:
+ [0 2 4 6 8 ]
+ [2 3 4 5 6 ]
+ [4 4 4 4 4 ]
+
+A1 [3]:
+ [0 3 6 9 12 ]
+ [3 5 7 9 11 ]
+ [6 7 8 9 10 ]
+
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Bit.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Bit.txt
new file mode 100644
index 0000000..57769b2
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Bit.txt
@@ -0,0 +1,6 @@
+DS1:
+ [{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}{1, 0, 3, 3}{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_BitAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_BitAttribute.txt
new file mode 100644
index 0000000..683bc7f
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_BitAttribute.txt
@@ -0,0 +1,6 @@
+A1:
+ [{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}{1, 0, 3, 3}{0, 0, 0, 0}{3, 0, 1, 1}{2, 0, 2, 2}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+ [{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}{0, 0, 0, 0}]
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Commit.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Commit.txt
new file mode 100644
index 0000000..e6d0bef
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Commit.txt
@@ -0,0 +1,6 @@
+Named datatype: Sensor_Type:
+ Class: H5T_COMPOUND
+ Serial number
+ Location
+ Temperature (F)
+ Pressure (inHg)
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Compound.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Compound.txt
new file mode 100644
index 0000000..0505c78
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Compound.txt
@@ -0,0 +1,25 @@
+DS1 [0]:
+Serial number : 1153
+Location : Exterior (static)
+Temperature (F) : 53.23
+Pressure (inHg) : 24.57
+
+DS1 [1]:
+Serial number : 1184
+Location : Intake
+Temperature (F) : 55.12
+Pressure (inHg) : 22.95
+
+DS1 [2]:
+Serial number : 1027
+Location : Intake manifold
+Temperature (F) : 103.55
+Pressure (inHg) : 31.23
+
+DS1 [3]:
+Serial number : 1313
+Location : Exhaust manifold
+Temperature (F) : 1252.89
+Pressure (inHg) : 84.11
+
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_CompoundAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_CompoundAttribute.txt
new file mode 100644
index 0000000..dd77f8d
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_CompoundAttribute.txt
@@ -0,0 +1,25 @@
+A1 [0]:
+Serial number : 1153
+Location : Exterior (static)
+Temperature (F) : 53.23
+Pressure (inHg) : 24.57
+
+A1 [1]:
+Serial number : 1184
+Location : Intake
+Temperature (F) : 55.12
+Pressure (inHg) : 22.95
+
+A1 [2]:
+Serial number : 1027
+Location : Intake manifold
+Temperature (F) : 103.55
+Pressure (inHg) : 31.23
+
+A1 [3]:
+Serial number : 1313
+Location : Exhaust manifold
+Temperature (F) : 1252.89
+Pressure (inHg) : 84.11
+
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Float.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Float.txt
new file mode 100644
index 0000000..85d8ced
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Float.txt
@@ -0,0 +1,6 @@
+DS1:
+ [ 0.0000 1.0000 2.0000 3.0000 4.0000 5.0000 6.0000]
+ [ 2.0000 1.6667 2.4000 3.2857 4.2222 5.1818 6.1538]
+ [ 4.0000 2.3333 2.8000 3.5714 4.4444 5.3636 6.3077]
+ [ 6.0000 3.0000 3.2000 3.8571 4.6667 5.5455 6.4615]
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_FloatAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_FloatAttribute.txt
new file mode 100644
index 0000000..cfa1f92
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_FloatAttribute.txt
@@ -0,0 +1,6 @@
+A1:
+ [ 0.0000 1.0000 2.0000 3.0000 4.0000 5.0000 6.0000]
+ [ 2.0000 1.6667 2.4000 3.2857 4.2222 5.1818 6.1538]
+ [ 4.0000 2.3333 2.8000 3.5714 4.4444 5.3636 6.3077]
+ [ 6.0000 3.0000 3.2000 3.8571 4.6667 5.5455 6.4615]
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Integer.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Integer.txt
new file mode 100644
index 0000000..f686bd1
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Integer.txt
@@ -0,0 +1,6 @@
+DS1:
+ [ 0 -1 -2 -3 -4 -5 -6]
+ [ 0 0 0 0 0 0 0]
+ [ 0 1 2 3 4 5 6]
+ [ 0 2 4 6 8 10 12]
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_IntegerAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_IntegerAttribute.txt
new file mode 100644
index 0000000..dccd4a6
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_IntegerAttribute.txt
@@ -0,0 +1,6 @@
+A1:
+ [ 0 -1 -2 -3 -4 -5 -6]
+ [ 0 0 0 0 0 0 0]
+ [ 0 1 2 3 4 5 6]
+ [ 0 2 4 6 8 10 12]
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ObjectReference.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ObjectReference.txt
new file mode 100644
index 0000000..d8afa56
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ObjectReference.txt
@@ -0,0 +1,4 @@
+DS1[0]:
+ ->H5G_GROUP: /G1
+DS1[1]:
+ ->H5G_DATASET: /DS2
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ObjectReferenceAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ObjectReferenceAttribute.txt
new file mode 100644
index 0000000..3fabd66
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_ObjectReferenceAttribute.txt
@@ -0,0 +1,4 @@
+A1[0]:
+ ->H5G_GROUP: /G1
+A1[1]:
+ ->H5G_DATASET: /DS2
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Opaque.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Opaque.txt
new file mode 100644
index 0000000..fb74236
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_Opaque.txt
@@ -0,0 +1,6 @@
+Datatype tag for DS1 is: "Character array"
+DS1[0]: OPAQUE0
+DS1[1]: OPAQUE1
+DS1[2]: OPAQUE2
+DS1[3]: OPAQUE3
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_OpaqueAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_OpaqueAttribute.txt
new file mode 100644
index 0000000..bc9a730
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_OpaqueAttribute.txt
@@ -0,0 +1,6 @@
+Datatype tag for A1 is: "Character array"
+A1[0]: OPAQUE0
+A1[1]: OPAQUE1
+A1[2]: OPAQUE2
+A1[3]: OPAQUE3
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_RegionReference.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_RegionReference.txt
new file mode 100644
index 0000000..63c1f9e
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_RegionReference.txt
@@ -0,0 +1,4 @@
+DS1[0]:
+ -> /DS2: hdf5
+DS1[1]:
+ -> /DS2: Therowthedog
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_RegionReferenceAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_RegionReferenceAttribute.txt
new file mode 100644
index 0000000..d50fc76
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_RegionReferenceAttribute.txt
@@ -0,0 +1,4 @@
+A1[0]:
+ -> /DS2: hdf5
+A1[1]:
+ -> /DS2: Therowthedog
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_String.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_String.txt
new file mode 100644
index 0000000..4df6a41
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_String.txt
@@ -0,0 +1,5 @@
+DS1 [0]: Parting
+DS1 [1]: is such
+DS1 [2]: sweet
+DS1 [3]: sorrow.
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_StringAttribute.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_StringAttribute.txt
new file mode 100644
index 0000000..4df6a41
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_StringAttribute.txt
@@ -0,0 +1,5 @@
+DS1 [0]: Parting
+DS1 [1]: is such
+DS1 [2]: sweet
+DS1 [3]: sorrow.
+
diff --git a/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_VLString.txt b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_VLString.txt
new file mode 100644
index 0000000..0322953
--- /dev/null
+++ b/HDF5Examples/JAVA/H5T/tfiles/110/H5Ex_T_VLString.txt
@@ -0,0 +1,4 @@
+DS1 [0]: Parting
+DS1 [1]: is such
+DS1 [2]: sweet
+DS1 [3]: sorrow.
diff --git a/HDF5Examples/JAVA/Makefile.am b/HDF5Examples/JAVA/Makefile.am
new file mode 100644
index 0000000..f34394d
--- /dev/null
+++ b/HDF5Examples/JAVA/Makefile.am
@@ -0,0 +1,28 @@
+#
+# Copyright by The HDF Group.
+# All rights reserved.
+#
+# This file is part of HDF5. The full HDF5 copyright notice, including
+# terms governing use, modification, and redistribution, is contained in
+# the COPYING file, which can be found at the root of the source code
+# distribution tree, or in https://www.hdfgroup.org/licenses.
+# If you do not have access to either file, you may request a copy from
+# help@hdfgroup.org.
+#
+#
+# This makefile mostly just reinvokes make in the various subdirectories
+# but does so in the correct order. You can alternatively invoke make from
+# each subdirectory manually.
+##
+## Makefile.am
+## Run automake to generate a Makefile.in from this file.
+##
+#
+# HDF5 Java Library Examples Makefile(.in)
+
+include $(top_srcdir)/config/commence.am
+
+## Only recurse into subdirectories if the Java (JNI) interface is enabled.
+SUBDIRS = H5J H5G H5D H5T
+
+include $(top_srcdir)/config/conclude.am
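
As the makefile's own comment notes, the subdirectories can also be built one at a time; the only constraint is keeping the order of the SUBDIRS line. A minimal manual equivalent:

    # Manual equivalent of the recursive build, run from HDF5Examples/JAVA;
    # order follows the SUBDIRS line above.
    for d in H5J H5G H5D H5T; do
        (cd "$d" && make)
    done
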